gt
stringclasses
1 value
context
stringlengths
2.05k
161k
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.facebook.presto.hive;

import com.facebook.presto.common.predicate.Domain;
import com.facebook.presto.common.predicate.NullableValue;
import com.facebook.presto.common.predicate.TupleDomain;
import com.facebook.presto.common.type.CharType;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.TypeManager;
import com.facebook.presto.common.type.VarcharType;
import com.facebook.presto.hive.HiveBucketing.HiveBucketFilter;
import com.facebook.presto.hive.metastore.Column;
import com.facebook.presto.hive.metastore.SemiTransactionalHiveMetastore;
import com.facebook.presto.hive.metastore.Table;
import com.facebook.presto.spi.ColumnHandle;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.ConnectorTableHandle;
import com.facebook.presto.spi.Constraint;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.TableNotFoundException;
import com.google.common.base.Predicates;
import com.google.common.base.VerifyException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Maps;
import org.joda.time.DateTimeZone;

import javax.inject.Inject;

import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;

import static com.facebook.presto.hive.HiveBucketing.getHiveBucketFilter;
import static com.facebook.presto.hive.HiveBucketing.getHiveBucketHandle;
import static com.facebook.presto.hive.HiveColumnHandle.BUCKET_COLUMN_NAME;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_EXCEEDED_PARTITION_LIMIT;
import static com.facebook.presto.hive.HiveSessionProperties.getMaxBucketsForGroupedExecution;
import static com.facebook.presto.hive.HiveSessionProperties.getMinBucketCountToNotIgnoreTableBucketing;
import static com.facebook.presto.hive.HiveSessionProperties.isOfflineDataDebugModeEnabled;
import static com.facebook.presto.hive.HiveSessionProperties.shouldIgnoreTableBucketing;
import static com.facebook.presto.hive.HiveUtil.getPartitionKeyColumnHandles;
import static com.facebook.presto.hive.HiveUtil.parsePartitionValue;
import static com.facebook.presto.hive.metastore.MetastoreUtil.extractPartitionValues;
import static com.facebook.presto.hive.metastore.MetastoreUtil.getProtectMode;
import static com.facebook.presto.hive.metastore.MetastoreUtil.makePartName;
import static com.facebook.presto.hive.metastore.MetastoreUtil.verifyOnline;
import static com.facebook.presto.hive.metastore.PrestoTableType.TEMPORARY_TABLE;
import static com.facebook.presto.spi.Constraint.alwaysTrue;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Predicates.not;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;

/**
 * Resolves the set of Hive partitions that a table scan must read, given the
 * predicate (Constraint) pushed down from the engine. Partition pruning happens
 * in two stages: a metastore-side name filter ({@link #getFilteredPartitionNames})
 * followed by a client-side re-check of each parsed partition against the full
 * constraint ({@link #parseValuesAndFilterPartition}). Also decides whether the
 * table's bucketing can be exploited for this query.
 */
public class HivePartitionManager
{
    private final DateTimeZone timeZone;
    private final boolean assumeCanonicalPartitionKeys;
    private final TypeManager typeManager;
    // Hard cap on partitions a single scan may touch; exceeded -> query fails fast.
    private final int maxPartitionsPerScan;
    // Threshold passed to TupleDomain.compact(); constructor enforces >= 1.
    private final int domainCompactionThreshold;

    @Inject
    public HivePartitionManager(
            TypeManager typeManager,
            HiveClientConfig hiveClientConfig)
    {
        this(
                typeManager,
                hiveClientConfig.getDateTimeZone(),
                hiveClientConfig.isAssumeCanonicalPartitionKeys(),
                hiveClientConfig.getMaxPartitionsPerScan(),
                hiveClientConfig.getDomainCompactionThreshold());
    }

    public HivePartitionManager(
            TypeManager typeManager,
            DateTimeZone timeZone,
            boolean assumeCanonicalPartitionKeys,
            int maxPartitionsPerScan,
            int domainCompactionThreshold)
    {
        this.timeZone = requireNonNull(timeZone, "timeZone is null");
        this.assumeCanonicalPartitionKeys = assumeCanonicalPartitionKeys;
        this.typeManager = requireNonNull(typeManager, "typeManager is null");
        this.maxPartitionsPerScan = maxPartitionsPerScan;
        checkArgument(domainCompactionThreshold >= 1, "domainCompactionThreshold must be at least 1");
        this.domainCompactionThreshold = domainCompactionThreshold;
    }

    /**
     * Returns a lazy {@link Iterable} over the partitions matching {@code constraint}.
     * For an unpartitioned table this is a single synthetic partition. Otherwise the
     * metastore is queried (each time {@code iterator()} is called) for partition names
     * matching the partition-column domains, and each name is then re-filtered
     * client-side against the full constraint.
     */
    public Iterable<HivePartition> getPartitionsIterator(
            SemiTransactionalHiveMetastore metastore,
            ConnectorTableHandle tableHandle,
            Constraint<ColumnHandle> constraint,
            ConnectorSession session)
    {
        HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
        TupleDomain<ColumnHandle> effectivePredicateColumnHandles = constraint.getSummary();

        SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
        Table table = getTable(metastore, tableName, isOfflineDataDebugModeEnabled(session));

        List<HiveColumnHandle> partitionColumns = getPartitionKeyColumnHandles(table);
        List<Type> partitionTypes = partitionColumns.stream()
                .map(column -> typeManager.getType(column.getTypeSignature()))
                .collect(toList());

        // Restrict the predicate to partition columns only; this is what the metastore filter understands.
        Map<Column, Domain> effectivePredicate = createPartitionPredicates(
                effectivePredicateColumnHandles,
                partitionColumns,
                assumeCanonicalPartitionKeys);

        if (partitionColumns.isEmpty()) {
            // Unpartitioned table: model it as one partition covering the whole table.
            return ImmutableList.of(new HivePartition(tableName));
        }
        else {
            return () -> {
                List<String> filteredPartitionNames = getFilteredPartitionNames(metastore, tableName, effectivePredicate);
                return filteredPartitionNames.stream()
                        // Apply extra filters which could not be done by getFilteredPartitionNames
                        .map(partitionName -> parseValuesAndFilterPartition(tableName, partitionName, partitionColumns, partitionTypes, constraint))
                        .filter(Optional::isPresent)
                        .map(Optional::get)
                        .iterator();
            };
        }
    }

    /**
     * Projects the column-handle predicate onto the table's partition columns, producing
     * the per-column domains sent to the metastore. When partition values may not be in
     * canonical form ({@code assumeCanonicalPartitionKeys == false}), only string-typed
     * (varchar/char) domains are kept; all other types are widened to {@code Domain.all}
     * so the metastore does not filter on possibly non-canonical representations.
     * Returns an empty map when the predicate has no domains (i.e. is "none").
     */
    private Map<Column, Domain> createPartitionPredicates(
            TupleDomain<ColumnHandle> effectivePredicateColumnHandles,
            List<HiveColumnHandle> partitionColumns,
            boolean assumeCanonicalPartitionKeys)
    {
        Optional<Map<ColumnHandle, Domain>> domains = effectivePredicateColumnHandles.getDomains();
        if (domains.isPresent()) {
            Map<ColumnHandle, Domain> columnHandleDomainMap = domains.get();
            ImmutableMap.Builder<Column, Domain> partitionPredicateBuilder = ImmutableMap.builder();
            for (HiveColumnHandle partitionColumn : partitionColumns) {
                Column key = new Column(partitionColumn.getName(), partitionColumn.getHiveType(), partitionColumn.getComment());
                if (columnHandleDomainMap.containsKey(partitionColumn)) {
                    if (assumeCanonicalPartitionKeys) {
                        partitionPredicateBuilder.put(key, columnHandleDomainMap.get(partitionColumn));
                    }
                    else {
                        Type type = typeManager.getType(partitionColumn.getTypeSignature());
                        if (type instanceof VarcharType || type instanceof CharType) {
                            // String comparison is safe even for non-canonical values.
                            partitionPredicateBuilder.put(key, columnHandleDomainMap.get(partitionColumn));
                        }
                        else {
                            // Cannot trust non-canonical non-string values; don't filter on this column.
                            Domain allDomain = Domain.all(typeManager.getType(partitionColumn.getTypeSignature()));
                            partitionPredicateBuilder.put(key, allDomain);
                        }
                    }
                }
                else {
                    Domain allDomain = Domain.all(typeManager.getType(partitionColumn.getTypeSignature()));
                    partitionPredicateBuilder.put(key, allDomain);
                }
            }
            return partitionPredicateBuilder.build();
        }
        else {
            return ImmutableMap.of();
        }
    }

    /**
     * Materializes the matching partitions (bounded by maxPartitionsPerScan) and packages
     * them with the split of the predicate into enforced (partition-column) and remaining
     * parts, plus the bucket handle/filter if bucketed execution is worthwhile.
     */
    public HivePartitionResult getPartitions(SemiTransactionalHiveMetastore metastore, ConnectorTableHandle tableHandle, Constraint<ColumnHandle> constraint, ConnectorSession session)
    {
        HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
        TupleDomain<ColumnHandle> effectivePredicate = constraint.getSummary();

        SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
        Table table = getTable(metastore, tableName, isOfflineDataDebugModeEnabled(session));

        List<HiveColumnHandle> partitionColumns = getPartitionKeyColumnHandles(table);

        List<HivePartition> partitions = getPartitionsAsList(getPartitionsIterator(metastore, tableHandle, constraint, session).iterator());

        Optional<HiveBucketHandle> hiveBucketHandle = getBucketHandle(table, session);
        Optional<HiveBucketFilter> bucketFilter = hiveBucketHandle.flatMap(value -> getHiveBucketFilter(table, effectivePredicate));

        // Drop bucketing (unless the query explicitly uses the synthetic $bucket column)
        // when the total bucket count across partitions exceeds the grouped-execution limit.
        if (!queryUsesHiveBucketColumn(effectivePredicate)
                && hiveBucketHandle.isPresent()
                && queryAccessesTooManyBuckets(hiveBucketHandle.get(), bucketFilter, partitions, session)) {
            hiveBucketHandle = Optional.empty();
            bucketFilter = Optional.empty();
        }

        if (effectivePredicate.isNone()) {
            return new HivePartitionResult(
                    partitionColumns,
                    table.getDataColumns(),
                    table.getParameters(),
                    partitions,
                    TupleDomain.none(),
                    TupleDomain.none(),
                    TupleDomain.none(),
                    hiveBucketHandle,
                    Optional.empty());
        }

        TupleDomain<ColumnHandle> compactEffectivePredicate = effectivePredicate.compact(domainCompactionThreshold);

        if (partitionColumns.isEmpty()) {
            return new HivePartitionResult(
                    partitionColumns,
                    table.getDataColumns(),
                    table.getParameters(),
                    partitions,
                    compactEffectivePredicate,
                    effectivePredicate,
                    TupleDomain.all(),
                    hiveBucketHandle,
                    bucketFilter);
        }

        // All partition key domains will be fully evaluated, so we don't need to include those
        TupleDomain<ColumnHandle> remainingTupleDomain = TupleDomain.withColumnDomains(Maps.filterKeys(effectivePredicate.getDomains().get(), not(Predicates.in(partitionColumns))));
        TupleDomain<ColumnHandle> enforcedTupleDomain = TupleDomain.withColumnDomains(Maps.filterKeys(effectivePredicate.getDomains().get(), Predicates.in(partitionColumns)));
        return new HivePartitionResult(
                partitionColumns,
                table.getDataColumns(),
                table.getParameters(),
                partitions,
                compactEffectivePredicate,
                remainingTupleDomain,
                enforcedTupleDomain,
                hiveBucketHandle,
                bucketFilter);
    }

    /**
     * Returns the table's bucket handle, or empty when bucketing should be ignored for
     * this session (explicitly disabled, or bucket count below the session minimum).
     */
    private Optional<HiveBucketHandle> getBucketHandle(
            Table table,
            ConnectorSession session)
    {
        // never ignore table bucketing for temporary tables as those are created such explicitly by the engine request
        if (table.getTableType().equals(TEMPORARY_TABLE)) {
            return getHiveBucketHandle(table);
        }

        Optional<HiveBucketHandle> hiveBucketHandle = getHiveBucketHandle(table);
        if (!hiveBucketHandle.isPresent()) {
            return Optional.empty();
        }

        int requiredTableBucketCount = getMinBucketCountToNotIgnoreTableBucketing(session);
        if (hiveBucketHandle.get().getTableBucketCount() < requiredTableBucketCount) {
            // Too few buckets to be worth bucketed execution for this session.
            return Optional.empty();
        }

        return shouldIgnoreTableBucketing(session) ? Optional.empty() : hiveBucketHandle;
    }

    // True when any predicate domain references the synthetic "$bucket" column.
    private boolean queryUsesHiveBucketColumn(TupleDomain<ColumnHandle> effectivePredicate)
    {
        if (!effectivePredicate.getDomains().isPresent()) {
            return false;
        }
        return effectivePredicate.getDomains().get().keySet().stream().anyMatch(key -> ((HiveColumnHandle) key).getName().equals(BUCKET_COLUMN_NAME));
    }

    // Estimates buckets-read as (buckets kept per partition) x (partition count) and
    // compares against the session's grouped-execution limit.
    private boolean queryAccessesTooManyBuckets(HiveBucketHandle handle, Optional<HiveBucketFilter> filter, List<HivePartition> partitions, ConnectorSession session)
    {
        int bucketsPerPartition = filter.map(hiveBucketFilter -> hiveBucketFilter.getBucketsToKeep().size())
                .orElseGet(handle::getReadBucketCount);
        return bucketsPerPartition * partitions.size() > getMaxBucketsForGroupedExecution(session);
    }

    /**
     * Drains the iterator into a list, throwing HIVE_EXCEEDED_PARTITION_LIMIT as soon as
     * more than {@code maxPartitionsPerScan} partitions are produced.
     */
    private List<HivePartition> getPartitionsAsList(Iterator<HivePartition> partitionsIterator)
    {
        ImmutableList.Builder<HivePartition> partitionList = ImmutableList.builder();
        int count = 0;
        while (partitionsIterator.hasNext()) {
            HivePartition partition = partitionsIterator.next();
            if (count == maxPartitionsPerScan) {
                throw new PrestoException(HIVE_EXCEEDED_PARTITION_LIMIT, format(
                        "Query over table '%s' can potentially read more than %s partitions",
                        partition.getTableName(),
                        maxPartitionsPerScan));
            }
            partitionList.add(partition);
            count++;
        }
        return partitionList.build();
    }

    /**
     * Builds a {@link HivePartitionResult} for an explicit list of partition value tuples
     * (no predicate filtering — every requested partition must exist). Used when the caller
     * already knows exactly which partitions to read.
     */
    public HivePartitionResult getPartitions(SemiTransactionalHiveMetastore metastore, ConnectorTableHandle tableHandle, List<List<String>> partitionValuesList, ConnectorSession session)
    {
        HiveTableHandle hiveTableHandle = (HiveTableHandle) tableHandle;
        SchemaTableName tableName = hiveTableHandle.getSchemaTableName();
        Table table = getTable(metastore, tableName, isOfflineDataDebugModeEnabled(session));

        List<HiveColumnHandle> partitionColumns = getPartitionKeyColumnHandles(table);
        List<Type> partitionColumnTypes = partitionColumns.stream()
                .map(column -> typeManager.getType(column.getTypeSignature()))
                .collect(toImmutableList());

        List<HivePartition> partitionList = partitionValuesList.stream()
                .map(partitionValues -> makePartName(table.getPartitionColumns(), partitionValues))
                .map(partitionName -> parseValuesAndFilterPartition(tableName, partitionName, partitionColumns, partitionColumnTypes, alwaysTrue()))
                .map(partition -> partition.orElseThrow(() -> new VerifyException("partition must exist")))
                .collect(toImmutableList());

        Optional<HiveBucketHandle> bucketHandle = shouldIgnoreTableBucketing(session) ? Optional.empty() : getHiveBucketHandle(table);
        return new HivePartitionResult(
                partitionColumns,
                table.getDataColumns(),
                table.getParameters(),
                partitionList,
                TupleDomain.all(),
                TupleDomain.all(),
                TupleDomain.none(),
                bucketHandle,
                Optional.empty());
    }

    /**
     * Parses a partition name into typed key values and re-checks it against the full
     * constraint (both the domain summary and the optional row predicate).
     * Returns empty when the partition is excluded by the constraint.
     */
    private Optional<HivePartition> parseValuesAndFilterPartition(
            SchemaTableName tableName,
            String partitionId,
            List<HiveColumnHandle> partitionColumns,
            List<Type> partitionColumnTypes,
            Constraint<ColumnHandle> constraint)
    {
        HivePartition partition = parsePartition(tableName, partitionId, partitionColumns, partitionColumnTypes, timeZone);

        Map<ColumnHandle, Domain> domains = constraint.getSummary().getDomains().get();
        for (HiveColumnHandle column : partitionColumns) {
            NullableValue value = partition.getKeys().get(column);
            Domain allowedDomain = domains.get(column);
            if (allowedDomain != null && !allowedDomain.includesNullableValue(value.getValue())) {
                return Optional.empty();
            }
        }

        if (constraint.predicate().isPresent() && !constraint.predicate().get().test(partition.getKeys())) {
            return Optional.empty();
        }

        return Optional.of(partition);
    }

    /**
     * Loads the table from the metastore, failing with {@link TableNotFoundException}
     * when absent. Unless offline-debug mode is on, also verifies the table is online
     * (not marked offline via protect mode / parameters).
     */
    private Table getTable(SemiTransactionalHiveMetastore metastore, SchemaTableName tableName, boolean offlineDataDebugModeEnabled)
    {
        Optional<Table> target = metastore.getTable(tableName.getSchemaName(), tableName.getTableName());
        if (!target.isPresent()) {
            throw new TableNotFoundException(tableName);
        }

        Table table = target.get();

        if (!offlineDataDebugModeEnabled) {
            verifyOnline(tableName, Optional.empty(), getProtectMode(table), table.getParameters());
        }

        return table;
    }

    /**
     * Asks the metastore for partition names matching the partition-column domains.
     * An empty predicate map (predicate was "none") short-circuits to no partitions.
     */
    private List<String> getFilteredPartitionNames(SemiTransactionalHiveMetastore metastore, SchemaTableName tableName, Map<Column, Domain> partitionPredicates)
    {
        if (partitionPredicates.isEmpty()) {
            return ImmutableList.of();
        }

        // fetch the partition names
        return metastore.getPartitionNamesByFilter(tableName.getSchemaName(), tableName.getTableName(), partitionPredicates)
                .orElseThrow(() -> new TableNotFoundException(tableName));
    }

    /**
     * Parses a metastore partition name (e.g. "ds=2020-01-01/type=a") into a
     * {@link HivePartition} with one typed {@link NullableValue} per partition column.
     * Values are positional: the i-th extracted value is parsed with the i-th column type.
     */
    public static HivePartition parsePartition(
            SchemaTableName tableName,
            String partitionName,
            List<HiveColumnHandle> partitionColumns,
            List<Type> partitionColumnTypes,
            DateTimeZone timeZone)
    {
        List<String> partitionValues = extractPartitionValues(partitionName);
        ImmutableMap.Builder<ColumnHandle, NullableValue> builder = ImmutableMap.builder();
        for (int i = 0; i < partitionColumns.size(); i++) {
            HiveColumnHandle column = partitionColumns.get(i);
            NullableValue parsedValue = parsePartitionValue(partitionName, partitionValues.get(i), partitionColumnTypes.get(i), timeZone);
            builder.put(column, parsedValue);
        }
        Map<ColumnHandle, NullableValue> values = builder.build();
        return new HivePartition(tableName, partitionName, values);
    }
}
/*
 * JBoss, Home of Professional Open Source.
 * Copyright 2014 Red Hat, Inc., and individual contributors
 * as indicated by the @author tags.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.undertow.server.handlers.cache;

import static io.undertow.server.handlers.cache.LimitedBufferSlicePool.PooledByteBuffer;

import java.nio.ByteBuffer;
import java.util.HashSet;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicReferenceFieldUpdater;
import java.util.concurrent.ConcurrentHashMap;

import io.undertow.util.ConcurrentDirectDeque;
import org.xnio.BufferAllocator;

/**
 * A non-blocking buffer cache where entries are indexed by a path and are made up of a
 * subsequence of blocks in a fixed large direct buffer. An ideal application is
 * a file system cache, where the path corresponds to a file location.
 *
 * <p>To reduce contention, entry allocation and eviction execute in a sampling
 * fashion (entry hits modulo N). Eviction follows an LRU approach (oldest sampled
 * entries are removed first) when the cache is out of capacity</p>
 *
 * <p>In order to expedite reclamation, cache entries are reference counted as
 * opposed to garbage collected.</p>
 *
 * @author Jason T. Greene
 */
public class DirectBufferCache {
    // Only every Nth hit pays the cost of access-queue maintenance and allocation.
    private static final int SAMPLE_INTERVAL = 5;

    private final LimitedBufferSlicePool pool;
    private final ConcurrentMap<Object, CacheEntry> cache;
    // LRU ordering: entries are appended on (sampled) access, evicted from the head.
    private final ConcurrentDirectDeque<CacheEntry> accessQueue;
    private final int sliceSize;
    // Entry time-to-live in milliseconds; -1 means entries never expire.
    private final int maxAge;

    public DirectBufferCache(int sliceSize, int slicesPerPage, int maxMemory) {
        this(sliceSize, slicesPerPage, maxMemory, BufferAllocator.DIRECT_BYTE_BUFFER_ALLOCATOR);
    }

    public DirectBufferCache(int sliceSize, int slicesPerPage, int maxMemory, final BufferAllocator<ByteBuffer> bufferAllocator) {
        this(sliceSize, slicesPerPage, maxMemory, bufferAllocator, -1);
    }

    public DirectBufferCache(int sliceSize, int slicesPerPage, int maxMemory, final BufferAllocator<ByteBuffer> bufferAllocator, int maxAge) {
        this.sliceSize = sliceSize;
        this.pool = new LimitedBufferSlicePool(bufferAllocator, sliceSize, sliceSize * slicesPerPage, maxMemory / (sliceSize * slicesPerPage));
        this.cache = new ConcurrentHashMap<>(16);
        this.accessQueue = ConcurrentDirectDeque.newInstance();
        this.maxAge = maxAge;
    }

    public CacheEntry add(Object key, int size) {
        return add(key, size, maxAge);
    }

    /**
     * Adds a (not yet enabled, not yet allocated) entry for {@code key}, or returns the
     * existing entry if another thread won the race. Newly inserted entries are bumped
     * onto the access queue immediately.
     */
    public CacheEntry add(Object key, int size, int maxAge) {
        CacheEntry value = cache.get(key);
        if (value == null) {
            value = new CacheEntry(key, size, this, maxAge);
            CacheEntry result = cache.putIfAbsent(key, value);
            if (result != null) {
                value = result;
            } else {
                bumpAccess(value);
            }
        }
        return value;
    }

    /**
     * Looks up an entry, removing it if it has expired. Every SAMPLE_INTERVAL-th hit
     * refreshes the entry's LRU position and, if its buffers are not yet allocated,
     * tries to allocate them — evicting older entries to make room if necessary.
     */
    public CacheEntry get(Object key) {
        CacheEntry cacheEntry = cache.get(key);
        if (cacheEntry == null) {
            return null;
        }

        long expires = cacheEntry.getExpires();
        if (expires != -1) {
            if (System.currentTimeMillis() > expires) {
                remove(key);
                return null;
            }
        }

        if (cacheEntry.hit() % SAMPLE_INTERVAL == 0) {
            bumpAccess(cacheEntry);

            if (!cacheEntry.allocate()) {
                // Try and make room by evicting the least recently (sampled) used entries
                int reclaimSize = cacheEntry.size();
                for (CacheEntry oldest : accessQueue) {
                    if (oldest == cacheEntry) {
                        continue;
                    }
                    if (oldest.buffers().length > 0) {
                        reclaimSize -= oldest.size();
                    }
                    this.remove(oldest.key());
                    if (reclaimSize <= 0) {
                        break;
                    }
                }

                // Maybe lucky?
                cacheEntry.allocate();
            }
        }

        return cacheEntry;
    }

    /**
     * Returns a set of all the keys in the cache. This is a copy of the
     * key set at the time of method invocation.
     *
     * @return all the keys in this cache
     */
    public Set<Object> getAllKeys() {
        return new HashSet<>(cache.keySet());
    }

    // Moves the entry to the tail of the access queue (most recently used). The claim
    // token protocol ensures only one thread repositions a given entry at a time.
    private void bumpAccess(CacheEntry cacheEntry) {
        Object prevToken = cacheEntry.claimToken();
        if (!Boolean.FALSE.equals(prevToken)) {
            if (prevToken != null) {
                accessQueue.removeToken(prevToken);
            }

            Object token = null;
            try {
                token = accessQueue.offerLastAndReturnToken(cacheEntry);
            } catch (Throwable t) {
                // In case of disaster (OOME), we need to release the claim, so leave token as null
            }

            if (!cacheEntry.setToken(token) && token != null) { // Always set if null
                accessQueue.removeToken(token);
            }
        }
    }

    /**
     * Removes the entry for {@code key} (if present), detaches it from the access queue,
     * and releases the removal's reference so the buffers can be freed once all other
     * references are dropped.
     */
    public void remove(Object key) {
        CacheEntry remove = cache.remove(key);
        if (remove != null) {
            Object old = remove.clearToken();
            if (old != null) {
                accessQueue.removeToken(old);
            }
            remove.dereference();
        }
    }

    public static final class CacheEntry {
        // Sentinel meaning "buffers were allocated and have since been released".
        private static final PooledByteBuffer[] EMPTY_BUFFERS = new PooledByteBuffer[0];
        // Sentinel meaning "allocation has not been attempted yet" — distinct identity
        // from EMPTY_BUFFERS so allocate() can CAS on it exactly once.
        private static final PooledByteBuffer[] INIT_BUFFERS = new PooledByteBuffer[0];
        private static final Object CLAIM_TOKEN = new Object();

        private static final AtomicIntegerFieldUpdater<CacheEntry> hitsUpdater = AtomicIntegerFieldUpdater.newUpdater(CacheEntry.class, "hits");
        private static final AtomicIntegerFieldUpdater<CacheEntry> refsUpdater = AtomicIntegerFieldUpdater.newUpdater(CacheEntry.class, "refs");
        private static final AtomicIntegerFieldUpdater<CacheEntry> enabledUpdator = AtomicIntegerFieldUpdater.newUpdater(CacheEntry.class, "enabled");

        private static final AtomicReferenceFieldUpdater<CacheEntry, PooledByteBuffer[]> bufsUpdater = AtomicReferenceFieldUpdater.newUpdater(CacheEntry.class, PooledByteBuffer[].class, "buffers");
        private static final AtomicReferenceFieldUpdater<CacheEntry, Object> tokenUpdator = AtomicReferenceFieldUpdater.newUpdater(CacheEntry.class, Object.class, "accessToken");

        private final Object key;
        private final int size;
        private final DirectBufferCache cache;
        private final int maxAge;

        private volatile PooledByteBuffer[] buffers = INIT_BUFFERS;
        // Reference count; starts at 1 (the cache's own reference). < 1 means destroying.
        private volatile int refs = 1;
        private volatile int hits = 1;
        // Access-queue token, or CLAIM_TOKEN while a thread is repositioning the entry.
        private volatile Object accessToken;
        // 0 = disabled, 1 = claimed for enabling, 2 = enabled.
        private volatile int enabled;
        private volatile long expires = -1;

        private CacheEntry(Object key, int size, DirectBufferCache cache, final int maxAge) {
            this.key = key;
            this.size = size;
            this.cache = cache;
            this.maxAge = maxAge;
        }

        public int size() {
            return size;
        }

        public PooledByteBuffer[] buffers() {
            return buffers;
        }

        /** Atomically increments and returns the hit count. */
        public int hit() {
            for (;;) {
                int i = hits;
                if (hitsUpdater.weakCompareAndSet(this, i, ++i)) {
                    return i;
                }
            }
        }

        public Object key() {
            return key;
        }

        public boolean enabled() {
            return enabled == 2;
        }

        /** Marks the entry live and stamps its expiry (maxAge == -1 means never expires). */
        public void enable() {
            if (maxAge == -1) {
                this.expires = -1;
            } else {
                this.expires = System.currentTimeMillis() + maxAge;
            }
            this.enabled = 2;
        }

        public void disable() {
            this.enabled = 0;
        }

        /** Claims the exclusive right to enable this entry (0 -> 1 transition). */
        public boolean claimEnable() {
            return enabledUpdator.compareAndSet(this, 0, 1);
        }

        /** Acquires a reference; fails (returns false) once destruction has begun. */
        public boolean reference() {
            for (;;) {
                int refs = this.refs;
                if (refs < 1) {
                    return false; // destroying
                }

                if (refsUpdater.compareAndSet(this, refs++, refs)) {
                    return true;
                }
            }
        }

        /** Releases a reference; the thread that drops the count to zero destroys the entry. */
        public boolean dereference() {
            for (;;) {
                int refs = this.refs;
                if (refs < 1) {
                    return false; // destroying
                }

                if (refsUpdater.compareAndSet(this, refs--, refs)) {
                    if (refs == 0) {
                        destroy();
                    }
                    return true;
                }
            }
        }

        /**
         * Allocates the pooled slices backing this entry. Exactly one thread wins the
         * INIT_BUFFERS CAS and performs the allocation; losers (and already-allocated
         * entries) return true immediately. On pool exhaustion the field is reset to
         * INIT_BUFFERS so allocation can be retried later, and false is returned.
         */
        public boolean allocate() {
            if (buffers.length > 0)
                return true;

            if (!bufsUpdater.compareAndSet(this, INIT_BUFFERS, EMPTY_BUFFERS)) {
                return true;
            }

            // Number of slices needed: ceil(size / sliceSize).
            int reserveSize = size;
            int n = 1;
            DirectBufferCache bufferCache = cache;
            while ((reserveSize -= bufferCache.sliceSize) > 0) {
                n++;
            }

            // Try to avoid mutations
            LimitedBufferSlicePool slicePool = bufferCache.pool;
            if (!slicePool.canAllocate(n)) {
                this.buffers = INIT_BUFFERS;
                return false;
            }

            PooledByteBuffer[] buffers = new PooledByteBuffer[n];
            for (int i = 0; i < n; i++) {
                PooledByteBuffer allocate = slicePool.allocate();
                if (allocate == null) {
                    // Pool ran out mid-allocation: roll back what we grabbed and retry later.
                    while (--i >= 0) {
                        buffers[i].free();
                    }

                    this.buffers = INIT_BUFFERS;
                    return false;
                }

                buffers[i] = allocate;
            }

            this.buffers = buffers;
            return true;
        }

        private void destroy() {
            // BUG FIX: the original assigned this.buffers = EMPTY_BUFFERS *before*
            // iterating the field, so the loop walked the empty sentinel and never
            // freed anything, leaking every pooled slice. Snapshot first, then clear,
            // then free the snapshot.
            PooledByteBuffer[] released = this.buffers;
            this.buffers = EMPTY_BUFFERS;
            for (PooledByteBuffer buffer : released) {
                buffer.free();
            }
        }

        /**
         * Atomically claims the access token. Returns Boolean.FALSE if another thread
         * already holds the claim; otherwise returns the previous token (possibly null).
         */
        Object claimToken() {
            for (;;) {
                Object current = this.accessToken;
                if (current == CLAIM_TOKEN) {
                    return Boolean.FALSE;
                }

                if (tokenUpdator.compareAndSet(this, current, CLAIM_TOKEN)) {
                    return current;
                }
            }
        }

        /** Installs the new token, succeeding only if this thread still holds the claim. */
        boolean setToken(Object token) {
            return tokenUpdator.compareAndSet(this, CLAIM_TOKEN, token);
        }

        /** Clears the token, returning the old one (null if it was a pending claim). */
        Object clearToken() {
            Object old = tokenUpdator.getAndSet(this, null);
            return old == CLAIM_TOKEN ? null : old;
        }

        long getExpires() {
            return expires;
        }
    }
}
/* * Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.cloudfront.model; import java.io.Serializable; import javax.annotation.Generated; /** * <p> * A distribution list. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cloudfront-2019-03-26/DistributionList" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class DistributionList implements Serializable, Cloneable { /** * <p> * The value you provided for the <code>Marker</code> request parameter. * </p> */ private String marker; /** * <p> * If <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value you can use for * the <code>Marker</code> request parameter to continue listing your distributions where they left off. * </p> */ private String nextMarker; /** * <p> * The value you provided for the <code>MaxItems</code> request parameter. * </p> */ private Integer maxItems; /** * <p> * A flag that indicates whether more distributions remain to be listed. If your results were truncated, you can * make a follow-up pagination request using the <code>Marker</code> request parameter to retrieve more * distributions in the list. * </p> */ private Boolean isTruncated; /** * <p> * The number of distributions that were created by the current AWS account. 
* </p> */ private Integer quantity; /** * <p> * A complex type that contains one <code>DistributionSummary</code> element for each distribution that was created * by the current AWS account. * </p> */ private com.amazonaws.internal.SdkInternalList<DistributionSummary> items; /** * <p> * The value you provided for the <code>Marker</code> request parameter. * </p> * * @param marker * The value you provided for the <code>Marker</code> request parameter. */ public void setMarker(String marker) { this.marker = marker; } /** * <p> * The value you provided for the <code>Marker</code> request parameter. * </p> * * @return The value you provided for the <code>Marker</code> request parameter. */ public String getMarker() { return this.marker; } /** * <p> * The value you provided for the <code>Marker</code> request parameter. * </p> * * @param marker * The value you provided for the <code>Marker</code> request parameter. * @return Returns a reference to this object so that method calls can be chained together. */ public DistributionList withMarker(String marker) { setMarker(marker); return this; } /** * <p> * If <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value you can use for * the <code>Marker</code> request parameter to continue listing your distributions where they left off. * </p> * * @param nextMarker * If <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value you can * use for the <code>Marker</code> request parameter to continue listing your distributions where they left * off. */ public void setNextMarker(String nextMarker) { this.nextMarker = nextMarker; } /** * <p> * If <code>IsTruncated</code> is <code>true</code>, this element is present and contains the value you can use for * the <code>Marker</code> request parameter to continue listing your distributions where they left off. 
* </p>
     *
     * @return when {@code IsTruncated} is {@code true}, the value to supply as the
     *         {@code Marker} request parameter to continue listing distributions where the
     *         previous response left off; {@code null} otherwise.
     */
    public String getNextMarker() {
        return this.nextMarker;
    }

    /**
     * Fluent variant of {@link #setNextMarker(String)}.
     *
     * @param nextMarker the continuation marker to use for the follow-up request.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withNextMarker(String nextMarker) {
        setNextMarker(nextMarker);
        return this;
    }

    /**
     * Sets the value that was provided for the <code>MaxItems</code> request parameter.
     *
     * @param maxItems the requested maximum number of items per page.
     */
    public void setMaxItems(Integer maxItems) {
        this.maxItems = maxItems;
    }

    /**
     * Returns the value that was provided for the <code>MaxItems</code> request parameter.
     *
     * @return the requested maximum number of items per page.
     */
    public Integer getMaxItems() {
        return this.maxItems;
    }

    /**
     * Fluent variant of {@link #setMaxItems(Integer)}.
     *
     * @param maxItems the requested maximum number of items per page.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withMaxItems(Integer maxItems) {
        setMaxItems(maxItems);
        return this;
    }

    /**
     * Sets the flag that indicates whether more distributions remain to be listed. When
     * {@code true}, a follow-up pagination request using the <code>Marker</code> request
     * parameter retrieves more distributions.
     *
     * @param isTruncated whether the listing was truncated.
     */
    public void setIsTruncated(Boolean isTruncated) {
        this.isTruncated = isTruncated;
    }

    /**
     * Returns the flag that indicates whether more distributions remain to be listed.
     *
     * @return {@code true} if a follow-up pagination request using the <code>Marker</code>
     *         request parameter would retrieve more distributions.
     */
    public Boolean getIsTruncated() {
        return this.isTruncated;
    }

    /**
     * Fluent variant of {@link #setIsTruncated(Boolean)}.
     *
     * @param isTruncated whether the listing was truncated.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withIsTruncated(Boolean isTruncated) {
        setIsTruncated(isTruncated);
        return this;
    }

    /**
     * Convenience accessor with boolean-style naming; equivalent to {@link #getIsTruncated()}.
     *
     * @return {@code true} if more distributions remain to be listed.
     */
    public Boolean isTruncated() {
        return this.isTruncated;
    }

    /**
     * Sets the number of distributions that were created by the current AWS account.
     *
     * @param quantity the distribution count.
     */
    public void setQuantity(Integer quantity) {
        this.quantity = quantity;
    }

    /**
     * Returns the number of distributions that were created by the current AWS account.
     *
     * @return the distribution count.
     */
    public Integer getQuantity() {
        return this.quantity;
    }

    /**
     * Fluent variant of {@link #setQuantity(Integer)}.
     *
     * @param quantity the distribution count.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withQuantity(Integer quantity) {
        setQuantity(quantity);
        return this;
    }

    /**
     * Returns the <code>DistributionSummary</code> elements, one per distribution created by
     * the current AWS account. Lazily initializes the backing list, so this never returns
     * {@code null}.
     *
     * @return the (possibly empty) list of distribution summaries.
     */
    public java.util.List<DistributionSummary> getItems() {
        if (items == null) {
            items = new com.amazonaws.internal.SdkInternalList<DistributionSummary>();
        }
        return items;
    }

    /**
     * Replaces the list of <code>DistributionSummary</code> elements. A {@code null} argument
     * clears the list; otherwise the supplied collection is defensively copied.
     *
     * @param items the distribution summaries, or {@code null} to clear.
     */
    public void setItems(java.util.Collection<DistributionSummary> items) {
        if (items == null) {
            this.items = null;
            return;
        }
        this.items = new com.amazonaws.internal.SdkInternalList<DistributionSummary>(items);
    }

    /**
     * Appends the given <code>DistributionSummary</code> elements to the existing list (if
     * any). Use {@link #setItems(java.util.Collection)} or
     * {@link #withItems(java.util.Collection)} to override the existing values instead.
     *
     * @param items the distribution summaries to append.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withItems(DistributionSummary... items) {
        if (this.items == null) {
            setItems(new com.amazonaws.internal.SdkInternalList<DistributionSummary>(items.length));
        }
        for (DistributionSummary element : items) {
            this.items.add(element);
        }
        return this;
    }

    /**
     * Fluent variant of {@link #setItems(java.util.Collection)} — replaces (does not append
     * to) the existing list.
     *
     * @param items the distribution summaries, or {@code null} to clear.
     * @return this object, so that method calls can be chained together.
     */
    public DistributionList withItems(java.util.Collection<DistributionSummary> items) {
        setItems(items);
        return this;
    }

    /**
     * Returns a string representation of this object, useful for testing and debugging.
     * Sensitive data would be redacted from this string using a placeholder value.
     *
     * @return a string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder buffer = new StringBuilder("{");
        if (getMarker() != null) {
            buffer.append("Marker: ").append(getMarker()).append(",");
        }
        if (getNextMarker() != null) {
            buffer.append("NextMarker: ").append(getNextMarker()).append(",");
        }
        if (getMaxItems() != null) {
            buffer.append("MaxItems: ").append(getMaxItems()).append(",");
        }
        if (getIsTruncated() != null) {
            buffer.append("IsTruncated: ").append(getIsTruncated()).append(",");
        }
        if (getQuantity() != null) {
            buffer.append("Quantity: ").append(getQuantity()).append(",");
        }
        if (getItems() != null) {
            buffer.append("Items: ").append(getItems());
        }
        return buffer.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof DistributionList)) {
            return false;
        }
        // Compare via accessors (not fields) so lazily-initialized Items behaves as before.
        DistributionList other = (DistributionList) obj;
        return java.util.Objects.equals(other.getMarker(), this.getMarker())
                && java.util.Objects.equals(other.getNextMarker(), this.getNextMarker())
                && java.util.Objects.equals(other.getMaxItems(), this.getMaxItems())
                && java.util.Objects.equals(other.getIsTruncated(), this.getIsTruncated())
                && java.util.Objects.equals(other.getQuantity(), this.getQuantity())
                && java.util.Objects.equals(other.getItems(), this.getItems());
    }

    @Override
    public int hashCode() {
        // Same 31-based accumulation as the generated original, expressed as a loop.
        final int prime = 31;
        int result = 1;
        Object[] parts = {
                getMarker(), getNextMarker(), getMaxItems(), getIsTruncated(), getQuantity(), getItems()
        };
        for (Object part : parts) {
            result = prime * result + (part == null ? 0 : part.hashCode());
        }
        return result;
    }

    @Override
    public DistributionList clone() {
        try {
            return (DistributionList) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class is Cloneable by contract.
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intel.ssg.dcst.panthera.parse.sql.transformer; import java.util.ArrayList; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.Stack; import org.antlr.runtime.tree.CommonTree; import com.intel.ssg.dcst.panthera.parse.sql.PantheraExpParser; import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateException; import com.intel.ssg.dcst.panthera.parse.sql.SqlXlateUtil; import com.intel.ssg.dcst.panthera.parse.sql.TranslateContext; import com.intel.ssg.dcst.panthera.parse.sql.transformer.fb.FilterBlockUtil; import br.com.porcelli.parser.plsql.PantheraParser_PLSQLParser; /** * deal with USING/NATURAL JOIN/nested JOIN * * ComplexJoinTransformer. 
* */ public class ComplexJoinTransformer extends BaseSqlASTTransformer { SqlASTTransformer tf; public ComplexJoinTransformer(SqlASTTransformer tf) { this.tf = tf; } @Override public void transform(CommonTree tree, TranslateContext context) throws SqlXlateException { tf.transformAST(tree, context); // omit the root node of the tree trans((CommonTree) tree.getChild(0), context); } private void trans(CommonTree node, TranslateContext context) throws SqlXlateException { firstTrans(node, context); oldTrans(node, context); } /** * eliminate first child of join-ref being JOIN, mainly for speed. * * @param node * @param context * @throws SqlXlateException */ private void firstTrans(CommonTree node, TranslateContext context) throws SqlXlateException { for (int i = 0; i < node.getChildCount(); i++) { firstTrans((CommonTree) node.getChild(i), context); } if (node.getType() == PantheraParser_PLSQLParser.TABLE_REF) { // only deal the first child of every table_ref since this is firstTrans() CommonTree element = (CommonTree) node.getChild(0); assert(element.getType() == PantheraParser_PLSQLParser.TABLE_REF_ELEMENT); // two situations for table_ref_element // 1) table_expression, or alias and table_expression // 2) only one child, and is table_ref if (element.getFirstChildWithType(PantheraParser_PLSQLParser.TABLE_REF) != null) { // first table_ref_element of node(table_ref) is eligible to optimize assert (element.getChildCount() == 1); CommonTree splitTableRef = (CommonTree) element.deleteChild(0); splitTableRef.token = null; node.replaceChildren(0, 0, splitTableRef); } } } private void oldTrans(CommonTree node, TranslateContext context) throws SqlXlateException { int childCount = node.getChildCount(); for (int i = 0; i < childCount; i++) { oldTrans((CommonTree) node.getChild(i), context); } if (node.getType() == PantheraExpParser.TABLE_REF && node.getParent().getType() == PantheraExpParser.TABLE_REF_ELEMENT) { processNested(node, context); } if (node.getType() == 
PantheraExpParser.PLSQL_NON_RESERVED_USING) { CommonTree join = (CommonTree) node.getParent(); if (join.getType() != PantheraExpParser.JOIN_DEF) { throw new SqlXlateException(join, "Unsupported USING type:" + join.getText()); } processUsing(node, context); } if (node.getType() == PantheraExpParser.TABLE_REF && node.getChildCount() > 1 && node.getChild(1).getType() == PantheraExpParser.JOIN_DEF && node.getChild(1).getChild(0).getType() == PantheraExpParser.NATURAL_VK) { Map<String, Set<String>> columnSetMap = processNaturalJoin(node, context); // take INNER as LEFT, because when INNER join on left & right equal, neither cannot be null. int joinType = PantheraParser_PLSQLParser.LEFT_VK; if (node.getChild(1).getChild(0).getType() == PantheraParser_PLSQLParser.RIGHT_VK || node.getChild(1).getChild(0).getType() == PantheraParser_PLSQLParser.FULL_VK) { joinType = node.getChild(1).getChild(0).getType(); } // parent of table_ref can also be table_ref_element CommonTree select = (CommonTree) node.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT); Map<String, CommonTree> commonMap = rebuildSelectNatural(select, columnSetMap, joinType); FilterBlockUtil.rebuildColumn((CommonTree) select .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_WHERE), commonMap); FilterBlockUtil.rebuildColumn((CommonTree) select .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_GROUP), commonMap); FilterBlockUtil.deleteAllTableAlias((CommonTree) ((CommonTree) (select.getParent().getParent())) .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_ORDER)); } } private void processNested(CommonTree node, TranslateContext context) throws SqlXlateException { CommonTree select = (CommonTree) node.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT); CommonTree closingTabRefElement = (CommonTree) node.getParent(); CommonTree closingTabExpression = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.TABLE_EXPRESSION, "TABLE_EXPRESSION"); 
closingTabRefElement.replaceChildren(node.childIndex, node.childIndex, closingTabExpression); CommonTree closingSelectMode = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.SELECT_MODE, "SELECT_MODE"); closingTabExpression.addChild(closingSelectMode); CommonTree closingSelectStatement = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.SELECT_STATEMENT, "SELECT_STATEMENT"); closingSelectMode.addChild(closingSelectStatement); CommonTree closingSubquery = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.SUBQUERY, "SUBQUERY"); closingSelectStatement.addChild(closingSubquery); CommonTree closingSelect = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT, "select"); closingSubquery.addChild(closingSelect); // FIXME here the from is made out from air, must have many problems, ahhh CommonTree closingFrom = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.SQL92_RESERVED_FROM, "from"); closingSelect.addChild(closingFrom); closingFrom.addChild(node); CommonTree asterisk = FilterBlockUtil.createSqlASTNode(closingTabRefElement, PantheraParser_PLSQLParser.ASTERISK, "*"); closingSelect.addChild(asterisk); // // rebuild select-list // Map<String, CommonTree> commonMap = rebuildSelectNested(select, context); // // rebuild others // FilterBlockUtil.rebuildColumn((CommonTree) select // .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_WHERE), commonMap); // FilterBlockUtil.rebuildColumn((CommonTree) select // .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_GROUP), commonMap); // FilterBlockUtil.deleteAllTableAlias((CommonTree) ((CommonTree) (select.getParent().getParent())) // .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_ORDER)); } private void processUsing(CommonTree node, TranslateContext context) throws SqlXlateException { CommonTree join = (CommonTree) node.getParent(); 
CommonTree tableRef = (CommonTree) join.getParent(); assert (join.childIndex > 0); CommonTree leftTableRefElement = (CommonTree) tableRef.getChild(join.childIndex - 1); if (leftTableRefElement.getType() != PantheraExpParser.TABLE_REF_ELEMENT) { throw new SqlXlateException(join, "currently only support USING specified in first join of a TABLE_REF"); } // parent of table_ref can also be table_ref_element CommonTree select = (CommonTree) node.getAncestor(PantheraParser_PLSQLParser.SQL92_RESERVED_SELECT); CommonTree rightTableRefElement = (CommonTree) join .getFirstChildWithType(PantheraExpParser.TABLE_REF_ELEMENT); String leftTableName = SqlXlateUtil.findTableReferenceName(leftTableRefElement); String rightTableName = SqlXlateUtil.findTableReferenceName(rightTableRefElement); List<String> leftColumnList = new ArrayList<String>(); List<String> rightColumnList = new ArrayList<String>(); for (int i = 0; i < node.getChildCount(); i++) { CommonTree columnName = (CommonTree) node.getChild(i); String column = columnName.getChild(0).getText(); leftColumnList.add(column); rightColumnList.add(column); } assert (join.getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_ON) == null); // build on to replace USING CommonTree on = FilterBlockUtil.makeOn(node, leftTableName, rightTableName, leftColumnList, rightColumnList); join.replaceChildren(node.childIndex, node.childIndex, on); // take INNER as LEFT, because when INNER join on left & right equal, neither cannot be null. 
int joinType = PantheraParser_PLSQLParser.LEFT_VK; if (join.getChild(0).getType() == PantheraParser_PLSQLParser.RIGHT_VK || join.getChild(0).getType() == PantheraParser_PLSQLParser.FULL_VK) { joinType = join.getChild(0).getType(); } // rebuild select-list Map<String, CommonTree> commonMap = rebuildSelectUsing(select, node, joinType, leftTableRefElement, rightTableRefElement, context); // rebuild others FilterBlockUtil.rebuildColumn((CommonTree) select .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_WHERE), commonMap); FilterBlockUtil.rebuildColumn((CommonTree) select .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_GROUP), commonMap); FilterBlockUtil.deleteAllTableAlias((CommonTree) ((CommonTree) (select.getParent().getParent())) .getFirstChildWithType(PantheraExpParser.SQL92_RESERVED_ORDER)); } /** * * 1. expand select * if exists, omit USING cols, use common colname as alias.<br> * 2. replace table alias in select-list * 3. replace upper order by x.a with order by a * 4. replace filters of a to x.a * * @param select * @param columnSetMap * @param joinType * @return commonMap * @throws SqlXlateException */ private Map<String, CommonTree> rebuildSelectUsing(CommonTree select, CommonTree node, int joinType, CommonTree leftTable, CommonTree rightTable, TranslateContext context) throws SqlXlateException { CommonTree selectList = (CommonTree) select.getFirstChildWithType(PantheraParser_PLSQLParser.SELECT_LIST); CommonTree asterisk = (CommonTree) select.getFirstChildWithType(PantheraParser_PLSQLParser.ASTERISK); CommonTree newSelectList = FilterBlockUtil.createSqlASTNode(asterisk != null ? 
asterisk : selectList, PantheraParser_PLSQLParser.SELECT_LIST, "SELECT_LIST"); Map<String, CommonTree> commonMap = rebuildAsteriskUsing(node, newSelectList, joinType, leftTable, rightTable, context); if (asterisk != null) { if (selectList == null) { assert(asterisk.childIndex == 1); select.replaceChildren(asterisk.childIndex, asterisk.childIndex, newSelectList); selectList = newSelectList; } else if (asterisk.childIndex == 2) { for (int i = 0; i < newSelectList.getChildCount(); i++) { selectList.addChild(newSelectList.getChild(i)); } } else { for (int i = 0; i < newSelectList.getChildCount(); i++) { SqlXlateUtil.addCommonTreeChild(selectList, i, (CommonTree) newSelectList.getChild(i)); } } } for (int i = 0; i < selectList.getChildCount(); i++) { CommonTree selectItem = (CommonTree) selectList.getChild(i); List<CommonTree> anyList = new ArrayList<CommonTree>(); FilterBlockUtil.findNode(selectItem, PantheraExpParser.ANY_ELEMENT, anyList); if (anyList.size() == 0) { continue; } for (CommonTree anyElement : anyList) { String colname = anyElement.getChild(anyElement.getChildCount() - 1).getText(); if (selectItem.getChildCount() == 1 && selectItem.getChild(0).getChild(0).getType() == PantheraParser_PLSQLParser.CASCATED_ELEMENT && selectItem.getChild(0).getChild(0).getChild(0) == anyElement) { // if no alias user defined, rewrite it as col name to ensure dup col be deleted. 
selectItem.addChild(FilterBlockUtil.createAlias(selectItem, colname)); } FilterBlockUtil.rebuildColumn(anyElement, commonMap); } } return commonMap; } private Map<String, CommonTree> rebuildAsteriskUsing(CommonTree node, CommonTree selectList, int joinType, CommonTree leftTable, CommonTree rightTable, TranslateContext context) throws SqlXlateException { Map<String, CommonTree> commonMap = new HashMap<String, CommonTree>(); Set<String> set = new HashSet<String>(); String leftTableName = SqlXlateUtil.findTableReferenceName(leftTable); String rightTableName = SqlXlateUtil.findTableReferenceName(rightTable); for (int i = 0; i < node.getChildCount(); i++) { String columnName = node.getChild(i).getChild(0).getText(); CommonTree selectItem = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SELECT_ITEM, "SELECT_ITEM"); CommonTree expr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.EXPR, "EXPR"); selectItem.addChild(expr); if (joinType == PantheraParser_PLSQLParser.LEFT_VK) { CommonTree cascated = FilterBlockUtil.createCascatedElementBranch(selectList, leftTableName, columnName); expr.addChild(cascated); } else if (joinType == PantheraParser_PLSQLParser.RIGHT_VK) { CommonTree cascated = FilterBlockUtil.createCascatedElementBranch(selectList, rightTableName, columnName); expr.addChild(cascated); } else { CommonTree leftCascated = FilterBlockUtil.createCascatedElementBranch(selectList, leftTableName, columnName); CommonTree rightCascated = FilterBlockUtil.createCascatedElementBranch(selectList, leftTableName, columnName); CommonTree search = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SEARCHED_CASE, "case"); expr.addChild(search); CommonTree tokenWhen = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SQL92_RESERVED_WHEN, "when"); CommonTree tokenElse = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SQL92_RESERVED_ELSE, "else"); 
search.addChild(tokenWhen); search.addChild(tokenElse); CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.LOGIC_EXPR, "LOGIC_EXPR"); tokenWhen.addChild(logicExpr); CommonTree isNull = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.IS_NULL, "IS_NULL"); logicExpr.addChild(isNull); isNull.addChild(FilterBlockUtil.cloneTree(FilterBlockUtil.cloneTree(rightCascated))); CommonTree leftExpr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.EXPR, "EXPR"); CommonTree rightExpr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.EXPR, "EXPR"); tokenElse.addChild(rightExpr); tokenWhen.addChild(leftExpr); leftExpr.addChild(leftCascated); rightExpr.addChild(rightCascated); } CommonTree alias = FilterBlockUtil.createAlias(selectList, columnName); selectItem.addChild(alias); selectList.addChild(selectItem); commonMap.put(columnName, selectItem); set.add(columnName); } addRemainingCols(selectList, leftTable, leftTableName, set, context); addRemainingCols(selectList, rightTable, rightTableName, set, context); return commonMap; } private void addRemainingCols(CommonTree selectList, CommonTree table, String tableName, Set<String> set, TranslateContext context) throws SqlXlateException { Set<String> colSet = FilterBlockUtil.getColumnSet(table, context); Iterator<String> colIt = colSet.iterator(); while (colIt.hasNext()) { String col = colIt.next(); if (set.contains(col)) { continue; } CommonTree selectItem = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SELECT_ITEM, "SELECT_ITEM"); CommonTree expr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.EXPR, "EXPR"); selectItem.addChild(expr); CommonTree cascated = FilterBlockUtil.createCascatedElementBranch(selectList, tableName, col); expr.addChild(cascated); CommonTree alias = FilterBlockUtil.createAlias(selectList, col); selectItem.addChild(alias); 
selectList.addChild(selectItem); } } /** * * 1. expand select * if exists, omit dup col, use common colname as alias.<br> * 2. replace table alias in select-list * 3. replace upper order by x.a with order by a * 4. replace filters of a to x.a * * @param select * @param columnSetMap * @param joinType * @return commonMap */ private Map<String, CommonTree> rebuildSelectNatural(CommonTree select, Map<String, Set<String>> columnSetMap, int joinType) { CommonTree selectList = (CommonTree) select.getFirstChildWithType(PantheraParser_PLSQLParser.SELECT_LIST); CommonTree asterisk = (CommonTree) select.getFirstChildWithType(PantheraParser_PLSQLParser.ASTERISK); CommonTree newSelectList = FilterBlockUtil.createSqlASTNode(asterisk != null ? asterisk : selectList, PantheraParser_PLSQLParser.SELECT_LIST, "SELECT_LIST"); Map<String, CommonTree> commonMap = rebuildAsteriskNatural(newSelectList, columnSetMap, joinType); if (asterisk != null) { if (selectList == null) { assert(asterisk.childIndex == 1); select.replaceChildren(asterisk.childIndex, asterisk.childIndex, newSelectList); selectList = newSelectList; } else if (asterisk.childIndex == 2) { for (int i = 0; i < newSelectList.getChildCount(); i++) { selectList.addChild(newSelectList.getChild(i)); } } else { for (int i = 0; i < newSelectList.getChildCount(); i++) { SqlXlateUtil.addCommonTreeChild(selectList, i, (CommonTree) newSelectList.getChild(i)); } } } for (int i = 0; i < selectList.getChildCount(); i++) { CommonTree selectItem = (CommonTree) selectList.getChild(i); List<CommonTree> anyList = new ArrayList<CommonTree>(); FilterBlockUtil.findNode(selectItem, PantheraExpParser.ANY_ELEMENT, anyList); if (anyList.size() == 0) { continue; } for (CommonTree anyElement : anyList) { String colname = anyElement.getChild(anyElement.getChildCount() - 1).getText(); if (selectItem.getChildCount() == 1 && selectItem.getChild(0).getChild(0).getType() == PantheraParser_PLSQLParser.CASCATED_ELEMENT && 
selectItem.getChild(0).getChild(0).getChild(0) == anyElement) { // if no alias user defined, rewrite it as col name to ensure dup col be deleted. selectItem.addChild(FilterBlockUtil.createAlias(selectItem, colname)); } FilterBlockUtil.rebuildColumn(anyElement, commonMap); } } return commonMap; } private Map<String, CommonTree> rebuildAsteriskNatural(CommonTree selectList, Map<String, Set<String>> columnSetMap, int joinType) { Iterator<Entry<String, Set<String>>> it = columnSetMap.entrySet().iterator(); Set<String> set = new HashSet<String>(); Map<String, CommonTree> index = new HashMap<String, CommonTree>(); Map<String, CommonTree> commonMap = new HashMap<String, CommonTree>(); Stack<Entry<String, Set<String>>> entries = new Stack<Entry<String, Set<String>>>(); int commonCount = 0; while (it.hasNext()) { entries.push(it.next()); } // use stack to first do right table and then left table while (entries.size() > 0) { Entry<String, Set<String>> entry = entries.pop(); String table = entry.getKey(); Set<String> val = entry.getValue(); Iterator<String> colIter = val.iterator(); int independentCount = commonCount; while (colIter.hasNext()) { String col = colIter.next(); CommonTree cascated = FilterBlockUtil.createCascatedElementBranch(selectList, table, col); CommonTree expr = FilterBlockUtil.createSqlASTNode(cascated, PantheraParser_PLSQLParser.EXPR, "EXPR"); expr.addChild(cascated); CommonTree alias = FilterBlockUtil.createAlias(cascated, col); CommonTree selectItem = FilterBlockUtil.createSqlASTNode(expr, PantheraParser_PLSQLParser.SELECT_ITEM, "SELECT_ITEM"); selectItem.addChild(expr); selectItem.addChild(alias); if (set.contains(col)) { // this is a shared column, put it at first of selectList CommonTree existSelectItem = index.get(col); // here in the same table will not have two cols with same colname // TODO need check existSelectItem.getParent().deleteChild(existSelectItem.getChildIndex()); if (commonMap.get(col) != null) { commonCount--; independentCount--; } if 
(joinType == PantheraParser_PLSQLParser.LEFT_VK) { SqlXlateUtil.addCommonTreeChild(selectList, commonCount++, selectItem); commonMap.put(col, selectItem); } else if (joinType == PantheraParser_PLSQLParser.RIGHT_VK) { SqlXlateUtil.addCommonTreeChild(selectList, commonCount++, existSelectItem); commonMap.put(col, existSelectItem); } else { // natural full outer join // (case when t1.a is null then t2.a else t1.a end) CommonTree compositeSelectItem = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SELECT_ITEM, "SELECT_ITEM"); CommonTree compositeExpr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.EXPR, "EXPR"); compositeSelectItem.addChild(compositeExpr); compositeSelectItem.addChild((CommonTree) selectItem.deleteChild(1)); CommonTree search = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SEARCHED_CASE, "case"); compositeExpr.addChild(search); CommonTree tokenWhen = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SQL92_RESERVED_WHEN, "when"); CommonTree tokenElse = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.SQL92_RESERVED_ELSE, "else"); search.addChild(tokenWhen); search.addChild(tokenElse); CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.LOGIC_EXPR, "LOGIC_EXPR"); tokenWhen.addChild(logicExpr); CommonTree isNull = FilterBlockUtil.createSqlASTNode(selectList, PantheraParser_PLSQLParser.IS_NULL, "IS_NULL"); logicExpr.addChild(isNull); isNull.addChild(FilterBlockUtil.cloneTree((CommonTree) selectItem.getChild(0).getChild(0))); tokenElse.addChild((CommonTree) selectItem.deleteChild(0)); tokenWhen.addChild((CommonTree) existSelectItem.deleteChild(0)); SqlXlateUtil.addCommonTreeChild(selectList, commonCount++, compositeSelectItem); commonMap.put(col, compositeSelectItem); } independentCount++; } else { SqlXlateUtil.addCommonTreeChild(selectList, independentCount++, selectItem); set.add(col); 
index.put(col, selectItem); } } } return commonMap; } /** * change natural join to equijoin * * @param node * @param context * @return return a map for sets of all table_ref_element, key is table alias, * val is a set for all cols. * @throws SqlXlateException */ private Map<String, Set<String>> processNaturalJoin(CommonTree node, TranslateContext context) throws SqlXlateException { Map<String, Set<String>> columnSetMap = new LinkedHashMap<String, Set<String>>(); for (int i = 0; i < node.getChildCount(); i++) { CommonTree child = (CommonTree) node.getChild(i); if (child.getType() == PantheraExpParser.TABLE_REF_ELEMENT) { String leftTable = SqlXlateUtil.findTableReferenceName(child); columnSetMap.put(leftTable, FilterBlockUtil.getColumnSet(child, context)); } if (child.getType() == PantheraExpParser.JOIN_DEF && child.getChild(0).getType() == PantheraExpParser.NATURAL_VK) { // change join type CommonTree natural = processNaturalNode(child); CommonTree tableRefElement = (CommonTree) child .getFirstChildWithType(PantheraExpParser.TABLE_REF_ELEMENT); String alias = SqlXlateUtil.findTableReferenceName(tableRefElement); Set<String> columnSet = FilterBlockUtil.getColumnSet(tableRefElement, context); makeOn(natural, alias, child, columnSet, columnSetMap); columnSetMap.put(alias, columnSet); } } return columnSetMap; } /** * make equijoin condition * * @param njoin * @param thisTable * @param join * @param thisColumnSet * @param columnSetMap */ private void makeOn(CommonTree natural, String thisTable, CommonTree join, Set<String> thisColumnSet, Map<String, Set<String>> columnSetMap) { CommonTree on = FilterBlockUtil.createSqlASTNode(natural, PantheraExpParser.SQL92_RESERVED_ON, "on");//njoin.getChild(0) is natural_vk CommonTree logicExpr = FilterBlockUtil.createSqlASTNode(on, PantheraExpParser.LOGIC_EXPR, "LOGIC_EXPR"); on.addChild(logicExpr); boolean hasCondition = false; for (String column : thisColumnSet) { for (Entry<String, Set<String>> entry : columnSetMap.entrySet()) { 
Set<String> preColumnSet = entry.getValue(); String tableAlias = entry.getKey(); if (preColumnSet.contains(column)) { hasCondition = true; CommonTree equal = FilterBlockUtil.makeEqualCondition(on, thisTable, tableAlias, column, column); FilterBlockUtil.addConditionToLogicExpr(logicExpr, equal); break; } } } if (hasCondition) { join.addChild(on); } } /** * build columnSetMap and delete INNER node * * @param join * @return natural join node */ private CommonTree processNaturalNode(CommonTree join) { // delete NATURAL node CommonTree natural = (CommonTree) join.deleteChild(0); if (join.getChild(0).getType() == PantheraExpParser.INNER_VK) { // delete INNER node join.deleteChild(0); } return natural; } }
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.lib.elasticsearch;

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.google.gson.JsonPrimitive;
import com.google.gson.JsonSyntaxException;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.lib.aws.AwsUtil;
import com.streamsets.pipeline.stage.config.elasticsearch.ElasticsearchConfig;
import com.streamsets.pipeline.stage.config.elasticsearch.Errors;
import com.streamsets.pipeline.stage.config.elasticsearch.Groups;
import com.streamsets.pipeline.stage.config.elasticsearch.SecurityConfig;
import com.streamsets.pipeline.stage.config.elasticsearch.SecurityMode;
import com.streamsets.pipeline.stage.lib.aws.AwsRegion;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringUtils;
import org.apache.http.Header;
import org.apache.http.HttpEntity;
import org.apache.http.HttpHost;
import org.apache.http.HttpRequestInterceptor;
import org.apache.http.HttpStatus;
import org.apache.http.entity.StringEntity;
import org.apache.http.impl.nio.client.HttpAsyncClientBuilder;
import org.apache.http.message.BasicHeader;
import org.apache.http.ssl.SSLContexts;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.client.RestClientBuilder;
import org.elasticsearch.client.sniff.ElasticsearchHostsSniffer;
import org.elasticsearch.client.sniff.HostsSniffer;
import org.elasticsearch.client.sniff.Sniffer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import javax.net.ssl.SSLContext;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Scanner;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;

import static com.google.gson.FieldNamingPolicy.LOWER_CASE_WITH_UNDERSCORES;
import static com.streamsets.pipeline.stage.config.elasticsearch.Errors.ELASTICSEARCH_09;
import static com.streamsets.pipeline.stage.config.elasticsearch.Errors.ELASTICSEARCH_46;
import static com.streamsets.pipeline.stage.config.elasticsearch.Errors.ELASTICSEARCH_47;
import static com.streamsets.pipeline.stage.config.elasticsearch.Errors.ELASTICSEARCH_48;
import static com.streamsets.pipeline.stage.config.elasticsearch.SecurityMode.BASIC;
import static org.apache.http.entity.ContentType.APPLICATION_JSON;

/**
 * Shared helper used by Elasticsearch origin/destination stages.
 *
 * Owns the lifecycle of the low-level Elasticsearch {@link RestClient} (and an optional
 * {@link Sniffer}): {@link #init(String, List)} validates the stage configuration, builds the
 * client (with basic-auth, AWS SigV4 signing or TLS as configured) and probes the cluster for
 * its version; {@link #destroy()} closes everything. All validation problems are reported as
 * {@link Stage.ConfigIssue}s rather than thrown.
 */
public class ElasticsearchStageDelegate {
  private static final Logger LOG = LoggerFactory.getLogger(ElasticsearchStageDelegate.class);

  // Any non-whitespace authority followed by ":port" — group(1) captures the port digits.
  private static final Pattern URI_PATTERN = Pattern.compile("\\S+:(\\d+)");
  // AWS service name used when signing requests with SigV4.
  private static final String AWS_SERVICE_NAME = "es";
  private static final JsonParser JSON_PARSER = new JsonParser();
  private static final Gson GSON = new GsonBuilder().setFieldNamingPolicy(LOWER_CASE_WITH_UNDERSCORES).create();

  // JSON property names in the _validate/query response and request.
  private static final String VALID_PROPERTY_NAME = "valid";
  private static final String QUERY_PROPERTY_NAME = "query";
  private static final String VALIDATE_QUERY_PATH = "/_validate/query";

  // Config field names used when attributing issues to a specific stage config.
  private static final String HTTP_URIS_CONFIG_NAME = "httpUris";
  private static final String QUERY_CONFIG_NAME = "query";
  private static final String SECURITY_CONFIG_NAME = "securityConfig";
  private static final String USER_CONFIG_NAME = "securityUser";
  private static final String USE_SECURITY_CONFIG_NAME = "useSecurity";
  private static final String ACCESS_KEY_ID_CONFIG_NAME = "awsAccessKeyId";

  private final Stage.Context context;
  private final ElasticsearchConfig conf;

  // Created in init(), closed in destroy(); null until a successful init.
  private RestClient restClient;
  private Sniffer sniffer;

  // Cluster version discovered during init(); defaults are used when detection fails.
  private String version = "not-known";
  private int majorVersion = -1;

  public ElasticsearchStageDelegate(Stage.Context context, ElasticsearchConfig conf) {
    this.context = context;
    this.conf = conf;
  }

  /**
   * Validates the configuration, builds the REST client, probes the cluster root endpoint for
   * the server version, and optionally attaches a sniffer.
   *
   * @param prefix config-name prefix used when attributing issues (e.g. "conf").
   * @param issues list that accumulated issues are appended to.
   * @return the same {@code issues} list; the method returns early as soon as it is non-empty.
   */
  public List<Stage.ConfigIssue> init(String prefix, List<Stage.ConfigIssue> issues) {
    // URI-level validation: list must be non-empty and each entry must parse as host:port.
    if (conf.httpUris.isEmpty()) {
      issues.add(
          context.createConfigIssue(
              Groups.ELASTIC_SEARCH.name(),
              prefix + ".httpUris",
              Errors.ELASTICSEARCH_06
          )
      );
    } else {
      for (String uri : conf.httpUris) {
        validateUri(uri, issues, prefix + ".httpUris");
      }
    }
    // Resolve basic-auth credentials eagerly so credential-store failures surface as issues.
    String securityUser = null;
    String securityPassword = null;
    if (conf.useSecurity && SecurityMode.BASIC.equals(conf.securityConfig.securityMode)) {
      try {
        securityUser = conf.securityConfig.securityUser.get();
      } catch (StageException e) {
        issues.add(context.createConfigIssue(
            Groups.SECURITY.name(),
            prefix + "." + SecurityConfig.NAME + ".securityUser",
            Errors.ELASTICSEARCH_32,
            e.getMessage(),
            e
        ));
      }
      try {
        securityPassword = conf.securityConfig.securityPassword.get();
      } catch (StageException e) {
        issues.add(context.createConfigIssue(
            Groups.SECURITY.name(),
            prefix + "." + SecurityConfig.NAME + ".securityPassword",
            Errors.ELASTICSEARCH_38,
            e.getMessage(),
            e
        ));
      }
      if (securityUser == null || securityPassword == null) {
        issues.add(
            context.createConfigIssue(
                Groups.SECURITY.name(),
                prefix + "." + SecurityConfig.NAME + ".securityUser",
                Errors.ELASTICSEARCH_40
            )
        );
      } else {
        if (securityUser.isEmpty()) {
          issues.add(
              context.createConfigIssue(
                  Groups.SECURITY.name(),
                  prefix + "." + SecurityConfig.NAME + ".securityUser",
                  Errors.ELASTICSEARCH_20
              )
          );
        } else if (!securityUser.contains(":") && securityPassword.isEmpty()) {
          // A user of the form "user:password" carries its own password; otherwise one is required.
          issues.add(
              context.createConfigIssue(
                  Groups.SECURITY.name(),
                  prefix + "." + SecurityConfig.NAME + ".securityPassword",
                  Errors.ELASTICSEARCH_39
              )
          );
        }
      }
    }
    if (!issues.isEmpty()) {
      return issues;
    }
    int numHosts = conf.httpUris.size();
    HttpHost[] hosts = new HttpHost[numHosts];
    for (int i = 0; i < numHosts; i++) {
      hosts[i] = HttpHost.create(conf.httpUris.get(i));
    }
    RestClientBuilder restClientBuilder = RestClient.builder(hosts);
    try {
      Response response = null;
      if (conf.useSecurity) {
        buildSSLContext(prefix, issues, restClientBuilder);
        if (!issues.isEmpty()) {
          return issues;
        }
        switch (conf.securityConfig.securityMode) {
          case BASIC:
            restClient = restClientBuilder.build();
            break;
          case AWSSIGV4:
            AwsRegion awsRegion = conf.securityConfig.awsRegion;
            if (awsRegion == AwsRegion.OTHER) {
              // "Other" region requires an explicit endpoint for signing.
              if (conf.securityConfig.endpoint == null || conf.securityConfig.endpoint.isEmpty()) {
                issues.add(context.createConfigIssue(Groups.SECURITY.name(),
                    prefix + "." + SecurityConfig.NAME + ".endpoint",
                    Errors.ELASTICSEARCH_33));
                return issues;
              }
            }
            HttpRequestInterceptor interceptor = AwsUtil.getAwsSigV4Interceptor(
                AWS_SERVICE_NAME,
                awsRegion,
                conf.securityConfig.endpoint,
                conf.securityConfig.awsAccessKeyId,
                conf.securityConfig.awsSecretAccessKey);
            // NOTE(review): this builder bypasses the SSL callback installed above by
            // buildSSLContext() — confirm SigV4 mode is not expected to honor the truststore.
            restClient = RestClient.builder(hosts).setHttpClientConfigCallback(hacb -> hacb.addInterceptorLast(interceptor)).build();
            break;
        }
        // In AWSSIGV4 mode securityUser/securityPassword are null, but
        // getAuthenticationHeader() returns an empty array before touching them.
        response = restClient.performRequest("GET", "/", getAuthenticationHeader(securityUser, securityPassword));
      } else {
        restClient = restClientBuilder.build();
        response = restClient.performRequest("GET", "/");
      }
      // Parse the root-endpoint response; extract version.number (e.g. "6.8.2" -> major 6).
      JsonElement version = null;
      String responseBody = readResponseBody(prefix, response, issues);
      JsonElement jsonResponse = parseResponseBody(prefix, responseBody, issues);
      if (jsonResponse != null && jsonResponse.isJsonObject()) {
        version = jsonResponse.getAsJsonObject().get("version");
      }
      if(version != null && version.isJsonObject() && version.getAsJsonObject().get("number") != null) {
        this.version = version.getAsJsonObject().get("number").getAsString();
        this.majorVersion = Integer.parseInt(this.version.split("\\.")[0]);
        LOG.info("ElasticSearch server version {} (major line {})", this.version, this.majorVersion);
      } else {
        LOG.error("Unable to determine ElasticSearch version");
        LOG.debug("Response from server: {}", responseBody);
      }
    } catch (final ResponseException ex) {
      addHTTPResponseError(prefix, "httpUris", "/", ex.getResponse(), issues);
    } catch (final Exception e) {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          prefix + "." + HTTP_URIS_CONFIG_NAME,
          Errors.ELASTICSEARCH_43,
          createHostsString(),
          e.getMessage(),
          e
      ));
    }
    if (!issues.isEmpty()) {
      return issues;
    }
    addSniffer(hosts);
    return issues;
  }

  /** Closes the sniffer and the REST client; close failures are logged, not rethrown. */
  public void destroy() {
    try {
      if (sniffer != null) {
        sniffer.close();
      }
      if (restClient != null) {
        restClient.close();
      }
    } catch (IOException e) {
      LOG.warn("Exception thrown while closing REST client: " + e);
    }
  }

  /** Renders the configured URIs as "host:port,host:port" for error messages. */
  private String createHostsString() {
    return conf.httpUris.stream()
        .map(HttpHost::create)
        .map(HttpHost::toHostString)
        .collect(Collectors.joining(","));
  }

  /**
   * Translates an HTTP error response into the matching config issue.
   * 400 -> bad request; 401/403 -> auth issues (see addAuthError); anything else -> generic.
   *
   * @return always {@code false} (callers ignore the result).
   */
  private boolean addHTTPResponseError(
      final String configPrefix,
      final String failedConfig,
      final String endpoint,
      final Response response,
      final List<Stage.ConfigIssue> issues
  ) {
    int statusCode = response.getStatusLine().getStatusCode();
    if (statusCode == HttpStatus.SC_BAD_REQUEST) {
      issues.add(context.createConfigIssue(Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + failedConfig,
          Errors.ELASTICSEARCH_44,
          endpoint
      ));
    } else if (statusCode == HttpStatus.SC_UNAUTHORIZED) {
      addAuthError(configPrefix, endpoint, issues, ELASTICSEARCH_09, ELASTICSEARCH_47);
    } else if (statusCode == HttpStatus.SC_FORBIDDEN) {
      addAuthError(configPrefix, endpoint, issues, ELASTICSEARCH_46, ELASTICSEARCH_48);
    } else {
      issues.add(context.createConfigIssue(Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + HTTP_URIS_CONFIG_NAME,
          Errors.ELASTICSEARCH_45,
          endpoint,
          statusCode,
          response.getStatusLine().getReasonPhrase()
      ));
    }
    return false;
  }

  /**
   * Attributes an authentication/authorization failure to the most relevant config field:
   * the basic-auth user, the AWS access key, or the "use security" toggle when security is off.
   *
   * @param error          issue code used in BASIC mode (has a user to name).
   * @param anonymousError issue code used when no user name applies.
   */
  private void addAuthError(
      final String configPrefix,
      final String endpoint,
      final List<Stage.ConfigIssue> issues,
      final Errors error,
      final Errors anonymousError
  ) {
    if (conf.useSecurity) {
      if (conf.securityConfig.securityMode == BASIC) {
        issues.add(context.createConfigIssue(
            Groups.SECURITY.name(),
            configPrefix + "." + SECURITY_CONFIG_NAME + "." + USER_CONFIG_NAME,
            error,
            conf.securityConfig.securityUser.get(),
            endpoint
        ));
      } else {
        issues.add(context.createConfigIssue(
            Groups.SECURITY.name(),
            configPrefix + "." + SECURITY_CONFIG_NAME + "." + ACCESS_KEY_ID_CONFIG_NAME,
            anonymousError,
            endpoint
        ));
      }
    } else {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + USE_SECURITY_CONFIG_NAME,
          anonymousError,
          endpoint
      ));
    }
  }

  /**
   * Sends a request to validate a query. If an error happens during the validation,
   * appropriate issue will be added to the list of config issues.
   *
   * @param prefix - configuration name prefix.
   * @param index - index the _validate/query request is scoped to (may be null/blank for none).
   * @param query - query to validate.
   * @param isIncrementalMode - true if the incremental mode is enabled.
   * @param offsetPlaceholder - pattern of the offset placeholder.
   * @param timeOffset - initial time offset.
   * @param issues - list of config issues.
   */
  public void validateQuery(
      final String prefix,
      final String index,
      final String query,
      final boolean isIncrementalMode,
      final String offsetPlaceholder,
      final String timeOffset,
      final List<Stage.ConfigIssue> issues
  ) {
    if (!issues.isEmpty()) {
      // It means there are prior validation errors that do not even allow to set up a REST client.
      return;
    }
    Header[] headers = conf.useSecurity ?
        getAuthenticationHeader(
            conf.securityConfig.securityUser.get(),
            conf.securityConfig.securityPassword.get()
        ) :
        new Header[]{};
    String requestBody = prepareRequestBody(prefix, query, isIncrementalMode, offsetPlaceholder, timeOffset, issues);
    sendRequestAndValidateResponse(prefix, index, headers, requestBody, issues);
  }

  /**
   * Builds the _validate/query request body from the configured query: substitutes the initial
   * offset for the placeholder (incremental mode only), parses the result as JSON, and strips
   * every top-level property except "query".
   *
   * @return the normalized JSON body, or {@code null} if the query failed to parse (an issue
   *     is added in that case).
   */
  private String prepareRequestBody(
      final String configPrefix,
      final String query,
      final boolean isIncrementalMode,
      final String offsetPlaceholder,
      final String timeOffset,
      final List<Stage.ConfigIssue> issues
  ) {
    String result = null;
    String body = query;
    if (isIncrementalMode) {
      String validatedTimeOffset = timeOffset;
      try {
        // Probe only: a non-numeric offset must be quoted to stay valid JSON.
        Long.parseLong(timeOffset);
      } catch (final NumberFormatException ex) {
        validatedTimeOffset = '"' + timeOffset + '"';
      }
      // NOTE(review): replaceAll treats offsetPlaceholder as a regex and the offset as a
      // replacement string ('$'/'\' are special) — confirm placeholders/offsets never contain
      // regex metacharacters.
      body = body.replaceAll(offsetPlaceholder, validatedTimeOffset);
    }
    JsonObject json = null;
    try {
      json = JSON_PARSER.parse(body).getAsJsonObject();
    } catch (final JsonSyntaxException | IllegalStateException ex) {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + QUERY_CONFIG_NAME,
          Errors.ELASTICSEARCH_34,
          body,
          ex.getMessage(),
          ex
      ));
    }
    if (json != null) {
      // Copy the entry set before removing to avoid concurrent modification.
      for (final Map.Entry<String, JsonElement> entry : new HashSet<>(json.entrySet())) {
        if (!entry.getKey().equals(QUERY_PROPERTY_NAME)) {
          json.remove(entry.getKey());
        }
      }
      result = GSON.toJson(json);
    }
    return result;
  }

  /**
   * POSTs the prepared body to [/index]/_validate/query and checks the "valid" flag in the
   * response; all failures become config issues.
   */
  private void sendRequestAndValidateResponse(
      final String configPrefix,
      final String index,
      final Header[] headers,
      final String requestBody,
      final List<Stage.ConfigIssue> issues
  ) {
    if (requestBody == null) {
      return; // issues must be empty, if not - there is a bug in the implementation
      // Should we throw an exception? add an error to the issue list? log the error?
    }
    String endpoint = Optional.ofNullable(index)
        .filter(i -> !i.trim().isEmpty())
        .map(i -> "/" + i)
        .orElse("")
        + VALIDATE_QUERY_PATH;
    Response response = null;
    try {
      response = restClient.performRequest("POST",
          endpoint,
          Collections.emptyMap(),
          new StringEntity(requestBody, APPLICATION_JSON),
          headers
      );
    } catch (final ResponseException ex) {
      addHTTPResponseError(configPrefix, "query", endpoint, ex.getResponse(), issues);
    } catch (final IOException ex) {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + HTTP_URIS_CONFIG_NAME,
          Errors.ELASTICSEARCH_43,
          createHostsString(),
          ex.getMessage(),
          ex
      ));
    }
    // response stays null on failure; readResponseBody/validateResponseBody tolerate null.
    String responseBody = readResponseBody(configPrefix, response, issues);
    validateResponseBody(configPrefix, requestBody, responseBody, issues);
  }

  /**
   * Reads the whole response entity into a string.
   *
   * @return the body, {@code ""} for an empty entity, or {@code null} when {@code response}
   *     is null or reading fails (an issue is added on failure).
   */
  private String readResponseBody(
      final String configPrefix,
      final Response response,
      final List<Stage.ConfigIssue> issues
  ) {
    if (response == null) {
      return null;
    }
    String responseBody = null;
    try (
        // Converts an input stream into a string.
        // \A means the beginning of the input.
        // hasNext() and next() skip the delimiter at the beginning
        // Since \A doesn't correspond to any character, nothing is skipped.
        // Since there is only one \A match, next() returns the whole content
        // till the end of the stream.
        Scanner scanner = new Scanner(response.getEntity().getContent()).useDelimiter("\\A")
    ) {
      responseBody = scanner.hasNext() ? scanner.next() : "";
    } catch (final IOException ex) {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + HTTP_URIS_CONFIG_NAME,
          Errors.ELASTICSEARCH_42,
          ex
      ));
    }
    return responseBody;
  }

  /**
   * Checks the "valid" flag in a _validate/query response body; adds ELASTICSEARCH_49 when the
   * flag is missing/malformed and ELASTICSEARCH_41 when the server rejected the query.
   */
  private void validateResponseBody(
      final String configPrefix,
      final String requestBody,
      final String responseBody,
      final List<Stage.ConfigIssue> issues
  ) {
    if (responseBody == null) {
      return;
    }
    JsonElement json = parseResponseBody(configPrefix, responseBody, issues);
    if (json != null) {
      Boolean valid = extractValidPropertyValue(json);
      if (valid == null) {
        issues.add(context.createConfigIssue(
            Groups.ELASTIC_SEARCH.name(),
            configPrefix + "." + HTTP_URIS_CONFIG_NAME,
            Errors.ELASTICSEARCH_49,
            responseBody
        ));
      } else if (!valid) {
        issues.add(context.createConfigIssue(
            Groups.ELASTIC_SEARCH.name(),
            configPrefix + "." + QUERY_CONFIG_NAME,
            Errors.ELASTICSEARCH_41,
            requestBody
        ));
      }
    }
  }

  /**
   * Parses a response body as JSON.
   *
   * @return the parsed element, or {@code null} on a syntax error (an issue is added).
   */
  private JsonElement parseResponseBody(
      final String configPrefix,
      final String responseBody,
      final List<Stage.ConfigIssue> issues
  ) {
    JsonElement json = null;
    try {
      json = JSON_PARSER.parse(responseBody);
    } catch (final JsonSyntaxException ex) {
      issues.add(context.createConfigIssue(
          Groups.ELASTIC_SEARCH.name(),
          configPrefix + "." + HTTP_URIS_CONFIG_NAME,
          Errors.ELASTICSEARCH_49,
          responseBody,
          ex
      ));
    }
    return json;
  }

  /**
   * Extracts the boolean "valid" property from a JSON object.
   *
   * @return the flag, or {@code null} when the element is not an object, the property is
   *     missing, or it is not a boolean primitive.
   */
  private Boolean extractValidPropertyValue(final JsonElement json) {
    Boolean valid = null;
    if (json.isJsonObject()) {
      JsonObject jsonObject = json.getAsJsonObject();
      if (jsonObject.has(VALID_PROPERTY_NAME)) {
        JsonElement validElement = jsonObject.get(VALID_PROPERTY_NAME);
        if (validElement.isJsonPrimitive()) {
          JsonPrimitive validPrimitive = validElement.getAsJsonPrimitive();
          if (validPrimitive.isBoolean()) {
            valid = validPrimitive.getAsBoolean();
          }
        }
      }
    }
    return valid;
  }

  /** Thin pass-through to the underlying {@link RestClient}; requires a successful init(). */
  public Response performRequest(
      String method,
      String endpoint,
      Map<String, String> params,
      HttpEntity entity,
      Header... headers
  ) throws IOException {
    return restClient.performRequest(method, endpoint, params, entity, headers);
  }

  /**
   * Attaches a host sniffer when client sniffing is enabled; the scheme of the first host
   * decides between plain HTTP and HTTPS sniffing. Other schemes get no sniffer.
   */
  private void addSniffer(HttpHost[] hosts) {
    if (conf.clientSniff) {
      switch (hosts[0].getSchemeName()) {
        case "http":
          sniffer = Sniffer.builder(restClient).build();
          break;
        case "https":
          HostsSniffer hostsSniffer = new ElasticsearchHostsSniffer(
              restClient,
              ElasticsearchHostsSniffer.DEFAULT_SNIFF_REQUEST_TIMEOUT,
              ElasticsearchHostsSniffer.Scheme.HTTPS
          );
          sniffer = Sniffer.builder(restClient).setHostsSniffer(hostsSniffer).build();
          break;
        default:
          // unsupported scheme. do nothing.
      }
    }
  }

  /**
   * Installs an SSLContext on the client builder: the JVM default when no truststore path is
   * configured, otherwise a context backed by the configured JKS truststore. Bad paths,
   * passwords, or keystore errors are reported as config issues.
   */
  private void buildSSLContext(String prefix, List<Stage.ConfigIssue> issues, RestClientBuilder restClientBuilder) throws IOException {
    try {
      final SSLContext sslcontext;
      final String trustStorePath = conf.securityConfig.sslTrustStorePath;
      if (StringUtils.isEmpty(trustStorePath)) {
        sslcontext = SSLContext.getDefault();
      } else {
        String trustStorePass = null;
        try {
          trustStorePass = conf.securityConfig.sslTrustStorePassword.get();
        } catch (StageException e) {
          issues.add(
              context.createConfigIssue(
                  Groups.SECURITY.name(),
                  prefix + "." + SecurityConfig.NAME + ".sslTrustStorePassword",
                  Errors.ELASTICSEARCH_31,
                  e.getMessage(),
                  e
              )
          );
        }
        if (issues.isEmpty() && StringUtils.isEmpty(trustStorePass)) {
          trustStorePass = null;
          issues.add(
              context.createConfigIssue(
                  Groups.SECURITY.name(),
                  prefix + "." + SecurityConfig.NAME + ".sslTrustStorePassword",
                  Errors.ELASTICSEARCH_10
              )
          );
        }
        Path path = Paths.get(trustStorePath);
        if (!Files.exists(path)) {
          path = null;
          issues.add(
              context.createConfigIssue(
                  Groups.SECURITY.name(),
                  prefix + "." + SecurityConfig.NAME + ".sslTrustStorePath",
                  Errors.ELASTICSEARCH_11,
                  trustStorePath
              )
          );
        }
        if (path != null && trustStorePass != null) {
          KeyStore keyStore = KeyStore.getInstance("jks");
          try (InputStream is = Files.newInputStream(path)) {
            keyStore.load(is, trustStorePass.toCharArray());
          }
          sslcontext = SSLContexts.custom().loadTrustMaterial(keyStore, null).build();
        } else {
          // Issues were already reported; install a null context (builder keeps its default).
          sslcontext = null;
        }
      }
      restClientBuilder.setHttpClientConfigCallback(
          new RestClientBuilder.HttpClientConfigCallback() {
            @Override
            public HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder) {
              return httpClientBuilder.setSSLContext(sslcontext);
            }
          }
      );
    } catch (IOException | KeyStoreException | NoSuchAlgorithmException | KeyManagementException | CertificateException e) {
      issues.add(
          context.createConfigIssue(
              Groups.SECURITY.name(),
              prefix + "." + SecurityConfig.NAME + ".sslTrustStorePath",
              Errors.ELASTICSEARCH_12,
              Optional.ofNullable(e.getMessage()).orElse("no details provided"),
              e
          )
      );
    }
  }

  /** Validates that a URI matches host:port with a port in [0, 65535]; adds issues otherwise. */
  private void validateUri(String uri, List<Stage.ConfigIssue> issues, String configName) {
    Matcher matcher = URI_PATTERN.matcher(uri);
    if (!matcher.matches()) {
      issues.add(
          getContext().createConfigIssue(
              Groups.ELASTIC_SEARCH.name(),
              configName,
              Errors.ELASTICSEARCH_07,
              uri
          )
      );
    } else {
      int port = Integer.parseInt(matcher.group(1));
      if (port < 0 || port > 65535) {
        issues.add(
            getContext().createConfigIssue(
                Groups.ELASTIC_SEARCH.name(),
                configName,
                Errors.ELASTICSEARCH_08,
                port
            )
        );
      }
    }
  }

  /**
   * Builds a Basic-auth Authorization header for the given credentials.
   *
   * @return an empty array when security is off or in AWS SigV4 mode (the interceptor signs
   *     requests instead), otherwise a single "Authorization: Basic ..." header.
   */
  public Header[] getAuthenticationHeader(String securityUser, String securityPassword) {
    if (!conf.useSecurity || conf.securityConfig.securityMode.equals(SecurityMode.AWSSIGV4)) {
      return new Header[0];
    }
    // Credentials are in form of "username:password".
    // NOTE(review): getBytes() uses the platform default charset — confirm UTF-8 credentials
    // are not expected here.
    String securityData = (securityUser.contains(":")) ?
        securityUser :
        securityUser.concat(":").concat(securityPassword);
    byte[] credentials = securityData.getBytes();
    return Collections.singletonList(new BasicHeader(
        "Authorization",
        "Basic " + Base64.encodeBase64String(credentials)
    )).toArray(new Header[1]);
  }

  private Stage.Context getContext() {
    return context;
  }

  /** @return the detected server version string, or "not-known" when detection failed. */
  public String getVersion() {
    return version;
  }

  /** @return the detected major version (e.g. 6), or -1 when detection failed. */
  public int getMajorVersion() {
    return majorVersion;
  }
}
/*! ******************************************************************************
 *
 * Pentaho Data Integration
 *
 * Copyright (C) 2019 by Hitachi Vantara : http://www.pentaho.com
 *
 *******************************************************************************
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 ******************************************************************************/
package org.pentaho.di.plugins.fileopensave.providers.vfs;

import org.apache.commons.io.IOUtils;
import org.apache.commons.vfs2.FileObject;
import org.apache.commons.vfs2.FileSystemException;
import org.apache.commons.vfs2.FileType;
import org.apache.commons.vfs2.Selectors;
import org.pentaho.di.connections.ConnectionDetails;
import org.pentaho.di.connections.ConnectionManager;
import org.pentaho.di.connections.ConnectionProvider;
import org.pentaho.di.connections.vfs.VFSConnectionDetails;
import org.pentaho.di.connections.vfs.VFSConnectionProvider;
import org.pentaho.di.connections.vfs.VFSHelper;
import org.pentaho.di.connections.vfs.VFSRoot;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleFileException;
import org.pentaho.di.core.variables.Variables;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.plugins.fileopensave.api.providers.BaseFileProvider;
import org.pentaho.di.plugins.fileopensave.api.providers.Utils;
import org.pentaho.di.plugins.fileopensave.api.providers.exception.FileException;
import org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSDirectory;
import org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSFile;
import org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSLocation;
import org.pentaho.di.plugins.fileopensave.providers.vfs.model.VFSTree;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;

/**
 * File-open/save provider backed by PDI VFS connections. Browses, creates, copies, moves and
 * deletes files through Apache Commons VFS, resolving paths with the connection-specific
 * options from {@link VFSHelper}. Root listings are cached per connection name (see
 * {@link #clearProviderCache()}).
 *
 * Created by bmorrise on 2/14/19.
 */
public class VFSFileProvider extends BaseFileProvider<VFSFile> {

  public static final String NAME = "VFS Connections";
  public static final String TYPE = "vfs";

  private Supplier<ConnectionManager> connectionManagerSupplier = ConnectionManager::getInstance;

  // Cache of root listings keyed by connection name; rebuilt lazily after clearProviderCache().
  private Map<String, List<VFSFile>> roots = new HashMap<>();

  @Override
  public Class<VFSFile> getFileClass() {
    return VFSFile.class;
  }

  @Override
  public String getName() {
    return NAME;
  }

  @Override
  public String getType() {
    return TYPE;
  }

  /**
   * @return always {@code true}; VFS connections are always browsable.
   */
  public boolean isAvailable() {
    return true;
  }

  /**
   * Builds the top-level tree: one {@link VFSLocation} per configured VFS connection.
   *
   * @return tree of all VFS connection locations
   */
  @Override
  public VFSTree getTree() {
    VFSTree vfsTree = new VFSTree( NAME );

    List<ConnectionProvider<? extends ConnectionDetails>> providers =
      connectionManagerSupplier.get().getProvidersByType( VFSConnectionProvider.class );

    for ( ConnectionProvider<? extends ConnectionDetails> provider : providers ) {
      for ( ConnectionDetails connectionDetails : provider.getConnectionDetails() ) {
        VFSLocation vfsLocation = new VFSLocation();
        vfsLocation.setName( connectionDetails.getName() );
        vfsLocation.setRoot( NAME );
        vfsLocation.setHasChildren( true );
        vfsLocation.setCanDelete( true );
        vfsLocation.setConnection( connectionDetails.getName() );
        vfsTree.addChild( vfsLocation );
      }
    }

    return vfsTree;
  }

  /**
   * Lists the root directories of a connection (e.g. buckets), using the per-connection cache
   * when available.
   *
   * @param file file whose connection name selects the VFS connection
   * @return cached or freshly-built list of root directories
   */
  private List<VFSFile> getRoot( VFSFile file ) {
    if ( this.roots.containsKey( file.getConnection() ) ) {
      return this.roots.get( file.getConnection() );
    }
    List<VFSFile> files = new ArrayList<>();
    VFSConnectionDetails vfsConnectionDetails =
      (VFSConnectionDetails) ConnectionManager.getInstance().getConnectionDetails( file.getConnection() );
    @SuppressWarnings( "unchecked" )
    VFSConnectionProvider<VFSConnectionDetails> vfsConnectionProvider =
      (VFSConnectionProvider<VFSConnectionDetails>) ConnectionManager.getInstance()
        .getConnectionProvider( vfsConnectionDetails.getType() );
    List<VFSRoot> roots = vfsConnectionProvider.getLocations( vfsConnectionDetails );
    for ( VFSRoot root : roots ) {
      VFSDirectory vfsDirectory = new VFSDirectory();
      vfsDirectory.setName( root.getName() );
      vfsDirectory.setDate( root.getModifiedDate() );
      vfsDirectory.setHasChildren( true );
      vfsDirectory.setCanAddChildren( true );
      vfsDirectory.setConnection( vfsConnectionDetails.getName() );
      vfsDirectory.setPath( vfsConnectionProvider.getProtocol( vfsConnectionDetails ) + "://" + root.getName() );
      vfsDirectory.setRoot( NAME );
      files.add( vfsDirectory );
    }
    this.roots.put( file.getConnection(), files );
    return files;
  }

  /**
   * Lists the children of a directory; files are filtered by name against {@code filters},
   * sub-directories are always included. A null path means "list the connection roots".
   *
   * @param file    directory (or root marker) to list
   * @param filters file-name filter expression passed to {@link Utils#matches}
   * @return children, or an empty list when the path cannot be resolved
   */
  @Override
  public List<VFSFile> getFiles( VFSFile file, String filters ) {
    if ( file.getPath() == null ) {
      return getRoot( file );
    }
    List<VFSFile> files = new ArrayList<>();
    try {
      FileObject fileObject = KettleVFS
        .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      FileType fileType = fileObject.getType();
      if ( fileType.hasChildren() ) {
        FileObject[] children = fileObject.getChildren();
        for ( FileObject child : children ) {
          FileType childType = child.getType();
          if ( childType.hasChildren() ) {
            files.add( VFSDirectory.create( file.getPath(), child, file.getConnection() ) );
          } else {
            if ( Utils.matches( child.getName().getBaseName(), filters ) ) {
              files.add( VFSFile.create( file.getPath(), child, file.getConnection() ) );
            }
          }
        }
      }
    } catch ( KettleFileException | FileSystemException ignored ) {
      // File does not exist
    }
    return files;
  }

  /**
   * Deletes the given files; files that fail to delete are silently omitted from the result
   * (best-effort by design).
   *
   * @param files files to delete
   * @return the subset that was actually deleted
   */
  public List<VFSFile> delete( List<VFSFile> files ) {
    List<VFSFile> deletedFiles = new ArrayList<>();
    for ( VFSFile file : files ) {
      try {
        FileObject fileObject = KettleVFS
          .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
        if ( fileObject.delete() ) {
          deletedFiles.add( file );
        }
      } catch ( KettleFileException | FileSystemException kfe ) {
        // Ignore don't add
      }
    }
    return deletedFiles;
  }

  /**
   * Creates a folder at the given file's path.
   *
   * @param folder folder to create
   * @return the created directory, or {@code null} on failure
   */
  @Override
  public VFSFile add( VFSFile folder ) {
    try {
      FileObject fileObject = KettleVFS
        .getFileObject( folder.getPath(), new Variables(),
          VFSHelper.getOpts( folder.getPath(), folder.getConnection() ) );
      fileObject.createFolder();
      // NOTE(review): this assumes the path ends with a separator and strips it to get the
      // parent — confirm callers always pass a trailing-slash path.
      String parent = folder.getPath().substring( 0, folder.getPath().length() - 1 );
      return VFSDirectory.create( parent, fileObject, folder.getConnection() );
    } catch ( KettleFileException | FileSystemException kfe ) {
      // TODO: Do something smart here
    }
    return null;
  }

  /**
   * Renames a file (implemented as a move).
   *
   * @param file      file to rename
   * @param newPath   full target path
   * @param overwrite whether an existing target is deleted first
   * @return the renamed file, or {@code null} on failure
   */
  @Override
  public VFSFile rename( VFSFile file, String newPath, boolean overwrite ) {
    return doMove( file, newPath, overwrite );
  }

  /**
   * Moves a file to a new path.
   *
   * @param file      file to move
   * @param toPath    full target path
   * @param overwrite whether an existing target is deleted first
   * @return the moved file, or {@code null} on failure
   */
  @Override
  public VFSFile move( VFSFile file, String toPath, boolean overwrite ) {
    return doMove( file, toPath, overwrite );
  }

  /**
   * Shared implementation of rename/move.
   *
   * @param file      file to move
   * @param newPath   full target path
   * @param overwrite whether an existing target is deleted first
   * @return the moved file or directory, or {@code null} on failure
   */
  private VFSFile doMove( VFSFile file, String newPath, boolean overwrite ) {
    try {
      FileObject fileObject = KettleVFS
        .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      FileObject renameObject = KettleVFS
        .getFileObject( newPath, new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      if ( overwrite && renameObject.exists() ) {
        renameObject.delete();
      }
      fileObject.moveTo( renameObject );
      if ( file instanceof VFSDirectory ) {
        return VFSDirectory.create( renameObject.getParent().getPublicURIString(), renameObject,
          file.getConnection() );
      } else {
        return VFSFile.create( renameObject.getParent().getPublicURIString(), renameObject, file.getConnection() );
      }
    } catch ( KettleFileException | FileSystemException e ) {
      return null;
    }
  }

  /**
   * Copies a file to a new path.
   *
   * @param file      source file
   * @param toPath    full target path
   * @param overwrite unused by the VFS implementation (copyFrom overwrites)
   * @return the newly created copy
   * @throws FileException if the copy fails
   */
  @Override
  public VFSFile copy( VFSFile file, String toPath, boolean overwrite ) throws FileException {
    try {
      FileObject fileObject = KettleVFS
        .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      FileObject copyObject =
        KettleVFS.getFileObject( toPath, new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      copyObject.copyFrom( fileObject, Selectors.SELECT_SELF );
      // Fix: build the returned file from the copy (copyObject), not the source fileObject,
      // so the result points at the destination path.
      if ( file instanceof VFSDirectory ) {
        return VFSDirectory.create( copyObject.getParent().getPublicURIString(), copyObject, file.getConnection() );
      } else {
        return VFSFile.create( copyObject.getParent().getPublicURIString(), copyObject, file.getConnection() );
      }
    } catch ( KettleFileException | FileSystemException e ) {
      throw new FileException();
    }
  }

  /**
   * Checks whether a path exists on the directory's connection.
   *
   * @param dir  directory supplying the connection
   * @param path path to test (sanitized first)
   * @return true if the path exists
   * @throws FileException if the path cannot be resolved
   */
  @Override
  public boolean fileExists( VFSFile dir, String path ) throws FileException {
    path = sanitizeName( dir, path );
    try {
      FileObject fileObject = KettleVFS.getFileObject( path, new Variables(), VFSHelper.getOpts( path, dir.getConnection() ) );
      return fileObject.exists();
    } catch ( KettleFileException | FileSystemException e ) {
      throw new FileException();
    }
  }

  /**
   * Opens a file's content for reading.
   *
   * @param file file to read
   * @return an input stream over the content, or {@code null} on failure (best-effort)
   */
  public InputStream readFile( VFSFile file ) {
    try {
      FileObject fileObject = KettleVFS
        .getFileObject( file.getPath(), new Variables(), VFSHelper.getOpts( file.getPath(), file.getConnection() ) );
      return fileObject.getContent().getInputStream();
    } catch ( KettleException | FileSystemException e ) {
      return null;
    }
  }

  /**
   * Writes a stream to a new file under the destination directory's connection.
   *
   * @param inputStream content to write (not closed by this method)
   * @param destDir     destination directory supplying connection and parent path
   * @param path        full path of the file to write
   * @param overwrite   unused by the VFS implementation (the output stream overwrites)
   * @return the written file, or {@code null} on an I/O failure while copying
   * @throws FileException if the target path cannot be resolved
   */
  @Override
  public VFSFile writeFile( InputStream inputStream, VFSFile destDir, String path, boolean overwrite )
    throws FileException {
    FileObject fileObject;
    try {
      fileObject = KettleVFS
        .getFileObject( path, new Variables(), VFSHelper.getOpts( destDir.getPath(), destDir.getConnection() ) );
    } catch ( KettleException ke ) {
      throw new FileException();
    }
    try ( OutputStream outputStream = fileObject.getContent().getOutputStream() ) {
      IOUtils.copy( inputStream, outputStream );
      outputStream.flush();
      return VFSFile.create( destDir.getPath(), fileObject, destDir.getConnection() );
    } catch ( IOException e ) {
      return null;
    }
  }

  /**
   * Two VFS files are "the same" when they live on the same connection.
   *
   * @param file1 first file
   * @param file2 second file
   * @return true when both are VFS files with equal (possibly both-null) connection names
   */
  @Override
  public boolean isSame( org.pentaho.di.plugins.fileopensave.api.providers.File file1,
                         org.pentaho.di.plugins.fileopensave.api.providers.File file2 ) {
    if ( file1 instanceof VFSFile && file2 instanceof VFSFile ) {
      String connection1 = ( (VFSFile) file1 ).getConnection();
      String connection2 = ( (VFSFile) file2 ).getConnection();
      // Null-safe: the original NPE'd when a file had no connection name.
      return connection1 == null ? connection2 == null : connection1.equals( connection2 );
    }
    return false;
  }

  /**
   * Finds a non-clashing name by appending " 1", " 2", ... (before the extension when one is
   * recognized) until the candidate does not exist.
   *
   * @param destDir destination directory supplying the connection
   * @param newPath desired path
   * @return a sanitized path that does not collide with an existing file; on a resolution
   *     error the last candidate is returned as-is
   * @throws FileException declared for interface compatibility
   */
  @Override
  public String getNewName( VFSFile destDir, String newPath ) throws FileException {
    String extension = Utils.getExtension( newPath );
    String parent = Utils.getParent( newPath );
    String name = Utils.getName( newPath ).replace( "." + extension, "" );
    int i = 1;
    String testName = sanitizeName( destDir, newPath );
    try {
      while ( KettleVFS
        .getFileObject( testName, new Variables(), VFSHelper.getOpts( testName, destDir.getConnection() ) )
        .exists() ) {
        if ( Utils.isValidExtension( extension ) ) {
          testName = sanitizeName( destDir, parent + name + " " + i + "." + extension );
        } else {
          testName = sanitizeName( destDir, newPath + " " + i );
        }
        i++;
      }
    } catch ( KettleFileException | FileSystemException e ) {
      return testName;
    }
    return testName;
  }

  /**
   * Builds a lightweight parent handle for a file (same connection, parent path).
   *
   * @param file file whose parent is requested
   * @return parent handle
   */
  @Override
  public VFSFile getParent( VFSFile file ) {
    VFSFile vfsFile = new VFSFile();
    vfsFile.setConnection( file.getConnection() );
    vfsFile.setPath( file.getParent() );
    return vfsFile;
  }

  @Override
  public String sanitizeName( VFSFile destDir, String newPath ) {
    return getConnectionProvider( newPath ).sanitizeName( newPath );
  }

  // NOTE(review): the provider lookup key here is a path/URI, not a connection type as in
  // getRoot() — confirm ConnectionManager.getConnectionProvider resolves the scheme itself.
  private VFSConnectionProvider<VFSConnectionDetails> getConnectionProvider( String key ) {
    @SuppressWarnings( "unchecked" )
    VFSConnectionProvider<VFSConnectionDetails> vfsConnectionProvider =
      (VFSConnectionProvider<VFSConnectionDetails>) ConnectionManager.getInstance()
        .getConnectionProvider( key );
    return vfsConnectionProvider;
  }

  /** Drops the per-connection root cache so the next browse re-fetches locations. */
  public void clearProviderCache() {
    this.roots = new HashMap<>();
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.storm.hive.bolt;

import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.topology.base.BaseRichBolt;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.utils.TupleUtils;
import org.apache.storm.Config;
import org.apache.storm.hive.common.HiveWriter;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
import org.apache.hive.hcatalog.streaming.*;
import org.apache.storm.hive.common.HiveOptions;
import org.apache.storm.hive.common.HiveUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.List;
import java.util.ArrayList;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Map.Entry;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
// NOTE(review): java.util.List is imported twice (redundant but legal in Java).
import java.util.List;
import java.util.LinkedList;
import java.io.IOException;

/**
 * Storm bolt that streams tuples into Hive using the HCatalog Streaming API.
 *
 * <p>Tuples are mapped to Hive records/partitions via the mapper configured in
 * {@link HiveOptions}, buffered in {@code tupleBatch}, and acked only after all
 * open {@link HiveWriter}s have been flushed (either when the batch size is
 * reached or on a tick tuple). One writer is cached per {@link HiveEndPoint},
 * bounded by {@code maxOpenConnections}; idle or eldest writers are retired to
 * stay under that bound.
 *
 * <p>Thread model: Storm invokes {@code execute} from a single executor thread;
 * {@code allWriters} is a ConcurrentHashMap because the heartbeat Timer thread
 * also iterates it.
 */
public class HiveBolt extends BaseRichBolt {
    private static final Logger LOG = LoggerFactory.getLogger(HiveBolt.class);
    private OutputCollector collector;
    private HiveOptions options;
    // Single-threaded pool used by HiveWriter to bound Hive call latency.
    private ExecutorService callTimeoutPool;
    private transient Timer heartBeatTimer;
    private Boolean kerberosEnabled = false;
    // Flag checked by the heartbeat TimerTask; cleared in cleanup() to stop rescheduling.
    private AtomicBoolean sendHeartBeat = new AtomicBoolean(false);
    private UserGroupInformation ugi = null;
    // Cache of one writer per Hive end point (table/partition combination).
    private Map<HiveEndPoint, HiveWriter> allWriters;
    // Tuples written to Hive but not yet acked; acked in bulk after a flush.
    private List<Tuple> tupleBatch;

    /**
     * @param options connection, batching, and mapping configuration for this bolt.
     */
    public HiveBolt(HiveOptions options) {
        this.options = options;
        tupleBatch = new LinkedList<Tuple>();
    }

    /**
     * Validates Kerberos settings, authenticates if both principal and keytab
     * are set, and initializes the writer cache, timeout pool, and heartbeat timer.
     *
     * <p>NOTE(review): any setup failure here is only logged at WARN and
     * swallowed, leaving the bolt half-initialized; later execute() calls would
     * then fail. Consider rethrowing — confirm against topology restart policy.
     */
    @Override
    public void prepare(Map conf, TopologyContext topologyContext, OutputCollector collector) {
        try {
            if(options.getKerberosPrincipal() == null && options.getKerberosKeytab() == null) {
                kerberosEnabled = false;
            } else if(options.getKerberosPrincipal() != null && options.getKerberosKeytab() != null) {
                kerberosEnabled = true;
            } else {
                // Exactly one of principal/keytab set is a configuration error.
                throw new IllegalArgumentException("To enable Kerberos, need to set both KerberosPrincipal " +
                                                   " & KerberosKeytab");
            }
            if (kerberosEnabled) {
                try {
                    ugi = HiveUtils.authenticate(options.getKerberosKeytab(), options.getKerberosPrincipal());
                } catch(HiveUtils.AuthenticationFailed ex) {
                    LOG.error("Hive Kerberos authentication failed " + ex.getMessage(), ex);
                    throw new IllegalArgumentException(ex);
                }
            }

            this.collector = collector;
            allWriters = new ConcurrentHashMap<HiveEndPoint,HiveWriter>();
            String timeoutName = "hive-bolt-%d";
            this.callTimeoutPool = Executors.newFixedThreadPool(1,
                    new ThreadFactoryBuilder().setNameFormat(timeoutName).build());

            sendHeartBeat.set(true);
            heartBeatTimer = new Timer();
            setupHeartBeatTimer();
        } catch(Exception e) {
            LOG.warn("unable to make connection to hive ", e);
        }
    }

    /**
     * Writes one tuple to its Hive end point and flushes/acks the whole batch
     * when the batch size is reached or a tick tuple arrives.
     *
     * <p>Failure handling: a {@link SerializationError} acks the tuple (it can
     * never be written, so replay is pointless); any other exception fails the
     * tuple plus the entire pending batch and aborts/closes all writers so the
     * failed tuples can be replayed into fresh transactions.
     */
    @Override
    public void execute(Tuple tuple) {
        try {
            boolean forceFlush = false;
            if (TupleUtils.isTick(tuple)) {
                LOG.debug("TICK received! current batch status [{}/{}]", tupleBatch.size(), options.getBatchSize());
                forceFlush = true;
            } else {
                List<String> partitionVals = options.getMapper().mapPartitions(tuple);
                HiveEndPoint endPoint = HiveUtils.makeEndPoint(partitionVals, options);
                HiveWriter writer = getOrCreateWriter(endPoint);
                writer.write(options.getMapper().mapRecord(tuple));
                tupleBatch.add(tuple);
                if (tupleBatch.size() >= options.getBatchSize())
                    forceFlush = true;
            }
            if(forceFlush && !tupleBatch.isEmpty()) {
                flushAllWriters(true);
                LOG.info("acknowledging tuples after writers flushed ");
                for(Tuple t : tupleBatch) {
                    collector.ack(t);
                }
                tupleBatch.clear();
            }
        } catch(SerializationError se) {
            // NOTE(review): message has no {} placeholder, so the tuple passed
            // as an argument is never rendered in the log output.
            LOG.info("Serialization exception occurred, tuple is acknowledged but not written to Hive.", tuple);
            this.collector.reportError(se);
            collector.ack(tuple);
        } catch(Exception e) {
            this.collector.reportError(e);
            collector.fail(tuple);
            // Fail everything pending so Storm replays the whole batch.
            for (Tuple t : tupleBatch) {
                collector.fail(t);
            }
            tupleBatch.clear();
            abortAndCloseWriters();
        }
    }

    /** This bolt is a sink; it emits nothing downstream. */
    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
    }

    /**
     * Stops heartbeats, flushes and closes every cached writer, and shuts down
     * the call-timeout pool, waiting {@code callTimeOut} ms per poll for
     * termination. Re-interrupts the thread if a close was interrupted.
     */
    @Override
    public void cleanup() {
        sendHeartBeat.set(false);
        for (Entry<HiveEndPoint, HiveWriter> entry : allWriters.entrySet()) {
            try {
                HiveWriter w = entry.getValue();
                w.flushAndClose();
            } catch (Exception ex) {
                LOG.warn("Error while closing writer to " + entry.getKey() + ". Exception follows.", ex);
                if (ex instanceof InterruptedException) {
                    Thread.currentThread().interrupt();
                }
            }
        }

        ExecutorService toShutdown[] = {callTimeoutPool};
        for (ExecutorService execService : toShutdown) {
            execService.shutdown();
            try {
                while (!execService.isTerminated()) {
                    execService.awaitTermination( options.getCallTimeOut(), TimeUnit.MILLISECONDS);
                }
            } catch (InterruptedException ex) {
                LOG.warn("shutdown interrupted on " + execService, ex);
            }
        }
        callTimeoutPool = null;
        super.cleanup();
        LOG.info("Hive Bolt stopped");
    }

    /**
     * Registers a tick-tuple frequency with Storm when
     * {@code tickTupleInterval > 0}, so partially-filled batches still flush.
     */
    @Override
    public Map<String, Object> getComponentConfiguration() {
        Map<String, Object> conf = super.getComponentConfiguration();
        if (conf == null)
            conf = new Config();

        if (options.getTickTupleInterval() > 0)
            conf.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, options.getTickTupleInterval());

        return conf;
    }

    /**
     * Schedules a one-shot heartbeat task; the task reschedules itself after
     * each run while {@code sendHeartBeat} is true, giving a periodic heartbeat
     * every {@code heartBeatInterval} seconds.
     */
    private void setupHeartBeatTimer() {
        if(options.getHeartBeatInterval()>0) {
            heartBeatTimer.schedule(new TimerTask() {
                    @Override
                    public void run() {
                        try {
                            if (sendHeartBeat.get()) {
                                LOG.debug("Start sending heartbeat on all writers");
                                sendHeartBeatOnAllWriters();
                                setupHeartBeatTimer();
                            }
                        } catch (Exception e) {
                            LOG.warn("Failed to heartbeat on HiveWriter ", e);
                        }
                    }
                }, options.getHeartBeatInterval() * 1000);
        }
    }

    /** Heartbeats every cached writer to keep Hive transactions from timing out. */
    private void sendHeartBeatOnAllWriters() throws InterruptedException {
        for (HiveWriter writer : allWriters.values()) {
            writer.heartBeat();
        }
    }

    /**
     * Flushes every cached writer.
     *
     * @param rollToNext whether each writer should roll to the next transaction
     *                   after committing (see {@link HiveWriter#flush}).
     */
    void flushAllWriters(boolean rollToNext)
        throws HiveWriter.CommitFailure, HiveWriter.TxnBatchFailure, HiveWriter.TxnFailure, InterruptedException {
        for(HiveWriter writer: allWriters.values()) {
            writer.flush(rollToNext);
        }
    }

    /** Best-effort abort then close of all writers; errors are logged, not thrown. */
    void abortAndCloseWriters() {
        try {
            abortAllWriters();
            closeAllWriters();
        } catch(Exception ie) {
            LOG.warn("unable to close hive connections. ", ie);
        }
    }

    /**
     * Abort current Txn on all writers.
     * Per-writer failures are logged and skipped so every writer gets a chance to abort.
     */
    private void abortAllWriters() throws InterruptedException, StreamingException, HiveWriter.TxnBatchFailure {
        for (Entry<HiveEndPoint,HiveWriter> entry : allWriters.entrySet()) {
            try {
                entry.getValue().abort();
            } catch (Exception e) {
                LOG.error("Failed to abort hive transaction batch, HiveEndPoint " + entry.getValue() +" due to exception ", e);
            }
        }
    }

    /**
     * Closes all writers and remove them from cache
     */
    private void closeAllWriters() {
        //1) Retire writers
        for (Entry<HiveEndPoint,HiveWriter> entry : allWriters.entrySet()) {
            try {
                entry.getValue().close();
            } catch(Exception e) {
                LOG.warn("unable to close writers. ", e);
            }
        }
        //2) Clear cache
        allWriters.clear();
    }

    /**
     * Returns the cached writer for {@code endPoint}, creating one on demand.
     * When the cache would exceed {@code maxOpenConnections}, idle writers are
     * retired first and, failing that, the least-recently-used one.
     */
    private HiveWriter getOrCreateWriter(HiveEndPoint endPoint)
        throws HiveWriter.ConnectFailure, InterruptedException {
        try {
            HiveWriter writer = allWriters.get( endPoint );
            if (writer == null) {
                LOG.debug("Creating Writer to Hive end point : " + endPoint);
                writer = HiveUtils.makeHiveWriter(endPoint, callTimeoutPool, ugi, options);
                if (allWriters.size() > (options.getMaxOpenConnections() - 1)) {
                    LOG.info("cached HiveEndPoint size {} exceeded maxOpenConnections {} ", allWriters.size(), options.getMaxOpenConnections());
                    int retired = retireIdleWriters();
                    if(retired==0) {
                        retireEldestWriter();
                    }
                }
                allWriters.put(endPoint, writer);
                HiveUtils.logAllHiveEndPoints(allWriters);
            }
            return writer;
        } catch (HiveWriter.ConnectFailure e) {
            LOG.error("Failed to create HiveWriter for endpoint: " + endPoint, e);
            throw e;
        }
    }

    /**
     * Locate writer that has not been used for longest time and retire it
     */
    private void retireEldestWriter() {
        LOG.info("Attempting close eldest writers");
        long oldestTimeStamp = System.currentTimeMillis();
        HiveEndPoint eldest = null;
        for (Entry<HiveEndPoint,HiveWriter> entry : allWriters.entrySet()) {
            if (entry.getValue().getLastUsed() < oldestTimeStamp) {
                eldest = entry.getKey();
                oldestTimeStamp = entry.getValue().getLastUsed();
            }
        }
        try {
            LOG.info("Closing least used Writer to Hive end point : " + eldest);
            allWriters.remove(eldest).flushAndClose();
        } catch (IOException e) {
            LOG.warn("Failed to close writer for end point: " + eldest, e);
        } catch (InterruptedException e) {
            LOG.warn("Interrupted when attempting to close writer for end point: " + eldest, e);
            Thread.currentThread().interrupt();
        } catch (Exception e) {
            // NOTE(review): message says "Interrupted" but this branch catches
            // arbitrary exceptions — misleading log text.
            LOG.warn("Interrupted when attempting to close writer for end point: " + eldest, e);
        }
    }

    /**
     * Locate all writers past idle timeout and retire them
     * @return number of writers retired
     */
    private int retireIdleWriters() {
        LOG.info("Attempting close idle writers");
        int count = 0;
        long now = System.currentTimeMillis();
        ArrayList<HiveEndPoint> retirees = new ArrayList<HiveEndPoint>();

        //1) Find retirement candidates
        for (Entry<HiveEndPoint,HiveWriter> entry : allWriters.entrySet()) {
            if(now - entry.getValue().getLastUsed() > options.getIdleTimeout()) {
                ++count;
                retirees.add(entry.getKey());
            }
        }
        //2) Retire them
        for(HiveEndPoint ep : retirees) {
            try {
                LOG.info("Closing idle Writer to Hive end point : {}", ep);
                allWriters.remove(ep).flushAndClose();
            } catch (IOException e) {
                // NOTE(review): mixes {} placeholder with string concatenation,
                // so the rendered message is malformed.
                LOG.warn("Failed to close writer for end point: {}. Error: "+ ep, e);
            } catch (InterruptedException e) {
                LOG.warn("Interrupted when attempting to close writer for end point: " + ep, e);
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                // NOTE(review): same misleading "Interrupted" text as above for
                // a generic exception.
                LOG.warn("Interrupted when attempting to close writer for end point: " + ep, e);
            }
        }
        return count;
    }
}
/*
 * Copyright @ 2015 Atlassian Pty Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.jitsi.impl.neomedia.transform.fec;

import net.sf.fmj.media.rtp.*;

import org.jitsi.impl.neomedia.*;
import org.jitsi.impl.neomedia.transform.*;
import org.jitsi.util.Logger;

/**
 * <tt>PacketTransformer</tt> which adds ulpfec packets. Works for a
 * specific SSRC.
 *
 * @author Boris Grozev
 */
class FECSender
    implements PacketTransformer
{
    /**
     * The <tt>Logger</tt> used by the <tt>FECSender</tt> class and its
     * instances to print debug information.
     */
    private static final Logger logger = Logger.getLogger(FECSender.class);

    /**
     * The single SSRC with which this <tt>FECSender</tt> works.
     */
    private long ssrc;

    /**
     * The ulpfec payload type.
     */
    private byte ulpfecPT;

    /**
     * An ulpfec packet will be generated for every <tt>fecRate</tt> media
     * packets.
     * If set to 0, no ulpfec packets will be generated.
     */
    private int fecRate;

    /**
     * A counter of packets. Incremented for every media packet.
     */
    private int counter = 0;

    /**
     * Number of ulpfec packets added.
     */
    private int nbFec = 0;

    /**
     * A fec packet, which will be sent once enough (that is <tt>fecRate</tt>)
     * media packets have passed, and have been "added" to the fec packet.
     * Should be always non-null.
     */
    private FECPacket fecPacket;

    /**
     * Creates a new <tt>FECSender</tt> instance.
     * @param ssrc the SSRC with which this <tt>FECSender</tt> will work.
     * @param fecRate the rate at which to add ulpfec packets.
     * @param ulpfecPT the payload to use for ulpfec packets.
     */
    FECSender(long ssrc, int fecRate, byte ulpfecPT)
    {
        this.ssrc = ssrc;
        this.fecRate = fecRate;
        this.ulpfecPT = ulpfecPT;
        fecPacket = new FECPacket(ssrc, ulpfecPT);
    }

    /**
     * {@inheritDoc}
     *
     * FEC is applied on the send path only, so the reverse (receive)
     * direction is a pass-through.
     */
    @Override
    public RawPacket[] reverseTransform(RawPacket[] pkts)
    {
        return pkts;
    }

    /**
     * {@inheritDoc}
     *
     * Picks the first non-null RTP (version 2) packet from <tt>pkts</tt> as
     * the media packet to protect; if none is found the array passes through
     * unchanged.
     */
    @Override
    public synchronized RawPacket[] transform(RawPacket[] pkts)
    {
        RawPacket pkt = null;
        for (RawPacket p : pkts)
        {
            if (p != null && p.getVersion() == RTPHeader.VERSION)
            {
                pkt = p;
                break;
            }
        }

        if (pkt == null)
            return pkts;

        return transformSingle(pkt, pkts);
    }

    /**
     * Processes <tt>pkt</tt> and, if <tt>fecRate</tt> packets have
     * passed, creates a fec packet protecting the last <tt>fecRate</tt> media
     * packets and adds this fec packet to <tt>pkts</tt>.
     *
     * Note that every outgoing media packet's sequence number is shifted by
     * <tt>nbFec</tt>, because each emitted fec packet consumes one sequence
     * number of the stream.
     *
     * @param pkt media packet to process.
     * @param pkts array to try to use for output.
     * @return an array that contains <tt>pkt</tt> (after processing)
     * and possible an ulpfec packet if one was added.
     */
    private RawPacket[] transformSingle(RawPacket pkt, RawPacket[] pkts)
    {
        // TODO due to the overhead introduced by adding any redundant data it
        // is usually a good idea to activate it only when the network
        // conditions require it.
        counter++;
        pkt.setSequenceNumber(pkt.getSequenceNumber() + nbFec);

        if (fecRate != 0)
            fecPacket.addMedia(pkt);

        if (fecRate != 0 && (counter % fecRate) == 0)
        {
            fecPacket.finish();

            // Prefer reusing a null slot in the input array; otherwise grow
            // the array by one to carry the fec packet.
            boolean found = false;
            for (int i = 0; i < pkts.length; i++)
            {
                if (pkts[i] == null)
                {
                    found = true;
                    pkts[i] = fecPacket;
                    break;
                }
            }

            if (!found)
            {
                RawPacket[] pkts2 = new RawPacket[pkts.length + 1];
                System.arraycopy(pkts, 0, pkts2, 0, pkts.length);
                pkts2[pkts.length] = fecPacket;
                pkts = pkts2;
            }

            // Start accumulating the next fec packet.
            fecPacket = new FECPacket(ssrc, ulpfecPT);
            nbFec++;
        }

        return pkts;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void close()
    {
        if (logger.isInfoEnabled())
        {
            logger.info(
                    "Closing FECSender for ssrc=" + ssrc
                        + ". Added " + nbFec + " ulpfec packets.");
        }
    }

    /**
     * Sets the ulpfec payload type.
     * @param ulpfecPT the payload type.
     */
    public void setUlpfecPT(byte ulpfecPT)
    {
        this.ulpfecPT = ulpfecPT;
        if (fecPacket != null)
            fecPacket.payloadType = ulpfecPT;
    }

    /**
     * Updates the <tt>fecRate</tt> property. Re-allocates buffers, if
     * needed.
     * @param newFecRate the new rate to set.
     */
    public void setFecRate(int newFecRate)
    {
        if (fecRate != newFecRate)
        {
            fecPacket = new FECPacket(ssrc, ulpfecPT); //reset it
            fecRate = newFecRate;
            counter = 0;
        }
    }

    /**
     * A <tt>RawPacket</tt> extension which represents an ulpfec packet. Allows
     * for a media packet to be protected to be added via the <tt>addMedia()</tt>
     * method.
     *
     * The format of this packet (see RFC 3550 and RFC 5109) is as follows:
     *
     * 12 byte RTP header (no CSRC or extensions):
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |V=2|P|X|  CC   |M|     PT      |       sequence number         |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                           timestamp                           |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |           synchronization source (SSRC) identifier            |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * 10 byte FEC Header:
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |E|L|P|X|  CC   |M| PT recovery |            SN base            |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |                          TS recovery                          |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |        length recovery        |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * 4 byte FEC Level 0 Header (the short mask is always used):
     *  0                   1                   2                   3
     *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     * |       Protection Length       |             mask              |
     * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
     *
     * Followed by 'Protection Length' bytes of 'FEC Level 0 Payload'.
     */
    private static class FECPacket
        extends RawPacket
    {
        /**
         * SN base. The sequence number of the first media packet added.
         */
        int base = -1;

        /**
         * Number of media packets added.
         */
        int numPackets = 0;

        /**
         * The biggest payload (in the sense of RFC5109) of the media packets
         * added.
         */
        int protectionLength = -1;

        /**
         * The sequence of the last media packet added.
         */
        int lastAddedSeq = -1;

        /**
         * This <tt>RawPacket</tt>'s buffer.
         */
        private byte[] buf;

        /**
         * The SSRC of this packet.
         */
        private final long ssrc;

        /**
         * The RTP timestamp of the last added media packet.
         */
        private long lastAddedTS = -1;

        /**
         * The payload type for this packet.
         */
        byte payloadType;

        /**
         * Length of the RTP header of this packet.
         */
        private static final int RTP_HDR_LEN = 12;

        /**
         * Length of the additional headers added to this packet (in bytes):
         * 10 bytes FEC Header + 4 bytes FEC Level 0 Header (short mask)
         */
        private static final int FEC_HDR_LEN = 14;

        /**
         * Creates a new instance, initialized with a buffer obtained using
         * <tt>new</tt>.
         * @param ssrc the SSRC
         */
        FECPacket(long ssrc, byte payloadType)
        {
            super(
                    new byte[FECTransformEngine.INITIAL_BUFFER_SIZE],
                    0,
                    FECTransformEngine.INITIAL_BUFFER_SIZE);

            buf = getBuffer();
            this.ssrc = ssrc;
            this.payloadType = payloadType;
        }

        /**
         * Adds a media packet to be protected by this <tt>FECPacket</tt>.
         *
         * The first packet is copied into the recovery fields; every
         * subsequent packet is XOR-ed in, per the RFC 5109 recovery scheme.
         *
         * @param media the media packet to add.
         */
        private void addMedia(RawPacket media)
        {
            byte[] mediaBuf = media.getBuffer();
            int mediaOff = media.getOffset();
            // payload length in the sense of RFC5109
            int mediaPayloadLen = media.getLength() - 12;

            // make sure that the buffer is big enough
            if (buf.length < mediaPayloadLen + RTP_HDR_LEN + FEC_HDR_LEN)
            {
                byte[] newBuff
                    = new byte[mediaPayloadLen + RTP_HDR_LEN + FEC_HDR_LEN];
                System.arraycopy(buf, 0, newBuff, 0, buf.length);
                // zero the newly-added tail so XOR-ing into it is a no-op
                for (int i = buf.length; i < newBuff.length; i++)
                    newBuff[i] = (byte) 0;
                buf = newBuff;
                setBuffer(buf);
            }

            if (base == -1)
            {
                // first packet, make a copy and not XOR
                base = media.getSequenceNumber();

                // 8 bytes from media's RTP header --> the FEC Header
                System.arraycopy(mediaBuf, mediaOff, buf, RTP_HDR_LEN, 8);
                // set the 'length recovery' field
                buf[RTP_HDR_LEN+8] = (byte) (mediaPayloadLen>>8 & 0xff);
                buf[RTP_HDR_LEN+9] = (byte) (mediaPayloadLen & 0xff);

                // copy the payload
                System.arraycopy(
                        mediaBuf, mediaOff + RTP_HDR_LEN,
                        buf, RTP_HDR_LEN + FEC_HDR_LEN,
                        mediaPayloadLen);
            }
            else
            {
                // not the first packet, do XOR

                // 8 bytes from media's RTP header --> the FEC Header
                for (int i = 0; i < 8; i++)
                    buf[RTP_HDR_LEN + i] ^= mediaBuf[mediaOff + i];

                // 'length recovery'
                buf[RTP_HDR_LEN+8] ^= (byte) (mediaPayloadLen>>8 & 0xff);
                buf[RTP_HDR_LEN+9] ^= (byte) (mediaPayloadLen & 0xff);

                // payload
                for (int i = 0; i < mediaPayloadLen; i++)
                {
                    buf[RTP_HDR_LEN + FEC_HDR_LEN + i]
                        ^= mediaBuf[mediaOff + RTP_HDR_LEN + i];
                }
            }

            lastAddedSeq = media.getSequenceNumber();
            lastAddedTS = media.getTimestamp();
            if (mediaPayloadLen > protectionLength)
                protectionLength = mediaPayloadLen;
            numPackets++;
        }

        /**
         * Fill in the required header fields and prepare this packet to be
         * sent.
         *
         * The fec packet takes the sequence number right after the last
         * protected media packet; the short mask assumes the protected
         * packets are consecutive starting at <tt>base</tt>.
         *
         * @return the finished packet.
         */
        private RawPacket finish()
        {
            // RTP header fields
            buf[0] = (byte) 0x80; //no Padding, no Extension, no CSRCs
            setPayloadType(payloadType);
            setSequenceNumber(lastAddedSeq + 1);
            setSSRC((int)ssrc);
            setTimestamp(lastAddedTS); //TODO: check 5109 -- which TS should be used?

            // FEC Header
            buf[RTP_HDR_LEN + 2] = (byte) (base>>8 & 0xff);
            buf[RTP_HDR_LEN + 3] = (byte) (base & 0xff);

            // FEC Level 0 header
            buf[RTP_HDR_LEN + 10] = (byte) (protectionLength>>8 & 0xff);
            buf[RTP_HDR_LEN + 11] = (byte) (protectionLength & 0xff);

            // assume all packets from base to lastAddedSeq were added
            int mask = ((1<<numPackets) - 1) << (16-numPackets);
            buf[RTP_HDR_LEN + 12] = (byte) (mask>>8 & 0xff);
            buf[RTP_HDR_LEN + 13] = (byte) (mask & 0xff);

            setLength(RTP_HDR_LEN + FEC_HDR_LEN + protectionLength);

            return this;
        }
    }
}
package de.plushnikov.intellij.plugin.processor.clazz.constructor;

import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiAnnotation;
import com.intellij.psi.PsiAnonymousClass;
import com.intellij.psi.PsiClass;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiField;
import com.intellij.psi.PsiMethod;
import com.intellij.psi.PsiModifier;
import com.intellij.psi.PsiModifierList;
import com.intellij.psi.PsiParameter;
import com.intellij.psi.PsiParameterList;
import com.intellij.psi.PsiType;
import com.intellij.psi.PsiTypeParameter;
import com.intellij.psi.util.PsiTypesUtil;
import com.intellij.util.StringBuilderSpinAllocator;
import de.plushnikov.intellij.plugin.lombokconfig.ConfigKeys;
import de.plushnikov.intellij.plugin.problem.ProblemBuilder;
import de.plushnikov.intellij.plugin.processor.clazz.AbstractClassProcessor;
import de.plushnikov.intellij.plugin.processor.field.AccessorsInfo;
import de.plushnikov.intellij.plugin.psi.LombokLightMethodBuilder;
import de.plushnikov.intellij.plugin.thirdparty.LombokUtils;
import de.plushnikov.intellij.plugin.util.LombokProcessorUtil;
import de.plushnikov.intellij.plugin.util.PsiAnnotationUtil;
import de.plushnikov.intellij.plugin.util.PsiClassUtil;
import de.plushnikov.intellij.plugin.util.PsiElementUtil;
import de.plushnikov.intellij.plugin.util.PsiFieldUtil;
import de.plushnikov.intellij.plugin.util.PsiMethodUtil;
import lombok.Value;
import lombok.experimental.NonFinal;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;

import java.lang.annotation.Annotation;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;

/**
 * Base lombok processor class for constructor processing
 *
 * @author Plushnikov Michail
 */
public abstract class AbstractConstructorClassProcessor extends AbstractClassProcessor {

  /**
   * @param supportedAnnotationClass the lombok constructor annotation this processor handles
   *                                 (e.g. NoArgsConstructor, AllArgsConstructor, RequiredArgsConstructor)
   * @param supportedClass           the PSI element type the annotation may be applied to
   */
  protected AbstractConstructorClassProcessor(@NotNull Class<? extends Annotation> supportedAnnotationClass, @NotNull Class<? extends PsiElement> supportedClass) {
    super(supportedAnnotationClass, supportedClass);
  }

  /**
   * Validates the annotated class: annotation target type, declared access level,
   * and the presence of a usable (default/varargs) constructor in the base class.
   * All checks run even after the first failure so every problem is reported.
   *
   * @return true if all checks pass
   */
  @Override
  protected boolean validate(@NotNull PsiAnnotation psiAnnotation, @NotNull PsiClass psiClass, @NotNull ProblemBuilder builder) {
    boolean result = true;

    if (!validateAnnotationOnRightType(psiClass, builder)) {
      result = false;
    }
    if (!validateVisibility(psiAnnotation)) {
      result = false;
    }

    if (!validateBaseClassConstructor(psiClass, builder)) {
      result = false;
    }
    return result;
  }

  /** Returns true if the annotation declares a resolvable access level. */
  protected boolean validateVisibility(@NotNull PsiAnnotation psiAnnotation) {
    final String visibility = LombokProcessorUtil.getAccessVisibility(psiAnnotation);
    return null != visibility;
  }

  /**
   * Rejects annotation types and interfaces; constructors can only be generated
   * for classes and enums.
   */
  protected boolean validateAnnotationOnRightType(@NotNull PsiClass psiClass, @NotNull ProblemBuilder builder) {
    boolean result = true;
    if (psiClass.isAnnotationType() || psiClass.isInterface()) {
      builder.addError("Annotation is only supported on a class or enum type");
      result = false;
    }
    return result;
  }

  /**
   * Checks that the superclass (if any, and if it declares constructors at all)
   * offers a constructor callable with no arguments, since the generated
   * constructor will implicitly invoke super(). Anonymous classes and enums are
   * exempt.
   */
  protected boolean validateBaseClassConstructor(@NotNull PsiClass psiClass, @NotNull ProblemBuilder builder) {
    if (psiClass instanceof PsiAnonymousClass || psiClass.isEnum()) {
      return true;
    }
    PsiClass baseClass = psiClass.getSuperClass();
    if (baseClass == null) {
      return true;
    }
    PsiMethod[] constructors = baseClass.getConstructors();
    if (constructors.length == 0) {
      return true;
    }

    for (PsiMethod constructor : constructors) {
      final int parametersCount = constructor.getParameterList().getParametersCount();
      // A varargs constructor with a single parameter is also callable with no args.
      if (parametersCount == 0 || parametersCount == 1 && constructor.isVarArgs()) {
        return true;
      }
    }
    builder.addError("Lombok needs a default constructor in the base class");
    return false;
  }

  /**
   * Reports an error if the class already declares a constructor (or a static
   * method matching {@code staticConstructorName}) with the same parameter
   * types that lombok is about to generate.
   *
   * @return true if no clash was found
   */
  public boolean validateIsConstructorDefined(@NotNull PsiClass psiClass, @Nullable String staticConstructorName, @NotNull Collection<PsiField> params, @NotNull ProblemBuilder builder) {
    boolean result = true;

    final List<PsiType> paramTypes = new ArrayList<PsiType>(params.size());
    for (PsiField param : params) {
      paramTypes.add(param.getType());
    }

    final Collection<PsiMethod> definedConstructors = PsiClassUtil.collectClassConstructorIntern(psiClass);
    final String constructorName = getConstructorName(psiClass);

    if (containsMethod(definedConstructors, constructorName, paramTypes)) {
      if (paramTypes.isEmpty()) {
        builder.addError("Constructor without parameters is already defined");
      } else {
        builder.addError("Constructor with %d parameters is already defined", paramTypes.size());
      }
      result = false;
    }

    if (isStaticConstructor(staticConstructorName)) {
      final Collection<PsiMethod> definedMethods = PsiClassUtil.collectClassStaticMethodsIntern(psiClass);

      if (containsMethod(definedMethods, staticConstructorName, paramTypes)) {
        if (paramTypes.isEmpty()) {
          builder.addError("Method '%s' matched staticConstructorName is already defined", staticConstructorName);
        } else {
          builder.addError("Method '%s' with %d parameters matched staticConstructorName is already defined", staticConstructorName, paramTypes.size());
        }
        result = false;
      }
    }

    return result;
  }

  /** The generated constructor's name, i.e. the class name. */
  @NotNull
  public String getConstructorName(@NotNull PsiClass psiClass) {
    return psiClass.getName();
  }

  /** True if any of {@code definedMethods} matches the given name and parameter types. */
  private boolean containsMethod(final Collection<PsiMethod> definedMethods, final String methodName, final List<PsiType> paramTypes) {
    for (PsiMethod method : definedMethods) {
      if (PsiElementUtil.methodMatches(method, null, null, methodName, paramTypes)) {
        return true;
      }
    }
    return false;
  }

  /**
   * Collects all fields a generated all-args constructor should initialize:
   * non-static, not lombok-internal ($-prefixed), and not final-with-initializer
   * (finality also considers an enclosing @Value annotation minus @NonFinal).
   */
  @NotNull
  @SuppressWarnings("deprecation")
  protected Collection<PsiField> getAllNotInitializedAndNotStaticFields(@NotNull PsiClass psiClass) {
    Collection<PsiField> allNotInitializedNotStaticFields = new ArrayList<PsiField>();
    final boolean classAnnotatedWithValue = PsiAnnotationUtil.isAnnotatedWith(psiClass, Value.class, lombok.experimental.Value.class);
    for (PsiField psiField : psiClass.getFields()) {
      // skip fields named $
      boolean addField = !psiField.getName().startsWith(LombokUtils.LOMBOK_INTERN_FIELD_MARKER);

      final PsiModifierList modifierList = psiField.getModifierList();
      if (null != modifierList) {
        // skip static fields
        addField &= !modifierList.hasModifierProperty(PsiModifier.STATIC);

        boolean isFinal = isFieldFinal(psiField, modifierList, classAnnotatedWithValue);
        // skip initialized final fields
        addField &= (!isFinal || null == psiField.getInitializer());
      }

      if (addField) {
        allNotInitializedNotStaticFields.add(psiField);
      }
    }
    return allNotInitializedNotStaticFields;
  }

  /**
   * Collects the fields a @RequiredArgsConstructor must initialize: the subset
   * of {@link #getAllNotInitializedAndNotStaticFields} that is final or @NonNull
   * and has no initializer.
   */
  @NotNull
  @SuppressWarnings("deprecation")
  public Collection<PsiField> getRequiredFields(@NotNull PsiClass psiClass) {
    Collection<PsiField> result = new ArrayList<PsiField>();
    final boolean classAnnotatedWithValue = PsiAnnotationUtil.isAnnotatedWith(psiClass, Value.class, lombok.experimental.Value.class);

    for (PsiField psiField : getAllNotInitializedAndNotStaticFields(psiClass)) {
      final PsiModifierList modifierList = psiField.getModifierList();
      if (null != modifierList) {
        final boolean isFinal = isFieldFinal(psiField, modifierList, classAnnotatedWithValue);
        final boolean isNonNull = PsiAnnotationUtil.isAnnotatedWith(psiField, LombokUtils.NON_NULL_PATTERN);
        // accept uninitialized final or nonnull fields
        if ((isFinal || isNonNull) && null == psiField.getInitializer()) {
          result.add(psiField);
        }
      }
    }
    return result;
  }

  /**
   * A field counts as final when explicitly declared final, or when the class
   * is annotated with @Value and the field is not exempted via @NonFinal.
   */
  protected boolean isFieldFinal(@NotNull PsiField psiField, @NotNull PsiModifierList modifierList, boolean classAnnotatedWithValue) {
    boolean isFinal = PsiFieldUtil.isFinal(psiField);
    if (!isFinal && classAnnotatedWithValue) {
      isFinal = PsiAnnotationUtil.isNotAnnotatedWith(psiField, NonFinal.class);
    }
    return isFinal;
  }

  /**
   * Convenience overload: resolves the annotation's {@code staticName} and
   * delegates to the full {@link #createConstructorMethod(PsiClass, String, PsiAnnotation, boolean, Collection, String)}.
   */
  @NotNull
  protected Collection<PsiMethod> createConstructorMethod(@NotNull PsiClass psiClass, @PsiModifier.ModifierConstant @NotNull String methodModifier, @NotNull PsiAnnotation psiAnnotation, boolean useJavaDefaults, @NotNull Collection<PsiField> params) {
    final String staticName = getStaticConstructorName(psiAnnotation);

    return createConstructorMethod(psiClass, methodModifier, psiAnnotation, useJavaDefaults, params, staticName);
  }

  /** The annotation's {@code staticName} attribute, or null if absent. */
  protected String getStaticConstructorName(@NotNull PsiAnnotation psiAnnotation) {
    return PsiAnnotationUtil.getStringAnnotationValue(psiAnnotation, "staticName");
  }

  /** A static factory is requested iff {@code staticName} is a non-blank string. */
  protected boolean isStaticConstructor(@Nullable String staticName) {
    return !StringUtil.isEmptyOrSpaces(staticName);
  }

  /**
   * Builds the light constructor (and, when {@code staticName} is set, a public
   * static factory wrapping a private constructor). Enum constructors are
   * always private.
   *
   * @param useJavaDefaults generate a no-args constructor assigning each field
   *                        its Java default value instead of taking parameters
   * @return one element (constructor) or two (constructor + static factory)
   */
  @NotNull
  protected Collection<PsiMethod> createConstructorMethod(@NotNull PsiClass psiClass, @PsiModifier.ModifierConstant @NotNull String methodModifier, @NotNull PsiAnnotation psiAnnotation, boolean useJavaDefaults,
                                                          @NotNull Collection<PsiField> params, @Nullable String staticName) {
    final boolean staticConstructorRequired = isStaticConstructor(staticName);

    final String constructorVisibility = staticConstructorRequired || psiClass.isEnum() ? PsiModifier.PRIVATE : methodModifier;

    // @ConstructorProperties is suppressed for default-valued constructors or
    // when disabled via annotation attribute / lombok.config.
    final boolean suppressConstructorProperties = useJavaDefaults || readAnnotationOrConfigProperty(psiAnnotation, psiClass, "suppressConstructorProperties", ConfigKeys.ANYCONSTRUCTOR_SUPPRESS_CONSTRUCTOR_PROPERTIES);

    final PsiMethod constructor = createConstructor(psiClass, constructorVisibility, suppressConstructorProperties, useJavaDefaults, params, psiAnnotation);
    if (staticConstructorRequired) {
      PsiMethod staticConstructor = createStaticConstructor(psiClass, staticName, useJavaDefaults, params, psiAnnotation);
      return Arrays.asList(constructor, staticConstructor);
    }
    return Collections.singletonList(constructor);
  }

  /**
   * Builds the light PSI constructor itself: optional @ConstructorProperties
   * annotation, onConstructor=@__(...) annotations, one parameter per field
   * (prefix-stripped per @Accessors), and a body of {@code this.x = x;}
   * assignments (or Java default values when {@code useJavaDefaults}).
   */
  private PsiMethod createConstructor(@NotNull PsiClass psiClass, @PsiModifier.ModifierConstant @NotNull String modifier, boolean suppressConstructorProperties, boolean useJavaDefaults, @NotNull Collection<PsiField> params, @NotNull PsiAnnotation psiAnnotation) {
    LombokLightMethodBuilder constructor = new LombokLightMethodBuilder(psiClass.getManager(), getConstructorName(psiClass))
        .withConstructor(true)
        .withContainingClass(psiClass)
        .withNavigationElement(psiAnnotation)
        .withModifier(modifier);

    final AccessorsInfo accessorsInfo = AccessorsInfo.build(psiClass);
    final PsiModifierList modifierList = constructor.getModifierList();
    if (!suppressConstructorProperties && !useJavaDefaults && !params.isEmpty()) {
      StringBuilder constructorPropertiesAnnotation = new StringBuilder("java.beans.ConstructorProperties( {");
      for (PsiField param : params) {
        constructorPropertiesAnnotation.append('"').append(accessorsInfo.removePrefix(param.getName())).append('"').append(',');
      }
      // drop the trailing comma before closing the annotation value
      constructorPropertiesAnnotation.deleteCharAt(constructorPropertiesAnnotation.length() - 1);
      constructorPropertiesAnnotation.append("} ) ");

      modifierList.addAnnotation(constructorPropertiesAnnotation.toString());
    }

    addOnXAnnotations(psiAnnotation, modifierList, "onConstructor");

    if (!useJavaDefaults) {
      for (PsiField param : params) {
        constructor.withParameter(accessorsInfo.removePrefix(param.getName()), param.getType());
      }
    }

    final StringBuilder blockText = new StringBuilder();
    for (PsiField param : params) {
      final String fieldInitializer = useJavaDefaults ? PsiTypesUtil.getDefaultValueOfType(param.getType()) : accessorsInfo.removePrefix(param.getName());
      blockText.append(String.format("this.%s = %s;\n", param.getName(), fieldInitializer));
    }
    constructor.withBody(PsiMethodUtil.createCodeBlockFromText(blockText.toString(), psiClass));

    return constructor;
  }

  /**
   * Builds the public static factory {@code staticName} that forwards its
   * parameters to the (private) constructor and returns a new instance with
   * the class's generic type parameters applied.
   */
  private PsiMethod createStaticConstructor(PsiClass psiClass, String staticName, boolean useJavaDefaults, Collection<PsiField> params, PsiAnnotation psiAnnotation) {
    LombokLightMethodBuilder method = new LombokLightMethodBuilder(psiClass.getManager(), staticName)
        .withMethodReturnType(PsiClassUtil.getTypeWithGenerics(psiClass))
        .withContainingClass(psiClass)
        .withNavigationElement(psiAnnotation)
        .withModifier(PsiModifier.PUBLIC, PsiModifier.STATIC);

    if (!useJavaDefaults) {
      for (PsiField param : params) {
        method.withParameter(param.getName(), param.getType());
      }
    }

    final String psiClassName = buildClassNameWithGenericTypeParameters(psiClass);
    final String paramsText = useJavaDefaults ? "" : joinParameters(method.getParameterList());
    final String blockText = String.format("return new %s(%s);", psiClassName, paramsText);
    method.withBody(PsiMethodUtil.createCodeBlockFromText(blockText, psiClass));

    return method;
  }

  /**
   * Renders the class name with its type parameters, e.g. {@code Foo<T,U>},
   * for use in the static factory's {@code new} expression.
   */
  private String buildClassNameWithGenericTypeParameters(@NotNull final PsiClass psiClass) {
    StringBuilder psiClassName = new StringBuilder(getConstructorName(psiClass));

    final PsiTypeParameter[] psiClassTypeParameters = psiClass.getTypeParameters();
    if (psiClassTypeParameters.length > 0) {
      psiClassName.append('<');
      for (PsiTypeParameter psiClassTypeParameter : psiClassTypeParameters) {
        psiClassName.append(psiClassTypeParameter.getName()).append(',');
      }
      // replace the trailing comma with the closing angle bracket
      psiClassName.setCharAt(psiClassName.length() - 1, '>');
    }
    return psiClassName.toString();
  }

  /** Joins parameter names with commas for the factory's constructor call. */
  private String joinParameters(PsiParameterList parameterList) {
    final StringBuilder builder = StringBuilderSpinAllocator.alloc();
    try {
      for (PsiParameter psiParameter : parameterList.getParameters()) {
        builder.append(psiParameter.getName()).append(',');
      }
      if (parameterList.getParameters().length > 0) {
        builder.deleteCharAt(builder.length() - 1);
      }
      return builder.toString();
    } finally {
      StringBuilderSpinAllocator.dispose(builder);
    }
  }
}
/*******************************************************************************
 * Copyright 2009-2016 Amazon Services. All Rights Reserved.
 * Licensed under the Apache License, Version 2.0 (the "License");
 *
 * You may not use this file except in compliance with the License.
 * You may obtain a copy of the License at: http://aws.amazon.com/apache2.0
 * This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
 * CONDITIONS OF ANY KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations under the License.
 *******************************************************************************
 * Partnered Ltl Data Output
 * API Version: 2010-10-01
 * Library Version: 2016-10-05
 * Generated: Wed Oct 05 06:15:34 PDT 2016
 */
package com.amazonservices.mws.FulfillmentInboundShipment.model;

import javax.xml.bind.annotation.*;
import com.amazonservices.mws.client.*;

/**
 * PartneredLtlDataOutput complex type.
 *
 * <p>Generated JAXB/MWS model — do not hand-edit logic; regenerate from the
 * schema instead. Mirrors the XML schema type {@code PartneredLtlDataOutput}
 * from the FulfillmentInboundShipment 2010-10-01 namespace:</p>
 *
 * <pre>
 * &lt;complexType name="PartneredLtlDataOutput"&gt;
 *   &lt;complexContent&gt;
 *     &lt;restriction base="{http://www.w3.org/2001/XMLSchema}anyType"&gt;
 *       &lt;sequence&gt;
 *         &lt;element name="Contact" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}Contact"/&gt;
 *         &lt;element name="BoxCount" type="{http://www.w3.org/2001/XMLSchema}long"/&gt;
 *         &lt;element name="SellerFreightClass" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
 *         &lt;element name="FreightReadyDate" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *         &lt;element name="PalletList" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}PalletList"/&gt;
 *         &lt;element name="TotalWeight" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}Weight"/&gt;
 *         &lt;element name="SellerDeclaredValue" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}Amount" minOccurs="0"/&gt;
 *         &lt;element name="AmazonCalculatedValue" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}Amount" minOccurs="0"/&gt;
 *         &lt;element name="PreviewPickupDate" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *         &lt;element name="PreviewDeliveryDate" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *         &lt;element name="PreviewFreightClass" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *         &lt;element name="AmazonReferenceId" type="{http://www.w3.org/2001/XMLSchema}string"/&gt;
 *         &lt;element name="IsBillOfLadingAvailable" type="{http://www.w3.org/2001/XMLSchema}boolean"/&gt;
 *         &lt;element name="PartneredEstimate" type="{http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/}PartneredEstimate" minOccurs="0"/&gt;
 *         &lt;element name="CarrierName" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/&gt;
 *       &lt;/sequence&gt;
 *     &lt;/restriction&gt;
 *   &lt;/complexContent&gt;
 * &lt;/complexType&gt;
 * </pre>
 */
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name="PartneredLtlDataOutput", propOrder={
    "contact",
    "boxCount",
    "sellerFreightClass",
    "freightReadyDate",
    "palletList",
    "totalWeight",
    "sellerDeclaredValue",
    "amazonCalculatedValue",
    "previewPickupDate",
    "previewDeliveryDate",
    "previewFreightClass",
    "amazonReferenceId",
    "isBillOfLadingAvailable",
    "partneredEstimate",
    "carrierName"
})
@XmlRootElement(name = "PartneredLtlDataOutput")
public class PartneredLtlDataOutput extends AbstractMwsObject {

    // Fields annotated required=true are mandatory per the schema; the others
    // are minOccurs="0" and may legitimately be null after unmarshalling.

    @XmlElement(name="Contact",required=true)
    private Contact contact;

    @XmlElement(name="BoxCount",required=true)
    private long boxCount;

    @XmlElement(name="SellerFreightClass")
    private String sellerFreightClass;

    @XmlElement(name="FreightReadyDate",required=true)
    private String freightReadyDate;

    @XmlElement(name="PalletList",required=true)
    private PalletList palletList;

    @XmlElement(name="TotalWeight",required=true)
    private Weight totalWeight;

    @XmlElement(name="SellerDeclaredValue")
    private Amount sellerDeclaredValue;

    @XmlElement(name="AmazonCalculatedValue")
    private Amount amazonCalculatedValue;

    @XmlElement(name="PreviewPickupDate",required=true)
    private String previewPickupDate;

    @XmlElement(name="PreviewDeliveryDate",required=true)
    private String previewDeliveryDate;

    @XmlElement(name="PreviewFreightClass",required=true)
    private String previewFreightClass;

    @XmlElement(name="AmazonReferenceId",required=true)
    private String amazonReferenceId;

    @XmlElement(name="IsBillOfLadingAvailable",required=true)
    private boolean isBillOfLadingAvailable;

    @XmlElement(name="PartneredEstimate")
    private PartneredEstimate partneredEstimate;

    @XmlElement(name="CarrierName")
    private String carrierName;

    /** @return the value of Contact. */
    public Contact getContact() {
        return contact;
    }

    /** Set the value of Contact. @param contact the new value to set. */
    public void setContact(Contact contact) {
        this.contact = contact;
    }

    /** @return true if Contact is set (non-null). */
    public boolean isSetContact() {
        return contact != null;
    }

    /** Fluent setter for Contact. @param contact the new value. @return this instance. */
    public PartneredLtlDataOutput withContact(Contact contact) {
        this.contact = contact;
        return this;
    }

    /** @return the value of BoxCount. */
    public long getBoxCount() {
        return boxCount;
    }

    /** Set the value of BoxCount. @param boxCount the new value to set. */
    public void setBoxCount(long boxCount) {
        this.boxCount = boxCount;
    }

    // Note: BoxCount is a primitive long, so there is no isSetBoxCount().

    /** Fluent setter for BoxCount. @param boxCount the new value. @return this instance. */
    public PartneredLtlDataOutput withBoxCount(long boxCount) {
        this.boxCount = boxCount;
        return this;
    }

    /** @return the value of SellerFreightClass. */
    public String getSellerFreightClass() {
        return sellerFreightClass;
    }

    /** Set the value of SellerFreightClass. @param sellerFreightClass the new value to set. */
    public void setSellerFreightClass(String sellerFreightClass) {
        this.sellerFreightClass = sellerFreightClass;
    }

    /** @return true if SellerFreightClass is set (non-null). */
    public boolean isSetSellerFreightClass() {
        return sellerFreightClass != null;
    }

    /** Fluent setter for SellerFreightClass. @param sellerFreightClass the new value. @return this instance. */
    public PartneredLtlDataOutput withSellerFreightClass(String sellerFreightClass) {
        this.sellerFreightClass = sellerFreightClass;
        return this;
    }

    /** @return the value of FreightReadyDate. */
    public String getFreightReadyDate() {
        return freightReadyDate;
    }

    /** Set the value of FreightReadyDate. @param freightReadyDate the new value to set. */
    public void setFreightReadyDate(String freightReadyDate) {
        this.freightReadyDate = freightReadyDate;
    }

    /** @return true if FreightReadyDate is set (non-null). */
    public boolean isSetFreightReadyDate() {
        return freightReadyDate != null;
    }

    /** Fluent setter for FreightReadyDate. @param freightReadyDate the new value. @return this instance. */
    public PartneredLtlDataOutput withFreightReadyDate(String freightReadyDate) {
        this.freightReadyDate = freightReadyDate;
        return this;
    }

    /** @return the value of PalletList. */
    public PalletList getPalletList() {
        return palletList;
    }

    /** Set the value of PalletList. @param palletList the new value to set. */
    public void setPalletList(PalletList palletList) {
        this.palletList = palletList;
    }

    /** @return true if PalletList is set (non-null). */
    public boolean isSetPalletList() {
        return palletList != null;
    }

    /** Fluent setter for PalletList. @param palletList the new value. @return this instance. */
    public PartneredLtlDataOutput withPalletList(PalletList palletList) {
        this.palletList = palletList;
        return this;
    }

    /** @return the value of TotalWeight. */
    public Weight getTotalWeight() {
        return totalWeight;
    }

    /** Set the value of TotalWeight. @param totalWeight the new value to set. */
    public void setTotalWeight(Weight totalWeight) {
        this.totalWeight = totalWeight;
    }

    /** @return true if TotalWeight is set (non-null). */
    public boolean isSetTotalWeight() {
        return totalWeight != null;
    }

    /** Fluent setter for TotalWeight. @param totalWeight the new value. @return this instance. */
    public PartneredLtlDataOutput withTotalWeight(Weight totalWeight) {
        this.totalWeight = totalWeight;
        return this;
    }

    /** @return the value of SellerDeclaredValue. */
    public Amount getSellerDeclaredValue() {
        return sellerDeclaredValue;
    }

    /** Set the value of SellerDeclaredValue. @param sellerDeclaredValue the new value to set. */
    public void setSellerDeclaredValue(Amount sellerDeclaredValue) {
        this.sellerDeclaredValue = sellerDeclaredValue;
    }

    /** @return true if SellerDeclaredValue is set (non-null). */
    public boolean isSetSellerDeclaredValue() {
        return sellerDeclaredValue != null;
    }

    /** Fluent setter for SellerDeclaredValue. @param sellerDeclaredValue the new value. @return this instance. */
    public PartneredLtlDataOutput withSellerDeclaredValue(Amount sellerDeclaredValue) {
        this.sellerDeclaredValue = sellerDeclaredValue;
        return this;
    }

    /** @return the value of AmazonCalculatedValue. */
    public Amount getAmazonCalculatedValue() {
        return amazonCalculatedValue;
    }

    /** Set the value of AmazonCalculatedValue. @param amazonCalculatedValue the new value to set. */
    public void setAmazonCalculatedValue(Amount amazonCalculatedValue) {
        this.amazonCalculatedValue = amazonCalculatedValue;
    }

    /** @return true if AmazonCalculatedValue is set (non-null). */
    public boolean isSetAmazonCalculatedValue() {
        return amazonCalculatedValue != null;
    }

    /** Fluent setter for AmazonCalculatedValue. @param amazonCalculatedValue the new value. @return this instance. */
    public PartneredLtlDataOutput withAmazonCalculatedValue(Amount amazonCalculatedValue) {
        this.amazonCalculatedValue = amazonCalculatedValue;
        return this;
    }

    /** @return the value of PreviewPickupDate. */
    public String getPreviewPickupDate() {
        return previewPickupDate;
    }

    /** Set the value of PreviewPickupDate. @param previewPickupDate the new value to set. */
    public void setPreviewPickupDate(String previewPickupDate) {
        this.previewPickupDate = previewPickupDate;
    }

    /** @return true if PreviewPickupDate is set (non-null). */
    public boolean isSetPreviewPickupDate() {
        return previewPickupDate != null;
    }

    /** Fluent setter for PreviewPickupDate. @param previewPickupDate the new value. @return this instance. */
    public PartneredLtlDataOutput withPreviewPickupDate(String previewPickupDate) {
        this.previewPickupDate = previewPickupDate;
        return this;
    }

    /** @return the value of PreviewDeliveryDate. */
    public String getPreviewDeliveryDate() {
        return previewDeliveryDate;
    }

    /** Set the value of PreviewDeliveryDate. @param previewDeliveryDate the new value to set. */
    public void setPreviewDeliveryDate(String previewDeliveryDate) {
        this.previewDeliveryDate = previewDeliveryDate;
    }

    /** @return true if PreviewDeliveryDate is set (non-null). */
    public boolean isSetPreviewDeliveryDate() {
        return previewDeliveryDate != null;
    }

    /** Fluent setter for PreviewDeliveryDate. @param previewDeliveryDate the new value. @return this instance. */
    public PartneredLtlDataOutput withPreviewDeliveryDate(String previewDeliveryDate) {
        this.previewDeliveryDate = previewDeliveryDate;
        return this;
    }

    /** @return the value of PreviewFreightClass. */
    public String getPreviewFreightClass() {
        return previewFreightClass;
    }

    /** Set the value of PreviewFreightClass. @param previewFreightClass the new value to set. */
    public void setPreviewFreightClass(String previewFreightClass) {
        this.previewFreightClass = previewFreightClass;
    }

    /** @return true if PreviewFreightClass is set (non-null). */
    public boolean isSetPreviewFreightClass() {
        return previewFreightClass != null;
    }

    /** Fluent setter for PreviewFreightClass. @param previewFreightClass the new value. @return this instance. */
    public PartneredLtlDataOutput withPreviewFreightClass(String previewFreightClass) {
        this.previewFreightClass = previewFreightClass;
        return this;
    }

    /** @return the value of AmazonReferenceId. */
    public String getAmazonReferenceId() {
        return amazonReferenceId;
    }

    /** Set the value of AmazonReferenceId. @param amazonReferenceId the new value to set. */
    public void setAmazonReferenceId(String amazonReferenceId) {
        this.amazonReferenceId = amazonReferenceId;
    }

    /** @return true if AmazonReferenceId is set (non-null). */
    public boolean isSetAmazonReferenceId() {
        return amazonReferenceId != null;
    }

    /** Fluent setter for AmazonReferenceId. @param amazonReferenceId the new value. @return this instance. */
    public PartneredLtlDataOutput withAmazonReferenceId(String amazonReferenceId) {
        this.amazonReferenceId = amazonReferenceId;
        return this;
    }

    /** @return true if IsBillOfLadingAvailable is set to true. */
    public boolean isIsBillOfLadingAvailable() {
        return isBillOfLadingAvailable;
    }

    // getIsBillOfLadingAvailable duplicates the boolean accessor above to
    // satisfy both JavaBean naming conventions used by callers.

    /** @return the value of IsBillOfLadingAvailable. */
    public boolean getIsBillOfLadingAvailable() {
        return isBillOfLadingAvailable;
    }

    /** Set the value of IsBillOfLadingAvailable. @param isBillOfLadingAvailable the new value to set. */
    public void setIsBillOfLadingAvailable(boolean isBillOfLadingAvailable) {
        this.isBillOfLadingAvailable = isBillOfLadingAvailable;
    }

    /** Fluent setter for IsBillOfLadingAvailable. @param isBillOfLadingAvailable the new value. @return this instance. */
    public PartneredLtlDataOutput withIsBillOfLadingAvailable(boolean isBillOfLadingAvailable) {
        this.isBillOfLadingAvailable = isBillOfLadingAvailable;
        return this;
    }

    /** @return the value of PartneredEstimate. */
    public PartneredEstimate getPartneredEstimate() {
        return partneredEstimate;
    }

    /** Set the value of PartneredEstimate. @param partneredEstimate the new value to set. */
    public void setPartneredEstimate(PartneredEstimate partneredEstimate) {
        this.partneredEstimate = partneredEstimate;
    }

    /** @return true if PartneredEstimate is set (non-null). */
    public boolean isSetPartneredEstimate() {
        return partneredEstimate != null;
    }

    /** Fluent setter for PartneredEstimate. @param partneredEstimate the new value. @return this instance. */
    public PartneredLtlDataOutput withPartneredEstimate(PartneredEstimate partneredEstimate) {
        this.partneredEstimate = partneredEstimate;
        return this;
    }

    /** @return the value of CarrierName. */
    public String getCarrierName() {
        return carrierName;
    }

    /** Set the value of CarrierName. @param carrierName the new value to set. */
    public void setCarrierName(String carrierName) {
        this.carrierName = carrierName;
    }

    /** @return true if CarrierName is set (non-null). */
    public boolean isSetCarrierName() {
        return carrierName != null;
    }

    /** Fluent setter for CarrierName. @param carrierName the new value. @return this instance. */
    public PartneredLtlDataOutput withCarrierName(String carrierName) {
        this.carrierName = carrierName;
        return this;
    }

    /**
     * Read members from a MwsReader. Element order must match the schema's
     * sequence order (and the propOrder above).
     *
     * @param r The reader to read from.
     */
    @Override
    public void readFragmentFrom(MwsReader r) {
        contact = r.read("Contact", Contact.class);
        boxCount = r.read("BoxCount", long.class);
        sellerFreightClass = r.read("SellerFreightClass", String.class);
        freightReadyDate = r.read("FreightReadyDate", String.class);
        palletList = r.read("PalletList", PalletList.class);
        totalWeight = r.read("TotalWeight", Weight.class);
        sellerDeclaredValue = r.read("SellerDeclaredValue", Amount.class);
        amazonCalculatedValue = r.read("AmazonCalculatedValue", Amount.class);
        previewPickupDate = r.read("PreviewPickupDate", String.class);
        previewDeliveryDate = r.read("PreviewDeliveryDate", String.class);
        previewFreightClass = r.read("PreviewFreightClass", String.class);
        amazonReferenceId = r.read("AmazonReferenceId", String.class);
        isBillOfLadingAvailable = r.read("IsBillOfLadingAvailable", boolean.class);
        partneredEstimate = r.read("PartneredEstimate", PartneredEstimate.class);
        carrierName = r.read("CarrierName", String.class);
    }

    /**
     * Write members to a MwsWriter, in schema sequence order.
     *
     * @param w The writer to write to.
     */
    @Override
    public void writeFragmentTo(MwsWriter w) {
        w.write("Contact", contact);
        w.write("BoxCount", boxCount);
        w.write("SellerFreightClass", sellerFreightClass);
        w.write("FreightReadyDate", freightReadyDate);
        w.write("PalletList", palletList);
        w.write("TotalWeight", totalWeight);
        w.write("SellerDeclaredValue", sellerDeclaredValue);
        w.write("AmazonCalculatedValue", amazonCalculatedValue);
        w.write("PreviewPickupDate", previewPickupDate);
        w.write("PreviewDeliveryDate", previewDeliveryDate);
        w.write("PreviewFreightClass", previewFreightClass);
        w.write("AmazonReferenceId", amazonReferenceId);
        w.write("IsBillOfLadingAvailable", isBillOfLadingAvailable);
        w.write("PartneredEstimate", partneredEstimate);
        w.write("CarrierName", carrierName);
    }

    /**
     * Write tag, xmlns and members to a MwsWriter.
     *
     * @param w The Writer to write to.
     */
    @Override
    public void writeTo(MwsWriter w) {
        w.write("http://mws.amazonaws.com/FulfillmentInboundShipment/2010-10-01/", "PartneredLtlDataOutput",this);
    }

    /** Value constructor taking every field, required and optional. */
    public PartneredLtlDataOutput(Contact contact,long boxCount,String sellerFreightClass,String freightReadyDate,PalletList palletList,Weight totalWeight,Amount sellerDeclaredValue,Amount amazonCalculatedValue,String previewPickupDate,String previewDeliveryDate,String previewFreightClass,String amazonReferenceId,boolean isBillOfLadingAvailable,PartneredEstimate partneredEstimate,String carrierName) {
        this.contact = contact;
        this.boxCount = boxCount;
        this.sellerFreightClass = sellerFreightClass;
        this.freightReadyDate = freightReadyDate;
        this.palletList = palletList;
        this.totalWeight = totalWeight;
        this.sellerDeclaredValue = sellerDeclaredValue;
        this.amazonCalculatedValue = amazonCalculatedValue;
        this.previewPickupDate = previewPickupDate;
        this.previewDeliveryDate = previewDeliveryDate;
        this.previewFreightClass = previewFreightClass;
        this.amazonReferenceId = amazonReferenceId;
        this.isBillOfLadingAvailable = isBillOfLadingAvailable;
        this.partneredEstimate = partneredEstimate;
        this.carrierName = carrierName;
    }

    /** Value constructor taking only the schema-required fields. */
    public PartneredLtlDataOutput(Contact contact,long boxCount,String freightReadyDate,PalletList palletList,Weight totalWeight,String previewPickupDate,String previewDeliveryDate,String previewFreightClass,String amazonReferenceId,boolean isBillOfLadingAvailable) {
        this.contact = contact;
        this.boxCount = boxCount;
        this.freightReadyDate = freightReadyDate;
        this.palletList = palletList;
        this.totalWeight = totalWeight;
        this.previewPickupDate = previewPickupDate;
        this.previewDeliveryDate = previewDeliveryDate;
        this.previewFreightClass = previewFreightClass;
        this.amazonReferenceId = amazonReferenceId;
        this.isBillOfLadingAvailable = isBillOfLadingAvailable;
    }

    /** Default constructor. */
    public PartneredLtlDataOutput() {
        super();
    }
}
/*
 * Licensed to Elasticsearch under one or more contributor
 * license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright
 * ownership. Elasticsearch licenses this file to you under
 * the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.elasticsearch.rest.action.support;

import org.elasticsearch.common.Booleans;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.Table;
import org.elasticsearch.common.io.UTF8StreamWriter;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.SizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.rest.*;

import java.io.IOException;
import java.util.*;

/**
 * Renders a {@link Table} either as structured content (JSON/YAML/...) when an
 * explicit format is requested, or as aligned plain text (the "_cat" style).
 */
public class RestTable {

    /**
     * Build a response for the given table, choosing structured output when the
     * request's "format" param (or Content-Type header) names a known content
     * type, and plain text otherwise.
     */
    public static RestResponse buildResponse(Table table, RestChannel channel) throws Exception {
        RestRequest request = channel.request();
        XContentType xContentType = XContentType.fromRestContentType(request.param("format", request.header("Content-Type")));
        if (xContentType != null) {
            return buildXContentBuilder(table, channel);
        }
        return buildTextPlainResponse(table, channel);
    }

    /**
     * Render the table as an array of row objects keyed by display header name.
     */
    public static RestResponse buildXContentBuilder(Table table, RestChannel channel) throws Exception {
        RestRequest request = channel.request();
        XContentBuilder builder = channel.newBuilder();
        List<DisplayHeader> displayHeaders = buildDisplayHeaders(table, request);
        builder.startArray();
        for (int row = 0; row < table.getRows().size(); row++) {
            builder.startObject();
            for (DisplayHeader header : displayHeaders) {
                builder.field(header.display, renderValue(request, table.getAsMap().get(header.name).get(row).value));
            }
            builder.endObject();
        }
        builder.endArray();
        return new BytesRestResponse(RestStatus.OK, builder);
    }

    /**
     * Render the table as space-padded plain text, one row per line. The "v"
     * request parameter toggles a header line.
     */
    public static RestResponse buildTextPlainResponse(Table table, RestChannel channel) throws IOException {
        RestRequest request = channel.request();
        boolean verbose = request.paramAsBoolean("v", false);
        List<DisplayHeader> headers = buildDisplayHeaders(table, request);
        int[] width = buildWidths(table, request, verbose, headers);
        BytesStreamOutput bytesOut = channel.bytesOutput();
        UTF8StreamWriter out = new UTF8StreamWriter().setOutput(bytesOut);
        if (verbose) {
            for (int col = 0; col < headers.size(); col++) {
                DisplayHeader header = headers.get(col);
                pad(new Table.Cell(header.display, table.findHeaderByName(header.name)), width[col], request, out);
                out.append(" ");
            }
            out.append("\n");
        }
        for (int row = 0; row < table.getRows().size(); row++) {
            for (int col = 0; col < headers.size(); col++) {
                DisplayHeader header = headers.get(col);
                pad(table.getAsMap().get(header.name).get(row), width[col], request, out);
                out.append(" ");
            }
            out.append("\n");
        }
        out.close();
        return new BytesRestResponse(RestStatus.OK, BytesRestResponse.TEXT_CONTENT_TYPE, bytesOut.bytes());
    }

    /**
     * Resolve which columns to display: the "h" request param (supporting
     * aliases and sibling columns) when present, otherwise every header whose
     * "default" attribute is not false.
     */
    static List<DisplayHeader> buildDisplayHeaders(Table table, RestRequest request) {
        List<DisplayHeader> display = new ArrayList<>();
        if (request.hasParam("h")) {
            Set<String> headers = expandHeadersFromRequest(table, request);
            for (String possibility : headers) {
                DisplayHeader dispHeader = null;
                if (table.getAsMap().containsKey(possibility)) {
                    dispHeader = new DisplayHeader(possibility, possibility);
                } else {
                    // Not a real column name; try to resolve it as an alias.
                    for (Table.Cell headerCell : table.getHeaders()) {
                        String aliases = headerCell.attr.get("alias");
                        if (aliases != null) {
                            for (String alias : Strings.splitStringByCommaToArray(aliases)) {
                                if (possibility.equals(alias)) {
                                    dispHeader = new DisplayHeader(headerCell.value.toString(), alias);
                                    break;
                                }
                            }
                        }
                    }
                }
                if (dispHeader != null) {
                    // We know we need the header asked for:
                    display.add(dispHeader);

                    // Look for accompanying sibling column
                    Table.Cell hcell = table.getHeaderMap().get(dispHeader.name);
                    String siblingFlag = hcell.attr.get("sibling");
                    if (siblingFlag != null) {
                        // ...link the sibling and check that its flag is set
                        String sibling = siblingFlag + "." + dispHeader.name;
                        Table.Cell c = table.getHeaderMap().get(sibling);
                        if (c != null && request.paramAsBoolean(siblingFlag, false)) {
                            display.add(new DisplayHeader(c.value.toString(), siblingFlag + "." + dispHeader.display));
                        }
                    }
                }
            }
        } else {
            for (Table.Cell cell : table.getHeaders()) {
                String d = cell.attr.get("default");
                if (Booleans.parseBoolean(d, true)) {
                    display.add(new DisplayHeader(cell.value.toString(), cell.value.toString()));
                }
            }
        }
        return display;
    }

    /**
     * Extracts all the required fields from the RestRequest 'h' parameter. In order to support wildcards like
     * 'bulk.*' this needs potentially parse all the configured headers and its aliases and needs to ensure
     * that everything is only added once to the returned headers, even if 'h=bulk.*.bulk.*' is specified
     * or some headers are contained twice due to matching aliases
     */
    private static Set<String> expandHeadersFromRequest(Table table, RestRequest request) {
        Set<String> headers = new LinkedHashSet<>(table.getHeaders().size());

        // check headers and aliases
        for (String header : Strings.splitStringByCommaToArray(request.param("h"))) {
            if (Regex.isSimpleMatchPattern(header)) {
                for (Table.Cell tableHeaderCell : table.getHeaders()) {
                    String configuredHeader = tableHeaderCell.value.toString();
                    if (Regex.simpleMatch(header, configuredHeader)) {
                        headers.add(configuredHeader);
                    } else if (tableHeaderCell.attr.containsKey("alias")) {
                        String[] aliases = Strings.splitStringByCommaToArray(tableHeaderCell.attr.get("alias"));
                        for (String alias : aliases) {
                            if (Regex.simpleMatch(header, alias)) {
                                headers.add(configuredHeader);
                                break;
                            }
                        }
                    }
                }
            } else {
                headers.add(header);
            }
        }

        return headers;
    }

    /**
     * Compute column widths for the help listing: widest header name, widest
     * alias, and widest description.
     */
    public static int[] buildHelpWidths(Table table, RestRequest request) {
        int[] width = new int[3];
        for (Table.Cell cell : table.getHeaders()) {
            String v = renderValue(request, cell.value);
            int vWidth = v == null ? 0 : v.length();
            if (width[0] < vWidth) {
                width[0] = vWidth;
            }

            v = renderValue(request, cell.attr.containsKey("alias") ? cell.attr.get("alias") : "");
            vWidth = v == null ? 0 : v.length();
            if (width[1] < vWidth) {
                width[1] = vWidth;
            }

            v = renderValue(request, cell.attr.containsKey("desc") ? cell.attr.get("desc") : "not available");
            vWidth = v == null ? 0 : v.length();
            if (width[2] < vWidth) {
                width[2] = vWidth;
            }
        }
        return width;
    }

    /**
     * Compute, per displayed column, the width of the widest rendered cell
     * (including the header itself when verbose).
     */
    private static int[] buildWidths(Table table, RestRequest request, boolean verbose, List<DisplayHeader> headers) {
        int[] width = new int[headers.size()];
        int i;

        if (verbose) {
            i = 0;
            for (DisplayHeader hdr : headers) {
                int vWidth = hdr.display.length();
                if (width[i] < vWidth) {
                    width[i] = vWidth;
                }
                i++;
            }
        }

        i = 0;
        for (DisplayHeader hdr : headers) {
            for (Table.Cell cell : table.getAsMap().get(hdr.name)) {
                String v = renderValue(request, cell.value);
                int vWidth = v == null ? 0 : v.length();
                if (width[i] < vWidth) {
                    width[i] = vWidth;
                }
            }
            i++;
        }
        return width;
    }

    /**
     * Write the cell's rendered value padded with spaces to {@code width},
     * honoring the cell's "text-align" attribute (default "left").
     */
    public static void pad(Table.Cell cell, int width, RestRequest request, UTF8StreamWriter out) throws IOException {
        String sValue = renderValue(request, cell.value);
        int length = sValue == null ? 0 : sValue.length();
        // FIX: previously `byte leftOver = (byte) (width - length)` — the
        // narrowing cast wraps for any column wider than 127 characters,
        // producing negative (no padding) or garbage padding. Use int.
        int leftOver = width - length;
        String textAlign = cell.attr.get("text-align");
        if (textAlign == null) {
            textAlign = "left";
        }
        if (leftOver > 0 && textAlign.equals("right")) {
            for (int i = 0; i < leftOver; i++) {
                out.append(" ");
            }
            if (sValue != null) {
                out.append(sValue);
            }
        } else {
            if (sValue != null) {
                out.append(sValue);
            }
            for (int i = 0; i < leftOver; i++) {
                out.append(" ");
            }
        }
    }

    /**
     * Render a cell value to a string, applying the request's unit-resolution
     * parameters ("bytes", "size", "time") to the corresponding value types.
     * Returns null only for a null value.
     */
    private static String renderValue(RestRequest request, Object value) {
        if (value == null) {
            return null;
        }
        if (value instanceof ByteSizeValue) {
            ByteSizeValue v = (ByteSizeValue) value;
            String resolution = request.param("bytes");
            if ("b".equals(resolution)) {
                return Long.toString(v.bytes());
            } else if ("k".equals(resolution)) {
                return Long.toString(v.kb());
            } else if ("m".equals(resolution)) {
                return Long.toString(v.mb());
            } else if ("g".equals(resolution)) {
                return Long.toString(v.gb());
            } else if ("t".equals(resolution)) {
                return Long.toString(v.tb());
            } else if ("p".equals(resolution)) {
                return Long.toString(v.pb());
            } else {
                return v.toString();
            }
        }
        if (value instanceof SizeValue) {
            SizeValue v = (SizeValue) value;
            String resolution = request.param("size");
            if ("b".equals(resolution)) {
                return Long.toString(v.singles());
            } else if ("k".equals(resolution)) {
                return Long.toString(v.kilo());
            } else if ("m".equals(resolution)) {
                return Long.toString(v.mega());
            } else if ("g".equals(resolution)) {
                return Long.toString(v.giga());
            } else if ("t".equals(resolution)) {
                return Long.toString(v.tera());
            } else if ("p".equals(resolution)) {
                return Long.toString(v.peta());
            } else {
                return v.toString();
            }
        }
        if (value instanceof TimeValue) {
            TimeValue v = (TimeValue) value;
            String resolution = request.param("time");
            if ("ms".equals(resolution)) {
                return Long.toString(v.millis());
            } else if ("s".equals(resolution)) {
                return Long.toString(v.seconds());
            } else if ("m".equals(resolution)) {
                return Long.toString(v.minutes());
            } else if ("h".equals(resolution)) {
                return Long.toString(v.hours());
            } else {
                return v.toString();
            }
        }
        // Add additional built in data points we can render based on request parameters?
        return value.toString();
    }

    /** Pairs a table column's canonical name with the name to display it under. */
    static class DisplayHeader {
        public final String name;
        public final String display;

        DisplayHeader(String name, String display) {
            this.name = name;
            this.display = display;
        }
    }
}
package org.woodley.aigrid.app; import android.app.AlertDialog; import android.content.Context; import android.content.DialogInterface; import android.content.res.Resources; import android.graphics.Color; import android.os.Handler; import android.util.Log; import android.util.TypedValue; import android.view.SoundEffectConstants; import android.view.View; import android.view.ViewGroup; import android.widget.BaseAdapter; import android.widget.GridView; import android.widget.ImageView; import java.util.ArrayList; import java.util.HashSet; import java.util.List; public class ImageAdapter extends BaseAdapter { private static final String TAG = "ImageAdapter "; private MainActivity _Context; AlertDialog _alertDialog; // constants int _nRows = 5; int _nCols = 8; // this is also in the layout xml. int _level = 3; // numbers go from 0 to _level - 1 int _hpx = -1; // size of grid view square int _wpx = -1; // size of grid view square boolean _okToClick = false; boolean _dontHide; // used only for _milliseconds == -2 ('you control it' mode). int _gameId = 0; // use to deactivate obsolete timer events if user keeps hitting replay int _milliseconds = 250; // chimp mode. int _currentlyDisplaying; ArrayList<Integer> _positions; ArrayList<Integer> _numerals; List<ImageView> _imageViews; private Handler _handler = new Handler(); int _lastClick; public ImageAdapter(MainActivity con) { _Context = con; _alertDialog = new AlertDialog.Builder(_Context).create(); _alertDialog.setButton("OK", new DialogInterface.OnClickListener() { public void onClick(DialogInterface dialog, int which) { _alertDialog.cancel(); } }); //alertDialog.setIcon(R.drawable.icon); //Calculation of ImageView Size - density independent. 
//Resources r = Resources.getSystem(); //float px = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 1, r.getDisplayMetrics()); //_hpx = (int) ((float) (r.getDisplayMetrics().heightPixels-32)/(_nRows)); //_wpx = (int) ((float) (r.getDisplayMetrics().widthPixels-128)/(_nCols)); } public void setWH(int w, int h) { _hpx = (int) ((float) (h-32)/(_nRows)); _wpx = (int) ((float) (w-128)/(_nCols)); } public void newGame() { _dontHide = false; _handler.removeCallbacks(hideNumbers); _positions = new ArrayList<Integer>(); _numerals = new ArrayList<Integer>(); _okToClick = false; _currentlyDisplaying = 0; _lastClick = -1; _imageViews = new ArrayList<ImageView>(_level); _imageViews.clear(); HashSet<Integer> tempHS = new HashSet<Integer>(); for (int i = 0; i < _level; i++) { _imageViews.add(i, null); Integer pos = -1; while (pos < 0 || _positions.contains(pos)) { pos = new Integer((int) (Math.random() * _nRows*_nCols)); } _positions.add(pos); // 0 thru 63 pos = -1; while (pos < 0 || tempHS.contains(pos)) { pos = new Integer((int) (Math.random() * _level)); } tempHS.add(pos); _numerals.add(pos); // 0 thru 9 } if (_milliseconds > 0) _handler.postDelayed(hideNumbers, _milliseconds); else _okToClick = true; } public void setMode(int ms) { _milliseconds = ms; } public void setLevel(int newLevel) { _level = newLevel; if (newLevel > 10) _level = 10; if (newLevel < 3) _level = 3; _Context.setTitle("You are at Level " + _level + "."); } private Runnable hideNumbers = new Runnable() { @Override public void run() { for (int position = 0; position < getCount(); position++) { int index = _positions.indexOf(position); if (index == -1) continue; int numeral = _numerals.get(index); // Log.i(TAG, "Hiding" + numeral); _imageViews.get(index).setImageResource(R.drawable.whitesquare); } _okToClick = true; } }; private void showAll() { for (int position = 0; position < getCount(); position++) { int index = _positions.indexOf(position); if (index == -1) continue; int numeral = 
_numerals.get(index); _imageViews.get(index).setImageResource(mThumbIds[numeral]); } } public int getCount() { return _nCols * _nRows; } public Object getItem(int position) { return null; } public long getItemId(int position) { return 0; } // create a new ImageView for each item referenced by the Adapter public View getView(int position, View convertView, ViewGroup parent) { ImageView imageView; if (convertView == null) { // if it's not recycled, initialize some attributes imageView = new ImageView(_Context); imageView.setLayoutParams(new GridView.LayoutParams(_wpx, _hpx)); imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); //imageView.setPadding(8, 8, 8, 8); //imageView.setBackgroundColor(Color.GRAY); } else { imageView = (ImageView) convertView; } //Log.i(TAG, "Looking up " + position); int index = _positions.indexOf(position); if (index > -1) { _imageViews.set(index, imageView); int numeral = _numerals.get(index); //Log.i(TAG, "Placing " + numeral + " at " + position); imageView.setImageResource(mThumbIds[numeral]); imageView.setScaleType(ImageView.ScaleType.CENTER_CROP); imageView.setLayoutParams(new GridView.LayoutParams(_wpx, _hpx)); } return imageView; } // references to our images private Integer[] mThumbIds = { R.drawable.number0, R.drawable.number1, R.drawable.number2, R.drawable.number3, R.drawable.number4, R.drawable.number5, R.drawable.number6, R.drawable.number7, R.drawable.number8, R.drawable.number9, }; public void handleClick(ImageView imageView, int position) { if (!_okToClick) return; // get numeral at position if (_milliseconds == -2 && !_dontHide) { hideNumbers.run(); _dontHide = true; } int index = _positions.indexOf(position); if (index == -1) { return; } int numeral = _numerals.get(index); // Log.i(TAG, "You touched " + numeral); imageView.setImageResource(mThumbIds[numeral]); if (numeral > ++_lastClick) gameOver(); else { if (numeral == _level - 1) { setLevel(_level + 1); showAlert("Good Job!!", "You Won!"); } } } private void 
gameOver() { _okToClick = false; showAll(); setLevel(_level - 1); showAlert("Fail!", "Game Over"); } private void showAlert(String mess, String title) { _alertDialog.setTitle(title); _alertDialog.setMessage(mess + " Your level is now " + _level); _alertDialog.show(); } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements. See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.harmony.test.func.api.javax.management.loading;

import javax.management.MBeanServer;
import javax.management.MBeanServerFactory;
import javax.management.ObjectName;
import javax.management.loading.DefaultLoaderRepository;
import javax.management.loading.MLet;

import org.apache.harmony.share.MultiCase;
import org.apache.harmony.share.Result;

/**
 * Test for the class javax.management.loading.MLet
 * and related classes
 */
public class LoadingMBeanTest extends MultiCase {

    /**
     * URL to the directory containing the mlet conf text files
     * (assigned from args[0] in main).
     */
    public String MLET_URL;

    /**
     * Test for the constructor MLet(): a fresh MLet must register cleanly.
     *
     * @see javax.management.loading#MLet()
     */
    public Result testRegister() throws Exception {
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
        } catch (Exception e) {
            // FIX: a registration failure used to be swallowed and the test
            // silently passed; report it like the sibling tests do.
            return failed("Unexpected Exception occur");
        }
        return result();
    }

    /**
     * Test for the method getURLs().
     *
     * @see javax.management.loading.mlet#getURLs()
     */
    public Result testGetURLs() throws Exception {
        MLet mlet = new MLet();
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            mlet.addURL(MLET_URL);
        } catch (Exception e) {
            return failed("Unexpected Exception occur");
        }
        if (mlet.getURLs().length == 1)
            return passed();
        else
            return failed("Wrong number of URLs returned by getURLs() method");
    }

    /**
     * Test for the base mlet loading functionality via MBeanServer
     * using the invoke() method.
     */
    public Result testLoadClass() throws Exception {
        // Both remote classes must be unknown to the class loader repository
        // before the MLet downloads them; ClassNotFoundException is expected.
        try {
            DefaultLoaderRepository.loadClass("org.apache.harmony.test.func.api.javax.management.loading.mbeans.ThirdRemoteClass");
            // FIX: the failure result used to be computed and discarded.
            return failed("class org.apache.harmony.test.func.api.javax.management.loading.mbeans.ThirdRemoteClass was already found in cass loader repository.");
        } catch (ClassNotFoundException e) {
            // expected
        }
        try {
            DefaultLoaderRepository.loadClass("org.apache.harmony.test.func.api.javax.management.loading.mbeans.SecondRemoteClass");
            // FIX: missing return, and the message wrongly named ThirdRemoteClass.
            return failed("class org.apache.harmony.test.func.api.javax.management.loading.mbeans.SecondRemoteClass was already found in cass loader repository.");
        } catch (ClassNotFoundException e) {
            // expected
        }
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            server.invoke(name, "getMBeansFromURL",
                    new Object[] { MLET_URL + "TrueMlet.conf" },
                    new String[] { String.class.getName() });
            if (!server.isRegistered(new ObjectName("test:name=ThirdRemoteClass")))
                return failed("MBean ThirdRemoteClass does not registered by the server");
            if (!server.isRegistered(new ObjectName("test:name=SecondRemoteClass")))
                return failed("MBean SecondRemoteClass does not registered by the server");
            server.invoke(new ObjectName("test:name=SecondRemoteClass"), "doOperation",
                    new Object[] { "DrlChk" }, new String[] { String.class.getName() });
            server.invoke(new ObjectName("test:name=ThirdRemoteClass"), "doOperation",
                    new Object[] { "DrlChk" }, new String[] { String.class.getName() });
        } catch (Exception e) {
            e.printStackTrace();
            return failed("Unexpected Exception occur");
        }
        return result();
    }

    /**
     * Test for the method getLibraryDirectory()
     *
     * @see javax.management.loading.mlet#getLibraryDirectory()
     */
    public Result testNativeLib() throws Exception {
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            mlet.getMBeansFromURL(MLET_URL + "mlet2.conf");
            String libPath = mlet.getLibraryDirectory();
            // FIX: was libPath.equals(null), which is always false by the
            // Object.equals contract; a real null check is intended here.
            if (libPath == null)
                return failed("method MLet.getLibraryDirectory()return wrong value = " + libPath);
        } catch (Exception e) {
            return failed("Unexpected Exception occur");
        }
        return result();
    }

    /**
     * Test for the methods
     * getLibraryDirectory()
     * setLibraryDirectory()
     *
     * @see javax.management.loading.mlet#setLibraryDirectory()
     */
    public Result testSetLibraryDirectory() throws Exception {
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            mlet.getMBeansFromURL(MLET_URL + "mlet2.conf");
            String libPath = mlet.getLibraryDirectory();
            mlet.setLibraryDirectory(libPath);
            // FIX: was libPath.equals(null) — always false; use a null check.
            if (libPath == null)
                return failed("method MLet.getLibraryDirectory()return wrong value = " + libPath);
        } catch (Exception e) {
            return failed("Unexpected Exception occur");
        }
        return result();
    }

    /**
     * Test for the method loadClass()
     *
     * @see javax.management.loading.mlet#loadClass()
     */
    public Result testClassCheck() throws Exception {
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            server.invoke(name, "getMBeansFromURL",
                    new Object[] { MLET_URL + "mlet1.conf" },
                    new String[] { String.class.getName() });
            Class type = mlet.loadClass("FirstRemoteClass");
            // FIX: was type.equals(null) — always false (and would NPE if type
            // could be null); use a real null check.
            if (type == null)
                return failed("method MLet.loadClass return wrong value = " + type);
        } catch (Exception e) {
            e.printStackTrace();
            return failed("Unexpected Exception occur");
        }
        return result();
    }

    /**
     * Test for the method loadClass() with a nonexistent class name; the
     * expected outcome is an exception from loadClass.
     *
     * @see javax.management.loading.mlet#loadClass()
     */
    public Result testExceptionClassCheck() throws Exception {
        try {
            MBeanServer server = MBeanServerFactory.createMBeanServer();
            MLet mlet = new MLet();
            ObjectName name = new ObjectName("test:name=mlet");
            server.registerMBean(mlet, name);
            server.invoke(name, "getMBeansFromURL",
                    new Object[] { MLET_URL + "/mlet-1/mlet1.conf" },
                    new String[] { String.class.getName() });
            Class type = mlet.loadClass("IAmNotAClass");
            System.out.println(type);
            // FIX: was type.equals(null) — always false; use a real null check.
            if (type == null)
                return failed("method MLet.loadClass return wrong value = " + type);
        } catch (Exception e) {
            return passed("Expected Exception occur");
        }
        return result();
    }

    public static void main(String[] args) {
        LoadingMBeanTest run = new LoadingMBeanTest();
        run.MLET_URL = args[0] + "/mlet/";
        System.exit(run.test(args));
    }
}
package org.Webgatherer.Controller.Api;

import com.google.inject.Guice;
import com.google.inject.Injector;
import com.rickdane.springmodularizedproject.api.transport.*;
import org.Webgatherer.Api.Scraper.ScraperFactory;
import org.Webgatherer.Common.Properties.PropertiesContainer;
import org.Webgatherer.Controller.EntityTransport.EntryTransport;
import org.Webgatherer.CoreEngine.Core.ThreadCommunication.ThreadCommunication;
import org.Webgatherer.CoreEngine.Core.ThreadCommunication.ThreadCommunicationBase;
import org.Webgatherer.ExperimentalLabs.DependencyInjection.DependencyBindingModule;
import org.Webgatherer.ExperimentalLabs.EmailExtraction.PageRetrieverThreadManagerEmailExtraction;
import org.Webgatherer.ExperimentalLabs.Mail.EmailSendReceive;
import org.Webgatherer.ExperimentalLabs.Scraper.Core.ScraperBase;
import org.Webgatherer.Utility.RandomSelector;
import org.Webgatherer.WorkflowExample.Workflows.Implementations.WebGatherer.EnumUrlRetrieveOptions;

import java.util.*;
import java.util.concurrent.ConcurrentLinkedQueue;

/**
 * This is the main method for the application: it polls the API at a set
 * interval to check for new jobs to run (url scrapes, email-address scrapes,
 * and queued outgoing emails).
 *
 * @author Rick Dane
 */
public class ApiCommunication extends BaseApiCommunication {

    private static final String baseApiUrl = "http://ec2-107-21-182-174.compute-1.amazonaws.com:8080/";
    //private static final String baseApiUrl = "http://localhost:8080/springmodularizedproject1/";
    private static final String serviceEndpointGetScraper = baseApiUrl + "webgathererjobs/getPendingJobToLaunch";
    private static final String servicePersistRawscrapeddata = baseApiUrl + "rawscrapeddatas";
    private static final String serviceUrlsAwaitingEmailScrape = baseApiUrl + "rawscrapeddatas/retrieveUrlsAwaitingEmailScrape";
    private static final String scraperEndPoint = baseApiUrl + "/scrapers";
    private static final String postEmailListEndPoint = baseApiUrl + "/receivedemails/uploadNewRetrievedEmails";
    private static final String emailToSendEndPoint = baseApiUrl + "/emailaddresses/getEmailToSend";

    private static int callIntervalSeconds = 10;
    private static boolean isRunning = true;
    private static int pageNum = 1;
    private static int maxPages = 2;
    private static int maxUrlEmailScrapeUrls = 20;
    private static int sizeOfStringArrayEnum = 9;

    private static PropertiesContainer propertiesContainer = new PropertiesContainer();

    // Email plumbing. Initialization order matters: emailProperties must
    // follow propertiesContainer, and the injected services follow injector.
    private static final int minDelay = 250000; // ms; minimum gap between sends
    private static final int maxDelay = 800000; // ms; maximum gap between sends
    private static Injector injector = Guice.createInjector(new DependencyBindingModule());
    private static EmailSendReceive emailSendReceive = injector.getInstance(EmailSendReceive.class);
    private static EmailSendReceive emailImap = injector.getInstance(EmailSendReceive.class);
    private static RandomSelector randomSelector;
    private static Date nextEmailSendTime = null;
    private static Properties emailProperties = propertiesContainer.getProperties("emailAccounts");

    public static void main(String[] args) {
        emailImap.configureImap(emailProperties.getProperty("email1_imap"),
                emailProperties.getProperty("email1_imap_username"),
                emailProperties.getProperty("email1_password"));
        while (isRunning) {
            EntryTransport entryTransport = new EntryTransport();
            Scraper curScraper = apiPost(entryTransport, serviceEndpointGetScraper, Scraper.class);
            if (curScraper != null) {
                runUrlScrapeJob(curScraper);
            }
            runEmailScrapeJob();
            Date curTime = new Date();
            if (nextEmailSendTime == null || curTime.getTime() > nextEmailSendTime.getTime()) {
                getEmailAndSend();
            }
            //runEmailRetrieve();
            sleep();
        }
    }

    /** Pulls unread emails from IMAP and uploads them to the API. (Currently unused.) */
    private static void runEmailRetrieve() {
        List<ReceivedEmail> receivedEmailList = emailImap.retrieveUnreadEmails();
        apiPost(receivedEmailList, postEmailListEndPoint);
    }

    /**
     * Fetches up to maxUrlEmailScrapeUrls urls awaiting an email scrape from
     * the API and, if any were found, runs the extraction job over them.
     */
    private static void runEmailScrapeJob() {
        Map<String, Rawscrapeddata> rawscrapeddataList = new HashMap<String, Rawscrapeddata>();
        // FIX: loop variable is now scoped to the loop (was pre-declared).
        for (int i = 1; i <= maxUrlEmailScrapeUrls; i++) {
            // dummy object — the endpoint only needs a POST body
            TransportBase transportBase = new TransportBase();
            Rawscrapeddata rawscrapeddata = apiPost(transportBase, serviceUrlsAwaitingEmailScrape, Rawscrapeddata.class);
            if (rawscrapeddata != null) {
                rawscrapeddataList.put(rawscrapeddata.getUrl(), rawscrapeddata);
            }
        }
        if (!rawscrapeddataList.isEmpty()) {
            runEmailExtractionJob(rawscrapeddataList);
        }
    }

    /**
     * Runs a url-scrape job for the given scraper configuration, persists each
     * discovered url, and marks the scraper as processed.
     *
     * @return false when the scraper type is unsupported
     */
    private static boolean runUrlScrapeJob(Scraper curScraper) {
        String scraperType = "";
        if (curScraper.getType() == Scraper.Type.CRAIGSLIST) {
            scraperType = "generic";
        } else {
            return false;
        }
        ScraperBase scraper = ScraperFactory.createScraper(scraperType);
        scraper.configure(curScraper.getUrlPrefix(), curScraper.getUrlPostfix(),
                curScraper.getBaseDomainName(), curScraper.getPageIncrementAmnt());
        List<String[]> urlEntries = scraper.run(curScraper.getKeyword(), pageNum, maxPages);
        for (String[] curEntry : urlEntries) {
            Rawscrapeddata rawscrapeddata = new Rawscrapeddata();
            rawscrapeddata.setUrl(curEntry[1]);
            rawscrapeddata.setFkScraperId(curScraper.getId());
            rawscrapeddata.setRawscrapeddataEmailScrapeAttempted(Rawscrapeddata.RawscrapeddataEmailScrapeAttempted.NOT_ATTEMPTED);
            apiPost(rawscrapeddata, servicePersistRawscrapeddata, Rawscrapeddata.class);
        }
        curScraper.setStatus(Scraper.ProcessStatus.PROCESSED);
        apiPut(curScraper, scraperEndPoint);
        return true;
    }

    /** Sleeps for the polling interval, restoring the interrupt flag if woken. */
    private static void sleep() {
        try {
            Thread.sleep(callIntervalSeconds * 1000);
        } catch (InterruptedException e) {
            // FIX: restore the interrupt status instead of only printing it.
            Thread.currentThread().interrupt();
        }
    }

    /**
     * Builds the page queue consumed by the extraction threads. Each element
     * is a String[] indexed by ThreadCommunicationBase.PageQueueEntries
     * ordinals, with only BASE_URL populated.
     */
    private static Queue<String[]> prepareQueueForEmails(Map<String, Rawscrapeddata> rawscrapeddataList) {
        // FIX: this used a raw Queue local and a Queue<String> return type
        // while actually storing String[] entries; the element type now
        // matches what getFromOutputDataHolder() later hands back.
        // NOTE(review): assumes setPageQueue accepts a queue of String[] —
        // confirm against ThreadCommunication's declaration.
        Queue<String[]> queue = new ConcurrentLinkedQueue<String[]>();
        for (Map.Entry<String, Rawscrapeddata> curEntry : rawscrapeddataList.entrySet()) {
            Rawscrapeddata curRawscrapeddata = curEntry.getValue();
            String[] pageEntry = new String[sizeOfStringArrayEnum];
            pageEntry[ThreadCommunicationBase.PageQueueEntries.BASE_URL.ordinal()] = curRawscrapeddata.getUrl();
            queue.add(pageEntry);
        }
        return queue;
    }

    /**
     * Feeds the collected urls through the page-retriever threads, waits for
     * the extraction to drain, then writes the discovered email addresses
     * back to the API.
     */
    private static void runEmailExtractionJob(Map<String, Rawscrapeddata> rawscrapeddataList) {
        // FIX: the local no longer shadows the static 'injector' field.
        Injector jobInjector = Guice.createInjector(new DependencyBindingModule());
        PageRetrieverThreadManagerEmailExtraction pageRetrieverThreadManager =
                jobInjector.getInstance(PageRetrieverThreadManagerEmailExtraction.class);
        ThreadCommunication threadCommunication = jobInjector.getInstance(ThreadCommunication.class);
        pageRetrieverThreadManager.configure(threadCommunication);
        threadCommunication.setPageQueue(prepareQueueForEmails(rawscrapeddataList));
        while (!threadCommunication.isPageQueueEmpty()) {
            try {
                Thread.sleep(25);
            } catch (InterruptedException e) {
                // FIX: stop waiting and preserve the interrupt instead of
                // swallowing it silently.
                Thread.currentThread().interrupt();
                return;
            }
            pageRetrieverThreadManager.run(EnumUrlRetrieveOptions.HTMLPAGE.ordinal());
        }
        try {
            // Grace period for in-flight retrievals to land in the output holder.
            Thread.sleep(15000);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            return;
        }
        while (!threadCommunication.isOutputDataHolderEmpty()) {
            //TODO: Note that this will overwrite where there is more than 1 rawscrapeddata entry per url, consider re-working this at some point to account for this
            String[] curEntry = threadCommunication.getFromOutputDataHolder();
            String urlKey = curEntry[ThreadCommunicationBase.PageQueueEntries.BASE_URL.ordinal()];
            String email = curEntry[ThreadCommunicationBase.PageQueueEntries.EMAIL_ADDRESSES.ordinal()];
            Rawscrapeddata curRawscrapeddata = rawscrapeddataList.get(urlKey);
            curRawscrapeddata.setEmailAddress(email);
            curRawscrapeddata.setRawscrapeddataEmailScrapeAttempted(Rawscrapeddata.RawscrapeddataEmailScrapeAttempted.ATTEMPTED);
            apiPut(curRawscrapeddata, servicePersistRawscrapeddata);
        }
    }

    /** Fetches the next queued outgoing email from the API and sends it. */
    private static void getEmailAndSend() {
        // dummy object — the endpoint only needs a POST body
        TransportBase transportBase = new TransportBase();
        EmailTransport emailTransport = apiPost(transportBase, emailToSendEndPoint, EmailTransport.class);
        if (emailTransport == null) {
            return;
        }
        String body = emailTransport.getBody();
        if (body != null) {
            // NOTE(review): this rewrites forward-slash sequences; it looks
            // like backslash escapes ("\\n" -> "\n") may have been intended.
            // Kept as-is because stored data may depend on it — confirm with
            // the API side before changing.
            body = body.replace("//n", "/n");
            emailTransport.setBody(body);
            if (emailTransport.getToEmail() != null) {
                sendEmail(emailTransport);
            }
        }
    }

    /**
     * Sends one email over SMTP and schedules the next allowed send time a
     * random delay (minDelay..maxDelay ms) in the future.
     */
    private static void sendEmail(EmailTransport emailTransport) {
        emailSendReceive.configure(emailProperties.getProperty("email_fromName"),
                emailProperties.getProperty("email1_smtp"),
                emailProperties.getProperty("email1_address"),
                emailProperties.getProperty("email1_password"),
                emailProperties.getProperty("email1_smtp_port"));
        String attachmentFilePath = emailProperties.getProperty("email_attachment1");
        String body = emailTransport.getBody();
        String subject = emailTransport.getSubject();
        emailSendReceive.sendEmail(body, subject, emailTransport.getToEmail(), attachmentFilePath);
        randomSelector = injector.getInstance(RandomSelector.class);
        int delay = randomSelector.generateRandomNumberInRange(minDelay, maxDelay);
        Date date = new Date();
        nextEmailSendTime = new Date(date.getTime() + delay);
    }
}
// Copyright (C) 2016 The Android Open Source Project // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.gerrit.server.query.account; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import static com.google.common.truth.Truth8.assertThat; import static com.google.gerrit.testing.GerritJUnit.assertThrows; import static java.util.stream.Collectors.toList; import static org.junit.Assert.fail; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Streams; import com.google.gerrit.entities.Account; import com.google.gerrit.entities.Project; import com.google.gerrit.extensions.api.GerritApi; import com.google.gerrit.extensions.api.access.AccessSectionInfo; import com.google.gerrit.extensions.api.access.PermissionInfo; import com.google.gerrit.extensions.api.access.PermissionRuleInfo; import com.google.gerrit.extensions.api.access.ProjectAccessInput; import com.google.gerrit.extensions.api.accounts.Accounts.QueryRequest; import com.google.gerrit.extensions.api.groups.GroupInput; import com.google.gerrit.extensions.api.projects.ProjectInput; import com.google.gerrit.extensions.client.ListAccountsOption; import com.google.gerrit.extensions.client.ProjectWatchInfo; import com.google.gerrit.extensions.common.AccountExternalIdInfo; import com.google.gerrit.extensions.common.AccountInfo; import 
com.google.gerrit.extensions.common.ChangeInfo; import com.google.gerrit.extensions.common.ChangeInput; import com.google.gerrit.extensions.common.GroupInfo; import com.google.gerrit.extensions.restapi.AuthException; import com.google.gerrit.extensions.restapi.BadRequestException; import com.google.gerrit.extensions.restapi.ResourceNotFoundException; import com.google.gerrit.extensions.restapi.RestApiException; import com.google.gerrit.index.FieldDef; import com.google.gerrit.index.IndexConfig; import com.google.gerrit.index.QueryOptions; import com.google.gerrit.index.Schema; import com.google.gerrit.index.query.FieldBundle; import com.google.gerrit.lifecycle.LifecycleManager; import com.google.gerrit.server.AnonymousUser; import com.google.gerrit.server.CurrentUser; import com.google.gerrit.server.GerritPersonIdent; import com.google.gerrit.server.IdentifiedUser; import com.google.gerrit.server.ServerInitiated; import com.google.gerrit.server.account.AccountCache; import com.google.gerrit.server.account.AccountConfig; import com.google.gerrit.server.account.AccountDelta; import com.google.gerrit.server.account.AccountManager; import com.google.gerrit.server.account.AccountState; import com.google.gerrit.server.account.Accounts; import com.google.gerrit.server.account.AccountsUpdate; import com.google.gerrit.server.account.AuthRequest; import com.google.gerrit.server.account.externalids.ExternalId; import com.google.gerrit.server.account.externalids.ExternalIdKeyFactory; import com.google.gerrit.server.account.externalids.ExternalIds; import com.google.gerrit.server.config.AllProjectsName; import com.google.gerrit.server.config.AllUsersName; import com.google.gerrit.server.extensions.events.GitReferenceUpdated; import com.google.gerrit.server.git.GitRepositoryManager; import com.google.gerrit.server.git.meta.MetaDataUpdate; import com.google.gerrit.server.index.account.AccountField; import com.google.gerrit.server.index.account.AccountIndex; import 
com.google.gerrit.server.index.account.AccountIndexCollection; import com.google.gerrit.server.index.account.AccountIndexer; import com.google.gerrit.server.schema.SchemaCreator; import com.google.gerrit.server.util.ManualRequestContext; import com.google.gerrit.server.util.OneOffRequestContext; import com.google.gerrit.server.util.RequestContext; import com.google.gerrit.server.util.ThreadLocalRequestContext; import com.google.gerrit.testing.GerritServerTests; import com.google.gerrit.testing.GerritTestName; import com.google.inject.Inject; import com.google.inject.Injector; import com.google.inject.Provider; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Optional; import org.eclipse.jgit.lib.PersonIdent; import org.eclipse.jgit.lib.Repository; import org.junit.After; import org.junit.Before; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; @Ignore public abstract class AbstractQueryAccountsTest extends GerritServerTests { @Rule public final GerritTestName testName = new GerritTestName(); @Inject protected Accounts accounts; @Inject @ServerInitiated protected Provider<AccountsUpdate> accountsUpdate; @Inject protected AccountCache accountCache; @Inject protected AccountIndexer accountIndexer; @Inject protected AccountManager accountManager; @Inject protected GerritApi gApi; @Inject @GerritPersonIdent Provider<PersonIdent> serverIdent; @Inject protected IdentifiedUser.GenericFactory userFactory; @Inject private Provider<AnonymousUser> anonymousUser; @Inject protected SchemaCreator schemaCreator; @Inject protected ThreadLocalRequestContext requestContext; @Inject protected OneOffRequestContext oneOffRequestContext; @Inject protected Provider<InternalAccountQuery> queryProvider; @Inject protected AllProjectsName allProjects; @Inject protected AllUsersName allUsers; @Inject protected GitRepositoryManager repoManager; @Inject protected 
AccountIndexCollection indexes; @Inject protected ExternalIds externalIds; @Inject private ExternalIdKeyFactory externalIdKeyFactory; @Inject protected AuthRequest.Factory authRequestFactory; protected LifecycleManager lifecycle; protected Injector injector; protected AccountInfo currentUserInfo; protected CurrentUser admin; protected abstract Injector createInjector(); protected void validateAssumptions() {} @Before public void setUpInjector() throws Exception { lifecycle = new LifecycleManager(); injector = createInjector(); lifecycle.add(injector); injector.injectMembers(this); lifecycle.start(); initAfterLifecycleStart(); setUpDatabase(); validateAssumptions(); } @After public void cleanUp() { lifecycle.stop(); } protected void setUpDatabase() throws Exception { schemaCreator.create(); Account.Id adminId = createAccount("admin", "Administrator", "admin@example.com", true); admin = userFactory.create(adminId); requestContext.setContext(newRequestContext(adminId)); currentUserInfo = gApi.accounts().id(adminId.get()).get(); } protected void initAfterLifecycleStart() throws Exception {} protected RequestContext newRequestContext(Account.Id requestUserId) { final CurrentUser requestUser = userFactory.create(requestUserId); return () -> requestUser; } protected void setAnonymous() { requestContext.setContext(anonymousUser::get); } @After public void tearDownInjector() { if (lifecycle != null) { lifecycle.stop(); } requestContext.setContext(null); } @Test public void byId() throws Exception { AccountInfo user = newAccount("user"); assertQuery("9999999"); assertQuery(currentUserInfo._accountId, currentUserInfo); assertQuery(user._accountId, user); } @Test public void bySelf() throws Exception { assertQuery("self", currentUserInfo); } @Test public void byEmail() throws Exception { AccountInfo user1 = newAccountWithEmail("user1", name("user1@example.com")); String domain = name("test.com"); AccountInfo user2 = newAccountWithEmail("user2", "user2@" + domain); AccountInfo 
user3 = newAccountWithEmail("user3", "user3@" + domain); String prefix = name("prefix"); AccountInfo user4 = newAccountWithEmail("user4", prefix + "user4@example.com"); AccountInfo user5 = newAccountWithEmail("user5", name("user5MixedCase@example.com")); assertQuery("notexisting@example.com"); assertQuery(currentUserInfo.email, currentUserInfo); assertQuery("email:" + currentUserInfo.email, currentUserInfo); assertQuery(user1.email, user1); assertQuery("email:" + user1.email, user1); assertQuery(domain, user2, user3); assertQuery("email:" + prefix, user4); assertQuery(user5.email, user5); assertQuery("email:" + user5.email, user5); assertQuery("email:" + user5.email.toUpperCase(), user5); } @Test public void bySecondaryEmail() throws Exception { String prefix = name("secondary"); String domain = name("test.com"); String secondaryEmail = prefix + "@" + domain; AccountInfo user1 = newAccountWithEmail("user1", name("user1@example.com")); addEmails(user1, secondaryEmail); AccountInfo user2 = newAccountWithEmail("user2", name("user2@example.com")); addEmails(user2, name("other@" + domain)); assertQuery(secondaryEmail, user1); assertQuery("email:" + secondaryEmail, user1); assertQuery("email:" + prefix, user1); assertQuery(domain, user1, user2); } @Test public void byEmailWithoutModifyAccountCapability() throws Exception { String preferredEmail = name("primary@example.com"); String secondaryEmail = name("secondary@example.com"); AccountInfo user1 = newAccountWithEmail("user1", preferredEmail); addEmails(user1, secondaryEmail); AccountInfo user2 = newAccount("user"); requestContext.setContext(newRequestContext(Account.id(user2._accountId))); if (getSchemaVersion() < 5) { assertMissingField(AccountField.PREFERRED_EMAIL); assertFailingQuery("email:foo", "'email' operator is not supported by account index version"); return; } // This at least needs the PREFERRED_EMAIL field which is available from schema version 5. 
if (getSchemaVersion() >= 5) { assertQuery(preferredEmail, user1); } else { assertQuery(preferredEmail); } assertQuery(secondaryEmail); assertQuery("email:" + preferredEmail, user1); assertQuery("email:" + secondaryEmail); } @Test public void byUsername() throws Exception { AccountInfo user1 = newAccount("myuser"); assertQuery("notexisting"); assertQuery("Not Existing"); assertQuery(user1.username, user1); assertQuery("username:" + user1.username, user1); assertQuery("username:" + user1.username.toUpperCase(), user1); } @Test public void isActive() throws Exception { String domain = name("test.com"); AccountInfo user1 = newAccountWithEmail("user1", "user1@" + domain); AccountInfo user2 = newAccountWithEmail("user2", "user2@" + domain); AccountInfo user3 = newAccount("user3", "user3@" + domain, false); AccountInfo user4 = newAccount("user4", "user4@" + domain, false); // by default only active accounts are returned assertQuery(domain, user1, user2); assertQuery("name:" + domain, user1, user2); assertQuery("is:active name:" + domain, user1, user2); assertQuery("is:inactive name:" + domain, user3, user4); } @Test public void byName() throws Exception { AccountInfo user1 = newAccountWithFullName("jdoe", "John Doe"); AccountInfo user2 = newAccountWithFullName("jroe", "Jane Roe"); AccountInfo user3 = newAccountWithFullName("user3", "Mr Selfish"); assertQuery("notexisting"); assertQuery("Not Existing"); assertQuery(quote(user1.name), user1); assertQuery("name:" + quote(user1.name), user1); assertQuery("John", user1); assertQuery("john", user1); assertQuery("Doe", user1); assertQuery("doe", user1); assertQuery("DOE", user1); assertQuery("Jo Do", user1); assertQuery("jo do", user1); assertQuery("self", currentUserInfo, user3); assertQuery("me", currentUserInfo); assertQuery("name:John", user1); assertQuery("name:john", user1); assertQuery("name:Doe", user1); assertQuery("name:doe", user1); assertQuery("name:DOE", user1); assertQuery("name:self", user3); 
assertQuery(quote(user2.name), user2); assertQuery("name:" + quote(user2.name), user2); } @Test public void byNameWithoutModifyAccountCapability() throws Exception { AccountInfo user1 = newAccountWithFullName("jdoe", "John Doe"); AccountInfo user2 = newAccountWithFullName("jroe", "Jane Roe"); AccountInfo user3 = newAccount("user"); requestContext.setContext(newRequestContext(Account.id(user3._accountId))); assertQuery("notexisting"); assertQuery("Not Existing"); // by full name works with any index version assertQuery(quote(user1.name), user1); assertQuery("name:" + quote(user1.name), user1); assertQuery(quote(user2.name), user2); assertQuery("name:" + quote(user2.name), user2); // by self/me works with any index version assertQuery("self", user3); assertQuery("me", user3); if (getSchemaVersion() < 8) { assertMissingField(AccountField.NAME_PART_NO_SECONDARY_EMAIL); // prefix queries only work if the NAME_PART_NO_SECONDARY_EMAIL field is available assertQuery("john"); return; } assertQuery("John", user1); assertQuery("john", user1); assertQuery("Doe", user1); assertQuery("doe", user1); assertQuery("DOE", user1); assertQuery("Jo Do", user1); assertQuery("jo do", user1); assertQuery("name:John", user1); assertQuery("name:john", user1); assertQuery("name:Doe", user1); assertQuery("name:doe", user1); assertQuery("name:DOE", user1); } @Test public void byCansee() throws Exception { String domain = name("test.com"); AccountInfo user1 = newAccountWithEmail("account1", "account1@" + domain); AccountInfo user2 = newAccountWithEmail("account2", "account2@" + domain); AccountInfo user3 = newAccountWithEmail("account3", "account3@" + domain); Project.NameKey p = createProject(name("p")); ChangeInfo c = createChange(p); assertQuery("name:" + domain + " cansee:" + c.changeId, user1, user2, user3); GroupInfo group = createGroup(name("group"), user1, user2); blockRead(p, group); assertQuery("name:" + domain + " cansee:" + c.changeId, user3); } @Test public void byWatchedProject() 
throws Exception { Project.NameKey p = createProject(name("p")); Project.NameKey p2 = createProject(name("p2")); AccountInfo user1 = newAccountWithFullName("jdoe", "John Doe"); AccountInfo user2 = newAccountWithFullName("jroe", "Jane Roe"); AccountInfo user3 = newAccountWithFullName("user3", "Mr Selfish"); assertThat(queryProvider.get().byWatchedProject(p)).isEmpty(); watch(user1, p, null); assertAccounts(queryProvider.get().byWatchedProject(p), user1); watch(user2, p, "keyword"); assertAccounts(queryProvider.get().byWatchedProject(p), user1, user2); watch(user3, p2, "keyword"); watch(user3, allProjects, "keyword"); assertAccounts(queryProvider.get().byWatchedProject(p), user1, user2); assertAccounts(queryProvider.get().byWatchedProject(p2), user3); assertAccounts(queryProvider.get().byWatchedProject(allProjects), user3); } @Test public void byDeletedAccount() throws Exception { AccountInfo user = newAccountWithFullName("jdoe", "John Doe"); Account.Id userId = Account.Id.tryParse(user._accountId.toString()).get(); assertQuery("John", user); for (AccountIndex index : indexes.getWriteIndexes()) { index.delete(userId); } assertQuery("John"); } @Test public void withLimit() throws Exception { String domain = name("test.com"); AccountInfo user1 = newAccountWithEmail("user1", "user1@" + domain); AccountInfo user2 = newAccountWithEmail("user2", "user2@" + domain); AccountInfo user3 = newAccountWithEmail("user3", "user3@" + domain); List<AccountInfo> result = assertQuery(domain, user1, user2, user3); assertThat(Iterables.getLast(result)._moreAccounts).isNull(); result = assertQuery(newQuery(domain).withLimit(2), result.subList(0, 2)); assertThat(Iterables.getLast(result)._moreAccounts).isTrue(); } @Test public void withStart() throws Exception { String domain = name("test.com"); AccountInfo user1 = newAccountWithEmail("user1", "user1@" + domain); AccountInfo user2 = newAccountWithEmail("user2", "user2@" + domain); AccountInfo user3 = newAccountWithEmail("user3", "user3@" + 
domain); List<AccountInfo> result = assertQuery(domain, user1, user2, user3); assertQuery(newQuery(domain).withStart(1), result.subList(1, 3)); } @Test public void sortedByFullname() throws Exception { String appendix = name("name"); // Use an account creation order that ensures that sorting by fullname differs from sorting by // account ID. AccountInfo userFoo = newAccountWithFullName("user1", "foo-" + appendix); AccountInfo userBar = newAccountWithFullName("user2", "bar-" + appendix); AccountInfo userBaz = newAccountWithFullName("user3", "baz-" + appendix); assertThat(userFoo._accountId).isLessThan(userBar._accountId); assertThat(userBar._accountId).isLessThan(userBaz._accountId); String query = "name:" + userFoo.name + " OR name:" + userBar.name + " OR name:" + userBaz.name; // Must request details to populate fullname in the results. If fullname is not set sorting by // fullname is not possible. assertQuery(newQuery(query).withOption(ListAccountsOption.DETAILS), userBar, userBaz, userFoo); } @Test public void sortedByPreferredEmail() throws Exception { String appendix = name("name"); // Use an account creation order that ensures that sorting by preferred email differs from // sorting by account ID. Use the same fullname for all accounts so that sorting must be done by // preferred email. AccountInfo userFoo3 = newAccount("user3", "foo-" + appendix, "foo3-" + appendix + "@example.com", true); AccountInfo userFoo1 = newAccount("user1", "foo-" + appendix, "foo1-" + appendix + "@example.com", true); AccountInfo userFoo2 = newAccount("user2", "foo-" + appendix, "foo2-" + appendix + "@example.com", true); assertThat(userFoo3._accountId).isLessThan(userFoo1._accountId); assertThat(userFoo1._accountId).isLessThan(userFoo2._accountId); String query = "name:" + userFoo1.name + " OR name:" + userFoo2.name + " OR name:" + userFoo3.name; // Must request details to populate fullname and preferred email in the results. 
If fullname and // preferred email are not set sorting by fullname and preferred email is not possible. Since // all 3 accounts have the same fullname we expect sorting by preferred email. assertQuery( newQuery(query).withOption(ListAccountsOption.DETAILS), userFoo1, userFoo2, userFoo3); } @Test public void sortedById() throws Exception { String appendix = name("name"); // Each new account gets a higher account ID. Create the accounts in an order that sorting by // fullname differs from sorting by accout ID. AccountInfo userFoo = newAccountWithFullName("user1", "foo-" + appendix); AccountInfo userBar = newAccountWithFullName("user2", "bar-" + appendix); AccountInfo userBaz = newAccountWithFullName("user3", "baz-" + appendix); assertThat(userFoo._accountId).isLessThan(userBar._accountId); assertThat(userBar._accountId).isLessThan(userBaz._accountId); String query = "name:" + userFoo.name + " OR name:" + userBar.name + " OR name:" + userBaz.name; // Normally sorting is done by fullname and preferred email, but if no details are requested // fullname and preferred email are not set and then sorting is done by account ID. 
assertQuery(newQuery(query), userFoo, userBar, userBaz); } @Test public void withDetails() throws Exception { AccountInfo user1 = newAccount("myuser", "My User", "my.user@example.com", true); List<AccountInfo> result = assertQuery(user1.username, user1); AccountInfo ai = result.get(0); assertThat(ai._accountId).isEqualTo(user1._accountId); assertThat(ai.name).isNull(); assertThat(ai.username).isNull(); assertThat(ai.email).isNull(); assertThat(ai.avatars).isNull(); result = assertQuery(newQuery(user1.username).withOption(ListAccountsOption.DETAILS), user1); ai = result.get(0); assertThat(ai._accountId).isEqualTo(user1._accountId); assertThat(ai.name).isEqualTo(user1.name); assertThat(ai.username).isEqualTo(user1.username); assertThat(ai.email).isEqualTo(user1.email); assertThat(ai.avatars).isNull(); } @Test public void withSecondaryEmails() throws Exception { AccountInfo user1 = newAccount("myuser", "My User", "my.user@example.com", true); String[] secondaryEmails = new String[] {"bar@example.com", "foo@example.com"}; addEmails(user1, secondaryEmails); List<AccountInfo> result = assertQuery(user1.username, user1); assertThat(result.get(0).secondaryEmails).isNull(); result = assertQuery(newQuery(user1.username).withSuggest(true), user1); assertThat(result.get(0).secondaryEmails) .containsExactlyElementsIn(Arrays.asList(secondaryEmails)) .inOrder(); result = assertQuery(newQuery(user1.username).withOption(ListAccountsOption.DETAILS), user1); assertThat(result.get(0).secondaryEmails).isNull(); result = assertQuery(newQuery(user1.username).withOption(ListAccountsOption.ALL_EMAILS), user1); assertThat(result.get(0).secondaryEmails) .containsExactlyElementsIn(Arrays.asList(secondaryEmails)) .inOrder(); result = assertQuery( newQuery(user1.username) .withOptions(ListAccountsOption.DETAILS, ListAccountsOption.ALL_EMAILS), user1); assertThat(result.get(0).secondaryEmails) .containsExactlyElementsIn(Arrays.asList(secondaryEmails)) .inOrder(); } @Test public void 
withSecondaryEmailsWithoutModifyAccountCapability() throws Exception { AccountInfo user = newAccount("myuser", "My User", "other@example.com", true); AccountInfo otherUser = newAccount("otheruser", "Other User", "abc@example.com", true); String[] secondaryEmails = new String[] {"dfg@example.com", "hij@example.com"}; addEmails(otherUser, secondaryEmails); requestContext.setContext(newRequestContext(Account.id(user._accountId))); List<AccountInfo> result = newQuery(otherUser.username).withSuggest(true).get(); assertThat(result.get(0).secondaryEmails).isNull(); assertThrows( AuthException.class, () -> newQuery(otherUser.username).withOption(ListAccountsOption.ALL_EMAILS).get()); } @Test public void asAnonymous() throws Exception { AccountInfo user1 = newAccount("user1"); setAnonymous(); assertQuery("9999999"); assertQuery("self"); assertQuery("username:" + user1.username, user1); } // reindex permissions are tested by {@link AccountIT#reindexPermissions} @Test public void reindex() throws Exception { AccountInfo user1 = newAccountWithFullName("tester", "Test Usre"); // update account without reindex so that account index is stale Account.Id accountId = Account.id(user1._accountId); String newName = "Test User"; try (Repository repo = repoManager.openRepository(allUsers)) { MetaDataUpdate md = new MetaDataUpdate(GitReferenceUpdated.DISABLED, allUsers, repo); PersonIdent ident = serverIdent.get(); md.getCommitBuilder().setAuthor(ident); md.getCommitBuilder().setCommitter(ident); new AccountConfig(accountId, allUsers, repo) .load() .setAccountDelta(AccountDelta.builder().setFullName(newName).build()) .commit(md); } // Querying for the account here will not result in a stale document because // we load AccountStates from the cache after reading documents from the index // which means we always read fresh data when matching. 
// // Reindex document gApi.accounts().id(user1.username).index(); assertQuery("name:" + quote(user1.name)); assertQuery("name:" + quote(newName), user1); } @Test public void rawDocument() throws Exception { AccountInfo userInfo = gApi.accounts().id(admin.getAccountId().get()).get(); Schema<AccountState> schema = indexes.getSearchIndex().getSchema(); Optional<FieldBundle> rawFields = indexes .getSearchIndex() .getRaw( Account.id(userInfo._accountId), QueryOptions.create( IndexConfig.createDefault(), 0, 1, schema.getStoredFields().keySet())); assertThat(rawFields).isPresent(); if (schema.useLegacyNumericFields()) { assertThat(rawFields.get().getValue(AccountField.ID)).isEqualTo(userInfo._accountId); } else { assertThat(Integer.valueOf(rawFields.get().getValue(AccountField.ID_STR))) .isEqualTo(userInfo._accountId); } // The field EXTERNAL_ID_STATE is only supported from schema version 6. if (getSchemaVersion() < 6) { return; } List<AccountExternalIdInfo> externalIdInfos = gApi.accounts().self().getExternalIds(); List<ByteArrayWrapper> blobs = new ArrayList<>(); for (AccountExternalIdInfo info : externalIdInfos) { Optional<ExternalId> extId = externalIds.get(externalIdKeyFactory.parse(info.identity)); assertThat(extId).isPresent(); blobs.add(new ByteArrayWrapper(extId.get().toByteArray())); } assertThat(rawFields.get().getValue(AccountField.EXTERNAL_ID_STATE)).hasSize(blobs.size()); assertThat( Streams.stream(rawFields.get().getValue(AccountField.EXTERNAL_ID_STATE)) .map(ByteArrayWrapper::new) .collect(toList())) .containsExactlyElementsIn(blobs); } protected AccountInfo newAccount(String username) throws Exception { return newAccountWithEmail(username, null); } protected AccountInfo newAccountWithEmail(String username, String email) throws Exception { return newAccount(username, email, true); } protected AccountInfo newAccountWithFullName(String username, String fullName) throws Exception { return newAccount(username, fullName, null, true); } protected AccountInfo 
newAccount(String username, String email, boolean active) throws Exception { return newAccount(username, null, email, active); } protected AccountInfo newAccount(String username, String fullName, String email, boolean active) throws Exception { String uniqueName = name(username); try { gApi.accounts().id(uniqueName).get(); fail("user " + uniqueName + " already exists"); } catch (ResourceNotFoundException e) { // expected: user does not exist yet } Account.Id id = createAccount(uniqueName, fullName, email, active); return gApi.accounts().id(id.get()).get(); } protected Project.NameKey createProject(String name) throws RestApiException { ProjectInput in = new ProjectInput(); in.name = name; in.createEmptyCommit = true; gApi.projects().create(in); return Project.nameKey(name); } protected void blockRead(Project.NameKey project, GroupInfo group) throws RestApiException { ProjectAccessInput in = new ProjectAccessInput(); in.add = new HashMap<>(); AccessSectionInfo a = new AccessSectionInfo(); PermissionInfo p = new PermissionInfo(null, null); p.rules = ImmutableMap.of(group.id, new PermissionRuleInfo(PermissionRuleInfo.Action.BLOCK, false)); a.permissions = ImmutableMap.of("read", p); in.add = ImmutableMap.of("refs/*", a); gApi.projects().name(project.get()).access(in); } protected ChangeInfo createChange(Project.NameKey project) throws RestApiException { ChangeInput in = new ChangeInput(); in.subject = "A change"; in.project = project.get(); in.branch = "master"; return gApi.changes().create(in).get(); } protected GroupInfo createGroup(String name, AccountInfo... 
members) throws RestApiException { GroupInput in = new GroupInput(); in.name = name; in.members = Arrays.asList(members).stream().map(a -> String.valueOf(a._accountId)).collect(toList()); return gApi.groups().create(in).get(); } protected void watch(AccountInfo account, Project.NameKey project, String filter) throws RestApiException { List<ProjectWatchInfo> projectsToWatch = new ArrayList<>(); ProjectWatchInfo pwi = new ProjectWatchInfo(); pwi.project = project.get(); pwi.filter = filter; pwi.notifyAbandonedChanges = true; pwi.notifyNewChanges = true; pwi.notifyAllComments = true; projectsToWatch.add(pwi); gApi.accounts().id(account._accountId).setWatchedProjects(projectsToWatch); } protected String quote(String s) { return "\"" + s + "\""; } protected String name(String name) { if (name == null) { return null; } String suffix = testName.getSanitizedMethodName(); if (name.contains("@")) { return name + "." + suffix; } return name + "_" + suffix; } private Account.Id createAccount(String username, String fullName, String email, boolean active) throws Exception { try (ManualRequestContext ctx = oneOffRequestContext.open()) { Account.Id id = accountManager.authenticate(authRequestFactory.createForUser(username)).getAccountId(); if (email != null) { accountManager.link(id, authRequestFactory.createForEmail(email)); } accountsUpdate .get() .update( "Update Test Account", id, u -> { u.setFullName(fullName).setPreferredEmail(email).setActive(active); }); return id; } } private void addEmails(AccountInfo account, String... emails) throws Exception { Account.Id id = Account.id(account._accountId); for (String email : emails) { accountManager.link(id, authRequestFactory.createForEmail(email)); } accountIndexer.index(id); } protected QueryRequest newQuery(Object query) throws RestApiException { return gApi.accounts().query(query.toString()); } protected List<AccountInfo> assertQuery(Object query, AccountInfo... 
accounts) throws Exception { return assertQuery(newQuery(query), accounts); } protected List<AccountInfo> assertQuery(QueryRequest query, AccountInfo... accounts) throws Exception { return assertQuery(query, Arrays.asList(accounts)); } protected List<AccountInfo> assertQuery(QueryRequest query, List<AccountInfo> accounts) throws Exception { List<AccountInfo> result = query.get(); Iterable<Integer> ids = ids(result); assertWithMessage(format(query, result, accounts)) .that(ids) .containsExactlyElementsIn(ids(accounts)) .inOrder(); return result; } protected void assertAccounts(List<AccountState> accounts, AccountInfo... expectedAccounts) { assertThat(accounts.stream().map(a -> a.account().id().get()).collect(toList())) .containsExactlyElementsIn( Arrays.asList(expectedAccounts).stream().map(a -> a._accountId).collect(toList())); } private String format( QueryRequest query, List<AccountInfo> actualIds, List<AccountInfo> expectedAccounts) { StringBuilder b = new StringBuilder(); b.append("query '").append(query.getQuery()).append("' with expected accounts "); b.append(format(expectedAccounts)); b.append(" and result "); b.append(format(actualIds)); return b.toString(); } private String format(Iterable<AccountInfo> accounts) { StringBuilder b = new StringBuilder(); b.append("["); Iterator<AccountInfo> it = accounts.iterator(); while (it.hasNext()) { AccountInfo a = it.next(); b.append("{") .append(a._accountId) .append(", ") .append("name=") .append(a.name) .append(", ") .append("email=") .append(a.email) .append(", ") .append("username=") .append(a.username) .append("}"); if (it.hasNext()) { b.append(", "); } } b.append("]"); return b.toString(); } protected static Iterable<Integer> ids(AccountInfo... 
accounts) { return ids(Arrays.asList(accounts)); }

// Maps the given accounts to their integer account IDs, preserving order.
protected static Iterable<Integer> ids(List<AccountInfo> accounts) {
  return accounts.stream().map(a -> a._accountId).collect(toList());
}

// Asserts that the account schema under test does NOT contain the given field.
// Used by tests whose queries only work from a certain schema version onwards.
protected void assertMissingField(FieldDef<AccountState, ?> field) {
  assertWithMessage("schema %s has field %s", getSchemaVersion(), field.getName())
      .that(getSchema().hasField(field))
      .isFalse();
}

// Asserts that executing the given query fails with a BadRequestException
// carrying exactly the expected message.
protected void assertFailingQuery(String query, String expectedMessage) throws Exception {
  try {
    assertQuery(query);
    fail("expected BadRequestException for query '" + query + "'");
  } catch (BadRequestException e) {
    assertThat(e.getMessage()).isEqualTo(expectedMessage);
  }
}

// Version of the account index schema that the search index currently uses.
protected int getSchemaVersion() {
  return getSchema().getVersion();
}

protected Schema<AccountState> getSchema() {
  return indexes.getSearchIndex().getSchema();
}

/** Boiler plate code to check two byte arrays for equality */
private static class ByteArrayWrapper {
  // wrapped array; compared by content, not by reference
  private byte[] arr;

  private ByteArrayWrapper(byte[] arr) {
    this.arr = arr;
  }

  @Override
  public boolean equals(Object other) {
    if (!(other instanceof ByteArrayWrapper)) {
      return false;
    }
    return Arrays.equals(arr, ((ByteArrayWrapper) other).arr);
  }

  @Override
  public int hashCode() {
    return Arrays.hashCode(arr);
  }
}
}
/* * Copyright 2014-2016 CyberVision, Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.kaaproject.kaa.client.channel.impl.channels; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import java.util.concurrent.Future; import java.util.concurrent.RunnableScheduledFuture; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.ScheduledThreadPoolExecutor; import org.kaaproject.kaa.client.AbstractKaaClient; import org.kaaproject.kaa.client.channel.ChannelDirection; import org.kaaproject.kaa.client.channel.failover.FailoverManager; import org.kaaproject.kaa.client.channel.IPTransportInfo; import org.kaaproject.kaa.client.channel.KaaDataChannel; import org.kaaproject.kaa.client.channel.KaaDataDemultiplexer; import org.kaaproject.kaa.client.channel.KaaDataMultiplexer; import org.kaaproject.kaa.client.channel.TransportConnectionInfo; import org.kaaproject.kaa.client.channel.ServerType; import org.kaaproject.kaa.client.channel.TransportProtocolId; import org.kaaproject.kaa.client.channel.TransportProtocolIdConstants; import org.kaaproject.kaa.client.channel.connectivity.ConnectivityChecker; import org.kaaproject.kaa.client.channel.failover.FailoverStatus; import org.kaaproject.kaa.client.channel.impl.channels.polling.CancelableCommandRunnable; import org.kaaproject.kaa.client.channel.impl.channels.polling.CancelableRunnable; import 
org.kaaproject.kaa.client.channel.impl.channels.polling.CancelableScheduledFuture; import org.kaaproject.kaa.client.channel.impl.channels.polling.PollCommand; import org.kaaproject.kaa.client.channel.impl.channels.polling.RawDataProcessor; import org.kaaproject.kaa.client.persistence.KaaClientState; import org.kaaproject.kaa.client.transport.AbstractHttpClient; import org.kaaproject.kaa.common.TransportType; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class DefaultOperationsChannel implements KaaDataChannel, RawDataProcessor { public static final Logger LOG = LoggerFactory // NOSONAR .getLogger(DefaultOperationsChannel.class); private static final Map<TransportType, ChannelDirection> SUPPORTED_TYPES = new HashMap<TransportType, ChannelDirection>(); static { SUPPORTED_TYPES.put(TransportType.PROFILE, ChannelDirection.BIDIRECTIONAL); SUPPORTED_TYPES.put(TransportType.CONFIGURATION, ChannelDirection.BIDIRECTIONAL); SUPPORTED_TYPES.put(TransportType.NOTIFICATION, ChannelDirection.BIDIRECTIONAL); SUPPORTED_TYPES.put(TransportType.USER, ChannelDirection.BIDIRECTIONAL); SUPPORTED_TYPES.put(TransportType.EVENT, ChannelDirection.DOWN); } private static final String CHANNEL_ID = "default_operations_long_poll_channel"; private AbstractHttpClient httpClient; private final Object httpClientLock = new Object(); private final Object httpClientSetLock = new Object(); private KaaDataDemultiplexer demultiplexer; private KaaDataMultiplexer multiplexer; private IPTransportInfo currentServer; private final AbstractKaaClient client; private final KaaClientState state; private final FailoverManager failoverManager; private ScheduledExecutorService scheduler; private volatile Future<?> pollFuture; private volatile boolean stopped = true; private volatile boolean processingResponse = false; private volatile boolean taskPosted = false; private volatile boolean isShutdown = false; private volatile boolean isPaused = false; private final CancelableCommandRunnable task = 
new CancelableCommandRunnable() { @Override protected void executeCommand() { if (!stopped) { taskPosted = false; synchronized (httpClientSetLock) { while (httpClient == null && !stopped && !Thread.currentThread().isInterrupted()) { try { httpClientSetLock.wait(); } catch (InterruptedException e) { break; } } } if (!stopped) { currentCommand = new PollCommand(httpClient, DefaultOperationsChannel.this, getSupportedTransportTypes(), currentServer); if (!Thread.currentThread().isInterrupted()) { currentCommand.execute(); } currentCommand = null; if (!taskPosted && !stopped && !Thread.currentThread().isInterrupted()) { taskPosted = true; pollFuture = scheduler.submit(task); } } } } }; public DefaultOperationsChannel(AbstractKaaClient client, KaaClientState state, FailoverManager failoverManager) { this.client = client; this.state = state; this.failoverManager = failoverManager; } protected ScheduledExecutorService createExecutor() { LOG.info("Creating a new executor for channel [{}]", getId()); return new ScheduledThreadPoolExecutor(1) { @Override protected <V> RunnableScheduledFuture<V> decorateTask(Runnable runnable, RunnableScheduledFuture<V> task) { if (runnable instanceof CancelableRunnable) { return new CancelableScheduledFuture<V>((CancelableRunnable) runnable, task); } return super.decorateTask(runnable, task); } }; } private void stopPollScheduler(boolean forced) { if (!stopped) { stopped = true; if (!processingResponse && pollFuture != null) { LOG.info("Stopping poll future.."); pollFuture.cancel(forced); if (forced) { task.waitUntilExecuted(); } LOG.info("Poll scheduler stopped"); } } } private void startPoll() { if (!stopped) { stopPollScheduler(true); } if (scheduler == null) { scheduler = createExecutor(); } stopped = false; LOG.info("Starting poll scheduler.."); taskPosted = true; pollFuture = scheduler.submit(task); LOG.info("Poll scheduler started"); } private void stopPoll() { stopPollScheduler(true); } @Override public LinkedHashMap<String, byte[]> 
createRequest(Map<TransportType, ChannelDirection> types) {// NOSONAR LinkedHashMap<String, byte[]> request = null; try { byte[] requestBodyRaw = multiplexer.compileRequest(types); synchronized (httpClientLock) { request = HttpRequestCreator.createOperationHttpRequest(requestBodyRaw, httpClient.getEncoderDecoder()); } } catch (Exception e) { LOG.error("Failed to create request {}", e); } return request; } @Override public void onResponse(byte[] response) { LOG.debug("Response for channel [{}] received", getId()); byte[] decodedResponse; try { processingResponse = true; synchronized (httpClientLock) { decodedResponse = httpClient.getEncoderDecoder().decodeData(response); } demultiplexer.processResponse(decodedResponse); processingResponse = false; failoverManager.onServerConnected(currentServer); } catch (Exception e) { LOG.error("Failed to process response {}", Arrays.toString(response)); LOG.error("Exception stack trace: ", e); } } @Override public void onServerError(TransportConnectionInfo info) { if (!stopped) { LOG.debug("Channel [{}] connection failed", getId()); synchronized (this) { stopPollScheduler(false); } failoverManager.onServerFailed(info, FailoverStatus.NO_CONNECTIVITY); } else { LOG.debug("Channel [{}] connection aborted", getId()); } } @Override public synchronized void sync(TransportType type) { sync(Collections.singleton(type)); } @Override public synchronized void sync(Set<TransportType> types) { if (isShutdown) { LOG.info("Can't sync. Channel [{}] is down", getId()); return; } if (isPaused) { LOG.info("Can't sync. Channel [{}] is paused", getId()); return; } if (multiplexer == null) { LOG.warn("Can't sync. Channel {} multiplexer is not set", getId()); return; } if (demultiplexer == null) { LOG.warn("Can't sync. Channel {} demultiplexer is not set", getId()); return; } if (currentServer == null) { LOG.warn("Can't sync. 
Server is null"); } for (TransportType type : types) { LOG.info("Processing sync {} for channel [{}]", type, getId()); if (getSupportedTransportTypes().get(type) == null) { LOG.error("Unsupported type {} for channel [{}]", type, getId()); return; } } stopPoll(); startPoll(); } @Override public synchronized void syncAll() { if (isShutdown) { LOG.info("Can't sync. Channel [{}] is down", getId()); return; } if (isPaused) { LOG.info("Can't sync. Channel [{}] is paused", getId()); return; } LOG.info("Processing sync all for channel [{}]", getId()); if (multiplexer != null && demultiplexer != null) { if (currentServer != null) { stopPoll(); startPoll(); } else { LOG.warn("Can't sync. Server is null"); } } } @Override public void syncAck(TransportType type) { syncAck(Collections.singleton(type)); } @Override public void syncAck(Set<TransportType> types) { LOG.info("Sync ack message is ignored for Channel {}", getId()); } @Override public String getId() { return CHANNEL_ID; } @Override public TransportProtocolId getTransportProtocolId() { return TransportProtocolIdConstants.HTTP_TRANSPORT_ID; } @Override public ServerType getServerType() { return ServerType.OPERATIONS; } @Override public synchronized void setDemultiplexer(KaaDataDemultiplexer demultiplexer) { if (demultiplexer != null) { this.demultiplexer = demultiplexer; } } @Override public synchronized void setMultiplexer(KaaDataMultiplexer multiplexer) { if (multiplexer != null) { this.multiplexer = multiplexer; } } // TODO: refactor this as part of KAA-126 @Override public synchronized void setServer(TransportConnectionInfo server) { if (isShutdown) { LOG.info("Can't set server. 
Channel [{}] is down", getId()); return; } if (server != null) { if (!isPaused) { stopPoll(); } this.currentServer = new IPTransportInfo(server); synchronized (httpClientLock) { LOG.debug("Channel [{}]: creating HTTP client..", getId()); this.httpClient = client.createHttpClient(currentServer.getURL() + "/EP/LongSync", state.getPrivateKey(), state.getPublicKey(), currentServer.getPublicKey()); synchronized (httpClientSetLock) { httpClientSetLock.notifyAll(); } LOG.debug("Channel [{}]: HTTP client created", getId()); } if (!isPaused) { startPoll(); } } } @Override public TransportConnectionInfo getServer() { return currentServer; } @Override public void setConnectivityChecker(ConnectivityChecker checker) { // Do nothing } @Override public synchronized void shutdown() { if (!isShutdown) { isShutdown = true; stopPoll(); if (scheduler != null) { scheduler.shutdownNow(); } } } @Override public synchronized void pause() { if (isShutdown) { LOG.info("Can't pause channel. Channel [{}] is down", getId()); return; } if (!isPaused) { isPaused = true; stopPoll(); if (scheduler != null) { scheduler.shutdownNow(); scheduler = null; } } } @Override public synchronized void resume() { if (isShutdown) { LOG.info("Can't resume channel. Channel [{}] is down", getId()); return; } if (isPaused) { isPaused = false; startPoll(); } } @Override public Map<TransportType, ChannelDirection> getSupportedTransportTypes() { return SUPPORTED_TYPES; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.fontbox.ttf;

import java.awt.geom.GeneralPath;
import java.io.Closeable;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.fontbox.FontBoxFont;
import org.apache.fontbox.util.BoundingBox;

/**
 * A TrueType font file.
 *
 * @author Ben Litchfield
 */
public class TrueTypeFont implements FontBoxFont, Closeable
{
    private float version;
    // -1 marks "not read yet"; presumably filled lazily from the maxp/head
    // tables — TODO confirm against the out-of-view accessors.
    private int numberOfGlyphs = -1;
    private int unitsPerEm = -1;
    // Table directory: tag -> table. Tables are read lazily by the getters below.
    protected Map<String,TTFTable> tables = new HashMap<String,TTFTable>();
    private final TTFDataStream data;
    private Map<String, Integer> postScriptNames;

    /**
     * Constructor.  Clients should use the TTFParser to create a new TrueTypeFont object.
     *
     * @param fontData The font data.
     */
    TrueTypeFont(TTFDataStream fontData)
    {
        data = fontData;
    }

    @Override
    public void close() throws IOException
    {
        data.close();
    }

    /**
     * @return Returns the version.
     */
    public float getVersion()
    {
        return version;
    }

    /**
     * Set the version. Package-private, used by TTFParser only.
     * @param versionValue The version to set.
*/
void setVersion(float versionValue)
{
    version = versionValue;
}

/**
 * Add a table definition. Package-private, used by TTFParser only.
 *
 * @param table The table to add.
 */
void addTable( TTFTable table )
{
    tables.put( table.getTag(), table );
}

/**
 * Get all of the tables.
 *
 * @return All of the tables.
 */
public Collection<TTFTable> getTables()
{
    return tables.values();
}

/**
 * Get all of the tables.
 *
 * @return All of the tables, keyed by table tag.
 */
public Map<String, TTFTable> getTableMap()
{
    return tables;
}

/**
 * Returns the raw bytes of the given table.
 *
 * @param table the table whose bytes are read from the underlying data stream.
 * @return the table's bytes, read from its recorded offset and length.
 * @throws IOException if the data could not be read.
 */
public synchronized byte[] getTableBytes(TTFTable table) throws IOException
{
    // save current position
    long currentPosition = data.getCurrentPosition();
    data.seek(table.getOffset());
    // read all data
    byte[] bytes = data.read((int)table.getLength());
    // restore current position
    data.seek(currentPosition);
    return bytes;
}

/**
 * This will get the naming table for the true type font.
 * The table is read lazily on first access.
 *
 * @return The naming table.
 */
public synchronized NamingTable getNaming() throws IOException
{
    NamingTable naming = (NamingTable)tables.get( NamingTable.TAG );
    if (naming != null && !naming.getInitialized())
    {
        readTable(naming);
    }
    return naming;
}

/**
 * Get the postscript table for this TTF. Read lazily on first access.
 *
 * @return The postscript table.
 */
public synchronized PostScriptTable getPostScript() throws IOException
{
    PostScriptTable postscript = (PostScriptTable)tables.get( PostScriptTable.TAG );
    if (postscript != null && !postscript.getInitialized())
    {
        readTable(postscript);
    }
    return postscript;
}

/**
 * Get the OS/2 table for this TTF. Read lazily on first access.
 *
 * @return The OS/2 table.
 */
public synchronized OS2WindowsMetricsTable getOS2Windows() throws IOException
{
    OS2WindowsMetricsTable os2WindowsMetrics = (OS2WindowsMetricsTable)tables.get( OS2WindowsMetricsTable.TAG );
    if (os2WindowsMetrics != null && !os2WindowsMetrics.getInitialized())
    {
        readTable(os2WindowsMetrics);
    }
    return os2WindowsMetrics;
}

/**
 * Get the maxp table for this TTF.
 *
 * @return The maxp table.
*/ public synchronized MaximumProfileTable getMaximumProfile() throws IOException { MaximumProfileTable maximumProfile = (MaximumProfileTable)tables.get( MaximumProfileTable.TAG ); if (maximumProfile != null && !maximumProfile.getInitialized()) { readTable(maximumProfile); } return maximumProfile; } /** * Get the head table for this TTF. * * @return The head table. */ public synchronized HeaderTable getHeader() throws IOException { HeaderTable header = (HeaderTable)tables.get( HeaderTable.TAG ); if (header != null && !header.getInitialized()) { readTable(header); } return header; } /** * Get the hhea table for this TTF. * * @return The hhea table. */ public synchronized HorizontalHeaderTable getHorizontalHeader() throws IOException { HorizontalHeaderTable horizontalHeader = (HorizontalHeaderTable)tables.get( HorizontalHeaderTable.TAG ); if (horizontalHeader != null && !horizontalHeader.getInitialized()) { readTable(horizontalHeader); } return horizontalHeader; } /** * Get the hmtx table for this TTF. * * @return The hmtx table. */ public synchronized HorizontalMetricsTable getHorizontalMetrics() throws IOException { HorizontalMetricsTable horizontalMetrics = (HorizontalMetricsTable)tables.get( HorizontalMetricsTable.TAG ); if (horizontalMetrics != null && !horizontalMetrics.getInitialized()) { readTable(horizontalMetrics); } return horizontalMetrics; } /** * Get the loca table for this TTF. * * @return The loca table. */ public synchronized IndexToLocationTable getIndexToLocation() throws IOException { IndexToLocationTable indexToLocation = (IndexToLocationTable)tables.get( IndexToLocationTable.TAG ); if (indexToLocation != null && !indexToLocation.getInitialized()) { readTable(indexToLocation); } return indexToLocation; } /** * Get the glyf table for this TTF. * * @return The glyf table. 
*/ public synchronized GlyphTable getGlyph() throws IOException { GlyphTable glyph = (GlyphTable)tables.get( GlyphTable.TAG ); if (glyph != null && !glyph.getInitialized()) { readTable(glyph); } return glyph; } /** * Get the "cmap" table for this TTF. * * @return The "cmap" table. */ public synchronized CmapTable getCmap() throws IOException { CmapTable cmap = (CmapTable)tables.get( CmapTable.TAG ); if (cmap != null && !cmap.getInitialized()) { readTable(cmap); } return cmap; } /** * Get the vhea table for this TTF. * * @return The vhea table. */ public synchronized VerticalHeaderTable getVerticalHeader() throws IOException { VerticalHeaderTable verticalHeader = (VerticalHeaderTable)tables.get( VerticalHeaderTable.TAG ); if (verticalHeader != null && !verticalHeader.getInitialized()) { readTable(verticalHeader); } return verticalHeader; } /** * Get the vmtx table for this TTF. * * @return The vmtx table. */ public synchronized VerticalMetricsTable getVerticalMetrics() throws IOException { VerticalMetricsTable verticalMetrics = (VerticalMetricsTable)tables.get( VerticalMetricsTable.TAG ); if (verticalMetrics != null && !verticalMetrics.getInitialized()) { readTable(verticalMetrics); } return verticalMetrics; } /** * Get the VORG table for this TTF. * * @return The VORG table. */ public synchronized VerticalOriginTable getVerticalOrigin() throws IOException { VerticalOriginTable verticalOrigin = (VerticalOriginTable)tables.get( VerticalOriginTable.TAG ); if (verticalOrigin != null && !verticalOrigin.getInitialized()) { readTable(verticalOrigin); } return verticalOrigin; } /** * Get the "kern" table for this TTF. * * @return The "kern" table. 
*/ public synchronized KerningTable getKerning() throws IOException { KerningTable kerning = (KerningTable)tables.get( KerningTable.TAG ); if (kerning != null && !kerning.getInitialized()) { readTable(kerning); } return kerning; } /** * This permit to get the data of the True Type Font * program representing the stream used to build this * object (normally from the TTFParser object). * * @return COSStream True type font program stream * * @throws IOException If there is an error getting the font data. */ public InputStream getOriginalData() throws IOException { return data.getOriginalData(); } /** * Read the given table if necessary. Package-private, used by TTFParser only. * * @param table the table to be initialized * * @throws IOException if there was an error reading the table. */ void readTable(TTFTable table) throws IOException { // save current position long currentPosition = data.getCurrentPosition(); data.seek(table.getOffset()); table.read(this, data); // restore current position data.seek(currentPosition); } /** * Returns the number of glyphs (MaximuProfile.numGlyphs). * * @return the number of glyphs */ public int getNumberOfGlyphs() throws IOException { if (numberOfGlyphs == -1) { MaximumProfileTable maximumProfile = getMaximumProfile(); if (maximumProfile != null) { numberOfGlyphs = maximumProfile.getNumGlyphs(); } else { // this should never happen numberOfGlyphs = 0; } } return numberOfGlyphs; } /** * Returns the units per EM (Header.unitsPerEm). * * @return units per EM */ public int getUnitsPerEm() throws IOException { if (unitsPerEm == -1) { HeaderTable header = getHeader(); if (header != null) { unitsPerEm = header.getUnitsPerEm(); } else { // this should never happen unitsPerEm = 0; } } return unitsPerEm; } /** * Returns the width for the given GID. 
* * @param gid the GID * @return the width */ public int getAdvanceWidth(int gid) throws IOException { HorizontalMetricsTable hmtx = getHorizontalMetrics(); if (hmtx != null) { return hmtx.getAdvanceWidth(gid); } else { // this should never happen return 250; } } /** * Returns the height for the given GID. * * @param gid the GID * @return the height */ public int getAdvanceHeight(int gid) throws IOException { VerticalMetricsTable vmtx = getVerticalMetrics(); if (vmtx != null) { return vmtx.getAdvanceHeight(gid); } else { // this should never happen return 250; } } @Override public String getName() throws IOException { if (getNaming() != null) { return getNaming().getPostScriptName(); } else { return null; } } private synchronized void readPostScriptNames() throws IOException { if (postScriptNames == null) { postScriptNames = new HashMap<String, Integer>(); if (getPostScript() != null) { String[] names = getPostScript().getGlyphNames(); if (names != null) { for (int i = 0; i < names.length; i++) { postScriptNames.put(names[i], i); } } } } } /** * Returns the best Unicode from the font (the most general). The PDF spec says that "The means * by which this is accomplished are implementation-dependent." * * @throws IOException if the font could not be read */ public CmapSubtable getUnicodeCmap() throws IOException { return getUnicodeCmap(true); } /** * Returns the best Unicode from the font (the most general). The PDF spec says that "The means * by which this is accomplished are implementation-dependent." * * @param isStrict False if we allow falling back to any cmap, even if it's not Unicode. 
* @throws IOException if the font could not be read, or there is no Unicode cmap */ public CmapSubtable getUnicodeCmap(boolean isStrict) throws IOException { CmapTable cmapTable = getCmap(); if (cmapTable == null) { return null; } CmapSubtable cmap = cmapTable.getSubtable(CmapTable.PLATFORM_UNICODE, CmapTable.ENCODING_UNICODE_2_0_FULL); if (cmap == null) { cmap = cmapTable.getSubtable(CmapTable.PLATFORM_UNICODE, CmapTable.ENCODING_UNICODE_2_0_BMP); } if (cmap == null) { cmap = cmapTable.getSubtable(CmapTable.PLATFORM_WINDOWS, CmapTable.ENCODING_WIN_UNICODE_BMP); } if (cmap == null) { // Microsoft's "Recommendations for OpenType Fonts" says that "Symbol" encoding // actually means "Unicode, non-standard character set" cmap = cmapTable.getSubtable(CmapTable.PLATFORM_WINDOWS, CmapTable.ENCODING_WIN_SYMBOL); } if (cmap == null) { if (isStrict) { throw new IOException("The TrueType font does not contain a Unicode cmap"); } else { // fallback to the first cmap (may not be Unicode, so may produce poor results) cmap = cmapTable.getCmaps()[0]; } } return cmap; } /** * Returns the GID for the given PostScript name, if the "post" table is present. */ public int nameToGID(String name) throws IOException { // look up in 'post' table readPostScriptNames(); Integer gid = postScriptNames.get(name); if (gid != null && gid > 0 && gid < getMaximumProfile().getNumGlyphs()) { return gid; } // look up in 'cmap' int uni = parseUniName(name); if (uni > -1) { CmapSubtable cmap = getUnicodeCmap(false); return cmap.getGlyphId(uni); } return 0; } /** * Parses a Unicode PostScript name in the format uniXXXX. 
*/ private int parseUniName(String name) throws IOException { if (name.startsWith("uni") && name.length() == 7) { int nameLength = name.length(); StringBuilder uniStr = new StringBuilder(); try { for (int chPos = 3; chPos + 4 <= nameLength; chPos += 4) { int codePoint = Integer.parseInt(name.substring(chPos, chPos + 4), 16); if (codePoint <= 0xD7FF || codePoint >= 0xE000) // disallowed code area { uniStr.append((char) codePoint); } } String unicode = uniStr.toString(); if (unicode.length() == 0) { return -1; } return unicode.codePointAt(0); } catch (NumberFormatException e) { return -1; } } return -1; } @Override public GeneralPath getPath(String name) throws IOException { int gid = nameToGID(name); // some glyphs have no outlines (e.g. space, table, newline) GlyphData glyph = getGlyph().getGlyph(gid); if (glyph == null) { return new GeneralPath(); } else { // must scaled by caller using FontMatrix return glyph.getPath(); } } @Override public float getWidth(String name) throws IOException { Integer gid = nameToGID(name); return getAdvanceWidth(gid); } @Override public boolean hasGlyph(String name) throws IOException { return nameToGID(name) != 0; } @Override public BoundingBox getFontBBox() throws IOException { short xMin = getHeader().getXMin(); short xMax = getHeader().getXMax(); short yMin = getHeader().getYMin(); short yMax = getHeader().getYMax(); float scale = 1000f / getUnitsPerEm(); return new BoundingBox(xMin * scale, yMin * scale, xMax * scale, yMax * scale); } @Override public List<Number> getFontMatrix() throws IOException { float scale = 1000f / getUnitsPerEm(); return Arrays.<Number>asList(0.001f * scale, 0, 0, 0.001f * scale, 0, 0); } @Override public String toString() { try { if (getNaming() != null) { return getNaming().getPostScriptName(); } else { return "(null)"; } } catch (IOException e) { return "(null - " + e.getMessage() + ")"; } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.examples.ml.tutorial.hyperparametertuning;

import java.io.FileNotFoundException;
import java.util.Arrays;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.Ignition;
import org.apache.ignite.examples.ml.tutorial.TitanicUtils;
import org.apache.ignite.ml.dataset.feature.extractor.Vectorizer;
import org.apache.ignite.ml.dataset.feature.extractor.impl.DummyVectorizer;
import org.apache.ignite.ml.environment.LearningEnvironmentBuilder;
import org.apache.ignite.ml.environment.logging.ConsoleLogger;
import org.apache.ignite.ml.environment.parallelism.ParallelismStrategy;
import org.apache.ignite.ml.math.primitives.vector.Vector;
import org.apache.ignite.ml.preprocessing.Preprocessor;
import org.apache.ignite.ml.preprocessing.encoding.EncoderTrainer;
import org.apache.ignite.ml.preprocessing.encoding.EncoderType;
import org.apache.ignite.ml.preprocessing.imputing.ImputerTrainer;
import org.apache.ignite.ml.preprocessing.minmaxscaling.MinMaxScalerTrainer;
import org.apache.ignite.ml.preprocessing.normalization.NormalizationTrainer;
import org.apache.ignite.ml.selection.cv.CrossValidation;
import org.apache.ignite.ml.selection.cv.CrossValidationResult;
import org.apache.ignite.ml.selection.paramgrid.BruteForceStrategy;
import org.apache.ignite.ml.selection.paramgrid.ParamGrid;
import org.apache.ignite.ml.selection.scoring.evaluator.Evaluator;
import org.apache.ignite.ml.selection.scoring.metric.MetricName;
import org.apache.ignite.ml.selection.scoring.metric.classification.Accuracy;
import org.apache.ignite.ml.selection.split.TrainTestDatasetSplitter;
import org.apache.ignite.ml.selection.split.TrainTestSplit;
import org.apache.ignite.ml.tree.DecisionTreeClassificationTrainer;
import org.apache.ignite.ml.tree.DecisionTreeModel;

/**
 * To choose the best hyper-parameters the cross-validation with {@link ParamGrid} will be used in this example.
 * <p>
 * Code in this example launches Ignite grid and fills the cache with test data (based on Titanic passengers data).</p>
 * <p>
 * After that it defines how to split the data to train and test sets and configures preprocessors that extract features
 * from an upstream data and perform other desired changes over the extracted data.</p>
 * <p>
 * Then, it tunes hyper-parameters with K-fold Cross-Validation on the split training set and trains the model based on the
 * processed data using decision tree classification and the obtained hyper-parameters.</p>
 * <p>
 * Finally, this example uses {@link Evaluator} functionality to compute metrics from predictions.</p>
 * <p>
 * The purpose of cross-validation is model checking, not model building.</p>
 * <p>
 * We train {@code k} different models.</p>
 * <p>
 * They differ in that {@code 1/(k-1)}th of the training data is exchanged against other cases.</p>
 * <p>
 * These models are sometimes called surrogate models because the (average) performance measured for these models is
 * taken as a surrogate of the performance of the model trained on all cases.</p>
 * <p>
 * All scenarios are described there: https://sebastianraschka.com/faq/docs/evaluate-a-model.html</p>
 */
public class Step_14_Parallel_Brute_Force_Search {
    /**
     * Run example.
     *
     * @param args Command line arguments (unused).
     */
    public static void main(String[] args) {
        System.out.println();
        System.out.println(">>> Tutorial step 14 (Brute Force) example started.");

        try (Ignite ignite = Ignition.start("examples/config/example-ignite.xml")) {
            try {
                // Load the Titanic passengers data set into an Ignite cache.
                IgniteCache<Integer, Vector> dataCache = TitanicUtils.readPassengers(ignite);

                // Extracts "pclass", "sibsp", "parch", "sex", "embarked", "age", "fare".
                final Vectorizer<Integer, Vector, Integer, Double> vectorizer
                    = new DummyVectorizer<Integer>(0, 3, 4, 5, 6, 8, 10).labeled(1);

                // 75% of the data goes to the training set; the rest is held out for testing.
                TrainTestSplit<Integer, Vector> split = new TrainTestDatasetSplitter<Integer, Vector>()
                    .split(0.75);

                // Encode the two string features (indices 1 and 6 of the extracted vector).
                Preprocessor<Integer, Vector> strEncoderPreprocessor = new EncoderTrainer<Integer, Vector>()
                    .withEncoderType(EncoderType.STRING_ENCODER)
                    .withEncodedFeature(1)
                    .withEncodedFeature(6)
                    .fit(ignite,
                        dataCache,
                        vectorizer
                    );

                // Fill in missing feature values.
                Preprocessor<Integer, Vector> imputingPreprocessor = new ImputerTrainer<Integer, Vector>()
                    .fit(ignite,
                        dataCache,
                        strEncoderPreprocessor
                    );

                // Scale each feature to the [0, 1] range.
                Preprocessor<Integer, Vector> minMaxScalerPreprocessor = new MinMaxScalerTrainer<Integer, Vector>()
                    .fit(
                        ignite,
                        dataCache,
                        imputingPreprocessor
                    );

                // Kept as a named variable so the param grid below can tune its "p" parameter.
                NormalizationTrainer<Integer, Vector> normalizationTrainer = new NormalizationTrainer<Integer, Vector>()
                    .withP(1);

                Preprocessor<Integer, Vector> normalizationPreprocessor = normalizationTrainer
                    .fit(
                        ignite,
                        dataCache,
                        minMaxScalerPreprocessor
                    );

                // Tune hyper-parameters with K-fold Cross-Validation on the split training set.
                DecisionTreeClassificationTrainer trainerCV = new DecisionTreeClassificationTrainer();

                CrossValidation<DecisionTreeModel, Integer, Vector> scoreCalculator
                    = new CrossValidation<>();

                // Exhaustive (brute-force) search over the cartesian product of these values.
                ParamGrid paramGrid = new ParamGrid()
                    .withParameterSearchStrategy(new BruteForceStrategy())
                    .addHyperParam("p", normalizationTrainer::withP,
                        new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0})
                    .addHyperParam("maxDeep", trainerCV::withMaxDeep,
                        new Double[] {1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0, 10.0})
                    .addHyperParam(
                        "minImpurityDecrease",
                        trainerCV::withMinImpurityDecrease,
                        new Double[] {0.0, 0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9, 1.0}
                    );

                // Configure 3-fold CV on the training portion; candidate models are evaluated
                // in parallel on the default pool.
                scoreCalculator
                    .withIgnite(ignite)
                    .withUpstreamCache(dataCache)
                    .withEnvironmentBuilder(LearningEnvironmentBuilder.defaultBuilder()
                        .withParallelismStrategyTypeDependency(ParallelismStrategy.ON_DEFAULT_POOL)
                        .withLoggingFactoryDependency(ConsoleLogger.Factory.LOW))
                    .withTrainer(trainerCV)
                    .isRunningOnPipeline(false)
                    .withMetric(MetricName.ACCURACY)
                    .withFilter(split.getTrainFilter())
                    .withPreprocessor(normalizationPreprocessor)
                    .withAmountOfFolds(3)
                    .withParamGrid(paramGrid);

                CrossValidationResult crossValidationRes = scoreCalculator.tuneHyperParameters();

                System.out.println("Train with maxDeep: " + crossValidationRes.getBest("maxDeep")
                    + " and minImpurityDecrease: " + crossValidationRes.getBest("minImpurityDecrease"));

                // Train the final model on the full training set with the best tree parameters.
                // NOTE(review): the tuned "p" is applied via normalizationTrainer above, but
                // normalizationPreprocessor was fitted before tuning — verify this is intended.
                DecisionTreeClassificationTrainer trainer = new DecisionTreeClassificationTrainer()
                    .withMaxDeep(crossValidationRes.getBest("maxDeep"))
                    .withMinImpurityDecrease(crossValidationRes.getBest("minImpurityDecrease"));

                System.out.println(crossValidationRes);

                System.out.println("Best score: " + Arrays.toString(crossValidationRes.getBestScore()));
                System.out.println("Best hyper params: " + crossValidationRes.getBestHyperParams());
                System.out.println("Best average score: " + crossValidationRes.getBestAvgScore());

                crossValidationRes.getScoringBoard().forEach((hyperParams, score)
                    -> System.out.println("Score " + Arrays.toString(score) + " for hyper params " + hyperParams));

                // Train decision tree model.
                DecisionTreeModel bestMdl = trainer.fit(
                    ignite,
                    dataCache,
                    split.getTrainFilter(),
                    normalizationPreprocessor
                );

                System.out.println("\n>>> Trained model: " + bestMdl);

                // Evaluate on the held-out test portion.
                double accuracy = Evaluator.evaluate(
                    dataCache,
                    split.getTestFilter(),
                    bestMdl,
                    normalizationPreprocessor,
                    new Accuracy<>()
                );

                System.out.println("\n>>> Accuracy " + accuracy);
                System.out.println("\n>>> Test Error " + (1 - accuracy));

                System.out.println(">>> Tutorial step 14 (Brute Force) example completed.");
            }
            catch (FileNotFoundException e) {
                e.printStackTrace();
            }
        }
        finally {
            System.out.flush();
        }
    }
}
/*
 * Copyright 2017 StreamSets Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.streamsets.pipeline.stage.destination.mapreduce;

import com.google.common.annotations.VisibleForTesting;
import com.streamsets.pipeline.api.Batch;
import com.streamsets.pipeline.api.Record;
import com.streamsets.pipeline.api.Stage;
import com.streamsets.pipeline.api.StageException;
import com.streamsets.pipeline.api.base.BaseExecutor;
import com.streamsets.pipeline.api.base.OnRecordErrorException;
import com.streamsets.pipeline.api.el.ELEval;
import com.streamsets.pipeline.api.el.ELEvalException;
import com.streamsets.pipeline.api.el.ELVars;
import com.streamsets.pipeline.lib.el.RecordEL;
import com.streamsets.pipeline.stage.common.DefaultErrorRecordHandler;
import com.streamsets.pipeline.stage.common.ErrorRecordHandler;
import com.streamsets.pipeline.stage.destination.mapreduce.config.JobConfig;
import com.streamsets.pipeline.stage.destination.mapreduce.config.MapReduceConfig;
import com.streamsets.pipeline.stage.destination.mapreduce.jobtype.avroconvert.AvroConversionCommonConstants;
import com.streamsets.pipeline.stage.destination.mapreduce.jobtype.avroorc.AvroOrcConstants;
import com.streamsets.pipeline.lib.converter.AvroParquetConstants;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.ReflectionUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;

/**
 * Executor stage that, for each incoming record, builds a Hadoop MapReduce job
 * configuration (evaluating record ELs against the configured properties) and
 * submits the job under the configured proxy user.
 */
public class MapReduceExecutor extends BaseExecutor {
  private static final Logger LOG = LoggerFactory.getLogger(MapReduceExecutor.class);

  /** Hadoop/Kerberos connection configuration shared by all jobs. */
  private final MapReduceConfig mapReduceConfig;
  /** Per-job configuration (job type, job creator class, dynamic properties). */
  private final JobConfig jobConfig;
  private ErrorRecordHandler errorRecordHandler;

  // When true, write() blocks until each submitted job finishes. Only for tests;
  // NOTE(review): spelling ("Completition") is kept because the field is public.
  @VisibleForTesting
  public boolean waitForCompletition;

  public MapReduceExecutor(MapReduceConfig mapReduceConfig, JobConfig jobConfig) {
    this.mapReduceConfig = mapReduceConfig;
    this.jobConfig = jobConfig;
    this.waitForCompletition = false;
  }

  /**
   * Validates both config beans and sets up the error-record handler.
   *
   * @return accumulated configuration issues; empty when the stage is ready to run.
   */
  @Override
  public List<ConfigIssue> init() {
    List<ConfigIssue> issues = super.init();
    issues.addAll(mapReduceConfig.init(getContext(), "mapReduceConfig"));
    issues.addAll(jobConfig.init(getContext(), "jobConfig"));
    errorRecordHandler = new DefaultErrorRecordHandler(getContext());
    return issues;
  }

  /**
   * Handy class to keep track of various ELs with the shared variables object.
   */
  private static class EvalContext {
    /** Shared EL variables; the current record is injected via setRecord(). */
    private ELVars variables;
    /** Cache of ELEval instances, keyed by config name. */
    private Map<String, ELEval> evals;
    private Stage.Context context;

    public EvalContext(Stage.Context context) {
      this.context = context;
      this.variables = context.createELVars();
      this.evals = new HashMap<>();
    }

    /** Makes the given record visible to subsequent EL evaluations. */
    public void setRecord(Record record) {
      RecordEL.setRecordInContext(variables, record);
    }

    /**
     * Evaluates the expression to a String.
     *
     * @param name config name used for EL evaluation and error reporting
     * @param expr the expression to evaluate
     * @param failOnEmptyString when true, an empty result raises MAPREDUCE_0007
     * @throws ELEvalException if evaluation fails or the result is empty and that is disallowed
     */
    public String evaluateToString(String name, String expr, boolean failOnEmptyString) throws ELEvalException {
      String evaluated = evaluate(name, expr, String.class);
      if(failOnEmptyString && StringUtils.isEmpty(evaluated)) {
        throw new ELEvalException(MapReduceErrors.MAPREDUCE_0007, expr, name);
      }
      return evaluated;
    }

    /** Evaluates the expression to the given type using a cached evaluator. */
    public <T> T evaluate(String name, String expr, Class<T> klass) throws ELEvalException {
      return getEval(name).eval(variables, expr, klass);
    }

    /** Returns (creating and caching if needed) the ELEval for the given config name. */
    public ELEval getEval(String name) {
      if(evals.containsKey(name)) {
        return evals.get(name);
      }

      ELEval eval = context.createELEval(name);
      evals.put(name, eval);
      return eval;
    }
  }

  /**
   * Submits one MapReduce job per record in the batch. Failures for a record are
   * routed through the error-record handler; successful submissions emit a
   * "job created" event with the tracking URL and job id.
   *
   * @param batch records to process
   * @throws StageException if the error-record handler decides to propagate a failure
   */
  @Override
  public void write(Batch batch) throws StageException {
    EvalContext eval = new EvalContext(getContext());

    Iterator<Record> it = batch.getRecords();
    while(it.hasNext()) {
      final Record record = it.next();
      eval.setRecord(record);

      Job job = null;
      try {
        // Job configuration object is a clone of the original one that we're keeping in mapReduceConfig class
        final Configuration jobConfiguration = new Configuration(mapReduceConfig.getConfiguration());

        // Evaluate all dynamic properties and store them in the configuration job
        // (keys must evaluate non-empty; values may be empty)
        for (Map.Entry<String, String> entry : jobConfig.jobConfigs.entrySet()) {
          String key = eval.evaluateToString("jobConfigs", entry.getKey(), true);
          String value = eval.evaluateToString("jobConfigs", entry.getValue(), false);

          jobConfiguration.set(key, value);
        }

        // For build-in job creators, evaluate their properties and persist them in the MR config
        switch (jobConfig.jobType) {
          case AVRO_PARQUET:
            jobConfiguration.set(AvroConversionCommonConstants.INPUT_FILE, eval.evaluateToString("inputFile", jobConfig.avroConversionCommonConfig.inputFile, true));
            jobConfiguration.set(AvroConversionCommonConstants.OUTPUT_DIR, eval.evaluateToString("outputDirectory", jobConfig.avroConversionCommonConfig.outputDirectory, true));
            jobConfiguration.setBoolean(AvroConversionCommonConstants.KEEP_INPUT_FILE, jobConfig.avroConversionCommonConfig.keepInputFile);
            jobConfiguration.set(AvroParquetConstants.COMPRESSION_CODEC_NAME, eval.evaluateToString("compressionCodec", jobConfig.avroParquetConfig.compressionCodec, false));
            jobConfiguration.setInt(AvroParquetConstants.ROW_GROUP_SIZE, jobConfig.avroParquetConfig.rowGroupSize);
            jobConfiguration.setInt(AvroParquetConstants.PAGE_SIZE, jobConfig.avroParquetConfig.pageSize);
            jobConfiguration.setInt(AvroParquetConstants.DICTIONARY_PAGE_SIZE, jobConfig.avroParquetConfig.dictionaryPageSize);
            jobConfiguration.setInt(AvroParquetConstants.MAX_PADDING_SIZE, jobConfig.avroParquetConfig.maxPaddingSize);
            jobConfiguration.setBoolean(AvroConversionCommonConstants.OVERWRITE_TMP_FILE, jobConfig.avroConversionCommonConfig.overwriteTmpFile);
            jobConfiguration.set(AvroParquetConstants.TIMEZONE, jobConfig.avroParquetConfig.timeZoneID);
            break;
          case AVRO_ORC:
            jobConfiguration.set(AvroConversionCommonConstants.INPUT_FILE, eval.evaluateToString("inputFile", jobConfig.avroConversionCommonConfig.inputFile, true));
            jobConfiguration.set(AvroConversionCommonConstants.OUTPUT_DIR, eval.evaluateToString("outputDirectory", jobConfig.avroConversionCommonConfig.outputDirectory, true));
            jobConfiguration.setBoolean(AvroConversionCommonConstants.KEEP_INPUT_FILE, jobConfig.avroConversionCommonConfig.keepInputFile);
            jobConfiguration.setBoolean(AvroConversionCommonConstants.OVERWRITE_TMP_FILE, jobConfig.avroConversionCommonConfig.overwriteTmpFile);
            jobConfiguration.setInt(AvroOrcConstants.ORC_BATCH_SIZE, jobConfig.avroOrcConfig.orcBatchSize);
            break;
          case CUSTOM:
            // Nothing because custom is generic one that have no special config properties
            break;
          default:
            throw new UnsupportedOperationException("Unsupported JobType: " + jobConfig.jobType);
        }

        job = createAndSubmitJob(jobConfiguration);
      } catch (IOException|InterruptedException|ELEvalException e) {
        LOG.error("Can't submit mapreduce job", e);
        errorRecordHandler.onError(new OnRecordErrorException(record, MapReduceErrors.MAPREDUCE_0005, e.getMessage(), e));
      }

      if(job != null) {
        MapReduceExecutorEvents.JOB_CREATED.create(getContext())
          .with("tracking-url", job.getTrackingURL())
          .with("job-id", job.getJobID().toString())
          .createAndSend();
      }
    }
  }

  /**
   * Creates the job via the configured job-creator class and submits it, all within
   * the configured UGI's doAs so the job runs as the proper Hadoop user.
   *
   * @param configuration fully-populated per-record job configuration
   * @return the submitted (running) job
   */
  private Job createAndSubmitJob(final Configuration configuration) throws IOException, InterruptedException {
    return mapReduceConfig.getUGI().doAs((PrivilegedExceptionAction<Job>) () -> {
      // Create new mapreduce job object
      Callable<Job> jobCreator = ReflectionUtils.newInstance(jobConfig.getJobCreator(), configuration);
      Job job = jobCreator.call();
      job.setJobName(jobConfig.jobName);

      // In trace mode, dump all the configuration that we're using for the job
      if(LOG.isTraceEnabled()) {
        LOG.trace("Using the following configuration object for mapreduce job.");
        for(Map.Entry<String, String> entry : configuration) {
          LOG.trace("  Config: {}={}", entry.getKey(), entry.getValue());
        }
      }

      // Submit it for processing. Blocking mode is only for testing.
      job.submit();
      if(waitForCompletition) {
        job.waitForCompletion(true);
      }

      return job;
    });
  }
}
package org.mockserver.server;

import com.google.common.net.MediaType;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
import org.mockserver.client.ClientException;
import org.mockserver.echo.http.EchoServer;
import org.mockserver.integration.server.AbstractExtendedSameJVMMockingIntegrationTest;
import org.mockserver.mock.action.ExpectationForwardCallback;
import org.mockserver.mock.action.ExpectationResponseCallback;
import org.mockserver.model.HttpRequest;
import org.mockserver.model.HttpResponse;
import org.mockserver.model.HttpStatusCode;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

import static io.netty.handler.codec.http.HttpHeaderNames.CONTENT_TYPE;
import static org.hamcrest.Matchers.containsString;
import static org.junit.Assert.assertEquals;
import static org.mockserver.character.Character.NEW_LINE;
import static org.mockserver.model.ConnectionOptions.connectionOptions;
import static org.mockserver.model.Cookie.cookie;
import static org.mockserver.model.Header.header;
import static org.mockserver.model.HttpClassCallback.callback;
import static org.mockserver.model.HttpError.error;
import static org.mockserver.model.HttpRequest.request;
import static org.mockserver.model.HttpResponse.response;
import static org.mockserver.model.HttpStatusCode.ACCEPTED_202;
import static org.mockserver.model.HttpStatusCode.OK_200;
import static org.mockserver.model.Parameter.param;

/**
 * Integration tests for MockServer deployed as a WAR. Extends the same-JVM
 * base suite and additionally verifies (a) URL-encoded path matching and
 * (b) that features unsupported in the WAR deployment (ConnectionOptions,
 * HttpError, object callbacks) are rejected with a ClientException.
 *
 * @author jamesdbloom
 */
public abstract class AbstractExtendedDeployableWARMockingIntegrationTest extends AbstractExtendedSameJVMMockingIntegrationTest {

    // Used by the "unsupported in WAR" tests to assert a ClientException with a
    // specific message is raised when the expectation is registered.
    @Rule
    public ExpectedException exception = ExpectedException.none();

    @Test
    public void shouldReturnResponseByMatchingUrlEncodedPath() throws UnsupportedEncodingException {
        // when - expectation registered against the decoded path (literal "@")
        mockServerClient
            .when(
                request()
                    .withPath(calculatePath("ab@c.de"))
            )
            .respond(
                response()
                    .withStatusCode(HttpStatusCode.ACCEPTED_202.code())
                    .withReasonPhrase(HttpStatusCode.ACCEPTED_202.reasonPhrase())
                    .withBody("some_body_response")
            );

        // then
        // - in http: a request sent with the percent-encoded form "ab%40c.de" must still match
        assertEquals(
            response()
                .withStatusCode(HttpStatusCode.ACCEPTED_202.code())
                .withReasonPhrase(HttpStatusCode.ACCEPTED_202.reasonPhrase())
                .withBody("some_body_response"),
            makeRequest(
                request()
                    .withMethod("GET")
                    .withPath(calculatePath("ab%40c.de"))
                    .withQueryStringParameters(
                        param("queryStringParameterOneName", "queryStringParameterOneValue"),
                        param("queryStringParameterTwoName", "queryStringParameterTwoValue")
                    )
                    .withHeaders(header("headerNameRequest", "headerValueRequest"))
                    .withCookies(cookie("cookieNameRequest", "cookieValueRequest")),
                headersToIgnore)
        );
        // - in https: same expectation must also match over TLS
        assertEquals(
            response()
                .withStatusCode(HttpStatusCode.ACCEPTED_202.code())
                .withReasonPhrase(HttpStatusCode.ACCEPTED_202.reasonPhrase())
                .withBody("some_body_response"),
            makeRequest(
                request()
                    .withMethod("GET")
                    .withSecure(true)
                    .withPath(calculatePath("ab%40c.de"))
                    .withQueryStringParameters(
                        param("queryStringParameterOneName", "queryStringParameterOneValue"),
                        param("queryStringParameterTwoName", "queryStringParameterTwoValue")
                    )
                    .withHeaders(header("headerNameRequest", "headerValueRequest"))
                    .withCookies(cookie("cookieNameRequest", "cookieValueRequest")),
                headersToIgnore)
        );
    }

    @Test
    public void shouldReturnErrorResponseForExpectationWithConnectionOptions() {
        // given - ConnectionOptions cannot be honoured when running inside a servlet container
        exception.expect(ClientException.class);
        exception.expectMessage(containsString("ConnectionOptions is not supported by MockServer deployed as a WAR"));

        // when - registering the expectation should fail immediately
        mockServerClient
            .when(
                request()
            )
            .respond(
                response()
                    .withBody("some_long_body")
                    .withConnectionOptions(
                        connectionOptions()
                            .withKeepAliveOverride(true)
                            .withContentLengthHeaderOverride(10)
                    )
            );
    }

    @Test
    public void shouldReturnErrorResponseForExpectationWithHttpError() {
        // given - dropping the connection is not possible from inside a servlet container
        exception.expect(ClientException.class);
        exception.expectMessage(containsString("HttpError is not supported by MockServer deployed as a WAR"));

        // when
        mockServerClient
            .when(
                request()
            )
            .error(
                error()
                    .withDropConnection(true)
            );
    }

    @Test
    public void shouldReturnErrorResponseForRespondByObjectCallback() {
        // given - object callbacks need a websocket back-channel, unavailable in the WAR deployment
        exception.expect(ClientException.class);
        exception.expectMessage(containsString("ExpectationResponseCallback and ExpectationForwardCallback is not supported by MockServer deployed as a WAR"));

        // when
        mockServerClient
            .when(
                request()
                    .withPath(calculatePath("object_callback"))
            )
            .respond(
                new ExpectationResponseCallback() {
                    @Override
                    public HttpResponse handle(HttpRequest httpRequest) {
                        return response()
                            .withStatusCode(ACCEPTED_202.code())
                            .withReasonPhrase(ACCEPTED_202.reasonPhrase())
                            .withHeaders(
                                header("x-object-callback", "test_object_callback_header")
                            )
                            .withBody("an_object_callback_response");
                    }
                }
            );
    }

    @Test
    public void shouldReturnErrorResponseForForwardByObjectCallback() {
        // given - same restriction as response object callbacks
        exception.expect(ClientException.class);
        exception.expectMessage(containsString("ExpectationResponseCallback and ExpectationForwardCallback is not supported by MockServer deployed as a WAR"));

        // when
        mockServerClient
            .when(
                request()
                    .withPath(calculatePath("echo"))
            )
            .forward(
                new ExpectationForwardCallback() {
                    @Override
                    public HttpRequest handle(HttpRequest httpRequest) {
                        return request()
                            .withBody("some_overridden_body")
                            .withSecure(httpRequest.isSecure());
                    }
                }
            );
    }

    @Test
    public void shouldCallbackForResponseToSpecifiedClassInTestClasspath() {
        // given - class callbacks ARE supported in a WAR: the callback class is
        // loaded by name from the test classpath
        TestClasspathTestExpectationResponseCallback.httpRequests.clear();
        TestClasspathTestExpectationResponseCallback.httpResponse = response()
            .withStatusCode(ACCEPTED_202.code())
            .withReasonPhrase(ACCEPTED_202.reasonPhrase())
            .withHeaders(
                header("x-callback", "test_callback_header")
            )
            .withBody("a_callback_response");

        // when
        mockServerClient
            .when(
                request()
                    .withPath(calculatePath("callback"))
            )
            .respond(
                callback()
                    .withCallbackClass("org.mockserver.server.TestClasspathTestExpectationResponseCallback")
            );

        // then
        // - in http: response comes from the callback, and the callback saw the original request
        assertEquals(
            response()
                .withStatusCode(ACCEPTED_202.code())
                .withReasonPhrase(ACCEPTED_202.reasonPhrase())
                .withHeaders(
                    header("x-callback", "test_callback_header")
                )
                .withBody("a_callback_response"),
            makeRequest(
                request()
                    .withPath(calculatePath("callback"))
                    .withMethod("POST")
                    .withHeaders(
                        header("X-Test", "test_headers_and_body")
                    )
                    .withBody("an_example_body_http"),
                headersToIgnore)
        );
        assertEquals(TestClasspathTestExpectationResponseCallback.httpRequests.get(0).getBody().getValue(), "an_example_body_http");
        assertEquals(TestClasspathTestExpectationResponseCallback.httpRequests.get(0).getPath().getValue(), calculatePath("callback"));

        // - in https
        assertEquals(
            response()
                .withStatusCode(ACCEPTED_202.code())
                .withReasonPhrase(ACCEPTED_202.reasonPhrase())
                .withHeaders(
                    header("x-callback", "test_callback_header")
                )
                .withBody("a_callback_response"),
            makeRequest(
                request()
                    .withSecure(true)
                    .withPath(calculatePath("callback"))
                    .withMethod("POST")
                    .withHeaders(
                        header("X-Test", "test_headers_and_body")
                    )
                    .withBody("an_example_body_https"),
                headersToIgnore)
        );
        assertEquals(TestClasspathTestExpectationResponseCallback.httpRequests.get(1).getBody().getValue(), "an_example_body_https");
        assertEquals(TestClasspathTestExpectationResponseCallback.httpRequests.get(1).getPath().getValue(), calculatePath("callback"));
    }

    @Test
    public void shouldCallbackForForwardCallbackToSpecifiedClassInTestClasspath() {
        // given - forward class callback rewrites the request and targets the echo server
        TestClasspathTestExpectationForwardCallback.httpRequests.clear();
        TestClasspathTestExpectationForwardCallback.httpRequestToReturn = request()
            .withHeaders(
                header("x-callback", "test_callback_header"),
                header("Host", "localhost:" + insecureEchoServer.getPort())
            )
            .withBody("a_callback_forward");

        // when
        mockServerClient
            .when(
                request()
                    .withPath(calculatePath("callback"))
            )
            .forward(
                callback()
                    .withCallbackClass("org.mockserver.server.TestClasspathTestExpectationForwardCallback")
            );

        // then
        // - in http: echo server reflects the rewritten request back as the response
        assertEquals(
            response()
                .withStatusCode(OK_200.code())
                .withReasonPhrase(OK_200.reasonPhrase())
                .withHeaders(
                    header("x-callback", "test_callback_header")
                )
                .withBody("a_callback_forward"),
            makeRequest(
                request()
                    .withPath(calculatePath("callback"))
                    .withMethod("POST")
                    .withHeaders(
                        header("X-Test", "test_headers_and_body")
                    )
                    .withBody("an_example_body_http"),
                headersToIgnore)
        );
        assertEquals(TestClasspathTestExpectationForwardCallback.httpRequests.get(0).getBody().getValue(), "an_example_body_http");
        assertEquals(TestClasspathTestExpectationForwardCallback.httpRequests.get(0).getPath().getValue(), calculatePath("callback"));

        // - in https
        assertEquals(
            response()
                .withStatusCode(OK_200.code())
                .withReasonPhrase(OK_200.reasonPhrase())
                .withHeaders(
                    header("x-callback", "test_callback_header")
                )
                .withBody("a_callback_forward"),
            makeRequest(
                request()
                    .withSecure(true)
                    .withPath(calculatePath("callback"))
                    .withMethod("POST")
                    .withHeaders(
                        header("X-Test", "test_headers_and_body")
                    )
                    .withBody("an_example_body_https"),
                headersToIgnore)
        );
        assertEquals(TestClasspathTestExpectationForwardCallback.httpRequests.get(1).getBody().getValue(), "an_example_body_https");
        assertEquals(TestClasspathTestExpectationForwardCallback.httpRequests.get(1).getPath().getValue(), calculatePath("callback"));
    }

    @Test
    public void shouldReturnStatus() {
        // then
        // - in http: PUT /mockserver/status reports the bound port as JSON
        assertEquals(
            response()
                .withStatusCode(OK_200.code())
                .withReasonPhrase(OK_200.reasonPhrase())
                .withHeader(CONTENT_TYPE.toString(), "application/json; charset=utf-8")
                .withBody("{" + NEW_LINE +
                    " \"ports\" : [ " + getServerPort() + " ]" + NEW_LINE +
                    "}", MediaType.JSON_UTF_8),
            makeRequest(
                request()
                    .withPath(calculatePath("mockserver/status"))
                    .withMethod("PUT"),
                headersToIgnore)
        );
        // - in https: secure port is reported instead
        assertEquals(
            response()
                .withStatusCode(OK_200.code())
                .withReasonPhrase(OK_200.reasonPhrase())
                .withHeader(CONTENT_TYPE.toString(), "application/json; charset=utf-8")
                .withBody("{" + NEW_LINE +
                    " \"ports\" : [ " + getServerSecurePort() + " ]" + NEW_LINE +
                    "}", MediaType.JSON_UTF_8),
            makeRequest(
                request()
                    .withSecure(true)
                    .withPath(calculatePath("mockserver/status"))
                    .withMethod("PUT"),
                headersToIgnore)
        );
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.storm.kafka;

import java.io.Serializable;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.TreeMap;

import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import org.apache.storm.Config;
import org.apache.storm.kafka.KafkaSpout.EmitState;
import org.apache.storm.kafka.trident.MaxMetric;
import org.apache.storm.metric.api.CombinedMetric;
import org.apache.storm.metric.api.CountMetric;
import org.apache.storm.metric.api.MeanReducer;
import org.apache.storm.metric.api.ReducedMetric;
import org.apache.storm.spout.SpoutOutputCollector;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import kafka.javaapi.consumer.SimpleConsumer;
import kafka.javaapi.message.ByteBufferMessageSet;
import kafka.message.MessageAndOffset;

/**
 * Manages the consumption state of one Kafka partition for the (pre-KafkaClient)
 * storm-kafka spout: fetches batches from the broker, tracks pending/acked/failed
 * offsets, retries failures via a pluggable {@code FailedMsgRetryManager}, and
 * periodically commits the last fully-completed offset to ZooKeeper.
 *
 * NOTE(review): this class is not thread-safe; it appears to assume single-threaded
 * access from the owning spout task — confirm against KafkaSpout's call pattern.
 */
public class PartitionManager {
    private static final Logger LOG = LoggerFactory.getLogger(PartitionManager.class);

    // Fetch-API telemetry exposed through getMetricsDataMap().
    private final CombinedMetric fetchAPILatencyMax;
    private final ReducedMetric fetchAPILatencyMean;
    private final CountMetric fetchAPICallCount;
    private final CountMetric fetchAPIMessageCount;
    // Count of messages which could not be emitted or retried because they were deleted from kafka
    private final CountMetric lostMessageCount;
    // Count of messages which were not retried because failedMsgRetryManager didn't consider offset
    // eligible for retry
    private final CountMetric messageIneligibleForRetryCount;

    // Next offset to fetch when there are no failed offsets to retry.
    private Long emittedToOffset;
    // pending key = Kafka offset, value = time at which the message was first submitted to the
    // topology
    private SortedMap<Long, Long> pending = new TreeMap<Long, Long>();
    private final FailedMsgRetryManager failedMsgRetryManager;

    // retryRecords key = Kafka offset, value = retry info for the given message
    // Last offset written to ZooKeeper by commit().
    private Long committedTo;
    // Messages fetched from the broker but not yet emitted into the topology.
    private LinkedList<MessageAndOffset> waitingToEmit = new LinkedList<MessageAndOffset>();
    private Partition partition;
    private SpoutConfig spoutConfig;
    private String topologyInstanceId;
    private SimpleConsumer consumer;
    private DynamicPartitionConnections connections;
    private ZkState state;
    private Map<String, Object> stormConf;
    private long numberFailed;
    private long numberAcked;

    /***
     * Constructor
     * @param connections
     * @param topologyInstanceId
     * @param state
     * @param stormConf
     * @param spoutConfig
     * @param id
     */
    @SuppressWarnings("unchecked")
    public PartitionManager(DynamicPartitionConnections connections, String topologyInstanceId,
                            ZkState state, Map<String, Object> stormConf, SpoutConfig spoutConfig, Partition id) {
        this.partition = id;
        this.connections = connections;
        this.spoutConfig = spoutConfig;
        this.topologyInstanceId = topologyInstanceId;
        this.consumer = connections.register(id.host, id.topic, id.partition);
        this.state = state;
        this.stormConf = stormConf;
        this.numberAcked = this.numberFailed = 0;

        // Instantiate the configured retry policy by reflection so users can plug in their own.
        try {
            failedMsgRetryManager = (FailedMsgRetryManager) Class.forName(spoutConfig.failedMsgRetryManagerClass)
                .newInstance();
            failedMsgRetryManager.prepare(spoutConfig, stormConf);
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException e) {
            throw new IllegalArgumentException(
                String.format("Failed to create an instance of <%s> from: <%s>",
                              FailedMsgRetryManager.class,
                              spoutConfig.failedMsgRetryManagerClass), e);
        }

        // Try to recover the last committed offset (and the topology that wrote it) from ZooKeeper.
        String jsonTopologyId = null;
        Long jsonOffset = null;
        String path = committedPath();
        try {
            Map<Object, Object> json = state.readJSON(path);
            LOG.info("Read partition information from: " + path + " --> " + json);
            if (json != null) {
                jsonTopologyId = (String) ((Map<Object, Object>) json.get("topology")).get("id");
                jsonOffset = (Long) json.get("offset");
            }
            // SUPPRESS CHECKSTYLE IllegalCatch
        } catch (Throwable e) {
            // Deliberately best-effort: a missing/corrupt node just falls through to config defaults.
            LOG.warn("Error reading and/or parsing at ZkNode: " + path, e);
        }

        String topic = partition.topic;
        Long currentOffset = KafkaUtils.getOffset(consumer, topic, id.partition, spoutConfig);

        if (jsonTopologyId == null || jsonOffset == null) {
            // failed to parse JSON?
            committedTo = currentOffset;
            LOG.info("No partition information found, using configuration to determine offset");
        } else if (!topologyInstanceId.equals(jsonTopologyId) && spoutConfig.ignoreZkOffsets) {
            // New topology deployment that opted out of resuming from ZK state.
            committedTo = KafkaUtils.getOffset(consumer, topic, id.partition, spoutConfig.startOffsetTime);
            LOG.info("Topology change detected and ignore zookeeper offsets set to true, "
                     + "using configuration to determine offset");
        } else {
            committedTo = jsonOffset;
            LOG.info("Read last commit offset from zookeeper: " + committedTo + "; old topology_id: "
                     + jsonTopologyId + " - new topology_id: " + topologyInstanceId);
        }

        // If the recorded offset is too far behind (or nonsensical), jump to the current offset
        // rather than replaying a huge backlog.
        if (currentOffset - committedTo > spoutConfig.maxOffsetBehind || committedTo <= 0) {
            LOG.info("Last commit offset from zookeeper: " + committedTo);
            Long lastCommittedOffset = committedTo;
            committedTo = currentOffset;
            LOG.info("Commit offset " + lastCommittedOffset + " is more than "
                     + spoutConfig.maxOffsetBehind + " behind latest offset " + currentOffset
                     + ", resetting to startOffsetTime=" + spoutConfig.startOffsetTime);
        }

        LOG.info("Starting Kafka " + consumer.host() + " " + id + " from offset " + committedTo);
        emittedToOffset = committedTo;

        fetchAPILatencyMax = new CombinedMetric(new MaxMetric());
        fetchAPILatencyMean = new ReducedMetric(new MeanReducer());
        fetchAPICallCount = new CountMetric();
        fetchAPIMessageCount = new CountMetric();
        lostMessageCount = new CountMetric();
        messageIneligibleForRetryCount = new CountMetric();
    }

    /** Snapshot-and-reset of all per-partition metrics, keyed by partition id. */
    public Map<String, Object> getMetricsDataMap() {
        Map<String, Object> ret = new HashMap<>();
        ret.put(partition + "/fetchAPILatencyMax", fetchAPILatencyMax.getValueAndReset());
        ret.put(partition + "/fetchAPILatencyMean", fetchAPILatencyMean.getValueAndReset());
        ret.put(partition + "/fetchAPICallCount", fetchAPICallCount.getValueAndReset());
        ret.put(partition + "/fetchAPIMessageCount", fetchAPIMessageCount.getValueAndReset());
        ret.put(partition + "/lostMessageCount", lostMessageCount.getValueAndReset());
        ret.put(partition + "/messageIneligibleForRetryCount",
                messageIneligibleForRetryCount.getValueAndReset());
        return ret;
    }

    //returns false if it's reached the end of current batch
    // Emits at most one Kafka message's worth of tuples per call; messages that
    // deserialize to no tuples are acked immediately and the loop advances.
    public EmitState next(SpoutOutputCollector collector) {
        if (waitingToEmit.isEmpty()) {
            fill();
        }
        while (true) {
            MessageAndOffset toEmit = waitingToEmit.pollFirst();
            if (toEmit == null) {
                return EmitState.NO_EMITTED;
            }

            Iterable<List<Object>> tups;
            if (spoutConfig.scheme instanceof MessageMetadataSchemeAsMultiScheme) {
                tups = KafkaUtils.generateTuples((MessageMetadataSchemeAsMultiScheme) spoutConfig.scheme,
                                                 toEmit.message(), partition, toEmit.offset());
            } else {
                tups = KafkaUtils.generateTuples(spoutConfig, toEmit.message(), partition.topic);
            }

            if ((tups != null) && tups.iterator().hasNext()) {
                if (!Strings.isNullOrEmpty(spoutConfig.outputStreamId)) {
                    // NOTE(review): the guard checks outputStreamId but the emit uses
                    // spoutConfig.topic as the stream id — confirm this matches the stream
                    // declared in the spout's declareOutputFields.
                    for (List<Object> tup : tups) {
                        collector.emit(spoutConfig.topic, tup, new KafkaMessageId(partition, toEmit.offset()));
                    }
                } else {
                    for (List<Object> tup : tups) {
                        collector.emit(tup, new KafkaMessageId(partition, toEmit.offset()));
                    }
                }
                break;
            } else {
                // No tuples produced for this message: treat it as processed.
                ack(toEmit.offset());
            }
        }
        if (!waitingToEmit.isEmpty()) {
            return EmitState.EMITTED_MORE_LEFT;
        } else {
            return EmitState.EMITTED_END;
        }
    }

    // Refills waitingToEmit from the broker. Failed offsets awaiting retry take
    // priority over new messages; out-of-range fetches fall back to the earliest
    // available offset and prune retry state for offsets that no longer exist.
    private void fill() {
        long start = System.currentTimeMillis();
        Long offset;

        // Are there failed tuples? If so, fetch those first.
        offset = this.failedMsgRetryManager.nextFailedMessageToRetry();
        final boolean processingNewTuples = offset == null;
        if (processingNewTuples) {
            offset = emittedToOffset;
        }

        ByteBufferMessageSet msgs = null;
        try {
            msgs = KafkaUtils.fetchMessages(spoutConfig, consumer, partition, offset);
        } catch (TopicOffsetOutOfRangeException e) {
            offset = KafkaUtils.getOffset(consumer, partition.topic, partition.partition,
                                          kafka.api.OffsetRequest.EarliestTime());
            // fetch failed, so don't update the fetch metrics

            //fix bug [STORM-643] : remove outdated failed offsets
            if (!processingNewTuples) {
                // For the case of EarliestTime it would be better to discard
                // all the failed offsets, that are earlier than actual EarliestTime
                // offset, since they are anyway not there.
                // These calls to broker API will be then saved.
                Set<Long> omitted = this.failedMsgRetryManager.clearOffsetsBefore(offset);

                // Omitted messages have not been acked and may be lost
                if (null != omitted) {
                    lostMessageCount.incrBy(omitted.size());
                }

                LOG.warn("Removing the failed offsets for {} that are out of range: {}",
                         partition, omitted);
            }

            if (offset > emittedToOffset) {
                // Everything between the old cursor and the new earliest offset is gone from Kafka.
                lostMessageCount.incrBy(offset - emittedToOffset);
                emittedToOffset = offset;
                LOG.warn("{} Using new offset: {}", partition, emittedToOffset);
            }

            return;
        }

        long millis = System.currentTimeMillis() - start;
        fetchAPILatencyMax.update(millis);
        fetchAPILatencyMean.update(millis);
        fetchAPICallCount.incr();
        if (msgs != null) {
            int numMessages = 0;

            for (MessageAndOffset msg : msgs) {
                final Long curOffset = msg.offset();
                if (curOffset < offset) {
                    // Skip any old offsets.
                    continue;
                }
                if (processingNewTuples || this.failedMsgRetryManager.shouldReEmitMsg(curOffset)) {
                    numMessages += 1;
                    if (!pending.containsKey(curOffset)) {
                        pending.put(curOffset, System.currentTimeMillis());
                    }
                    waitingToEmit.add(msg);
                    emittedToOffset = Math.max(msg.nextOffset(), emittedToOffset);
                    if (failedMsgRetryManager.shouldReEmitMsg(curOffset)) {
                        this.failedMsgRetryManager.retryStarted(curOffset);
                    }
                }
            }
            fetchAPIMessageCount.incrBy(numMessages);
        }
    }

    // Marks an offset as fully processed; drops any pending entries that have
    // fallen more than maxOffsetBehind behind the acked offset.
    public void ack(Long offset) {
        if (!pending.isEmpty() && pending.firstKey() < offset - spoutConfig.maxOffsetBehind) {
            // Too many things pending!
            pending.headMap(offset - spoutConfig.maxOffsetBehind).clear();
        }
        pending.remove(offset);
        this.failedMsgRetryManager.acked(offset);
        numberAcked++;
    }

    // Records a failed offset for retry, or abandons it if the retry policy says
    // it is no longer eligible. Throws if everything fails and nothing ever acks.
    public void fail(Long offset) {
        if (offset < emittedToOffset - spoutConfig.maxOffsetBehind) {
            LOG.info(
                "Skipping failed tuple at offset={}"
                + " because it's more than maxOffsetBehind={}"
                + " behind emittedToOffset={} for {}",
                offset,
                spoutConfig.maxOffsetBehind,
                emittedToOffset,
                partition
            );
        } else {
            LOG.debug("Failing at offset={} with pending.size()={} and emittedToOffset={} for {}",
                      offset, pending.size(), emittedToOffset, partition);
            numberFailed++;
            if (numberAcked == 0 && numberFailed > spoutConfig.maxOffsetBehind) {
                throw new RuntimeException("Too many tuple failures");
            }

            // Offset may not be considered for retry by failedMsgRetryManager
            if (this.failedMsgRetryManager.retryFurther(offset)) {
                this.failedMsgRetryManager.failed(offset);
            } else {
                // state for the offset should be cleaned up
                LOG.warn("Will not retry failed kafka offset {} further", offset);
                messageIneligibleForRetryCount.incr();
                pending.remove(offset);
                this.failedMsgRetryManager.acked(offset);
            }
        }
    }

    // Writes the last fully-completed offset (plus identifying metadata) to
    // ZooKeeper, but only when it has advanced since the previous commit.
    public void commit() {
        long lastCompletedOffset = lastCompletedOffset();
        if (committedTo != lastCompletedOffset) {
            LOG.debug("Writing last completed offset ({}) to ZK for {} for topology: {}",
                      lastCompletedOffset, partition, topologyInstanceId);
            Map<Object, Object> data = (Map<Object, Object>) ImmutableMap.builder()
                .put("topology", ImmutableMap.of("id", topologyInstanceId,
                                                 "name", stormConf.get(Config.TOPOLOGY_NAME)))
                .put("offset", lastCompletedOffset)
                .put("partition", partition.partition)
                .put("broker", ImmutableMap.of("host", partition.host.host,
                                               "port", partition.host.port))
                .put("topic", partition.topic).build();
            state.writeJSON(committedPath(), data);

            committedTo = lastCompletedOffset;
            LOG.debug("Wrote last completed offset ({}) to ZK for {} for topology: {}",
                      lastCompletedOffset, partition, topologyInstanceId);
        } else {
            LOG.debug("No new offset for {} for topology: {}", partition, topologyInstanceId);
        }
    }

    // ZK node path under which this partition's commit record lives.
    private String committedPath() {
        return spoutConfig.zkRoot + "/" + spoutConfig.id + "/" + partition.getId();
    }

    // Earliest offset that is still pending, i.e. everything before it is done.
    public long lastCompletedOffset() {
        if (pending.isEmpty()) {
            return emittedToOffset;
        } else {
            return pending.firstKey();
        }
    }

    public OffsetData getOffsetData() {
        return new OffsetData(emittedToOffset, lastCompletedOffset());
    }

    public Partition getPartition() {
        return partition;
    }

    // Final commit, then release the shared broker connection.
    public void close() {
        commit();
        connections.unregister(partition.host, partition.topic, partition.partition);
    }

    /** Message id carried on each emitted tuple so ack/fail can be routed back here. */
    static class KafkaMessageId implements Serializable {
        private static final long serialVersionUID = 4962830658778031020L;
        public Partition partition;
        public long offset;

        KafkaMessageId(Partition partition, long offset) {
            this.partition = partition;
            this.offset = offset;
        }
    }

    /** Immutable snapshot of this partition's emitted/completed offsets. */
    public static class OffsetData {
        public long latestEmittedOffset;
        public long latestCompletedOffset;

        public OffsetData(long latestEmittedOffset, long latestCompletedOffset) {
            this.latestEmittedOffset = latestEmittedOffset;
            this.latestCompletedOffset = latestCompletedOffset;
        }
    }
}
package edu.washington.nsre.util;

import java.io.File;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import com.google.gson.Gson;
//import edu.stanford.nlp.kbp.slotfilling.MultiR;
import edu.stanford.nlp.stats.ClassicCounter;
import edu.stanford.nlp.stats.Counter;
import edu.stanford.nlp.stats.Counters;
import edu.stanford.nlp.stats.IntCounter;
//import edu.uw.multir.DW;
import edu.stanford.nlp.util.Triple;

/**
 * Computes precision/recall curves and best-F1 summaries for relation-extraction
 * output, both aggregated over all relations and broken down per relation.
 * Inputs are parallel files (read line-by-line via {@code DR}) where each line is
 * a JSON map from relation label to score: one gold map and one predicted map per
 * instance. The label "NA" denotes "no relation".
 */
public class RelationExtractionPRCurve {

    /** PR points {precision, recall} over all relations, in descending-score order. */
    public List<double[]> prall;
    /** Per-relation PR curves, keyed by relation label. */
    public HashMap<String, List<double[]>> prcurveByrel = new HashMap<String, List<double[]>>();
    /** All relation labels observed in gold or predictions (excluding "NA"). */
    public Set<String> relations;
    /** {precision, recall, f1} at the best-F1 point of the aggregate curve. */
    public double[] bestF1All;
    /** Per-relation {precision, recall, f1} at each relation's best-F1 point. */
    public HashMap<String, double[]> bestF1ByRel = new HashMap<String, double[]>();
    /** Per-relation count of gold-positive instances (filled by numTruePositive). */
    public Counter<String> truePositiveByRel = new IntCounter<String>();

    /** One (gold label, predicted label, score) record for a single instance pair. */
    static class Pred {
        String gold;
        String pred;
        double score;
    }

    static Gson gson = new Gson();

    /**
     * Legacy curve computation based on the cross product of gold and predicted
     * labels per instance; retained for backwards comparison with old results.
     *
     * @param input_gold path to the gold-label JSON file
     * @param input_pred path to the predicted-label JSON file
     */
    public void prcurve_old(String input_gold, String input_pred) {
        DR drg = new DR(input_gold);
        DR drp = new DR(input_pred);
        String[] g;
        String[] p;
        List<Pred> predictions = new ArrayList<Pred>();
        Set<String> relations = new HashSet<String>();
        while ((g = drg.read()) != null) {
            p = drp.read();
            HashMap<String, Double> pred_scorer = gson.fromJson(p[0], HashMap.class);
            HashMap<String, Double> gold_scorer = gson.fromJson(g[0], HashMap.class);
            // Empty maps are normalized to explicit "NA" so the cross product below is non-empty.
            if (gold_scorer.size() == 0) {
                gold_scorer.put("NA", 1.0);
            }
            if (pred_scorer.size() == 0) {
                pred_scorer.put("NA", 0.0);
            }
            for (String grel : gold_scorer.keySet()) {
                for (String prel : pred_scorer.keySet()) {
                    double score = pred_scorer.get(prel);
                    Pred onep = new Pred();
                    onep.gold = grel;
                    onep.pred = prel;
                    onep.score = score;
                    // NA/NA pairs carry no signal and are dropped.
                    if (!onep.gold.equals("NA") || !onep.pred.equals("NA")) {
                        predictions.add(onep);
                    }
                    if (!grel.equals("NA")) {
                        relations.add(grel);
                    }
                    if (!prel.equals("NA")) {
                        relations.add(prel);
                    }
                }
            }
        }
        // Score predictions from most to least confident.
        Collections.sort(predictions, new Comparator<Pred>() {
            public int compare(Pred o1, Pred o2) {
                return Double.compare(o2.score, o1.score);
            }
        });
        prall = prTable(predictions, "ALL");
        bestF1All = getBestF1(prall);
        for (String r : relations) {
            List<double[]> pr = prTable(predictions, r);
            prcurveByrel.put(r, pr);
            double[] bestf1 = getBestF1(pr);
            bestF1ByRel.put(r, bestf1);
        }
        drg.close();
        drp.close();
    }

    /**
     * Flattens per-instance label scores into (instance index, label, score)
     * triples sorted by descending score.
     */
    private static List<Triple<Integer, String, Double>> convertToSorted(
            List<Counter<String>> predictedLabels) {
        List<Triple<Integer, String, Double>> sorted = new ArrayList<Triple<Integer, String, Double>>();
        for (int i = 0; i < predictedLabels.size(); i++) {
            for (String l : predictedLabels.get(i).keySet()) {
                double s = predictedLabels.get(i).getCount(l);
                sorted.add(new Triple<Integer, String, Double>(i, l, s));
            }
        }
        Collections.sort(sorted, new Comparator<Triple<Integer, String, Double>>() {
            public int compare(Triple<Integer, String, Double> t1, Triple<Integer, String, Double> t2) {
                if (t1.third() > t2.third())
                    return -1;
                else if (t1.third() < t2.third())
                    return 1;
                return 0;
            }
        });
        return sorted;
    }

    /**
     * Micro-averaged {precision, recall, f1} of a prediction prefix against
     * the full gold set.
     */
    private static Triple<Double, Double, Double> score(List<Triple<Integer, String, Double>> preds,
                                                        List<Set<String>> golds) {
        int total = 0, predicted = 0, correct = 0;
        for (int i = 0; i < golds.size(); i++) {
            Set<String> gold = golds.get(i);
            total += gold.size();
        }
        for (Triple<Integer, String, Double> pred : preds) {
            predicted++;
            if (golds.get(pred.first()).contains(pred.second()))
                correct++;
        }
        double p = (double) correct / (double) predicted;
        double r = (double) correct / (double) total;
        double f1 = (p != 0 && r != 0 ? 2 * p * r / (p + r) : 0);
        return new Triple<Double, Double, Double>(p, r, f1);
    }

    /**
     * Streams P/R/F1 at every distinct point of the ranked prediction list to
     * {@code os}. Debug/reporting variant; not used by the curve accessors.
     */
    private static void generatePRCurveNonProbScores(PrintStream os, List<Set<String>> goldLabels,
                                                     List<Counter<String>> predictedLabels) {
        // each triple stores: position of tuple in gold, one label for this
        // tuple, its score
        List<Triple<Integer, String, Double>> preds = convertToSorted(predictedLabels);
        double prevP = -1, prevR = -1;
        int START_OFFSET = 10; // score at least this many predictions (makes no
                               // sense to score 1...)
        for (int i = START_OFFSET; i < preds.size(); i++) {
            List<Triple<Integer, String, Double>> filteredLabels = preds.subList(0, i);
            Triple<Double, Double, Double> score = score(filteredLabels, goldLabels);
            if (score.first() != prevP || score.second() != prevR) {
                double ratio = (double) i / (double) preds.size();
                os.println(ratio + " P " + score.first() + " R " + score.second()
                           + " F1 " + score.third());
                prevP = score.first();
                prevR = score.second();
            }
        }
    }

    /**
     * Reads gold and predicted score maps from the two files and computes the
     * aggregate PR curve plus one PR curve per relation.
     *
     * @param input_gold path to the gold-label JSON file
     * @param input_pred path to the predicted-label JSON file
     */
    public void prcurve(String input_gold, String input_pred) {
        DR drg = new DR(input_gold);
        DR drp = new DR(input_pred);
        String[] g;
        String[] p;
        List<Pred> predictions = new ArrayList<Pred>();
        relations = new HashSet<String>();
        List<Set<String>> goldLabels = new ArrayList<Set<String>>();
        List<Counter<String>> predictedLabels = new ArrayList<Counter<String>>();
        while ((g = drg.read()) != null) {
            p = drp.read();
            HashMap<String, Double> pred_scorer = gson.fromJson(p[0], HashMap.class);
            HashMap<String, Double> gold_scorer = gson.fromJson(g[0], HashMap.class);
            Set<String> gls = new HashSet<String>();
            for (String gl : gold_scorer.keySet()) {
                gls.add(gl);
                relations.add(gl);
            }
            goldLabels.add(gls);
            Counter<String> preds = new ClassicCounter<String>();
            for (String rel : pred_scorer.keySet()) {
                preds.incrementCount(rel, pred_scorer.get(rel));
            }
            predictedLabels.add(preds);
        }
        this.prall = generatePRCurveNonProbScores(goldLabels, predictedLabels);
        // D.p("all");
        this.bestF1All = getBestF1(prall);
        for (String r : relations) {
            List<Set<String>> goldLabels_r = new ArrayList<Set<String>>();
            List<Counter<String>> predictedLabels_r = new ArrayList<Counter<String>>();
            for (int i = 0; i < goldLabels.size(); i++) {
                Set<String> goldLabel = goldLabels.get(i);
                Counter<String> c = predictedLabels.get(i);
                // BUG FIX: was `goldLabels.contains(r)` — a List<Set<String>> can never
                // contain a String, so the test was always false and gold-only instances
                // (false negatives with no predicted score for r) were dropped from the
                // per-relation curve. The overload prcurve(List, List) below already uses
                // the per-instance set; this now matches it.
                if (goldLabel.contains(r) || c.getCount(r) > 0) {
                    goldLabels_r.add(goldLabel);
                    predictedLabels_r.add(c);
                }
            }
            List<double[]> pr = generatePRCurveNonProbScores(goldLabels_r, predictedLabels_r);
            // D.p(r);
            prcurveByrel.put(r, pr);
            double[] bestf1 = getBestF1(pr);
            bestF1ByRel.put(r, bestf1);
        }
        drg.close();
        drp.close();
    }

    /**
     * Counts instances with at least one gold relation; also accumulates the
     * per-relation gold counts into {@link #truePositiveByRel}.
     *
     * @return number of instances whose gold set is non-empty
     */
    public int numTruePositive(List<Set<String>> goldLabels) {
        int ret = 0;
        for (Set<String> label : goldLabels) {
            if (label.size() > 0)
                ret++;
            for (String r : label) {
                this.truePositiveByRel.incrementCount(r);
            }
        }
        return ret;
    }

    /**
     * In-memory variant of {@link #prcurve(String, String)} operating on
     * already-parsed gold sets and predicted score counters.
     */
    public void prcurve(List<Set<String>> goldLabels, List<Counter<String>> predictedLabels) {
        relations = new HashSet<String>();
        for (Set<String> gls : goldLabels) {
            for (String r : gls) {
                relations.add(r);
            }
        }
        this.prall = generatePRCurveNonProbScores(goldLabels, predictedLabels);
        this.bestF1All = getBestF1(prall);
        for (String r : relations) {
            List<Set<String>> goldLabels_r = new ArrayList<Set<String>>();
            List<Counter<String>> predictedLabels_r = new ArrayList<Counter<String>>();
            for (int i = 0; i < goldLabels.size(); i++) {
                Set<String> goldLabel = goldLabels.get(i);
                Counter<String> c = predictedLabels.get(i);
                // Project each instance down to just relation r (gold and predicted)
                // so the per-relation curve ignores other labels entirely.
                Set<String> goldLabel_onlyr = new HashSet<String>();
                Counter<String> predLabel_onlyr = new ClassicCounter<String>();
                if (goldLabel.contains(r)) {
                    goldLabel_onlyr.add(r);
                }
                if (c.getCount(r) > 0) {
                    predLabel_onlyr.incrementCount(r, c.getCount(r));
                }
                if (goldLabel.contains(r) || c.getCount(r) > 0) {
                    goldLabels_r.add(goldLabel_onlyr);
                    predictedLabels_r.add(predLabel_onlyr);
                }
            }
            List<double[]> pr = generatePRCurveNonProbScores(goldLabels_r, predictedLabels_r);
            prcurveByrel.put(r, pr);
            double[] bestf1 = getBestF1(pr);
            bestF1ByRel.put(r, bestf1);
        }
    }

    /**
     * Builds the ranked PR curve: predictions are sorted by descending score and
     * each one appended as a true or false positive; recall denominator is the
     * total number of gold (instance, label) pairs.
     *
     * @return list of {precision, recall} points, one per ranked prediction
     */
    public static List<double[]> generatePRCurveNonProbScores(
            List<Set<String>> goldLabels,
            List<Counter<String>> predictedLabels) {
        List<double[]> ret = new ArrayList<double[]>();
        // Encode each gold (instance, label) pair as "index\tlabel" for O(1) lookup.
        HashSet<String> golds = new HashSet<String>();
        for (int i = 0; i < goldLabels.size(); i++) {
            Set<String> one = goldLabels.get(i);
            for (String r : one) {
                golds.add(i + "\t" + r);
            }
        }
        Counter<String> preds = new ClassicCounter<String>();
        for (int i = 0; i < predictedLabels.size(); i++) {
            Counter<String> one = predictedLabels.get(i);
            for (String r : one.keySet()) {
                preds.setCount(i + "\t" + r, one.getCount(r));
            }
        }
        List<String> predSorted = Counters.toSortedList(preds);
        double truepos = 0, falsepos = 0, falseneg = 0;
        for (int i = 0; i < predSorted.size(); i++) {
            String a = predSorted.get(i);
            if (golds.contains(a)) {
                truepos++;
            } else {
                falsepos++;
            }
            double precision = truepos * 1.0 / (truepos + falsepos);
            double recall = truepos * 1.0 / golds.size();
            ret.add(new double[] { precision, recall });
            // D.p( precision, recall, a);
        }
        return ret;
    }

    /**
     * PR curve for a single target relation (or "ALL") over pre-sorted
     * cross-product predictions from {@link #prcurve_old}.
     */
    public static List<double[]> prTable(List<Pred> sorted_preds, String targetRel) {
        int totalGold = 0;
        for (Pred pred : sorted_preds) {
            if (pred.gold.equals(targetRel) || targetRel.equals("ALL")) {
                totalGold++;
            }
        }
        List<double[]> prtable = new ArrayList<double[]>();
        int truepos = 0, falsepos = 0;
        for (Pred pred : sorted_preds) {
            if (pred.pred.equals(targetRel) || pred.gold.equals(targetRel) || targetRel.equals("ALL")) {
                if (pred.pred.equals(pred.gold) && !pred.pred.equals("NA")) {
                    truepos++;
                    prtable.add(new double[] { truepos * 1.0 / (truepos + falsepos),
                                               truepos * 1.0 / totalGold });
                } else if (!pred.pred.equals(pred.gold) && !pred.pred.equals("NA")) {
                    falsepos++;
                    prtable.add(new double[] { truepos * 1.0 / (truepos + falsepos),
                                               truepos * 1.0 / totalGold });
                }
            } else {
                // do not consider this point
            }
        }
        return prtable;
    }

    /**
     * {precision, recall, f1} at the last point of the curve (i.e. scoring every
     * prediction). Returns zeros for an empty curve.
     * NOTE(review): if both precision and recall are 0 at the final point, f1 is
     * NaN (0/0) — confirm downstream consumers tolerate that.
     */
    public static double[] getFinalF1(List<double[]> prtable) {
        double[] ret = new double[3];
        if (prtable.size() > 0) {
            double[] pr = prtable.get(prtable.size() - 1);
            double f1 = 2 * pr[0] * pr[1] / (pr[0] + pr[1]);
            ret[0] = pr[0];
            ret[1] = pr[1];
            ret[2] = f1;
        }
        return ret;
    }

    /**
     * {precision, recall, f1} at the curve point with maximal F1.
     * NaN points (0/0) never beat the running best, so they are skipped.
     */
    public static double[] getBestF1(List<double[]> prtable) {
        double[] best = new double[3];
        for (double[] pr : prtable) {
            double f1 = 2 * pr[0] * pr[1] / (pr[0] + pr[1]);
            if (f1 > best[2]) {
                best[0] = pr[0];
                best[1] = pr[1];
                best[2] = f1;
            }
        }
        return best;
    }

    /**
     * Trapezoidal area under the PR curve (average precision approximation).
     */
    public static double getAveragePrecision(List<double[]> prtable) {
        double sum = 0;
        for (int i = 0; i < prtable.size(); i++) {
            double[] pr = prtable.get(i);
            if (i >= 1) {
                double[] pr2 = prtable.get(i - 1);
                sum += ((pr[0] + pr2[0]) / 2) * (pr[1] - pr2[1]);
            } else {
                sum += pr[0] * pr[1];
            }
        }
        return sum;
    }

    /**
     * CLI entry point: args = {goldFile, predFile, outputDir}. Writes
     * outputDir/summary plus one ".pr" file per relation and one for "ALL".
     */
    public static void main(String[] args) {
        RelationExtractionPRCurve repr = new RelationExtractionPRCurve();
        repr.prcurve(args[0], args[1]);
        for (String r : repr.bestF1ByRel.keySet()) {
            // D.p(r, gson.toJson(repr.bestF1ByRel.get(r)));
        }
        String outputdir = args[2];
        if (!new File(outputdir).exists()) {
            (new File(outputdir)).mkdir();
        }
        DW dwreport = new DW(outputdir + "/summary");
        {
            D.p("ALL", gson.toJson(repr.bestF1All));
            DW dw = new DW(outputdir + "/all.pr");
            for (double[] pr : repr.prall) {
                double f1 = 2 * pr[0] * pr[1] / (pr[0] + pr[1]);
                dw.write(pr[0], pr[1], f1);
            }
            dw.close();
            // Static helpers called via the class (were instance-qualified before; same behavior).
            double[] finalf1 = RelationExtractionPRCurve.getFinalF1(repr.prall);
            double[] bestf1 = RelationExtractionPRCurve.getBestF1(repr.prall);
            double averageprec = RelationExtractionPRCurve.getAveragePrecision(repr.prall);
            dwreport.write("ALL", finalf1[2], bestf1[2], averageprec);
        }
        {
            for (String rel : repr.prcurveByrel.keySet()) {
                List<double[]> prs = repr.prcurveByrel.get(rel);
                // Sanitize the relation name so it is a safe file name.
                String filename = rel.replaceAll("\\W+", "_");
                DW dw = new DW(outputdir + "/" + filename + ".pr");
                for (double[] pr : prs) {
                    double f1 = 2 * pr[0] * pr[1] / (pr[0] + pr[1]);
                    dw.write(pr[0], pr[1], f1);
                }
                dw.close();
                double[] finalf1 = RelationExtractionPRCurve.getFinalF1(prs);
                double[] bestf1 = RelationExtractionPRCurve.getBestF1(prs);
                double averageprec = RelationExtractionPRCurve.getAveragePrecision(prs);
                dwreport.write(rel, finalf1[2], bestf1[2], averageprec);
            }
        }
        dwreport.close();
    }
}
/* Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ package org.apache.cordova.engine; import java.util.Arrays; import android.annotation.TargetApi; import android.app.Activity; import android.content.Context; import android.content.ActivityNotFoundException; import android.content.Intent; import android.net.Uri; import android.os.Build; import android.view.Gravity; import android.view.View; import android.view.ViewGroup.LayoutParams; import android.webkit.ConsoleMessage; import android.webkit.GeolocationPermissions.Callback; import android.webkit.JsPromptResult; import android.webkit.JsResult; import android.webkit.ValueCallback; import android.webkit.WebChromeClient; import android.webkit.WebStorage; import android.webkit.WebView; import android.webkit.PermissionRequest; import android.widget.LinearLayout; import android.widget.ProgressBar; import android.widget.RelativeLayout; import org.apache.cordova.CordovaDialogsHelper; import org.apache.cordova.CordovaPlugin; import org.apache.cordova.LOG; /** * This class is the WebChromeClient that implements callbacks for our web view. * The kind of callbacks that happen here are on the chrome outside the document, * such as onCreateWindow(), onConsoleMessage(), onProgressChanged(), etc. 
Related to but different than CordovaWebViewClient.
 */
public class SystemWebChromeClient extends WebChromeClient {

    private static final int FILECHOOSER_RESULTCODE = 5173;
    private static final String LOG_TAG = "SystemWebChromeClient";
    // Database quota granted unconditionally in onExceededDatabaseQuota (100 MB).
    private long MAX_QUOTA = 100 * 1024 * 1024;
    protected final SystemWebViewEngine parentEngine;

    // the video progress view
    private View mVideoProgressView;

    private CordovaDialogsHelper dialogsHelper;
    private Context appContext;

    // NOTE(review): these two fields are never read or written anywhere in
    // this class as shown here -- possibly leftovers.
    private CustomViewCallback mCustomViewCallback;
    private View mCustomView;

    public SystemWebChromeClient(SystemWebViewEngine parentEngine) {
        this.parentEngine = parentEngine;
        appContext = parentEngine.webView.getContext();
        dialogsHelper = new CordovaDialogsHelper(appContext);
    }

    /**
     * Tell the client to display a javascript alert dialog.
     * Always returns true: the native dialog replaces the WebView's own.
     */
    @Override
    public boolean onJsAlert(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showAlert(message, new CordovaDialogsHelper.Result() {
            @Override
            public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a confirm dialog to the user.
     * Always returns true: the native dialog replaces the WebView's own.
     */
    @Override
    public boolean onJsConfirm(WebView view, String url, String message, final JsResult result) {
        dialogsHelper.showConfirm(message, new CordovaDialogsHelper.Result() {
            @Override
            public void gotResult(boolean success, String value) {
                if (success) {
                    result.confirm();
                } else {
                    result.cancel();
                }
            }
        });
        return true;
    }

    /**
     * Tell the client to display a prompt dialog to the user.
     * If the client returns true, WebView will assume that the client will
     * handle the prompt dialog and call the appropriate JsPromptResult method.
     *
     * Since we are hacking prompts for our own purposes, we should not be using them for
     * this purpose, perhaps we should hack console.log to do this instead!
     */
    @Override
    public boolean onJsPrompt(WebView view, String origin, String message, String defaultValue, final JsPromptResult result) {
        // Unlike the @JavascriptInterface bridge, this method is always called on the UI thread.
        // First give the Cordova JS bridge a chance to consume the prompt.
        String handledRet = parentEngine.bridge.promptOnJsPrompt(origin, message, defaultValue);
        if (handledRet != null) {
            result.confirm(handledRet);
        } else {
            // Not a bridge message: show a real prompt dialog.
            dialogsHelper.showPrompt(message, defaultValue, new CordovaDialogsHelper.Result() {
                @Override
                public void gotResult(boolean success, String value) {
                    if (success) {
                        result.confirm(value);
                    } else {
                        result.cancel();
                    }
                }
            });
        }
        return true;
    }

    /**
     * Handle database quota exceeded notification.
     * Grants up to MAX_QUOTA without prompting the user.
     */
    @Override
    public void onExceededDatabaseQuota(String url, String databaseIdentifier, long currentQuota, long estimatedSize, long totalUsedQuota, WebStorage.QuotaUpdater quotaUpdater)
    {
        LOG.d(LOG_TAG, "onExceededDatabaseQuota estimatedSize: %d currentQuota: %d totalUsedQuota: %d", estimatedSize, currentQuota, totalUsedQuota);
        quotaUpdater.updateQuota(MAX_QUOTA);
    }

    // console.log in api level 7: http://developer.android.com/guide/developing/debug-tasks.html
    // Expect this to not compile in a future Android release!
    @SuppressWarnings("deprecation")
    @Override
    public void onConsoleMessage(String message, int lineNumber, String sourceID)
    {
        //This is only for Android 2.1
        if (Build.VERSION.SDK_INT == Build.VERSION_CODES.ECLAIR_MR1)
        {
            LOG.d(LOG_TAG, "%s: Line %d : %s", sourceID, lineNumber, message);
            super.onConsoleMessage(message, lineNumber, sourceID);
        }
    }

    @TargetApi(8)
    @Override
    public boolean onConsoleMessage(ConsoleMessage consoleMessage)
    {
        if (consoleMessage.message() != null)
            LOG.d(LOG_TAG, "%s: Line %d : %s" , consoleMessage.sourceId() , consoleMessage.lineNumber(), consoleMessage.message());
        return super.onConsoleMessage(consoleMessage);
    }

    @Override
    /**
     * Instructs the client to show a prompt to ask the user to set the Geolocation permission state for the specified origin.
     *
     * This also checks for the Geolocation Plugin and requests permission from the application to use Geolocation.
     *
     * @param origin
     * @param callback
     */
    public void onGeolocationPermissionsShowPrompt(String origin, Callback callback) {
        super.onGeolocationPermissionsShowPrompt(origin, callback);
        // Grant the WebView's geolocation prompt immediately (retain=false,
        // so the grant is not remembered across prompts).
        callback.invoke(origin, true, false);
        //Get the plugin, it should be loaded
        // ("hasPermisssion" sic -- that is the CordovaPlugin API's spelling.)
        CordovaPlugin geolocation = parentEngine.pluginManager.getPlugin("Geolocation");
        if (geolocation != null && !geolocation.hasPermisssion())
        {
            geolocation.requestPermissions(0);
        }
    }

    // API level 7 is required for this, see if we could lower this using something else
    @Override
    public void onShowCustomView(View view, CustomViewCallback callback) {
        // Delegate full-screen custom views (e.g. <video>) to the web view.
        parentEngine.getCordovaWebView().showCustomView(view, callback);
    }

    @Override
    public void onHideCustomView() {
        parentEngine.getCordovaWebView().hideCustomView();
    }

    @Override
    /**
     * Ask the host application for a custom progress view to show while
     * a <video> is loading.
     * @return View The progress view.
     */
    public View getVideoLoadingProgressView() {
        // Lazily build and cache the loading view on first use.
        if (mVideoProgressView == null) {
            // Create a new Loading view programmatically.

            // create the linear layout
            LinearLayout layout = new LinearLayout(parentEngine.getView().getContext());
            layout.setOrientation(LinearLayout.VERTICAL);
            RelativeLayout.LayoutParams layoutParams = new RelativeLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            layoutParams.addRule(RelativeLayout.CENTER_IN_PARENT);
            layout.setLayoutParams(layoutParams);
            // the progress bar
            ProgressBar bar = new ProgressBar(parentEngine.getView().getContext());
            LinearLayout.LayoutParams barLayoutParams = new LinearLayout.LayoutParams(LayoutParams.WRAP_CONTENT, LayoutParams.WRAP_CONTENT);
            barLayoutParams.gravity = Gravity.CENTER;
            bar.setLayoutParams(barLayoutParams);
            layout.addView(bar);

            mVideoProgressView = layout;
        }
        return mVideoProgressView;
    }

    // <input type=file> support:
    // openFileChooser() is for pre KitKat and in KitKat mr1 (it's known broken in KitKat).
    // For Lollipop, we use onShowFileChooser().
    public void openFileChooser(ValueCallback<Uri> uploadMsg) {
        this.openFileChooser(uploadMsg, "*/*");
    }

    public void openFileChooser( ValueCallback<Uri> uploadMsg, String acceptType ) {
        this.openFileChooser(uploadMsg, acceptType, null);
    }

    public void openFileChooser(final ValueCallback<Uri> uploadMsg, String acceptType, String capture)
    {
        // NOTE(review): acceptType and capture are ignored -- the intent is
        // always built with "*/*". Confirm whether that is intentional.
        Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
        intent.addCategory(Intent.CATEGORY_OPENABLE);
        intent.setType("*/*");
        parentEngine.cordova.startActivityForResult(new CordovaPlugin() {
            @Override
            public void onActivityResult(int requestCode, int resultCode, Intent intent) {
                // Null result when the user cancelled or no data came back.
                Uri result = intent == null || resultCode != Activity.RESULT_OK ? null : intent.getData();
                LOG.d(LOG_TAG, "Receive file chooser URL: " + result);
                uploadMsg.onReceiveValue(result);
            }
        }, intent, FILECHOOSER_RESULTCODE);
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public boolean onShowFileChooser(WebView webView, final ValueCallback<Uri[]> filePathsCallback, final FileChooserParams fileChooserParams) {
        Intent intent = fileChooserParams.createIntent();
        try {
            parentEngine.cordova.startActivityForResult(new CordovaPlugin() {
                @Override
                public void onActivityResult(int requestCode, int resultCode, Intent intent) {
                    Uri[] result = FileChooserParams.parseResult(resultCode, intent);
                    LOG.d(LOG_TAG, "Receive file chooser URL: " + result);
                    filePathsCallback.onReceiveValue(result);
                }
            }, intent, FILECHOOSER_RESULTCODE);
        } catch (ActivityNotFoundException e) {
            // NOTE(review): the message string is passed in LOG.w's tag
            // position -- confirm this is the intended overload.
            LOG.w("No activity found to handle file chooser intent.", e);
            // Per the onShowFileChooser contract, the callback must still fire.
            filePathsCallback.onReceiveValue(null);
        }
        return true;
    }

    @TargetApi(Build.VERSION_CODES.LOLLIPOP)
    @Override
    public void onPermissionRequest(final PermissionRequest request) {
        LOG.d(LOG_TAG, "onPermissionRequest: " + Arrays.toString(request.getResources()));
        // Grants every requested web resource (camera, mic, ...) without
        // prompting the user.
        request.grant(request.getResources());
    }

    public void destroyLastDialog(){
        dialogsHelper.destroyLastDialog();
    }
}
// // This file was generated by the JavaTM Architecture for XML Binding(JAXB) Reference Implementation, v2.2.11 // See <a href="http://java.sun.com/xml/jaxb">http://java.sun.com/xml/jaxb</a> // Any modifications to this file will be lost upon recompilation of the source schema. // Generated on: 2015.09.10 at 11:19:43 PM PDT // package net.distributary.tahseen.awis.generated; import javax.xml.bind.JAXBElement; import javax.xml.bind.annotation.XmlElementDecl; import javax.xml.bind.annotation.XmlRegistry; import javax.xml.namespace.QName; /** * This object contains factory methods for each * Java content interface and Java element interface * generated in the net.distributary.tahseen.awis.generated package. * <p>An ObjectFactory allows you to programatically * construct new instances of the Java representation * for XML content. The Java representation of XML * content can consist of schema derived interfaces * and classes representing the binding of schema * type definitions, element declarations and model * groups. Factory methods for each of these are * provided in this class. 
* */ @XmlRegistry public class ObjectFactory { private final static QName _Result_QNAME = new QName("http://alexa.amazonaws.com/doc/2005-10-05/", "Result"); /** * Create a new ObjectFactory that can be used to create new instances of schema derived classes for package: net.distributary.tahseen.awis.generated * */ public ObjectFactory() { } /** * Create an instance of {@link HTTPHeaders } * */ public HTTPHeaders createHTTPHeaders() { return new HTTPHeaders(); } /** * Create an instance of {@link net.distributary.tahseen.awis.generated.Arguments } * */ public net.distributary.tahseen.awis.generated.Arguments createArguments() { return new net.distributary.tahseen.awis.generated.Arguments(); } /** * Create an instance of {@link net.distributary.tahseen.awis.generated.Errors } * */ public net.distributary.tahseen.awis.generated.Errors createErrors() { return new net.distributary.tahseen.awis.generated.Errors(); } /** * Create an instance of {@link CategoryBrowseResponse } * */ public CategoryBrowseResponse createCategoryBrowseResponse() { return new CategoryBrowseResponse(); } /** * Create an instance of {@link CategoryListingsResponse } * */ public CategoryListingsResponse createCategoryListingsResponse() { return new CategoryListingsResponse(); } /** * Create an instance of {@link CrawlResponse } * */ public CrawlResponse createCrawlResponse() { return new CrawlResponse(); } /** * Create an instance of {@link SitesLinkingInResponse } * */ public SitesLinkingInResponse createSitesLinkingInResponse() { return new SitesLinkingInResponse(); } /** * Create an instance of {@link TrafficHistoryResponse } * */ public TrafficHistoryResponse createTrafficHistoryResponse() { return new TrafficHistoryResponse(); } /** * Create an instance of {@link UrlInfoResponse } * */ public UrlInfoResponse createUrlInfoResponse() { return new UrlInfoResponse(); } /** * Create an instance of {@link WebMapResponse } * */ public WebMapResponse createWebMapResponse() { return new 
WebMapResponse(); } /** * Create an instance of {@link OperationInformation } * */ public OperationInformation createOperationInformation() { return new OperationInformation(); } /** * Create an instance of {@link ResponseGroupInformation } * */ public ResponseGroupInformation createResponseGroupInformation() { return new ResponseGroupInformation(); } /** * Create an instance of {@link Alexa } * */ public Alexa createAlexa() { return new Alexa(); } /** * Create an instance of {@link ContributingSiteType } * */ public ContributingSiteType createContributingSiteType() { return new ContributingSiteType(); } /** * Create an instance of {@link WebMapSubType } * */ public WebMapSubType createWebMapSubType() { return new WebMapSubType(); } /** * Create an instance of {@link WebMapSubType.Results } * */ public WebMapSubType.Results createWebMapSubTypeResults() { return new WebMapSubType.Results(); } /** * Create an instance of {@link UrlServiceType } * */ public UrlServiceType createUrlServiceType() { return new UrlServiceType(); } /** * Create an instance of {@link UsageStatisticType } * */ public UsageStatisticType createUsageStatisticType() { return new UsageStatisticType(); } /** * Create an instance of {@link PhysicalAddressType } * */ public PhysicalAddressType createPhysicalAddressType() { return new PhysicalAddressType(); } /** * Create an instance of {@link TrafficHistoryType } * */ public TrafficHistoryType createTrafficHistoryType() { return new TrafficHistoryType(); } /** * Create an instance of {@link TrafficHistoryType.HistoricalData } * */ public TrafficHistoryType.HistoricalData createTrafficHistoryTypeHistoricalData() { return new TrafficHistoryType.HistoricalData(); } /** * Create an instance of {@link TrafficHistoryType.HistoricalData.Data } * */ public TrafficHistoryType.HistoricalData.Data createTrafficHistoryTypeHistoricalDataData() { return new TrafficHistoryType.HistoricalData.Data(); } /** * Create an instance of {@link TrafficDataType } * */ 
public TrafficDataType createTrafficDataType() { return new TrafficDataType(); } /** * Create an instance of {@link TrafficDataType.RankByCity } * */ public TrafficDataType.RankByCity createTrafficDataTypeRankByCity() { return new TrafficDataType.RankByCity(); } /** * Create an instance of {@link TrafficDataType.RankByCity.City } * */ public TrafficDataType.RankByCity.City createTrafficDataTypeRankByCityCity() { return new TrafficDataType.RankByCity.City(); } /** * Create an instance of {@link TrafficDataType.RankByCity.City.Contribution } * */ public TrafficDataType.RankByCity.City.Contribution createTrafficDataTypeRankByCityCityContribution() { return new TrafficDataType.RankByCity.City.Contribution(); } /** * Create an instance of {@link TrafficDataType.RankByCountry } * */ public TrafficDataType.RankByCountry createTrafficDataTypeRankByCountry() { return new TrafficDataType.RankByCountry(); } /** * Create an instance of {@link TrafficDataType.RankByCountry.Country } * */ public TrafficDataType.RankByCountry.Country createTrafficDataTypeRankByCountryCountry() { return new TrafficDataType.RankByCountry.Country(); } /** * Create an instance of {@link SitesLinkingInType } * */ public SitesLinkingInType createSitesLinkingInType() { return new SitesLinkingInType(); } /** * Create an instance of {@link RelatedType } * */ public RelatedType createRelatedType() { return new RelatedType(); } /** * Create an instance of {@link RelatedType.Categories } * */ public RelatedType.Categories createRelatedTypeCategories() { return new RelatedType.Categories(); } /** * Create an instance of {@link CrawlType } * */ public CrawlType createCrawlType() { return new CrawlType(); } /** * Create an instance of {@link CrawlType.Index } * */ public CrawlType.Index createCrawlTypeIndex() { return new CrawlType.Index(); } /** * Create an instance of {@link CrawlType.MetaData } * */ public CrawlType.MetaData createCrawlTypeMetaData() { return new CrawlType.MetaData(); } /** * Create an 
instance of {@link CrawlType.MetaData.Links } * */ public CrawlType.MetaData.Links createCrawlTypeMetaDataLinks() { return new CrawlType.MetaData.Links(); } /** * Create an instance of {@link CrawlType.MetaData.OtherUrls } * */ public CrawlType.MetaData.OtherUrls createCrawlTypeMetaDataOtherUrls() { return new CrawlType.MetaData.OtherUrls(); } /** * Create an instance of {@link ContactInfoType } * */ public ContactInfoType createContactInfoType() { return new ContactInfoType(); } /** * Create an instance of {@link RequestType } * */ public RequestType createRequestType() { return new RequestType(); } /** * Create an instance of {@link RequestType.Arguments } * */ public RequestType.Arguments createRequestTypeArguments() { return new RequestType.Arguments(); } /** * Create an instance of {@link WebMapRequest } * */ public WebMapRequest createWebMapRequest() { return new WebMapRequest(); } /** * Create an instance of {@link UrlInfoRequest } * */ public UrlInfoRequest createUrlInfoRequest() { return new UrlInfoRequest(); } /** * Create an instance of {@link TrafficHistoryRequest } * */ public TrafficHistoryRequest createTrafficHistoryRequest() { return new TrafficHistoryRequest(); } /** * Create an instance of {@link SitesLinkingInRequest } * */ public SitesLinkingInRequest createSitesLinkingInRequest() { return new SitesLinkingInRequest(); } /** * Create an instance of {@link CrawlRequest } * */ public CrawlRequest createCrawlRequest() { return new CrawlRequest(); } /** * Create an instance of {@link CategoryListingsRequest } * */ public CategoryListingsRequest createCategoryListingsRequest() { return new CategoryListingsRequest(); } /** * Create an instance of {@link CategoryBrowseRequest } * */ public CategoryBrowseRequest createCategoryBrowseRequest() { return new CategoryBrowseRequest(); } /** * Create an instance of {@link MultiOperation } * */ public MultiOperation createMultiOperation() { return new MultiOperation(); } /** * Create an instance of {@link 
CategoryBrowse } * */ public CategoryBrowse createCategoryBrowse() { return new CategoryBrowse(); } /** * Create an instance of {@link CategoryListings } * */ public CategoryListings createCategoryListings() { return new CategoryListings(); } /** * Create an instance of {@link Crawl } * */ public Crawl createCrawl() { return new Crawl(); } /** * Create an instance of {@link SitesLinkingIn } * */ public SitesLinkingIn createSitesLinkingIn() { return new SitesLinkingIn(); } /** * Create an instance of {@link TrafficHistory } * */ public TrafficHistory createTrafficHistory() { return new TrafficHistory(); } /** * Create an instance of {@link UrlInfo } * */ public UrlInfo createUrlInfo() { return new UrlInfo(); } /** * Create an instance of {@link WebMap } * */ public WebMap createWebMap() { return new WebMap(); } /** * Create an instance of {@link MultiOperationResponse } * */ public MultiOperationResponse createMultiOperationResponse() { return new MultiOperationResponse(); } /** * Create an instance of {@link OperationRequest } * */ public OperationRequest createOperationRequest() { return new OperationRequest(); } /** * Create an instance of {@link HTTPHeaders.Header } * */ public HTTPHeaders.Header createHTTPHeadersHeader() { return new HTTPHeaders.Header(); } /** * Create an instance of {@link net.distributary.tahseen.awis.generated.Arguments.Argument } * */ public net.distributary.tahseen.awis.generated.Arguments.Argument createArgumentsArgument() { return new net.distributary.tahseen.awis.generated.Arguments.Argument(); } /** * Create an instance of {@link net.distributary.tahseen.awis.generated.Errors.Error } * */ public net.distributary.tahseen.awis.generated.Errors.Error createErrorsError() { return new net.distributary.tahseen.awis.generated.Errors.Error(); } /** * Create an instance of {@link CategoryBrowseResponse.Response } * */ public CategoryBrowseResponse.Response createCategoryBrowseResponseResponse() { return new CategoryBrowseResponse.Response(); } 
/** * Create an instance of {@link CategoryListingsResponse.Response } * */ public CategoryListingsResponse.Response createCategoryListingsResponseResponse() { return new CategoryListingsResponse.Response(); } /** * Create an instance of {@link CrawlResponse.Response } * */ public CrawlResponse.Response createCrawlResponseResponse() { return new CrawlResponse.Response(); } /** * Create an instance of {@link SitesLinkingInResponse.Response } * */ public SitesLinkingInResponse.Response createSitesLinkingInResponseResponse() { return new SitesLinkingInResponse.Response(); } /** * Create an instance of {@link TrafficHistoryResponse.Response } * */ public TrafficHistoryResponse.Response createTrafficHistoryResponseResponse() { return new TrafficHistoryResponse.Response(); } /** * Create an instance of {@link UrlInfoResponse.Response } * */ public UrlInfoResponse.Response createUrlInfoResponseResponse() { return new UrlInfoResponse.Response(); } /** * Create an instance of {@link WebMapResponse.Response } * */ public WebMapResponse.Response createWebMapResponseResponse() { return new WebMapResponse.Response(); } /** * Create an instance of {@link Information } * */ public Information createInformation() { return new Information(); } /** * Create an instance of {@link Request } * */ public Request createRequest() { return new Request(); } /** * Create an instance of {@link OperationInformation.RequiredParameters } * */ public OperationInformation.RequiredParameters createOperationInformationRequiredParameters() { return new OperationInformation.RequiredParameters(); } /** * Create an instance of {@link OperationInformation.AvailableParameters } * */ public OperationInformation.AvailableParameters createOperationInformationAvailableParameters() { return new OperationInformation.AvailableParameters(); } /** * Create an instance of {@link OperationInformation.DefaultResponseGroups } * */ public OperationInformation.DefaultResponseGroups 
createOperationInformationDefaultResponseGroups() { return new OperationInformation.DefaultResponseGroups(); } /** * Create an instance of {@link OperationInformation.AvailableResponseGroups } * */ public OperationInformation.AvailableResponseGroups createOperationInformationAvailableResponseGroups() { return new OperationInformation.AvailableResponseGroups(); } /** * Create an instance of {@link ResponseGroupInformation.ValidOperations } * */ public ResponseGroupInformation.ValidOperations createResponseGroupInformationValidOperations() { return new ResponseGroupInformation.ValidOperations(); } /** * Create an instance of {@link ResponseGroupInformation.Elements } * */ public ResponseGroupInformation.Elements createResponseGroupInformationElements() { return new ResponseGroupInformation.Elements(); } /** * Create an instance of {@link CategoryBrowseResult } * */ public CategoryBrowseResult createCategoryBrowseResult() { return new CategoryBrowseResult(); } /** * Create an instance of {@link CategoryBrowseType } * */ public CategoryBrowseType createCategoryBrowseType() { return new CategoryBrowseType(); } /** * Create an instance of {@link CategoryListingsType } * */ public CategoryListingsType createCategoryListingsType() { return new CategoryListingsType(); } /** * Create an instance of {@link ContentDataType } * */ public ContentDataType createContentDataType() { return new ContentDataType(); } /** * Create an instance of {@link Alexa.WebMapData } * */ public Alexa.WebMapData createAlexaWebMapData() { return new Alexa.WebMapData(); } /** * Create an instance of {@link CategoryListingsResult } * */ public CategoryListingsResult createCategoryListingsResult() { return new CategoryListingsResult(); } /** * Create an instance of {@link CrawlResult } * */ public CrawlResult createCrawlResult() { return new CrawlResult(); } /** * Create an instance of {@link SitesLinkingInResult } * */ public SitesLinkingInResult createSitesLinkingInResult() { return new 
SitesLinkingInResult(); } /** * Create an instance of {@link TrafficHistoryResult } * */ public TrafficHistoryResult createTrafficHistoryResult() { return new TrafficHistoryResult(); } /** * Create an instance of {@link UrlInfoResult } * */ public UrlInfoResult createUrlInfoResult() { return new UrlInfoResult(); } /** * Create an instance of {@link WebMapResult } * */ public WebMapResult createWebMapResult() { return new WebMapResult(); } /** * Create an instance of {@link ResponseStatus } * */ public ResponseStatus createResponseStatus() { return new ResponseStatus(); } /** * Create an instance of {@link AlexaBatch } * */ public AlexaBatch createAlexaBatch() { return new AlexaBatch(); } /** * Create an instance of {@link AttributeType } * */ public AttributeType createAttributeType() { return new AttributeType(); } /** * Create an instance of {@link ErrorType } * */ public ErrorType createErrorType() { return new ErrorType(); } /** * Create an instance of {@link GenericDataType } * */ public GenericDataType createGenericDataType() { return new GenericDataType(); } /** * Create an instance of {@link LanguageType } * */ public LanguageType createLanguageType() { return new LanguageType(); } /** * Create an instance of {@link PhoneNumberType } * */ public PhoneNumberType createPhoneNumberType() { return new PhoneNumberType(); } /** * Create an instance of {@link RelatedLinkType } * */ public RelatedLinkType createRelatedLinkType() { return new RelatedLinkType(); } /** * Create an instance of {@link SpeedType } * */ public SpeedType createSpeedType() { return new SpeedType(); } /** * Create an instance of {@link TrafficStatType } * */ public TrafficStatType createTrafficStatType() { return new TrafficStatType(); } /** * Create an instance of {@link CategoriesType } * */ public CategoriesType createCategoriesType() { return new CategoriesType(); } /** * Create an instance of {@link CategoryType } * */ public CategoryType createCategoryType() { return new 
CategoryType(); }

    // NOTE(review): this ObjectFactory is JAXB-generated code (see the
    // @XmlElementDecl binding below). Prefer regenerating from the schema
    // over editing the factory methods by hand.

    /**
     * Create an instance of {@link ListingsType }
     * 
     */
    public ListingsType createListingsType() {
        return new ListingsType();
    }

    /**
     * Create an instance of {@link ListingType }
     * 
     */
    public ListingType createListingType() {
        return new ListingType();
    }

    /**
     * Create an instance of {@link TimeRangeType }
     * 
     */
    public TimeRangeType createTimeRangeType() {
        return new TimeRangeType();
    }

    /**
     * Create an instance of {@link AccountType }
     * 
     */
    public AccountType createAccountType() {
        return new AccountType();
    }

    /**
     * Create an instance of {@link ResponseType }
     * 
     */
    public ResponseType createResponseType() {
        return new ResponseType();
    }

    /**
     * Create an instance of {@link DataUrlType }
     * 
     */
    public DataUrlType createDataUrlType() {
        return new DataUrlType();
    }

    /**
     * Create an instance of {@link SiteDataType }
     * 
     */
    public SiteDataType createSiteDataType() {
        return new SiteDataType();
    }

    /**
     * Create an instance of {@link KeywordsType }
     * 
     */
    public KeywordsType createKeywordsType() {
        return new KeywordsType();
    }

    /**
     * Create an instance of {@link OwnedDomainsType }
     * 
     */
    public OwnedDomainsType createOwnedDomainsType() {
        return new OwnedDomainsType();
    }

    /**
     * Create an instance of {@link OwnedDomainType }
     * 
     */
    public OwnedDomainType createOwnedDomainType() {
        return new OwnedDomainType();
    }

    /**
     * Create an instance of {@link LetterBarsType }
     * 
     */
    public LetterBarsType createLetterBarsType() {
        return new LetterBarsType();
    }

    /**
     * Create an instance of {@link ContributingSiteType.TimeRange }
     * 
     */
    public ContributingSiteType.TimeRange createContributingSiteTypeTimeRange() {
        return new ContributingSiteType.TimeRange();
    }

    /**
     * Create an instance of {@link ContributingSiteType.Reach }
     * 
     */
    public ContributingSiteType.Reach createContributingSiteTypeReach() {
        return new ContributingSiteType.Reach();
    }

    /**
     * Create an instance of {@link ContributingSiteType.PageViews }
     * 
     */
    public ContributingSiteType.PageViews createContributingSiteTypePageViews() {
        return new ContributingSiteType.PageViews();
    }

    /**
     * Create an instance of {@link WebMapSubType.Results.Result }
     * 
     */
    public WebMapSubType.Results.Result createWebMapSubTypeResultsResult() {
        return new WebMapSubType.Results.Result();
    }

    /**
     * Create an instance of {@link UrlServiceType.DataUrl }
     * 
     */
    public UrlServiceType.DataUrl createUrlServiceTypeDataUrl() {
        return new UrlServiceType.DataUrl();
    }

    /**
     * Create an instance of {@link UsageStatisticType.Reach }
     * 
     */
    public UsageStatisticType.Reach createUsageStatisticTypeReach() {
        return new UsageStatisticType.Reach();
    }

    /**
     * Create an instance of {@link UsageStatisticType.PageViews }
     * 
     */
    public UsageStatisticType.PageViews createUsageStatisticTypePageViews() {
        return new UsageStatisticType.PageViews();
    }

    /**
     * Create an instance of {@link PhysicalAddressType.Streets }
     * 
     */
    public PhysicalAddressType.Streets createPhysicalAddressTypeStreets() {
        return new PhysicalAddressType.Streets();
    }

    /**
     * Create an instance of {@link TrafficHistoryType.HistoricalData.Data.PageViews }
     * 
     */
    public TrafficHistoryType.HistoricalData.Data.PageViews createTrafficHistoryTypeHistoricalDataDataPageViews() {
        return new TrafficHistoryType.HistoricalData.Data.PageViews();
    }

    /**
     * Create an instance of {@link TrafficHistoryType.HistoricalData.Data.Reach }
     * 
     */
    public TrafficHistoryType.HistoricalData.Data.Reach createTrafficHistoryTypeHistoricalDataDataReach() {
        return new TrafficHistoryType.HistoricalData.Data.Reach();
    }

    /**
     * Create an instance of {@link TrafficDataType.ContributingSubdomains }
     * 
     */
    public TrafficDataType.ContributingSubdomains createTrafficDataTypeContributingSubdomains() {
        return new TrafficDataType.ContributingSubdomains();
    }

    /**
     * Create an instance of {@link TrafficDataType.UsageStatistics }
     * 
     */
    public TrafficDataType.UsageStatistics createTrafficDataTypeUsageStatistics() {
        return new TrafficDataType.UsageStatistics();
    }

    /**
     * Create an instance of {@link TrafficDataType.RankByCity.City.Contribution.PerUser }
     * 
     */
    public TrafficDataType.RankByCity.City.Contribution.PerUser createTrafficDataTypeRankByCityCityContributionPerUser() {
        return new TrafficDataType.RankByCity.City.Contribution.PerUser();
    }

    /**
     * Create an instance of {@link TrafficDataType.RankByCountry.Country.Contribution }
     * 
     */
    public TrafficDataType.RankByCountry.Country.Contribution createTrafficDataTypeRankByCountryCountryContribution() {
        return new TrafficDataType.RankByCountry.Country.Contribution();
    }

    /**
     * Create an instance of {@link SitesLinkingInType.Site }
     * 
     */
    public SitesLinkingInType.Site createSitesLinkingInTypeSite() {
        return new SitesLinkingInType.Site();
    }

    /**
     * Create an instance of {@link RelatedType.RelatedLinks }
     * 
     */
    public RelatedType.RelatedLinks createRelatedTypeRelatedLinks() {
        return new RelatedType.RelatedLinks();
    }

    /**
     * Create an instance of {@link RelatedType.Categories.CategoryData }
     * 
     */
    public RelatedType.Categories.CategoryData createRelatedTypeCategoriesCategoryData() {
        return new RelatedType.Categories.CategoryData();
    }

    /**
     * Create an instance of {@link CrawlType.Index.Offsets }
     * 
     */
    public CrawlType.Index.Offsets createCrawlTypeIndexOffsets() {
        return new CrawlType.Index.Offsets();
    }

    /**
     * Create an instance of {@link CrawlType.MetaData.RequestInfo }
     * 
     */
    public CrawlType.MetaData.RequestInfo createCrawlTypeMetaDataRequestInfo() {
        return new CrawlType.MetaData.RequestInfo();
    }

    /**
     * Create an instance of {@link CrawlType.MetaData.Checksums }
     * 
     */
    public CrawlType.MetaData.Checksums createCrawlTypeMetaDataChecksums() {
        return new CrawlType.MetaData.Checksums();
    }

    /**
     * Create an instance of {@link CrawlType.MetaData.Images }
     * 
     */
    public CrawlType.MetaData.Images createCrawlTypeMetaDataImages() {
        return new CrawlType.MetaData.Images();
    }

    /**
     * Create an instance of {@link CrawlType.MetaData.Links.Link }
     * 
     */
    public CrawlType.MetaData.Links.Link createCrawlTypeMetaDataLinksLink() {
        return new CrawlType.MetaData.Links.Link();
    }

    /**
     * Create an instance of {@link CrawlType.MetaData.OtherUrls.OtherUrl }
     * 
     */
    public CrawlType.MetaData.OtherUrls.OtherUrl createCrawlTypeMetaDataOtherUrlsOtherUrl() {
        return new CrawlType.MetaData.OtherUrls.OtherUrl();
    }

    /**
     * Create an instance of {@link ContactInfoType.PhoneNumbers }
     * 
     */
    public ContactInfoType.PhoneNumbers createContactInfoTypePhoneNumbers() {
        return new ContactInfoType.PhoneNumbers();
    }

    /**
     * Create an instance of {@link ContactInfoType.PhysicalAddress }
     * 
     */
    public ContactInfoType.PhysicalAddress createContactInfoTypePhysicalAddress() {
        return new ContactInfoType.PhysicalAddress();
    }

    /**
     * Create an instance of {@link ContactInfoType.CompanyStockTicker }
     * 
     */
    public ContactInfoType.CompanyStockTicker createContactInfoTypeCompanyStockTicker() {
        return new ContactInfoType.CompanyStockTicker();
    }

    /**
     * Create an instance of {@link RequestType.Errors }
     * 
     */
    public RequestType.Errors createRequestTypeErrors() {
        return new RequestType.Errors();
    }

    /**
     * Create an instance of {@link RequestType.Arguments.Argument }
     * 
     */
    public RequestType.Arguments.Argument createRequestTypeArgumentsArgument() {
        return new RequestType.Arguments.Argument();
    }

    /**
     * Create an instance of {@link WebMapRequest.Security }
     * 
     */
    public WebMapRequest.Security createWebMapRequestSecurity() {
        return new WebMapRequest.Security();
    }

    /**
     * Create an instance of {@link UrlInfoRequest.Security }
     * 
     */
    public UrlInfoRequest.Security createUrlInfoRequestSecurity() {
        return new UrlInfoRequest.Security();
    }

    /**
     * Create an instance of {@link TrafficHistoryRequest.Security }
     * 
     */
    public TrafficHistoryRequest.Security createTrafficHistoryRequestSecurity() {
        return new TrafficHistoryRequest.Security();
    }

    /**
     * Create an instance of {@link SitesLinkingInRequest.Security }
     * 
     */
    public SitesLinkingInRequest.Security createSitesLinkingInRequestSecurity() {
        return new SitesLinkingInRequest.Security();
    }

    /**
     * Create an instance of {@link CrawlRequest.Security }
     * 
     */
    public CrawlRequest.Security createCrawlRequestSecurity() {
        return new CrawlRequest.Security();
    }

    /**
     * Create an instance of {@link CategoryListingsRequest.Security }
     * 
     */
    public CategoryListingsRequest.Security createCategoryListingsRequestSecurity() {
        return new CategoryListingsRequest.Security();
    }

    /**
     * Create an instance of {@link CategoryBrowseRequest.Security }
     * 
     */
    public CategoryBrowseRequest.Security createCategoryBrowseRequestSecurity() {
        return new CategoryBrowseRequest.Security();
    }

    /**
     * Create an instance of {@link JAXBElement }{@code <}{@link Object }{@code >}
     * 
     */
    @XmlElementDecl(namespace = "http://alexa.amazonaws.com/doc/2005-10-05/", name = "Result")
    public JAXBElement<Object> createResult(Object value) {
        return new JAXBElement<Object>(_Result_QNAME, Object.class, null, value);
    }

}
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyBoolean; import static org.mockito.Matchers.anyInt; import static org.mockito.Matchers.argThat; import static org.mockito.Matchers.eq; import static org.mockito.Matchers.isA; import static org.mockito.Matchers.same; import static org.mockito.Mockito.doNothing; import static org.mockito.Mockito.doReturn; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; import java.io.IOException; import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.Random; import java.util.concurrent.Callable; import java.util.concurrent.CompletionService; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.AbstractFileSystem;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.server.nodemanager.api.LocalizationProtocol;
import org.apache.hadoop.yarn.server.nodemanager.api.ResourceLocalizationSpec;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalResourceStatus;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerAction;
import org.apache.hadoop.yarn.server.nodemanager.api.protocolrecords.LocalizerStatus;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Test;
import org.mockito.ArgumentMatcher;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Unit tests for {@link ContainerLocalizer}. The local file system is a
 * Mockito spy (directory creation is stubbed out so nothing touches disk)
 * and the NodeManager protocol is a mock fed with scripted heartbeat
 * responses.
 */
public class TestContainerLocalizer {

  static final Log LOG = LogFactory.getLog(TestContainerLocalizer.class);
  static final Path basedir =
      new Path("target", TestContainerLocalizer.class.getName());

  // fixed identities used by all tests
  static final String appUser = "yak";
  static final String appId = "app_RM_0";
  static final String containerId = "container_0";
  static final InetSocketAddress nmAddr =
      new InetSocketAddress("foobar", 8040);

  // fixtures (re)initialized per test via setupContainerLocalizerForTest()
  private AbstractFileSystem spylfs;
  private Random random;
  private List<Path> localDirs;
  private Path tokenPath;
  private LocalizationProtocol nmProxy;

  @Test
  public void testContainerLocalizerMain() throws Exception {
    FileContext fs = FileContext.getLocalFSFileContext();
    spylfs = spy(fs.getDefaultFileSystem());
    ContainerLocalizer localizer = setupContainerLocalizerForTest();

    // verify created cache
    // (expected private and per-application file cache locations under each
    //  configured local dir)
    List<Path> privCacheList = new ArrayList<Path>();
    List<Path> appCacheList = new ArrayList<Path>();
    for (Path p : localDirs) {
      Path base = new Path(new Path(p, ContainerLocalizer.USERCACHE), appUser);
      Path privcache = new Path(base, ContainerLocalizer.FILECACHE);
      privCacheList.add(privcache);
      Path appDir =
          new Path(base, new Path(ContainerLocalizer.APPCACHE, appId));
      Path appcache = new Path(appDir, ContainerLocalizer.FILECACHE);
      appCacheList.add(appcache);
    }

    // mock heartbeat responses from NM: four LIVE rounds each carrying one
    // resource to fetch, one empty LIVE round, then DIE ends the loop
    ResourceLocalizationSpec rsrcA = getMockRsrc(random,
        LocalResourceVisibility.PRIVATE, privCacheList.get(0));
    ResourceLocalizationSpec rsrcB = getMockRsrc(random,
        LocalResourceVisibility.PRIVATE, privCacheList.get(0));
    ResourceLocalizationSpec rsrcC = getMockRsrc(random,
        LocalResourceVisibility.APPLICATION, appCacheList.get(0));
    ResourceLocalizationSpec rsrcD = getMockRsrc(random,
        LocalResourceVisibility.PRIVATE, privCacheList.get(0));
    when(nmProxy.heartbeat(isA(LocalizerStatus.class)))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.LIVE,
            Collections.singletonList(rsrcA)))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.LIVE,
            Collections.singletonList(rsrcB)))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.LIVE,
            Collections.singletonList(rsrcC)))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.LIVE,
            Collections.singletonList(rsrcD)))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.LIVE,
            Collections.<ResourceLocalizationSpec>emptyList()))
        .thenReturn(new MockLocalizerHeartbeatResponse(LocalizerAction.DIE,
            null));
    LocalResource tRsrcA = rsrcA.getResource();
LocalResource tRsrcB = rsrcB.getResource();
    LocalResource tRsrcC = rsrcC.getResource();
    LocalResource tRsrcD = rsrcD.getResource();
    // each expected resource resolves to a successful fake download
    doReturn(
        new FakeDownload(rsrcA.getResource().getResource().getFile(), true))
        .when(localizer).download(isA(Path.class), eq(tRsrcA),
            isA(UserGroupInformation.class));
    doReturn(
        new FakeDownload(rsrcB.getResource().getResource().getFile(), true))
        .when(localizer).download(isA(Path.class), eq(tRsrcB),
            isA(UserGroupInformation.class));
    doReturn(
        new FakeDownload(rsrcC.getResource().getResource().getFile(), true))
        .when(localizer).download(isA(Path.class), eq(tRsrcC),
            isA(UserGroupInformation.class));
    doReturn(
        new FakeDownload(rsrcD.getResource().getResource().getFile(), true))
        .when(localizer).download(isA(Path.class), eq(tRsrcD),
            isA(UserGroupInformation.class));

    // run localization
    assertEquals(0, localizer.runLocalization(nmAddr));

    for (Path p : localDirs) {
      Path base = new Path(new Path(p, ContainerLocalizer.USERCACHE), appUser);
      Path privcache = new Path(base, ContainerLocalizer.FILECACHE);
      // $x/usercache/$user/filecache
      verify(spylfs).mkdir(eq(privcache), isA(FsPermission.class), eq(false));
      Path appDir =
          new Path(base, new Path(ContainerLocalizer.APPCACHE, appId));
      // $x/usercache/$user/appcache/$appId/filecache
      Path appcache = new Path(appDir, ContainerLocalizer.FILECACHE);
      verify(spylfs).mkdir(eq(appcache), isA(FsPermission.class), eq(false));
    }
    // verify tokens read at expected location
    verify(spylfs).open(tokenPath);

    // verify downloaded resources reported to NM
    verify(nmProxy).heartbeat(argThat(new HBMatches(rsrcA.getResource())));
    verify(nmProxy).heartbeat(argThat(new HBMatches(rsrcB.getResource())));
    verify(nmProxy).heartbeat(argThat(new HBMatches(rsrcC.getResource())));
    verify(nmProxy).heartbeat(argThat(new HBMatches(rsrcD.getResource())));

    // verify all HB use localizerID provided
    verify(nmProxy, never()).heartbeat(argThat(
        new ArgumentMatcher<LocalizerStatus>() {
          @Override
          public boolean matches(Object o) {
            LocalizerStatus status = (LocalizerStatus) o;
            return !containerId.equals(status.getLocalizerId());
          }
        }));
  }

  @Test
  @SuppressWarnings("unchecked")
  public void testLocalizerTokenIsGettingRemoved() throws Exception {
    FileContext fs = FileContext.getLocalFSFileContext();
    spylfs = spy(fs.getDefaultFileSystem());
    ContainerLocalizer localizer = setupContainerLocalizerForTest();
    doNothing().when(localizer).localizeFiles(any(LocalizationProtocol.class),
        any(CompletionService.class), any(UserGroupInformation.class));
    localizer.runLocalization(nmAddr);
    // the credentials file must be deleted exactly once after being consumed
    verify(spylfs, times(1)).delete(tokenPath, false);
  }

  @Test
  @SuppressWarnings("unchecked") // mocked generics
  public void testContainerLocalizerClosesFilesystems() throws Exception {
    // verify filesystems are closed when localizer doesn't fail
    FileContext fs = FileContext.getLocalFSFileContext();
    spylfs = spy(fs.getDefaultFileSystem());
    ContainerLocalizer localizer = setupContainerLocalizerForTest();
    doNothing().when(localizer).localizeFiles(any(LocalizationProtocol.class),
        any(CompletionService.class), any(UserGroupInformation.class));
    verify(localizer, never()).closeFileSystems(
        any(UserGroupInformation.class));
    localizer.runLocalization(nmAddr);
    verify(localizer).closeFileSystems(any(UserGroupInformation.class));

    spylfs = spy(fs.getDefaultFileSystem());
    // verify filesystems are closed when localizer fails
    localizer = setupContainerLocalizerForTest();
    doThrow(new YarnRuntimeException("Forced Failure"))
        .when(localizer).localizeFiles(
            any(LocalizationProtocol.class), any(CompletionService.class),
            any(UserGroupInformation.class));
    verify(localizer, never()).closeFileSystems(
        any(UserGroupInformation.class));
    localizer.runLocalization(nmAddr);
    verify(localizer).closeFileSystems(any(UserGroupInformation.class));
  }

  @SuppressWarnings("unchecked") // mocked generics
  private ContainerLocalizer setupContainerLocalizerForTest()
      throws Exception {
    // don't actually create dirs
    doNothing().when(spylfs).mkdir(
        isA(Path.class),
isA(FsPermission.class), anyBoolean()); Configuration conf = new Configuration(); FileContext lfs = FileContext.getFileContext(spylfs, conf); localDirs = new ArrayList<Path>(); for (int i = 0; i < 4; ++i) { localDirs.add(lfs.makeQualified(new Path(basedir, i + ""))); } RecordFactory mockRF = getMockLocalizerRecordFactory(); ContainerLocalizer concreteLoc = new ContainerLocalizer(lfs, appUser, appId, containerId, localDirs, mockRF); ContainerLocalizer localizer = spy(concreteLoc); // return credential stream instead of opening local file random = new Random(); long seed = random.nextLong(); System.out.println("SEED: " + seed); random.setSeed(seed); DataInputBuffer appTokens = createFakeCredentials(random, 10); tokenPath = lfs.makeQualified(new Path( String.format(ContainerLocalizer.TOKEN_FILE_NAME_FMT, containerId))); doReturn(new FSDataInputStream(new FakeFSDataInputStream(appTokens)) ).when(spylfs).open(tokenPath); nmProxy = mock(LocalizationProtocol.class); doReturn(nmProxy).when(localizer).getProxy(nmAddr); doNothing().when(localizer).sleep(anyInt()); // return result instantly for deterministic test ExecutorService syncExec = mock(ExecutorService.class); CompletionService<Path> cs = mock(CompletionService.class); when(cs.submit(isA(Callable.class))) .thenAnswer(new Answer<Future<Path>>() { @Override public Future<Path> answer(InvocationOnMock invoc) throws Throwable { Future<Path> done = mock(Future.class); when(done.isDone()).thenReturn(true); FakeDownload d = (FakeDownload) invoc.getArguments()[0]; when(done.get()).thenReturn(d.call()); return done; } }); doReturn(syncExec).when(localizer).createDownloadThreadPool(); doReturn(cs).when(localizer).createCompletionService(syncExec); return localizer; } static class HBMatches extends ArgumentMatcher<LocalizerStatus> { final LocalResource rsrc; HBMatches(LocalResource rsrc) { this.rsrc = rsrc; } @Override public boolean matches(Object o) { LocalizerStatus status = (LocalizerStatus) o; for (LocalResourceStatus 
localized : status.getResources()) { switch (localized.getStatus()) { case FETCH_SUCCESS: if (localized.getLocalPath().getFile().contains( rsrc.getResource().getFile())) { return true; } break; default: fail("Unexpected: " + localized.getStatus()); break; } } return false; } } static class FakeDownload implements Callable<Path> { private final Path localPath; private final boolean succeed; FakeDownload(String absPath, boolean succeed) { this.localPath = new Path("file:///localcache" + absPath); this.succeed = succeed; } @Override public Path call() throws IOException { if (!succeed) { throw new IOException("FAIL " + localPath); } return localPath; } } static RecordFactory getMockLocalizerRecordFactory() { RecordFactory mockRF = mock(RecordFactory.class); when(mockRF.newRecordInstance(same(LocalResourceStatus.class))) .thenAnswer(new Answer<LocalResourceStatus>() { @Override public LocalResourceStatus answer(InvocationOnMock invoc) throws Throwable { return new MockLocalResourceStatus(); } }); when(mockRF.newRecordInstance(same(LocalizerStatus.class))) .thenAnswer(new Answer<LocalizerStatus>() { @Override public LocalizerStatus answer(InvocationOnMock invoc) throws Throwable { return new MockLocalizerStatus(); } }); return mockRF; } static ResourceLocalizationSpec getMockRsrc(Random r, LocalResourceVisibility vis, Path p) { ResourceLocalizationSpec resourceLocalizationSpec = mock(ResourceLocalizationSpec.class); LocalResource rsrc = mock(LocalResource.class); String name = Long.toHexString(r.nextLong()); URL uri = mock(org.apache.hadoop.yarn.api.records.URL.class); when(uri.getScheme()).thenReturn("file"); when(uri.getHost()).thenReturn(null); when(uri.getFile()).thenReturn("/local/" + vis + "/" + name); when(rsrc.getResource()).thenReturn(uri); when(rsrc.getSize()).thenReturn(r.nextInt(1024) + 1024L); when(rsrc.getTimestamp()).thenReturn(r.nextInt(1024) + 2048L); when(rsrc.getType()).thenReturn(LocalResourceType.FILE); when(rsrc.getVisibility()).thenReturn(vis); 
when(resourceLocalizationSpec.getResource()).thenReturn(rsrc); when(resourceLocalizationSpec.getDestinationDirectory()). thenReturn(ConverterUtils.getYarnUrlFromPath(p)); return resourceLocalizationSpec; } @SuppressWarnings({ "rawtypes", "unchecked" }) static DataInputBuffer createFakeCredentials(Random r, int nTok) throws IOException { Credentials creds = new Credentials(); byte[] password = new byte[20]; Text kind = new Text(); Text service = new Text(); Text alias = new Text(); for (int i = 0; i < nTok; ++i) { byte[] identifier = ("idef" + i).getBytes(); r.nextBytes(password); kind.set("kind" + i); service.set("service" + i); alias.set("token" + i); Token token = new Token(identifier, password, kind, service); creds.addToken(alias, token); } DataOutputBuffer buf = new DataOutputBuffer(); creds.writeTokenStorageToStream(buf); DataInputBuffer ret = new DataInputBuffer(); ret.reset(buf.getData(), 0, buf.getLength()); return ret; } }
/**
 * BaseMatrix.java
 *
 * Author  : Christopher K. Allen
 * Since   : Oct 11, 2013
 */
package xal.tools.math;

import java.io.PrintWriter;
import java.io.StringWriter;
import java.text.DecimalFormat;
import java.text.NumberFormat;
import java.util.Comparator;
import java.util.LinkedList;
import java.util.StringTokenizer;

import Jama.Matrix;
import Jama.SingularValueDecomposition;
import xal.tools.beam.PhaseMatrix;
import xal.tools.data.DataAdaptor;
import xal.tools.data.DataFormatException;
import xal.tools.data.IArchive;

/**
 * <p>
 * Class <code>BaseMatrix</code>. This is a base class for objects representing
 * real-number matrix objects. Thus it contains basic matrix operations where the interacting
 * objects are all of type <code>M</code>, or vectors of the singular type <code>V</code>.
 * (If matrix and vectors are not of compatible dimensions the operations fail.)
 * The template parameter <code>M</code> is the type of the child class. This
 * mechanism allows <code>BaseMatrix&lt;M extends BaseMatrix&lt;M&gt;&gt;</code>
 * to recognize the type of its derived classes in order to create and process
 * new objects as necessary.
 * </p>
 * <p>
 * Currently the internal matrix operations are supported by the <tt>Jama</tt>
 * matrix package. However, the <tt>Jama</tt> matrix package has been deemed a
 * "proof of principle" for the Java language and scientific computing and
 * is, thus, no longer supported. The objective of this base class is to hide
 * the internal implementation of matrix operations from the child classes and
 * all developers using the matrix packages. If it is determined that the <tt>Jama</tt>
 * matrix package is to be removed from XAL, the modification will be substantially
 * simplified by the current architecture.
 * </p>
 *
 * @author Christopher K.
Allen * @since Oct 11, 2013 */ public abstract class BaseMatrix<M extends BaseMatrix<M>> implements IArchive { /* * Global Constants */ /** The default character width of matrices when displayed using {@link #toStringMatrix()} */ private static final int INT_COL_WD_DFLT = 15; /** Attribute marker for data managed by IArchive interface */ public static final String ATTR_DATA = "values"; /** A small number used in comparing matrix elements (e.g., #isEqual() ) */ protected static final double DBL_EPS = 1.0e-12; /** number of Units in the Last Place (ULPs) used for bracketing approximately equal values */ protected static final int ULPS_BRACKET = 100; /* * Global Attributes */ /** Text format for outputting debug info */ final static private DecimalFormat SCI_FORMAT = new DecimalFormat("0.000000E00"); // /* // * Internal Classes // */ // // /** // * Interface <code>BaseMatrix.Ind</code> is exposed by objects // * representing matrix indices. In particular, the <code>enum</code> // * types that are matrix indices expose this interface. // * // * @author Christopher K. Allen // * @since Sep 25, 2013 // */ // public interface IIndex extends IIndex { // } // /* * Local Attributes */ /** number of matrix rows */ private int cntRows; /** number of matrix columns */ private int cntCols; /** internal matrix implementation */ protected Jama.Matrix matImpl; /* * Object Overrides */ /** * Base classes must override the clone operation in order to * make deep copies of the current object. This operation cannot * be done without the exact type. * * @see java.lang.Object#clone() * * @author Christopher K. 
Allen * @since Jul 3, 2014 */ @Override public abstract M clone(); /* * Assignment */ /** * Element assignment - assigns matrix element to the specified value * * @param i row index * @param j column index * @parm s new matrix element value * * @exception ArrayIndexOutOfBoundsException an index was equal to or larger than the matrix size */ public void setElem(int i, int j, double s) throws ArrayIndexOutOfBoundsException { this.getMatrix().set(i,j, s); } /** * Set a block sub-matrix within the current matrix. If the given two-dimensional * array is larger than block described by the indices it is truncated. If the * given indices describe a matrix larger than the given two-dimensional array * then an exception is thrown. * * @param i0 row index of upper left block * @param i1 row index of lower right block * @param j0 column index of upper left block * @param j1 column index of lower right block * @param arrSub two-dimensional sub element array * * @exception ArrayIndexOutOfBoundsException sub-matrix does not fit into base matrix */ public void setSubMatrix(int i0, int i1, int j0, int j1, double[][] arrSub) throws ArrayIndexOutOfBoundsException { Jama.Matrix matSub = new Matrix(arrSub); this.getMatrix().setMatrix(i0,i1,j0,j1, matSub); } /** * Sets the entire matrix to the values given in the Java primitive type * double array. * * @param arrMatrix Java primitive array containing new matrix values * * @exception ArrayIndexOutOfBoundsException the argument must have the same dimensions as this matrix * * @author Christopher K. 
Allen * @since Oct 4, 2013 */ public void setMatrix(double[][] arrMatrix) throws ArrayIndexOutOfBoundsException { // Check the dimensions of the argument double array if (this.getRowCnt() != arrMatrix.length || arrMatrix[0].length != this.getColCnt() ) throw new ArrayIndexOutOfBoundsException( "Dimensions of argument do not correspond to size of this matrix = " + this.getRowCnt() + "x" + this.getColCnt() ); // Set the elements of this array to that given by the corresponding // argument entries for (int i=0; i<this.getRowCnt(); i++) for (int j=0; j<this.getColCnt(); j++) { double dblVal = arrMatrix[i][j]; this.setElem(i, j, dblVal); } } /** * Parsing assignment - set the <code>PhaseMatrix</code> value * according to a token string of element values. * * The token string argument is assumed to be one-dimensional and packed by * column (aka FORTRAN). * * @param strValues token vector of SIZE<sup>2</sup> numeric values * * @exception IllegalArgumentException wrong number of token strings * @exception NumberFormatException bad number format, unparseable */ public void setMatrix(String strValues) throws NumberFormatException, IllegalArgumentException { // Error check the number of token strings StringTokenizer tokArgs = new StringTokenizer(strValues, " ,()[]{}"); //$NON-NLS-1$ if (tokArgs.countTokens() != this.getRowCnt()*this.getColCnt()) throw new IllegalArgumentException("PhaseMatrix#setMatrix - wrong number of token strings: " + strValues); //$NON-NLS-1$ // Extract initial phase coordinate values for (int i=0; i<this.getRowCnt(); i++) for (int j=0; j<this.getColCnt(); j++) { String strVal = tokArgs.nextToken(); double dblVal = Double.valueOf(strVal).doubleValue(); this.setElem(i,j, dblVal); } } /* * Matrix Attributes */ /** * Returns the number of rows in this matrix. Specifically, if * this matrix, denoted <b>M</b>, is in <b>R</b><sup><i>m</i>&times;<i>n</i></sup>, * then the returned value is <i>m</i>. * * @return the first dimension in the shape of this matrix. 
* * @author Christopher K. Allen * @since Oct 14, 2013 */ public int getRowCnt() { return this.cntRows; } /** * Returns the number of columns in this matrix. Specifically, if * this matrix, denoted <b>M</b>, is in <b>R</b><sup><i>m</i>&times;<i>n</i></sup>, * then the returned value is <i>n</i>. * * @return the second dimension in the shape of this matrix. * * @author Christopher K. Allen * @since Oct 14, 2013 */ public int getColCnt() { return this.cntCols; } /** * Return matrix element value. Get matrix element value at specified * <code>Diagonal</code> position. * * @param i row index * @param j column index * * @exception ArrayIndexOutOfBoundsException an index was equal to or larger than the matrix size */ public double getElem(int i, int j) throws ArrayIndexOutOfBoundsException { return this.getMatrix().get(i,j); } /** * <p> * Returns the matrix element at the position indicated by the * given row and column index sources. * </p> * <h3>NOTES</h3> * <p> * &middot; It is expected that the * object exposing the <code>IIndex</code> interface is an enumeration * class restricting the number of possible index values. * <br> * &middot; Consequently we do not declare a thrown exception assuming * that that enumeration class eliminates the possibility of an out of * bounds error. * </p> * * @param indRow source of the row index * @param indCol source of the column index * * @return value of the matrix element at the given row and column * * @author Christopher K. Allen * @since Sep 30, 2013 */ public double getElem(IIndex indRow, IIndex indCol) { double dblVal = this.matImpl.get(indRow.val(), indCol.val()); return dblVal; } /** * Returns a copy of the internal Java array containing * the matrix elements. The array dimensions are given by * the size of this matrix, available from * <code>{@link #getSize()}</code>. * * @return copied array of matrix values * * @author Christopher K. 
Allen * @since Sep 25, 2013 */ public double[][] getArrayCopy() { return this.matImpl.getArrayCopy(); } /* * Matrix Operations */ /** * <p> * Tests whether the given matrix is approximately equal to this matrix. * The idea is that we ignore any numerical noise when comparing if the two * matrices are equal. * </p> * <p> * This is a convenience class for the method * <code>{@link #isApproxEqual(BaseMatrix,int)}</code> where the number of ULPs * is set to <code>ULPS_BRACKET</code>. * </p> * <p> * The matrices are compared element by element using * <code>{@link ElementaryFunction#approxEq(double, double)}</code>. * </p> * * @return <code>true</code> if the given matrix is equal to this one with the * given number of significant digits, <code>false</code> otherwise. * * @since Jul 22, 2015 by Christopher K. Allen */ public boolean isApproxEqual(M matTest) { return this.isApproxEqual(matTest, ULPS_BRACKET); } /** * <p> * Tests whether the given matrix is approximately equal to this matrix. * The idea is that we ignore any numerical noise when comparing if the two * matrices are equal. This is done by ignoring the number of Units in the * Last Place in the machine representation. The larger this number the * more least significant digits we ignore. * </p> * <p> * The matrices are compared element by element using * <code>{@link ElementaryFunction#approxEq(double, double, int)}</code>. * </p> * * @param matTest the matrix being compared to this one. * @param the number of Units in the Last Place to ignore * * @return <code>true</code> if the given matrix is equal to this one with the * given number of significant digits, <code>false</code> otherwise. * * @since Jul 22, 2015 by Christopher K. 
Allen */ public boolean isApproxEqual(M matTest, int cntUlp) { for (int i=0; i<this.cntRows; i++) for (int j=0; j<this.cntCols; j++) { double dblVal = this.getElem(i, j); double dblCmp = matTest.getElem(i, j); if ( !ElementaryFunction.approxEq(dblVal, dblCmp, cntUlp) ) return false; } return true; } /** * Create a deep copy of the this matrix object. The returned * object is completely decoupled from the original. * * @return a deep copy object of this matrix */ public M copy() { M matClone = this.newInstance(); ((BaseMatrix<M>)matClone).assignMatrix( this.getMatrix() ); return matClone; } /** * Assign this matrix to be the zero matrix, specifically * the matrix containing all 0's. * * @author Christopher K. Allen * @since Oct 3, 2013 */ public void assignZero() { for (int i=0; i<this.getRowCnt(); i++) for (int j=0; j<this.getColCnt(); j++) this.setElem(i, j, 0.0); } /** * Ratio of the largest singular value over the smallest singular value. * Note that this method does a singular value decomposition just to * get the number (done in the (wasteful) <code>Jama.Matrix</code> * internal implementation). Thus, this computation is not cheap * if the matrix is large. * * @return the ratio of extreme singular values * * @author Christopher K. 
Allen * @since Oct 16, 2013 */ public double conditionNumber() { // double dblCondNum = this.matImpl.cond(); // Do a singular value decomposition SingularValueDecomposition svd = this.matImpl.svd(); double[] arrDblSv = svd.getSingularValues(); // Make a list of singular values LinkedList<Double> lstDblSv = new LinkedList<Double>(); for (double dblSv : arrDblSv) lstDblSv.add(dblSv); // Create a comparator to sort the singular values from // smallest to largest Comparator<Double> ifcSorter = new Comparator<Double>() { @Override public int compare(Double d1, Double d2) { if (d1 < d2) return -1; if (d1 == d2) return 0; //else (d1 > d2) return +1; } }; // Sort the singular values the get the largest and smallest lstDblSv.sort(ifcSorter); double dblMaxSv = lstDblSv.getLast(); double dblMinSv = lstDblSv.getFirst(); // Compute the condition number, ratio of largest to smallest singular values double dblCndNum = dblMaxSv/dblMinSv; return dblCndNum; } /** * Returns the transpose of this matrix. * * @return matrix <b>A</b><sup><i>T</i></sup> where <b>A</b> is this matrix * * @since Jul 22, 2015 by Christopher K. Allen */ public M transpose() { Jama.Matrix implTrans = this.getMatrix().transpose(); M matTrans = this.newInstance(implTrans); return matTrans; } /** * Computes the inverse of this matrix assuming that it is square. A invocation on * a non-square matrix will result in a runtime exception. * * @return the matrix <b>A</b><sup>-1</sup> where <b>A</b> is this matrix * * @throws UnsupportedOperationException * * @since Jul 22, 2015 by Christopher K. Allen */ public M inverse() throws UnsupportedOperationException { if (this.cntRows != this.cntCols) throw new UnsupportedOperationException("Cannot compute the inverse of a non-square matrix."); Jama.Matrix implInv = this.getMatrix().inverse(); M matInv = this.newInstance(implInv); return matInv; } /* * Algebraic Operations */ /** * Non-destructive matrix addition. This matrix is unaffected. 
* * @param matAddend matrix to be added to this * * @return the result of this matrix plus the given matrix (element-wise), * or <code>null</code> if error */ public M plus(M matAddend) { Jama.Matrix impAdd = ((BaseMatrix<M>)matAddend).getMatrix(); Jama.Matrix impSum = this.getMatrix().plus( impAdd ); M matAns = this.newInstance(impSum); return matAns; } /** * In-place matrix addition. The given matrix is added to this matrix * algebraically (element by element). * * @param mat matrix to be added to this (no new objects are created) */ public void plusEquals(M mat) { BaseMatrix<M> matBase = (BaseMatrix<M>)mat; this.getMatrix().plusEquals( matBase.getMatrix() ); } /** * Non-destructive matrix subtraction. This matrix is unaffected. * * @param matSub the subtrahend * * @return the value of this matrix minus the value of the given matrix, * or <code>null</code> if an error occurred */ public M minus(M matSub) { Jama.Matrix impSub = ((BaseMatrix<M>)matSub).getMatrix(); Jama.Matrix impDif = this.getMatrix().minus( impSub ); M matAns = this.newInstance(impDif); return matAns; } /** * In-place matrix subtraction. The given matrix is subtracted from the * value of this matrix. No additional objects are created. * * @param mat subtrahend */ public void minusEquals(M mat) { BaseMatrix<M> matBase = (BaseMatrix<M>)mat; this.getMatrix().minusEquals( matBase.getMatrix() ); } /** * Non-destructive scalar multiplication. This matrix is unaffected. * * @param s multiplier * * @return new matrix equal to the element-wise product of <i>s</i> and this matrix, * or <code>null</code> if an error occurred */ public M times(double s) { Jama.Matrix impPrd = this.getMatrix().times(s); M matAns = this.newInstance(impPrd); return matAns; } /* * Topological Operations */ /** * <p> * Return the maximum absolute value of all matrix elements. This can * be considered a norm on matrices, but it is not sub-multiplicative. 
* That is, * <br> * <br> * ||<b>AB</b>||<sub>max</sub> is not necessarily bound by ||<b>A</b>||<sub>max</sub> ||<b>B</b>||<sub>max</sub> . * <br> * <br> * </p> * * @return max<sub><i>i,j</i></sub> | <b>A</b><sub><i>i,j</i></sub> | */ public double max() { double val = 0.0; double max = Math.abs(getElem(0,0)); for (int i=0; i<this.getRowCnt(); i++) for (int j=0; j<this.getColCnt(); j++) { val = Math.abs( getElem(i,j) ); if (val > max) max = val; } return max; } /** * <p> * The matrix norm || &middot; ||<sub>1</sub> <b>induced</b> from * the <i>l</i><sub>1</sub> vector norm on <b>R</b><sup><i>n</i></sup>. That is, * <br/> * <br/> * &nbsp; &nbsp; ||<b>A</b>||<sub>1</sub> &equiv; max<sub><b>x</b>&in;<b>R</b><sup><i>n</i></sup></sub> ||<b>Ax</b>||<sub>1</sub> * <br/> * <br/> * where, by context, the second occurrence of ||&middot;||<sub>1</sub> is the * Lesbeque 1-norm on <b>R</b><sup><i>n</i><sup>. * </p> * <h3>NOTES:</h3> * <p> * &middot; For square matrices induced norms are sub-multiplicative, that is * ||<b>AB</b>|| &le; ||<b>A</b>|| ||<b>B</b>||. * <br/> * <br/> * &middot; The ||&middot;||<sub>1</sub> induced norm equates to the * the maximum absolute column sum. * </p> * * @return ||<b>M</b>||<sub>1</sub> = max<sub><i>i</i></sub> &Sigma;<sub><i>j</i></sub> |<i>M<sub>i,j</i></sub>| */ public double norm1() { return this.getMatrix().norm1(); } /** * <p> * Returns the <i>l</i><sub>2</sub> induced norm of this matrix, * which is the maximum, which turns out to be the spectral radius * of the matrix. Specifically, * <br/> * <br/> * &nbsp; &nbsp; ||<b>A</b>||<sub>2</sub> &equiv; [ max &lambda;(<b>A</b><sup><i>T</i></sup><b>A</b>) ]<sup>1/2</sup> , * <br/> * &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; &nbsp; = max &rho;(<b>A</b>) , * <br/> * <br/> * where &lambda;(&middot;) is the eigenvalue operator and &rho;(&middot;) is the * singular value operator. 
 * </p>
 *
 * @return the maximum singular value of this matrix
 */
public double norm2() {
    // Delegates to the Jama implementation (computed via an SVD there).
    return this.getMatrix().norm2();
}

/**
 * <p>
 * The matrix norm || &middot; ||<sub>&infin;</sub> <b>induced</b> from
 * the <i>l</i><sub>&infin;</sub> vector norm on <b>R</b><sup><i>n</i></sup>.  That is,
 * <br/>
 * <br/>
 * &nbsp; &nbsp; ||<b>A</b>||<sub>&infin;</sub> &equiv; max<sub><b>x</b>&in;<b>R</b><sup><i>n</i></sup></sub>
 *                                              ||<b>Ax</b>||<sub>&infin;</sub>
 * <br/>
 * <br/>
 * where, by context, the second occurrence of ||&middot;||<sub>&infin;</sub> is the
 * Lesbeque &infin;-norm on <b>R</b><sup><i>n</i></sup>.
 * </p>
 * <h3>NOTES:</h3>
 * <p>
 * &middot; For square matrices induced norms are sub-multiplicative, that is
 * ||<b>AB</b>|| &le; ||<b>A</b>|| ||<b>B</b>||.
 * <br/>
 * <br/>
 * &middot; The ||&middot;||<sub>&infin;</sub> induced norm equates to the
 * maximum absolute <em>row</em> sum.  (The previous documentation said
 * "column sum", which describes the 1-norm; Jama's normInf() computes the
 * maximum row sum.)
 * </p>
 *
 * @return ||<b>M</b>||<sub>&infin;</sub> = max<sub><i>i</i></sub> &Sigma;<sub><i>j</i></sub> |<i>M<sub>i,j</i></sub>|
 */
public double normInf() {
    // Delegates to the Jama implementation.
    return this.getMatrix().normInf();
}

/**
 * <p>
 * Return the Frobenius norm ||<b>A</b>||<sub><i>F</i></sub> .
 * The Frobenius norm has the property that it is
 * both the element-wise Lebesgue 2-norm the Schatten 2-norm.  Thus we have
 * <br/>
 * <br/>
 * &nbsp; &nbsp; ||<b>A</b>||<sub><i>F</i></sub> = [ &Sigma;<sub><i>i</i></sub> &Sigma;<sub><i>j</i></sub> <i>A</i><sub><i>i,j</i></sub><sup>2</sup> ]<sup>1/2</sup>
 *                                               = [ Tr(<b>A</b><sup><i>T</i></sup><b>A</b>) ]<sup>1/2</sup>
 *                                               = [ &Sigma;<sub><i>i</i></sub> &sigma;<sub><i>i</i></sub><sup>2</sup> ]<sup>1/2</sup>
 * <br/>
 * <br/>
 * where Tr is the trace operator and &sigma;<sub><i>i</i></sub> are the singular values of
 * matrix <b>A</b>.
 * </p>
 * <h3>NOTES</h3>
 * <p>
 * &middot; Since the Schatten norms are sub-multiplicative, the Frobenius norm
 * is sub-multiplicative.
* <br/> * <br/> * &middot; The Frobenius norm is invariant under rotations by elements of * <i>O</i>(2) &sub; <b>R</b><sup><i>n</i>&times;<i>n</i></sup> . * </p> * * * @return ||<b>A</b>||<sub><i>F</i></sub> = [ &Sigma;<sub><i>i,j</i></sub> <i>A<sub>ij</sub></i><sup>2</sup> ]<sup>1/2</sup> */ public double normF() { return this.getMatrix().normF(); } /* * Testing and Debugging */ /** * Print out the contents of the R2x2 in text format. * * @param os output stream to receive text dump */ public void print(PrintWriter os) { this.matImpl.print(os, new DecimalFormat("0.#####E0"), this.getColCnt()); } /* * IArchive Interface */ /** * Save the value of this <code>PhaseMatrix</code> to a data sink * represented by the <code>DataAdaptor</code> interface. * * @param daptArchive interface to data sink * * @see xal.tools.data.IArchive#save(xal.tools.data.DataAdaptor) */ @Override public void save(DataAdaptor daptArchive) { daptArchive.setValue(ATTR_DATA, this.toString()); } /** * Restore the value of the this <code>PhaseMatrix</code> from the * contents of a data archive. * * @param daptArchive interface to data source * * @throws DataFormatException malformed data * @throws IllegalArgumentException wrong number of string tokens * * @see xal.tools.data.IArchive#load(xal.tools.data.DataAdaptor) */ @Override public void load(DataAdaptor daptArchive) throws DataFormatException { if ( daptArchive.hasAttribute(PhaseMatrix.ATTR_DATA) ) { String strValues = daptArchive.stringValue(PhaseMatrix.ATTR_DATA); this.setMatrix(strValues); } } /* * Object Overrides */ /** * Checks absolute equivalency. That is, checks whether or not the * argument is this object. 
* * @param objTest object under equivalency test * * @return <code>true</code> if the argument is this object, * <code>false</code> otherwise */ @Override public boolean equals(Object objTest) { //boolean bResult = this.equals(objTest); // this code causes an infinite recursion final boolean bResult = super.equals( objTest ); return bResult; } /** * Convert the contents of the matrix to a string representation. * The format is similar to that of Mathematica. Specifically, * <br/> * <br/> * { {a b }{c d } } * <br/> * * @return string representation of the matrix */ @Override public String toString() { // double is 15 significant digits plus the spaces and brackets final int size = (this.getRowCnt()*this.getColCnt() * 16) + (this.getRowCnt()*2) + 4; StringBuffer strBuf = new StringBuffer(size); synchronized(strBuf) { // get lock once instead of once per append strBuf.append("{ "); for (int i=0; i<this.getRowCnt(); i++) { strBuf.append("{ "); for (int j=0; j<this.getColCnt(); j++) { strBuf.append(this.getElem(i,j)); strBuf.append(" "); } strBuf.append("}"); } strBuf.append(" }"); } return strBuf.toString(); } /** * Returns a string representation of this matrix. The string contains * multiple lines, one for each row of the matrix. Within each line the * matrix entries are formatted. Thus, the string should resemble the * usual matrix format when printed out. * * @return multiple line formatted string containing matrix elements in matrix format * * @author Christopher K. Allen * @since Feb 8, 2013 */ public String toStringMatrix() { return this.toStringMatrix(SCI_FORMAT); } /** * Returns a string representation of this matrix. The string contains * multiple lines, one for each row of the matrix. Within each line the * matrix entries are formatted according to the given number format. * The default column width is used. * The string should resemble the usual matrix format when printed out. 
* * @param fmt <code>NumberFormat</code> object containing output format for matrix entries * * @return multiple line formatted string containing matrix elements in matrix format * * @author Christopher K. Allen * @since Feb 8, 2013 */ public String toStringMatrix(NumberFormat fmt) { return this.toStringMatrix(fmt, INT_COL_WD_DFLT); } /** * Returns a string representation of this matrix. The string contains * multiple lines, one for each row of the matrix. Within each line the * matrix entries are formatted according to the given number format. * The string should resemble the usual matrix format when printed out. * * @param fmt <code>NumberFormat</code> object containing output format for matrix entries * @param intColWd number of characters used for each column (padding is with spaces) * * @return multiple line formatted string containing matrix elements in matrix format * * @author Christopher K. Allen * @since Feb 8, 2013 */ public String toStringMatrix(NumberFormat fmt, int intColWd) { StringWriter sw = new StringWriter(); PrintWriter pw = new PrintWriter(sw); matImpl.print(pw, fmt, intColWd); return sw.toString(); } /** * "Borrowed" implementation from AffineTransform, since it is based on * double attribute values. Must implement hashCode to be consistent with * equals as specified by contract of hashCode in <code>Object</code>. * * @return a hashCode for this object */ @Override public int hashCode() { long bits = 0; for (int i=0; i<this.getRowCnt(); i++) { for (int j= 0; j<this.getColCnt(); j++) { bits = bits * 31 + Double.doubleToLongBits(getElem(i,j));; } } return (((int) bits) ^ ((int) (bits >> 32))); } /* * Internal Support */ /** * Return the internal matrix representation. * * @return the Jama matrix object */ protected Jama.Matrix getMatrix() { return matImpl; } /** * Sets the entire matrix to the values given in the Java primitive type * double array. 
The given array is packed by rows, for example, * <code>arrMatrix[0]</code> refers to the first row of the matrix. * Note that a new Jama matrix is instantiated to encapsulate the given array. * * @param arrMatrix Java primitive array containing new matrix internal representation * * @exception IllegalArgumentException the argument is degenerate and cannot represent a matrix * * @author Christopher K. Allen * @since Oct 4, 2013 */ protected void assignMatrix(double[][] arrMatrix) { // // Check the dimensions of the argument double array // if (this.getRowCnt() != arrMatrix.length || arrMatrix[0].length != this.getColCnt() ) // throw new ArrayIndexOutOfBoundsException( // "Dimensions of argument do not correspond to size of this matrix = " // + this.getRowCnt() + "x" + this.getColCnt() // ); // // Set the elements of this array to that given by the corresponding // // argument entries // for (int i=0; i<this.getRowCnt(); i++) // for (int j=0; j<this.getColCnt(); j++) { // double dblVal = arrMatrix[i][j]; // // this.setElem(i, j, dblVal); // } // // Check the dimensions of the argument double array // We need to have a valid allocated double array if (arrMatrix.length < 1 || arrMatrix[0].length < 1) throw new ArrayIndexOutOfBoundsException( "The argument array is not of full rank, it is not fully allocated." ); this.cntRows = arrMatrix.length; this.cntCols = arrMatrix[0].length; this.matImpl = new Jama.Matrix(arrMatrix); } /** * Sets the internal matrix value to that given in the argument. This * is a deep copy operation. Note that the complete matrix is copy, * thus the dimensions and other parameters are assigned as well. * * @param matValue internal implementation of matrix values * * @author Christopher K. 
Allen * @since Oct 1, 2013 */ protected void assignMatrix(Jama.Matrix matValue) { // for (int i=0; i<this.getRowCnt(); i++) // for (int j=0; j<this.getColCnt(); j++) { // double dblVal = matValue.get(i, j); // // this.matImpl.set(i, j, dblVal); // } double[][] arrCopy = matValue.getArrayCopy(); this.matImpl = new Jama.Matrix(arrCopy); this.cntCols = this.matImpl.getColumnDimension(); this.cntRows = this.matImpl.getRowDimension(); } /** * <p> * Creates a new, uninitialized instance of this matrix type. * </p> * <p> * NOTE: * &middot; This method was made abstract by Ivo List. Rather than use * reflection to instantiate new objects, this function is now delegated * to the concrete classes. This architecture is more robust and allows * the compiler to do more error checking. * </p> * * @return uninitialized matrix object of type <code>M</code> * * @author Ivo List * @author Christopher K. Allen * @since Oct 1, 2013 */ protected abstract M newInstance(); // protected M newInstance() throws InstantiationException { // try { // M matNewInst = this.ctrType.newInstance(); // // return matNewInst; // // } catch (InstantiationException | // IllegalAccessException | // IllegalArgumentException | // InvocationTargetException e) { // // throw new InstantiationException("Unable to copy matrix " + this.getClass().getName()); // } // } /** * Creates a new instance of this matrix type initialized to the given * implementation matrix. * * @param impInit implementation matrix containing initialization values * * @return initialized matrix object of type <code>M</code> * * @author Christopher K. 
Allen * @since Oct 1, 2013 */ protected M newInstance(Jama.Matrix impInit) { M matNewInst = this.newInstance(); ((BaseMatrix<M>)matNewInst).assignMatrix(impInit); return matNewInst; } // public <U extends Vector<U>, V extends Vector<V>> U times (V vec) { // return null; // } /* * Child Class Support */ /** * Creates a new, uninitialized instance of a square matrix with the given * matrix dimensions. The matrix contains all zeros. * * @param cntRows the matrix row count of this object * @param cntCols the matrix column count * * @throws UnsupportedOperationException child class has not defined a public, zero-argument constructor */ protected BaseMatrix(int cntRows, int cntCols) /*throws UnsupportedOperationException*/ { this.cntRows = cntRows; this.cntCols = cntCols; this.matImpl = new Jama.Matrix(cntRows, cntCols, 0.0); } /** * Copy constructor for <code>BaseMatrix</code>. Creates a deep * copy of the given object. The dimensions are set and the * internal array is cloned. * * @param matTemplate the matrix to be cloned * * @throws UnsupportedOperationException base class has not defined a public, zero-argument constructor * * @author Christopher K. Allen * @since Sep 25, 2013 */ protected BaseMatrix(M matTemplate) { // this(matParent.getRowCnt(), matParent.getColCnt()); BaseMatrix<M> matBase = (BaseMatrix<M>)matTemplate; this.assignMatrix(matBase.getMatrix()); } /** * <p> * Parsing Constructor - creates an instance of the child class and initialize it * according to a token string of element values. * </p> * <p> * The token string argument is assumed to be one-dimensional and packed by * column (ala FORTRAN). 
* </p> * * @param cntRows the matrix row size of this object * @param cntCols the matrix column size of this object * @param strTokens token vector of getSize()^2 numeric values * * @exception IllegalArgumentException wrong number of token strings * @exception NumberFormatException bad number format, unparseable */ protected BaseMatrix(int cntRows, int cntCols, String strTokens) throws IllegalArgumentException, NumberFormatException { this(cntRows, cntCols); // Error check the number of token strings StringTokenizer tokArgs = new StringTokenizer(strTokens, " ,()[]{}"); if (tokArgs.countTokens() != this.getRowCnt()*this.getColCnt()) throw new IllegalArgumentException("SquareMatrix, wrong number of token in string initializer: " + strTokens); // Extract initial phase coordinate values for (int i=0; i<this.getRowCnt(); i++) for (int j=0; j<this.getColCnt(); j++) { String strVal = tokArgs.nextToken(); double dblVal = Double.valueOf(strVal).doubleValue(); this.setElem(i,j, dblVal); } } /** * <p> * Initializing constructor for base class <code>BaseMatrix</code>. * Initializes the matrix to the values given in the Java primitive type * double array by setting the internal matrix representation to the given * Java array. The matrix is shaped according to the (row-packed) arguement. * </p> * <p> * The dimensions of the given Java double array determine the size of the matrix. * An <i>m</i>x<i>n</i> Java double array creates an <i>m</i>x<i>n</i> * <code>BaseMatrix</code> array. If the argument is not fully allocated or * inconsistent, an exception is thrown. * </p> * <p> * As an example consider the following Java array * <pre> * <code> * double[][] arrInternal = new double[][] { * {1.1, 1.2, 1.3, 1.4, 1.5}, * {2.1, 2.2, 2.3, 2.0, 2.5}, * {3.1, 3.2, 3.3, 3.4, 3.0} * }; * </code> * </pre> * This array would produce a 3&times;5 matrix. Note that the given argument becomes * the internal representation of the matrix object. 
Thus, the Java array * <code>arrInternal</code> will be changed by the the encapsulating matrix object * so should no longer be referenced after presenting it to this constructor. * </p> * * @param arrMatrix Java primitive array to be new internal matrix value representation * * @exception IllegalArgumentException the argument is degenerate and cannot represent a matrix * * @since Oct 4, 2013 by Christopher K. Allen */ protected BaseMatrix(double[][] arrVals) { // this(matParent.getRowCnt(), matParent.getColCnt()); this.assignMatrix(arrVals); } }
/* * Copyright (C) 2007 The Guava Authors * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.collect; import static com.google.common.collect.Iterables.getOnlyElement; import static com.google.common.collect.Iterables.unmodifiableIterable; import static com.google.common.collect.Sets.newHashSet; import static java.lang.reflect.Proxy.newProxyInstance; import static java.util.Arrays.asList; import com.google.common.annotations.GwtCompatible; import com.google.common.annotations.GwtIncompatible; import com.google.common.collect.testing.Helpers; import com.google.common.collect.testing.ListTestSuiteBuilder; import com.google.common.collect.testing.MinimalCollection; import com.google.common.collect.testing.MinimalIterable; import com.google.common.collect.testing.features.CollectionFeature; import com.google.common.collect.testing.features.CollectionSize; import com.google.common.collect.testing.google.ListGenerators.BuilderAddAllListGenerator; import com.google.common.collect.testing.google.ListGenerators.BuilderReversedListGenerator; import com.google.common.collect.testing.google.ListGenerators.ImmutableListHeadSubListGenerator; import com.google.common.collect.testing.google.ListGenerators.ImmutableListMiddleSubListGenerator; import com.google.common.collect.testing.google.ListGenerators.ImmutableListOfGenerator; import com.google.common.collect.testing.google.ListGenerators.ImmutableListTailSubListGenerator; import 
com.google.common.collect.testing.google.ListGenerators.UnhashableElementsImmutableListGenerator; import com.google.common.collect.testing.testers.ListHashCodeTester; import com.google.common.testing.NullPointerTester; import com.google.common.testing.SerializableTester; import junit.framework.Test; import junit.framework.TestCase; import junit.framework.TestSuite; import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationTargetException; import java.lang.reflect.Method; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Set; import java.util.concurrent.CopyOnWriteArrayList; /** * Unit test for {@link ImmutableList}. * * @author Kevin Bourrillion * @author George van den Driessche * @author Jared Levy */ @GwtCompatible(emulated = true) public class ImmutableListTest extends TestCase { @GwtIncompatible("suite") public static Test suite() { TestSuite suite = new TestSuite(); suite.addTest(ListTestSuiteBuilder.using(new ImmutableListOfGenerator()) .named("ImmutableList") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using(new BuilderAddAllListGenerator()) .named("ImmutableList, built with Builder.add") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using(new BuilderAddAllListGenerator()) .named("ImmutableList, built with Builder.addAll") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using(new BuilderReversedListGenerator()) .named("ImmutableList, reversed") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using( new 
ImmutableListHeadSubListGenerator()) .named("ImmutableList, head subList") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using( new ImmutableListTailSubListGenerator()) .named("ImmutableList, tail subList") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using( new ImmutableListMiddleSubListGenerator()) .named("ImmutableList, middle subList") .withFeatures(CollectionSize.ANY, CollectionFeature.SERIALIZABLE, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); suite.addTest(ListTestSuiteBuilder.using( new UnhashableElementsImmutableListGenerator()) .suppressing(ListHashCodeTester.getHashCodeMethod()) .named("ImmutableList, unhashable values") .withFeatures(CollectionSize.ANY, CollectionFeature.ALLOWS_NULL_QUERIES) .createTestSuite()); return suite; } public static class CreationTests extends TestCase { public void testCreation_noArgs() { List<String> list = ImmutableList.of(); assertEquals(Collections.emptyList(), list); } public void testCreation_oneElement() { List<String> list = ImmutableList.of("a"); assertEquals(Collections.singletonList("a"), list); } public void testCreation_twoElements() { List<String> list = ImmutableList.of("a", "b"); assertEquals(Lists.newArrayList("a", "b"), list); } public void testCreation_threeElements() { List<String> list = ImmutableList.of("a", "b", "c"); assertEquals(Lists.newArrayList("a", "b", "c"), list); } public void testCreation_fourElements() { List<String> list = ImmutableList.of("a", "b", "c", "d"); assertEquals(Lists.newArrayList("a", "b", "c", "d"), list); } public void testCreation_fiveElements() { List<String> list = ImmutableList.of("a", "b", "c", "d", "e"); assertEquals(Lists.newArrayList("a", "b", "c", "d", "e"), list); } public void testCreation_sixElements() { List<String> list = 
ImmutableList.of("a", "b", "c", "d", "e", "f"); assertEquals(Lists.newArrayList("a", "b", "c", "d", "e", "f"), list); } public void testCreation_sevenElements() { List<String> list = ImmutableList.of("a", "b", "c", "d", "e", "f", "g"); assertEquals(Lists.newArrayList("a", "b", "c", "d", "e", "f", "g"), list); } public void testCreation_eightElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h"), list); } public void testCreation_nineElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i"), list); } public void testCreation_tenElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j"), list); } public void testCreation_elevenElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k"), list); } // Varargs versions public void testCreation_twelveElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l"), list); } public void testCreation_thirteenElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m"), list); } public void testCreation_fourteenElements() { List<String> list = ImmutableList.of( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n"); assertEquals(Lists.newArrayList( "a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n"), list); } public void 
testCreation_singletonNull() { try { ImmutableList.of((String) null); fail(); } catch (NullPointerException expected) { } } public void testCreation_withNull() { try { ImmutableList.of("a", null, "b"); fail(); } catch (NullPointerException expected) { } } public void testCreation_generic() { List<String> a = ImmutableList.of("a"); // only verify that there is no compile warning ImmutableList.of(a, a); } public void testCreation_arrayOfArray() { String[] array = new String[] { "a" }; List<String[]> list = ImmutableList.<String[]>of(array); assertEquals(Collections.singletonList(array), list); } public void testCopyOf_emptyArray() { String[] array = new String[0]; List<String> list = ImmutableList.copyOf(array); assertEquals(Collections.emptyList(), list); } public void testCopyOf_arrayOfOneElement() { String[] array = new String[] { "a" }; List<String> list = ImmutableList.copyOf(array); assertEquals(Collections.singletonList("a"), list); } public void testCopyOf_nullArray() { try { ImmutableList.copyOf((String[]) null); fail(); } catch(NullPointerException expected) { } } public void testCopyOf_arrayContainingOnlyNull() { String[] array = new String[] { null }; try { ImmutableList.copyOf(array); fail(); } catch (NullPointerException expected) { } } public void testCopyOf_collection_empty() { // "<String>" is required to work around a javac 1.5 bug. 
Collection<String> c = MinimalCollection.<String>of(); List<String> list = ImmutableList.copyOf(c); assertEquals(Collections.emptyList(), list); } public void testCopyOf_collection_oneElement() { Collection<String> c = MinimalCollection.of("a"); List<String> list = ImmutableList.copyOf(c); assertEquals(Collections.singletonList("a"), list); } public void testCopyOf_collection_general() { Collection<String> c = MinimalCollection.of("a", "b", "a"); List<String> list = ImmutableList.copyOf(c); assertEquals(asList("a", "b", "a"), list); List<String> mutableList = asList("a", "b"); list = ImmutableList.copyOf(mutableList); mutableList.set(0, "c"); assertEquals(asList("a", "b"), list); } public void testCopyOf_collectionContainingNull() { Collection<String> c = MinimalCollection.of("a", null, "b"); try { ImmutableList.copyOf(c); fail(); } catch (NullPointerException expected) { } } public void testCopyOf_iterator_empty() { Iterator<String> iterator = Iterators.emptyIterator(); List<String> list = ImmutableList.copyOf(iterator); assertEquals(Collections.emptyList(), list); } public void testCopyOf_iterator_oneElement() { Iterator<String> iterator = Iterators.singletonIterator("a"); List<String> list = ImmutableList.copyOf(iterator); assertEquals(Collections.singletonList("a"), list); } public void testCopyOf_iterator_general() { Iterator<String> iterator = asList("a", "b", "a").iterator(); List<String> list = ImmutableList.copyOf(iterator); assertEquals(asList("a", "b", "a"), list); } public void testCopyOf_iteratorContainingNull() { Iterator<String> iterator = asList("a", null, "b").iterator(); try { ImmutableList.copyOf(iterator); fail(); } catch (NullPointerException expected) { } } public void testCopyOf_iteratorNull() { try { ImmutableList.copyOf((Iterator<String>) null); fail(); } catch(NullPointerException expected) { } } public void testCopyOf_concurrentlyMutating() { List<String> sample = Lists.newArrayList("a", "b", "c"); for (int delta : new int[] {-1, 0, 1}) { 
for (int i = 0; i < sample.size(); i++) { Collection<String> misleading = Helpers.misleadingSizeCollection(delta); List<String> expected = sample.subList(0, i); misleading.addAll(expected); assertEquals(expected, ImmutableList.copyOf(misleading)); assertEquals(expected, ImmutableList.copyOf((Iterable<String>) misleading)); } } } private static class CountingIterable implements Iterable<String> { int count = 0; @Override public Iterator<String> iterator() { count++; return asList("a", "b", "a").iterator(); } } public void testCopyOf_plainIterable() { CountingIterable iterable = new CountingIterable(); List<String> list = ImmutableList.copyOf(iterable); assertEquals(asList("a", "b", "a"), list); } public void testCopyOf_plainIterable_iteratesOnce() { CountingIterable iterable = new CountingIterable(); ImmutableList.copyOf(iterable); assertEquals(1, iterable.count); } public void testCopyOf_shortcut_empty() { Collection<String> c = ImmutableList.of(); assertSame(c, ImmutableList.copyOf(c)); } public void testCopyOf_shortcut_singleton() { Collection<String> c = ImmutableList.of("a"); assertSame(c, ImmutableList.copyOf(c)); } public void testCopyOf_shortcut_immutableList() { Collection<String> c = ImmutableList.of("a", "b", "c"); assertSame(c, ImmutableList.copyOf(c)); } } @GwtIncompatible("reflection") public static class ConcurrentTests extends TestCase { enum WrapWithIterable { WRAP, NO_WRAP } private static void runConcurrentlyMutatedTest( Collection<Integer> initialContents, Iterable<ListFrobber> actionsToPerformConcurrently, WrapWithIterable wrap) { ConcurrentlyMutatedList<Integer> concurrentlyMutatedList = newConcurrentlyMutatedList( initialContents, actionsToPerformConcurrently); Iterable<Integer> iterableToCopy = wrap == WrapWithIterable.WRAP ? 
unmodifiableIterable(concurrentlyMutatedList) : concurrentlyMutatedList; ImmutableList<Integer> copyOfIterable = ImmutableList.copyOf(iterableToCopy); assertTrue(concurrentlyMutatedList.getAllStates() .contains(copyOfIterable)); // Check that it's a RegularImmutableList iff it is nonempty: assertEquals(copyOfIterable.size() == 0, copyOfIterable.isEmpty()); } private static void runConcurrentlyMutatedTest(WrapWithIterable wrap) { /* * TODO: Iterate over many array sizes and all possible operation lists, * performing adds and removes in different ways. */ runConcurrentlyMutatedTest( elements(), ops(add(1), add(2)), wrap); runConcurrentlyMutatedTest( elements(), ops(add(1), nop()), wrap); runConcurrentlyMutatedTest( elements(), ops(add(1), remove()), wrap); runConcurrentlyMutatedTest( elements(), ops(nop(), add(1)), wrap); runConcurrentlyMutatedTest( elements(1), ops(remove(), nop()), wrap); runConcurrentlyMutatedTest( elements(1), ops(remove(), add(2)), wrap); runConcurrentlyMutatedTest( elements(1, 2), ops(remove(), remove()), wrap); runConcurrentlyMutatedTest( elements(1, 2), ops(remove(), nop()), wrap); runConcurrentlyMutatedTest( elements(1, 2), ops(remove(), add(3)), wrap); runConcurrentlyMutatedTest( elements(1, 2), ops(nop(), remove()), wrap); runConcurrentlyMutatedTest( elements(1, 2, 3), ops(remove(), remove()), wrap); } private static ImmutableList<Integer> elements(Integer... elements) { return ImmutableList.copyOf(elements); } private static ImmutableList<ListFrobber> ops(ListFrobber... elements) { return ImmutableList.copyOf(elements); } public void testCopyOf_concurrentlyMutatedList() { runConcurrentlyMutatedTest(WrapWithIterable.NO_WRAP); } public void testCopyOf_concurrentlyMutatedIterable() { runConcurrentlyMutatedTest(WrapWithIterable.WRAP); } /** An operation to perform on a list. 
*/ interface ListFrobber { void perform(List<Integer> list); } static final ListFrobber add(final int element) { return new ListFrobber() { @Override public void perform(List<Integer> list) { list.add(0, element); } }; } static final ListFrobber remove() { return new ListFrobber() { @Override public void perform(List<Integer> list) { list.remove(0); } }; } static final ListFrobber nop() { return new ListFrobber() { @Override public void perform(List<Integer> list) { } }; } /** * A list that mutates itself after every call to each of its {@link List} * methods. */ interface ConcurrentlyMutatedList<E> extends List<E> { /** * The elements of a {@link ConcurrentlyMutatedList} are added and removed * over time. This method returns every state that the list has passed * through at some point. */ Set<List<E>> getAllStates(); } /** * Returns a {@link ConcurrentlyMutatedList} that performs the given * operations as its concurrent modifications. The mutations occur in the * same thread as the triggering method call. */ private static ConcurrentlyMutatedList<Integer> newConcurrentlyMutatedList( final Collection<Integer> initialContents, final Iterable<ListFrobber> actionsToPerformConcurrently) { InvocationHandler invocationHandler = new InvocationHandler() { final CopyOnWriteArrayList<Integer> delegate = new CopyOnWriteArrayList<Integer>(initialContents); final Method getAllStatesMethod = getOnlyElement(asList( ConcurrentlyMutatedList.class.getDeclaredMethods())); final Iterator<ListFrobber> remainingActions = actionsToPerformConcurrently.iterator(); final Set<List<Integer>> allStates = newHashSet(); @Override public Object invoke(Object proxy, Method method, Object[] args) throws Throwable { return method.equals(getAllStatesMethod) ? 
getAllStates() : invokeListMethod(method, args); } private Set<List<Integer>> getAllStates() { return allStates; } private Object invokeListMethod(Method method, Object[] args) throws Throwable { try { Object returnValue = method.invoke(delegate, args); mutateDelegate(); return returnValue; } catch (InvocationTargetException e) { throw e.getCause(); } catch (IllegalAccessException e) { throw new AssertionError(e); } } private void mutateDelegate() { allStates.add(ImmutableList.copyOf(delegate)); remainingActions.next().perform(delegate); allStates.add(ImmutableList.copyOf(delegate)); } }; @SuppressWarnings("unchecked") ConcurrentlyMutatedList<Integer> list = (ConcurrentlyMutatedList<Integer>) newProxyInstance( ImmutableListTest.CreationTests.class.getClassLoader(), new Class[] {ConcurrentlyMutatedList.class}, invocationHandler); return list; } } public static class BasicTests extends TestCase { @GwtIncompatible("NullPointerTester") public void testNullPointers() throws Exception { NullPointerTester tester = new NullPointerTester(); tester.testAllPublicStaticMethods(ImmutableList.class); tester.testAllPublicInstanceMethods(ImmutableList.of(1, 2, 3)); } @GwtIncompatible("SerializableTester") public void testSerialization_empty() { Collection<String> c = ImmutableList.of(); assertSame(c, SerializableTester.reserialize(c)); } @GwtIncompatible("SerializableTester") public void testSerialization_singleton() { Collection<String> c = ImmutableList.of("a"); SerializableTester.reserializeAndAssert(c); } @GwtIncompatible("SerializableTester") public void testSerialization_multiple() { Collection<String> c = ImmutableList.of("a", "b", "c"); SerializableTester.reserializeAndAssert(c); } public void testEquals_immutableList() { Collection<String> c = ImmutableList.of("a", "b", "c"); assertTrue(c.equals(ImmutableList.of("a", "b", "c"))); assertFalse(c.equals(ImmutableList.of("a", "c", "b"))); assertFalse(c.equals(ImmutableList.of("a", "b"))); 
assertFalse(c.equals(ImmutableList.of("a", "b", "c", "d"))); } public void testBuilderAdd() { ImmutableList<String> list = new ImmutableList.Builder<String>() .add("a") .add("b") .add("a") .add("c") .build(); assertEquals(asList("a", "b", "a", "c"), list); } public void testBuilderAdd_varargs() { ImmutableList<String> list = new ImmutableList.Builder<String>() .add("a", "b", "a", "c") .build(); assertEquals(asList("a", "b", "a", "c"), list); } public void testBuilderAddAll_iterable() { List<String> a = asList("a", "b"); List<String> b = asList("c", "d"); ImmutableList<String> list = new ImmutableList.Builder<String>() .addAll(a) .addAll(b) .build(); assertEquals(asList( "a", "b", "c", "d"), list); b.set(0, "f"); assertEquals(asList( "a", "b", "c", "d"), list); } public void testBuilderAddAll_iterator() { List<String> a = asList("a", "b"); List<String> b = asList("c", "d"); ImmutableList<String> list = new ImmutableList.Builder<String>() .addAll(a.iterator()) .addAll(b.iterator()) .build(); assertEquals(asList( "a", "b", "c", "d"), list); b.set(0, "f"); assertEquals(asList( "a", "b", "c", "d"), list); } public void testComplexBuilder() { List<Integer> colorElem = asList(0x00, 0x33, 0x66, 0x99, 0xCC, 0xFF); ImmutableList.Builder<Integer> webSafeColorsBuilder = ImmutableList.builder(); for (Integer red : colorElem) { for (Integer green : colorElem) { for (Integer blue : colorElem) { webSafeColorsBuilder.add((red << 16) + (green << 8) + blue); } } } ImmutableList<Integer> webSafeColors = webSafeColorsBuilder.build(); assertEquals(216, webSafeColors.size()); Integer[] webSafeColorArray = webSafeColors.toArray(new Integer[webSafeColors.size()]); assertEquals(0x000000, (int) webSafeColorArray[0]); assertEquals(0x000033, (int) webSafeColorArray[1]); assertEquals(0x000066, (int) webSafeColorArray[2]); assertEquals(0x003300, (int) webSafeColorArray[6]); assertEquals(0x330000, (int) webSafeColorArray[36]); assertEquals(0x000066, (int) webSafeColors.get(2)); 
assertEquals(0x003300, (int) webSafeColors.get(6)); ImmutableList<Integer> addedColor = webSafeColorsBuilder.add(0x00BFFF).build(); assertEquals("Modifying the builder should not have changed any already" + " built sets", 216, webSafeColors.size()); assertEquals("the new array should be one bigger than webSafeColors", 217, addedColor.size()); Integer[] appendColorArray = addedColor.toArray(new Integer[addedColor.size()]); assertEquals(0x00BFFF, (int) appendColorArray[216]); } public void testBuilderAddHandlesNullsCorrectly() { ImmutableList.Builder<String> builder = ImmutableList.builder(); try { builder.add((String) null); fail("expected NullPointerException"); } catch (NullPointerException expected) { } try { builder.add((String[]) null); fail("expected NullPointerException"); } catch (NullPointerException expected) { } try { builder.add("a", null, "b"); fail("expected NullPointerException"); } catch (NullPointerException expected) { } } public void testBuilderAddAllHandlesNullsCorrectly() { ImmutableList.Builder<String> builder = ImmutableList.builder(); try { builder.addAll((Iterable<String>) null); fail("expected NullPointerException"); } catch (NullPointerException expected) { } try { builder.addAll((Iterator<String>) null); fail("expected NullPointerException"); } catch (NullPointerException expected) { } builder = ImmutableList.builder(); List<String> listWithNulls = asList("a", null, "b"); try { builder.addAll(listWithNulls); fail("expected NullPointerException"); } catch (NullPointerException expected) { } builder = ImmutableList.builder(); Iterator<String> iteratorWithNulls = asList("a", null, "b").iterator(); try { builder.addAll(iteratorWithNulls); fail("expected NullPointerException"); } catch (NullPointerException expected) { } Iterable<String> iterableWithNulls = MinimalIterable.of("a", null, "b"); try { builder.addAll(iterableWithNulls); fail("expected NullPointerException"); } catch (NullPointerException expected) { } } public void testAsList() { 
ImmutableList<String> list = ImmutableList.of("a", "b"); assertSame(list, list.asList()); } } }
package org.wikipedia.interlanguage;

import android.content.Intent;
import android.os.Bundle;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.MenuItem;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.BaseAdapter;
import android.widget.EditText;
import android.widget.ListView;
import android.widget.TextView;

import org.wikipedia.R;
import org.wikipedia.Site;
import org.wikipedia.ViewAnimations;
import org.wikipedia.WikipediaApp;
import org.wikipedia.activity.ActivityUtil;
import org.wikipedia.activity.ThemedActionBarActivity;
import org.wikipedia.history.HistoryEntry;
import org.wikipedia.MainActivity;
import org.wikipedia.page.PageTitle;
import org.wikipedia.views.WikiErrorView;

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.ListIterator;
import java.util.Locale;

import static org.wikipedia.util.StringUtil.emptyIfNull;
import static org.wikipedia.util.DeviceUtil.hideSoftKeyboard;

/**
 * Activity that shows the list of other-language versions of an article and lets the
 * user pick one. The chosen language link is returned to the caller via
 * {@link #ACTIVITY_RESULT_LANGLINK_SELECT} with the target {@link PageTitle} and a
 * {@link HistoryEntry} in the result Intent.
 */
public class LangLinksActivity extends ThemedActionBarActivity {
    public static final int ACTIVITY_RESULT_LANGLINK_SELECT = 1;
    public static final String ACTION_LANGLINKS_FOR_TITLE = "org.wikipedia.langlinks_for_title";
    public static final String EXTRA_PAGETITLE = "org.wikipedia.pagetitle";

    // Key under which the fetched language entries are kept across config changes.
    private static final String LANGUAGE_ENTRIES_BUNDLE_KEY = "languageEntries";
    // Gothic is filtered out; see updateLanguageEntriesSupported() below.
    private static final String GOTHIC_LANGUAGE_CODE = "got";

    private ArrayList<PageTitle> languageEntries;
    private PageTitle title;
    private WikipediaApp app;

    private ListView langLinksList;
    private View langLinksProgress;
    private View langLinksContainer;
    private View langLinksEmpty;
    private View langLinksNoMatch;
    private WikiErrorView langLinksError;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        app = WikipediaApp.getInstance();
        setContentView(R.layout.activity_langlinks);

        // FIX: constant-first equals() is null-safe; getIntent().getAction() may be
        // null (e.g. when started without an explicit action), which previously NPE'd.
        if (!ACTION_LANGLINKS_FOR_TITLE.equals(getIntent().getAction())) {
            throw new RuntimeException("Only ACTION_LANGLINKS_FOR_TITLE is supported");
        }

        langLinksList = (ListView) findViewById(R.id.langlinks_list);
        langLinksProgress = findViewById(R.id.langlinks_load_progress);
        langLinksContainer = findViewById(R.id.langlinks_list_container);
        langLinksEmpty = findViewById(R.id.langlinks_empty);
        langLinksNoMatch = findViewById(R.id.langlinks_no_match);
        langLinksError = (WikiErrorView) findViewById(R.id.langlinks_error);
        EditText langLinksFilter = (EditText) findViewById(R.id.langlinks_filter);

        title = getIntent().getParcelableExtra(EXTRA_PAGETITLE);

        // Restore previously fetched entries (if any) so rotation does not re-fetch.
        if (savedInstanceState != null && savedInstanceState.containsKey(LANGUAGE_ENTRIES_BUNDLE_KEY)) {
            languageEntries = savedInstanceState.getParcelableArrayList(LANGUAGE_ENTRIES_BUNDLE_KEY);
        }

        fetchLangLinks();

        langLinksError.setRetryClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                ViewAnimations.crossFade(langLinksError, langLinksProgress);
                fetchLangLinks();
            }
        });

        langLinksList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                PageTitle langLink = (PageTitle) parent.getAdapter().getItem(position);
                // Remember the chosen language so it sorts to the top next time.
                app.setMruLanguageCode(langLink.getSite().languageCode());
                HistoryEntry historyEntry = new HistoryEntry(langLink, HistoryEntry.SOURCE_LANGUAGE_LINK);
                Intent intent = new Intent()
                        .setClass(LangLinksActivity.this, MainActivity.class)
                        .setAction(MainActivity.ACTION_PAGE_FOR_TITLE)
                        .putExtra(MainActivity.EXTRA_PAGETITLE, langLink)
                        .putExtra(MainActivity.EXTRA_HISTORYENTRY, historyEntry);
                setResult(ACTIVITY_RESULT_LANGLINK_SELECT, intent);
                hideSoftKeyboard(LangLinksActivity.this);
                finish();
            }
        });

        langLinksFilter.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {
            }

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {
            }

            @Override
            public void afterTextChanged(Editable s) {
                // The languages might not be loaded yet...
                if (langLinksList.getAdapter() == null) {
                    return;
                }
                ((LangLinksAdapter) langLinksList.getAdapter()).setFilterText(s.toString());
                // Show the "no match" view when the filter eliminates every language.
                if (langLinksList.getAdapter().getCount() == 0) {
                    langLinksNoMatch.setVisibility(View.VISIBLE);
                } else {
                    langLinksNoMatch.setVisibility(View.GONE);
                }
            }
        });
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        return ActivityUtil.defaultOnOptionsItemSelected(this, item)
                || super.onOptionsItemSelected(item);
    }

    @Override
    public void onBackPressed() {
        hideSoftKeyboard(this);
        super.onBackPressed();
    }

    @Override
    protected void setTheme() {
        setActionBarTheme();
    }

    @Override
    protected void onSaveInstanceState(Bundle outState) {
        super.onSaveInstanceState(outState);
        if (languageEntries != null) {
            outState.putParcelableArrayList(LANGUAGE_ENTRIES_BUNDLE_KEY, languageEntries);
        }
    }

    /** Shows either the list of fetched entries or the "no languages" empty view. */
    private void displayLangLinks() {
        if (languageEntries.isEmpty()) {
            ViewAnimations.crossFade(langLinksProgress, langLinksEmpty);
        } else {
            langLinksList.setAdapter(new LangLinksAdapter(languageEntries, app));
            ViewAnimations.crossFade(langLinksProgress, langLinksContainer);
        }
    }

    /**
     * Fetches language links for {@link #title} unless they were already fetched
     * (restored from instance state), in which case they are displayed directly.
     */
    private void fetchLangLinks() {
        if (languageEntries == null) {
            new LangLinksFetchTask(this, title) {
                @Override
                public void onFinish(ArrayList<PageTitle> result) {
                    languageEntries = result;
                    updateLanguageEntriesSupported(languageEntries);
                    sortLanguageEntriesByMru(languageEntries);
                    displayLangLinks();
                }

                @Override
                public void onCatch(Throwable caught) {
                    ViewAnimations.crossFade(langLinksProgress, langLinksError);
                    langLinksError.setError(caught);
                }

                /** Drops or rewrites entries the app cannot render. */
                private void updateLanguageEntriesSupported(List<PageTitle> languageEntries) {
                    for (ListIterator<PageTitle> it = languageEntries.listIterator(); it.hasNext();) {
                        PageTitle link = it.next();
                        String languageCode = link.getSite().languageCode();
                        if (GOTHIC_LANGUAGE_CODE.equals(languageCode)) {
                            // Remove Gothic since it causes Android to segfault.
                            it.remove();
                        } else if (Locale.CHINESE.getLanguage().equals(languageCode)) {
                            // Replace Chinese with Simplified and Traditional dialects.
                            it.remove();
                            for (String dialect : Arrays.asList(
                                    AppLanguageLookUpTable.SIMPLIFIED_CHINESE_LANGUAGE_CODE,
                                    AppLanguageLookUpTable.TRADITIONAL_CHINESE_LANGUAGE_CODE)) {
                                it.add(new PageTitle(link.getText(), Site.forLanguageCode(dialect)));
                            }
                        }
                    }
                }

                /** Moves most-recently-used languages to the front, preserving MRU order. */
                private void sortLanguageEntriesByMru(List<PageTitle> entries) {
                    int addIndex = 0;
                    for (String language : app.getMruLanguageCodes()) {
                        for (int i = 0; i < entries.size(); i++) {
                            if (entries.get(i).getSite().languageCode().equals(language)) {
                                PageTitle entry = entries.remove(i);
                                entries.add(addIndex++, entry);
                                break;
                            }
                        }
                    }
                }
            }.execute();
        } else {
            displayLangLinks();
        }
    }

    /** List adapter that supports case-insensitive filtering by language name. */
    private static final class LangLinksAdapter extends BaseAdapter {
        private final List<PageTitle> originalLanguageEntries;
        private final List<PageTitle> languageEntries;
        private final WikipediaApp app;

        private LangLinksAdapter(List<PageTitle> languageEntries, WikipediaApp app) {
            this.originalLanguageEntries = languageEntries;
            this.languageEntries = new ArrayList<>(originalLanguageEntries);
            this.app = app;
        }

        /** Rebuilds the visible list from entries whose names contain {@code filter}. */
        public void setFilterText(String filter) {
            languageEntries.clear();
            // NOTE(review): default-locale toLowerCase() is kept for behavior
            // compatibility; Locale.ROOT would be safer for e.g. Turkish locales.
            filter = filter.toLowerCase();
            for (PageTitle entry : originalLanguageEntries) {
                String languageCode = entry.getSite().languageCode();
                String canonicalName = emptyIfNull(app.getAppLanguageCanonicalName(languageCode));
                String localizedName = emptyIfNull(app.getAppLanguageLocalizedName(languageCode));
                if (canonicalName.toLowerCase().contains(filter)
                        || localizedName.toLowerCase().contains(filter)) {
                    languageEntries.add(entry);
                }
            }
            notifyDataSetInvalidated();
        }

        @Override
        public int getCount() {
            return languageEntries.size();
        }

        @Override
        public PageTitle getItem(int position) {
            return languageEntries.get(position);
        }

        @Override
        public long getItemId(int position) {
            return position;
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            if (convertView == null) {
                convertView = LayoutInflater.from(parent.getContext())
                        .inflate(R.layout.item_language_list_entry, parent, false);
            }
            PageTitle item = getItem(position);
            String languageCode = item.getSite().languageCode();
            String localizedLanguageName = app.getAppLanguageLocalizedName(languageCode);
            // Fallback for Chinese, whose localized name may be absent from the table.
            if (localizedLanguageName == null && languageCode.equals(Locale.CHINA.getLanguage())) {
                localizedLanguageName = Locale.CHINA.getDisplayName(Locale.CHINA);
            }
            TextView localizedLanguageNameTextView =
                    (TextView) convertView.findViewById(R.id.localized_language_name);
            TextView articleTitleTextView =
                    (TextView) convertView.findViewById(R.id.language_subtitle);
            localizedLanguageNameTextView.setText(localizedLanguageName);
            articleTitleTextView.setText(item.getDisplayText());
            return convertView;
        }
    }
}
/*
 * Copyright 2002-2014 the original author or authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.springframework.web.servlet.config.annotation;

import java.io.IOException;

import javax.servlet.ServletException;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.mock.web.test.MockHttpServletRequest;
import org.springframework.mock.web.test.MockHttpServletResponse;
import org.springframework.mock.web.test.MockServletConfig;
import org.springframework.mock.web.test.MockServletContext;
import org.springframework.stereotype.Controller;
import org.springframework.ui.ModelMap;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.context.support.AnnotationConfigWebApplicationContext;
import org.springframework.web.servlet.DispatcherServlet;
import org.springframework.web.servlet.view.freemarker.FreeMarkerConfigurer;
import org.springframework.web.servlet.view.freemarker.FreeMarkerViewResolver;
import org.springframework.web.servlet.view.groovy.GroovyMarkupConfigurer;
import org.springframework.web.servlet.view.tiles3.TilesConfigurer;
import org.springframework.web.servlet.view.velocity.VelocityConfigurer;

import static org.junit.Assert.*;

/**
 * Integration tests for view resolution with {@code @EnableWebMvc}.
 *
 * @author Rossen Stoyanchev
 * @since 4.1
 */
public class ViewResolutionIntegrationTests {

	@Rule
	public final ExpectedException thrown = ExpectedException.none();


	// --- Valid configurations: each template engine renders the sample view. ---

	@Test
	public void freemarker() throws Exception {
		MockHttpServletResponse result = runTest(FreeMarkerWebConfig.class);
		assertEquals("<html><body>Hello World!</body></html>", result.getContentAsString());
	}

	@Test
	public void velocity() throws Exception {
		MockHttpServletResponse result = runTest(VelocityWebConfig.class);
		assertEquals("<html><body>Hello World!</body></html>", result.getContentAsString());
	}

	@Test
	public void tiles() throws Exception {
		// Tiles delegates to a JSP, so only the forwarded URL can be asserted.
		MockHttpServletResponse result = runTest(TilesWebConfig.class);
		assertEquals("/WEB-INF/index.jsp", result.getForwardedUrl());
	}

	@Test
	public void groovyMarkup() throws Exception {
		MockHttpServletResponse result = runTest(GroovyMarkupWebConfig.class);
		assertEquals("<html><body>Hello World!</body></html>", result.getContentAsString());
	}

	// --- Invalid configurations: registering a resolver without its configurer
	//     must fail with a descriptive message. ---

	@Test
	public void freemarkerInvalidConfig() throws Exception {
		this.thrown.expectMessage("In addition to a FreeMarker view resolver ");
		runTest(InvalidFreeMarkerWebConfig.class);
	}

	@Test
	public void velocityInvalidConfig() throws Exception {
		this.thrown.expectMessage("In addition to a Velocity view resolver ");
		runTest(InvalidVelocityWebConfig.class);
	}

	@Test
	public void tilesInvalidConfig() throws Exception {
		this.thrown.expectMessage("In addition to a Tiles view resolver ");
		runTest(InvalidTilesWebConfig.class);
	}

	@Test
	public void groovyMarkupInvalidConfig() throws Exception {
		this.thrown.expectMessage("In addition to a Groovy markup view resolver ");
		runTest(InvalidGroovyMarkupWebConfig.class);
	}

	// SPR-12013
	@Test
	public void existingViewResolver() throws Exception {
		MockHttpServletResponse result = runTest(ExistingViewResolverConfig.class);
		assertEquals("<html><body>Hello World!</body></html>", result.getContentAsString());
	}

	/**
	 * Boots a {@link DispatcherServlet} against the given {@code @Configuration}
	 * class and performs a GET for "/", returning the mock response for assertions.
	 */
	private MockHttpServletResponse runTest(Class<?> configClass) throws ServletException, IOException {
		String basePath = "org/springframework/web/servlet/config/annotation";
		MockServletContext servletContext = new MockServletContext(basePath);

		AnnotationConfigWebApplicationContext webAppContext = new AnnotationConfigWebApplicationContext();
		webAppContext.register(configClass);
		webAppContext.setServletContext(servletContext);
		webAppContext.refresh();

		DispatcherServlet dispatcherServlet = new DispatcherServlet(webAppContext);
		dispatcherServlet.init(new MockServletConfig(servletContext));

		MockHttpServletRequest request = new MockHttpServletRequest("GET", "/");
		MockHttpServletResponse response = new MockHttpServletResponse();
		dispatcherServlet.service(request, response);
		return response;
	}


	@Controller
	static class SampleController {

		@RequestMapping(value = "/", method = RequestMethod.GET)
		public String sample(ModelMap model) {
			model.addAttribute("hello", "Hello World!");
			return "index";
		}
	}

	/** Shared base: enables Web MVC and exposes the sample controller bean. */
	@EnableWebMvc
	static abstract class AbstractWebConfig extends WebMvcConfigurerAdapter {

		@Bean
		public SampleController sampleController() {
			return new SampleController();
		}
	}

	@Configuration
	static class FreeMarkerWebConfig extends AbstractWebConfig {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.freeMarker();
		}

		@Bean
		public FreeMarkerConfigurer freeMarkerConfigurer() {
			FreeMarkerConfigurer configurer = new FreeMarkerConfigurer();
			configurer.setTemplateLoaderPath("/WEB-INF/");
			return configurer;
		}
	}

	@Configuration
	static class VelocityWebConfig extends AbstractWebConfig {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.velocity();
		}

		@Bean
		public VelocityConfigurer velocityConfigurer() {
			VelocityConfigurer configurer = new VelocityConfigurer();
			configurer.setResourceLoaderPath("/WEB-INF/");
			return configurer;
		}
	}

	@Configuration
	static class TilesWebConfig extends AbstractWebConfig {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.tiles();
		}

		@Bean
		public TilesConfigurer tilesConfigurer() {
			TilesConfigurer configurer = new TilesConfigurer();
			configurer.setDefinitions("/WEB-INF/tiles.xml");
			return configurer;
		}
	}

	@Configuration
	static class GroovyMarkupWebConfig extends AbstractWebConfig {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.groovy();
		}

		@Bean
		public GroovyMarkupConfigurer groovyMarkupConfigurer() {
			GroovyMarkupConfigurer configurer = new GroovyMarkupConfigurer();
			configurer.setResourceLoaderPath("/WEB-INF/");
			return configurer;
		}
	}

	@Configuration
	static class InvalidFreeMarkerWebConfig extends WebMvcConfigurationSupport {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.freeMarker();
		}
	}

	@Configuration
	static class InvalidVelocityWebConfig extends WebMvcConfigurationSupport {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.velocity();
		}
	}

	@Configuration
	static class InvalidTilesWebConfig extends WebMvcConfigurationSupport {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.tiles();
		}
	}

	@Configuration
	static class InvalidGroovyMarkupWebConfig extends WebMvcConfigurationSupport {

		@Override
		public void configureViewResolvers(ViewResolverRegistry registry) {
			registry.groovy();
		}
	}

	/**
	 * Test @EnableWebMvc in the presence of pre-existing ViewResolver.
	 */
	@Configuration
	static class ExistingViewResolverConfig extends AbstractWebConfig {

		@Bean
		public FreeMarkerViewResolver freeMarkerViewResolver() {
			FreeMarkerViewResolver viewResolver = new FreeMarkerViewResolver();
			viewResolver.setSuffix(".ftl");
			return viewResolver;
		}

		@Bean
		public FreeMarkerConfigurer freeMarkerConfigurer() {
			FreeMarkerConfigurer configurer = new FreeMarkerConfigurer();
			configurer.setTemplateLoaderPath("/WEB-INF/");
			return configurer;
		}
	}

}
/* * Copyright 2012 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.config.heuristic.selector.move; import java.util.ArrayList; import java.util.Comparator; import java.util.List; import com.thoughtworks.xstream.annotations.XStreamImplicit; import com.thoughtworks.xstream.annotations.XStreamInclude; import org.optaplanner.core.config.heuristic.policy.HeuristicConfigPolicy; import org.optaplanner.core.config.heuristic.selector.SelectorConfig; import org.optaplanner.core.config.heuristic.selector.common.SelectionCacheType; import org.optaplanner.core.config.heuristic.selector.common.SelectionOrder; import org.optaplanner.core.config.heuristic.selector.common.decorator.SelectionSorterOrder; import org.optaplanner.core.config.heuristic.selector.move.composite.CartesianProductMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.composite.UnionMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.factory.MoveIteratorFactoryConfig; import org.optaplanner.core.config.heuristic.selector.move.factory.MoveListFactoryConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.ChangeMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.PillarChangeMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.PillarSwapMoveSelectorConfig; import 
org.optaplanner.core.config.heuristic.selector.move.generic.SwapMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.chained.KOptMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.chained.SubChainChangeMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.chained.SubChainSwapMoveSelectorConfig; import org.optaplanner.core.config.heuristic.selector.move.generic.chained.TailChainSwapMoveSelectorConfig; import org.optaplanner.core.config.util.ConfigUtils; import org.optaplanner.core.impl.heuristic.selector.common.decorator.ComparatorSelectionSorter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionFilter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionProbabilityWeightFactory; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorter; import org.optaplanner.core.impl.heuristic.selector.common.decorator.SelectionSorterWeightFactory; import org.optaplanner.core.impl.heuristic.selector.common.decorator.WeightFactorySelectionSorter; import org.optaplanner.core.impl.heuristic.selector.move.MoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.CachingMoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.FilteringMoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.ProbabilityMoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.SelectedCountLimitMoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.ShufflingMoveSelector; import org.optaplanner.core.impl.heuristic.selector.move.decorator.SortingMoveSelector; /** * General superclass for {@link ChangeMoveSelectorConfig}, etc. 
*/ @XStreamInclude({ UnionMoveSelectorConfig.class, CartesianProductMoveSelectorConfig.class, ChangeMoveSelectorConfig.class, SwapMoveSelectorConfig.class, PillarChangeMoveSelectorConfig.class, PillarSwapMoveSelectorConfig.class, TailChainSwapMoveSelectorConfig.class, KOptMoveSelectorConfig.class, SubChainChangeMoveSelectorConfig.class, SubChainSwapMoveSelectorConfig.class, MoveListFactoryConfig.class, MoveIteratorFactoryConfig.class }) public abstract class MoveSelectorConfig<C extends MoveSelectorConfig> extends SelectorConfig<C> { protected SelectionCacheType cacheType = null; protected SelectionOrder selectionOrder = null; @XStreamImplicit(itemFieldName = "filterClass") protected List<Class<? extends SelectionFilter>> filterClassList = null; protected Class<? extends Comparator> sorterComparatorClass = null; protected Class<? extends SelectionSorterWeightFactory> sorterWeightFactoryClass = null; protected SelectionSorterOrder sorterOrder = null; protected Class<? extends SelectionSorter> sorterClass = null; protected Class<? extends SelectionProbabilityWeightFactory> probabilityWeightFactoryClass = null; protected Long selectedCountLimit = null; private Double fixedProbabilityWeight = null; public SelectionCacheType getCacheType() { return cacheType; } public void setCacheType(SelectionCacheType cacheType) { this.cacheType = cacheType; } public SelectionOrder getSelectionOrder() { return selectionOrder; } public void setSelectionOrder(SelectionOrder selectionOrder) { this.selectionOrder = selectionOrder; } public List<Class<? extends SelectionFilter>> getFilterClassList() { return filterClassList; } public void setFilterClassList(List<Class<? extends SelectionFilter>> filterClassList) { this.filterClassList = filterClassList; } public Class<? extends Comparator> getSorterComparatorClass() { return sorterComparatorClass; } public void setSorterComparatorClass(Class<?
// --- Remaining bean accessors: sorter comparator / weight-factory / order / class,
// probability weight factory, selected-count limit and fixed probability weight.
// All properties are nullable; null means "not configured" and is resolved against
// inherited/ancestor configs in inheritCommon() below. ---
extends Comparator> sorterComparatorClass) { this.sorterComparatorClass = sorterComparatorClass; } public Class<? extends SelectionSorterWeightFactory> getSorterWeightFactoryClass() { return sorterWeightFactoryClass; } public void setSorterWeightFactoryClass(Class<? extends SelectionSorterWeightFactory> sorterWeightFactoryClass) { this.sorterWeightFactoryClass = sorterWeightFactoryClass; } public SelectionSorterOrder getSorterOrder() { return sorterOrder; } public void setSorterOrder(SelectionSorterOrder sorterOrder) { this.sorterOrder = sorterOrder; } public Class<? extends SelectionSorter> getSorterClass() { return sorterClass; } public void setSorterClass(Class<? extends SelectionSorter> sorterClass) { this.sorterClass = sorterClass; } public Class<? extends SelectionProbabilityWeightFactory> getProbabilityWeightFactoryClass() { return probabilityWeightFactoryClass; } public void setProbabilityWeightFactoryClass(Class<? extends SelectionProbabilityWeightFactory> probabilityWeightFactoryClass) { this.probabilityWeightFactoryClass = probabilityWeightFactoryClass; } public Long getSelectedCountLimit() { return selectedCountLimit; } public void setSelectedCountLimit(Long selectedCountLimit) { this.selectedCountLimit = selectedCountLimit; } public Double getFixedProbabilityWeight() { return fixedProbabilityWeight; } public void setFixedProbabilityWeight(Double fixedProbabilityWeight) { this.fixedProbabilityWeight = fixedProbabilityWeight; } // ************************************************************************ // Builder methods // ************************************************************************ /** * @param configPolicy never null * @param minimumCacheType never null, If caching is used (different from {@link SelectionCacheType#JUST_IN_TIME}), * then it should be at least this {@link SelectionCacheType} because an ancestor already uses such caching * and less would be pointless.
 * <p>
 * Builds the {@code MoveSelector} tree for this config: after resolving cache type and
 * selection order, the base selector is wrapped by filtering, sorting, probability,
 * shuffling, caching and selected-count-limit decorators, in that fixed order.
* @param inheritedSelectionOrder never null * @return never null */ public MoveSelector buildMoveSelector(HeuristicConfigPolicy configPolicy, SelectionCacheType minimumCacheType, SelectionOrder inheritedSelectionOrder) { MoveSelectorConfig unfoldedMoveSelectorConfig = buildUnfoldedMoveSelectorConfig(configPolicy); if (unfoldedMoveSelectorConfig != null) { return unfoldedMoveSelectorConfig.buildMoveSelector(configPolicy, minimumCacheType, inheritedSelectionOrder); } SelectionCacheType resolvedCacheType = SelectionCacheType.resolve(cacheType, minimumCacheType); SelectionOrder resolvedSelectionOrder = SelectionOrder.resolve(selectionOrder, inheritedSelectionOrder); validateCacheTypeVersusSelectionOrder(resolvedCacheType, resolvedSelectionOrder); validateSorting(resolvedSelectionOrder); validateProbability(resolvedSelectionOrder); validateSelectedLimit(minimumCacheType); MoveSelector moveSelector = buildBaseMoveSelector(configPolicy, SelectionCacheType.max(minimumCacheType, resolvedCacheType), determineBaseRandomSelection(resolvedCacheType, resolvedSelectionOrder)); moveSelector = applyFiltering(resolvedCacheType, resolvedSelectionOrder, moveSelector); moveSelector = applySorting(resolvedCacheType, resolvedSelectionOrder, moveSelector); moveSelector = applyProbability(resolvedCacheType, resolvedSelectionOrder, moveSelector); moveSelector = applyShuffling(resolvedCacheType, resolvedSelectionOrder, moveSelector); moveSelector = applyCaching(resolvedCacheType, resolvedSelectionOrder, moveSelector); moveSelector = applySelectedLimit(resolvedCacheType, resolvedSelectionOrder, moveSelector); return moveSelector; } /** * @param configPolicy never null * @return null if no unfolding is needed */ protected MoveSelectorConfig buildUnfoldedMoveSelectorConfig(HeuristicConfigPolicy configPolicy) { return null; } protected boolean determineBaseRandomSelection( SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder) { switch (resolvedSelectionOrder) { case
// ORIGINAL, SORTED, SHUFFLED and PROBABILISTIC orders never randomize the base selector
// (lower selectors stay ORIGINAL when they will be cached completely); RANDOM randomizes
// the base only when no caching will wrap it, i.e. the cache type is "not cached" or the
// base is inherently cached with no filtering configured.
ORIGINAL: return false; case SORTED: case SHUFFLED: case PROBABILISTIC: // baseValueSelector and lower should be ORIGINAL if they are going to get cached completely return false; case RANDOM: // Predict if caching will occur return resolvedCacheType.isNotCached() || (isBaseInherentlyCached() && !hasFiltering()); default: throw new IllegalStateException("The selectionOrder (" + resolvedSelectionOrder + ") is not implemented."); } } protected boolean isBaseInherentlyCached() { return false; } /** * * @param configPolicy never null * @param minimumCacheType never null, If caching is used (different from {@link SelectionCacheType#JUST_IN_TIME}), * then it should be at least this {@link SelectionCacheType} because an ancestor already uses such caching * and less would be pointless. * @param randomSelection true is equivalent to {@link SelectionOrder#RANDOM}, * false is equivalent to {@link SelectionOrder#ORIGINAL} * @return never null */ protected abstract MoveSelector buildBaseMoveSelector( HeuristicConfigPolicy configPolicy, SelectionCacheType minimumCacheType, boolean randomSelection); private boolean hasFiltering() { return !ConfigUtils.isEmptyCollection(filterClassList); } private MoveSelector applyFiltering(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if (hasFiltering()) { List<SelectionFilter> filterList = new ArrayList<>(filterClassList.size()); for (Class<?
// Instantiate each configured SelectionFilter and wrap the selector in a
// FilteringMoveSelector; when no filters are configured the selector passes through.
extends SelectionFilter> filterClass : filterClassList) { filterList.add(ConfigUtils.newInstance(this, "filterClass", filterClass)); } moveSelector = new FilteringMoveSelector(moveSelector, filterList); } return moveSelector; } private void validateSorting(SelectionOrder resolvedSelectionOrder) { if ((sorterComparatorClass != null || sorterWeightFactoryClass != null || sorterOrder != null || sorterClass != null) && resolvedSelectionOrder != SelectionOrder.SORTED) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with sorterComparatorClass (" + sorterComparatorClass + ") and sorterWeightFactoryClass (" + sorterWeightFactoryClass + ") and sorterOrder (" + sorterOrder + ") and sorterClass (" + sorterClass + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder + ") that is not " + SelectionOrder.SORTED + "."); } if (sorterComparatorClass != null && sorterWeightFactoryClass != null) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") has both a sorterComparatorClass (" + sorterComparatorClass + ") and a sorterWeightFactoryClass (" + sorterWeightFactoryClass + ")."); } if (sorterComparatorClass != null && sorterClass != null) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") has both a sorterComparatorClass (" + sorterComparatorClass + ") and a sorterClass (" + sorterClass + ")."); } if (sorterWeightFactoryClass != null && sorterClass != null) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") has both a sorterWeightFactoryClass (" + sorterWeightFactoryClass + ") and a sorterClass (" + sorterClass + ")."); } if (sorterClass != null && sorterOrder != null) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with sorterClass (" + sorterClass + ") has a non-null sorterOrder (" + sorterOrder + ")."); } } private MoveSelector applySorting(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if
// Sorting applies only for SORTED order: exactly one of sorterComparatorClass,
// sorterWeightFactoryClass or sorterClass must be set (mutual exclusion is enforced
// by validateSorting above; the else-branch below rejects the none-set case).
(resolvedSelectionOrder == SelectionOrder.SORTED) { SelectionSorter sorter; if (sorterComparatorClass != null) { Comparator<Object> sorterComparator = ConfigUtils.newInstance(this, "sorterComparatorClass", sorterComparatorClass); sorter = new ComparatorSelectionSorter(sorterComparator, SelectionSorterOrder.resolve(sorterOrder)); } else if (sorterWeightFactoryClass != null) { SelectionSorterWeightFactory sorterWeightFactory = ConfigUtils.newInstance(this, "sorterWeightFactoryClass", sorterWeightFactoryClass); sorter = new WeightFactorySelectionSorter(sorterWeightFactory, SelectionSorterOrder.resolve(sorterOrder)); } else if (sorterClass != null) { sorter = ConfigUtils.newInstance(this, "sorterClass", sorterClass); } else { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs a sorterComparatorClass (" + sorterComparatorClass + ") or a sorterWeightFactoryClass (" + sorterWeightFactoryClass + ") or a sorterClass (" + sorterClass + ")."); } moveSelector = new SortingMoveSelector(moveSelector, resolvedCacheType, sorter); } return moveSelector; } private void validateProbability(SelectionOrder resolvedSelectionOrder) { if (probabilityWeightFactoryClass != null && resolvedSelectionOrder != SelectionOrder.PROBABILISTIC) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with probabilityWeightFactoryClass (" + probabilityWeightFactoryClass + ") has a resolvedSelectionOrder (" + resolvedSelectionOrder + ") that is not " + SelectionOrder.PROBABILISTIC + "."); } } private MoveSelector applyProbability(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if (resolvedSelectionOrder == SelectionOrder.PROBABILISTIC) { if (probabilityWeightFactoryClass == null) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with resolvedSelectionOrder (" + resolvedSelectionOrder + ") needs a
probabilityWeightFactoryClass (" + probabilityWeightFactoryClass + ")."); } SelectionProbabilityWeightFactory probabilityWeightFactory = ConfigUtils.newInstance(this, "probabilityWeightFactoryClass", probabilityWeightFactoryClass); moveSelector = new ProbabilityMoveSelector(moveSelector, resolvedCacheType, probabilityWeightFactory); } return moveSelector; } private MoveSelector applyShuffling(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if (resolvedSelectionOrder == SelectionOrder.SHUFFLED) { moveSelector = new ShufflingMoveSelector(moveSelector, resolvedCacheType); } return moveSelector; } private MoveSelector applyCaching(SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if (resolvedCacheType.isCached() && resolvedCacheType.compareTo(moveSelector.getCacheType()) > 0) { moveSelector = new CachingMoveSelector(moveSelector, resolvedCacheType, resolvedSelectionOrder.toRandomSelectionBoolean()); } return moveSelector; } private void validateSelectedLimit(SelectionCacheType minimumCacheType) { if (selectedCountLimit != null && minimumCacheType.compareTo(SelectionCacheType.JUST_IN_TIME) > 0) { throw new IllegalArgumentException("The moveSelectorConfig (" + this + ") with selectedCountLimit (" + selectedCountLimit + ") has a minimumCacheType (" + minimumCacheType + ") that is higher than " + SelectionCacheType.JUST_IN_TIME + "."); } } private MoveSelector applySelectedLimit( SelectionCacheType resolvedCacheType, SelectionOrder resolvedSelectionOrder, MoveSelector moveSelector) { if (selectedCountLimit != null) { moveSelector = new SelectedCountLimitMoveSelector(moveSelector, selectedCountLimit); } return moveSelector; } /** * Gather a list of all descendant {@link MoveSelectorConfig}s * except for {@link UnionMoveSelectorConfig} and {@link CartesianProductMoveSelectorConfig}.
 * The default implementation below adds only this config itself.
* @param leafMoveSelectorConfigList not null */ public void extractLeafMoveSelectorConfigsIntoList(List<MoveSelectorConfig> leafMoveSelectorConfigList) { leafMoveSelectorConfigList.add(this); } @Override public void inherit(C inheritedConfig) { super.inherit(inheritedConfig); inheritCommon(inheritedConfig); } /** * Does not inherit subclass properties because this class and {@code foldedConfig} can be of a different type. * @param foldedConfig never null */ public void inheritFolded(MoveSelectorConfig foldedConfig) { inheritCommon(foldedConfig); } private void inheritCommon(MoveSelectorConfig inheritedConfig) { cacheType = ConfigUtils.inheritOverwritableProperty(cacheType, inheritedConfig.getCacheType()); selectionOrder = ConfigUtils.inheritOverwritableProperty(selectionOrder, inheritedConfig.getSelectionOrder()); filterClassList = ConfigUtils.inheritOverwritableProperty( filterClassList, inheritedConfig.getFilterClassList()); sorterComparatorClass = ConfigUtils.inheritOverwritableProperty( sorterComparatorClass, inheritedConfig.getSorterComparatorClass()); sorterWeightFactoryClass = ConfigUtils.inheritOverwritableProperty( sorterWeightFactoryClass, inheritedConfig.getSorterWeightFactoryClass()); sorterOrder = ConfigUtils.inheritOverwritableProperty( sorterOrder, inheritedConfig.getSorterOrder()); sorterClass = ConfigUtils.inheritOverwritableProperty( sorterClass, inheritedConfig.getSorterClass()); probabilityWeightFactoryClass = ConfigUtils.inheritOverwritableProperty( probabilityWeightFactoryClass, inheritedConfig.getProbabilityWeightFactoryClass()); selectedCountLimit = ConfigUtils.inheritOverwritableProperty( selectedCountLimit, inheritedConfig.getSelectedCountLimit()); fixedProbabilityWeight = ConfigUtils.inheritOverwritableProperty( fixedProbabilityWeight, inheritedConfig.getFixedProbabilityWeight()); } }
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.as2.api.entity; import java.io.ByteArrayInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.charset.CharsetDecoder; import java.security.PrivateKey; import java.util.ArrayList; import java.util.Collection; import java.util.Iterator; import java.util.List; import org.apache.camel.component.as2.api.AS2Charset; import org.apache.camel.component.as2.api.AS2Header; import org.apache.camel.component.as2.api.AS2MimeType; import org.apache.camel.component.as2.api.io.AS2SessionInputBuffer; import org.apache.camel.component.as2.api.util.AS2HeaderUtils; import org.apache.camel.component.as2.api.util.ContentTypeUtils; import org.apache.camel.component.as2.api.util.DispositionNotificationContentUtils; import org.apache.camel.component.as2.api.util.EntityUtils; import org.apache.camel.component.as2.api.util.HttpMessageUtils; import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpException; import org.apache.http.HttpMessage; import org.apache.http.NameValuePair; import org.apache.http.ParseException; import org.apache.http.entity.ContentType; import
// httpcore message/parser utilities and BouncyCastle CMS decryption support
// (the import list continues below).
org.apache.http.impl.io.AbstractMessageParser; import org.apache.http.impl.io.HttpTransportMetricsImpl; import org.apache.http.message.BasicLineParser; import org.apache.http.message.BasicNameValuePair; import org.apache.http.message.LineParser; import org.apache.http.message.ParserCursor; import org.apache.http.util.Args; import org.apache.http.util.CharArrayBuffer; import org.bouncycastle.cms.CMSEnvelopedData; import org.bouncycastle.cms.Recipient; import org.bouncycastle.cms.RecipientInformation; import org.bouncycastle.cms.RecipientInformationStore; import org.bouncycastle.cms.jcajce.JceKeyTransEnvelopedRecipient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public final class EntityParser { private static final Logger LOG = LoggerFactory.getLogger(EntityParser.class); private static final int DEFAULT_BUFFER_SIZE = 8 * 1024; private static final String APPLICATION_EDI_CONTENT_TYPE_PREFIX = "application/edi"; private EntityParser() { } public static boolean isBoundaryCloseDelimiter(final CharArrayBuffer buffer, ParserCursor cursor, String boundary) { Args.notNull(buffer, "Buffer"); Args.notNull(boundary, "Boundary"); String boundaryCloseDelimiter = "--" + boundary + "--"; // boundary // close-delimiter // - RFC2046 // 5.1.1 if (cursor == null) { cursor = new ParserCursor(0, boundaryCloseDelimiter.length()); } int indexFrom = cursor.getPos(); int indexTo = cursor.getUpperBound(); if ((indexFrom + boundaryCloseDelimiter.length()) > indexTo) { return false; } for (int i = indexFrom; i < indexTo; ++i) { if (buffer.charAt(i) != boundaryCloseDelimiter.charAt(i)) { return false; } } return true; } public static boolean isBoundaryDelimiter(final CharArrayBuffer buffer, ParserCursor cursor, String boundary) { Args.notNull(buffer, "Buffer"); Args.notNull(boundary, "Boundary"); String boundaryDelimiter = "--" + boundary; // boundary delimiter - // RFC2046 5.1.1 if (cursor == null) { cursor = new ParserCursor(0, boundaryDelimiter.length()); } int indexFrom =
// Compare the buffered line against "--boundary" (RFC 2046 section 5.1.1).
// NOTE(review): both boundary checks above/below index the delimiter string with the
// absolute buffer position i; for a cursor whose start position is non-zero this
// misaligns, and when the cursor's upper bound exceeds the delimiter length the loop
// indexes past the delimiter's end — confirm cursors always start at 0 and are bounded
// by the delimiter length.
cursor.getPos(); int indexTo = cursor.getUpperBound(); if ((indexFrom + boundaryDelimiter.length()) > indexTo) { return false; } for (int i = indexFrom; i < indexTo; ++i) { if (buffer.charAt(i) != boundaryDelimiter.charAt(i)) { return false; } } return true; } public static void skipPreambleAndStartBoundary(AS2SessionInputBuffer inbuffer, String boundary) throws HttpException { boolean foundStartBoundary; try { foundStartBoundary = false; CharArrayBuffer lineBuffer = new CharArrayBuffer(1024); while (inbuffer.readLine(lineBuffer) != -1) { final ParserCursor cursor = new ParserCursor(0, lineBuffer.length()); if (isBoundaryDelimiter(lineBuffer, cursor, boundary)) { foundStartBoundary = true; break; } lineBuffer.clear(); } } catch (Exception e) { throw new HttpException("Failed to read start boundary for body part", e); } if (!foundStartBoundary) { throw new HttpException("Failed to find start boundary for body part"); } } public static void skipToBoundary(AS2SessionInputBuffer inbuffer, String boundary) throws HttpException { boolean foundEndBoundary; try { foundEndBoundary = false; CharArrayBuffer lineBuffer = new CharArrayBuffer(1024); while (inbuffer.readLine(lineBuffer) != -1) { final ParserCursor cursor = new ParserCursor(0, lineBuffer.length()); if (isBoundaryDelimiter(lineBuffer, cursor, boundary)) { foundEndBoundary = true; break; } lineBuffer.clear(); } } catch (Exception e) { throw new HttpException("Failed to read start boundary for body part", e); } if (!foundEndBoundary && boundary != null) { throw new HttpException("Failed to find start boundary for body part"); } } public static MimeEntity parseEnvelopedEntity(byte[] envelopedContent, PrivateKey privateKey) { try { byte[] decryptedContent = decryptData(envelopedContent, privateKey); InputStream is = new ByteArrayInputStream(decryptedContent); AS2SessionInputBuffer inbuffer = new AS2SessionInputBuffer(new HttpTransportMetricsImpl(), DEFAULT_BUFFER_SIZE); inbuffer.bind(is); // Read Text Report Body Part
// NOTE(review): skipToBoundary above reuses the "start boundary" wording in its error
// messages even though it scans for a trailing boundary — the messages are misleading.
// Below: parse the MIME headers of the decrypted entity, locate its Content-Type and
// Content-Transfer-Encoding, and delegate body parsing.
// NOTE(review): parseEnvelopedEntity catches all exceptions and returns null, so callers
// cannot distinguish decryption failure from parse failure — consider propagating.
Headers Header[] headers = AbstractMessageParser.parseHeaders(inbuffer, -1, -1, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); // Get Content-Type and Content-Transfer-Encoding ContentType envelopedEntityContentType = null; String envelopedEntityContentTransferEncoding = null; for (Header header : headers) { switch (header.getName()) { case AS2Header.CONTENT_TYPE: envelopedEntityContentType = ContentType.parse(header.getValue()); break; case AS2Header.CONTENT_TRANSFER_ENCODING: envelopedEntityContentTransferEncoding = header.getValue(); break; default: continue; } } if (envelopedEntityContentType == null) { throw new HttpException("Failed to find Content-Type header in enveloped entity"); } MimeEntity entity = parseEntityBody(inbuffer, null, envelopedEntityContentType, envelopedEntityContentTransferEncoding, headers); entity.removeAllHeaders(); entity.setHeaders(headers); return entity; } catch (Exception e) { return null; } } public static byte[] decryptData(byte[] encryptedData, PrivateKey privateKey) throws Exception { // Create enveloped data from encrypted data CMSEnvelopedData cmsEnvelopedData = new CMSEnvelopedData(encryptedData); // Extract recipient information form enveloped data. RecipientInformationStore recipientsInformationStore = cmsEnvelopedData.getRecipientInfos(); Collection<RecipientInformation> recipients = recipientsInformationStore.getRecipients(); Iterator<RecipientInformation> it = recipients.iterator(); // Decrypt if enveloped data contains recipient information if (it.hasNext()) { // Create recipient from private key.
// Decrypt using the first recipient-info entry with the supplied private key;
// returns null when the enveloped data carries no recipient information.
Recipient recipient = new JceKeyTransEnvelopedRecipient(privateKey); // Extract decrypted data from recipient information RecipientInformation recipientInfo = it.next(); return recipientInfo.getContent(recipient); } return null; } public static void parseMultipartSignedEntity(HttpMessage message) throws HttpException { MultipartSignedEntity multipartSignedEntity = null; HttpEntity entity = Args.notNull(EntityUtils.getMessageEntity(message), "message entity"); if (entity instanceof MultipartSignedEntity) { return; } Args.check(entity.isStreaming(), "Entity is not streaming"); try { // Determine and validate the Content Type Header contentTypeHeader = entity.getContentType(); if (contentTypeHeader == null) { throw new HttpException("Content-Type header is missing"); } ContentType contentType = ContentType.parse(entity.getContentType().getValue()); if (!contentType.getMimeType().equals(AS2MimeType.MULTIPART_SIGNED)) { throw new HttpException("Entity has invalid MIME type '" + contentType.getMimeType() + "'"); } // Determine Charset String charsetName = AS2Charset.US_ASCII; Charset charset = contentType.getCharset(); if (charset != null) { charsetName = charset.name(); } // Determine content transfer encoding String contentTransferEncoding = HttpMessageUtils.getHeaderValue(message, AS2Header.CONTENT_TRANSFER_ENCODING); AS2SessionInputBuffer inbuffer = new AS2SessionInputBuffer(new HttpTransportMetricsImpl(), DEFAULT_BUFFER_SIZE); inbuffer.bind(entity.getContent()); // Get Boundary Value String boundary = HttpMessageUtils.getParameterValue(message, AS2Header.CONTENT_TYPE, "boundary"); if (boundary == null) { throw new HttpException("Failed to retrieve 'boundary' parameter from content type header"); } // Get Micalg Value String micalg = HttpMessageUtils.getParameterValue(message, AS2Header.CONTENT_TYPE, "micalg"); if (micalg == null) { throw new HttpException("Failed to retrieve 'micalg' parameter from content type header"); } multipartSignedEntity =
// Delegate body parsing, mark the parsed entity as the message's main body,
// and install it on the message in place of the streaming entity.
parseMultipartSignedEntityBody(inbuffer, boundary, micalg, charsetName, contentTransferEncoding); multipartSignedEntity.setMainBody(true); EntityUtils.setMessageEntity(message, multipartSignedEntity); } catch (HttpException e) { throw e; } catch (Exception e) { throw new HttpException("Failed to parse entity content", e); } } public static void parseApplicationEDIEntity(HttpMessage message) throws HttpException { ApplicationEDIEntity applicationEDIEntity = null; HttpEntity entity = Args.notNull(EntityUtils.getMessageEntity(message), "message entity"); if (entity instanceof ApplicationEDIEntity) { return; } Args.check(entity.isStreaming(), "Entity is not streaming"); try { // Determine and validate the Content Type Header contentTypeHeader = entity.getContentType(); if (contentTypeHeader == null) { throw new HttpException("Content-Type header is missing"); } ContentType contentType = ContentType.parse(entity.getContentType().getValue()); if (!contentType.getMimeType().startsWith(EntityParser.APPLICATION_EDI_CONTENT_TYPE_PREFIX)) { throw new HttpException("Entity has invalid MIME type '" + contentType.getMimeType() + "'"); } // Determine Transfer Encoding Header transferEncoding = entity.getContentEncoding(); String contentTransferEncoding = transferEncoding == null ?
// The Content-Transfer-Encoding header may be absent, in which case null is passed through.
null : transferEncoding.getValue(); AS2SessionInputBuffer inBuffer = new AS2SessionInputBuffer(new HttpTransportMetricsImpl(), 8 * 1024); inBuffer.bind(entity.getContent()); applicationEDIEntity = parseEDIEntityBody(inBuffer, null, contentType, contentTransferEncoding); applicationEDIEntity.setMainBody(true); EntityUtils.setMessageEntity(message, applicationEDIEntity); } catch (HttpException e) { throw e; } catch (Exception e) { throw new HttpException("Failed to parse entity content", e); } } public static void parseMessageDispositionNotificationReportEntity(HttpMessage message) throws HttpException { DispositionNotificationMultipartReportEntity dispositionNotificationMultipartReportEntity = null; HttpEntity entity = Args.notNull(EntityUtils.getMessageEntity(message), "message entity"); if (entity instanceof DispositionNotificationMultipartReportEntity) { return; } Args.check(entity.isStreaming(), "Entity is not streaming"); try { // Determine and validate the Content Type Header contentTypeHeader = entity.getContentType(); if (contentTypeHeader == null) { throw new HttpException("Content-Type header is missing"); } ContentType contentType = ContentType.parse(entity.getContentType().getValue()); if (!contentType.getMimeType().equals(AS2MimeType.MULTIPART_REPORT)) { throw new HttpException("Entity has invalid MIME type '" + contentType.getMimeType() + "'"); } // Determine Charset String charsetName = AS2Charset.US_ASCII; Charset charset = contentType.getCharset(); if (charset != null) { charsetName = charset.name(); } // Determine content transfer encoding String contentTransferEncoding = HttpMessageUtils.getHeaderValue(message, AS2Header.CONTENT_TRANSFER_ENCODING); AS2SessionInputBuffer inbuffer = new AS2SessionInputBuffer(new HttpTransportMetricsImpl(), 8 * 1024); inbuffer.bind(entity.getContent()); // Get Boundary Value String boundary = HttpMessageUtils.getParameterValue(message, AS2Header.CONTENT_TYPE, "boundary"); if (boundary == null) { throw new
// NOTE(review): the exception message below misspells "retrieve" ("retrive"); left
// unchanged here because it is runtime-visible text.
HttpException("Failed to retrive boundary value"); } dispositionNotificationMultipartReportEntity = parseMultipartReportEntityBody(inbuffer, boundary, charsetName, contentTransferEncoding); EntityUtils.setMessageEntity(message, dispositionNotificationMultipartReportEntity); } catch (HttpException e) { throw e; } catch (Exception e) { throw new HttpException("Failed to parse entity content", e); } } public static void parseAS2MessageEntity(HttpMessage message) throws HttpException { if (EntityUtils.hasEntity(message)) { String contentTypeStr = HttpMessageUtils.getHeaderValue(message, AS2Header.CONTENT_TYPE); if (contentTypeStr != null) { ContentType contentType; try { contentType = ContentType.parse(contentTypeStr); } catch (Exception e) { LOG.debug("Failed to get content type of message", e); return; } switch (contentType.getMimeType().toLowerCase()) { case AS2MimeType.APPLICATION_EDIFACT: case AS2MimeType.APPLICATION_EDI_X12: case AS2MimeType.APPLICATION_EDI_CONSENT: parseApplicationEDIEntity(message); break; case AS2MimeType.MULTIPART_SIGNED: parseMultipartSignedEntity(message); break; case AS2MimeType.APPLICATION_PKCS7_MIME: break; case AS2MimeType.MULTIPART_REPORT: parseMessageDispositionNotificationReportEntity(message); break; default: break; } } } } public static MultipartSignedEntity parseMultipartSignedEntityBody(AS2SessionInputBuffer inbuffer, String boundary, String micalg, String charsetName, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { if (charsetName == null) { charsetName = AS2Charset.US_ASCII; } Charset charset = Charset.forName(charsetName); CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); MultipartSignedEntity multipartSignedEntity = new MultipartSignedEntity(boundary, false); // Skip Preamble and Start Boundary line skipPreambleAndStartBoundary(inbuffer, boundary); // // Parse Signed Entity Part // // Read Text Report
// First part of the multipart/signed body: the signed entity. Its headers are read,
// then the body is parsed according to its own Content-Type.
Body Part Headers Header[] headers = AbstractMessageParser.parseHeaders(inbuffer, -1, -1, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); // Get Content-Type and Content-Transfer-Encoding ContentType signedEntityContentType = null; String signedEntityContentTransferEncoding = null; for (Header header : headers) { switch (header.getName()) { case AS2Header.CONTENT_TYPE: signedEntityContentType = ContentType.parse(header.getValue()); break; case AS2Header.CONTENT_TRANSFER_ENCODING: signedEntityContentTransferEncoding = header.getValue(); break; default: continue; } } if (signedEntityContentType == null) { throw new HttpException("Failed to find Content-Type header in signed entity body part"); } MimeEntity signedEntity = parseEntityBody(inbuffer, boundary, signedEntityContentType, signedEntityContentTransferEncoding, headers); signedEntity.removeAllHeaders(); signedEntity.setHeaders(headers); multipartSignedEntity.addPart(signedEntity); // // End Signed Entity Part // // Parse Signature Body Part // // Read Signature Body Part Headers headers = AbstractMessageParser.parseHeaders(inbuffer, -1, -1, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); // Get Content-Type and Content-Transfer-Encoding ContentType signatureContentType = null; String signatureContentTransferEncoding = null; for (Header header : headers) { switch (header.getName()) { case AS2Header.CONTENT_TYPE: signatureContentType = ContentType.parse(header.getValue()); break; case AS2Header.CONTENT_TRANSFER_ENCODING: signatureContentTransferEncoding = header.getValue(); break; default: continue; } } if (signatureContentType == null) { throw new HttpException("Failed to find Content-Type header in signature body part"); } if (!ContentTypeUtils.isPkcs7SignatureType(signatureContentType)) { throw new HttpException( "Invalid content type '" + signatureContentType.getMimeType() + "' for signature body part"); } ApplicationPkcs7SignatureEntity applicationPkcs7SignatureEntity =
// Second part: the application/pkcs7-signature body part (validated above).
parseApplicationPkcs7SignatureEntityBody(inbuffer, boundary, signatureContentType, signatureContentTransferEncoding); applicationPkcs7SignatureEntity.removeAllHeaders(); applicationPkcs7SignatureEntity.setHeaders(headers); multipartSignedEntity.addPart(applicationPkcs7SignatureEntity); // // End Signature Body Part NameValuePair[] parameters = new NameValuePair[] { new BasicNameValuePair("protocol", AS2MimeType.APPLICATION_PKCS7_SIGNATURE), new BasicNameValuePair("boundary", boundary), new BasicNameValuePair("micalg", micalg), new BasicNameValuePair("charset", charsetName)}; ContentType contentType = ContentType.create(AS2MimeType.MULTIPART_SIGNED, parameters); multipartSignedEntity.setContentType(contentType); multipartSignedEntity.setContentTransferEncoding(contentTransferEncoding); return multipartSignedEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse text entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static DispositionNotificationMultipartReportEntity parseMultipartReportEntityBody(AS2SessionInputBuffer inbuffer, String boundary, String charsetName, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { if (charsetName == null) { charsetName = AS2Charset.US_ASCII; } Charset charset = Charset.forName(charsetName); CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); DispositionNotificationMultipartReportEntity dispositionNotificationMultipartReportEntity = new DispositionNotificationMultipartReportEntity(boundary, false); // Skip Preamble and Start Boundary line skipPreambleAndStartBoundary(inbuffer, boundary); // // Parse Text Report Body Part // // Read Text Report Body Part Headers Header[] headers = AbstractMessageParser.parseHeaders(inbuffer, -1, -1, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); //
// First part of the multipart/report: must be text/plain (the human-readable report);
// rejected below otherwise.
Get Content-Type and Content-Transfer-Encoding ContentType textReportContentType = null; String textReportContentTransferEncoding = null; for (Header header : headers) { switch (header.getName()) { case AS2Header.CONTENT_TYPE: textReportContentType = ContentType.parse(header.getValue()); break; case AS2Header.CONTENT_TRANSFER_ENCODING: textReportContentTransferEncoding = header.getValue(); break; default: continue; } } if (textReportContentType == null) { throw new HttpException("Failed to find Content-Type header in EDI message body part"); } if (!textReportContentType.getMimeType().equalsIgnoreCase(AS2MimeType.TEXT_PLAIN)) { throw new HttpException("Invalid content type '" + textReportContentType.getMimeType() + "' for first body part of disposition notification"); } String textReportCharsetName = textReportContentType.getCharset() == null ? AS2Charset.US_ASCII : textReportContentType.getCharset().name(); TextPlainEntity textReportEntity = parseTextPlainEntityBody(inbuffer, boundary, textReportCharsetName, textReportContentTransferEncoding); textReportEntity.setHeaders(headers); dispositionNotificationMultipartReportEntity.addPart(textReportEntity); // // End Text Report Body Part // // Parse Disposition Notification Body Part // // Read Disposition Notification Body Part Headers headers = AbstractMessageParser.parseHeaders(inbuffer, -1, -1, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); // Get Content-Type and Content-Transfer-Encoding ContentType dispositionNotificationContentType = null; String dispositionNotificationContentTransferEncoding = null; for (Header header : headers) { switch (header.getName()) { case AS2Header.CONTENT_TYPE: dispositionNotificationContentType = ContentType.parse(header.getValue()); break; case AS2Header.CONTENT_TRANSFER_ENCODING: dispositionNotificationContentTransferEncoding = header.getValue(); break; default: continue; } } if (dispositionNotificationContentType == null) { throw new HttpException("Failed to find
Content-Type header in body part"); } if (!dispositionNotificationContentType.getMimeType() .equalsIgnoreCase(AS2MimeType.MESSAGE_DISPOSITION_NOTIFICATION)) { throw new HttpException("Invalid content type '" + dispositionNotificationContentType.getMimeType() + "' for second body part of disposition notification"); } String dispositionNotificationCharsetName = dispositionNotificationContentType.getCharset() == null ? AS2Charset.US_ASCII : dispositionNotificationContentType.getCharset().name(); AS2MessageDispositionNotificationEntity messageDispositionNotificationEntity = parseMessageDispositionNotificationEntityBody( inbuffer, boundary, dispositionNotificationCharsetName, dispositionNotificationContentTransferEncoding); messageDispositionNotificationEntity.setHeaders(headers); dispositionNotificationMultipartReportEntity.addPart(messageDispositionNotificationEntity); // // End Disposition Notification Body Part dispositionNotificationMultipartReportEntity.setContentTransferEncoding(contentTransferEncoding); return dispositionNotificationMultipartReportEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse text entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static TextPlainEntity parseTextPlainEntityBody(AS2SessionInputBuffer inbuffer, String boundary, String charsetName, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { if (charsetName == null) { charsetName = AS2Charset.US_ASCII; } Charset charset = Charset.forName(charsetName); CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); String text = parseBodyPartText(inbuffer, boundary); if (contentTransferEncoding != null) { text = EntityUtils.decode(text, charset, contentTransferEncoding); } return new TextPlainEntity(text, charsetName, contentTransferEncoding, false); }
// Failures are wrapped in a ParseException with the original cause preserved via
// initCause; the buffer's previous charset decoder is always restored in finally.
catch (Exception e) { ParseException parseException = new ParseException("failed to parse text entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static AS2MessageDispositionNotificationEntity parseMessageDispositionNotificationEntityBody(AS2SessionInputBuffer inbuffer, String boundary, String charsetName, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { if (charsetName == null) { charsetName = AS2Charset.US_ASCII; } Charset charset = Charset.forName(charsetName); CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); List<CharArrayBuffer> dispositionNotificationFields = parseBodyPartFields(inbuffer, boundary, BasicLineParser.INSTANCE, new ArrayList<CharArrayBuffer>()); AS2MessageDispositionNotificationEntity as2MessageDispositionNotificationEntity = DispositionNotificationContentUtils.parseDispositionNotification(dispositionNotificationFields); ContentType contentType = ContentType.create(AS2MimeType.MESSAGE_DISPOSITION_NOTIFICATION, charset); as2MessageDispositionNotificationEntity.setContentType(contentType); return as2MessageDispositionNotificationEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse MDN entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static MimeEntity parseEntityBody(AS2SessionInputBuffer inbuffer, String boundary, ContentType entityContentType, String contentTransferEncoding, Header[] headers) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { Charset charset = entityContentType.getCharset(); if (charset == null) { charset = Charset.forName(AS2Charset.US_ASCII); } CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); MimeEntity entity =
// Dispatch on the entity's MIME type (EDI, multipart/signed, MDN, multipart/report,
// text/plain, pkcs7-signature); unrecognized types fall through the switch and
// this method returns null.
null; switch (entityContentType.getMimeType().toLowerCase()) { case AS2MimeType.APPLICATION_EDIFACT: case AS2MimeType.APPLICATION_EDI_X12: case AS2MimeType.APPLICATION_EDI_CONSENT: entity = parseEDIEntityBody(inbuffer, boundary, entityContentType, contentTransferEncoding); break; case AS2MimeType.MULTIPART_SIGNED: String multipartSignedBoundary = AS2HeaderUtils.getParameterValue(headers, AS2Header.CONTENT_TYPE, "boundary"); String micalg = AS2HeaderUtils.getParameterValue(headers, AS2Header.CONTENT_TYPE, "micalg"); entity = parseMultipartSignedEntityBody(inbuffer, multipartSignedBoundary, micalg, charset.name(), contentTransferEncoding); skipToBoundary(inbuffer, boundary); break; case AS2MimeType.MESSAGE_DISPOSITION_NOTIFICATION: entity = parseMessageDispositionNotificationEntityBody(inbuffer, boundary, charset.name(), contentTransferEncoding); break; case AS2MimeType.MULTIPART_REPORT: String multipartReportBoundary = AS2HeaderUtils.getParameterValue(headers, AS2Header.CONTENT_TYPE, "boundary"); entity = parseMultipartReportEntityBody(inbuffer, multipartReportBoundary, charset.name(), contentTransferEncoding); skipToBoundary(inbuffer, boundary); break; case AS2MimeType.TEXT_PLAIN: entity = parseTextPlainEntityBody(inbuffer, boundary, charset.name(), contentTransferEncoding); break; case AS2MimeType.APPLICATION_PKCS7_SIGNATURE: entity = parseApplicationPkcs7SignatureEntityBody(inbuffer, boundary, entityContentType, contentTransferEncoding); break; default: break; } return entity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse EDI entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static ApplicationEDIEntity parseEDIEntityBody(AS2SessionInputBuffer inbuffer, String boundary, ContentType ediMessageContentType, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { Charset
charset = ediMessageContentType.getCharset(); if (charset == null) { charset = Charset.forName(AS2Charset.US_ASCII); } CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); String ediMessageBodyPartContent = parseBodyPartText(inbuffer, boundary); if (contentTransferEncoding != null) { ediMessageBodyPartContent = EntityUtils.decode(ediMessageBodyPartContent, charset, contentTransferEncoding); } ApplicationEDIEntity applicationEDIEntity = EntityUtils.createEDIEntity(ediMessageBodyPartContent, ediMessageContentType, contentTransferEncoding, false); return applicationEDIEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse EDI entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static ApplicationPkcs7SignatureEntity parseApplicationPkcs7SignatureEntityBody(AS2SessionInputBuffer inbuffer, String boundary, ContentType contentType, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { Charset charset = contentType.getCharset(); if (charset == null) { charset = Charset.forName(AS2Charset.US_ASCII); } CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); String pkcs7SignatureBodyContent = parseBodyPartText(inbuffer, boundary); byte[] signature = EntityUtils.decode(pkcs7SignatureBodyContent.getBytes(charset), contentTransferEncoding); String charsetName = charset.toString(); ApplicationPkcs7SignatureEntity applicationPkcs7SignatureEntity = new ApplicationPkcs7SignatureEntity( signature, charsetName, contentTransferEncoding, false); return applicationPkcs7SignatureEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse PKCS7 Signature entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } 
public static ApplicationPkcs7MimeEntity parseApplicationPkcs7MimeEntityBody(AS2SessionInputBuffer inbuffer, String boundary, ContentType contentType, String contentTransferEncoding) throws ParseException { CharsetDecoder previousDecoder = inbuffer.getCharsetDecoder(); try { Charset charset = contentType.getCharset(); if (charset == null) { charset = Charset.forName(AS2Charset.US_ASCII); } CharsetDecoder charsetDecoder = charset.newDecoder(); inbuffer.setCharsetDecoder(charsetDecoder); String pkcs7EncryptedBodyContent = parseBodyPartText(inbuffer, boundary); byte[] encryptedContent = EntityUtils.decode(pkcs7EncryptedBodyContent.getBytes(charset), contentTransferEncoding); ApplicationPkcs7MimeEntity applicationPkcs7MimeEntity = new ApplicationPkcs7MimeEntity( encryptedContent, contentTransferEncoding, false); return applicationPkcs7MimeEntity; } catch (Exception e) { ParseException parseException = new ParseException("failed to parse PKCS7 Mime entity"); parseException.initCause(e); throw parseException; } finally { inbuffer.setCharsetDecoder(previousDecoder); } } public static String parseBodyPartText(final AS2SessionInputBuffer inbuffer, final String boundary) throws IOException { CharArrayBuffer buffer = new CharArrayBuffer(DEFAULT_BUFFER_SIZE); CharArrayBuffer line = new CharArrayBuffer(DEFAULT_BUFFER_SIZE); while (true) { final int l = inbuffer.readLine(line); if (l == -1) { break; } if (boundary != null && isBoundaryDelimiter(line, null, boundary)) { // remove last CRLF from buffer which belongs to boundary int length = buffer.length(); buffer.setLength(length - 2); break; } buffer.append(line); if (inbuffer.isLastLineReadTerminatedByLineFeed()) { buffer.append("\r\n"); } line.clear(); } return buffer.toString(); } public static List<CharArrayBuffer> parseBodyPartFields(final AS2SessionInputBuffer inbuffer, final String boundary, final LineParser parser, final List<CharArrayBuffer> fields) throws IOException { Args.notNull(parser, "parser"); 
Args.notNull(fields, "fields"); CharArrayBuffer current = null; CharArrayBuffer previous = null; while (true) { if (current == null) { current = new CharArrayBuffer(64); } final int l = inbuffer.readLine(current); if (l == -1 || current.length() < 1) { break; } if (boundary != null && isBoundaryDelimiter(current, null, boundary)) { break; } // check if current line part of folded headers if ((current.charAt(0) == ' ' || current.charAt(0) == '\t') && previous != null) { // we have continuation of folded header : append value int i = 0; while (i < current.length()) { final char ch = current.charAt(i); if (ch != ' ' && ch != '\t') { break; } i++; } // Just append current line to previous line previous.append(' '); previous.append(current, i, current.length() - i); // leave current line buffer for reuse for next header current.clear(); } else { fields.add(current); previous = current; current = null; } } return fields; } }
package com.james.status.data;

import android.content.Context;
import android.content.SharedPreferences;
import android.graphics.Color;
import android.graphics.Typeface;
import android.os.Build;
import android.os.Environment;
import android.preference.PreferenceManager;
import android.support.annotation.Nullable;
import android.support.v4.app.NotificationCompat;
import android.widget.Toast;

import com.google.gson.Gson;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.Map;
import java.util.Set;

/**
 * Typed registry of all app preferences, each constant pairing a SharedPreferences
 * key (optionally a format template parameterized by e.g. an icon id) with its
 * default value. Arrays are persisted as one entry per element ("key-0", "key-1",
 * ...) plus a "key-length" entry.
 */
public enum PreferenceData {
    PREF_VERSION(0),

    STATUS_ENABLED(false),
    STATUS_NOTIFICATIONS_COMPAT(false),
    //STATUS_NOTIFICATIONS_HEADS_UP(false), TODO: #137
    STATUS_COLOR_AUTO(true),
    STATUS_COLOR(Color.BLACK),
    STATUS_HOME_TRANSPARENT(true),
    STATUS_ICON_COLOR(Color.WHITE),
    STATUS_ICON_TEXT_COLOR(Color.WHITE),
    STATUS_DARK_ICON_COLOR(Color.argb(150, 0, 0, 0)),
    STATUS_DARK_ICON_TEXT_COLOR(Color.argb(150, 0, 0, 0)),
    STATUS_DARK_ICONS(true),
    //STATUS_TINTED_ICONS(false), TODO: #137
    //STATUS_BUMP_MODE(false), TODO: #137
    STATUS_BACKGROUND_ANIMATIONS(true),
    STATUS_ICON_ANIMATIONS(true),
    STATUS_HIDE_ON_VOLUME(Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP),
    STATUS_PERSISTENT_NOTIFICATION(true),
    STATUS_IGNORE_PERMISSION_CHECKING(false),
    STATUS_TRANSPARENT_MODE(false),
    STATUS_BURNIN_PROTECTION(false),
    STATUS_SIDE_PADDING(6),
    STATUS_HEIGHT(0),

    // Per-icon preferences: "%1$s" is substituted with the icon's identifier
    // via getName(args).
    ICON_VISIBILITY("%1$s/VISIBILITY", true),
    ICON_POSITION("%1$s/POSITION", 0),
    ICON_GRAVITY("%1$s/GRAVITY", 0),
    ICON_TEXT_VISIBILITY("%1$s/TEXT_VISIBILITY", false),
    ICON_TEXT_FORMAT("%1$s/TEXT_FORMAT", "h:mm a"),
    ICON_TEXT_SIZE("%1$s/TEXT_SIZE", 14),
    ICON_TEXT_COLOR_LIGHT("%1$s/TEXT_COLOR_LIGHT", 0),
    ICON_TEXT_COLOR_DARK("%1$s/TEXT_COLOR_DARK", 0),
    ICON_TEXT_TYPEFACE("%1$s/TEXT_TYPEFACE", ""),
    ICON_TEXT_EFFECT("%1$s/TEXT_EFFECT", Typeface.BOLD),
    ICON_ICON_VISIBILITY("%1$s/ICON_VISIBILITY", true),
    ICON_ICON_COLOR_LIGHT("%1$s/ICON_COLOR_LIGHT", 0),
    ICON_ICON_COLOR_DARK("%1$s/ICON_COLOR_DARK", 0),
    ICON_ICON_STYLE("%1$s/ICON_STYLE", ""),
    ICON_ICON_STYLE_NAMES("%1$s/ICON_STYLE_NAMES", new String[]{}),
    ICON_ICON_PADDING("%1$s/ICON_PADDING", 2),
    ICON_ICON_SCALE("%1$s/ICON_SCALE", 18),
    ICON_ICON_OFFSET_X("%1$s/ICON_ICON_OFFSET_X", 0),
    ICON_ICON_OFFSET_Y("%1$s/ICON_ICON_OFFSET_Y", 0),
    ICON_TEXT_OFFSET_X("%1$s/ICON_TEXT_OFFSET_X", 0),
    ICON_TEXT_OFFSET_Y("%1$s/ICON_TEXT_OFFSET_Y", 0),

    // Per-app preferences: "%1$s" is substituted with the app's identifier.
    APP_COLOR("%1$s/APP_COLOR", 0),
    APP_COLOR_CACHE("%1$s/APP_COLOR_CACHE", 0),
    APP_COLOR_CACHE_VERSION("%1$s/APP_COLOR_CACHE_VERSION", 0),
    APP_FULLSCREEN("%1$s/APP_FULLSCREEN", false),
    APP_FULLSCREEN_IGNORE("%1$s/APP_FULLSCREEN_IGNORE", false),
    APP_NOTIFICATIONS("%1$s/APP_NOTIFICATIONS", true),
    APP_NOTIFICATIONS_MIN_PRIORITY(NotificationCompat.PRIORITY_LOW),
    APP_NOTIFICATIONS_IGNORE_ONGOING(false);

    public static final int VERSION = 1;

    // Preference key, possibly a String.format template (see getName).
    private String name;
    // Default returned when nothing is stored; also determines the stored type.
    private Object defaultValue;

    // Constant name doubles as the preference key.
    PreferenceData(Object value) {
        name = name();
        defaultValue = value;
    }

    // Explicit (possibly templated) preference key.
    PreferenceData(String name, Object value) {
        this.name = name;
        defaultValue = value;
    }

    /**
     * Returns the concrete preference key, substituting {@code args} into the
     * key template when provided.
     */
    public String getName(@Nullable String... args) {
        if (args != null && args.length > 0)
            return String.format(name, (Object[]) args);
        else return name;
    }

    /**
     * Returns the default value cast to the caller's expected type.
     *
     * @throws TypeMismatchException if the cast fails
     */
    public <T> T getDefaultValue() {
        try {
            return (T) defaultValue;
        } catch (ClassCastException e) {
            throw new TypeMismatchException(this);
        }
    }

    /** Returns the stored value (or this constant's default) for the plain key. */
    public <T> T getValue(Context context) {
        return getSpecificOverriddenValue(context, (T) getDefaultValue(), (String[]) null);
    }

    /** Returns the stored value, falling back to the caller-supplied default. */
    public <T> T getValue(Context context, @Nullable T defaultValue) {
        return getSpecificOverriddenValue(context, defaultValue, (String[]) null);
    }

    /** Returns the stored value for a templated key (e.g. a specific icon). */
    public <T> T getSpecificValue(Context context, @Nullable String... args) {
        return getSpecificOverriddenValue(context, (T) getDefaultValue(), args);
    }

    /**
     * Core read path: resolves the key (with optional template args), then reads
     * either an array (via the "-length"/"-i" key scheme) or a scalar, using the
     * runtime type of {@code defaultValue} to choose the SharedPreferences getter.
     *
     * @param defaultValue value returned when nothing is stored; when null, this
     *                     constant's built-in default determines the type instead
     * @throws TypeMismatchException if the stored/expected types disagree
     */
    public <T> T getSpecificOverriddenValue(Context context, @Nullable T defaultValue, @Nullable String... args) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        String name = getName(args);
        T type = defaultValue != null ? defaultValue : (T) getDefaultValue();

        if (type instanceof Object[] && prefs.contains(name + "-length")) {
            try {
                int length = prefs.getInt(name + "-length", 0);
                Object[] array;
                if (type instanceof Boolean[]) array = new Boolean[length];
                else if (type instanceof Integer[]) array = new Integer[length];
                else if (type instanceof String[]) array = new String[length];
                else throw new TypeMismatchException(this);

                for (int i = 0; i < array.length; i++) {
                    // NOTE(review): missing Boolean/Integer elements become null,
                    // but missing String elements become "" — confirm the
                    // asymmetry is intentional.
                    if (array instanceof Boolean[])
                        array[i] = prefs.contains(name + "-" + i) ? prefs.getBoolean(name + "-" + i, false) : null;
                    else if (array instanceof Integer[])
                        array[i] = prefs.contains(name + "-" + i) ? prefs.getInt(name + "-" + i, 0) : null;
                    else if (array instanceof String[])
                        array[i] = prefs.getString(name + "-" + i, "");
                    else throw new TypeMismatchException(this);
                }

                return (T) array;
            } catch (ClassCastException e) {
                throw new TypeMismatchException(this, type.getClass());
            }
        } else if (prefs.contains(name)) {
            try {
                // NOTE(review): new Boolean(...)/new Integer(...) are deprecated
                // boxing constructors; valueOf would be preferred.
                if (type instanceof Boolean)
                    return (T) new Boolean(prefs.getBoolean(name, defaultValue != null ? (Boolean) defaultValue : false));
                else if (type instanceof Integer)
                    return (T) new Integer(prefs.getInt(name, defaultValue != null ? (Integer) defaultValue : -1));
                else if (type instanceof String)
                    return (T) prefs.getString(name, defaultValue != null ? (String) defaultValue : "");
            } catch (ClassCastException e) {
                throw new TypeMismatchException(this, type.getClass());
            }
        }

        return defaultValue;
    }

    /** Writes (or removes, when null) the value for the plain key. */
    public <T> void setValue(Context context, @Nullable T value) {
        setValue(context, value, (String[]) null);
    }

    /**
     * Core write path: null removes the entry, arrays are exploded into
     * "key-i" entries plus "key-length", scalars use the matching putter.
     *
     * @throws TypeMismatchException for unsupported value/element types
     */
    public <T> void setValue(Context context, @Nullable T value, @Nullable String... args) {
        SharedPreferences.Editor editor = PreferenceManager.getDefaultSharedPreferences(context).edit();
        String name = getName(args);

        // NOTE(review): removing an array only deletes "key-length", leaving the
        // "key-i" element entries behind — confirm whether that is intended.
        if (value == null) editor.remove(name + (defaultValue instanceof Object[] ? "-length" : ""));
        else if (value instanceof Object[]) {
            Object[] array = (Object[]) value;
            for (int i = 0; i < array.length; i++) {
                Object item = array[i];
                if (item instanceof Boolean) editor.putBoolean(name + "-" + i, (boolean) item);
                else if (item instanceof Integer) editor.putInt(name + "-" + i, (int) item);
                else if (item instanceof String) editor.putString(name + "-" + i, (String) item);
                else throw new TypeMismatchException(this);
            }

            editor.putInt(name + "-length", array.length);
        } else {
            if (value instanceof Boolean) editor.putBoolean(name, (Boolean) value);
            else if (value instanceof Integer) editor.putInt(name, (Integer) value);
            else if (value instanceof String) editor.putString(name, (String) value);
            else throw new TypeMismatchException(this);
        }

        // Asynchronous commit.
        editor.apply();
    }

    /**
     * Serializes ALL shared preferences to {@code file} as JSON.
     *
     * @return true when the file exists after the attempt; false when the stream
     *         could not even be opened
     */
    public static boolean toFile(Context context, File file) {
        Map<String, ?> prefs = PreferenceManager.getDefaultSharedPreferences(context).getAll();

        FileOutputStream stream = null;
        try {
            stream = new FileOutputStream(file);
            stream.write(new Gson().toJson(prefs).getBytes());
        } catch (IOException e) {
            e.printStackTrace();
            Toast.makeText(context, e.getMessage(), Toast.LENGTH_SHORT).show();
        }

        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ignored) {
            }
        } else return false;

        return file.exists();
    }

    /**
     * Restores shared preferences from a JSON backup produced by {@link #toFile}.
     *
     * @return true when the restored values were committed successfully
     */
    public static boolean fromFile(Context context, File file) {
        SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
        byte[] bytes = new byte[(int) file.length()];

        FileInputStream stream = null;
        try {
            stream = new FileInputStream(file);
            // NOTE(review): the return value of read() is ignored; a short read
            // would leave trailing zero bytes in the buffer — confirm acceptable.
            stream.read(bytes);
        } catch (IOException e) {
            e.printStackTrace();
        }

        if (stream != null) {
            try {
                stream.close();
            } catch (IOException ignored) {
            }
        }

        String contents = new String(bytes);
        SharedPreferences.Editor editor = prefs.edit();
        try {
            Map<String, ?> map = new Gson().fromJson(contents, Map.class);
            for (String key : map.keySet()) {
                Object value = map.get(key);
                // Replay each entry with the putter matching its JSON type.
                if (value instanceof Boolean) editor.putBoolean(key, (Boolean) value);
                else if (value instanceof Float) editor.putFloat(key, (Float) value);
                else if (value instanceof Integer) editor.putInt(key, (Integer) value);
                else if (value instanceof Long) editor.putLong(key, (Long) value);
                else if (value instanceof String) editor.putString(key, (String) value);
                else if (value instanceof Set) editor.putStringSet(key, (Set) value);
            }
        } catch (Exception e) {
            e.printStackTrace();
            Toast.makeText(context, e.getMessage(), Toast.LENGTH_SHORT).show();
            return false;
        }

        return editor.commit();
    }

    /** Directory on external storage where preference backups are written. */
    public static String getBackupsDir() {
        return Environment.getExternalStorageDirectory() + "/status/backups";
    }

    /**
     * Thrown when a stored or requested preference value does not match the
     * type implied by the constant's default value.
     */
    public static class TypeMismatchException extends RuntimeException {
        public TypeMismatchException(PreferenceData data) {
            this(data, null);
        }

        public TypeMismatchException(PreferenceData data, Class expectedType) {
            super("Wrong type used for \"" + data.name() + "\""
                    + (data.defaultValue != null ? ": expected " + data.defaultValue.getClass().getName()
                    + (expectedType != null ? ", got " + expectedType.getName() : "") : ""));
        }
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.syncope.core.provisioning.java.notification; import java.io.StringWriter; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import org.apache.commons.jexl3.MapContext; import org.apache.commons.lang3.StringUtils; import org.apache.syncope.common.lib.to.GroupTO; import org.apache.syncope.common.lib.to.UserTO; import org.apache.syncope.common.lib.types.AuditElements; import org.apache.syncope.common.lib.types.AuditElements.Result; import org.apache.syncope.common.lib.types.AuditLoggerName; import org.apache.syncope.common.lib.to.AnyObjectTO; import org.apache.syncope.common.lib.to.ProvisioningResult; import org.apache.syncope.common.lib.types.AnyTypeKind; import org.apache.syncope.core.provisioning.java.jexl.JexlUtils; import org.apache.syncope.core.persistence.api.dao.ConfDAO; import org.apache.syncope.core.persistence.api.dao.NotificationDAO; import org.apache.syncope.core.persistence.api.dao.GroupDAO; import org.apache.syncope.core.persistence.api.dao.TaskDAO; import org.apache.syncope.core.persistence.api.dao.UserDAO; import 
org.apache.syncope.core.persistence.api.dao.search.OrderByClause; import org.apache.syncope.core.persistence.api.entity.EntityFactory; import org.apache.syncope.core.persistence.api.entity.Notification; import org.apache.syncope.core.persistence.api.entity.PlainAttr; import org.apache.syncope.core.persistence.api.entity.group.Group; import org.apache.syncope.core.persistence.api.entity.task.NotificationTask; import org.apache.syncope.core.persistence.api.entity.task.TaskExec; import org.apache.syncope.core.persistence.api.entity.user.UPlainAttr; import org.apache.syncope.core.persistence.api.entity.user.User; import org.apache.syncope.core.provisioning.api.data.GroupDataBinder; import org.apache.syncope.core.provisioning.api.data.UserDataBinder; import org.apache.syncope.core.persistence.api.search.SearchCondConverter; import org.apache.syncope.core.spring.ApplicationContextProvider; import org.apache.syncope.core.persistence.api.dao.AnyObjectDAO; import org.apache.syncope.core.persistence.api.dao.AnySearchDAO; import org.apache.syncope.core.persistence.api.dao.DerSchemaDAO; import org.apache.syncope.core.persistence.api.dao.VirSchemaDAO; import org.apache.syncope.core.persistence.api.entity.Any; import org.apache.syncope.core.persistence.api.entity.AnyAbout; import org.apache.syncope.core.persistence.api.entity.AnyType; import org.apache.syncope.core.persistence.api.entity.DerSchema; import org.apache.syncope.core.persistence.api.entity.VirSchema; import org.apache.syncope.core.persistence.api.entity.anyobject.AnyObject; import org.apache.syncope.core.persistence.api.entity.user.UMembership; import org.apache.syncope.core.provisioning.api.DerAttrHandler; import org.apache.syncope.core.provisioning.java.IntAttrNameParser; import org.apache.syncope.core.provisioning.api.IntAttrName; import org.apache.syncope.core.provisioning.api.VirAttrHandler; import org.apache.syncope.core.provisioning.api.data.AnyObjectDataBinder; import 
org.apache.syncope.core.provisioning.api.notification.NotificationManager;
import org.apache.syncope.core.provisioning.api.notification.NotificationRecipientsProvider;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.support.AbstractBeanDefinition;
import org.springframework.stereotype.Component;
import org.springframework.transaction.annotation.Transactional;

/**
 * Default {@link NotificationManager}: matches audit events against configured
 * notifications and materializes {@link NotificationTask}s (recipients resolved
 * via FIQL search, self, static list and/or a pluggable provider; bodies rendered
 * through JEXL templates).
 */
@Component
@Transactional(rollbackFor = { Throwable.class })
public class NotificationManagerImpl implements NotificationManager {

    // NOTE(review): logger is bound to NotificationManager.class rather than
    // NotificationManagerImpl.class — confirm whether that is deliberate.
    private static final Logger LOG = LoggerFactory.getLogger(NotificationManager.class);

    @Autowired
    private DerSchemaDAO derSchemaDAO;

    @Autowired
    private VirSchemaDAO virSchemaDAO;

    /**
     * Notification DAO.
     */
    @Autowired
    private NotificationDAO notificationDAO;

    /**
     * Configuration DAO.
     */
    @Autowired
    private ConfDAO confDAO;

    /**
     * AnyObject DAO.
     */
    @Autowired
    private AnyObjectDAO anyObjectDAO;

    /**
     * User DAO.
     */
    @Autowired
    private UserDAO userDAO;

    /**
     * Group DAO.
     */
    @Autowired
    private GroupDAO groupDAO;

    /**
     * Search DAO.
     */
    @Autowired
    private AnySearchDAO searchDAO;

    /**
     * Task DAO.
     */
    @Autowired
    private TaskDAO taskDAO;

    @Autowired
    private DerAttrHandler derAttrHander;

    @Autowired
    private VirAttrHandler virAttrHander;

    @Autowired
    private UserDataBinder userDataBinder;

    @Autowired
    private GroupDataBinder groupDataBinder;

    @Autowired
    private AnyObjectDataBinder anyObjectDataBinder;

    @Autowired
    private EntityFactory entityFactory;

    @Autowired
    private IntAttrNameParser intAttrNameParser;

    /**
     * Reads the configured maximum number of delivery retries (defaults to 0).
     */
    @Transactional(readOnly = true)
    @Override
    public long getMaxRetries() {
        return confDAO.find("notification.maxRetries", "0").getValues().get(0).getLongValue();
    }

    /**
     * Create a notification task.
     *
     * @param notification notification to take as model
     * @param any the any object this task is about
     * @param jexlVars JEXL variables
     * @return notification task, fully populated
     */
    private NotificationTask getNotificationTask(
            final Notification notification,
            final Any<?> any,
            final Map<String, Object> jexlVars) {

        // Force virtual attribute resolution before rendering templates.
        if (any != null) {
            virAttrHander.getValues(any);
        }

        List<User> recipients = new ArrayList<>();

        // Recipients selected by FIQL query, if configured.
        if (notification.getRecipientsFIQL() != null) {
            recipients.addAll(searchDAO.<User>search(
                    SearchCondConverter.convert(notification.getRecipientsFIQL()),
                    Collections.<OrderByClause>emptyList(), AnyTypeKind.USER));
        }

        // Optionally notify the user the event is about.
        if (notification.isSelfAsRecipient() && any instanceof User) {
            recipients.add((User) any);
        }

        Set<String> recipientEmails = new HashSet<>();
        List<UserTO> recipientTOs = new ArrayList<>(recipients.size());
        for (User recipient : recipients) {
            virAttrHander.getValues(recipient);

            String email = getRecipientEmail(notification.getRecipientAttrName(), recipient);
            if (email == null) {
                LOG.warn("{} cannot be notified: {} not found", recipient, notification.getRecipientAttrName());
            } else {
                recipientEmails.add(email);
                recipientTOs.add(userDataBinder.getUserTO(recipient, true));
            }
        }

        // Fixed addresses configured directly on the notification.
        if (notification.getStaticRecipients() != null) {
            recipientEmails.addAll(notification.getStaticRecipients());
        }

        // Optional pluggable provider; failures are logged and ignored so the
        // task is still created for the recipients resolved so far.
        if (notification.getRecipientsProviderClassName() != null) {
            try {
                NotificationRecipientsProvider recipientsProvider =
                        (NotificationRecipientsProvider) ApplicationContextProvider.getBeanFactory().
                        createBean(Class.forName(notification.getRecipientsProviderClassName()),
                                AbstractBeanDefinition.AUTOWIRE_BY_NAME, false);
                recipientEmails.addAll(recipientsProvider.provideRecipients(notification));
            } catch (Exception e) {
                LOG.error("Could not fetch recipients from {}", notification.getRecipientsProviderClassName(), e);
            }
        }

        // Make recipients, configuration and events available to the templates.
        jexlVars.put("recipients", recipientTOs);
        jexlVars.put("syncopeConf", this.findAllSyncopeConfs());
        jexlVars.put("events", notification.getEvents());

        NotificationTask task = entityFactory.newEntity(NotificationTask.class);
        task.setNotification(notification);
        if (any != null) {
            task.setEntityKey(any.getKey());
            task.setAnyTypeKind(any.getType().getKind());
        }
        task.setTraceLevel(notification.getTraceLevel());
        task.getRecipients().addAll(recipientEmails);
        task.setSender(notification.getSender());
        task.setSubject(notification.getSubject());

        // Render the text / HTML bodies only when a template is configured.
        if (StringUtils.isNotBlank(notification.getTemplate().getTextTemplate())) {
            task.setTextBody(evaluate(notification.getTemplate().getTextTemplate(), jexlVars));
        }
        if (StringUtils.isNotBlank(notification.getTemplate().getHTMLTemplate())) {
            task.setHtmlBody(evaluate(notification.getTemplate().getHTMLTemplate(), jexlVars));
        }

        return task;
    }

    /** Renders a JEXL template against the given variables. */
    private String evaluate(final String template, final Map<String, Object> jexlVars) {
        StringWriter writer = new StringWriter();
        JexlUtils.newJxltEngine().
                createTemplate(template).
                evaluate(new MapContext(jexlVars), writer);
        return writer.toString();
    }

    /**
     * Resolves the entity the audit event is about, matches it against every
     * active notification and persists one {@link NotificationTask} per match.
     */
    @Override
    public List<NotificationTask> createTasks(
            final AuditElements.EventCategoryType type,
            final String category,
            final String subcategory,
            final String event,
            final Result condition,
            final Object before,
            final Object output,
            final Object... input) {

        // Resolve the concerned entity from either the before- or after-image
        // (possibly wrapped in a ProvisioningResult), trying User, AnyObject
        // then Group.
        Any<?> any = null;

        if (before instanceof UserTO) {
            any = userDAO.find(((UserTO) before).getKey());
        } else if (output instanceof UserTO) {
            any = userDAO.find(((UserTO) output).getKey());
        } else if (output instanceof ProvisioningResult
                && ((ProvisioningResult) output).getEntity() instanceof UserTO) {
            any = userDAO.find(((ProvisioningResult) output).getEntity().getKey());
        } else if (before instanceof AnyObjectTO) {
            any = anyObjectDAO.find(((AnyObjectTO) before).getKey());
        } else if (output instanceof AnyObjectTO) {
            any = anyObjectDAO.find(((AnyObjectTO) output).getKey());
        } else if (output instanceof ProvisioningResult
                && ((ProvisioningResult) output).getEntity() instanceof AnyObjectTO) {
            any = anyObjectDAO.find(((ProvisioningResult) output).getEntity().getKey());
        } else if (before instanceof GroupTO) {
            any = groupDAO.find(((GroupTO) before).getKey());
        } else if (output instanceof GroupTO) {
            any = groupDAO.find(((GroupTO) output).getKey());
        } else if (output instanceof ProvisioningResult
                && ((ProvisioningResult) output).getEntity() instanceof GroupTO) {
            any = groupDAO.find(((ProvisioningResult) output).getEntity().getKey());
        }

        AnyType anyType = any == null ? null : any.getType();

        LOG.debug("Search notification for [{}]{}", anyType, any);

        List<NotificationTask> notifications = new ArrayList<>();
        for (Notification notification : notificationDAO.findAll()) {
            if (LOG.isDebugEnabled()) {
                for (AnyAbout about : notification.getAbouts()) {
                    LOG.debug("Notification about {} defined: {}", about.getAnyType(), about.get());
                }
            }

            if (notification.isActive()) {
                String currentEvent = AuditLoggerName.buildEvent(type, category, subcategory, event, condition);
                if (!notification.getEvents().contains(currentEvent)) {
                    LOG.debug("No events found about {}", any);
                } else if (anyType == null || any == null
                        || notification.getAbout(anyType) == null
                        || searchDAO.matches(any,
                                SearchCondConverter.convert(notification.getAbout(anyType).get()),
                                anyType.getKind())) {

                    LOG.debug("Creating notification task for event {} about {}", currentEvent, any);

                    final Map<String, Object> model = new HashMap<>();
                    model.put("type", type);
                    model.put("category", category);
                    model.put("subcategory", subcategory);
                    model.put("event", event);
                    model.put("condition", condition);
                    model.put("before", before);
                    model.put("output", output);
                    model.put("input", input);

                    if (any instanceof User) {
                        model.put("user", userDataBinder.getUserTO((User) any, true));
                    } else if (any instanceof Group) {
                        model.put("group", groupDataBinder.getGroupTO((Group) any, true));
                    } else if (any instanceof AnyObject) {
                        // NOTE(review): key is "group" here too, so an AnyObject
                        // overwrites / masquerades as the "group" template
                        // variable — presumably this should be "anyObject";
                        // verify against the templates before changing.
                        model.put("group", anyObjectDataBinder.getAnyObjectTO((AnyObject) any, true));
                    }

                    NotificationTask notificationTask = getNotificationTask(notification, any, model);
                    notificationTask = taskDAO.save(notificationTask);
                    notifications.add(notificationTask);
                }
            } else {
                LOG.debug("Notification {} is not active, task will not be created", notification.getKey());
            }
        }
        return notifications;
    }

    /**
     * Resolves the e-mail address of a recipient from the configured attribute
     * name: either the username, or a plain / derived / virtual schema value,
     * possibly scoped to a group membership. Returns null when unresolvable.
     */
    private String getRecipientEmail(final String recipientAttrName, final User user) {
        String email = null;
        IntAttrName intAttrName = intAttrNameParser.parse(recipientAttrName, AnyTypeKind.USER);

        if ("username".equals(intAttrName.getField())) {
            email = user.getUsername();
        } else if (intAttrName.getSchemaType() != null) {
            UMembership membership = null;
            if (intAttrName.getMembershipOfGroup() != null) {
                Group group = groupDAO.findByName(intAttrName.getMembershipOfGroup());
                if (group != null) {
                    membership = user.getMembership(group.getKey());
                }
            }

            switch (intAttrName.getSchemaType()) {
                case PLAIN:
                    UPlainAttr attr = membership == null
                            ? user.getPlainAttr(recipientAttrName)
                            : user.getPlainAttr(recipientAttrName, membership);
                    if (attr != null) {
                        email = attr.getValuesAsStrings().isEmpty()
                                ? null
                                : attr.getValuesAsStrings().get(0);
                    }
                    break;

                case DERIVED:
                    DerSchema schema = derSchemaDAO.find(recipientAttrName);
                    if (schema == null) {
                        LOG.warn("Ignoring non existing {} {}",
                                DerSchema.class.getSimpleName(), recipientAttrName);
                    } else {
                        email = membership == null
                                ? derAttrHander.getValue(user, schema)
                                : derAttrHander.getValue(user, membership, schema);
                    }
                    break;

                case VIRTUAL:
                    VirSchema virSchema = virSchemaDAO.find(recipientAttrName);
                    if (virSchema == null) {
                        LOG.warn("Ignoring non existing {} {}",
                                VirSchema.class.getSimpleName(), recipientAttrName);
                    } else {
                        List<String> virAttrValues = membership == null
                                ? virAttrHander.getValues(user, virSchema)
                                : virAttrHander.getValues(user, membership, virSchema);
                        email = virAttrValues.isEmpty() ? null : virAttrValues.get(0);
                    }
                    break;

                default:
            }
        }

        return email;
    }

    /**
     * Attaches the execution to its task, marks the task executed and flushes
     * so the execution gets a generated key.
     */
    @Override
    public TaskExec storeExec(final TaskExec execution) {
        NotificationTask task = taskDAO.find(execution.getTask().getKey());
        task.add(execution);
        task.setExecuted(true);
        taskDAO.save(task);
        // this flush call is needed to generate a value for the execution key
        taskDAO.flush();
        return execution;
    }

    /** Updates the executed flag on the given task. */
    @Override
    public void setTaskExecuted(final String taskKey, final boolean executed) {
        NotificationTask task = taskDAO.find(taskKey);
        task.setExecuted(executed);
        taskDAO.save(task);
    }

    /**
     * Counts the task's executions with the given status; a null status counts
     * executions whose status is null.
     */
    @Override
    public long countExecutionsWithStatus(final String taskKey, final String status) {
        NotificationTask task = taskDAO.find(taskKey);
        long count = 0;
        for (TaskExec taskExec : task.getExecs()) {
            if (status == null) {
                if (taskExec.getStatus() == null) {
                    count++;
                }
            } else if (status.equals(taskExec.getStatus())) {
                count++;
            }
        }
        return count;
    }

    /**
     * Snapshots all Syncope configuration parameters (first value only) for use
     * as the {@code syncopeConf} template variable.
     */
    protected Map<String, String> findAllSyncopeConfs() {
        Map<String, String> syncopeConfMap = new HashMap<>();
        for (PlainAttr<?> attr : confDAO.get().getPlainAttrs()) {
            syncopeConfMap.put(attr.getSchema().getKey(), attr.getValuesAsStrings().get(0));
        }
        return syncopeConfMap;
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.honeycode.model; import java.io.Serializable; import javax.annotation.Generated; import com.amazonaws.protocol.StructuredPojo; import com.amazonaws.protocol.ProtocolMarshaller; /** * <p> * An object that represents a filter formula along with the id of the context row under which the filter function needs * to evaluate. * </p> * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/honeycode-2020-03-01/Filter" target="_top">AWS API * Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class Filter implements Serializable, Cloneable, StructuredPojo { /** * <p> * A formula representing a filter function that returns zero or more matching rows from a table. Valid formulas in * this field return a list of rows from a table. The most common ways of writing a formula to return a list of rows * are to use the FindRow() or Filter() functions. Any other formula that returns zero or more rows is also * acceptable. For example, you can use a formula that points to a cell that contains a filter function. * </p> */ private String formula; /** * <p> * The optional contextRowId attribute can be used to specify the row id of the context row if the filter formula * contains unqualified references to table columns and needs a context row to evaluate them successfully. 
* </p> */ private String contextRowId; /** * <p> * A formula representing a filter function that returns zero or more matching rows from a table. Valid formulas in * this field return a list of rows from a table. The most common ways of writing a formula to return a list of rows * are to use the FindRow() or Filter() functions. Any other formula that returns zero or more rows is also * acceptable. For example, you can use a formula that points to a cell that contains a filter function. * </p> * * @param formula * A formula representing a filter function that returns zero or more matching rows from a table. Valid * formulas in this field return a list of rows from a table. The most common ways of writing a formula to * return a list of rows are to use the FindRow() or Filter() functions. Any other formula that returns zero * or more rows is also acceptable. For example, you can use a formula that points to a cell that contains a * filter function. */ public void setFormula(String formula) { this.formula = formula; } /** * <p> * A formula representing a filter function that returns zero or more matching rows from a table. Valid formulas in * this field return a list of rows from a table. The most common ways of writing a formula to return a list of rows * are to use the FindRow() or Filter() functions. Any other formula that returns zero or more rows is also * acceptable. For example, you can use a formula that points to a cell that contains a filter function. * </p> * * @return A formula representing a filter function that returns zero or more matching rows from a table. Valid * formulas in this field return a list of rows from a table. The most common ways of writing a formula to * return a list of rows are to use the FindRow() or Filter() functions. Any other formula that returns zero * or more rows is also acceptable. For example, you can use a formula that points to a cell that contains a * filter function. 
*/ public String getFormula() { return this.formula; } /** * <p> * A formula representing a filter function that returns zero or more matching rows from a table. Valid formulas in * this field return a list of rows from a table. The most common ways of writing a formula to return a list of rows * are to use the FindRow() or Filter() functions. Any other formula that returns zero or more rows is also * acceptable. For example, you can use a formula that points to a cell that contains a filter function. * </p> * * @param formula * A formula representing a filter function that returns zero or more matching rows from a table. Valid * formulas in this field return a list of rows from a table. The most common ways of writing a formula to * return a list of rows are to use the FindRow() or Filter() functions. Any other formula that returns zero * or more rows is also acceptable. For example, you can use a formula that points to a cell that contains a * filter function. * @return Returns a reference to this object so that method calls can be chained together. */ public Filter withFormula(String formula) { setFormula(formula); return this; } /** * <p> * The optional contextRowId attribute can be used to specify the row id of the context row if the filter formula * contains unqualified references to table columns and needs a context row to evaluate them successfully. * </p> * * @param contextRowId * The optional contextRowId attribute can be used to specify the row id of the context row if the filter * formula contains unqualified references to table columns and needs a context row to evaluate them * successfully. */ public void setContextRowId(String contextRowId) { this.contextRowId = contextRowId; } /** * <p> * The optional contextRowId attribute can be used to specify the row id of the context row if the filter formula * contains unqualified references to table columns and needs a context row to evaluate them successfully. 
* </p> * * @return The optional contextRowId attribute can be used to specify the row id of the context row if the filter * formula contains unqualified references to table columns and needs a context row to evaluate them * successfully. */ public String getContextRowId() { return this.contextRowId; } /** * <p> * The optional contextRowId attribute can be used to specify the row id of the context row if the filter formula * contains unqualified references to table columns and needs a context row to evaluate them successfully. * </p> * * @param contextRowId * The optional contextRowId attribute can be used to specify the row id of the context row if the filter * formula contains unqualified references to table columns and needs a context row to evaluate them * successfully. * @return Returns a reference to this object so that method calls can be chained together. */ public Filter withContextRowId(String contextRowId) { setContextRowId(contextRowId); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. 
* * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getFormula() != null) sb.append("Formula: ").append("***Sensitive Data Redacted***").append(","); if (getContextRowId() != null) sb.append("ContextRowId: ").append(getContextRowId()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof Filter == false) return false; Filter other = (Filter) obj; if (other.getFormula() == null ^ this.getFormula() == null) return false; if (other.getFormula() != null && other.getFormula().equals(this.getFormula()) == false) return false; if (other.getContextRowId() == null ^ this.getContextRowId() == null) return false; if (other.getContextRowId() != null && other.getContextRowId().equals(this.getContextRowId()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getFormula() == null) ? 0 : getFormula().hashCode()); hashCode = prime * hashCode + ((getContextRowId() == null) ? 0 : getContextRowId().hashCode()); return hashCode; } @Override public Filter clone() { try { return (Filter) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } @com.amazonaws.annotation.SdkInternalApi @Override public void marshall(ProtocolMarshaller protocolMarshaller) { com.amazonaws.services.honeycode.model.transform.FilterMarshaller.getInstance().marshall(this, protocolMarshaller); } }
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License
 * 2.0 and the Server Side Public License, v 1; you may not use this file except
 * in compliance with, at your election, the Elastic License 2.0 or the Server
 * Side Public License, v 1.
 */

package org.elasticsearch.ingest;

import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.io.stream.Writeable;
import org.elasticsearch.core.TimeValue;
import org.elasticsearch.xcontent.ToXContentFragment;
import org.elasticsearch.xcontent.XContentBuilder;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.concurrent.TimeUnit;

/**
 * Immutable snapshot of ingest statistics: one overall total, one per pipeline,
 * and one per processor within each pipeline. Serializable over the transport
 * layer ({@link Writeable}) and renderable as an "ingest" XContent fragment.
 */
public class IngestStats implements Writeable, ToXContentFragment {
    // Aggregate over all pipelines.
    private final Stats totalStats;
    // One entry per pipeline, in the order supplied at construction.
    private final List<PipelineStat> pipelineStats;
    // Per-processor stats, keyed by pipeline id; a pipeline may have no entry here.
    private final Map<String, List<ProcessorStat>> processorStats;

    /**
     * @param totalStats - The total stats for Ingest. This is the logically the sum of all pipeline stats,
     *                   and pipeline stats are logically the sum of the processor stats.
     * @param pipelineStats - The stats for a given ingest pipeline.
     * @param processorStats - The per-processor stats for a given pipeline. A map keyed by the pipeline identifier.
     */
    public IngestStats(Stats totalStats, List<PipelineStat> pipelineStats, Map<String, List<ProcessorStat>> processorStats) {
        this.totalStats = totalStats;
        this.pipelineStats = pipelineStats;
        this.processorStats = processorStats;
    }

    /**
     * Read from a stream.
     *
     * Wire layout (must mirror {@link #writeTo(StreamOutput)} exactly):
     * total Stats, then a vInt pipeline count, then per pipeline:
     * pipeline id (String), pipeline Stats, vInt processor count, and per
     * processor: name, type, Stats.
     */
    public IngestStats(StreamInput in) throws IOException {
        this.totalStats = new Stats(in);
        int size = in.readVInt();
        this.pipelineStats = new ArrayList<>(size);
        this.processorStats = new HashMap<>(size);
        for (int i = 0; i < size; i++) {
            String pipelineId = in.readString();
            Stats pipelineStat = new Stats(in);
            this.pipelineStats.add(new PipelineStat(pipelineId, pipelineStat));
            int processorsSize = in.readVInt();
            List<ProcessorStat> processorStatsPerPipeline = new ArrayList<>(processorsSize);
            for (int j = 0; j < processorsSize; j++) {
                String processorName = in.readString();
                String processorType = in.readString();
                Stats processorStat = new Stats(in);
                processorStatsPerPipeline.add(new ProcessorStat(processorName, processorType, processorStat));
            }
            this.processorStats.put(pipelineId, processorStatsPerPipeline);
        }
    }

    @Override
    public void writeTo(StreamOutput out) throws IOException {
        // Order must mirror IngestStats(StreamInput) above.
        totalStats.writeTo(out);
        out.writeVInt(pipelineStats.size());
        for (PipelineStat pipelineStat : pipelineStats) {
            out.writeString(pipelineStat.getPipelineId());
            pipelineStat.getStats().writeTo(out);
            List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId());
            if (processorStatsForPipeline == null) {
                // No processor breakdown recorded for this pipeline: write an empty list.
                out.writeVInt(0);
            } else {
                out.writeVInt(processorStatsForPipeline.size());
                for (ProcessorStat processorStat : processorStatsForPipeline) {
                    out.writeString(processorStat.getName());
                    out.writeString(processorStat.getType());
                    processorStat.getStats().writeTo(out);
                }
            }
        }
    }

    @Override
    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
        // Shape: { "ingest": { "total": {...}, "pipelines": { "<id>": { ...stats...,
        //   "processors": [ { "<name>": { "type": ..., "stats": {...} } }, ... ] } } } }
        builder.startObject("ingest");
        builder.startObject("total");
        totalStats.toXContent(builder, params);
        builder.endObject();
        builder.startObject("pipelines");
        for (PipelineStat pipelineStat : pipelineStats) {
            builder.startObject(pipelineStat.getPipelineId());
            pipelineStat.getStats().toXContent(builder, params);
            List<ProcessorStat> processorStatsForPipeline = processorStats.get(pipelineStat.getPipelineId());
            builder.startArray("processors");
            if (processorStatsForPipeline != null) {
                for (ProcessorStat processorStat : processorStatsForPipeline) {
                    builder.startObject();
                    builder.startObject(processorStat.getName());
                    builder.field("type", processorStat.getType());
                    builder.startObject("stats");
                    processorStat.getStats().toXContent(builder, params);
                    builder.endObject();
                    builder.endObject();
                    builder.endObject();
                }
            }
            builder.endArray();
            builder.endObject();
        }
        builder.endObject();
        builder.endObject();
        return builder;
    }

    public Stats getTotalStats() {
        return totalStats;
    }

    public List<PipelineStat> getPipelineStats() {
        return pipelineStats;
    }

    public Map<String, List<ProcessorStat>> getProcessorStats() {
        return processorStats;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        IngestStats that = (IngestStats) o;
        return Objects.equals(totalStats, that.totalStats)
            && Objects.equals(pipelineStats, that.pipelineStats)
            && Objects.equals(processorStats, that.processorStats);
    }

    @Override
    public int hashCode() {
        return Objects.hash(totalStats, pipelineStats, processorStats);
    }

    /**
     * Counters for one scope (total, pipeline, or processor): executed count,
     * cumulative time, in-flight count, and failure count.
     */
    public static class Stats implements Writeable, ToXContentFragment {

        private final long ingestCount;
        private final long ingestTimeInMillis;
        private final long ingestCurrent;
        private final long ingestFailedCount;

        public Stats(long ingestCount, long ingestTimeInMillis, long ingestCurrent, long ingestFailedCount) {
            this.ingestCount = ingestCount;
            this.ingestTimeInMillis = ingestTimeInMillis;
            this.ingestCurrent = ingestCurrent;
            this.ingestFailedCount = ingestFailedCount;
        }

        /**
         * Read from a stream.
         *
         * Field order must mirror {@link #writeTo(StreamOutput)}.
         */
        public Stats(StreamInput in) throws IOException {
            ingestCount = in.readVLong();
            ingestTimeInMillis = in.readVLong();
            ingestCurrent = in.readVLong();
            ingestFailedCount = in.readVLong();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            out.writeVLong(ingestCount);
            out.writeVLong(ingestTimeInMillis);
            out.writeVLong(ingestCurrent);
            out.writeVLong(ingestFailedCount);
        }

        /**
         * @return The total number of executed ingest preprocessing operations.
         */
        public long getIngestCount() {
            return ingestCount;
        }

        /**
         * @return The total time spent of ingest preprocessing in millis.
         */
        public long getIngestTimeInMillis() {
            return ingestTimeInMillis;
        }

        /**
         * @return The total number of ingest preprocessing operations currently executing.
         */
        public long getIngestCurrent() {
            return ingestCurrent;
        }

        /**
         * @return The total number of ingest preprocessing operations that have failed.
         */
        public long getIngestFailedCount() {
            return ingestFailedCount;
        }

        @Override
        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
            builder.field("count", ingestCount);
            // Emits "time_in_millis" plus a human-readable "time" field when requested.
            builder.humanReadableField("time_in_millis", "time", new TimeValue(ingestTimeInMillis, TimeUnit.MILLISECONDS));
            builder.field("current", ingestCurrent);
            builder.field("failed", ingestFailedCount);
            return builder;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            IngestStats.Stats that = (IngestStats.Stats) o;
            // NOTE(review): Objects.equals on primitive longs autoboxes each operand;
            // plain == comparisons would be equivalent without the allocation.
            return Objects.equals(ingestCount, that.ingestCount)
                && Objects.equals(ingestTimeInMillis, that.ingestTimeInMillis)
                && Objects.equals(ingestFailedCount, that.ingestFailedCount)
                && Objects.equals(ingestCurrent, that.ingestCurrent);
        }

        @Override
        public int hashCode() {
            return Objects.hash(ingestCount, ingestTimeInMillis, ingestFailedCount, ingestCurrent);
        }
    }

    /**
     * Easy conversion from scoped {@link IngestMetric} objects to a serializable Stats objects
     */
    static class Builder {
        private Stats totalStats;
        private List<PipelineStat> pipelineStats = new ArrayList<>();
        private Map<String, List<ProcessorStat>> processorStats = new HashMap<>();

        Builder addTotalMetrics(IngestMetric totalMetric) {
            this.totalStats = totalMetric.createStats();
            return this;
        }

        Builder addPipelineMetrics(String pipelineId, IngestMetric pipelineMetric) {
            this.pipelineStats.add(new PipelineStat(pipelineId, pipelineMetric.createStats()));
            return this;
        }

        Builder addProcessorMetrics(String pipelineId, String processorName, String processorType, IngestMetric metric) {
            // Lazily create the per-pipeline list on first processor for that pipeline.
            this.processorStats.computeIfAbsent(pipelineId, k -> new ArrayList<>())
                .add(new ProcessorStat(processorName, processorType, metric.createStats()));
            return this;
        }

        IngestStats build() {
            // Wrap collections so the built IngestStats is effectively immutable.
            return new IngestStats(totalStats, Collections.unmodifiableList(pipelineStats), Collections.unmodifiableMap(processorStats));
        }
    }

    /**
     * Container for pipeline stats.
     */
    public static class PipelineStat {
        private final String pipelineId;
        private final Stats stats;

        public PipelineStat(String pipelineId, Stats stats) {
            this.pipelineId = pipelineId;
            this.stats = stats;
        }

        public String getPipelineId() {
            return pipelineId;
        }

        public Stats getStats() {
            return stats;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            IngestStats.PipelineStat that = (IngestStats.PipelineStat) o;
            return Objects.equals(pipelineId, that.pipelineId) && Objects.equals(stats, that.stats);
        }

        @Override
        public int hashCode() {
            return Objects.hash(pipelineId, stats);
        }
    }

    /**
     * Container for processor stats.
     */
    public static class ProcessorStat {
        private final String name;
        private final String type;
        private final Stats stats;

        public ProcessorStat(String name, String type, Stats stats) {
            this.name = name;
            this.type = type;
            this.stats = stats;
        }

        public String getName() {
            return name;
        }

        public String getType() {
            return type;
        }

        public Stats getStats() {
            return stats;
        }

        @Override
        public boolean equals(Object o) {
            if (this == o) return true;
            if (o == null || getClass() != o.getClass()) return false;
            IngestStats.ProcessorStat that = (IngestStats.ProcessorStat) o;
            return Objects.equals(name, that.name) && Objects.equals(type, that.type) && Objects.equals(stats, that.stats);
        }

        @Override
        public int hashCode() {
            return Objects.hash(name, type, stats);
        }
    }
}
/* * Copyright 1997-2004 The Apache Software Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.avalon.framework.parameters; import java.io.Serializable; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Properties; import org.apache.avalon.framework.configuration.Configuration; import org.apache.avalon.framework.configuration.ConfigurationException; /** * The <code>Parameters</code> class represents a set of key-value * pairs. * <p> * The <code>Parameters</code> object provides a mechanism to obtain * values based on a <code>String</code> name. There are convenience * methods that allow you to use defaults if the value does not exist, * as well as obtain the value in any of the same formats that are in * the {@link Configuration} interface. * </p><p> * While there are similarities between the <code>Parameters</code> * object and the java.util.Properties object, there are some * important semantic differences. First, <code>Parameters</code> are * <i>read-only</i>. Second, <code>Parameters</code> are easily * derived from {@link Configuration} objects. 
Lastly, the * <code>Parameters</code> object is derived from XML fragments that * look like this: * <pre><code> * &lt;parameter name="param-name" value="param-value" /&gt; * </code></pre> * </p><p> * <strong>Note: this class is not thread safe by default.</strong> If you * require thread safety please synchronize write access to this class to * prevent potential data corruption. * </p> * * @author <a href="mailto:dev@avalon.apache.org">Avalon Development Team</a> * @version CVS $Revision: 1.41 $ $Date: 2004/02/11 14:34:25 $ */ public class Parameters implements Serializable { /** * Empty Parameters object * * @since 4.1.2 */ public static final Parameters EMPTY_PARAMETERS; /** Static initializer to initialize the empty Parameters object */ static { EMPTY_PARAMETERS = new Parameters(); EMPTY_PARAMETERS.makeReadOnly(); } ///Underlying store of parameters private Map m_parameters = new HashMap(); private boolean m_readOnly; /** * Set the <code>String</code> value of a specified parameter. * <p /> * If the specified value is <b>null</b> the parameter is removed. * * @param name a <code>String</code> value * @param value a <code>String</code> value * @return The previous value of the parameter or <b>null</b>. * @throws IllegalStateException if the Parameters object is read-only */ public String setParameter( final String name, final String value ) throws IllegalStateException { checkWriteable(); if( null == name ) { return null; } if( null == value ) { return (String)m_parameters.remove( name ); } return (String)m_parameters.put( name, value ); } /** * Remove a parameter from the parameters object * @param name a <code>String</code> value * @since 4.1 */ public void removeParameter( final String name ) { setParameter( name, null ); } /** * Return an <code>Iterator</code> view of all parameter names. 
* * @return a iterator of parameter names * @deprecated Use getNames() instead */ public Iterator getParameterNames() { return m_parameters.keySet().iterator(); } /** * Retrieve an array of all parameter names. * * @return the parameters names */ public String[] getNames() { return (String[])m_parameters.keySet().toArray( new String[ 0 ] ); } /** * Test if the specified parameter can be retrieved. * * @param name the parameter name * @return true if parameter is a name */ public boolean isParameter( final String name ) { return m_parameters.containsKey( name ); } /** * Retrieve the <code>String</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, an exception is thrown. * * @param name the name of parameter * @return the value of parameter * @throws ParameterException if the specified parameter cannot be found */ public String getParameter( final String name ) throws ParameterException { if( null == name ) { throw new ParameterException( "You cannot lookup a null parameter" ); } final String test = (String)m_parameters.get( name ); if( null == test ) { throw new ParameterException( "The parameter '" + name + "' does not contain a value" ); } else { return test; } } /** * Retrieve the <code>String</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, <code>defaultValue</code> * is returned. * * @param name the name of parameter * @param defaultValue the default value, returned if parameter does not exist * or parameter's name is null * @return the value of parameter */ public String getParameter( final String name, final String defaultValue ) { if( name == null ) { return defaultValue; } final String test = (String)m_parameters.get( name ); if( test == null ) { return defaultValue; } else { return test; } } /** * Parses string represenation of the <code>int</code> value. 
* <p /> * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. * * @param value the value to parse * @return the integer value * @throws NumberFormatException if the specified value can not be parsed */ private int parseInt( final String value ) throws NumberFormatException { if( value.startsWith( "0x" ) ) { return Integer.parseInt( value.substring( 2 ), 16 ); } else if( value.startsWith( "0o" ) ) { return Integer.parseInt( value.substring( 2 ), 8 ); } else if( value.startsWith( "0b" ) ) { return Integer.parseInt( value.substring( 2 ), 2 ); } else { return Integer.parseInt( value ); } } /** * Retrieve the <code>int</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, an exception is thrown. * * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. * * @param name the name of parameter * @return the integer parameter type * @throws ParameterException if the specified parameter cannot be found * or is not an Integer value */ public int getParameterAsInteger( final String name ) throws ParameterException { try { return parseInt( getParameter( name ) ); } catch( final NumberFormatException e ) { throw new ParameterException( "Could not return an integer value", e ); } } /** * Retrieve the <code>int</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, <code>defaultValue</code> * is returned. * * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. 
* * @param name the name of parameter * @param defaultValue value returned if parameter does not exist or is of wrong type * @return the integer parameter type */ public int getParameterAsInteger( final String name, final int defaultValue ) { try { final String value = getParameter( name, null ); if( value == null ) { return defaultValue; } return parseInt( value ); } catch( final NumberFormatException e ) { return defaultValue; } } /** * Parses string represenation of the <code>long</code> value. * <p /> * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. * * @param value the value to parse * @return the long value * @throws NumberFormatException if the specified value can not be parsed */ private long parseLong( final String value ) throws NumberFormatException { if( value.startsWith( "0x" ) ) { return Long.parseLong( value.substring( 2 ), 16 ); } else if( value.startsWith( "0o" ) ) { return Long.parseLong( value.substring( 2 ), 8 ); } else if( value.startsWith( "0b" ) ) { return Long.parseLong( value.substring( 2 ), 2 ); } else { return Long.parseLong( value ); } } /** * Retrieve the <code>long</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, an exception is thrown. * * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. * * @param name the name of parameter * @return the long parameter type * @throws ParameterException if the specified parameter cannot be found * or is not a Long value. */ public long getParameterAsLong( final String name ) throws ParameterException { try { return parseLong( getParameter( name ) ); } catch( final NumberFormatException e ) { throw new ParameterException( "Could not return a long value", e ); } } /** * Retrieve the <code>long</code> value of the specified parameter. 
* <p /> * If the specified parameter cannot be found, <code>defaultValue</code> * is returned. * * Hexadecimal numbers begin with 0x, Octal numbers begin with 0o and binary * numbers begin with 0b, all other values are assumed to be decimal. * * @param name the name of parameter * @param defaultValue value returned if parameter does not exist or is of wrong type * @return the long parameter type */ public long getParameterAsLong( final String name, final long defaultValue ) { try { final String value = getParameter( name, null ); if( value == null ) { return defaultValue; } return parseLong( value ); } catch( final NumberFormatException e ) { return defaultValue; } } /** * Retrieve the <code>float</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, an exception is thrown. * * @param name the parameter name * @return the value * @throws ParameterException if the specified parameter cannot be found * or is not a Float value */ public float getParameterAsFloat( final String name ) throws ParameterException { try { return Float.parseFloat( getParameter( name ) ); } catch( final NumberFormatException e ) { throw new ParameterException( "Could not return a float value", e ); } } /** * Retrieve the <code>float</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, <code>defaultValue</code> * is returned. * * @param name the parameter name * @param defaultValue the default value if parameter does not exist or is of wrong type * @return the value */ public float getParameterAsFloat( final String name, final float defaultValue ) { try { final String value = getParameter( name, null ); if( value == null ) { return defaultValue; } return Float.parseFloat( value ); } catch( final NumberFormatException pe ) { return defaultValue; } } /** * Retrieve the <code>boolean</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, an exception is thrown. 
* * @param name the parameter name * @return the value * @throws ParameterException if an error occurs * @throws ParameterException */ public boolean getParameterAsBoolean( final String name ) throws ParameterException { final String value = getParameter( name ); if( value.equalsIgnoreCase( "true" ) ) { return true; } else if( value.equalsIgnoreCase( "false" ) ) { return false; } else { throw new ParameterException( "Could not return a boolean value" ); } } /** * Retrieve the <code>boolean</code> value of the specified parameter. * <p /> * If the specified parameter cannot be found, <code>defaultValue</code> * is returned. * * @param name the parameter name * @param defaultValue the default value if parameter does not exist or is of wrong type * @return the value */ public boolean getParameterAsBoolean( final String name, final boolean defaultValue ) { final String value = getParameter( name, null ); if( value == null ) { return defaultValue; } if( value.equalsIgnoreCase( "true" ) ) { return true; } else if( value.equalsIgnoreCase( "false" ) ) { return false; } else { return defaultValue; } } /** * Merge parameters from another <code>Parameters</code> instance * into this. * * @param other the other Parameters * @return This <code>Parameters</code> instance. */ public Parameters merge( final Parameters other ) { checkWriteable(); final String[] names = other.getNames(); for( int i = 0; i < names.length; i++ ) { final String name = names[ i ]; String value = null; try { value = other.getParameter( name ); } catch( final ParameterException pe ) { value = null; } setParameter( name, value ); } return this; } /** * Make this Parameters read-only so that it will throw a * <code>IllegalStateException</code> if someone tries to * modify it. */ public void makeReadOnly() { m_readOnly = true; } /** * Checks is this <code>Parameters</code> object is writeable. 
* * @throws IllegalStateException if this <code>Parameters</code> object is read-only */ protected final void checkWriteable() throws IllegalStateException { if( m_readOnly ) { throw new IllegalStateException( "Context is read only and can not be modified" ); } } /** * Create a <code>Parameters</code> object from a <code>Configuration</code> * object. This acts exactly like the following method call: * <pre> * Parameters.fromConfiguration(configuration, "parameter"); * </pre> * * @param configuration the Configuration * @return This <code>Parameters</code> instance. * @throws ConfigurationException if an error occurs */ public static Parameters fromConfiguration( final Configuration configuration ) throws ConfigurationException { return fromConfiguration( configuration, "parameter" ); } /** * Create a <code>Parameters</code> object from a <code>Configuration</code> * object using the supplied element name. * * @param configuration the Configuration * @param elementName the element name for the parameters * @return This <code>Parameters</code> instance. * @throws ConfigurationException if an error occurs * @since 4.1 */ public static Parameters fromConfiguration( final Configuration configuration, final String elementName ) throws ConfigurationException { if( null == configuration ) { throw new ConfigurationException( "You cannot convert to parameters with a null Configuration" ); } final Configuration[] parameters = configuration.getChildren( elementName ); final Parameters params = new Parameters(); for( int i = 0; i < parameters.length; i++ ) { try { final String name = parameters[ i ].getAttribute( "name" ); final String value = parameters[ i ].getAttribute( "value" ); params.setParameter( name, value ); } catch( final Exception e ) { throw new ConfigurationException( "Cannot process Configurable", e ); } } return params; } /** * Create a <code>Parameters</code> object from a <code>Properties</code> * object. 
* * @param properties the Properties * @return This <code>Parameters</code> instance. */ public static Parameters fromProperties( final Properties properties ) { final Parameters parameters = new Parameters(); final Enumeration names = properties.propertyNames(); while( names.hasMoreElements() ) { final String key = names.nextElement().toString(); final String value = properties.getProperty( key ); parameters.setParameter( key, value ); } return parameters; } /** * Creates a <code>java.util.Properties</code> object from an Avalon * Parameters object. * * @param params a <code>Parameters</code> instance * @return a <code>Properties</code> instance */ public static Properties toProperties( final Parameters params ) { final Properties properties = new Properties(); final String[] names = params.getNames(); for( int i = 0; i < names.length; ++i ) { // "" is the default value, since getNames() proves it will exist properties.setProperty( names[ i ], params.getParameter( names[ i ], "" ) ); } return properties; } }
package net.jselby.escapists.editor.elements; import javafx.application.Platform; import javafx.beans.value.ChangeListener; import javafx.beans.value.ObservableValue; import javafx.concurrent.Worker; import javafx.embed.swing.JFXPanel; import javafx.scene.Scene; import javafx.scene.web.WebEngine; import javafx.scene.web.WebView; import net.jselby.escapists.editor.mapping.Map; import net.jselby.escapists.editor.mapping.MapRenderer; import org.w3c.dom.Document; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import org.w3c.dom.events.Event; import org.w3c.dom.events.EventTarget; import org.w3c.dom.html.HTMLAnchorElement; import javax.swing.*; import java.awt.*; import java.awt.event.*; import java.awt.image.BufferedImage; import java.net.URL; /** * A custom component which renders maps in the map editor. * * @author j_selby */ public class MapRendererComponent extends JPanel { private final JFXPanel panel; private float origWidth; private float origHeight; private Map mapToEdit; private BufferedImage render; private float zoomFactor = 1.0f; private String view = "World"; private MapRenderer renderer; public MapRendererComponent(Map map, MouseListener clickListener, MouseMotionListener motionListener) { this.mapToEdit = map; addMouseListener(clickListener); addMouseMotionListener(motionListener); // Build the renderer renderer = new MapRenderer(); addMouseListener(new MouseAdapter() { @Override public void mousePressed(MouseEvent e) { if (renderer.showZones && renderer.zoneEditing) { renderer.getZoning().mouseDown(mapToEdit, (int) ((float) e.getX() / (float) zoomFactor), (int) ((float) e.getY() / (float) zoomFactor)); refresh(); } } }); addMouseMotionListener(new MouseAdapter() { @Override public void mouseDragged(MouseEvent e) { if (renderer.showZones && renderer.zoneEditing) { renderer.getZoning().mouseDragged(mapToEdit, (int) ((float) e.getX() / (float) zoomFactor), (int) ((float) e.getY() / (float) zoomFactor)); refresh(); } } }); panel = new JFXPanel(); 
panel.setBorder(BorderFactory.createEmptyBorder(0, 0, 0, 0)); setLayout(new BorderLayout()); setMap(map); addComponentListener(new ComponentAdapter() { @Override public void componentResized(ComponentEvent e) { double oldWidth = panel.getSize().getWidth(); double oldHeight = panel.getSize().getWidth(); double newWidth = getSize().getWidth(); double newHeight = getSize().getWidth(); if (oldWidth != newWidth || newHeight != oldHeight) { panel.setSize(getSize()); } } }); Platform.runLater(new Runnable() { @Override public void run() { initFX(panel); } }); } @Override protected void paintComponent(Graphics g) { Graphics2D graphics2D = (Graphics2D) g; if (render != null) { graphics2D.scale(zoomFactor, zoomFactor); g.drawImage(render, 0, 0, null); } Dimension size = new Dimension((int) (origWidth * zoomFactor),(int) (origHeight * zoomFactor)); setSize(size); setPreferredSize(size); setMaximumSize(size); setMinimumSize(size); // Re-Layout the panel validate(); } public void refresh() { // Render a snapshot if (mapToEdit != null) { render = renderer.render(mapToEdit, view); } setIgnoreRepaint(false); repaint(); } public void setZoomFactor(float newZoom) { this.zoomFactor = newZoom; refresh(); } public float getZoomFactor() { return zoomFactor; } public void setShowZones(boolean showZones) { renderer.showZones = showZones; } public void setView(String view) { this.view = view; } public String getView() { return view; } public void setEditZones(boolean editZones) { renderer.zoneEditing = editZones; } public void setMap(Map map) { boolean zoneEditing = renderer.zoneEditing; boolean showZones = renderer.showZones; renderer = new MapRenderer(); renderer.zoneEditing = zoneEditing; renderer.showZones = showZones; this.mapToEdit = map; if (mapToEdit != null) { removeAll(); Dimension size = new Dimension((map.getHeight() - 1) * 16, (map.getWidth() - 3) * 16); setSize(size); setPreferredSize(size); setMaximumSize(size); setMinimumSize(size); // These are inverted, don't worry. 
origWidth = (map.getHeight() - 1) * 16; origHeight = (map.getWidth() - 3) * 16; } else { origHeight = 500; origWidth = 734; add(panel, BorderLayout.NORTH); } refresh(); } private static void initFX(final JFXPanel fxPanel) { final WebView webView = new WebView(); fxPanel.setScene(new Scene(webView)); // Obtain the webEngine to navigate final WebEngine webEngine = webView.getEngine(); webEngine.loadContent("Escapists Map Editor\n" + "Written by jselby\nhttp://redd.it/2wacp2\n\n" + "You don't have a map loaded currently - \nGo to File in the top left, and press a button there!\n" + "Loading..."); webEngine.load("http://escapists.jselby.net/welcome/"); webView.getEngine().getLoadWorker().stateProperty().addListener(new ChangeListener<Worker.State>() { @Override public void changed(ObservableValue<? extends Worker.State> observable, Worker.State oldValue, Worker.State newValue) { if (newValue == Worker.State.SUCCEEDED) { final Document document = webEngine.getDocument(); NodeList nodeList = document.getElementsByTagName("a"); for (int i = 0; i < nodeList.getLength(); i++) { Node node = nodeList.item(i); EventTarget eventTarget = (EventTarget) node; eventTarget.addEventListener("click", new org.w3c.dom.events.EventListener() { @Override public void handleEvent(Event evt) { EventTarget target = evt.getCurrentTarget(); HTMLAnchorElement anchorElement = (HTMLAnchorElement) target; String href = anchorElement.getHref(); Desktop desktop = Desktop.isDesktopSupported() ? Desktop.getDesktop() : null; if (desktop != null && desktop.isSupported(Desktop.Action.BROWSE)) { try { desktop.browse(new URL(href).toURI()); } catch (Exception e) { e.printStackTrace(); } } evt.preventDefault(); } }, false); } } } }); } }
/* * Copyright 2012 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.android.apps.iosched.ui.tablet; import com.google.android.apps.iosched.R; import com.google.android.apps.iosched.provider.ScheduleContract; import com.google.android.apps.iosched.ui.TracksAdapter; import com.google.android.apps.iosched.util.ParserUtils; import android.annotation.TargetApi; import android.app.Activity; import android.content.res.Resources; import android.database.Cursor; import android.graphics.Bitmap; import android.graphics.Canvas; import android.graphics.drawable.BitmapDrawable; import android.os.Build; import android.os.Bundle; import android.os.Handler; import android.support.v4.app.Fragment; import android.support.v4.app.LoaderManager; import android.support.v4.content.CursorLoader; import android.support.v4.content.Loader; import android.view.LayoutInflater; import android.view.View; import android.view.ViewGroup; import android.widget.AdapterView; import android.widget.ImageView; import android.widget.ListPopupWindow; import android.widget.PopupWindow; import android.widget.TextView; /** * A tablet-specific fragment that is a giant {@link android.widget.Spinner} * -like widget. It shows a {@link ListPopupWindow} containing a list of tracks, * using {@link TracksAdapter}. Requires API level 11 or later since * {@link ListPopupWindow} is API level 11+. 
*/ @TargetApi(Build.VERSION_CODES.HONEYCOMB) public class TracksDropdownFragment extends Fragment implements LoaderManager.LoaderCallbacks<Cursor>, AdapterView.OnItemClickListener, PopupWindow.OnDismissListener { public static final int VIEW_TYPE_SESSIONS = 0; public static final int VIEW_TYPE_OFFICE_HOURS = 1; public static final int VIEW_TYPE_SANDBOX = 2; private static final String STATE_VIEW_TYPE = "viewType"; private static final String STATE_SELECTED_TRACK_ID = "selectedTrackId"; private TracksAdapter mAdapter; private int mViewType; private Handler mHandler = new Handler(); private ListPopupWindow mListPopupWindow; private ViewGroup mRootView; private ImageView mIcon; private TextView mTitle; private TextView mAbstract; private String mTrackId; public interface Callbacks { public void onTrackSelected(String trackId); public void onTrackNameAvailable(String trackId, String trackName); } private static Callbacks sDummyCallbacks = new Callbacks() { @Override public void onTrackSelected(String trackId) { } @Override public void onTrackNameAvailable(String trackId, String trackName) {} }; private Callbacks mCallbacks = sDummyCallbacks; @Override public void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); mAdapter = new TracksAdapter(getActivity(), true); if (savedInstanceState != null) { // Since this fragment doesn't rely on fragment arguments, we must // handle state restores and saves ourselves. 
mViewType = savedInstanceState.getInt(STATE_VIEW_TYPE); mTrackId = savedInstanceState.getString(STATE_SELECTED_TRACK_ID); } } @Override public void onSaveInstanceState(Bundle outState) { super.onSaveInstanceState(outState); outState.putInt(STATE_VIEW_TYPE, mViewType); outState.putString(STATE_SELECTED_TRACK_ID, mTrackId); } public String getSelectedTrackId() { return mTrackId; } public void selectTrack(String trackId) { loadTrackList(mViewType, trackId); } public void loadTrackList(int viewType) { loadTrackList(viewType, mTrackId); } public void loadTrackList(int viewType, String selectTrackId) { // Teardown from previous arguments if (mListPopupWindow != null) { mListPopupWindow.setAdapter(null); } mViewType = viewType; mTrackId = selectTrackId; // Start background query to load tracks getLoaderManager().restartLoader(TracksAdapter.TracksQuery._TOKEN, null, this); } @Override public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) { mRootView = (ViewGroup) inflater.inflate(R.layout.fragment_tracks_dropdown, null); mIcon = (ImageView) mRootView.findViewById(R.id.track_icon); mTitle = (TextView) mRootView.findViewById(R.id.track_title); mAbstract = (TextView) mRootView.findViewById(R.id.track_abstract); mRootView.setOnClickListener(new View.OnClickListener() { public void onClick(View view) { mListPopupWindow = new ListPopupWindow(getActivity()); mListPopupWindow.setAdapter(mAdapter); mListPopupWindow.setModal(true); mListPopupWindow.setContentWidth( getResources().getDimensionPixelSize(R.dimen.track_dropdown_width)); mListPopupWindow.setAnchorView(mRootView); mListPopupWindow.setOnItemClickListener(TracksDropdownFragment.this); mListPopupWindow.show(); mListPopupWindow.setOnDismissListener(TracksDropdownFragment.this); } }); return mRootView; } @Override public void onAttach(Activity activity) { super.onAttach(activity); if (!(activity instanceof Callbacks)) { throw new ClassCastException("Activity must implement fragment's 
callbacks."); } mCallbacks = (Callbacks) activity; } @Override public void onDetach() { super.onDetach(); mCallbacks = sDummyCallbacks; if (mListPopupWindow != null) { mListPopupWindow.dismiss(); } } /** {@inheritDoc} */ public void onItemClick(AdapterView<?> parent, View view, int position, long id) { final Cursor cursor = (Cursor) mAdapter.getItem(position); loadTrack(cursor, true); if (mListPopupWindow != null) { mListPopupWindow.dismiss(); } } public String getTrackName() { return (String) mTitle.getText(); } private void loadTrack(Cursor cursor, boolean triggerCallback) { final int trackColor; final Resources res = getResources(); if (cursor != null) { trackColor = cursor.getInt(TracksAdapter.TracksQuery.TRACK_COLOR); mTrackId = cursor.getString(TracksAdapter.TracksQuery.TRACK_ID); String trackName = cursor.getString(TracksAdapter.TracksQuery.TRACK_NAME); mTitle.setText(trackName); mAbstract.setText(cursor.getString(TracksAdapter.TracksQuery.TRACK_ABSTRACT)); int iconResId = res.getIdentifier( "track_" + ParserUtils.sanitizeId(trackName), "drawable", getActivity().getPackageName()); if (iconResId != 0) { BitmapDrawable sourceIconDrawable = (BitmapDrawable) res.getDrawable(iconResId); Bitmap icon = Bitmap.createBitmap(sourceIconDrawable.getIntrinsicWidth(), sourceIconDrawable.getIntrinsicHeight(), Bitmap.Config.ARGB_8888); Canvas canvas = new Canvas(icon); sourceIconDrawable.setBounds(0, 0, icon.getWidth(), icon.getHeight()); sourceIconDrawable.draw(canvas); BitmapDrawable iconDrawable = new BitmapDrawable(res, icon); mIcon.setImageDrawable(iconDrawable); } else { mIcon.setImageDrawable(null); } } else { trackColor = res.getColor(R.color.all_track_color); mTrackId = ScheduleContract.Tracks.ALL_TRACK_ID; mIcon.setImageDrawable(null); switch (mViewType) { case VIEW_TYPE_SESSIONS: mTitle.setText(R.string.all_tracks_sessions); mAbstract.setText(R.string.all_tracks_subtitle_sessions); break; case VIEW_TYPE_OFFICE_HOURS: 
mTitle.setText(R.string.all_tracks_office_hours); mAbstract.setText(R.string.all_tracks_subtitle_office_hours); break; case VIEW_TYPE_SANDBOX: mTitle.setText(R.string.all_tracks_sandbox); mAbstract.setText(R.string.all_tracks_subtitle_sandbox); break; } } mRootView.setBackgroundColor(trackColor); mCallbacks.onTrackNameAvailable(mTrackId, mTitle.getText().toString()); if (triggerCallback) { mHandler.post(new Runnable() { @Override public void run() { mCallbacks.onTrackSelected(mTrackId); } }); } } public void onDismiss() { mListPopupWindow = null; } @Override public Loader<Cursor> onCreateLoader(int id, Bundle data) { // Filter our tracks query to only include those with valid results String[] projection = TracksAdapter.TracksQuery.PROJECTION; String selection = null; switch (mViewType) { case VIEW_TYPE_SESSIONS: // Only show tracks with at least one session projection = TracksAdapter.TracksQuery.PROJECTION_WITH_SESSIONS_COUNT; selection = ScheduleContract.Tracks.SESSIONS_COUNT + ">0"; break; case VIEW_TYPE_OFFICE_HOURS: // Only show tracks with at least one office hours projection = TracksAdapter.TracksQuery.PROJECTION_WITH_OFFICE_HOURS_COUNT; selection = ScheduleContract.Tracks.OFFICE_HOURS_COUNT + ">0"; break; case VIEW_TYPE_SANDBOX: // Only show tracks with at least one company projection = TracksAdapter.TracksQuery.PROJECTION_WITH_SANDBOX_COUNT; selection = ScheduleContract.Tracks.SANDBOX_COUNT + ">0"; break; } return new CursorLoader(getActivity(), ScheduleContract.Tracks.CONTENT_URI, projection, selection, null, ScheduleContract.Tracks.DEFAULT_SORT); } @Override public void onLoadFinished(Loader<Cursor> loader, Cursor cursor) { if (getActivity() == null || cursor == null) { return; } boolean trackLoaded = false; if (mTrackId != null) { cursor.moveToFirst(); while (!cursor.isAfterLast()) { if (mTrackId.equals(cursor.getString(TracksAdapter.TracksQuery.TRACK_ID))) { loadTrack(cursor, false); trackLoaded = true; break; } cursor.moveToNext(); } } if 
(!trackLoaded) { loadTrack(null, false); } mAdapter.setHasAllItem(true); mAdapter.changeCursor(cursor); } @Override public void onLoaderReset(Loader<Cursor> cusor) { } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.druid.query.aggregation.histogram;

import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.collections.CloseableStupidPool;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.io.Closer;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.Result;
import org.apache.druid.query.TestQueryRunners;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
import org.apache.druid.query.topn.TopNQuery;
import org.apache.druid.query.topn.TopNQueryBuilder;
import org.apache.druid.query.topn.TopNQueryConfig;
import org.apache.druid.query.topn.TopNQueryQueryToolChest;
import org.apache.druid.query.topn.TopNQueryRunnerFactory;
import org.apache.druid.query.topn.TopNResultValue;
import org.apache.druid.segment.TestHelper;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/**
 * Parameterized tests that run a TopN query containing an approximate
 * histogram aggregator against every query-runner configuration produced by
 * {@link #constructorFeeder()}, comparing against fixed golden results.
 */
@RunWith(Parameterized.class)
public class ApproximateHistogramTopNQueryTest
{
  // Closes the buffer pools created in constructorFeeder() after all
  // parameterized runs have finished.
  private static final Closer RESOURCE_CLOSER = Closer.create();

  @AfterClass
  public static void teardown() throws IOException
  {
    RESOURCE_CLOSER.close();
  }

  /**
   * Builds one set of query runners over the default non-blocking buffer pool
   * and one over a custom 2000-byte pool, so the query is exercised with both
   * buffer sizes. Both pools are registered with {@link #RESOURCE_CLOSER}.
   */
  @Parameterized.Parameters(name = "{0}")
  public static Iterable<Object[]> constructorFeeder()
  {
    final CloseableStupidPool<ByteBuffer> defaultPool = TestQueryRunners.createDefaultNonBlockingPool();
    final CloseableStupidPool<ByteBuffer> customPool = new CloseableStupidPool<>(
        "TopNQueryRunnerFactory-bufferPool",
        () -> ByteBuffer.allocate(2000)
    );
    RESOURCE_CLOSER.register(defaultPool);
    RESOURCE_CLOSER.register(customPool);

    return QueryRunnerTestHelper.transformToConstructionFeeder(
        Iterables.concat(
            QueryRunnerTestHelper.makeQueryRunners(
                new TopNQueryRunnerFactory(
                    defaultPool,
                    new TopNQueryQueryToolChest(
                        new TopNQueryConfig(),
                        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
                    ),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER
                )
            ),
            QueryRunnerTestHelper.makeQueryRunners(
                new TopNQueryRunnerFactory(
                    customPool,
                    new TopNQueryQueryToolChest(
                        new TopNQueryConfig(),
                        QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
                    ),
                    QueryRunnerTestHelper.NOOP_QUERYWATCHER
                )
            )
        )
    );
  }

  // The runner under test, injected per parameterization.
  private final QueryRunner runner;

  public ApproximateHistogramTopNQueryTest(
      QueryRunner runner
  )
  {
    this.runner = runner;
  }

  /**
   * Runs a threshold-4 TopN on the market dimension with an approximate
   * histogram aggregator and a median (0.5 quantile) post-aggregator, and
   * checks the full result set — including the histogram bucket boundaries
   * and counts — against hard-coded expected values.
   */
  @Test
  public void testTopNWithApproximateHistogramAgg()
  {
    // resolution=10, numBuckets=5, unbounded value range, no folding.
    ApproximateHistogramAggregatorFactory factory = new ApproximateHistogramAggregatorFactory(
        "apphisto",
        "index",
        10,
        5,
        Float.NEGATIVE_INFINITY,
        Float.POSITIVE_INFINITY,
        false
    );

    TopNQuery query = new TopNQueryBuilder()
        .dataSource(QueryRunnerTestHelper.DATA_SOURCE)
        .granularity(QueryRunnerTestHelper.ALL_GRAN)
        .dimension(QueryRunnerTestHelper.MARKET_DIMENSION)
        .metric(QueryRunnerTestHelper.dependentPostAggMetric)
        .threshold(4)
        .intervals(QueryRunnerTestHelper.FULL_ON_INTERVAL_SPEC)
        .aggregators(
            Lists.newArrayList(
                Iterables.concat(
                    QueryRunnerTestHelper.COMMON_DOUBLE_AGGREGATORS,
                    Lists.newArrayList(
                        new DoubleMaxAggregatorFactory("maxIndex", "index"),
                        new DoubleMinAggregatorFactory("minIndex", "index"),
                        factory
                    )
                )
            )
        )
        .postAggregators(
            QueryRunnerTestHelper.ADD_ROWS_INDEX_CONSTANT,
            QueryRunnerTestHelper.DEPENDENT_POST_AGG,
            new QuantilePostAggregator("quantile", "apphisto", 0.5f)
        )
        .build();

    // Golden results: one row per market, ordered by the dependent post-agg
    // metric. The float[] are histogram bucket boundaries, the double[] the
    // per-bucket counts.
    List<Result<TopNResultValue>> expectedResults = Collections.singletonList(
        new Result<TopNResultValue>(
            DateTimes.of("2011-01-12T00:00:00.000Z"),
            new TopNResultValue(
                Arrays.<Map<String, Object>>asList(
                    ImmutableMap.<String, Object>builder()
                        .put(QueryRunnerTestHelper.MARKET_DIMENSION, "total_market")
                        .put("rows", 186L)
                        .put("index", 215679.82879638672D)
                        .put("addRowsIndexConstant", 215866.82879638672D)
                        .put(QueryRunnerTestHelper.dependentPostAggMetric, 216053.82879638672D)
                        .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                        .put("maxIndex", 1743.9217529296875D)
                        .put("minIndex", 792.3260498046875D)
                        .put("quantile", 1085.6775f)
                        .put(
                            "apphisto",
                            new Histogram(
                                new float[]{
                                    554.4271240234375f,
                                    792.3260498046875f,
                                    1030.2249755859375f,
                                    1268.1239013671875f,
                                    1506.0228271484375f,
                                    1743.9217529296875f
                                },
                                new double[]{
                                    0.0D,
                                    39.42073059082031D,
                                    103.29110717773438D,
                                    34.93659591674805D,
                                    8.351564407348633D
                                }
                            )
                        )
                        .build(),
                    ImmutableMap.<String, Object>builder()
                        .put(QueryRunnerTestHelper.MARKET_DIMENSION, "upfront")
                        .put("rows", 186L)
                        .put("index", 192046.1060180664D)
                        .put("addRowsIndexConstant", 192233.1060180664D)
                        .put(QueryRunnerTestHelper.dependentPostAggMetric, 192420.1060180664D)
                        .put("uniques", QueryRunnerTestHelper.UNIQUES_2)
                        .put("maxIndex", 1870.06103515625D)
                        .put("minIndex", 545.9906005859375D)
                        .put("quantile", 880.9881f)
                        .put(
                            "apphisto",
                            new Histogram(
                                new float[]{
                                    214.97299194335938f,
                                    545.9906005859375f,
                                    877.0081787109375f,
                                    1208.0257568359375f,
                                    1539.0433349609375f,
                                    1870.06103515625f
                                },
                                new double[]{
                                    0.0D,
                                    67.53287506103516D,
                                    72.22068786621094D,
                                    31.984678268432617D,
                                    14.261756896972656D
                                }
                            )
                        )
                        .build(),
                    ImmutableMap.<String, Object>builder()
                        .put(QueryRunnerTestHelper.MARKET_DIMENSION, "spot")
                        .put("rows", 837L)
                        .put("index", 95606.57232284546D)
                        .put("addRowsIndexConstant", 96444.57232284546D)
                        .put(QueryRunnerTestHelper.dependentPostAggMetric, 97282.57232284546D)
                        .put("uniques", QueryRunnerTestHelper.UNIQUES_9)
                        .put("maxIndex", 277.2735290527344D)
                        .put("minIndex", 59.02102279663086D)
                        .put("quantile", 101.78856f)
                        .put(
                            "apphisto",
                            new Histogram(
                                new float[]{
                                    4.457897186279297f,
                                    59.02102279663086f,
                                    113.58415222167969f,
                                    168.14727783203125f,
                                    222.7104034423828f,
                                    277.2735290527344f
                                },
                                new double[]{
                                    0.0D,
                                    462.4309997558594D,
                                    357.5404968261719D,
                                    15.022850036621094D,
                                    2.0056631565093994D
                                }
                            )
                        )
                        .build()
                )
            )
        )
    );
    TestHelper.assertExpectedResults(expectedResults, runner.run(QueryPlus.wrap(query)));
  }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.rya.api.client.accumulo;

import static com.google.common.base.Preconditions.checkNotNull;

import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.accumulo.core.client.AccumuloException;
import org.apache.accumulo.core.client.AccumuloSecurityException;
import org.apache.accumulo.core.client.Connector;
import org.apache.accumulo.core.client.Instance;
import org.apache.accumulo.core.client.ZooKeeperInstance;
import org.apache.accumulo.core.client.security.tokens.PasswordToken;
import org.apache.accumulo.minicluster.MiniAccumuloCluster;
import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.rya.accumulo.MiniAccumuloClusterInstance;
import org.apache.rya.accumulo.MiniAccumuloSingleton;
import org.apache.rya.accumulo.RyaTestInstanceRule;
import org.apache.rya.indexing.pcj.fluo.app.export.rya.RyaExportParameters;
import org.apache.rya.indexing.pcj.fluo.app.observers.FilterObserver;
import org.apache.rya.indexing.pcj.fluo.app.observers.JoinObserver;
import org.apache.rya.indexing.pcj.fluo.app.observers.QueryResultObserver;
import org.apache.rya.indexing.pcj.fluo.app.observers.StatementPatternObserver;
import org.apache.rya.indexing.pcj.fluo.app.observers.TripleObserver;
import org.apache.zookeeper.ClientCnxn;
import org.junit.After;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Rule;
import org.openrdf.repository.RepositoryConnection;
import org.openrdf.repository.RepositoryException;
import org.openrdf.sail.Sail;
import org.openrdf.sail.SailException;

import org.apache.fluo.api.client.FluoAdmin;
import org.apache.fluo.api.client.FluoAdmin.AlreadyInitializedException;
import org.apache.fluo.api.client.FluoAdmin.TableExistsException;
import org.apache.fluo.api.client.FluoClient;
import org.apache.fluo.api.client.FluoFactory;
import org.apache.fluo.api.config.FluoConfiguration;
import org.apache.fluo.api.config.ObserverSpecification;
import org.apache.fluo.api.mini.MiniFluo;
import org.apache.rya.accumulo.AccumuloRdfConfiguration;
import org.apache.rya.api.client.RyaClientException;
import org.apache.rya.api.client.Install;
import org.apache.rya.api.client.Install.DuplicateInstanceNameException;
import org.apache.rya.api.client.Install.InstallConfiguration;
import org.apache.rya.api.instance.RyaDetailsRepository.RyaDetailsRepositoryException;
import org.apache.rya.api.persist.RyaDAOException;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.external.PrecomputedJoinIndexerConfig;
import org.apache.rya.rdftriplestore.RyaSailRepository;
import org.apache.rya.rdftriplestore.inference.InferenceEngineException;
import org.apache.rya.sail.config.RyaSailFactory;

/**
 * Base class for integration tests that ensure the Fluo application processes
 * PCJ results correctly.
 * <p>
 * It is abstract and contains no tests itself; it provides shared Mini
 * Accumulo, Mini Fluo, and Rya setup/teardown for its subclasses.
 */
public abstract class FluoITBase {
    private static final Logger log = Logger.getLogger(FluoITBase.class);

    // Mini Accumulo Cluster (a process-wide singleton shared by all tests)
    private static MiniAccumuloClusterInstance clusterInstance = MiniAccumuloSingleton.getInstance();
    private static MiniAccumuloCluster cluster;

    // Populated once in beforeClass() from the running cluster.
    private static String instanceName = null;
    private static String zookeepers = null;

    protected static Connector accumuloConn = null;

    // Fluo data store and connections (fresh per test method).
    protected MiniFluo fluo = null;
    protected FluoClient fluoClient = null;

    // Rya data store and connections (fresh per test method).
    protected RyaSailRepository ryaRepo = null;
    protected RepositoryConnection ryaConn = null;

    // Provides a unique Rya instance name per test so tests don't collide.
    @Rule
    public RyaTestInstanceRule testInstance = new RyaTestInstanceRule(false);

    @BeforeClass
    public static void beforeClass() throws Exception {
        // Zookeeper client logging is very noisy; silence everything below ERROR.
        Logger.getLogger(ClientCnxn.class).setLevel(Level.ERROR);

        // Setup and start the Mini Accumulo.
        cluster = clusterInstance.getCluster();

        // Store a connector to the Mini Accumulo.
        instanceName = cluster.getInstanceName();
        zookeepers = cluster.getZooKeepers();

        final Instance instance = new ZooKeeperInstance(instanceName, zookeepers);
        accumuloConn = instance.getConnector(clusterInstance.getUsername(), new PasswordToken(clusterInstance.getPassword()));
    }

    @Before
    public void setupMiniResources() throws Exception {
        // Initialize the Mini Fluo that will be used to store created queries.
        fluo = startMiniFluo();
        fluoClient = FluoFactory.newClient(fluo.getClientConfiguration());

        // Initialize the Rya that will be used by the tests.
        ryaRepo = setupRya();
        ryaConn = ryaRepo.getConnection();
    }

    @After
    public void shutdownMiniResources() {
        // Each resource is shut down independently (reverse of creation order)
        // so one failure doesn't prevent the others from being released.
        if (ryaConn != null) {
            try {
                log.info("Shutting down Rya Connection.");
                ryaConn.close();
                log.info("Rya Connection shut down.");
            } catch (final Exception e) {
                log.error("Could not shut down the Rya Connection.", e);
            }
        }

        if (ryaRepo != null) {
            try {
                log.info("Shutting down Rya Repo.");
                ryaRepo.shutDown();
                log.info("Rya Repo shut down.");
            } catch (final Exception e) {
                log.error("Could not shut down the Rya Repo.", e);
            }
        }

        if (fluoClient != null) {
            try {
                log.info("Shutting down Fluo Client.");
                fluoClient.close();
                log.info("Fluo Client shut down.");
            } catch (final Exception e) {
                log.error("Could not shut down the Fluo Client.", e);
            }
        }

        if (fluo != null) {
            try {
                log.info("Shutting down Mini Fluo.");
                fluo.close();
                log.info("Mini Fluo shut down.");
            } catch (final Exception e) {
                log.error("Could not shut down the Mini Fluo.", e);
            }
        }
    }

    /** @return The unique Rya instance name assigned to the current test. */
    public String getRyaInstanceName() {
        return testInstance.getRyaInstanceName();
    }

    /** @return Connection details for the shared Mini Accumulo cluster. */
    public AccumuloConnectionDetails createConnectionDetails() {
        return new AccumuloConnectionDetails(
                clusterInstance.getUsername(),
                clusterInstance.getPassword().toCharArray(),
                clusterInstance.getInstanceName(),
                clusterInstance.getZookeepers());
    }

    /**
     * Override this method to provide an output configuration to the Fluo application.
     * <p>
     * Exports to the Rya instance by default.
     *
     * @return The parameters that will be passed to {@link QueryResultObserver} at startup.
     */
    protected Map<String, String> makeExportParams() {
        final HashMap<String, String> params = new HashMap<>();

        // RyaExportParameters writes its settings into the backing map, which
        // is what actually gets returned.
        final RyaExportParameters ryaParams = new RyaExportParameters(params);
        ryaParams.setExportToRya(true);
        ryaParams.setAccumuloInstanceName(instanceName);
        ryaParams.setZookeeperServers(zookeepers);
        ryaParams.setExporterUsername(clusterInstance.getUsername());
        ryaParams.setExporterPassword(clusterInstance.getPassword());
        ryaParams.setRyaInstanceName(getRyaInstanceName());

        return params;
    }

    /**
     * Setup a Mini Fluo cluster that uses a temporary directory to store its
     * data.
     *
     * @return A Mini Fluo cluster.
     */
    protected MiniFluo startMiniFluo() throws AlreadyInitializedException, TableExistsException {
        // Setup the observers that will be used by the Fluo PCJ Application.
        final List<ObserverSpecification> observers = new ArrayList<>();
        observers.add(new ObserverSpecification(TripleObserver.class.getName()));
        observers.add(new ObserverSpecification(StatementPatternObserver.class.getName()));
        observers.add(new ObserverSpecification(JoinObserver.class.getName()));
        observers.add(new ObserverSpecification(FilterObserver.class.getName()));

        // Provide export parameters child test classes may provide to the
        // export observer.
        final ObserverSpecification exportObserverConfig = new ObserverSpecification(
                QueryResultObserver.class.getName(),
                makeExportParams());
        observers.add(exportObserverConfig);

        // Configure how the mini fluo will run.
        // Reuses the already-running Mini Accumulo rather than starting its own.
        final FluoConfiguration config = new FluoConfiguration();
        config.setMiniStartAccumulo(false);
        config.setAccumuloInstance(instanceName);
        config.setAccumuloUser(clusterInstance.getUsername());
        config.setAccumuloPassword(clusterInstance.getPassword());
        config.setInstanceZookeepers(zookeepers + "/fluo");
        config.setAccumuloZookeepers(zookeepers);

        config.setApplicationName(getRyaInstanceName());
        config.setAccumuloTable("fluo" + getRyaInstanceName());

        config.addObservers(observers);

        // Clear any state left over from a previous run before starting.
        FluoFactory.newAdmin(config).initialize(
                new FluoAdmin.InitializationOptions().setClearTable(true).setClearZookeeper(true)
        );
        return FluoFactory.newMiniFluo(config);
    }

    /**
     * Sets up a Rya instance.
     */
    protected RyaSailRepository setupRya()
            throws AccumuloException, AccumuloSecurityException, RepositoryException, RyaDAOException,
            NumberFormatException, UnknownHostException, InferenceEngineException, AlreadyInitializedException,
            RyaDetailsRepositoryException, DuplicateInstanceNameException, RyaClientException, SailException {
        checkNotNull(instanceName);
        checkNotNull(zookeepers);

        // Setup Rya configuration values.
        final AccumuloRdfConfiguration conf = new AccumuloRdfConfiguration();
        conf.setTablePrefix(getRyaInstanceName());
        conf.setDisplayQueryPlan(true);
        conf.setBoolean(ConfigUtils.USE_MOCK_INSTANCE, false);
        conf.set(ConfigUtils.CLOUDBASE_USER, clusterInstance.getUsername());
        conf.set(ConfigUtils.CLOUDBASE_PASSWORD, clusterInstance.getPassword());
        conf.set(ConfigUtils.CLOUDBASE_INSTANCE, clusterInstance.getInstanceName());
        conf.set(ConfigUtils.CLOUDBASE_ZOOKEEPERS, clusterInstance.getZookeepers());
        conf.set(ConfigUtils.USE_PCJ, "true");
        conf.set(ConfigUtils.FLUO_APP_NAME, getRyaInstanceName());
        conf.set(ConfigUtils.PCJ_STORAGE_TYPE,
                PrecomputedJoinIndexerConfig.PrecomputedJoinStorageType.ACCUMULO.toString());
        conf.set(ConfigUtils.PCJ_UPDATER_TYPE,
                PrecomputedJoinIndexerConfig.PrecomputedJoinUpdaterType.FLUO.toString());
        conf.set(ConfigUtils.CLOUDBASE_AUTHS, "");

        // Install the test instance of Rya.
        final Install install = new AccumuloInstall(createConnectionDetails(), accumuloConn);

        final InstallConfiguration installConfig = InstallConfiguration.builder()
                .setEnableTableHashPrefix(true)
                .setEnableEntityCentricIndex(true)
                .setEnableFreeTextIndex(true)
                .setEnableTemporalIndex(true)
                .setEnablePcjIndex(true)
                .setEnableGeoIndex(true)
                .setFluoPcjAppName(getRyaInstanceName())
                .build();
        install.install(getRyaInstanceName(), installConfig);

        // Connect to the instance of Rya that was just installed.
        final Sail sail = RyaSailFactory.getInstance(conf);
        final RyaSailRepository ryaRepo = new RyaSailRepository(sail);

        return ryaRepo;
    }
}
package com.jmed.condominapp.pojos;

import android.os.Parcel;
import android.os.Parcelable;

import java.util.Comparator;
import java.util.Objects;

/**
 * Parcelable data holder for a community user (administrator, president or
 * neighbour). Identity for {@link #equals(Object)}/{@link #hashCode()} is
 * keyed on the phone number ({@code us_phone}), which this app treats as the
 * user's unique identifier.
 */
public class Pojo_User implements Parcelable {

    // User category constants (stored in us_category).
    public static final int ADMINISTRATOR = 0;
    public static final int PRESIDENT = 1;
    public static final int NEIGHBOUR = 2;

    private String us_id;
    private int us_community;      // id of the community the user belongs to
    private String us_floor;
    private String us_door;
    private String us_phone;       // unique key; drives equals()/hashCode()
    private String us_mail;
    private String us_name;
    private int us_category;       // one of ADMINISTRATOR / PRESIDENT / NEIGHBOUR
    private String us_photo;       // photo URL or path
    private boolean us_deleted;    // soft-delete flag

    public Pojo_User(String us_id, int us_community, String us_floor, String us_door,
                     String us_phone, String us_mail, String us_name, int us_category,
                     String us_photo, boolean us_deleted) {
        this.us_id = us_id;
        this.us_community = us_community;
        this.us_floor = us_floor;
        this.us_door = us_door;
        this.us_phone = us_phone;
        this.us_mail = us_mail;
        this.us_name = us_name;
        this.us_category = us_category;
        this.us_photo = us_photo;
        this.us_deleted = us_deleted;
    }

    //region Getter
    //---------------------------------------------------------------------------------------------
    public String getUs_id() {
        return us_id;
    }

    public int getUs_community() {
        return us_community;
    }

    public String getUs_floor() {
        return us_floor;
    }

    public String getUs_door() {
        return us_door;
    }

    public String getUs_phone() {
        return us_phone;
    }

    public String getUs_mail() {
        return us_mail;
    }

    public String getUs_name() {
        return us_name;
    }

    public int getUs_category() {
        return us_category;
    }

    public String getUs_photo() {
        return us_photo;
    }

    public boolean isUs_deleted() {
        return us_deleted;
    }
    //---------------------------------------------------------------------------------------------
    //endregion

    //region Setter
    //---------------------------------------------------------------------------------------------
    public void setUs_id(String us_id) {
        this.us_id = us_id;
    }

    public void setUs_community(int us_community) {
        this.us_community = us_community;
    }

    public void setUs_floor(String us_floor) {
        this.us_floor = us_floor;
    }

    public void setUs_door(String us_door) {
        this.us_door = us_door;
    }

    public void setUs_phone(String us_phone) {
        this.us_phone = us_phone;
    }

    public void setUs_mail(String us_mail) {
        this.us_mail = us_mail;
    }

    public void setUs_name(String us_name) {
        this.us_name = us_name;
    }

    public void setUs_category(int us_category) {
        this.us_category = us_category;
    }

    public void setUs_photo(String us_photo) {
        this.us_photo = us_photo;
    }

    public void setUs_deleted(boolean us_deleted) {
        this.us_deleted = us_deleted;
    }
    //---------------------------------------------------------------------------------------------
    //endregion

    //region Override methods
    //---------------------------------------------------------------------------------------------
    @Override
    public String toString() {
        return "User: " + us_name + " (" + us_phone + ") -> " + us_floor + us_door;
    }

    /**
     * Two users are equal when they have the same phone number.
     * Null-safe: a null phone no longer throws a NullPointerException.
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof Pojo_User)) {
            return false;
        }
        Pojo_User another = (Pojo_User) obj;
        return Objects.equals(this.us_phone, another.us_phone);
    }

    /**
     * Must be overridden together with {@link #equals(Object)} so that equal
     * users hash to the same bucket (HashSet/HashMap correctness).
     */
    @Override
    public int hashCode() {
        return us_phone == null ? 0 : us_phone.hashCode();
    }
    //---------------------------------------------------------------------------------------------
    //endregion

    //region List Comparators
    //---------------------------------------------------------------------------------------------
    /** Orders users alphabetically by name, case-insensitively. */
    public static final Comparator<Pojo_User> COMPARATOR_USER_NAME = new Comparator<Pojo_User>() {
        @Override
        public int compare(Pojo_User o1, Pojo_User o2) {
            return o1.getUs_name().toUpperCase().compareTo(o2.getUs_name().toUpperCase());
        }
    };

    /** Orders users lexicographically by phone number. */
    public static final Comparator<Pojo_User> COMPARATOR_USER_PHONE = new Comparator<Pojo_User>() {
        @Override
        public int compare(Pojo_User o1, Pojo_User o2) {
            return o1.getUs_phone().compareTo(o2.getUs_phone());
        }
    };
    //---------------------------------------------------------------------------------------------
    //endregion

    //region Parcerable implementation
    //---------------------------------------------------------------------------------------------
    // NOTE: read order below must exactly mirror the write order in writeToParcel().
    protected Pojo_User(Parcel in) {
        us_id = in.readString();
        us_community = in.readInt();
        us_floor = in.readString();
        us_door = in.readString();
        us_phone = in.readString();
        us_mail = in.readString();
        us_name = in.readString();
        us_category = in.readInt();
        us_photo = in.readString();
        us_deleted = in.readByte() != 0;
    }

    public static final Creator<Pojo_User> CREATOR = new Creator<Pojo_User>() {
        @Override
        public Pojo_User createFromParcel(Parcel in) {
            return new Pojo_User(in);
        }

        @Override
        public Pojo_User[] newArray(int size) {
            return new Pojo_User[size];
        }
    };

    @Override
    public void writeToParcel(Parcel parcel, int i) {
        parcel.writeString(us_id);
        parcel.writeInt(us_community);
        parcel.writeString(us_floor);
        parcel.writeString(us_door);
        parcel.writeString(us_phone);
        parcel.writeString(us_mail);
        parcel.writeString(us_name);
        parcel.writeInt(us_category);
        parcel.writeString(us_photo);
        parcel.writeByte((byte) (us_deleted ? 1 : 0)); // booleans are stored as 0/1 bytes
    }

    @Override
    public int describeContents() {
        return 0;
    }
    //---------------------------------------------------------------------------------------------
    //endregion
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.zookeeper.test;

import static org.apache.zookeeper.test.ClientBase.CONNECTION_TIMEOUT;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import javax.management.Attribute;
import javax.management.AttributeNotFoundException;
import javax.management.InstanceNotFoundException;
import javax.management.InvalidAttributeValueException;
import javax.management.MBeanException;
import javax.management.MalformedObjectNameException;
import javax.management.ObjectName;
import javax.management.ReflectionException;
import javax.management.RuntimeMBeanException;
import org.apache.zookeeper.CreateMode;
import org.apache.zookeeper.DummyWatcher;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.PortAssignment;
import org.apache.zookeeper.Watcher.Event.KeeperState;
import org.apache.zookeeper.ZooDefs.Ids;
import org.apache.zookeeper.ZooKeeper;
import org.apache.zookeeper.ZooKeeper.States;
import org.apache.zookeeper.admin.ZooKeeperAdmin;
import org.apache.zookeeper.jmx.MBeanRegistry;
import org.apache.zookeeper.jmx.ZKMBeanInfo;
import org.apache.zookeeper.server.admin.Commands;
import org.apache.zookeeper.server.quorum.QuorumPeerConfig;
import org.apache.zookeeper.server.util.PortForwarder;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Integration tests for ZooKeeper Observers, parameterized so every test runs
 * once against a regular leader-fed observer ({@code testObserverMaster=false})
 * and once against an observer following a dedicated observer master
 * ({@code testObserverMaster=true}). The quorum peers q1/q2, the observer q3,
 * the event latch, and the port constants come from ObserverMasterTestBase.
 */
public class ObserverMasterTest extends ObserverMasterTestBase {

    protected static final Logger LOG = LoggerFactory.getLogger(ObserverMasterTest.class);

    /**
     * This test ensures two things:
     * 1. That Observers can successfully proxy requests to the ensemble.
     * 2. That Observers don't participate in leader elections.
     * The second is tested by constructing an ensemble where a leader would
     * be elected if and only if an Observer voted.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testObserver(boolean testObserverMaster) throws Exception {
        // We expect two notifications before we want to continue
        latch = new CountDownLatch(2);
        setUp(-1, testObserverMaster);
        q3.start();
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                "waiting for server 3 being up");
        if (testObserverMaster) {
            // Confirm the observer attached to the observer-master port, not the leader.
            int masterPort = q3.getQuorumPeer().observer.getSocket().getPort();
            LOG.info("port {} {}", masterPort, OM_PORT);
            assertEquals(masterPort, OM_PORT, "observer failed to connect to observer master");
        }

        zk = new ZooKeeper("127.0.0.1:" + CLIENT_PORT_OBS, ClientBase.CONNECTION_TIMEOUT, this);
        zk.create("/obstest", "test".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);

        // Assert that commands are getting forwarded correctly
        assertEquals(new String(zk.getData("/obstest", null, null)), "test");

        // Now check that other commands don't blow everything up
        zk.sync("/", null, null);
        zk.setData("/obstest", "test2".getBytes(), -1);
        zk.getChildren("/", false);
        assertEquals(zk.getState(), States.CONNECTED);

        LOG.info("Shutting down server 2");
        // Now kill one of the other real servers
        q2.shutdown();
        assertTrue(ClientBase.waitForServerDown("127.0.0.1:" + CLIENT_PORT_QP2, ClientBase.CONNECTION_TIMEOUT),
                "Waiting for server 2 to shut down");

        LOG.info("Server 2 down");
        // Now the resulting ensemble shouldn't be quorate
        latch.await();
        assertNotSame(KeeperState.SyncConnected, lastEvent.getState(),
                "Client is still connected to non-quorate cluster");

        LOG.info("Latch returned");
        try {
            assertNotEquals("Shouldn't get a response when cluster not quorate!", "test",
                    new String(zk.getData("/obstest", null, null)));
        } catch (ConnectionLossException c) {
            // Expected: reads fail while the ensemble has no quorum.
            LOG.info("Connection loss exception caught - ensemble not quorate (this is expected)");
        }

        latch = new CountDownLatch(1);
        LOG.info("Restarting server 2");
        // Bring it back
        //q2 = new MainThread(2, CLIENT_PORT_QP2, quorumCfgSection, extraCfgs);
        q2.start();
        LOG.info("Waiting for server 2 to come up");
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_QP2, CONNECTION_TIMEOUT),
                "waiting for server 2 being up");
        LOG.info("Server 2 started, waiting for latch");
        latch.await();

        // It's possible our session expired - but this is ok, shows we
        // were able to talk to the ensemble
        assertTrue((KeeperState.SyncConnected == lastEvent.getState() || KeeperState.Expired == lastEvent.getState()),
                "Client didn't reconnect to quorate ensemble (state was" + lastEvent.getState() + ")");

        // Revalidation: route a session through port forwarders so we can kill the
        // path to the leader and force the client onto the observer, which must
        // revalidate the existing session (the ephemeral node must survive).
        LOG.info("perform a revalidation test");
        int leaderProxyPort = PortAssignment.unique();
        int obsProxyPort = PortAssignment.unique();
        int leaderPort = q1.getQuorumPeer().leader == null ? CLIENT_PORT_QP2 : CLIENT_PORT_QP1;
        PortForwarder leaderPF = new PortForwarder(leaderProxyPort, leaderPort);

        latch = new CountDownLatch(1);
        ZooKeeper client = new ZooKeeper(
                String.format("127.0.0.1:%d,127.0.0.1:%d", leaderProxyPort, obsProxyPort),
                ClientBase.CONNECTION_TIMEOUT, this);
        latch.await();
        client.create("/revalidtest", "test".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
        assertNotNull(client.exists("/revalidtest", null), "Read-after write failed");

        latch = new CountDownLatch(2);
        PortForwarder obsPF = new PortForwarder(obsProxyPort, CLIENT_PORT_OBS);
        try {
            leaderPF.shutdown();
        } catch (Exception e) {
            // ignore?
        }
        latch.await();
        assertEquals(new String(client.getData("/revalidtest", null, null)), "test");
        client.close();
        obsPF.shutdown();
        shutdown();
    }

    /**
     * Standalone version of the session-revalidation scenario above: a client
     * whose leader connection is severed must fail over to the observer and
     * still see its ephemeral node after session revalidation.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testRevalidation(boolean testObserverMaster) throws Exception {
        setUp(-1, testObserverMaster);
        q3.start();
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                "waiting for server 3 being up");
        final int leaderProxyPort = PortAssignment.unique();
        final int obsProxyPort = PortAssignment.unique();
        int leaderPort = q1.getQuorumPeer().leader == null ? CLIENT_PORT_QP2 : CLIENT_PORT_QP1;
        PortForwarder leaderPF = new PortForwarder(leaderProxyPort, leaderPort);

        latch = new CountDownLatch(1);
        zk = new ZooKeeper(
                String.format("127.0.0.1:%d,127.0.0.1:%d", leaderProxyPort, obsProxyPort),
                ClientBase.CONNECTION_TIMEOUT, this);
        latch.await();
        zk.create("/revalidtest", "test".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
        assertNotNull(zk.exists("/revalidtest", null), "Read-after write failed");

        latch = new CountDownLatch(2);
        PortForwarder obsPF = new PortForwarder(obsProxyPort, CLIENT_PORT_OBS);
        try {
            leaderPF.shutdown();
        } catch (Exception e) {
            // ignore?
        }
        latch.await();
        assertEquals(new String(zk.getData("/revalidtest", null, null)), "test");
        obsPF.shutdown();
        shutdown();
    }

    /**
     * Fires a large number of concurrent async creates at both the observer and
     * a follower to verify commits are applied (and not dropped or reordered)
     * when proxied through the observer path.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testInOrderCommits(boolean testObserverMaster) throws Exception {
        setUp(-1, testObserverMaster);

        zk = new ZooKeeper("127.0.0.1:" + CLIENT_PORT_QP1, ClientBase.CONNECTION_TIMEOUT, null);
        for (int i = 0; i < 10; i++) {
            zk.create("/bulk" + i, ("Initial data of some size").getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        }
        zk.close();

        q3.start();
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                "waiting for observer to be up");

        latch = new CountDownLatch(1);
        zk = new ZooKeeper("127.0.0.1:" + CLIENT_PORT_QP1, ClientBase.CONNECTION_TIMEOUT, this);
        latch.await();
        assertEquals(zk.getState(), States.CONNECTED);

        zk.create("/init", "first".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);

        final long zxid = q1.getQuorumPeer().getLastLoggedZxid();

        // wait for change to propagate
        waitFor("Timeout waiting for observer sync", new WaitForCondition() {
            public boolean evaluate() {
                return zxid == q3.getQuorumPeer().getLastLoggedZxid();
            }
        }, 30);

        ZooKeeper obsZk = new ZooKeeper("127.0.0.1:" + CLIENT_PORT_OBS, ClientBase.CONNECTION_TIMEOUT, this);
        int followerPort = q1.getQuorumPeer().leader == null ? CLIENT_PORT_QP1 : CLIENT_PORT_QP2;
        ZooKeeper fZk = new ZooKeeper("127.0.0.1:" + followerPort, ClientBase.CONNECTION_TIMEOUT, this);
        final int numTransactions = 10001;
        CountDownLatch gate = new CountDownLatch(1); // released once to start both writers simultaneously
        CountDownLatch oAsyncLatch = new CountDownLatch(numTransactions);
        Thread oAsyncWriteThread = new Thread(new AsyncWriter(obsZk, numTransactions, true, oAsyncLatch, "/obs", gate));
        CountDownLatch fAsyncLatch = new CountDownLatch(numTransactions);
        Thread fAsyncWriteThread = new Thread(new AsyncWriter(fZk, numTransactions, true, fAsyncLatch, "/follower", gate));

        LOG.info("ASYNC WRITES");
        oAsyncWriteThread.start();
        fAsyncWriteThread.start();
        gate.countDown();

        oAsyncLatch.await();
        fAsyncLatch.await();

        oAsyncWriteThread.join(ClientBase.CONNECTION_TIMEOUT);
        if (oAsyncWriteThread.isAlive()) {
            LOG.error("asyncWriteThread is still alive");
        }
        fAsyncWriteThread.join(ClientBase.CONNECTION_TIMEOUT);
        if (fAsyncWriteThread.isAlive()) {
            LOG.error("asyncWriteThread is still alive");
        }

        obsZk.close();
        fZk.close();
        shutdown();
    }

    /**
     * Exercises the observer's JMX surface: the "mntr" stats must report an
     * observer_master_id, the synced-observer counts on both voting peers must
     * match the configured mode, terminateConnection must drop and re-establish
     * the learner link, and setting the LearnerMaster attribute must either
     * move the observer (observer-master mode) or be rejected (leader mode).
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testAdminCommands(boolean testObserverMaster) throws IOException, MBeanException, InstanceNotFoundException,
            ReflectionException, InterruptedException, MalformedObjectNameException, AttributeNotFoundException,
            InvalidAttributeValueException, KeeperException {
        // flush all beans, then start
        for (ZKMBeanInfo beanInfo : MBeanRegistry.getInstance().getRegisteredBeans()) {
            MBeanRegistry.getInstance().unregister(beanInfo);
        }

        JMXEnv.setUp();
        setUp(-1, testObserverMaster);
        q3.start();
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                "waiting for observer to be up");

        // Assert that commands are getting forwarded correctly
        zk = new ZooKeeper("127.0.0.1:" + CLIENT_PORT_OBS, ClientBase.CONNECTION_TIMEOUT, this);
        zk.create("/obstest", "test".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT);
        assertEquals(new String(zk.getData("/obstest", null, null)), "test");

        // test stats collection
        final Map<String, String> emptyMap = Collections.emptyMap();
        Map<String, Object> stats = Commands.runCommand("mntr", q3.getQuorumPeer().getActiveServer(), emptyMap).toMap();
        assertTrue(stats.containsKey("observer_master_id"), "observer not emitting observer_master_id");

        // check the stats for the first peer
        if (testObserverMaster) {
            // In OM mode only the follower acting as observer master syncs the observer.
            if (q1.getQuorumPeer().leader == null) {
                assertEquals(Integer.valueOf(1), q1.getQuorumPeer().getSynced_observers_metric());
            } else {
                assertEquals(Integer.valueOf(0), q1.getQuorumPeer().getSynced_observers_metric());
            }
        } else {
            // In classic mode the leader syncs the observer; followers report null.
            if (q1.getQuorumPeer().leader == null) {
                assertNull(q1.getQuorumPeer().getSynced_observers_metric());
            } else {
                assertEquals(Integer.valueOf(1), q1.getQuorumPeer().getSynced_observers_metric());
            }
        }

        // check the stats for the second peer
        if (testObserverMaster) {
            if (q2.getQuorumPeer().leader == null) {
                assertEquals(Integer.valueOf(1), q2.getQuorumPeer().getSynced_observers_metric());
            } else {
                assertEquals(Integer.valueOf(0), q2.getQuorumPeer().getSynced_observers_metric());
            }
        } else {
            if (q2.getQuorumPeer().leader == null) {
                assertNull(q2.getQuorumPeer().getSynced_observers_metric());
            } else {
                assertEquals(Integer.valueOf(1), q2.getQuorumPeer().getSynced_observers_metric());
            }
        }

        // test admin commands for disconnection
        ObjectName connBean = null;
        for (ObjectName bean : JMXEnv.conn().queryNames(new ObjectName(MBeanRegistry.DOMAIN + ":*"), null)) {
            if (bean.getCanonicalName().contains("Learner_Connections")
                    && bean.getCanonicalName().contains("id:" + q3.getQuorumPeer().getId())) {
                connBean = bean;
                break;
            }
        }
        assertNotNull(connBean, "could not find connection bean");

        latch = new CountDownLatch(1);
        JMXEnv.conn().invoke(connBean, "terminateConnection", new Object[0], null);
        assertTrue(latch.await(CONNECTION_TIMEOUT / 2, TimeUnit.MILLISECONDS),
                "server failed to disconnect on terminate");
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                "waiting for server 3 being up");

        final String obsBeanName = String.format(
                "org.apache.ZooKeeperService:name0=ReplicatedServer_id%d,name1=replica.%d,name2=Observer",
                q3.getQuorumPeer().getId(), q3.getQuorumPeer().getId());
        Set<ObjectName> names = JMXEnv.conn().queryNames(new ObjectName(obsBeanName), null);
        assertEquals(1, names.size(), "expecting singular observer bean");
        ObjectName obsBean = names.iterator().next();

        if (testObserverMaster) {
            // show we can move the observer using the id
            long observerMasterId = q3.getQuorumPeer().observer.getLearnerMasterId();
            latch = new CountDownLatch(1);
            JMXEnv.conn().setAttribute(obsBean, new Attribute("LearnerMaster", Long.toString(3 - observerMasterId)));
            assertTrue(latch.await(CONNECTION_TIMEOUT, TimeUnit.MILLISECONDS),
                    "server failed to disconnect on terminate");
            assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + CLIENT_PORT_OBS, CONNECTION_TIMEOUT),
                    "waiting for server 3 being up");
        } else {
            // show we get an error
            final long leaderId = q1.getQuorumPeer().leader == null ? 2 : 1;
            try {
                JMXEnv.conn().setAttribute(obsBean, new Attribute("LearnerMaster", Long.toString(3 - leaderId)));
                fail("should have seen an exception on previous command");
            } catch (RuntimeMBeanException e) {
                assertEquals(IllegalArgumentException.class, e.getCause().getClass(),
                        "mbean failed for the wrong reason");
            }
        }

        shutdown();
        JMXEnv.tearDown();
    }

    /** Builds a single "server.N=..." config line with freshly allocated quorum/election ports. */
    private String createServerString(String type, long serverId, int clientPort) {
        return "server." + serverId + "=127.0.0.1:" + PortAssignment.unique() + ":" + PortAssignment.unique()
                + ":" + type + ";" + clientPort;
    }

    /** Blocks (with assertion) until the server on the given client port answers. */
    private void waitServerUp(int clientPort) {
        assertTrue(ClientBase.waitForServerUp("127.0.0.1:" + clientPort, CONNECTION_TIMEOUT),
                "waiting for server being up");
    }

    /** Creates a ZooKeeperAdmin authenticated as super-user, with reconfig enabled. */
    private ZooKeeperAdmin createAdmin(int clientPort) throws IOException {
        System.setProperty("zookeeper.DigestAuthenticationProvider.superDigest",
                "super:D/InIHSb7yEEbrWz8b9l71RjZJU="/* password is 'test'*/);
        QuorumPeerConfig.setReconfigEnabled(true);
        ZooKeeperAdmin admin = new ZooKeeperAdmin(
                "127.0.0.1:" + clientPort, ClientBase.CONNECTION_TIMEOUT, DummyWatcher.INSTANCE);
        admin.addAuthInfo("digest", "super:test".getBytes());
        return admin;
    }

    // This test is known to be flaky and fail due to "reconfig already in progress".
    // TODO: Investigate intermittent testDynamicReconfig failures.
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    @Disabled
    public void testDynamicReconfig(boolean testObserverMaster) throws InterruptedException, IOException, KeeperException {
        if (!testObserverMaster) {
            return;
        }

        ClientBase.setupTestEnv();

        // create a quorum running with different observer master port
        // to make it easier to choose which server the observer is
        // following with
        //
        // we have setObserverMaster function but it's broken, use this
        // solution before we fixed that
        int clientPort1 = PortAssignment.unique();
        int clientPort2 = PortAssignment.unique();
        int omPort1 = PortAssignment.unique();
        int omPort2 = PortAssignment.unique();
        String quorumCfgSection = createServerString("participant", 1, clientPort1)
                + "\n" + createServerString("participant", 2, clientPort2);

        MainThread s1 = new MainThread(1, clientPort1, quorumCfgSection, String.format("observerMasterPort=%d%n", omPort1));
        MainThread s2 = new MainThread(2, clientPort2, quorumCfgSection, String.format("observerMasterPort=%d%n", omPort2));
        s1.start();
        s2.start();
        waitServerUp(clientPort1);
        waitServerUp(clientPort2);

        // create observer to follow non-leader observer master
        long nonLeaderOMPort = s1.getQuorumPeer().leader == null ? omPort1 : omPort2;
        int observerClientPort = PortAssignment.unique();
        int observerId = 10;
        MainThread observer = new MainThread(
                observerId, observerClientPort,
                quorumCfgSection + "\n" + createServerString("observer", observerId, observerClientPort),
                String.format("observerMasterPort=%d%n", nonLeaderOMPort));
        LOG.info("starting observer");
        observer.start();
        waitServerUp(observerClientPort);

        // create a client to the observer
        final LinkedBlockingQueue<KeeperState> states = new LinkedBlockingQueue<KeeperState>();
        ZooKeeper observerClient = new ZooKeeper(
                "127.0.0.1:" + observerClientPort, ClientBase.CONNECTION_TIMEOUT,
                event -> {
                    try {
                        states.put(event.getState());
                    } catch (InterruptedException ignore) {
                    }
                });

        // wait for connected
        KeeperState state = states.poll(1000, TimeUnit.MILLISECONDS);
        assertEquals(KeeperState.SyncConnected, state);

        // issue reconfig command
        ArrayList<String> newServers = new ArrayList<String>();
        String server = "server.3=127.0.0.1:" + PortAssignment.unique() + ":" + PortAssignment.unique()
                + ":participant;localhost:" + PortAssignment.unique();
        newServers.add(server);
        ZooKeeperAdmin admin = createAdmin(clientPort1);
        ReconfigTest.reconfig(admin, newServers, null, null, -1);

        // make sure the observer has the new config
        ReconfigTest.testServerHasConfig(observerClient, newServers, null);

        // shouldn't be disconnected during reconfig, so expect to not
        // receive any new event
        state = states.poll(1000, TimeUnit.MILLISECONDS);
        assertNull(state);

        admin.close();
        observerClient.close();
        observer.shutdown();
        s2.shutdown();
        s1.shutdown();
    }

    /**
     * Runnable that issues {@code numTransactions} async creates under
     * {@code root}, counting each completion down on {@code writerLatch};
     * optionally waits on {@code gate} before starting and issues periodic
     * sync()s to keep the pipeline honest.
     */
    class AsyncWriter implements Runnable {

        private final ZooKeeper client;
        private final int numTransactions;
        private final boolean issueSync;
        private final CountDownLatch writerLatch;
        private final String root;
        private final CountDownLatch gate;

        AsyncWriter(ZooKeeper client, int numTransactions, boolean issueSync,
                    CountDownLatch writerLatch, String root, CountDownLatch gate) {
            this.client = client;
            this.numTransactions = numTransactions;
            this.issueSync = issueSync;
            this.writerLatch = writerLatch;
            this.root = root;
            this.gate = gate;
        }

        @Override
        public void run() {
            if (gate != null) {
                try {
                    gate.await();
                } catch (InterruptedException e) {
                    LOG.error("Gate interrupted");
                    return;
                }
            }
            for (int i = 0; i < numTransactions; i++) {
                final boolean pleaseLog = i % 100 == 0; // throttle logging to every 100th write
                client.create(root + i, "inner thread".getBytes(), Ids.OPEN_ACL_UNSAFE, CreateMode.PERSISTENT,
                        (rc, path, ctx, name) -> {
                            writerLatch.countDown();
                            if (pleaseLog) {
                                LOG.info("wrote {}", path);
                            }
                        }, null);
                if (pleaseLog) {
                    LOG.info("async wrote {}{}", root, i);
                    if (issueSync) {
                        client.sync(root + "0", null, null);
                    }
                }
            }
        }

    }

}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
/*
 * @author Oleg V. Khaschansky
 * @version $Revision$
 */

package org.apache.harmony.awt.gl.font;

import java.awt.BasicStroke;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.Paint;
import java.awt.Shape;
import java.awt.Stroke;
import java.awt.font.TextAttribute;
import java.awt.geom.Area;
import java.awt.geom.Line2D;
import java.awt.geom.Rectangle2D;
import java.text.AttributedCharacterIterator.Attribute;
import java.util.Map;

/**
 * This class is responsible for rendering text decorations like
 * underline, strikethrough, text with background, etc.
 *
 * Fix note: the underline-style constants in {@link TextAttribute}
 * (UNDERLINE_ON, UNDERLINE_LOW_*) are boxed {@code Integer}s, so they are
 * compared with {@code equals()} rather than reference {@code ==}; identity
 * comparison silently fails for any equal-but-distinct Integer instance.
 */
public class TextDecorator {

    private static final TextDecorator inst = new TextDecorator();

    private TextDecorator() {}

    static TextDecorator getInstance() {
        return inst;
    }

    /**
     * This class encapsulates a set of decoration attributes for a single text run.
     */
    static class Decoration {

        private static final BasicStroke UNDERLINE_LOW_ONE_PIXEL_STROKE =
                new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10);

        private static final BasicStroke UNDERLINE_LOW_TWO_PIXEL_STROKE =
                new BasicStroke(2, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10);

        private static final BasicStroke UNDERLINE_LOW_DOTTED_STROKE =
                new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10, new float[] { 1, 1 }, 0);

        private static final BasicStroke UNDERLINE_LOW_DOTTED_STROKE2 =
                new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10, new float[] { 1, 1 }, 1);

        private static final BasicStroke UNDERLINE_LOW_DASHED_STROKE =
                new BasicStroke(1, BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10, new float[] { 4, 4 }, 0);

        boolean ulOn = false;               // Have standard underline?
        BasicStroke ulStroke;

        BasicStroke imUlStroke;             // Stroke for INPUT_METHOD_UNDERLINE
        BasicStroke imUlStroke2;            // Specially for UNDERLINE_LOW_GRAY

        boolean strikeThrough;
        BasicStroke strikeThroughStroke;

        boolean haveStrokes = false;        // Strokes already created?

        boolean swapBfFg;                   // Swap background and foreground?
        Paint bg;                           // background color
        Paint fg;                           // foreground color

        Paint graphicsPaint;                // Slot for saving current paint

        Decoration(
                Integer imUl,
                boolean swap,
                boolean sth,
                Paint bg, Paint fg,
                boolean ulOn) {
            if (imUl != null) {
                // Determine which stroke to use.
                // Constant-first equals(): these TextAttribute constants are boxed
                // Integers, so reference '==' would only match by identity.
                if (TextAttribute.UNDERLINE_LOW_ONE_PIXEL.equals(imUl)) {
                    this.imUlStroke = Decoration.UNDERLINE_LOW_ONE_PIXEL_STROKE;
                } else if (TextAttribute.UNDERLINE_LOW_TWO_PIXEL.equals(imUl)) {
                    this.imUlStroke = Decoration.UNDERLINE_LOW_TWO_PIXEL_STROKE;
                } else if (TextAttribute.UNDERLINE_LOW_DOTTED.equals(imUl)) {
                    this.imUlStroke = Decoration.UNDERLINE_LOW_DOTTED_STROKE;
                } else if (TextAttribute.UNDERLINE_LOW_GRAY.equals(imUl)) {
                    // Gray underline is approximated with two offset dotted strokes.
                    this.imUlStroke = Decoration.UNDERLINE_LOW_DOTTED_STROKE;
                    this.imUlStroke2 = Decoration.UNDERLINE_LOW_DOTTED_STROKE2;
                } else if (TextAttribute.UNDERLINE_LOW_DASHED.equals(imUl)) {
                    this.imUlStroke = Decoration.UNDERLINE_LOW_DASHED_STROKE;
                }
            }

            this.ulOn = ulOn;           // Has underline
            this.swapBfFg = swap;
            this.strikeThrough = sth;
            this.bg = bg;
            this.fg = fg;
        }

        /**
         * Creates strokes of proper width according to the info
         * stored in the BasicMetrics
         * @param metrics - basic metrics
         */
        private void getStrokes(BasicMetrics metrics) {
            if (!haveStrokes) {
                if (strikeThrough) {
                    strikeThroughStroke = new BasicStroke(
                            metrics.strikethroughThickness,
                            BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10);
                }

                if (ulOn) {
                    ulStroke = new BasicStroke(
                            metrics.underlineThickness,
                            BasicStroke.CAP_BUTT, BasicStroke.JOIN_MITER, 10);
                }

                haveStrokes = true;
            }
        }
    }

    /**
     * Creates Decoration object from the set of text attributes
     * @param attributes - text attributes
     * @return Decoration object, or null when the run carries no decoration
     */
    static Decoration getDecoration(Map<? extends Attribute, ?> attributes) {
        if (attributes == null) {
            return null; // It is for plain text
        }

        Object underline = attributes.get(TextAttribute.UNDERLINE);
        // UNDERLINE_ON is a boxed Integer constant - compare by value, not identity.
        boolean hasStandardUnderline = TextAttribute.UNDERLINE_ON.equals(underline);

        Object imUnderline = attributes.get(TextAttribute.INPUT_METHOD_UNDERLINE);
        Integer imUl = (Integer) imUnderline;

        boolean swapBgFg = TextAttribute.SWAP_COLORS_ON.equals(attributes.get(TextAttribute.SWAP_COLORS));

        boolean strikeThrough = TextAttribute.STRIKETHROUGH_ON.equals(attributes.get(TextAttribute.STRIKETHROUGH));

        Paint fg = (Paint) attributes.get(TextAttribute.FOREGROUND);
        Paint bg = (Paint) attributes.get(TextAttribute.BACKGROUND);

        if (!hasStandardUnderline
                && imUnderline == null
                && fg == null
                && bg == null
                && !swapBgFg
                && !strikeThrough) {
            return null;
        }
        return new Decoration(imUl, swapBgFg, strikeThrough, bg, fg, hasStandardUnderline);
    }

    /**
     * Fills the background before drawing if needed.
     *
     * @param trs - text segment
     * @param g2d - graphics to draw to
     * @param xOffset - offset in X direction to the upper left corner of the
     *        layout from the origin of the graphics
     * @param yOffset - offset in Y direction to the upper left corner of the
     *        layout from the origin of the graphics
     */
    static void prepareGraphics(TextRunSegment trs, Graphics2D g2d, float xOffset, float yOffset) {
        Decoration d = trs.decoration;

        if (d.fg == null && d.bg == null && d.swapBfFg == false) {
            return; // Nothing to do
        }

        d.graphicsPaint = g2d.getPaint(); // save the caller's paint for restoreGraphics()
        if (d.fg == null) {
            d.fg = d.graphicsPaint;
        }

        if (d.swapBfFg) {
            // Fill background area with the foreground paint, then draw text
            // with the background paint (WHITE if no background is set).
            g2d.setPaint(d.fg);
            Rectangle2D bgArea = trs.getLogicalBounds();
            Rectangle2D toFill = new Rectangle2D.Double(
                    bgArea.getX() + xOffset,
                    bgArea.getY() + yOffset,
                    bgArea.getWidth(),
                    bgArea.getHeight());
            g2d.fill(toFill);

            // Set foreground color
            g2d.setPaint(d.bg == null ? Color.WHITE : d.bg);
        } else {
            if (d.bg != null) {
                // Fill background area
                g2d.setPaint(d.bg);
                Rectangle2D bgArea = trs.getLogicalBounds();
                Rectangle2D toFill = new Rectangle2D.Double(
                        bgArea.getX() + xOffset,
                        bgArea.getY() + yOffset,
                        bgArea.getWidth(),
                        bgArea.getHeight());
                g2d.fill(toFill);
            }

            // Set foreground color
            g2d.setPaint(d.fg);
        }
    }

    /**
     * Restores the original state of the graphics if needed
     * @param d - decoration
     * @param g2d - graphics
     */
    static void restoreGraphics(Decoration d, Graphics2D g2d) {
        if (d.fg == null && d.bg == null && d.swapBfFg == false) {
            return; // Nothing to do
        }

        g2d.setPaint(d.graphicsPaint);
    }

    /**
     * Renders the text decorations
     * @param trs - text run segment
     * @param g2d - graphics to render to
     * @param xOffset - offset in X direction to the upper left corner
     *        of the layout from the origin of the graphics
     * @param yOffset - offset in Y direction to the upper left corner
     *        of the layout from the origin of the graphics
     */
    static void drawTextDecorations(TextRunSegment trs, Graphics2D g2d, float xOffset, float yOffset) {
        Decoration d = trs.decoration;

        if (!d.ulOn && d.imUlStroke == null && !d.strikeThrough) {
            return; // Nothing to do
        }

        float left = xOffset + (float) trs.getLogicalBounds().getMinX();
        float right = xOffset + (float) trs.getLogicalBounds().getMaxX();

        Stroke savedStroke = g2d.getStroke();

        d.getStrokes(trs.metrics);

        if (d.strikeThrough) {
            float y = trs.y + yOffset + trs.metrics.strikethroughOffset;
            g2d.setStroke(d.strikeThroughStroke);
            g2d.draw(new Line2D.Float(left, y, right, y));
        }

        if (d.ulOn) {
            float y = trs.y + yOffset + trs.metrics.underlineOffset;
            g2d.setStroke(d.ulStroke);
            g2d.draw(new Line2D.Float(left, y, right, y));
        }

        if (d.imUlStroke != null) {
            float y = trs.y + yOffset + trs.metrics.underlineOffset;
            g2d.setStroke(d.imUlStroke);
            g2d.draw(new Line2D.Float(left, y, right, y));

            if (d.imUlStroke2 != null) {
                y++; // second (offset) stroke one pixel below, for UNDERLINE_LOW_GRAY
                g2d.setStroke(d.imUlStroke2);
                g2d.draw(new Line2D.Float(left, y, right, y));
            }
        }

        g2d.setStroke(savedStroke);
    }

    /**
     * Extends the visual bounds of the text run segment to
     * include text decorations.
     * @param trs - text segment
     * @param segmentBounds - bounds of the undecorated text
     * @param d - decoration
     * @return extended bounds
     */
    static Rectangle2D extendVisualBounds(TextRunSegment trs, Rectangle2D segmentBounds, Decoration d) {
        if (d == null) {
            return segmentBounds;
        }
        double minx = segmentBounds.getMinX();
        double miny = segmentBounds.getMinY();
        double maxx = segmentBounds.getMaxX();
        double maxy = segmentBounds.getMaxY();

        Rectangle2D lb = trs.getLogicalBounds();

        if (d.swapBfFg || d.bg != null) {
            // Background fill covers the whole logical box.
            minx = Math.min(lb.getMinX() - trs.x, minx);
            miny = Math.min(lb.getMinY() - trs.y, miny);
            maxx = Math.max(lb.getMaxX() - trs.x, maxx);
            maxy = Math.max(lb.getMaxY() - trs.y, maxy);
        }

        if (d.ulOn || d.imUlStroke != null || d.strikeThrough) {
            minx = Math.min(lb.getMinX() - trs.x, minx);
            maxx = Math.max(lb.getMaxX() - trs.x, maxx);

            d.getStrokes(trs.metrics);

            if (d.ulStroke != null) {
                maxy = Math.max(maxy, trs.metrics.underlineOffset + d.ulStroke.getLineWidth());
            }

            if (d.imUlStroke != null) {
                maxy = Math.max(
                        maxy,
                        trs.metrics.underlineOffset
                                + d.imUlStroke.getLineWidth()
                                + (d.imUlStroke2 == null ? 0 : d.imUlStroke2.getLineWidth()));
            }
        }

        return new Rectangle2D.Double(minx, miny, maxx - minx, maxy - miny);
    }

    /**
     * Extends the outline of the text run segment to
     * include text decorations.
     * @param trs - text segment
     * @param segmentOutline - outline of the undecorated text
     * @param d - decoration
     * @return extended outline
     */
    static Shape extendOutline(TextRunSegment trs, Shape segmentOutline, Decoration d) {
        if (d == null || !d.ulOn && d.imUlStroke == null && !d.strikeThrough) {
            return segmentOutline; // Nothing to do
        }

        Area res = new Area(segmentOutline);

        float left = (float) trs.getLogicalBounds().getMinX() - trs.x;
        float right = (float) trs.getLogicalBounds().getMaxX() - trs.x;

        d.getStrokes(trs.metrics);

        if (d.strikeThrough) {
            float y = trs.metrics.strikethroughOffset;
            res.add(new Area(d.strikeThroughStroke.createStrokedShape(
                    new Line2D.Float(left, y, right, y))));
        }

        if (d.ulOn) {
            float y = trs.metrics.underlineOffset;
            res.add(new Area(d.ulStroke.createStrokedShape(
                    new Line2D.Float(left, y, right, y))));
        }

        if (d.imUlStroke != null) {
            float y = trs.metrics.underlineOffset;
            res.add(new Area(d.imUlStroke.createStrokedShape(
                    new Line2D.Float(left, y, right, y))));

            if (d.imUlStroke2 != null) {
                y++;
                res.add(new Area(d.imUlStroke2.createStrokedShape(
                        new Line2D.Float(left, y, right, y))));
            }
        }

        return res;
    }
}
/*
 * Copyright 2000-2015 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.testFramework;

import com.intellij.codeInsight.CodeInsightSettings;
import com.intellij.diagnostic.PerformanceWatcher;
import com.intellij.mock.MockApplication;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.Application;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.impl.ApplicationInfoImpl;
import com.intellij.openapi.command.impl.StartMarkAction;
import com.intellij.openapi.fileTypes.StdFileTypes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.*;
import com.intellij.openapi.util.io.FileSystemUtil;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.*;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.codeStyle.CodeStyleSchemes;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.impl.source.PostprocessReformattingAspect;
import com.intellij.refactoring.rename.inplace.InplaceRefactoring;
import com.intellij.rt.execution.junit.FileComparisonFailure;
import com.intellij.testFramework.exceptionCases.AbstractExceptionCase;
import com.intellij.util.*;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.containers.hash.HashMap;
import com.intellij.util.ui.UIUtil;
import gnu.trove.THashSet;
import junit.framework.AssertionFailedError;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.intellij.lang.annotations.RegExp;
import org.jdom.Element;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.junit.Assert;

import javax.swing.*;
import javax.swing.Timer;
import java.awt.*;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.security.SecureRandom;
import java.util.*;
import java.util.List;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import java.util.regex.Pattern;

/**
 * Base JUnit 3 test case for the IntelliJ test framework: manages a per-test
 * temp directory, a root {@link Disposable}, EDT dispatch of the test body,
 * setup/teardown cost accounting, settings-damage checks and a large set of
 * assertion helpers.
 *
 * @author peter
 */
@SuppressWarnings("UseOfSystemOutOrSystemErr")
public abstract class UsefulTestCase extends TestCase {
  public static final boolean IS_UNDER_TEAMCITY = System.getenv("TEAMCITY_VERSION") != null;

  public static final String IDEA_MARKER_CLASS = "com.intellij.openapi.components.impl.stores.IdeaProjectStoreImpl";
  public static final String TEMP_DIR_MARKER = "unitTest_";

  // When true, assertSameLinesWithFile overwrites the test-data file instead of comparing.
  protected static boolean OVERWRITE_TESTDATA = false;

  // Serialized form of freshly-constructed CodeInsightSettings, captured once
  // in the static initializer and used as the "undamaged" baseline.
  private static final String DEFAULT_SETTINGS_EXTERNALIZED;

  private static final Random RNG = new SecureRandom();
  private static final String ORIGINAL_TEMP_DIR = FileUtil.getTempDirectory();

  public static Map<String, Long> TOTAL_SETUP_COST_MILLIS = new HashMap<String, Long>();
  public static Map<String, Long> TOTAL_TEARDOWN_COST_MILLIS = new HashMap<String, Long>();

  // Disposed in tearDown(); register per-test resources against it.
  protected final Disposable myTestRootDisposable = new Disposable() {
    @Override
    public void dispose() { }

    @Override
    public String toString() {
      String testName = getTestName(false);
      return UsefulTestCase.this.getClass() + (StringUtil.isEmpty(testName) ? "" : ".test" + testName);
    }
  };

  // A path under the temp dir that tearDown() must NOT delete, if set.
  protected static String ourPathToKeep = null;

  private CodeStyleSettings myOldCodeStyleSettings;
  private String myTempDir;

  protected static final Key<String> CREATION_PLACE = Key.create("CREATION_PLACE");

  static {
    // Radar #5755208: Command line Java applications need a way to launch without a Dock icon.
    System.setProperty("apple.awt.UIElement", "true");

    try {
      CodeInsightSettings defaultSettings = new CodeInsightSettings();
      Element oldS = new Element("temp");
      defaultSettings.writeExternal(oldS);
      DEFAULT_SETTINGS_EXTERNALIZED = JDOMUtil.writeElement(oldS, "\n");
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  /** Whether this test should get its own temp directory (redirected via FileUtil). */
  protected boolean shouldContainTempFiles() {
    return true;
  }

  @Override
  protected void setUp() throws Exception {
    super.setUp();

    if (shouldContainTempFiles()) {
      String testName = getTestName(true);
      if (StringUtil.isEmptyOrSpaces(testName)) testName = "";
      testName = new File(testName).getName(); // in case the test name contains file separators
      myTempDir = FileUtil.toSystemDependentName(ORIGINAL_TEMP_DIR + "/" + TEMP_DIR_MARKER + testName + "_"+ RNG.nextInt(1000));
      FileUtil.resetCanonicalTempPathCache(myTempDir);
    }
    ApplicationInfoImpl.setInPerformanceTest(isPerformanceTest());
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      Disposer.dispose(myTestRootDisposable);
      cleanupSwingDataStructures();
      cleanupDeleteOnExitHookList();
    }
    finally {
      if (shouldContainTempFiles()) {
        FileUtil.resetCanonicalTempPathCache(ORIGINAL_TEMP_DIR);
        if (ourPathToKeep != null && FileUtil.isAncestor(myTempDir, ourPathToKeep, false)) {
          // Delete everything in the temp dir except the file/dir to keep.
          File[] files = new File(myTempDir).listFiles();
          if (files != null) {
            for (File file : files) {
              if (!FileUtil.pathsEqual(file.getPath(), ourPathToKeep)) {
                FileUtil.delete(file);
              }
            }
          }
        }
        else {
          FileUtil.delete(new File(myTempDir));
        }
      }
    }

    UIUtil.removeLeakingAppleListeners();
    super.tearDown();
  }

  // Reflective handles into java.io.DeleteOnExitHook, used to trim its file set
  // between tests (it otherwise retains paths for the JVM's lifetime).
  private static final Set<String> DELETE_ON_EXIT_HOOK_DOT_FILES;
  private static final Class DELETE_ON_EXIT_HOOK_CLASS;
  static {
    Class<?> aClass;
    try {
      aClass = Class.forName("java.io.DeleteOnExitHook");
    }
    catch (Exception e) {
      throw new RuntimeException(e);
    }
    Set<String> files = ReflectionUtil.getField(aClass, null, Set.class, "files");
    DELETE_ON_EXIT_HOOK_CLASS = aClass;
    DELETE_ON_EXIT_HOOK_DOT_FILES = files;
  }

  public static void cleanupDeleteOnExitHookList() throws ClassNotFoundException, NoSuchFieldException, IllegalAccessException {
    // try to reduce file set retained by java.io.DeleteOnExitHook
    List<String> list;
    synchronized (DELETE_ON_EXIT_HOOK_CLASS) {
      if (DELETE_ON_EXIT_HOOK_DOT_FILES.isEmpty()) return;
      list = new ArrayList<String>(DELETE_ON_EXIT_HOOK_DOT_FILES);
    }
    // Iterate backwards: DeleteOnExitHook deletes in reverse order of registration.
    for (int i = list.size() - 1; i >= 0; i--) {
      String path = list.get(i);
      if (FileSystemUtil.getAttributes(path) == null || new File(path).delete()) {
        synchronized (DELETE_ON_EXIT_HOOK_CLASS) {
          DELETE_ON_EXIT_HOOK_DOT_FILES.remove(path);
        }
      }
    }
  }

  // Clears Swing KeyboardManager maps via reflection to avoid cross-test leaks.
  private static void cleanupSwingDataStructures() throws Exception {
    Object manager = ReflectionUtil.getDeclaredMethod(Class.forName("javax.swing.KeyboardManager"), "getCurrentManager").invoke(null);
    Map componentKeyStrokeMap = ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "componentKeyStrokeMap");
    componentKeyStrokeMap.clear();
    Map containerMap = ReflectionUtil.getField(manager.getClass(), manager, Hashtable.class, "containerMap");
    containerMap.clear();
  }

  /**
   * Verifies the test did not leave global settings modified; returns the
   * collected failures (empty for performance tests / mock applications).
   */
  protected CompositeException checkForSettingsDamage() throws Exception {
    Application app = ApplicationManager.getApplication();
    if (isPerformanceTest() || app == null || app instanceof MockApplication) {
      return new CompositeException();
    }

    CodeStyleSettings oldCodeStyleSettings = myOldCodeStyleSettings;
    myOldCodeStyleSettings = null;

    return doCheckForSettingsDamage(oldCodeStyleSettings, getCurrentCodeStyleSettings());
  }

  public static CompositeException doCheckForSettingsDamage(@NotNull CodeStyleSettings oldCodeStyleSettings,
                                                            @NotNull CodeStyleSettings currentCodeStyleSettings) throws Exception {
    CompositeException result = new CompositeException();
    final CodeInsightSettings settings = CodeInsightSettings.getInstance();
    try {
      Element newS = new Element("temp");
      settings.writeExternal(newS);
      Assert.assertEquals("Code insight settings damaged", DEFAULT_SETTINGS_EXTERNALIZED, JDOMUtil.writeElement(newS, "\n"));
    }
    catch (AssertionError error) {
      // Repair the damage by copying defaults back, then record the failure.
      CodeInsightSettings clean = new CodeInsightSettings();
      for (Field field : clean.getClass().getFields()) {
        try {
          ReflectionUtil.copyFieldValue(clean, settings, field);
        }
        catch (Exception ignored) {
        }
      }
      result.add(error);
    }

    currentCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA);
    try {
      checkSettingsEqual(oldCodeStyleSettings, currentCodeStyleSettings, "Code style settings damaged");
    }
    catch (AssertionError e) {
      result.add(e);
    }
    finally {
      currentCodeStyleSettings.clearCodeStyleSettings();
    }

    try {
      InplaceRefactoring.checkCleared();
    }
    catch (AssertionError e) {
      result.add(e);
    }
    try {
      StartMarkAction.checkCleared();
    }
    catch (AssertionError e) {
      result.add(e);
    }

    return result;
  }

  /** Snapshots current code style settings so checkForSettingsDamage can compare later. */
  protected void storeSettings() {
    if (!isPerformanceTest() && ApplicationManager.getApplication() != null) {
      myOldCodeStyleSettings = getCurrentCodeStyleSettings().clone();
      myOldCodeStyleSettings.getIndentOptions(StdFileTypes.JAVA);
    }
  }

  protected CodeStyleSettings getCurrentCodeStyleSettings() {
    if (CodeStyleSchemes.getInstance().getCurrentScheme() == null) return new CodeStyleSettings();
    return CodeStyleSettingsManager.getInstance().getCurrentSettings();
  }

  public Disposable getTestRootDisposable() {
    return myTestRootDisposable;
  }

  @Override
  protected void runTest() throws Throwable {
    final Throwable[] throwables = new Throwable[1];

    Runnable runnable = new Runnable() {
      @Override
      public void run() {
        try {
          UsefulTestCase.super.runTest();
        }
        catch (InvocationTargetException e) {
          e.fillInStackTrace();
          // Unwrap to report the actual test failure, not the reflection wrapper.
          throwables[0] = e.getTargetException();
        }
        catch (IllegalAccessException e) {
          e.fillInStackTrace();
          throwables[0] = e;
        }
        catch (Throwable e) {
          throwables[0] = e;
        }
      }
    };

    invokeTestRunnable(runnable);

    if (throwables[0] != null) {
      throw throwables[0];
    }
  }

  protected boolean shouldRunTest() {
    return PlatformTestUtil.canRunTest(getClass());
  }

  /** Runs {@code r} on the EDT, waiting for completion. */
  public static void edt(Runnable r) {
    UIUtil.invokeAndWaitIfNeeded(r);
  }

  protected void invokeTestRunnable(@NotNull Runnable runnable) throws Exception {
    UIUtil.invokeAndWaitIfNeeded(runnable);
    //runnable.run();
  }

  // setUp/runTest/tearDown with timing; teardown failures are suppressed in
  // favor of the primary test failure.
  protected void defaultRunBare() throws Throwable {
    Throwable exception = null;
    long setupStart = System.nanoTime();
    setUp();
    long setupCost = (System.nanoTime() - setupStart) / 1000000;
    logPerClassCost(setupCost, TOTAL_SETUP_COST_MILLIS);

    try {
      runTest();
    }
    catch (Throwable running) {
      exception = running;
    }
    finally {
      try {
        long teardownStart = System.nanoTime();
        tearDown();
        long teardownCost = (System.nanoTime() - teardownStart) / 1000000;
        logPerClassCost(teardownCost, TOTAL_TEARDOWN_COST_MILLIS);
      }
      catch (Throwable tearingDown) {
        if (exception == null) exception = tearingDown;
      }
    }
    if (exception != null) throw exception;
  }

  /**
   * Logs the setup cost grouped by test fixture class (superclass of the current test class).
   *
   * @param cost setup cost in milliseconds
   */
  private void logPerClassCost(long cost, Map<String, Long> costMap) {
    Class<?> superclass = getClass().getSuperclass();
    Long oldCost = costMap.get(superclass.getName());
    long newCost = oldCost == null ? cost : oldCost + cost;
    costMap.put(superclass.getName(), newCost);
  }

  // Dumps accumulated setup/teardown costs, including TeamCity statistics lines.
  public static void logSetupTeardownCosts() {
    long totalSetup = 0, totalTeardown = 0;
    System.out.println("Setup costs");
    for (Map.Entry<String, Long> entry : TOTAL_SETUP_COST_MILLIS.entrySet()) {
      System.out.println(String.format("  %s: %d ms", entry.getKey(), entry.getValue()));
      totalSetup += entry.getValue();
    }
    System.out.println("Teardown costs");
    for (Map.Entry<String, Long> entry : TOTAL_TEARDOWN_COST_MILLIS.entrySet()) {
      System.out.println(String.format("  %s: %d ms", entry.getKey(), entry.getValue()));
      totalTeardown += entry.getValue();
    }
    System.out.println(String.format("Total overhead: setup %d ms, teardown %d ms", totalSetup, totalTeardown));
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalSetupMs' value='%d']", totalSetup));
    System.out.println(String.format("##teamcity[buildStatisticValue key='ideaTests.totalTeardownMs' value='%d']", totalTeardown));
  }

  @Override
  public void runBare() throws Throwable {
    if (!shouldRunTest()) return;

    if (runInDispatchThread()) {
      final Throwable[] exception = {null};
      UIUtil.invokeAndWaitIfNeeded(new Runnable() {
        @Override
        public void run() {
          try {
            defaultRunBare();
          }
          catch (Throwable tearingDown) {
            if (exception[0] == null) exception[0] = tearingDown;
          }
        }
      });
      if (exception[0] != null) throw exception[0];
    }
    else {
      defaultRunBare();
    }
  }

  protected boolean runInDispatchThread() {
    return true;
  }

  @NonNls
  public static String toString(Iterable<?> collection) {
    if (!collection.iterator().hasNext()) {
      return "<empty>";
    }

    final StringBuilder builder = new StringBuilder();
    for (final Object o : collection) {
      if (o instanceof THashSet) {
        // Sort THashSet contents for a deterministic string.
        builder.append(new TreeSet<Object>((THashSet)o));
      }
      else {
        builder.append(o);
      }
      builder.append("\n");
    }
    return builder.toString();
  }

  public static <T> void assertOrderedEquals(T[] actual, T... expected) {
    assertOrderedEquals(Arrays.asList(actual), expected);
  }

  public static <T> void assertOrderedEquals(Iterable<T> actual, T... expected) {
    assertOrderedEquals(null, actual, expected);
  }

  public static void assertOrderedEquals(@NotNull byte[] actual, @NotNull byte[] expected) {
    assertEquals(actual.length, expected.length);
    for (int i = 0; i < actual.length; i++) {
      byte a = actual[i];
      byte e = expected[i];
      assertEquals("not equals at index: "+i, e, a);
    }
  }

  public static void assertOrderedEquals(@NotNull int[] actual, @NotNull int[] expected) {
    if (actual.length != expected.length) {
      fail("Expected size: "+expected.length+"; actual: "+actual.length+"\nexpected: "+Arrays.toString(expected)+"\nactual  : "+Arrays.toString(actual));
    }
    for (int i = 0; i < actual.length; i++) {
      int a = actual[i];
      int e = expected[i];
      assertEquals("not equals at index: "+i, e, a);
    }
  }

  public static <T> void assertOrderedEquals(final String errorMsg, @NotNull Iterable<T> actual, @NotNull T... expected) {
    Assert.assertNotNull(actual);
    Assert.assertNotNull(expected);
    assertOrderedEquals(errorMsg, actual, Arrays.asList(expected));
  }

  public static <T> void assertOrderedEquals(final Iterable<? extends T> actual, final Collection<? extends T> expected) {
    assertOrderedEquals(null, actual, expected);
  }

  public static <T> void assertOrderedEquals(final String erroMsg,
                                             final Iterable<? extends T> actual,
                                             final Collection<? extends T> expected) {
    ArrayList<T> list = new ArrayList<T>();
    for (T t : actual) {
      list.add(t);
    }
    if (!list.equals(new ArrayList<T>(expected))) {
      String expectedString = toString(expected);
      String actualString = toString(actual);
      // Fails with a readable diff; falls through to a hard fail when the
      // string forms happen to be equal despite the collections differing.
      Assert.assertEquals(erroMsg, expectedString, actualString);
      Assert.fail("Warning! 'toString' does not reflect the difference.\nExpected: " + expectedString + "\nActual: " + actualString);
    }
  }

  public static <T> void assertOrderedCollection(T[] collection, @NotNull Consumer<T>... checkers) {
    Assert.assertNotNull(collection);
    assertOrderedCollection(Arrays.asList(collection), checkers);
  }

  public static <T> void assertSameElements(T[] collection, T... expected) {
    assertSameElements(Arrays.asList(collection), expected);
  }

  public static <T> void assertSameElements(Collection<? extends T> collection, T... expected) {
    assertSameElements(collection, Arrays.asList(expected));
  }

  public static <T> void assertSameElements(Collection<? extends T> collection, Collection<T> expected) {
    assertSameElements(null, collection, expected);
  }

  // Order-insensitive comparison; note it also fails on differing sizes even
  // when the element sets are equal (duplicates matter for size only).
  public static <T> void assertSameElements(String message, Collection<? extends T> collection, Collection<T> expected) {
    assertNotNull(collection);
    assertNotNull(expected);
    if (collection.size() != expected.size() || !new HashSet<T>(expected).equals(new HashSet<T>(collection))) {
      Assert.assertEquals(message, toString(expected, "\n"), toString(collection, "\n"));
      Assert.assertEquals(message, new HashSet<T>(expected), new HashSet<T>(collection));
    }
  }

  public <T> void assertContainsOrdered(Collection<? extends T> collection, T... expected) {
    assertContainsOrdered(collection, Arrays.asList(expected));
  }

  public <T> void assertContainsOrdered(Collection<? extends T> collection, Collection<T> expected) {
    ArrayList<T> copy = new ArrayList<T>(collection);
    copy.retainAll(expected);
    assertOrderedEquals(toString(collection), copy, expected);
  }

  public <T> void assertContainsElements(Collection<? extends T> collection, T... expected) {
    assertContainsElements(collection, Arrays.asList(expected));
  }

  public <T> void assertContainsElements(Collection<? extends T> collection, Collection<T> expected) {
    ArrayList<T> copy = new ArrayList<T>(collection);
    copy.retainAll(expected);
    assertSameElements(toString(collection), copy, expected);
  }

  public static String toString(Object[] collection, String separator) {
    return toString(Arrays.asList(collection), separator);
  }

  public <T> void assertDoesntContain(Collection<? extends T> collection, T... notExpected) {
    assertDoesntContain(collection, Arrays.asList(notExpected));
  }

  public <T> void assertDoesntContain(Collection<? extends T> collection, Collection<T> notExpected) {
    ArrayList<T> expected = new ArrayList<T>(collection);
    expected.removeAll(notExpected);
    assertSameElements(collection, expected);
  }

  // Sorted, separator-joined string form used for stable assertion messages.
  public static String toString(Collection<?> collection, String separator) {
    List<String> list = ContainerUtil.map2List(collection, new Function<Object, String>() {
      @Override
      public String fun(final Object o) {
        return String.valueOf(o);
      }
    });
    Collections.sort(list);
    StringBuilder builder = new StringBuilder();
    boolean flag = false;
    for (final String o : list) {
      if (flag) {
        builder.append(separator);
      }
      builder.append(o);
      flag = true;
    }
    return builder.toString();
  }

  // Each element must satisfy the checker at the same position.
  public static <T> void assertOrderedCollection(Collection<? extends T> collection, Consumer<T>... checkers) {
    Assert.assertNotNull(collection);
    if (collection.size() != checkers.length) {
      Assert.fail(toString(collection));
    }
    int i = 0;
    for (final T actual : collection) {
      try {
        checkers[i].consume(actual);
      }
      catch (AssertionFailedError e) {
        System.out.println(i + ": " + actual);
        throw e;
      }
      i++;
    }
  }

  public static <T> void assertUnorderedCollection(T[] collection, Consumer<T>... checkers) {
    assertUnorderedCollection(Arrays.asList(collection), checkers);
  }

  // Each element must satisfy exactly one (distinct) checker, in any order.
  public static <T> void assertUnorderedCollection(Collection<? extends T> collection, Consumer<T>... checkers) {
    Assert.assertNotNull(collection);
    if (collection.size() != checkers.length) {
      Assert.fail(toString(collection));
    }
    Set<Consumer<T>> checkerSet = new HashSet<Consumer<T>>(Arrays.asList(checkers));
    int i = 0;
    Throwable lastError = null;
    for (final T actual : collection) {
      boolean flag = true;
      for (final Consumer<T> condition : checkerSet) {
        Throwable error = accepts(condition, actual);
        if (error == null) {
          checkerSet.remove(condition);
          flag = false;
          break;
        }
        else {
          lastError = error;
        }
      }
      if (flag) {
        lastError.printStackTrace();
        Assert.fail("Incorrect element(" + i + "): " + actual);
      }
      i++;
    }
  }

  // Returns null when the checker accepts the value, otherwise the thrown error.
  private static <T> Throwable accepts(final Consumer<T> condition, final T actual) {
    try {
      condition.consume(actual);
      return null;
    }
    catch (Throwable e) {
      return e;
    }
  }

  @Contract("null, _ -> fail")
  public static <T> T assertInstanceOf(Object o, Class<T> aClass) {
    Assert.assertNotNull("Expected instance of: " + aClass.getName() + " actual: " + null, o);
    Assert.assertTrue("Expected instance of: " + aClass.getName() + " actual: " + o.getClass().getName(), aClass.isInstance(o));
    @SuppressWarnings("unchecked") T t = (T)o;
    return t;
  }

  public static <T> T assertOneElement(Collection<T> collection) {
    Assert.assertNotNull(collection);
    Iterator<T> iterator = collection.iterator();
    String toString = toString(collection);
    Assert.assertTrue(toString, iterator.hasNext());
    T t = iterator.next();
    Assert.assertFalse(toString, iterator.hasNext());
    return t;
  }

  public static <T> T assertOneElement(T[] ts) {
    Assert.assertNotNull(ts);
    Assert.assertEquals(Arrays.asList(ts).toString(), 1, ts.length);
    return ts[0];
  }

  public static <T> void assertOneOf(T value, T... values) {
    boolean found = false;
    for (T v : values) {
      if (value == v || value != null && value.equals(v)) {
        found = true;
      }
    }
    Assert.assertTrue(value + " should be equal to one of " + Arrays.toString(values), found);
  }

  public static void printThreadDump() {
    PerformanceWatcher.dumpThreadsToConsole("Thread dump:");
  }

  public static void assertEmpty(final Object[] array) {
    assertOrderedEquals(array);
  }

  public static void assertNotEmpty(final Collection<?> collection) {
    if (collection == null) return;
    assertTrue(!collection.isEmpty());
  }

  public static void assertEmpty(final Collection<?> collection) {
    assertEmpty(collection.toString(), collection);
  }

  public static void assertNullOrEmpty(final Collection<?> collection) {
    if (collection == null) return;
    assertEmpty(null, collection);
  }

  public static void assertEmpty(final String s) {
    assertTrue(s, StringUtil.isEmpty(s));
  }

  public static <T> void assertEmpty(final String errorMsg, final Collection<T> collection) {
    assertOrderedEquals(errorMsg, collection);
  }

  public static void assertSize(int expectedSize, final Object[] array) {
    assertEquals(toString(Arrays.asList(array)), expectedSize, array.length);
  }

  public static void assertSize(int expectedSize, final Collection<?> c) {
    assertEquals(toString(c), expectedSize, c.size());
  }

  /** Registers the disposable against the test root so tearDown disposes it. */
  protected <T extends Disposable> T disposeOnTearDown(final T disposable) {
    Disposer.register(myTestRootDisposable, disposable);
    return disposable;
  }

  public static void assertSameLines(String expected, String actual) {
    String expectedText = StringUtil.convertLineSeparators(expected.trim());
    String actualText = StringUtil.convertLineSeparators(actual.trim());
    Assert.assertEquals(expectedText, actualText);
  }

  public static void assertExists(File file){
    assertTrue("File should exist " + file, file.exists());
  }

  public static void assertDoesntExist(File file){
    assertFalse("File should not exist " + file, file.exists());
  }

  protected String getTestName(boolean lowercaseFirstLetter) {
    String name = getName();
    return getTestName(name, lowercaseFirstLetter);
  }

  /** Strips the "test" prefix from a JUnit 3 method name; "" when nothing remains. */
  public static String getTestName(String name, boolean lowercaseFirstLetter) {
    if (name == null) {
      return "";
    }
    name = StringUtil.trimStart(name, "test");
    if (StringUtil.isEmpty(name)) {
      return "";
    }
    return lowercaseFirstLetter(name, lowercaseFirstLetter);
  }

  public static String lowercaseFirstLetter(String name, boolean lowercaseFirstLetter) {
    if (lowercaseFirstLetter && !isAllUppercaseName(name)) {
      name = Character.toLowerCase(name.charAt(0)) + name.substring(1);
    }
    return name;
  }

  // Treats a name as "all uppercase" when it has no lowercase letters and at
  // least 3 uppercase ones (e.g. an acronym like "HTML").
  public static boolean isAllUppercaseName(String name) {
    int uppercaseChars = 0;
    for (int i = 0; i < name.length(); i++) {
      if (Character.isLowerCase(name.charAt(i))) {
        return false;
      }
      if (Character.isUpperCase(name.charAt(i))) {
        uppercaseChars++;
      }
    }
    return uppercaseChars >= 3;
  }

  // Test directory name: lowercased test name with any "_suffix" removed.
  protected String getTestDirectoryName() {
    final String testName = getTestName(true);
    return testName.replaceAll("_.*", "");
  }

  public static void assertSameLinesWithFile(String filePath, String actualText) {
    assertSameLinesWithFile(filePath, actualText, true);
  }

  public static void assertSameLinesWithFile(String filePath, String actualText, boolean trimBeforeComparing) {
    String fileText;
    try {
      if (OVERWRITE_TESTDATA) {
        VfsTestUtil.overwriteTestData(filePath, actualText);
        System.out.println("File " + filePath + " created.");
      }
      fileText = FileUtil.loadFile(new File(filePath), CharsetToolkit.UTF8_CHARSET);
    }
    catch (FileNotFoundException e) {
      // Missing expected file: create it from the actual output and fail once.
      VfsTestUtil.overwriteTestData(filePath, actualText);
      throw new AssertionFailedError("No output text found. File " + filePath + " created.");
    }
    catch (IOException e) {
      throw new RuntimeException(e);
    }
    String expected = StringUtil.convertLineSeparators(trimBeforeComparing ? fileText.trim() : fileText);
    String actual = StringUtil.convertLineSeparators(trimBeforeComparing ? actualText.trim() : actualText);
    if (!Comparing.equal(expected, actual)) {
      throw new FileComparisonFailure(null, expected, actual, filePath);
    }
  }

  // Nulls out non-final, non-static, non-primitive instance fields declared
  // outside the junit/testFramework packages, to break leak chains.
  public static void clearFields(final Object test) throws IllegalAccessException {
    Class aClass = test.getClass();
    while (aClass != null) {
      clearDeclaredFields(test, aClass);
      aClass = aClass.getSuperclass();
    }
  }

  public static void clearDeclaredFields(Object test, Class aClass) throws IllegalAccessException {
    if (aClass == null) return;
    for (final Field field : aClass.getDeclaredFields()) {
      @NonNls final String name = field.getDeclaringClass().getName();
      if (!name.startsWith("junit.framework.") && !name.startsWith("com.intellij.testFramework.")) {
        final int modifiers = field.getModifiers();
        if ((modifiers & Modifier.FINAL) == 0 && (modifiers & Modifier.STATIC) == 0 && !field.getType().isPrimitive()) {
          field.setAccessible(true);
          field.set(test, null);
        }
      }
    }
  }

  @SuppressWarnings("deprecation")
  protected static void checkSettingsEqual(CodeStyleSettings expected, CodeStyleSettings settings, String message) throws Exception {
    if (expected == null || settings == null) return;

    Element oldS = new Element("temp");
    expected.writeExternal(oldS);
    Element newS = new Element("temp");
    settings.writeExternal(newS);

    String newString = JDOMUtil.writeElement(newS, "\n");
    String oldString = JDOMUtil.writeElement(oldS, "\n");
    Assert.assertEquals(message, oldString, newString);
  }

  public boolean isPerformanceTest() {
    String name = getName();
    return name != null && name.contains("Performance") || getClass().getName().contains("Performance");
  }

  public static void doPostponedFormatting(final Project project) {
    DocumentUtil.writeInRunUndoTransparentAction(new Runnable() {
      @Override
      public void run() {
        PsiDocumentManager.getInstance(project).commitAllDocuments();
        PostprocessReformattingAspect.getInstance(project).doPostponedFormatting();
      }
    });
  }

  // Fails if any Swing Timer is still pending; handles both the pre-JDK8
  // "firstTimer" field layout and the JDK8 DelayQueue-based TimerQueue.
  protected static void checkAllTimersAreDisposed() {
    Field firstTimerF;
    Object timerQueue;
    Object timer;
    try {
      Class<?> TimerQueueC = Class.forName("javax.swing.TimerQueue");
      Method sharedInstance = TimerQueueC.getDeclaredMethod("sharedInstance");
      sharedInstance.setAccessible(true);

      firstTimerF = ReflectionUtil.getDeclaredField(TimerQueueC, "firstTimer");
      timerQueue = sharedInstance.invoke(null);
      if (firstTimerF == null) {
        // jdk 8
        DelayQueue delayQueue = ReflectionUtil.getField(TimerQueueC, timerQueue, DelayQueue.class, "queue");
        timer = delayQueue.peek();
      }
      else {
        // ancient jdk
        firstTimerF.setAccessible(true);
        timer = firstTimerF.get(timerQueue);
      }
    }
    catch (Throwable e) {
      throw new RuntimeException(e);
    }
    if (timer != null) {
      if (firstTimerF != null) {
        ReflectionUtil.resetField(timerQueue, firstTimerF);
      }
      String text = "";
      if (timer instanceof Delayed) {
        long delay = ((Delayed)timer).getDelay(TimeUnit.MILLISECONDS);
        text = "(delayed for "+delay+"ms)";
        Method getTimer = ReflectionUtil.getDeclaredMethod(timer.getClass(), "getTimer");
        getTimer.setAccessible(true);
        try {
          timer = getTimer.invoke(timer);
        }
        catch (Exception e) {
          throw new RuntimeException(e);
        }
      }
      Timer t = (Timer)timer;
      text = "Timer (listeners: "+Arrays.asList(t.getActionListeners()) + ") "+text;
      fail("Not disposed Timer: " + text + "; queue:" + timerQueue);
    }
  }

  /**
   * Checks that code block throw corresponding exception.
   *
   * @param exceptionCase Block annotated with some exception type
   * @throws Throwable
   */
  protected void assertException(final AbstractExceptionCase exceptionCase) throws Throwable {
    assertException(exceptionCase, null);
  }

  /**
   * Checks that code block throw corresponding exception with expected error msg.
   * If expected error message is null it will not be checked.
   *
   * @param exceptionCase Block annotated with some exception type
   * @param expectedErrorMsg expected error messge
   * @throws Throwable
   */
  protected void assertException(final AbstractExceptionCase exceptionCase,
                                 @Nullable final String expectedErrorMsg) throws Throwable {
    assertExceptionOccurred(true, exceptionCase, expectedErrorMsg);
  }

  /**
   * Checks that code block doesn't throw corresponding exception.
   *
   * @param exceptionCase Block annotated with some exception type
   * @throws Throwable
   */
  protected void assertNoException(final AbstractExceptionCase exceptionCase) throws Throwable {
    assertExceptionOccurred(false, exceptionCase, null);
  }

  protected void assertNoThrowable(final Runnable closure) {
    String throwableName = null;
    try {
      closure.run();
    }
    catch (Throwable thr) {
      throwableName = thr.getClass().getName();
    }
    assertNull(throwableName);
  }

  private static void assertExceptionOccurred(boolean shouldOccur,
                                              AbstractExceptionCase exceptionCase,
                                              String expectedErrorMsg) throws Throwable {
    boolean wasThrown = false;
    try {
      exceptionCase.tryClosure();
    }
    catch (Throwable e) {
      if (shouldOccur) {
        wasThrown = true;
        final String errorMessage = exceptionCase.getAssertionErrorMessage();
        assertEquals(errorMessage, exceptionCase.getExpectedExceptionClass(), e.getClass());
        if (expectedErrorMsg != null) {
          assertEquals("Compare error messages", expectedErrorMsg, e.getMessage());
        }
      }
      else if (exceptionCase.getExpectedExceptionClass().equals(e.getClass())) {
        wasThrown = true;

        System.out.println("");
        e.printStackTrace(System.out);

        // NOTE(review): this string literal was split across a chunk boundary
        // in the excerpt; rejoined with a single space — confirm against the
        // original file.
        fail("Exception isn't expected here. Exception message: " + e.getMessage());
      }
      else {
        throw e;
      }
    }
    finally {
      if (shouldOccur && !wasThrown) {
        fail(exceptionCase.getAssertionErrorMessage());
      }
    }
  }

  // True when the annotation is present on this class, any superclass, or the
  // current test method (searched up the hierarchy).
  protected boolean annotatedWith(@NotNull Class annotationClass) {
    Class<?> aClass = getClass();
    String methodName = "test" + getTestName(false);
    boolean methodChecked = false;
    while (aClass != null && aClass != Object.class) {
      if (aClass.getAnnotation(annotationClass) != null) return true;
      if (!methodChecked) {
        Method method = ReflectionUtil.getDeclaredMethod(aClass, methodName);
        if (method != null) {
          if (method.getAnnotation(annotationClass) != null) return true;
          methodChecked = true;
        }
      }
      aClass = aClass.getSuperclass();
    }
    return false;
  }

  protected String getHomePath() {
    return PathManager.getHomePath().replace(File.separatorChar, '/');
  }

  protected static boolean isInHeadlessEnvironment() {
    return GraphicsEnvironment.isHeadless();
  }

  // Loads children of every file in the subtree, then refreshes recursively.
  public static void refreshRecursively(@NotNull VirtualFile file) {
    VfsUtilCore.visitChildrenRecursively(file, new VirtualFileVisitor() {
      @Override
      public boolean visitFile(@NotNull VirtualFile file) {
        file.getChildren();
        return true;
      }
    });
    file.refresh(false, true);
  }

  /** Builds a suite containing only the tests whose names match {@code regexp}. */
  @NotNull
  public static Test filteredSuite(@RegExp String regexp, @NotNull Test test) {
    final Pattern pattern = Pattern.compile(regexp);
    final TestSuite testSuite = new TestSuite();
    new Processor<Test>() {
      @Override
      public boolean process(Test test) {
        if (test instanceof TestSuite) {
          for (int i = 0, len = ((TestSuite)test).testCount(); i < len; i++) {
            process(((TestSuite)test).testAt(i));
          }
        }
        else if (pattern.matcher(test.toString()).find()) {
          testSuite.addTest(test);
        }
        return false;
      }
    }.process(test);
    return testSuite;
  }

  @Nullable
  public static VirtualFile refreshAndFindFile(@NotNull final File file) {
    return UIUtil.invokeAndWaitIfNeeded(new Computable<VirtualFile>() {
      @Override
      public VirtualFile compute() {
        return LocalFileSystem.getInstance().refreshAndFindFileByIoFile(file);
      }
    });
  }

  // Runs on the EDT (directly if already there); rethrows any exception raised
  // by the runnable on the calling thread.
  public static <E extends Exception> void invokeAndWaitIfNeeded(@NotNull final ThrowableRunnable<E> runnable) throws Exception {
    if (SwingUtilities.isEventDispatchThread()) {
      runnable.run();
    }
    else {
      final Ref<Exception> ref = Ref.create();
      SwingUtilities.invokeAndWait(new Runnable() {
        @Override
        public void run() {
          try {
            runnable.run();
          }
          catch (Exception e) {
            ref.set(e);
          }
        }
      });
      if (!ref.isNull()) throw ref.get();
    }
  }
}
package com.orientechnologies.orient.graph.batch;

import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx;
import com.orientechnologies.orient.core.record.impl.ODocument;
import com.orientechnologies.orient.core.sql.query.OSQLSynchQuery;
import com.orientechnologies.orient.core.storage.OStorage;
import com.tinkerpop.blueprints.Vertex;
import com.tinkerpop.blueprints.impls.orient.OrientGraph;
import org.junit.Test;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
 * Tests for the OrientDB graph batch-insert API ({@code OGraphBatchInsertBasic} and
 * {@code OGraphBatchInsert}). Each test uses its own in-memory database so the tests
 * are independent of one another.
 *
 * @author Luigi Dell'Aquila (l.dellaquila-at-orientechnologies.com)
 */
public class OGraphBatchInsertTest {

  /** Basic API: two edges out of vertex 0 must materialize exactly 3 vertices. */
  @Test
  public void test1() {
    String dbUrl = "memory:batchinsert_test1";
    OGraphBatchInsertBasic batch = new OGraphBatchInsertBasic(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L);
    batch.createEdge(0L, 2L);
    batch.end();
    ODatabaseDocumentTx db = new ODatabaseDocumentTx(dbUrl);
    db.open("admin", "admin");
    List<?> result = db.query(new OSQLSynchQuery<Object>("select from V"));
    assertEquals(3, result.size());
    db.close();
  }

  /** Property-aware API: same topology as test1, edges carry no properties (null). */
  @Test
  public void test2() {
    String dbUrl = "memory:batchinsert_test2";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(0L, 2L, null);
    batch.end();
    ODatabaseDocumentTx db = new ODatabaseDocumentTx(dbUrl);
    db.open("admin", "admin");
    List<?> result = db.query(new OSQLSynchQuery<Object>("select from V"));
    for (Object o : result) {
      System.out.println(o);
    }
    assertEquals(3, result.size());
    db.close();
  }

  /** A 3-cycle plus an explicitly created isolated vertex must yield 4 vertices. */
  @Test
  public void test3() {
    String dbUrl = "memory:batchinsert_test3";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(1L, 2L, null);
    batch.createEdge(2L, 0L, null);
    batch.createVertex(3L);
    batch.end();
    ODatabaseDocumentTx db = new ODatabaseDocumentTx(dbUrl);
    db.open("admin", "admin");
    List<?> result = db.query(new OSQLSynchQuery<Object>("select from V"));
    for (Object o : result) {
      System.out.println(o);
    }
    assertEquals(4, result.size());
    db.close();
  }

  /**
   * Vertex properties set through the batch must land only on the targeted vertex
   * (uid 0) and on no other vertex.
   */
  @Test
  public void test4() {
    String dbUrl = "memory:batchinsert_test4";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(1L, 2L, null);
    batch.createEdge(2L, 0L, null);
    batch.createVertex(3L);
    Map<String, Object> vertexProps = new HashMap<String, Object>();
    vertexProps.put("foo", "foo");
    vertexProps.put("bar", 3);
    batch.setVertexProperties(0L, vertexProps);
    batch.end();
    ODatabaseDocumentTx db = new ODatabaseDocumentTx(dbUrl);
    db.open("admin", "admin");
    List<?> result = db.query(new OSQLSynchQuery<Object>("select from V"));
    boolean found0 = false;
    for (Object o : result) {
      ODocument doc = ((ODocument) o);
      // Long.valueOf avoids the deprecated Long(long) constructor; equals() semantics
      // are identical to the original new Long(0) comparison.
      if (Long.valueOf(0L).equals(doc.field("uid"))) {
        found0 = true;
        assertEquals("foo", doc.field("foo"));
        assertEquals(3, doc.field("bar"));
      } else {
        assertNotSame("foo", doc.field("foo"));
        assertNotSame(3, doc.field("bar"));
      }
    }
    assertTrue(found0);
    assertEquals(4, result.size());
    db.close();
  }

  /**
   * Referencing non-contiguous ids (0,1 then 4,5) must create only the referenced
   * vertices: 4 in total, with the "hole" ids 2 and 3 absent.
   */
  @Test
  public void test5() {
    String dbUrl = "memory:batchinsert_test5";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(4L, 5L, null);
    batch.end();
    ODatabaseDocumentTx db = new ODatabaseDocumentTx(dbUrl);
    db.open("admin", "admin");
    List<?> result = db.query(new OSQLSynchQuery<Object>("select from V"));
    for (Object o : result) {
      System.out.println(o);
    }
    assertEquals(4, result.size());
    db.close();
  }

  /**
   * Once setVertexProperties has been called, creating further vertices must be
   * rejected with IllegalStateException.
   */
  @Test
  public void testFail1() {
    String dbUrl = "memory:batchinsert_testFail1";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    Map<String, Object> vertexProps = new HashMap<String, Object>();
    vertexProps.put("bar", 3);
    batch.setVertexProperties(0L, vertexProps);
    try {
      batch.createVertex(3L);
      fail();
    } catch (IllegalStateException expected) {
      // expected: the batch forbids createVertex after setVertexProperties
    } finally {
      batch.end();
    }
  }

  /**
   * Same contract as testFail1 against a fresh database.
   * NOTE(review): this body is identical to testFail1 apart from the db url —
   * possibly meant to cover a different scenario; confirm against history.
   */
  @Test
  public void testFail2() {
    String dbUrl = "memory:batchinsert_testFail2";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    Map<String, Object> vertexProps = new HashMap<String, Object>();
    vertexProps.put("bar", 3);
    batch.setVertexProperties(0L, vertexProps);
    try {
      batch.createVertex(3L);
      fail();
    } catch (IllegalStateException expected) {
      // expected: the batch forbids createVertex after setVertexProperties
    } finally {
      batch.end();
    }
  }

  /** Traversal over a batch-inserted chain 0->1->2->3 must reach the property on vertex 3. */
  @Test
  public void testTraverse() {
    String dbUrl = "memory:batchinsert_testTraverse";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(1L, 2L, null);
    batch.createEdge(2L, 3L, null);
    Map<String, Object> vertexProps = new HashMap<String, Object>();
    vertexProps.put("foo", "bar");
    batch.setVertexProperties(3L, vertexProps);
    batch.end();
    // Force the storage closed so OrientGraph below reopens it from scratch.
    ODatabaseDocumentTx databaseDocumentTx = new ODatabaseDocumentTx(dbUrl);
    databaseDocumentTx.open("admin", "admin");
    OStorage storage = databaseDocumentTx.getStorage();
    databaseDocumentTx.close();
    storage.close(true, false);
    OrientGraph g = new OrientGraph(dbUrl, "admin", "admin");
    Iterable<Vertex> result = g.command(
        new OSQLSynchQuery<Vertex>("select expand(out().in().out().out().in().out()) from V where uid = ?")).execute(1L);
    for (Vertex v : result) {
      assertEquals("bar", v.getProperty("foo"));
    }
    g.shutdown();
  }

  // NOTE(review): no @Test annotation, so JUnit 4 never runs this method — looks
  // deliberately disabled; confirm before enabling. Left as-is to preserve behavior.
  public void testHoles() {
    String dbUrl = "memory:batchinsert_testHoles";
    OGraphBatchInsert batch = new OGraphBatchInsert(dbUrl, "admin", "admin");
    batch.setParallel(1);
    batch.begin();
    batch.createEdge(0L, 1L, null);
    batch.createEdge(1L, 3L, null);
    batch.createEdge(3L, 4L, null);
    Map<String, Object> vertexProps = new HashMap<String, Object>();
    vertexProps.put("foo", "aa");
    batch.setVertexProperties(3L, vertexProps);
    // NOTE(review): the same map instance is mutated and reused for vertex 4; if
    // setVertexProperties keeps the map by reference, vertex 3 would also see "bar".
    vertexProps.put("foo", "bar");
    batch.setVertexProperties(4L, vertexProps);
    batch.end();
    ODatabaseDocumentTx databaseDocumentTx = new ODatabaseDocumentTx(dbUrl);
    databaseDocumentTx.open("admin", "admin");
    OStorage storage = databaseDocumentTx.getStorage();
    databaseDocumentTx.close();
    storage.close(true, false);
    OrientGraph g = new OrientGraph(dbUrl, "admin", "admin");
    Iterable<Vertex> result = g.command(
        new OSQLSynchQuery<Vertex>("select expand(out().in().out().out().in().out().out().in().out()) from V where uid = ?"))
        .execute(0L);
    boolean found = false;
    for (Vertex v : result) {
      assertFalse(found);
      assertEquals("bar", v.getProperty("foo"));
      found = true;
    }
    assertTrue(found);
    g.shutdown();
  }
}
/*
 * Copyright 2013 MovingBlocks
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.terasology.monitoring.impl;

import com.google.common.collect.Lists;
import com.google.common.collect.Queues;
import gnu.trove.map.TObjectDoubleMap;
import gnu.trove.map.TObjectLongMap;
import gnu.trove.map.hash.TObjectDoubleHashMap;
import gnu.trove.map.hash.TObjectLongHashMap;
import gnu.trove.procedure.TObjectDoubleProcedure;
import gnu.trove.procedure.TObjectLongProcedure;
import org.terasology.engine.EngineTime;
import org.terasology.engine.Time;
import org.terasology.monitoring.Activity;
import org.terasology.registry.CoreRegistry;

import java.util.Deque;
import java.util.List;

/**
 * Active implementation of Performance Monitor.
 *
 * <p>Tracks a stack of named activities on the main thread, recording per-activity
 * execution time (via the engine timer) and an estimate of allocated memory (via
 * changes in {@code Runtime.freeMemory()}). Per-cycle maps are retained for the last
 * {@code RETAINED_CYCLES} cycles and folded into running totals, from which running
 * means and decaying execution-time spikes are served.
 *
 * <p>Not thread-safe: calls from any thread other than the one that constructed the
 * monitor are ignored (see {@link #startActivity} / {@link #endActivity}).
 */
// TODO: Check to ensure activities are being started and stopped correctly
// TODO: Remove activities with 0 time
public class PerformanceMonitorImpl implements PerformanceMonitorInternal {
    // Number of past cycles whose data is kept and included in the running totals.
    private static final int RETAINED_CYCLES = 60;
    // Multiplier applied to every spike value each cycle so old spikes fade out.
    private static final double DECAY_RATE = 0.98;
    private static final Activity OFF_THREAD_ACTIVITY = new NullActivity();
    // NullActivity instances are used by the NullPerformanceMonitor and for processes NOT running
    // on the main thread. Not strictly necessary (these processes are ignored by the PerformanceMonitor
    // anyway) an instance of this class offers a slight performance improvement over standard Activity
    // implementations as it doesn't call the PerformanceMonitor.endActivity() method.

    // Shared Activity handle returned by startActivity(); its close() pops the stack.
    private final Activity activityInstance = new ActivityInstance();

    // Stack of activities currently in progress on the main thread (innermost on top).
    private final Deque<ActivityInfo> activityStack;
    // One map per retained cycle: activity name -> execution time / allocated memory.
    private final List<TObjectLongMap<String>> executionData;
    private final List<TObjectLongMap<String>> allocationData;
    // Accumulators for the cycle currently being recorded; swapped out in rollCycle().
    private TObjectLongMap<String> currentExecutionData;
    private TObjectLongMap<String> currentAllocationData;
    // Sums over all retained cycles, kept incrementally up to date by rollCycle().
    private final TObjectLongMap<String> runningExecutionTotals;
    private final TObjectLongMap<String> runningAllocationTotals;
    // Largest recent execution time per activity, decayed each cycle by DECAY_RATE.
    private final TObjectDoubleMap<String> spikeData;

    // Reusable Trove callback objects (allocated once to avoid per-cycle garbage).
    private final TObjectDoubleProcedure<String> decayLargestExecutionTime;
    private final TObjectLongProcedure<String> updateExecutionTimeTotalAndSpikeData;
    private final TObjectLongProcedure<String> updateAllocatedMemoryTotal;
    private final TObjectLongProcedure<String> removeExpiredExecutionTimeValueFromTotal;
    private final TObjectLongProcedure<String> removeExpiredAllocatedMemoryValueFromTotal;
    private final SetterOfActivityToRunningMeanMapEntry setExecutionTimeRunningMean;
    private final SetterOfActivityToRunningMeanMapEntry setAllocatedMemoryRunningMean;

    // The constructing thread; activity calls from other threads are no-ops.
    private final Thread mainThread;
    private final EngineTime timer;

    public PerformanceMonitorImpl() {
        activityStack = Queues.newArrayDeque();
        executionData = Lists.newLinkedList();
        allocationData = Lists.newLinkedList();
        currentExecutionData = new TObjectLongHashMap<>();
        currentAllocationData = new TObjectLongHashMap<>();
        runningExecutionTotals = new TObjectLongHashMap<>();
        runningAllocationTotals = new TObjectLongHashMap<>();
        spikeData = new TObjectDoubleHashMap<>();

        decayLargestExecutionTime = new DecayerOfActivityLargestExecutionTime();
        updateExecutionTimeTotalAndSpikeData = new UpdaterOfActivityExecutionTimeTotalAndSpikeData();
        updateAllocatedMemoryTotal = new UpdaterOfActivityAllocatedMemoryTotal();
        removeExpiredExecutionTimeValueFromTotal = new RemoverFromTotalOfActivityExpiredExecutionTimeValue();
        removeExpiredAllocatedMemoryValueFromTotal = new RemoverFromTotalOfActivityExpiredAllocatedMemoryValue();
        setExecutionTimeRunningMean = new SetterOfActivityToRunningMeanMapEntry();
        setAllocatedMemoryRunningMean = new SetterOfActivityToRunningMeanMapEntry();

        timer = (EngineTime) CoreRegistry.get(Time.class);
        mainThread = Thread.currentThread();
    }

    /**
     * Closes out the current cycle: archives the per-cycle maps, decays spikes, folds
     * the cycle's data into the running totals, evicts cycles older than
     * RETAINED_CYCLES from the totals, and starts fresh accumulators.
     */
    @Override
    public void rollCycle() {
        executionData.add(currentExecutionData);
        allocationData.add(currentAllocationData);

        spikeData.forEachEntry(decayLargestExecutionTime);
        currentExecutionData.forEachEntry(updateExecutionTimeTotalAndSpikeData);
        currentAllocationData.forEachEntry(updateAllocatedMemoryTotal);

        // Evict the oldest cycle(s), subtracting their values from the running totals first.
        while (executionData.size() > RETAINED_CYCLES) {
            executionData.get(0).forEachEntry(removeExpiredExecutionTimeValueFromTotal);
            executionData.remove(0);
        }
        while (allocationData.size() > RETAINED_CYCLES) {
            allocationData.get(0).forEachEntry(removeExpiredAllocatedMemoryValueFromTotal);
            allocationData.remove(0);
        }

        currentExecutionData = new TObjectLongHashMap<>();
        currentAllocationData = new TObjectLongHashMap<>();
    }

    /**
     * Begins a named activity and pushes it on the stack. Before pushing, the time
     * and memory consumed so far by the enclosing activity are credited to it as
     * "own" time/memory, so nested activities are not double-counted.
     *
     * @param activityName name under which this activity's costs are recorded
     * @return a handle whose close() ends the activity; a no-op handle off-thread
     */
    @Override
    public Activity startActivity(String activityName) {
        if (Thread.currentThread() != mainThread) {
            return OFF_THREAD_ACTIVITY;
        }

        ActivityInfo newActivity = new ActivityInfo(activityName).initialize();

        if (!activityStack.isEmpty()) {
            ActivityInfo currentActivity = activityStack.peek();
            // Credit elapsed time since the parent started (or last resumed) to the parent.
            currentActivity.ownTime += newActivity.startTime
                    - ((currentActivity.resumeTime > 0) ? currentActivity.resumeTime : currentActivity.startTime);
            // freeMemory() drop since the parent last measured, clamped at 0 (GC can raise it).
            currentActivity.ownMem += (currentActivity.startMem - newActivity.startMem > 0)
                    ? currentActivity.startMem - newActivity.startMem : 0;
        }

        activityStack.push(newActivity);
        return activityInstance;
    }

    /**
     * Ends the innermost activity: records its total time and memory into the
     * current cycle's maps and marks the parent (if any) as resumed at this instant.
     * No-op off the main thread or when the stack is empty.
     */
    @Override
    public void endActivity() {
        if (Thread.currentThread() != mainThread || activityStack.isEmpty()) {
            return;
        }

        ActivityInfo oldActivity = activityStack.pop();
        long endTime = timer.getRealTimeInMs();
        // resumeTime > 0 means the activity was paused by a child; its direct cost
        // since the last resume is added to the "own" time accumulated earlier.
        long totalTime = (oldActivity.resumeTime > 0)
                ? oldActivity.ownTime + endTime - oldActivity.resumeTime
                : endTime - oldActivity.startTime;
        currentExecutionData.adjustOrPutValue(oldActivity.name, totalTime, totalTime);

        long endMem = Runtime.getRuntime().freeMemory();
        // Only count a net drop in free memory; a rise (GC ran) contributes nothing.
        long totalMem = (oldActivity.startMem - endMem > 0)
                ? oldActivity.startMem - endMem + oldActivity.ownMem
                : oldActivity.ownMem;
        currentAllocationData.adjustOrPutValue(oldActivity.name, totalMem, totalMem);

        if (!activityStack.isEmpty()) {
            ActivityInfo currentActivity = activityStack.peek();
            currentActivity.resumeTime = endTime;
            currentActivity.startMem = endMem;
        }
    }

    /**
     * @return a fresh map of activity name -> mean execution time per retained cycle
     *         (running total divided by the number of retained cycles)
     */
    @Override
    public TObjectDoubleMap<String> getRunningMean() {
        TObjectDoubleMap<String> activityToMeanMap = new TObjectDoubleHashMap<>();
        setExecutionTimeRunningMean.setActivityToMeanMap(activityToMeanMap);
        setExecutionTimeRunningMean.setFactor(1.0 / executionData.size());
        runningExecutionTotals.forEachEntry(setExecutionTimeRunningMean);
        return activityToMeanMap;
    }

    /**
     * @return the live spike map (activity name -> decaying largest execution time);
     *         note this is the internal map, not a copy
     */
    @Override
    public TObjectDoubleMap<String> getDecayingSpikes() {
        return spikeData;
    }

    /**
     * @return a fresh map of activity name -> mean allocated memory per retained cycle
     */
    @Override
    public TObjectDoubleMap<String> getAllocationMean() {
        TObjectDoubleMap<String> activityToMeanMap = new TObjectDoubleHashMap<>();
        setAllocatedMemoryRunningMean.setActivityToMeanMap(activityToMeanMap);
        setAllocatedMemoryRunningMean.setFactor(1.0 / allocationData.size());
        runningAllocationTotals.forEachEntry(setAllocatedMemoryRunningMean);
        return activityToMeanMap;
    }

    /**
     * Mutable record of one in-progress activity: start/resume timestamps, own time
     * accumulated while children ran, and free-memory snapshots.
     */
    private class ActivityInfo {
        public String name;
        public long startTime;
        public long resumeTime;
        public long ownTime;
        public long startMem;
        public long ownMem;

        public ActivityInfo(String activityName) {
            this.name = activityName;
        }

        // Captures the current time and free-memory baseline; returns this for chaining.
        public ActivityInfo initialize() {
            this.startTime = timer.getRealTimeInMs();
            this.startMem = Runtime.getRuntime().freeMemory();
            return this;
        }
    }

    /** Handle handed out by startActivity(); closing it ends the innermost activity. */
    private class ActivityInstance implements Activity {

        @Override
        public void close() {
            endActivity();
        }
    }

    /** Multiplies every stored spike value by DECAY_RATE. */
    private class DecayerOfActivityLargestExecutionTime implements TObjectDoubleProcedure<String> {
        public boolean execute(String activityName, double executionTime) {
            spikeData.put(activityName, executionTime * DECAY_RATE);
            return true;
        }
    }

    /** Adds the cycle's execution time to the running total and raises the spike if exceeded. */
    private class UpdaterOfActivityExecutionTimeTotalAndSpikeData implements TObjectLongProcedure<String> {
        double latestSpike;

        public boolean execute(String activityName, long latestExecutionTime) {
            runningExecutionTotals.adjustOrPutValue(activityName, latestExecutionTime, latestExecutionTime);
            latestSpike = spikeData.get(activityName);
            if (latestExecutionTime > latestSpike) {
                spikeData.put(activityName, latestExecutionTime);
            }
            return true;
        }
    }

    /** Adds the cycle's allocated memory to the running total. */
    private class UpdaterOfActivityAllocatedMemoryTotal implements TObjectLongProcedure<String> {
        public boolean execute(String activityName, long latestAllocatedMemory) {
            runningAllocationTotals.adjustOrPutValue(activityName, latestAllocatedMemory, latestAllocatedMemory);
            return true;
        }
    }

    /** Subtracts an evicted cycle's execution time from the running total. */
    private class RemoverFromTotalOfActivityExpiredExecutionTimeValue implements TObjectLongProcedure<String> {
        public boolean execute(String activityName, long expiredExecutionTime) {
            runningExecutionTotals.adjustValue(activityName, -expiredExecutionTime);
            return true;
        }
    }

    /** Subtracts an evicted cycle's allocated memory from the running total. */
    private class RemoverFromTotalOfActivityExpiredAllocatedMemoryValue implements TObjectLongProcedure<String> {
        public boolean execute(String activityName, long expiredAllocatedMemory) {
            runningAllocationTotals.adjustValue(activityName, -expiredAllocatedMemory);
            return true;
        }
    }

    /**
     * Writes total * factor into a target map for each positive total; configured
     * with the target map and the 1/cycles factor before each traversal.
     */
    private class SetterOfActivityToRunningMeanMapEntry implements TObjectLongProcedure<String> {
        private TObjectDoubleMap<String> activityToMeanMap;
        private double factor;

        public SetterOfActivityToRunningMeanMapEntry setActivityToMeanMap(TObjectDoubleMap<String> newActivityToMeanMap) {
            this.activityToMeanMap = newActivityToMeanMap;
            return this;
        }

        public SetterOfActivityToRunningMeanMapEntry setFactor(double newFactor) {
            this.factor = newFactor;
            return this;
        }

        public boolean execute(String activityName, long total) {
            if (total > 0) {
                activityToMeanMap.put(activityName, total * factor);
            }
            return true;
        }
    }
}
package com.wordsaretoys.rise.glwrapper;

import java.nio.IntBuffer;

import android.annotation.SuppressLint;
import android.graphics.Bitmap;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;

/**
 * represents a single texture object
 *
 * @author chris
 */
public class Texture {

	// GL id (glGenTextures writes into a one-element array)
	int[] id = new int[1];

	// GL texture type (2D, cube map, or external OES)
	int type;

	/**
	 * ctor, creates id-only for surface texture usage
	 */
	@SuppressLint("InlinedApi")
	public Texture() {
		GLES20.glGenTextures(1, id, 0);
		type = GLES11Ext.GL_TEXTURE_EXTERNAL_OES;
	}

	/**
	 * ctor, creates texture from Android bitmap object
	 *
	 * assumes the bitmap is RGBA8888-compatible (copyPixelsToBuffer is fed
	 * straight to glTexImage2D as GL_RGBA/GL_UNSIGNED_BYTE) -- TODO confirm
	 */
	public Texture(Bitmap bitmap) {
		int width = bitmap.getWidth();
		int height = bitmap.getHeight();
		IntBuffer ib = IntBuffer.allocate(width * height);
		bitmap.copyPixelsToBuffer(ib);
		ib.rewind();
		// allocate a GL texture
		GLES20.glGenTextures(1, id, 0);
		type = GLES20.GL_TEXTURE_2D;
		// copy texture data and generate mipmap
		GLES20.glBindTexture(type, id[0]);
		GLES20.glTexImage2D(type, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
		setMipmapFiltersAndFinish();
	}

	/**
	 * ctor, creates texture from raw bitmap
	 *
	 * @param bitmap pixel data, one RGBA int per texel
	 * @param width texture width in texels
	 * @param height texture height in texels
	 */
	public Texture(int[] bitmap, int width, int height) {
		// allocate a GL texture
		GLES20.glGenTextures(1, id, 0);
		type = GLES20.GL_TEXTURE_2D;
		// copy texture data and generate mipmap
		IntBuffer ib = IntBuffer.wrap(bitmap);
		GLES20.glBindTexture(type, id[0]);
		GLES20.glTexImage2D(type, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
		setMipmapFiltersAndFinish();
	}

	/**
	 * ctor, creates cube map texture from 6 bitmaps
	 *
	 * all six faces must have the same dimensions
	 */
	public Texture(int[] nx, int[] ny, int[] nz, int[] px, int[] py, int[] pz, int width, int height) {
		GLES20.glGenTextures(1, id, 0);
		type = GLES20.GL_TEXTURE_CUBE_MAP;
		GLES20.glBindTexture(type, id[0]);
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(nx));
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(ny));
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(nz));
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(px));
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(py));
		GLES20.glTexImage2D(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, IntBuffer.wrap(pz));
		setMipmapFiltersAndFinish();
	}

	/**
	 * ctor, creates cube map texture from 6 bitmaps
	 *
	 * all six bitmaps must have the same dimensions; the shared pixel buffer is
	 * sized from nx's width and ny's height
	 * NOTE(review): width comes from nx but height from ny -- looks like a typo
	 * for nx.getHeight(); harmless while all faces match, but confirm
	 */
	public Texture(Bitmap nx, Bitmap ny, Bitmap nz, Bitmap px, Bitmap py, Bitmap pz) {
		int width = nx.getWidth();
		int height = ny.getHeight();
		IntBuffer ib = IntBuffer.allocate(width * height);
		GLES20.glGenTextures(1, id, 0);
		type = GLES20.GL_TEXTURE_CUBE_MAP;
		GLES20.glBindTexture(type, id[0]);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_X, nx, ib, width, height);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Y, ny, ib, width, height);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_NEGATIVE_Z, nz, ib, width, height);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_X, px, ib, width, height);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_Y, py, ib, width, height);
		loadCubeFace(GLES20.GL_TEXTURE_CUBE_MAP_POSITIVE_Z, pz, ib, width, height);
		setMipmapFiltersAndFinish();
	}

	/**
	 * copies one bitmap into the shared buffer and uploads it as a cube map face
	 */
	private void loadCubeFace(int face, Bitmap bitmap, IntBuffer ib, int width, int height) {
		bitmap.copyPixelsToBuffer(ib);
		ib.rewind();
		GLES20.glTexImage2D(face, 0, GLES20.GL_RGBA, width, height, 0,
				GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, ib);
		ib.clear();
	}

	/**
	 * sets the filtering parameters on the currently bound texture, generates
	 * its mipmap chain and unbinds it
	 *
	 * BUGFIX: GL_TEXTURE_MAG_FILTER previously used GL_LINEAR_MIPMAP_NEAREST,
	 * which is not a legal magnification filter in OpenGL ES (only GL_NEAREST
	 * and GL_LINEAR are); the call raised GL_INVALID_ENUM and the mag filter
	 * silently stayed at its default. GL_LINEAR is the intended smooth filter.
	 */
	private void setMipmapFiltersAndFinish() {
		GLES20.glTexParameteri(type, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
		GLES20.glTexParameteri(type, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR_MIPMAP_NEAREST);
		GLES20.glGenerateMipmap(type);
		GLES20.glBindTexture(type, 0);
	}

	/**
	 * bind the texture to a sampler and texture unit
	 *
	 * used in conjunction with shader.activate()
	 * the sampler parameter is available from the activated shader
	 *
	 * @param index texture unit index {0..MAX_TEXTURE_IMAGE_UNITS}
	 * @param sampler id of sampler variable from shader
	 */
	public void bind(int index, int sampler) {
		GLES20.glUniform1i(sampler, index);
		GLES20.glActiveTexture(GLES20.GL_TEXTURE0 + index);
		GLES20.glBindTexture(type, id[0]);
	}

	/**
	 * release the GL texture
	 */
	public void release() {
		GLES20.glDeleteTextures(1, id, 0);
	}

	/**
	 * return GL object id
	 */
	public int getId() {
		return id[0];
	}

	/**
	 * reset a default parameter
	 *
	 * @param param GL texture parameter name
	 * @param value GL texture parameter value
	 */
	public void set(int param, int value) {
		GLES20.glBindTexture(type, id[0]);
		GLES20.glTexParameteri(type, param, value);
		GLES20.glBindTexture(type, 0);
	}
}
/* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright * ownership. Elasticsearch licenses this file to you under * the Apache License, Version 2.0 (the "License"); you may * not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.elasticsearch.transport; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.lucene.store.AlreadyClosedException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ContextPreservingActionListener; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.util.concurrent.EsRejectedExecutionException; import org.elasticsearch.threadpool.ThreadPool; import java.io.Closeable; import java.net.InetAddress; import java.net.InetSocketAddress; import java.net.UnknownHostException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Set; import 
java.util.concurrent.ExecutorService; import java.util.concurrent.atomic.AtomicBoolean; import java.util.function.Consumer; import java.util.function.Supplier; import java.util.stream.Collectors; import java.util.stream.Stream; public abstract class RemoteConnectionStrategy implements TransportConnectionListener, Closeable { enum ConnectionStrategy { SNIFF(SniffConnectionStrategy.CHANNELS_PER_CONNECTION, SniffConnectionStrategy::enablementSettings, SniffConnectionStrategy::infoReader) { @Override public String toString() { return "sniff"; } }, PROXY(ProxyConnectionStrategy.CHANNELS_PER_CONNECTION, ProxyConnectionStrategy::enablementSettings, ProxyConnectionStrategy::infoReader) { @Override public String toString() { return "proxy"; } }; private final int numberOfChannels; private final Supplier<Stream<Setting.AffixSetting<?>>> enablementSettings; private final Supplier<Writeable.Reader<RemoteConnectionInfo.ModeInfo>> reader; ConnectionStrategy(int numberOfChannels, Supplier<Stream<Setting.AffixSetting<?>>> enablementSettings, Supplier<Writeable.Reader<RemoteConnectionInfo.ModeInfo>> reader) { this.numberOfChannels = numberOfChannels; this.enablementSettings = enablementSettings; this.reader = reader; } public int getNumberOfChannels() { return numberOfChannels; } public Supplier<Stream<Setting.AffixSetting<?>>> getEnablementSettings() { return enablementSettings; } public Writeable.Reader<RemoteConnectionInfo.ModeInfo> getReader() { return reader.get(); } } public static final Setting.AffixSetting<ConnectionStrategy> REMOTE_CONNECTION_MODE = Setting.affixKeySetting( "cluster.remote.", "mode", key -> new Setting<>( key, ConnectionStrategy.SNIFF.name(), value -> ConnectionStrategy.valueOf(value.toUpperCase(Locale.ROOT)), Setting.Property.NodeScope, Setting.Property.Dynamic)); // this setting is intentionally not registered, it is only used in tests public static final Setting<Integer> REMOTE_MAX_PENDING_CONNECTION_LISTENERS = 
Setting.intSetting("cluster.remote.max_pending_connection_listeners", 1000, Setting.Property.NodeScope); private final int maxPendingConnectionListeners; protected final Logger logger = LogManager.getLogger(getClass()); private final AtomicBoolean closed = new AtomicBoolean(false); private final Object mutex = new Object(); private List<ActionListener<Void>> listeners = new ArrayList<>(); protected final TransportService transportService; protected final RemoteConnectionManager connectionManager; protected final String clusterAlias; RemoteConnectionStrategy(String clusterAlias, TransportService transportService, RemoteConnectionManager connectionManager, Settings settings) { this.clusterAlias = clusterAlias; this.transportService = transportService; this.connectionManager = connectionManager; this.maxPendingConnectionListeners = REMOTE_MAX_PENDING_CONNECTION_LISTENERS.get(settings); connectionManager.addListener(this); } static ConnectionProfile buildConnectionProfile(String clusterAlias, Settings settings) { ConnectionStrategy mode = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).get(settings); ConnectionProfile.Builder builder = new ConnectionProfile.Builder() .setConnectTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)) .setHandshakeTimeout(TransportSettings.CONNECT_TIMEOUT.get(settings)) .setCompressionEnabled(RemoteClusterService.REMOTE_CLUSTER_COMPRESS.getConcreteSettingForNamespace(clusterAlias).get(settings)) .setPingInterval(RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE.getConcreteSettingForNamespace(clusterAlias).get(settings)) .addConnections(0, TransportRequestOptions.Type.BULK, TransportRequestOptions.Type.STATE, TransportRequestOptions.Type.RECOVERY) // TODO: Evaluate if we actually need PING channels? 
// NOTE(review): this chunk begins mid-file; the two lines below are the tail of a
// connection-profile-building method whose header is above this view. Kept as-is.
            .addConnections(mode.numberOfChannels, TransportRequestOptions.Type.REG, TransportRequestOptions.Type.PING);
        return builder.build();
    }

    /**
     * Builds the concrete connection strategy (sniff or proxy) configured for the
     * given cluster alias via the {@code REMOTE_CONNECTION_MODE} setting.
     */
    static RemoteConnectionStrategy buildStrategy(String clusterAlias, TransportService transportService,
                                                  RemoteConnectionManager connectionManager, Settings settings) {
        ConnectionStrategy mode = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).get(settings);
        switch (mode) {
            case SNIFF:
                return new SniffConnectionStrategy(clusterAlias, transportService, connectionManager, settings);
            case PROXY:
                return new ProxyConnectionStrategy(clusterAlias, transportService, connectionManager, settings);
            default:
                // NOTE(review): message is missing a space before the mode value
                // ("Invalid connection strategy" + mode renders as "...strategySNIFF").
                throw new AssertionError("Invalid connection strategy" + mode);
        }
    }

    /**
     * Returns the aliases of all remote clusters for which any strategy-specific
     * enablement setting is present in {@code settings}.
     */
    static Set<String> getRemoteClusters(Settings settings) {
        final Stream<Setting.AffixSetting<?>> enablementSettings = Arrays.stream(ConnectionStrategy.values())
            .flatMap(strategy -> strategy.getEnablementSettings().get());
        return enablementSettings.flatMap(s -> getClusterAlias(settings, s)).collect(Collectors.toSet());
    }

    /**
     * True if the given cluster alias has a usable remote connection configuration:
     * non-empty seeds for SNIFF mode, or a non-empty proxy address otherwise.
     */
    public static boolean isConnectionEnabled(String clusterAlias, Settings settings) {
        ConnectionStrategy mode = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).get(settings);
        if (mode.equals(ConnectionStrategy.SNIFF)) {
            List<String> seeds = SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS.getConcreteSettingForNamespace(clusterAlias).get(settings);
            return seeds.isEmpty() == false;
        } else {
            String address = ProxyConnectionStrategy.PROXY_ADDRESS.getConcreteSettingForNamespace(clusterAlias).get(settings);
            return Strings.isEmpty(address) == false;
        }
    }

    /**
     * Same check as {@link #isConnectionEnabled(String, Settings)} but against an
     * already-resolved map of setting values (the form handed to setting validators).
     * NOTE(review): assumes the map contains a value for the mode setting; a missing
     * entry would NPE on {@code mode.equals(...)} — confirm callers guarantee presence.
     */
    @SuppressWarnings("unchecked")
    public static boolean isConnectionEnabled(String clusterAlias, Map<Setting<?>, Object> settings) {
        ConnectionStrategy mode = (ConnectionStrategy) settings.get(REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias));
        if (mode.equals(ConnectionStrategy.SNIFF)) {
            List<String> seeds = (List<String>) settings.get(SniffConnectionStrategy.REMOTE_CLUSTER_SEEDS
                .getConcreteSettingForNamespace(clusterAlias));
            return seeds.isEmpty() == false;
        } else {
            String address = (String) settings.get(ProxyConnectionStrategy.PROXY_ADDRESS
                .getConcreteSettingForNamespace(clusterAlias));
            return Strings.isEmpty(address) == false;
        }
    }

    // Maps every concrete instance of an affix setting to its namespace (the cluster alias).
    private static <T> Stream<String> getClusterAlias(Settings settings, Setting.AffixSetting<T> affixSetting) {
        Stream<Setting<T>> allConcreteSettings = affixSetting.getAllConcreteSettings(settings);
        return allConcreteSettings.map(affixSetting::getNamespace);
    }

    /**
     * Parses a {@code host:port} string into a resolved socket address.
     *
     * @throws IllegalArgumentException if the host cannot be resolved or the string
     *         is not in host:port form
     */
    static InetSocketAddress parseConfiguredAddress(String configuredAddress) {
        final String host = parseHost(configuredAddress);
        final int port = parsePort(configuredAddress);
        InetAddress hostAddress;
        try {
            hostAddress = InetAddress.getByName(host);
        } catch (UnknownHostException e) {
            throw new IllegalArgumentException("unknown host [" + host + "]", e);
        }
        return new InetSocketAddress(hostAddress, port);
    }

    // Everything before the last ':' (IPv6-safe; see indexOfPortSeparator).
    static String parseHost(final String configuredAddress) {
        return configuredAddress.substring(0, indexOfPortSeparator(configuredAddress));
    }

    // Everything after the last ':'; must parse as a positive integer.
    static int parsePort(String remoteHost) {
        try {
            int port = Integer.valueOf(remoteHost.substring(indexOfPortSeparator(remoteHost) + 1));
            if (port <= 0) {
                throw new IllegalArgumentException("port number must be > 0 but was: [" + port + "]");
            }
            return port;
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("failed to parse port", e);
        }
    }

    private static int indexOfPortSeparator(String remoteHost) {
        int portSeparator = remoteHost.lastIndexOf(':'); // in case we have a IPv6 address ie. [::1]:9300
        // NOTE(review): the second condition looks unreachable — lastIndexOf can return at
        // most length()-1, so even a trailing ':' never yields length(). Confirm intent.
        if (portSeparator == -1 || portSeparator == remoteHost.length()) {
            throw new IllegalArgumentException("remote hosts need to be configured as [host:port], found [" + remoteHost + "] instead");
        }
        return portSeparator;
    }

    /**
     * Triggers a connect round unless there is one running already. If there is a
     * connect round running, the listener will either be queued or rejected and failed.
     */
    void connect(ActionListener<Void> connectListener) {
        boolean runConnect = false;
        // Preserve the caller's thread context for when the listener fires on another thread.
        final ActionListener<Void> listener =
            ContextPreservingActionListener.wrapPreservingContext(connectListener, transportService.getThreadPool().getThreadContext());
        boolean closed;
        synchronized (mutex) {
            closed = this.closed.get();
            if (closed) {
                assert listeners.isEmpty();
            } else {
                // Bound the queue of waiting listeners; reject when full.
                if (listeners.size() >= maxPendingConnectionListeners) {
                    assert listeners.size() == maxPendingConnectionListeners;
                    listener.onFailure(new EsRejectedExecutionException("connect listener queue is full"));
                    return;
                } else {
                    listeners.add(listener);
                }
                // Only the first queued listener kicks off an actual connect round;
                // later callers piggyback on the round already in flight.
                runConnect = listeners.size() == 1;
            }
        }
        if (closed) {
            // Fail outside the mutex so alien listener code never runs under the lock.
            connectListener.onFailure(new AlreadyClosedException("connect handler is already closed"));
            return;
        }
        if (runConnect) {
            ExecutorService executor = transportService.getThreadPool().executor(ThreadPool.Names.MANAGEMENT);
            executor.submit(new AbstractRunnable() {
                @Override
                public void onFailure(Exception e) {
                    // Submission/setup failure: drain and fail every queued listener.
                    ActionListener.onFailure(getAndClearListeners(), e);
                }

                @Override
                protected void doRun() {
                    connectImpl(new ActionListener<>() {
                        @Override
                        public void onResponse(Void aVoid) {
                            ActionListener.onResponse(getAndClearListeners(), aVoid);
                        }

                        @Override
                        public void onFailure(Exception e) {
                            ActionListener.onFailure(getAndClearListeners(), e);
                        }
                    });
                }
            });
        }
    }

    /**
     * True if the remote connection must be torn down and rebuilt for {@code newSettings}:
     * the strategy type changed, the connection profile (compression / ping interval)
     * differs, or the concrete strategy reports its own settings changed.
     */
    boolean shouldRebuildConnection(Settings newSettings) {
        ConnectionStrategy newMode = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(clusterAlias).get(newSettings);
        if (newMode.equals(strategyType()) == false) {
            return true;
        } else {
            Boolean compressionEnabled = RemoteClusterService.REMOTE_CLUSTER_COMPRESS
                .getConcreteSettingForNamespace(clusterAlias)
                .get(newSettings);
            TimeValue pingSchedule = RemoteClusterService.REMOTE_CLUSTER_PING_SCHEDULE
                .getConcreteSettingForNamespace(clusterAlias)
                .get(newSettings);

            ConnectionProfile oldProfile = connectionManager.getConnectionProfile();
            ConnectionProfile.Builder builder = new ConnectionProfile.Builder(oldProfile);
            builder.setCompressionEnabled(compressionEnabled);
            builder.setPingInterval(pingSchedule);
            ConnectionProfile newProfile = builder.build();
            return connectionProfileChanged(oldProfile, newProfile) || strategyMustBeRebuilt(newSettings);
        }
    }

    // True if this strategy's own settings changed in a way that requires a rebuild.
    protected abstract boolean strategyMustBeRebuilt(Settings newSettings);

    // The strategy type (SNIFF or PROXY) this instance implements.
    protected abstract ConnectionStrategy strategyType();

    @Override
    public void onNodeDisconnected(DiscoveryNode node, Transport.Connection connection) {
        if (shouldOpenMoreConnections()) {
            // try to reconnect and fill up the slot of the disconnected node
            connect(ActionListener.wrap(
                ignore -> logger.trace("[{}] successfully connected after disconnect of {}", clusterAlias, node),
                e -> logger.debug(() -> new ParameterizedMessage("[{}] failed to connect after disconnect of {}", clusterAlias, node), e)));
        }
    }

    @Override
    public void close() {
        final List<ActionListener<Void>> toNotify;
        synchronized (mutex) {
            // compareAndSet ensures the pending listeners are drained exactly once,
            // on the first close() call only.
            if (closed.compareAndSet(false, true)) {
                connectionManager.removeListener(this);
                toNotify = listeners;
                listeners = Collections.emptyList();
            } else {
                toNotify = Collections.emptyList();
            }
        }
        // Notify outside the mutex to avoid running listener code under the lock.
        ActionListener.onFailure(toNotify, new AlreadyClosedException("connect handler is already closed"));
    }

    public boolean isClosed() {
        return closed.get();
    }

    // for testing only
    boolean assertNoRunningConnections() {
        synchronized (mutex) {
            assert listeners.isEmpty();
        }
        return true;
    }

    protected abstract boolean shouldOpenMoreConnections();

    protected abstract void connectImpl(ActionListener<Void> listener);

    protected abstract RemoteConnectionInfo.ModeInfo getModeInfo();

    // Atomically swaps out the queued listeners so each connect round notifies
    // every waiting listener exactly once.
    private List<ActionListener<Void>> getAndClearListeners() {
        final List<ActionListener<Void>> result;
        synchronized (mutex) {
            if (listeners.isEmpty()) {
                result = Collections.emptyList();
            } else {
                result = listeners;
                listeners = new ArrayList<>();
            }
        }
        return result;
    }

    // Profile comparison limited to the two attributes this class manages.
    private boolean connectionProfileChanged(ConnectionProfile oldProfile, ConnectionProfile newProfile) {
        return Objects.equals(oldProfile.getCompressionEnabled(), newProfile.getCompressionEnabled()) == false
            || Objects.equals(oldProfile.getPingInterval(), newProfile.getPingInterval()) == false;
    }

    /**
     * Setting validator that rejects a strategy-specific setting (identified by
     * {@code key}) when the configured connection mode for the same namespace is a
     * different strategy, and optionally applies an extra per-value check.
     */
    static class StrategyValidator<T> implements Setting.Validator<T> {

        private final String key;
        private final ConnectionStrategy expectedStrategy;
        private final String namespace;
        private final Consumer<T> valueChecker;

        StrategyValidator(String namespace, String key, ConnectionStrategy expectedStrategy) {
            this(namespace, key, expectedStrategy, (v) -> {});
        }

        StrategyValidator(String namespace, String key, ConnectionStrategy expectedStrategy, Consumer<T> valueChecker) {
            this.namespace = namespace;
            this.key = key;
            this.expectedStrategy = expectedStrategy;
            this.valueChecker = valueChecker;
        }

        @Override
        public void validate(T value) {
            valueChecker.accept(value);
        }

        @Override
        public void validate(T value, Map<Setting<?>, Object> settings, boolean isPresent) {
            Setting<ConnectionStrategy> concrete = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(namespace);
            ConnectionStrategy modeType = (ConnectionStrategy) settings.get(concrete);
            // Only complain when this setting is actually present but the mode does not match.
            if (isPresent && modeType.equals(expectedStrategy) == false) {
                throw new IllegalArgumentException("Setting \"" + key + "\" cannot be used with the configured \"" + concrete.getKey()
                    + "\" [required=" + expectedStrategy.name() + ", configured=" + modeType.name() + "]");
            }
        }

        @Override
        public Iterator<Setting<?>> settings() {
            // Declare a dependency on the mode setting so its value is available in validate(...).
            Setting<ConnectionStrategy> concrete = REMOTE_CONNECTION_MODE.getConcreteSettingForNamespace(namespace);
            Stream<Setting<?>> settingStream = Stream.of(concrete);
            return settingStream.iterator();
        }
    }
}
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.camel.component.braintree; import org.junit.After; import java.math.BigDecimal; import java.util.HashMap; import java.util.LinkedList; import java.util.List; import java.util.Map; import com.braintreegateway.BraintreeGateway; import com.braintreegateway.Result; import com.braintreegateway.Transaction; import com.braintreegateway.TransactionCloneRequest; import com.braintreegateway.TransactionRefundRequest; import com.braintreegateway.TransactionRequest; import org.apache.camel.builder.RouteBuilder; import org.apache.camel.component.braintree.internal.BraintreeApiCollection; import org.apache.camel.component.braintree.internal.TransactionGatewayApiMethod; import org.junit.Ignore; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class TransactionGatewayIntegrationTest extends AbstractBraintreeTestSupport { private static final Logger LOG = LoggerFactory.getLogger(TransactionGatewayIntegrationTest.class); private static final String PATH_PREFIX = BraintreeApiCollection.getCollection().getApiName(TransactionGatewayApiMethod.class).getName(); private BraintreeGateway gateway; private final List<String> transactionIds; // 
************************************************************************* // // ************************************************************************* public TransactionGatewayIntegrationTest() { this.gateway = null; this.transactionIds = new LinkedList<>(); } @Override protected void doPostSetup() throws Exception { this.gateway = getGateway(); } @Override @After public void tearDown() throws Exception { if (this.gateway != null) { for (String token : this.transactionIds) { // TODO: cleanup } } this.transactionIds.clear(); } // ************************************************************************* // // ************************************************************************* @Test public void testSale() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> result = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(true) .done(), Result.class); assertNotNull("sale result", result); assertTrue(result.isSuccess()); LOG.info("Transaction done - id={}", result.getTarget().getId()); this.transactionIds.add(result.getTarget().getId()); } @Test public void testCloneTransaction() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(false) .done(), Result.class); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); LOG.info("Transaction done - id={}", createResult.getTarget().getId()); this.transactionIds.add(createResult.getTarget().getId()); final Result<Transaction> cloneResult = requestBodyAndHeaders( "direct://CLONETRANSACTION", null, new BraintreeHeaderBuilder() .add("id", createResult.getTarget().getId()) .add("cloneRequest", new 
TransactionCloneRequest() .amount(new BigDecimal("99.00")) .options() .submitForSettlement(true) .done()) .build(), Result.class); assertNotNull("clone result", cloneResult); assertTrue(cloneResult.isSuccess()); LOG.info("Clone Transaction done - clonedId={}, id={}", createResult.getTarget().getId(), cloneResult.getTarget().getId()); this.transactionIds.add(cloneResult.getTarget().getId()); } @Test public void testFind() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(false) .done(), Result.class); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); LOG.info("Transaction done - id={}", createResult.getTarget().getId()); this.transactionIds.add(createResult.getTarget().getId()); // using String message body for single parameter "id" final Transaction result = requestBody("direct://FIND", createResult.getTarget().getId()); assertNotNull("find result", result); LOG.info("Transaction found - id={}", result.getId()); } @Test public void testSubmitForSettlementWithId() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(false) .done(), Result.class); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); LOG.info("Transaction done - id={}", createResult.getTarget().getId()); this.transactionIds.add(createResult.getTarget().getId()); final Result<Transaction> result = requestBody( "direct://SUBMITFORSETTLEMENT_WITH_ID", createResult.getTarget().getId(), Result.class); assertNotNull("Submit For Settlement result", result); LOG.debug("Transaction submitted 
for settlement - id={}" + result.getTarget().getId()); } @Test public void testSubmitForSettlementWithIdAndAmount() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(false) .done(), Result.class); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); LOG.info("Transaction done - id={}", createResult.getTarget().getId()); this.transactionIds.add(createResult.getTarget().getId()); final Result<Transaction> result = requestBodyAndHeaders( "direct://SUBMITFORSETTLEMENT_WITH_ID_ADN_AMOUNT", null, new BraintreeHeaderBuilder() .add("id", createResult.getTarget().getId()) .add("amount", new BigDecimal("100.00")) .build(), Result.class); assertNotNull("Submit For Settlement result", result); LOG.debug("Transaction submitted for settlement - id={}" + result.getTarget().getId()); } @Test public void testSubmitForSettlementWithRequest() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(false) .done(), Result.class); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); LOG.info("Transaction done - id={}", createResult.getTarget().getId()); this.transactionIds.add(createResult.getTarget().getId()); final Result<Transaction> result = requestBodyAndHeaders( "direct://SUBMITFORSETTLEMENT_WITH_REQUEST", null, new BraintreeHeaderBuilder() .add("id", createResult.getTarget().getId()) .add("request", new TransactionRequest() .amount(new BigDecimal("100.00"))) .build(), Result.class); assertNotNull("Submit For Settlement result", result); LOG.debug("Transaction submitted 
for settlement - id={}" + result.getTarget().getId()); } @Test public void testRefund() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(true) .done(), Result.class ); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); String createId = createResult.getTarget().getId(); final Result<Transaction> settleResult = this.gateway.testing().settle(createId); assertNotNull("settle result", settleResult); assertTrue(settleResult.isSuccess()); final Result<Transaction> result = requestBody( "direct://REFUND_WITH_ID", createId, Result.class ); assertNotNull("Request Refund result", result); assertTrue(result.isSuccess()); LOG.info(String.format("Refund id(%s) created for transaction id(%s)", result.getTarget().getId(), createId)); } @Test public void testRefundWithAmount() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(true) .done(), Result.class ); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); String createId = createResult.getTarget().getId(); final Result<Transaction> settleResult = this.gateway.testing().settle(createId); assertNotNull("settle result", settleResult); assertTrue(settleResult.isSuccess()); final Result<Transaction> result = requestBodyAndHeaders( "direct://REFUND", null, new BraintreeHeaderBuilder() .add("id", createId) .add("amount", new BigDecimal("99.00")) .build(), Result.class ); assertNotNull("Request Refund result", result); assertTrue(result.isSuccess()); LOG.info(String.format("Refund id(%s) created for transaction 
id(%s)", result.getTarget().getId(), createId)); } @Test public void testRefundWithRequest() throws Exception { assertNotNull("BraintreeGateway can't be null", this.gateway); final Result<Transaction> createResult = requestBody( "direct://SALE", new TransactionRequest() .amount(new BigDecimal("100.00")) .paymentMethodNonce("fake-valid-nonce") .options() .submitForSettlement(true) .done(), Result.class ); assertNotNull("sale result", createResult); assertTrue(createResult.isSuccess()); String createId = createResult.getTarget().getId(); final Result<Transaction> settleResult = this.gateway.testing().settle(createId); assertNotNull("settle result", settleResult); assertTrue(settleResult.isSuccess()); final Result<Transaction> result = requestBodyAndHeaders( "direct://REFUND", null, new BraintreeHeaderBuilder() .add("id", createId) .add("refundRequest", new TransactionRefundRequest() .amount(new BigDecimal("100.00"))) .build(), Result.class ); assertNotNull("Request Refund result", result); assertTrue(result.isSuccess()); LOG.info(String.format("Refund id(%s) created for transaction id(%s)", result.getTarget().getId(), createId)); } // ************************************************************************* // Auto generated tests // ************************************************************************* // TODO provide parameter values for cancelRelease @Ignore @Test public void testCancelRelease() throws Exception { // using String message body for single parameter "id" final com.braintreegateway.Result result = requestBody("direct://CANCELRELEASE", null); assertNotNull("cancelRelease result", result); LOG.debug("cancelRelease: " + result); } // TODO provide parameter values for credit @Ignore @Test public void testCredit() throws Exception { // using com.braintreegateway.TransactionRequest message body for single parameter "request" final com.braintreegateway.Result result = requestBody("direct://CREDIT", null); assertNotNull("credit result", result); 
LOG.debug("credit: " + result); } // TODO provide parameter values for holdInEscrow @Ignore @Test public void testHoldInEscrow() throws Exception { // using String message body for single parameter "id" final com.braintreegateway.Result result = requestBody("direct://HOLDINESCROW", null); assertNotNull("holdInEscrow result", result); LOG.debug("holdInEscrow: " + result); } // TODO provide parameter values for releaseFromEscrow @Ignore @Test public void testReleaseFromEscrow() throws Exception { // using String message body for single parameter "id" final com.braintreegateway.Result result = requestBody("direct://RELEASEFROMESCROW", null); assertNotNull("releaseFromEscrow result", result); LOG.debug("releaseFromEscrow: " + result); } // TODO provide parameter values for search @Ignore @Test public void testSearch() throws Exception { // using com.braintreegateway.TransactionSearchRequest message body for single parameter "query" final com.braintreegateway.ResourceCollection result = requestBody("direct://SEARCH", null); assertNotNull("search result", result); LOG.debug("search: " + result); } // TODO provide parameter values for submitForPartialSettlement @Ignore @Test public void testSubmitForPartialSettlement() throws Exception { final Map<String, Object> headers = new HashMap<>(); // parameter type is String headers.put("CamelBraintree.id", null); // parameter type is java.math.BigDecimal headers.put("CamelBraintree.amount", null); final com.braintreegateway.Result result = requestBodyAndHeaders("direct://SUBMITFORPARTIALSETTLEMENT", null, headers); assertNotNull("submitForPartialSettlement result", result); LOG.debug("submitForPartialSettlement: " + result); } // TODO provide parameter values for voidTransaction @Ignore @Test public void testVoidTransaction() throws Exception { // using String message body for single parameter "id" final com.braintreegateway.Result result = requestBody("direct://VOIDTRANSACTION", null); assertNotNull("voidTransaction result", 
result); LOG.debug("voidTransaction: " + result); } // ************************************************************************* // ROUTES // ************************************************************************* @Override protected RouteBuilder createRouteBuilder() throws Exception { return new RouteBuilder() { public void configure() { // test route for cancelRelease from("direct://CANCELRELEASE") .to("braintree://" + PATH_PREFIX + "/cancelRelease?inBody=id"); // test route for cloneTransaction from("direct://CLONETRANSACTION") .to("braintree://" + PATH_PREFIX + "/cloneTransaction"); // test route for credit from("direct://CREDIT") .to("braintree://" + PATH_PREFIX + "/credit?inBody=request"); // test route for find from("direct://FIND") .to("braintree://" + PATH_PREFIX + "/find?inBody=id"); // test route for holdInEscrow from("direct://HOLDINESCROW") .to("braintree://" + PATH_PREFIX + "/holdInEscrow?inBody=id"); // test route for refund from("direct://REFUND") .to("braintree://" + PATH_PREFIX + "/refund"); // test route for refund from("direct://REFUND_WITH_ID") .to("braintree://" + PATH_PREFIX + "/refund?inBody=id"); // test route for releaseFromEscrow from("direct://RELEASEFROMESCROW") .to("braintree://" + PATH_PREFIX + "/releaseFromEscrow?inBody=id"); // test route for sale from("direct://SALE") .to("braintree://" + PATH_PREFIX + "/sale?inBody=request"); // test route for search from("direct://SEARCH") .to("braintree://" + PATH_PREFIX + "/search?inBody=query"); // test route for submitForPartialSettlement from("direct://SUBMITFORPARTIALSETTLEMENT") .to("braintree://" + PATH_PREFIX + "/submitForPartialSettlement"); // test route for submitForSettlement from("direct://SUBMITFORSETTLEMENT_WITH_ID") .to("braintree://" + PATH_PREFIX + "/submitForSettlement?inBody=id"); // test route for submitForSettlement from("direct://SUBMITFORSETTLEMENT_WITH_ID_ADN_AMOUNT") .to("braintree://" + PATH_PREFIX + "/submitForSettlement"); // test route for submitForSettlement 
from("direct://SUBMITFORSETTLEMENT_WITH_REQUEST") .to("braintree://" + PATH_PREFIX + "/submitForSettlement"); // test route for voidTransaction from("direct://VOIDTRANSACTION") .to("braintree://" + PATH_PREFIX + "/voidTransaction?inBody=id"); } }; } }
// Generated by Haxe 3.3.0
// NOTE(review): machine-generated Haxe->Java runtime support class. The unusual
// constructs (redundant casts, precomputed string-hash switches, `throw null`)
// are emitted by the Haxe compiler; code is intentionally left byte-identical,
// only the repetitive generated "//line" markers were replaced with real comments.
package haxe.root;
import haxe.root.*;

/** Haxe `Type` reflection API mapped onto Java reflection. */
@SuppressWarnings(value={"rawtypes", "unchecked"})
public class Type extends haxe.lang.HxObject {

    public Type(haxe.lang.EmptyObject empty) {
    }

    public Type() {
        haxe.root.Type.__hx_ctor__Type(this);
    }

    public static void __hx_ctor__Type(haxe.root.Type __temp_me5) {
    }

    /**
     * Returns the Java class of {@code o}, or null for null values, Haxe anonymous
     * objects (DynamicObject) and Class values themselves.
     */
    public static <T> java.lang.Class getClass(T o) {
        boolean tmp = false;
        boolean tmp1 = false;
        // tmp1 == true when o is null or a DynamicObject; tmp == true additionally when o is a Class.
        boolean tmp2 = ( ! (( o == null )) );
        if (tmp2) {
            tmp1 = ( o instanceof haxe.lang.DynamicObject );
        } else {
            tmp1 = true;
        }
        if ( ! (tmp1) ) {
            tmp = ( o instanceof java.lang.Class );
        } else {
            tmp = true;
        }
        if (tmp) {
            return null;
        }
        return ((java.lang.Class) (((java.lang.Class) (((java.lang.Class) (o.getClass()) )) )) );
    }

    /** Returns the enum class of {@code o} if it is a Java or Haxe enum value, else null. */
    public static java.lang.Class getEnum(java.lang.Object o) {
        if (( ( o instanceof java.lang.Enum ) || ( o instanceof haxe.lang.Enum ) )) {
            return o.getClass();
        }
        return null;
    }

    /**
     * Returns the superclass of {@code c}, hiding the Haxe/Java implementation roots
     * (haxe.lang.HxObject and java.lang.Object map to null).
     */
    public static java.lang.Class getSuperClass(java.lang.Class c) {
        java.lang.Class c1 = ((java.lang.Class) (c) );
        java.lang.Class cl = null;
        boolean tmp = ( ((java.lang.Object) (c1) ) == ((java.lang.Object) (null) ) );
        if (tmp) {
            cl = null;
        } else {
            cl = c1.getSuperclass();
        }
        boolean tmp1 = false;
        boolean tmp2 = false;
        boolean tmp3 = ( ((java.lang.Object) (cl) ) != ((java.lang.Object) (null) ) );
        if (tmp3) {
            tmp2 = ! (haxe.lang.Runtime.valEq(cl.getName(), "haxe.lang.HxObject")) ;
        } else {
            tmp2 = false;
        }
        if (tmp2) {
            tmp1 = ! (haxe.lang.Runtime.valEq(cl.getName(), "java.lang.Object")) ;
        } else {
            tmp1 = false;
        }
        if (tmp1) {
            return ((java.lang.Class) (cl) );
        }
        return null;
    }

    /**
     * Returns the Haxe-visible name of {@code c}: strips the "haxe.root."/"java.lang"
     * prefixes and maps boxed/primitive names back to Haxe type names
     * (double->Float, Object->Dynamic, int->Int).
     */
    public static java.lang.String getClassName(java.lang.Class c) {
        java.lang.Class c1 = ((java.lang.Class) (c) );
        java.lang.String name = c1.getName();
        if (name.startsWith("haxe.root.")) {
            return haxe.lang.StringExt.substr(name, 10, null);
        }
        if (name.startsWith("java.lang")) {
            name = haxe.lang.StringExt.substr(name, 10, null);
        }
        {
            // Compiler-generated switch on the precomputed String.hashCode of `name`;
            // each case re-checks with equals() to guard against hash collisions.
            java.lang.String __temp_svar1 = (name);
            int __temp_hash3 = __temp_svar1.hashCode();
            boolean __temp_executeDef2 = true;
            switch (__temp_hash3) {
                case -1325958191: case 2052876273: {
                    if (( (( ( __temp_hash3 == -1325958191 ) && __temp_svar1.equals("double") )) || __temp_svar1.equals("Double") )) {
                        __temp_executeDef2 = false;
                        return "Float";
                    }
                    break;
                }
                case -1939501217: {
                    if (__temp_svar1.equals("Object")) {
                        __temp_executeDef2 = false;
                        return "Dynamic";
                    }
                    break;
                }
                case 104431: case -672261858: {
                    if (( (( ( __temp_hash3 == 104431 ) && __temp_svar1.equals("int") )) || __temp_svar1.equals("Integer") )) {
                        __temp_executeDef2 = false;
                        return "Int";
                    }
                    break;
                }
            }
            // Default branch of the generated switch; the `throw null` arm is unreachable.
            if (__temp_executeDef2) {
                return name;
            } else {
                throw null;
            }
        }
    }

    /** Returns the Haxe-visible name of enum class {@code e} (Bool for booleans). */
    public static java.lang.String getEnumName(java.lang.Class e) {
        java.lang.Class c = ((java.lang.Class) (e) );
        java.lang.String ret = c.getName();
        if (ret.startsWith("haxe.root.")) {
            return haxe.lang.StringExt.substr(ret, 10, null);
        } else {
            if (( haxe.lang.Runtime.valEq(ret, "boolean") || haxe.lang.Runtime.valEq(ret, "java.lang.Boolean") )) {
                return "Bool";
            }
        }
        return ret;
    }

    /**
     * Resolves a Haxe class name to a Java Class. Unqualified names are looked up
     * under "haxe.root."; on ClassNotFoundException the well-known built-ins
     * (Class/String/Dynamic/Math/Float/Int) are mapped explicitly, else null.
     */
    public static java.lang.Class resolveClass(java.lang.String name) {
        try {
            int tmp = haxe.lang.StringExt.indexOf(name, ".", null);
            if (( tmp == -1 )) {
                name = ( "haxe.root." + name );
            }
            return ((java.lang.Class) (java.lang.Class.forName(haxe.lang.Runtime.toString(name))) );
        } catch (java.lang.ClassNotFoundException e) {
            haxe.lang.Exceptions.setException(e);
            {
                // Generated hash-switch fallback for built-in type names.
                java.lang.String __temp_svar1 = (name);
                boolean __temp_executeDef2 = true;
                switch (__temp_svar1.hashCode()) {
                    case 360541844: {
                        if (__temp_svar1.equals("haxe.root.Class")) {
                            __temp_executeDef2 = false;
                            return java.lang.Class.class;
                        }
                        break;
                    }
                    case -1242153355: {
                        if (__temp_svar1.equals("haxe.root.String")) {
                            __temp_executeDef2 = false;
                            return java.lang.String.class;
                        }
                        break;
                    }
                    case -140489125: {
                        if (__temp_svar1.equals("haxe.root.Dynamic")) {
                            __temp_executeDef2 = false;
                            return java.lang.Object.class;
                        }
                        break;
                    }
                    case 704654956: {
                        if (__temp_svar1.equals("haxe.root.Math")) {
                            __temp_executeDef2 = false;
                            return java.lang.Math.class;
                        }
                        break;
                    }
                    case 363325304: {
                        if (__temp_svar1.equals("haxe.root.Float")) {
                            __temp_executeDef2 = false;
                            return double.class;
                        }
                        break;
                    }
                    case -1778387957: {
                        if (__temp_svar1.equals("haxe.root.Int")) {
                            __temp_executeDef2 = false;
                            return int.class;
                        }
                        break;
                    }
                }
                if (__temp_executeDef2) {
                    return null;
                } else {
                    throw null;
                }
            }
        }
    }

    /**
     * Resolves a Haxe enum name to its Java class; only classes that are Java enums
     * or Haxe enums qualify (Bool maps to boolean.class).
     */
    public static java.lang.Class resolveEnum(java.lang.String name) {
        if ("Bool".equals(name)) return boolean.class;
        Class r = resolveClass(name);
        if (r != null && (r.getSuperclass() == java.lang.Enum.class || haxe.lang.Enum.class.isAssignableFrom(r))) return r;
        return null;
    }

    /**
     * Instantiates {@code cl} with the given constructor arguments, selecting a
     * compatible public constructor by arity and parameter assignability, and
     * narrowing boxed numbers to the constructor's primitive parameter types.
     * NOTE(review): if no constructor survives filtering, `ms[0]` below is null (or
     * the array is empty) and this fails with NPE/AIOOBE rather than a clear error.
     */
    public static <T> T createInstance(java.lang.Class cl, haxe.root.Array args) {
        int len = args.length;
        java.lang.Class[] cls = new java.lang.Class[len];
        java.lang.Object[] objs = new java.lang.Object[len];

        // Compact the constructor array in place, keeping varargs constructors and
        // those whose arity matches; `realMsl` is the count of survivors.
        java.lang.reflect.Constructor[] ms = cl.getConstructors();
        int msl = ms.length;
        int realMsl = 0;
        for(int i =0; i < msl; i++) {
            if (!ms[i].isVarArgs() && ms[i].getParameterTypes().length != len) {
                ms[i] = null;
            } else {
                ms[realMsl] = ms[i];
                if (realMsl != i) ms[i] = null;
                realMsl++;
            }
        }

        // For each argument, drop constructors whose i-th parameter cannot accept it;
        // numbers match any primitive parameter (narrowed later).
        boolean hasNumber = false;
        for (int i = 0; i < len; i++) {
            Object o = args.__get(i);
            objs[i]= o;
            cls[i] = o.getClass();
            boolean isNum = false;
            if (o instanceof java.lang.Number) {
                cls[i] = java.lang.Number.class;
                isNum = hasNumber = true;
            }
            msl = realMsl;
            realMsl = 0;
            for (int j = 0; j < msl; j++) {
                java.lang.Class[] allcls = ms[j].getParameterTypes();
                if (i < allcls.length) {
                    if (! ((isNum && allcls[i].isPrimitive()) || allcls[i].isAssignableFrom(cls[i])) ) {
                        ms[j] = null;
                    } else {
                        ms[realMsl] = ms[j];
                        if (realMsl != j) ms[j] = null;
                        realMsl++;
                    }
                }
            }
        }

        // First surviving constructor wins.
        java.lang.reflect.Constructor found = ms[0];
        if (hasNumber) {
            // Convert boxed Number arguments to the exact primitive/wrapper type the
            // chosen constructor declares (double/int/float/byte/short).
            java.lang.Class[] allcls = found.getParameterTypes();
            for (int i = 0; i < len; i++) {
                java.lang.Object o = objs[i];
                if (o instanceof java.lang.Number) {
                    java.lang.Class curCls = null;
                    if (i < allcls.length) {
                        curCls = allcls[i];
                        if (!curCls.isAssignableFrom(o.getClass())) {
                            String name = curCls.getName();
                            if (name.equals("double") || name.equals("java.lang.Double")) {
                                objs[i] = ((java.lang.Number)o).doubleValue();
                            } else if (name.equals("int") || name.equals("java.lang.Integer")) {
                                objs[i] = ((java.lang.Number)o).intValue();
                            } else if (name.equals("float") || name.equals("java.lang.Float")) {
                                objs[i] = ((java.lang.Number)o).floatValue();
                            } else if (name.equals("byte") || name.equals("java.lang.Byte")) {
                                objs[i] = ((java.lang.Number)o).byteValue();
                            } else if (name.equals("short") || name.equals("java.lang.Short")) {
                                objs[i] = ((java.lang.Number)o).shortValue();
                            }
                        }
                    }
                    //else varargs not handled TODO
                }
            }
        }
        try {
            found.setAccessible(true);
            return (T) found.newInstance(objs);
        } catch (java.lang.reflect.InvocationTargetException e) {
            // Unwrap the reflective wrapper so Haxe code sees the real cause.
            throw haxe.lang.HaxeException.wrap(e.getCause());
        } catch (Throwable t) {
            throw haxe.lang.HaxeException.wrap(t);
        }
    }

    /**
     * Creates an instance via the class's generated __hx_createEmpty hook when
     * present, else falls back to a zero-argument createInstance call.
     */
    public static <T> T createEmptyInstance(java.lang.Class cl) {
        if (haxe.root.Reflect.hasField(cl, "__hx_createEmpty")) {
            return ((T) (haxe.lang.Runtime.callField(cl, "__hx_createEmpty", null)) );
        }
        return ((T) (haxe.root.Type.createInstance(((java.lang.Class) (cl) ), ((haxe.root.Array) (new haxe.root.Array(new java.lang.Object[]{})) ))) );
    }

    /**
     * Creates an enum value by constructor name: field access for argument-less
     * constructors, a call for parameterized ones.
     */
    public static <T> T createEnum(java.lang.Class e, java.lang.String constr, haxe.root.Array params) {
        if (params == null || params.length == 0) {
            java.lang.Object ret = haxe.lang.Runtime.slowGetField(e, constr, true);
            if (ret instanceof haxe.lang.Function) throw haxe.lang.HaxeException.wrap("Constructor " + constr + " needs parameters");
            return (T) ret;
        } else {
            return (T) haxe.lang.Runtime.slowCallField(e, constr, params);
        }
    }

    /** Creates an enum value by constructor index (see {@link #createEnum}). */
    public static <T> T createEnumIndex(java.lang.Class e, int index, haxe.root.Array params) {
        haxe.root.Array<java.lang.String> constr = haxe.root.Type.getEnumConstructs(e);
        return ((T) (haxe.root.Type.createEnum(((java.lang.Class) (e) ), haxe.lang.Runtime.toString(constr.__get(index)), ((haxe.root.Array) (params) ))) );
    }

    /**
     * Lists the public non-static fields and methods of {@code c}, skipping
     * compiler-internal "__hx_" members and java.lang.Object's own methods.
     * Strings use a precomputed field list.
     */
    public static haxe.root.Array<java.lang.String> getInstanceFields(java.lang.Class c) {
        if (c == java.lang.String.class) {
            return haxe.lang.StringRefl.fields;
        }
        Array<String> ret = new Array<String>();
        for (java.lang.reflect.Field f : c.getFields()) {
            java.lang.String fname = f.getName();
            if (!java.lang.reflect.Modifier.isStatic(f.getModifiers()) && !fname.startsWith("__hx_"))
                ret.push(fname);
        }
        for (java.lang.reflect.Method m : c.getMethods()) {
            if (m.getDeclaringClass() == java.lang.Object.class) continue;
            java.lang.String mname = m.getName();
            if (!java.lang.reflect.Modifier.isStatic(m.getModifiers()) && !mname.startsWith("__hx_"))
                ret.push(mname);
        }
        return ret;
    }

    /**
     * Lists the static fields (and, beyond this view, methods) of {@code c},
     * skipping "__hx_" internals; String gets the synthetic "fromCharCode".
     */
    public static haxe.root.Array<java.lang.String> getClassFields(java.lang.Class c) {
        Array<String> ret = new Array<String>();
        if (c == java.lang.String.class) {
            ret.push("fromCharCode");
            return ret;
        }
        for (java.lang.reflect.Field f : c.getDeclaredFields()) {
            java.lang.String fname = f.getName();
            if (java.lang.reflect.Modifier.isStatic(f.getModifiers()) && !fname.startsWith("__hx_"))
                ret.push(fname);
        }
        // NOTE(review): chunk is truncated here mid-method; the remainder of
        // getClassFields continues beyond this view.
        for
(java.lang.reflect.Method m : c.getDeclaredMethods()) { if (m.getDeclaringClass() == java.lang.Object.class) continue; java.lang.String mname = m.getName(); if (java.lang.reflect.Modifier.isStatic(m.getModifiers()) && !mname.startsWith("__hx_")) ret.push(mname); } return ret; } public static haxe.root.Array<java.lang.String> getEnumConstructs(java.lang.Class e) { //line 334 "/usr/local/lib/haxe/std/java/_std/Type.hx" if (haxe.root.Reflect.hasField(e, "__hx_constructs")) { //line 336 "/usr/local/lib/haxe/std/java/_std/Type.hx" haxe.root.Array<java.lang.String> ret = haxe.java.Lib.array_String(((java.lang.String[]) (haxe.lang.Runtime.getField(e, "__hx_constructs", true)) )); //line 337 "/usr/local/lib/haxe/std/java/_std/Type.hx" return ret.copy(); } //line 339 "/usr/local/lib/haxe/std/java/_std/Type.hx" java.lang.Enum[] vals = ((java.lang.Enum[]) (haxe.lang.Runtime.callField(e, "values", null)) ); //line 339 "/usr/local/lib/haxe/std/java/_std/Type.hx" haxe.root.Array<java.lang.String> ret1 = new haxe.root.Array<java.lang.String>(new java.lang.String[]{}); //line 340 "/usr/local/lib/haxe/std/java/_std/Type.hx" { //line 340 "/usr/local/lib/haxe/std/java/_std/Type.hx" int _g1 = 0; //line 340 "/usr/local/lib/haxe/std/java/_std/Type.hx" int _g = vals.length; //line 340 "/usr/local/lib/haxe/std/java/_std/Type.hx" while (( _g1 < _g )) { //line 340 "/usr/local/lib/haxe/std/java/_std/Type.hx" int i = _g1++; //line 341 "/usr/local/lib/haxe/std/java/_std/Type.hx" ret1.__set(i, vals[i].name()); } } //line 342 "/usr/local/lib/haxe/std/java/_std/Type.hx" return ret1; } public static haxe.root.ValueType typeof(java.lang.Object v) { if (v == null) return ValueType.TNull; if (v instanceof haxe.lang.IHxObject) { haxe.lang.IHxObject vobj = (haxe.lang.IHxObject) v; java.lang.Class cl = vobj.getClass(); if (v instanceof haxe.lang.DynamicObject) return ValueType.TObject; else return ValueType.TClass(cl); } else if (v instanceof java.lang.Number) { java.lang.Number n = (java.lang.Number) 
v; if (n.intValue() == n.doubleValue()) return ValueType.TInt; else return ValueType.TFloat; } else if (v instanceof haxe.lang.Function) { return ValueType.TFunction; } else if (v instanceof java.lang.Enum || v instanceof haxe.lang.Enum) { return ValueType.TEnum(v.getClass()); } else if (v instanceof java.lang.Boolean) { return ValueType.TBool; } else if (v instanceof java.lang.Class) { return ValueType.TObject; } else { return ValueType.TClass(v.getClass()); } } public static <T> boolean enumEq(T a, T b) { if (a instanceof haxe.lang.Enum) return a.equals(b); else return haxe.lang.Runtime.eq(a, b); } public static java.lang.String enumConstructor(java.lang.Object e) { if (e instanceof java.lang.Enum) return ((java.lang.Enum) e).name(); else return ((haxe.lang.Enum) e).getTag(); } public static haxe.root.Array enumParameters(java.lang.Object e) { return ( e instanceof java.lang.Enum ) ? new haxe.root.Array() : ((haxe.lang.Enum) e).getParams(); } public static int enumIndex(java.lang.Object e) { if (e instanceof java.lang.Enum) return ((java.lang.Enum) e).ordinal(); else return ((haxe.lang.Enum) e).index; } public static <T> haxe.root.Array<T> allEnums(java.lang.Class e) { //line 421 "/usr/local/lib/haxe/std/java/_std/Type.hx" haxe.root.Array<java.lang.String> ctors = haxe.root.Type.getEnumConstructs(e); //line 422 "/usr/local/lib/haxe/std/java/_std/Type.hx" haxe.root.Array<T> ret = new haxe.root.Array<T>(( (T[]) (new java.lang.Object[] {}) )); //line 423 "/usr/local/lib/haxe/std/java/_std/Type.hx" { //line 423 "/usr/local/lib/haxe/std/java/_std/Type.hx" int _g = 0; //line 423 "/usr/local/lib/haxe/std/java/_std/Type.hx" while (( _g < ctors.length )) { //line 423 "/usr/local/lib/haxe/std/java/_std/Type.hx" java.lang.String ctor = ctors.__get(_g); //line 423 "/usr/local/lib/haxe/std/java/_std/Type.hx" ++ _g; //line 425 "/usr/local/lib/haxe/std/java/_std/Type.hx" T v = ((T) (haxe.root.Reflect.field(e, ctor)) ); //line 426 "/usr/local/lib/haxe/std/java/_std/Type.hx" if 
(haxe.root.Std.is(v, e)) { //line 427 "/usr/local/lib/haxe/std/java/_std/Type.hx" ret.push(v); } } } //line 430 "/usr/local/lib/haxe/std/java/_std/Type.hx" return ret; } public static java.lang.Object __hx_createEmpty() { //line 39 "/usr/local/lib/haxe/std/java/_std/Type.hx" return new haxe.root.Type(haxe.lang.EmptyObject.EMPTY); } public static java.lang.Object __hx_create(haxe.root.Array arr) { //line 39 "/usr/local/lib/haxe/std/java/_std/Type.hx" return new haxe.root.Type(); } }
package natlab.backends.Fortran.codegen;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;

import ast.ASTNode;
import natlab.tame.tir.TIRAbstractAssignToListStmt;
import natlab.tame.tir.TIRAbstractAssignToVarStmt;
import natlab.tame.tir.TIRArrayGetStmt;
import natlab.tame.tir.TIRArraySetStmt;
import natlab.tame.tir.TIRAssignLiteralStmt;
import natlab.tame.tir.TIRCommentStmt;
import natlab.tame.tir.TIRForStmt;
import natlab.tame.tir.TIRFunction;
import natlab.tame.tir.TIRIfStmt;
import natlab.tame.tir.TIRNode;
import natlab.tame.tir.TIRWhileStmt;
import natlab.tame.tir.analysis.TIRAbstractNodeCaseHandler;
import natlab.tame.valueanalysis.ValueAnalysis;
import natlab.tame.valueanalysis.aggrvalue.AggrValue;
import natlab.tame.valueanalysis.basicmatrix.BasicMatrixValue;
import natlab.backends.Fortran.codegen.FortranAST.*;
import natlab.backends.Fortran.codegen.ASTcaseHandler.*;

/**
 * Walks one function of the Tamer IR call graph (selected by {@code index})
 * and builds the corresponding Fortran AST {@link SubProgram}. Each TIR
 * statement case delegates to its ASTcaseHandler and routes the generated
 * statement either into the subprogram body or, while inside an
 * if/while/for block, into {@code stmtSecForIfWhileForBlock}.
 */
public class FortranCodeASTGenerator extends TIRAbstractNodeCaseHandler {
    public ValueAnalysis<AggrValue<BasicMatrixValue>> analysis;
    public StringBuffer buf;
    public StringBuffer buf2;
    public FortranMapping FortranMap;
    public ArrayList<String> forStmtParameter;
    public ArrayList<String> arrayIndexParameter;
    public int callgraphSize;
    public int index;
    public String fileDir;
    public String majorName;
    public ArrayList<String> inArgs;
    public ArrayList<String> outRes;
    // funcNameRep: key = user-defined function name, value = the substitute variable name.
    public HashMap<String, String> funcNameRep;
    // isSubroutine: distinguishes a Fortran subroutine from a Fortran function.
    public boolean isSubroutine;
    public HashSet<String> inputHasChanged;
    public HashSet<String> arrayConvert;
    // tmpVariables: temporaries introduced during code generation; key = name, value = its value info.
    public HashMap<String, BasicMatrixValue> tmpVariables;
    // Depth of nested if/while/for blocks currently being generated; 0 = subprogram top level.
    public int ifWhileForBlockNest;
    public StatementSection stmtSecForIfWhileForBlock;
    public SubProgram SubProgram;
    public int indentNum;
    public String indent;
    // tmpVarAsArrayIndex: key = temp variable used as an array index, value = the range of that variable.
    public HashMap<String, ArrayList<String>> tmpVarAsArrayIndex;
    static boolean Debug = false;

    /**
     * Builds the generator and immediately runs the TIR traversal for the
     * function at {@code index} of the analyzed call graph; the result is
     * available in {@link #SubProgram} afterwards.
     */
    public FortranCodeASTGenerator(ValueAnalysis<AggrValue<BasicMatrixValue>> analysis,
            int callgraphSize, int index, String fileDir) {
        this.analysis = analysis;
        this.FortranMap = new FortranMapping();
        this.forStmtParameter = new ArrayList<String>();
        this.arrayIndexParameter = new ArrayList<String>();
        this.callgraphSize = callgraphSize;
        this.index = index;
        this.fileDir = fileDir;
        this.majorName = "";
        this.inArgs = new ArrayList<String>();
        this.outRes = new ArrayList<String>();
        this.funcNameRep = new HashMap<String, String>();
        this.isSubroutine = false;
        this.inputHasChanged = new HashSet<String>();
        this.arrayConvert = new HashSet<String>();
        this.tmpVariables = new HashMap<String, BasicMatrixValue>();
        this.ifWhileForBlockNest = 0;
        this.stmtSecForIfWhileForBlock = new StatementSection();
        this.SubProgram = new SubProgram();
        this.indentNum = 0;
        this.indent = "   ";
        this.tmpVarAsArrayIndex = new HashMap<String, ArrayList<String>>();
        ((TIRNode) analysis.getNodeList().get(index).getAnalysis().getTree()).tirAnalyze(this);
    }

    /******************HELPER METHODS**************************/
    /** Convenience entry point: generates and returns the Fortran subprogram. */
    public static SubProgram FortranProgramGen(
            ValueAnalysis<AggrValue<BasicMatrixValue>> analysis,
            int callgraphSize, int index, String fileDir) {
        return new FortranCodeASTGenerator(analysis, callgraphSize, index, fileDir).SubProgram;
    }

    /** Dispatches each statement of the list back into this case handler. */
    public void iterateStatements(ast.List<ast.Stmt> stmts) {
        for (ast.Stmt stmt : stmts) {
            ((TIRNode) stmt).tirAnalyze(this);
        }
    }

    /** Returns true when at least one input argument is non-scalar (an array). */
    public boolean hasArrayAsInput() {
        for (String inArg : this.inArgs) {
            BasicMatrixValue value = (BasicMatrixValue) (this.analysis.getNodeList()
                    .get(this.index).getAnalysis().getCurrentOutSet().get(inArg)).getSingleton();
            if (!value.getShape().isScalar()) {
                return true; // one array input is enough; no need to keep scanning
            }
        }
        return false;
    }

    /**
     * Routes a generated Fortran statement into the section currently being
     * built: inside an if/while/for block statements are collected in
     * {@link #stmtSecForIfWhileForBlock}, otherwise they go straight into
     * the subprogram's statement section.
     * (Assumes every ASTcaseHandler getFortran(...) returns a FortranAST
     * {@code Statement} — TODO confirm against FortranAST definitions.)
     */
    private void addStatement(Statement stmt) {
        if (this.ifWhileForBlockNest != 0) {
            this.stmtSecForIfWhileForBlock.addStatement(stmt);
        } else {
            this.SubProgram.getStatementSection().addStatement(stmt);
        }
    }

    /**
     * Constant variable replacement check: true when the assignment target is
     * a compile-time constant that is not an output result, in which case the
     * assignment statement is dropped (the constant is inlined at its uses).
     * Logs the decision when {@link #Debug} is on.
     */
    private boolean isInlinedConstant(String varName) {
        BasicMatrixValue value = (BasicMatrixValue) (this.analysis.getNodeList()
                .get(this.index).getAnalysis().getCurrentOutSet().get(varName)).getSingleton();
        if (value.isConstant() && !this.outRes.contains(varName)) {
            if (Debug) System.out.println(varName + " is a constant");
            return true;
        }
        return false;
    }

    /******************AST NODE OVERRIDE***********************/
    @Override
    public void caseASTNode(ASTNode node) {
        // Deliberately empty: nodes without a dedicated case are ignored.
    }

    @Override
    public void caseTIRFunction(TIRFunction node) {
        // The function handler populates SubProgram itself; nothing to route.
        HandleCaseTIRFunction functionStmt = new HandleCaseTIRFunction();
        functionStmt.getFortran(this, node);
    }

    @Override
    public void caseTIRCommentStmt(TIRCommentStmt node) {
        addStatement(new HandleCaseTIRCommentStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRAssignLiteralStmt(TIRAssignLiteralStmt node) {
        if (isInlinedConstant(node.getTargetName().getVarName())) {
            return; // constant replacement: no Fortran statement emitted
        }
        addStatement(new HandleCaseTIRAssignLiteralStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRAbstractAssignToVarStmt(TIRAbstractAssignToVarStmt node) {
        if (isInlinedConstant(node.getTargetName().getVarName())) {
            return; // constant replacement: no Fortran statement emitted
        }
        addStatement(new HandleCaseTIRAbstractAssignToVarStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRAbstractAssignToListStmt(TIRAbstractAssignToListStmt node) {
        /*
         * The constant check must be skipped for RHS case 6 (io expressions
         * have no target variable). Note also that the lhs here is a LIST of
         * variables and getTargetName() only yields the first one; use
         * node.getTargets().asNameList() if all targets are ever needed.
         */
        if (HandleCaseTIRAbstractAssignToListStmt.getRHSCaseNumber(this, node) != 6
                && isInlinedConstant(node.getTargetName().getVarName())) {
            return; // constant replacement: no Fortran statement emitted
        }
        addStatement(new HandleCaseTIRAbstractAssignToListStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRIfStmt(TIRIfStmt node) {
        addStatement(new HandleCaseTIRIfStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRWhileStmt(TIRWhileStmt node) {
        addStatement(new HandleCaseTIRWhileStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRForStmt(TIRForStmt node) {
        addStatement(new HandleCaseTIRForStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRArrayGetStmt(TIRArrayGetStmt node) {
        addStatement(new HandleCaseTIRArrayGetStmt().getFortran(this, node));
    }

    @Override
    public void caseTIRArraySetStmt(TIRArraySetStmt node) {
        addStatement(new HandleCaseTIRArraySetStmt().getFortran(this, node));
    }
}
/*
 * Copyright (C) 2008 The Guava Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.common.collect;

import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;

import java.util.Collection;
import java.util.Comparator;
import java.util.Map.Entry;

import javax.annotation.Nullable;

/**
 * A {@link ListMultimap} whose contents will never change, with many other important properties
 * detailed at {@link ImmutableCollection}.
 *
 * <p>See the Guava User Guide article on <a href=
 * "https://github.com/google/guava/wiki/ImmutableCollectionsExplained">
 * immutable collections</a>.
 *
 * @author Jared Levy
 * @since 2.0
 */
@GwtCompatible(serializable = true, emulated = true)
public class ImmutableListMultimap<K, V> extends ImmutableMultimap<K, V>
    implements ListMultimap<K, V> {

  /** Returns the empty multimap. */
  // Casting is safe because the multimap will never hold any elements.
  @SuppressWarnings("unchecked")
  public static <K, V> ImmutableListMultimap<K, V> of() {
    return (ImmutableListMultimap<K, V>) EmptyImmutableListMultimap.INSTANCE;
  }

  /**
   * Returns an immutable multimap containing a single entry.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    return builder.build();
  }

  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    return builder.build();
  }

  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(K k1, V v1, K k2, V v2, K k3, V v3) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    return builder.build();
  }

  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    return builder.build();
  }

  /**
   * Returns an immutable multimap containing the given entries, in order.
   */
  public static <K, V> ImmutableListMultimap<K, V> of(
      K k1, V v1, K k2, V v2, K k3, V v3, K k4, V v4, K k5, V v5) {
    ImmutableListMultimap.Builder<K, V> builder = ImmutableListMultimap.builder();
    builder.put(k1, v1);
    builder.put(k2, v2);
    builder.put(k3, v3);
    builder.put(k4, v4);
    builder.put(k5, v5);
    return builder.build();
  }

  // looking for of() with > 5 entries? Use the builder instead.

  /**
   * Returns a new builder. The generated builder is equivalent to the builder
   * created by the {@link Builder} constructor.
   */
  public static <K, V> Builder<K, V> builder() {
    return new Builder<K, V>();
  }

  /**
   * A builder for creating immutable {@code ListMultimap} instances, especially
   * {@code public static final} multimaps ("constant multimaps"). Example:
   * <pre>   {@code
   *
   *   static final Multimap<String, Integer> STRING_TO_INTEGER_MULTIMAP =
   *       new ImmutableListMultimap.Builder<String, Integer>()
   *           .put("one", 1)
   *           .putAll("several", 1, 2, 3)
   *           .putAll("many", 1, 2, 3, 4, 5)
   *           .build();}</pre>
   *
   * <p>Builder instances can be reused; it is safe to call {@link #build} multiple
   * times to build multiple multimaps in series. Each multimap contains the
   * key-value mappings in the previously created multimaps.
   *
   * @since 2.0
   */
  public static final class Builder<K, V> extends ImmutableMultimap.Builder<K, V> {
    /**
     * Creates a new builder. The returned builder is equivalent to the builder
     * generated by {@link ImmutableListMultimap#builder}.
     */
    public Builder() {}

    @Override
    public Builder<K, V> put(K key, V value) {
      super.put(key, value);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 11.0
     */
    @Override
    public Builder<K, V> put(Entry<? extends K, ? extends V> entry) {
      super.put(entry);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 19.0
     */
    @Beta
    @Override
    public Builder<K, V> putAll(Iterable<? extends Entry<? extends K, ? extends V>> entries) {
      super.putAll(entries);
      return this;
    }

    @Override
    public Builder<K, V> putAll(K key, Iterable<? extends V> values) {
      super.putAll(key, values);
      return this;
    }

    @Override
    public Builder<K, V> putAll(K key, V... values) {
      super.putAll(key, values);
      return this;
    }

    @Override
    public Builder<K, V> putAll(Multimap<? extends K, ? extends V> multimap) {
      super.putAll(multimap);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @Override
    public Builder<K, V> orderKeysBy(Comparator<? super K> keyComparator) {
      super.orderKeysBy(keyComparator);
      return this;
    }

    /**
     * {@inheritDoc}
     *
     * @since 8.0
     */
    @Override
    public Builder<K, V> orderValuesBy(Comparator<? super V> valueComparator) {
      super.orderValuesBy(valueComparator);
      return this;
    }

    /**
     * Returns a newly-created immutable list multimap.
     */
    @Override
    public ImmutableListMultimap<K, V> build() {
      return (ImmutableListMultimap<K, V>) super.build();
    }
  }

  /**
   * Returns an immutable multimap containing the same mappings as {@code
   * multimap}. The generated multimap's key and value orderings correspond to
   * the iteration ordering of the {@code multimap.asMap()} view.
   *
   * <p>Despite the method name, this method attempts to avoid actually copying
   * the data when it is safe to do so. The exact circumstances under which a
   * copy will or will not be performed are undocumented and subject to change.
   *
   * @throws NullPointerException if any key or value in {@code multimap} is
   *     null
   */
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Multimap<? extends K, ? extends V> multimap) {
    if (multimap.isEmpty()) {
      return of();
    }

    // TODO(lowasser): copy ImmutableSetMultimap by using asList() on the sets
    if (multimap instanceof ImmutableListMultimap) {
      @SuppressWarnings("unchecked") // safe since multimap is not writable
      ImmutableListMultimap<K, V> kvMultimap = (ImmutableListMultimap<K, V>) multimap;
      if (!kvMultimap.isPartialView()) {
        return kvMultimap;
      }
    }

    ImmutableMap.Builder<K, ImmutableList<V>> builder =
        new ImmutableMap.Builder<K, ImmutableList<V>>(multimap.asMap().size());
    int size = 0;

    for (Entry<? extends K, ? extends Collection<? extends V>> entry :
        multimap.asMap().entrySet()) {
      ImmutableList<V> list = ImmutableList.copyOf(entry.getValue());
      if (!list.isEmpty()) {
        builder.put(entry.getKey(), list);
        size += list.size();
      }
    }

    return new ImmutableListMultimap<K, V>(builder.build(), size);
  }

  /**
   * Returns an immutable multimap containing the specified entries. The
   * returned multimap iterates over keys in the order they were first
   * encountered in the input, and the values for each key are iterated in the
   * order they were encountered.
   *
   * @throws NullPointerException if any key, value, or entry is null
   * @since 19.0
   */
  @Beta
  public static <K, V> ImmutableListMultimap<K, V> copyOf(
      Iterable<? extends Entry<? extends K, ? extends V>> entries) {
    return new Builder<K, V>().putAll(entries).build();
  }

  ImmutableListMultimap(ImmutableMap<K, ImmutableList<V>> map, int size) {
    super(map, size);
  }

  // views

  /**
   * Returns an immutable list of the values for the given key. If no mappings
   * in the multimap have the provided key, an empty immutable list is
   * returned. The values are in the same order as the parameters used to build
   * this multimap.
   */
  @Override
  public ImmutableList<V> get(@Nullable K key) {
    // This cast is safe as its type is known in constructor.
    ImmutableList<V> list = (ImmutableList<V>) map.get(key);
    return (list == null) ? ImmutableList.<V>of() : list;
  }

  private transient ImmutableListMultimap<V, K> inverse;

  /**
   * {@inheritDoc}
   *
   * <p>Because an inverse of a list multimap can contain multiple pairs with
   * the same key and value, this method returns an {@code
   * ImmutableListMultimap} rather than the {@code ImmutableMultimap} specified
   * in the {@code ImmutableMultimap} class.
   *
   * @since 11.0
   */
  @Override
  public ImmutableListMultimap<V, K> inverse() {
    ImmutableListMultimap<V, K> result = inverse;
    return (result == null) ? (inverse = invert()) : result;
  }

  private ImmutableListMultimap<V, K> invert() {
    Builder<V, K> builder = builder();
    for (Entry<K, V> entry : entries()) {
      builder.put(entry.getValue(), entry.getKey());
    }
    ImmutableListMultimap<V, K> invertedMultimap = builder.build();
    // Cache the round trip: the inverse of the inverse is this multimap.
    invertedMultimap.inverse = this;
    return invertedMultimap;
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public ImmutableList<V> removeAll(Object key) {
    throw new UnsupportedOperationException();
  }

  /**
   * Guaranteed to throw an exception and leave the multimap unmodified.
   *
   * @throws UnsupportedOperationException always
   * @deprecated Unsupported operation.
   */
  @Deprecated
  @Override
  public ImmutableList<V> replaceValues(K key, Iterable<? extends V> values) {
    throw new UnsupportedOperationException();
  }
}
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.prestosql.plugin.hive.optimizer;

import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.Files;
import io.prestosql.Session;
import io.prestosql.metadata.QualifiedObjectName;
import io.prestosql.metadata.TableHandle;
import io.prestosql.plugin.hive.HdfsConfig;
import io.prestosql.plugin.hive.HdfsConfiguration;
import io.prestosql.plugin.hive.HdfsConfigurationInitializer;
import io.prestosql.plugin.hive.HdfsEnvironment;
import io.prestosql.plugin.hive.HiveColumnHandle;
import io.prestosql.plugin.hive.HiveHdfsConfiguration;
import io.prestosql.plugin.hive.HiveTableHandle;
import io.prestosql.plugin.hive.NodeVersion;
import io.prestosql.plugin.hive.authentication.HiveIdentity;
import io.prestosql.plugin.hive.authentication.NoHdfsAuthentication;
import io.prestosql.plugin.hive.metastore.Database;
import io.prestosql.plugin.hive.metastore.HiveMetastore;
import io.prestosql.plugin.hive.metastore.MetastoreConfig;
import io.prestosql.plugin.hive.metastore.file.FileHiveMetastore;
import io.prestosql.plugin.hive.metastore.file.FileHiveMetastoreConfig;
import io.prestosql.plugin.hive.testing.TestingHiveConnectorFactory;
import io.prestosql.spi.connector.ColumnHandle;
import io.prestosql.spi.predicate.Domain;
import io.prestosql.spi.predicate.TupleDomain;
import io.prestosql.spi.security.PrincipalType;
import io.prestosql.sql.planner.assertions.BasePushdownPlanTest;
import io.prestosql.testing.LocalQueryRunner;
import org.testng.annotations.AfterClass;
import org.testng.annotations.Test;

import java.io.File;
import java.util.Map;
import java.util.Optional;

import static com.google.common.base.Predicates.equalTo;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.prestosql.plugin.hive.TestHiveReaderProjectionsUtil.createProjectedColumnHandle;
import static io.prestosql.spi.type.BigintType.BIGINT;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.any;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.anyTree;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.equiJoinClause;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.expression;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.filter;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.join;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.project;
import static io.prestosql.sql.planner.assertions.PlanMatchPattern.tableScan;
import static io.prestosql.sql.planner.plan.JoinNode.Type.INNER;
import static io.prestosql.testing.TestingSession.testSessionBuilder;
import static java.lang.String.format;
import static org.testng.Assert.assertTrue;

/**
 * Plan tests verifying that dereference-projection pushdown into the Hive
 * table scan works (and can be disabled), using a file-based metastore
 * rooted in a temporary directory.
 */
public class TestHiveProjectionPushdownIntoTableScan
        extends BasePushdownPlanTest
{
    private static final String HIVE_CATALOG_NAME = "hive";
    private static final String SCHEMA_NAME = "test_schema";

    private static final Session HIVE_SESSION = testSessionBuilder()
            .setCatalog(HIVE_CATALOG_NAME)
            .setSchema(SCHEMA_NAME)
            .build();

    // Temporary metastore catalog directory; removed in cleanup().
    private File baseDir;

    @Override
    protected LocalQueryRunner createLocalQueryRunner()
    {
        baseDir = Files.createTempDir();

        HdfsConfig config = new HdfsConfig();
        HdfsConfiguration configuration = new HiveHdfsConfiguration(new HdfsConfigurationInitializer(config), ImmutableSet.of());
        HdfsEnvironment environment = new HdfsEnvironment(configuration, config, new NoHdfsAuthentication());

        HiveMetastore metastore = new FileHiveMetastore(
                new NodeVersion("test_version"),
                environment,
                new MetastoreConfig(),
                new FileHiveMetastoreConfig()
                        .setCatalogDirectory(baseDir.toURI().toString())
                        .setMetastoreUser("test"));

        Database database = Database.builder()
                .setDatabaseName(SCHEMA_NAME)
                .setOwnerName("public")
                .setOwnerType(PrincipalType.ROLE)
                .build();
        metastore.createDatabase(new HiveIdentity(HIVE_SESSION.toConnectorSession()), database);

        LocalQueryRunner queryRunner = LocalQueryRunner.create(HIVE_SESSION);
        queryRunner.createCatalog(HIVE_CATALOG_NAME, new TestingHiveConnectorFactory(metastore), ImmutableMap.of());
        return queryRunner;
    }

    @Test
    public void testPushdownDisabled()
    {
        String testTable = "test_disabled_pushdown";

        // With projection_pushdown_enabled=false the plan must keep an
        // explicit project node over a plain column scan.
        Session session = Session.builder(getQueryRunner().getDefaultSession())
                .setCatalogSessionProperty(HIVE_CATALOG_NAME, "projection_pushdown_enabled", "false")
                .build();

        getQueryRunner().execute(format(
                "CREATE TABLE %s (col0) AS" +
                        " SELECT cast(row(5, 6) as row(a bigint, b bigint)) AS col0 WHERE false",
                testTable));

        assertPlan(
                format("SELECT col0.a expr_a, col0.b expr_b FROM %s", testTable),
                session,
                any(
                        project(
                                ImmutableMap.of("expr", expression("col0.a"), "expr_2", expression("col0.b")),
                                tableScan(testTable, ImmutableMap.of("col0", "col0")))));
    }

    @Test
    public void testDereferencePushdown()
    {
        String testTable = "test_simple_projection_pushdown";
        QualifiedObjectName completeTableName = new QualifiedObjectName(HIVE_CATALOG_NAME, SCHEMA_NAME, testTable);

        getQueryRunner().execute(format(
                "CREATE TABLE %s (col0, col1) AS" +
                        " SELECT cast(row(5, 6) as row(x bigint, y bigint)) AS col0, 5 AS col1 WHERE false",
                testTable));

        Session session = getQueryRunner().getDefaultSession();

        Optional<TableHandle> tableHandle = getTableHandle(session, completeTableName);
        assertTrue(tableHandle.isPresent(), "expected the table handle to be present");

        Map<String, ColumnHandle> columns = getColumnHandles(session, completeTableName);
        HiveColumnHandle column0Handle = (HiveColumnHandle) columns.get("col0");
        HiveColumnHandle column1Handle = (HiveColumnHandle) columns.get("col1");

        HiveColumnHandle columnX = createProjectedColumnHandle(column0Handle, ImmutableList.of(0));
        HiveColumnHandle columnY = createProjectedColumnHandle(column0Handle, ImmutableList.of(1));

        // Simple Projection pushdown
        assertPlan(
                "SELECT col0.x expr_x, col0.y expr_y FROM " + testTable,
                any(tableScan(
                        equalTo(tableHandle.get().getConnectorHandle()),
                        TupleDomain.all(),
                        ImmutableMap.of("col0#x", equalTo(columnX), "col0#y", equalTo(columnY)))));

        // Projection and predicate pushdown
        assertPlan(
                format("SELECT col0.x FROM %s WHERE col0.x = col1 + 3 and col0.y = 2", testTable),
                anyTree(
                        filter(
                                "col0_y = BIGINT '2' AND (col0_x = cast((col1 + 3) as BIGINT))",
                                tableScan(
                                        table -> ((HiveTableHandle) table).getCompactEffectivePredicate().getDomains().get()
                                                .equals(ImmutableMap.of(columnY, Domain.singleValue(BIGINT, 2L))),
                                        TupleDomain.all(),
                                        ImmutableMap.of("col0_y", equalTo(columnY), "col0_x", equalTo(columnX), "col1", equalTo(column1Handle))))));

        // Projection and predicate pushdown with overlapping columns
        assertPlan(
                format("SELECT col0, col0.y expr_y FROM %s WHERE col0.x = 5", testTable),
                anyTree(
                        filter(
                                "col0_x = BIGINT '5'",
                                tableScan(
                                        table -> ((HiveTableHandle) table).getCompactEffectivePredicate().getDomains().get()
                                                .equals(ImmutableMap.of(columnX, Domain.singleValue(BIGINT, 5L))),
                                        TupleDomain.all(),
                                        ImmutableMap.of("col0", equalTo(column0Handle), "col0_x", equalTo(columnX))))));

        // Projection and predicate pushdown with joins
        assertPlan(
                format("SELECT T.col0.x, T.col0, T.col0.y FROM %s T join %s S on T.col1 = S.col1 WHERE (T.col0.x = 2)", testTable, testTable),
                anyTree(
                        project(
                                ImmutableMap.of(
                                        "expr_0_x", expression("expr_0.x"),
                                        "expr_0", expression("expr_0"),
                                        "expr_0_y", expression("expr_0.y")),
                                join(
                                        INNER,
                                        ImmutableList.of(equiJoinClause("t_expr_1", "s_expr_1")),
                                        anyTree(
                                                filter(
                                                        "expr_0_x = BIGINT '2'",
                                                        tableScan(
                                                                table -> ((HiveTableHandle) table).getCompactEffectivePredicate().getDomains().get()
                                                                        .equals(ImmutableMap.of(columnX, Domain.singleValue(BIGINT, 2L))),
                                                                TupleDomain.all(),
                                                                ImmutableMap.of("expr_0_x", equalTo(columnX), "expr_0", equalTo(column0Handle), "t_expr_1", equalTo(column1Handle))))),
                                        anyTree(
                                                tableScan(
                                                        equalTo(tableHandle.get().getConnectorHandle()),
                                                        TupleDomain.all(),
                                                        ImmutableMap.of("s_expr_1", equalTo(column1Handle))))))));
    }

    @AfterClass(alwaysRun = true)
    public void cleanup()
            throws Exception
    {
        if (baseDir != null) {
            deleteRecursively(baseDir.toPath(), ALLOW_INSECURE);
        }
    }
}
/*******************************************************************************
 * Copyright (c) 2014 BestSolution.at EDV Systemhaus GmbH/Austria,
 * http://www.BestSolution.at
 *
 * This file is part of framework-grid which was developed with funding
 * from DI Christoph Hermann - InformationsTechnologie Beratung Hermann
 * /Austria.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *******************************************************************************/
package at.bestsolution.framework.grid.swt.internal;

import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import org.eclipse.jdt.annotation.NonNull;
import org.eclipse.jdt.annotation.Nullable;
import org.eclipse.nebula.widgets.grid.GridItem;
import org.eclipse.swt.SWT;

import at.bestsolution.framework.grid.Property;
import at.bestsolution.framework.grid.Property.ChangeListener;
import at.bestsolution.framework.grid.Util;
import at.bestsolution.framework.grid.XGrid.SelectionMode;
import at.bestsolution.framework.grid.XGridCell;
import at.bestsolution.framework.grid.XGridColumn;
import at.bestsolution.framework.grid.XGridContentProvider;
import at.bestsolution.framework.grid.XGridContentProvider.ContentChangeListener;
import at.bestsolution.framework.grid.XGridContentProvider.ContentChangeType;
import at.bestsolution.framework.grid.XSelection;
import at.bestsolution.framework.grid.swt.SWTGridColumn;
import at.bestsolution.framework.grid.swt.SWTGridTable;

/**
 * Maintains the mapping between content-provider rows and Nebula
 * {@link GridItem}s for a {@link SWTGridTable}: sorting, filtering, selection
 * restoration and change propagation.
 * <p>
 * Rows are wrapped in {@link Wrapper} so that equality/hash are delegated to
 * the grid's element comparer. Two lookup maps are kept in sync:
 * {@code dataMapByR} (wrapper hash code → item, used for row → item lookup)
 * and {@code dataMapByCol} (item → wrapper, used for item → row lookup).
 * </p>
 *
 * @param <R>
 *            content type
 */
public class SWTGridContentHandler<R> {
	// All rows from the content provider, in display (sorted) order.
	private final List<@NonNull Wrapper> data = new ArrayList<@NonNull Wrapper>();
	// wrapper hash code -> GridItem; null values mark rows without an item.
	private final Map<@NonNull Integer, @Nullable GridItem> dataMapByR = new HashMap<>();
	// GridItem -> wrapped row.
	private final Map<@NonNull GridItem, @NonNull Wrapper> dataMapByCol = new HashMap<>();
	final @NonNull SWTGridTable<R> grid;
	private final @NonNull Property<@Nullable Comparator<@NonNull R>> defaultSortProperty = new SimpleProperty<>(null);
	private final @NonNull Property<@Nullable SWTGridColumn<R, ?>> sortColumnProperty = new SimpleProperty<>(null);
	private @Nullable XGridContentProvider<R> contentProvider;

	// Listener instances are held in fields so dispose() can remove exactly them.
	private final @NonNull ChangeListener<@Nullable Comparator<@NonNull R>> defaultSortChangeListener = this::handleDefaultSortChanged;
	private final @NonNull ChangeListener<@Nullable SWTGridColumn<R, ?>> sortColumnChangeListener = this::handleSortColumnChanged;
	private final @NonNull ContentChangeListener<R> contentChangeListener = this::handleContentChange;
	private final @NonNull ChangeListener<@Nullable XGridContentProvider<R>> contentProviderChangeListener = this::handleContentProviderChanged;

	/**
	 * @param grid
	 *            grid
	 */
	public SWTGridContentHandler(@NonNull SWTGridTable<R> grid) {
		this.grid = grid;
		registerPropertyListeners();
	}

	/**
	 * Attach sort/content-provider listeners; mirrored by dispose().
	 */
	private void registerPropertyListeners() {
		defaultSortProperty.addChangeListener(defaultSortChangeListener);
		sortColumnProperty.addChangeListener(sortColumnChangeListener);
		grid.contentProviderProperty().addChangeListener(contentProviderChangeListener);
	}

	private void handleDefaultSortChanged(Property<@Nullable Comparator<@NonNull R>> property,
			@Nullable Comparator<@NonNull R> oldValue, @Nullable Comparator<@NonNull R> newValue) {
		// Re-sort with the current provider.
		resetContent(contentProvider);
	}

	private void handleSortColumnChanged(Property<@Nullable SWTGridColumn<R, ?>> property,
			@Nullable SWTGridColumn<R, ?> oldValue, @Nullable SWTGridColumn<R, ?> newValue) {
		resetContent(contentProvider);
	}

	private void handleContentProviderChanged(Property<@Nullable XGridContentProvider<R>> property,
			@Nullable XGridContentProvider<R> oldValue, @Nullable XGridContentProvider<R> newValue) {
		resetContent(newValue);
	}

	/**
	 * reset content
	 */
	public void resetContent() {
		resetContent(contentProvider);
	}

	/**
	 * Rebuild rows and items from the given provider, moving the content-change
	 * listener if the provider changed, and restore as much of the previous
	 * selection as still exists in the new content.
	 *
	 * @param newContentProvider
	 *            the new content provider
	 */
	private void resetContent(@Nullable XGridContentProvider<R> newContentProvider) {
		XSelection<R> previousSelection = grid.selectionProperty().get();
		if (contentProvider != newContentProvider) {
			// Move the change listener from the old provider to the new one.
			@Nullable
			XGridContentProvider<R> oldContentProvider = contentProvider;
			if (oldContentProvider != null) {
				oldContentProvider.removeChangeListener(contentChangeListener);
			}
			contentProvider = newContentProvider;
			if (newContentProvider != null) {
				newContentProvider.addChangeListener(contentChangeListener);
			}
		}
		clearData();
		grid.getNebulaGrid().disposeAllItems();
		if (newContentProvider != null) {
			insertData(newContentProvider);
			createGridItems();
			List<@NonNull R> newSelection = new ArrayList<>();
			for (R element : previousSelection.asList()) {
				// find elements which match elements from old selection
				if (data.contains(new Wrapper(element))) {
					newSelection.add(element);
				}
			}
			// restore selection
			if (newSelection.isEmpty() && !data.isEmpty()) {
				// if there is no previous selection select the first item
				newSelection.add(data.get(0).get());
			}
			if (newSelection.isEmpty()) {
				grid.selectionProperty().set(Util.emptySelection());
			} else if (grid.selectionModeProperty().get() == SelectionMode.SINGLE_ROW) {
				grid.selectionProperty().set(new SimpleSelection<@NonNull R>(grid, newSelection, grid.getColumns()));
			} else if (grid.selectionModeProperty().get() == SelectionMode.SINGLE_CELL) {
				// NOTE(review): assumes the previous selection in SINGLE_CELL mode
				// is always a SimpleCellSelection - verify against callers.
				SimpleCellSelection<R> scs = (SimpleCellSelection<R>) previousSelection;
				List<XGridCell<@NonNull R, ?>> cellList = new ArrayList<XGridCell<@NonNull R, ?>>(scs.getCells());
				grid.selectionProperty().set(new SimpleCellSelection<@NonNull R>(grid, cellList, newSelection, grid.getColumns()));
			}
		}
		// pack auto-width columns so they fit the new content
		for (@NonNull
		XGridColumn<@NonNull R, @Nullable ?> xcol : grid.getColumns()) {
			if (xcol.autoWidthProperty().get().booleanValue()) {
				SWTGridColumn<R, ?> col = (SWTGridColumn<R, ?>) xcol;
				col.getNebulaColumn().pack();
			}
		}
	}

	/**
	 * @return comparator for the current sort column (respecting its UP/DOWN
	 *         direction), falling back to the default sort; {@code null} when
	 *         no ordering is configured
	 */
	private Comparator<@NonNull Wrapper> getComparator() {
		@Nullable
		SWTGridColumn<R, ?> column = sortColumnProperty.get();
		Comparator<@NonNull R> comp = null;
		if (column != null) {
			switch (column.sortingProperty().get()) {
			case UP:
				comp = column.sorterProperty().get();
				break;
			case DOWN:
				@Nullable
				Comparator<@NonNull R> originComparator = column.sorterProperty().get();
				if (originComparator != null) {
					comp = originComparator.reversed();
				}
				break;
			default:
				comp = defaultSortProperty.get();
			}
		} else if (defaultSortProperty.get() != null) {
			comp = defaultSortProperty.get();
		}
		if (comp != null) {
			// Adapt the row comparator to compare wrappers.
			final Comparator<@NonNull R> rComparator = comp;
			return (o1, o2) -> rComparator.compare(o1.get(), o2.get());
		}
		return null;
	}

	/**
	 * Wrap and copy all provider elements into {@link #data}, then sort them
	 * with the active comparator (if any).
	 */
	private void insertData(XGridContentProvider<R> newContentProvider) {
		for (int i = 0; i < newContentProvider.size(); i++) {
			data.add(new Wrapper(newContentProvider.getElementAt(i)));
		}
		Comparator<@NonNull Wrapper> comparator = getComparator();
		if (comparator != null) {
			data.sort(comparator);
		}
	}

	/**
	 * Drop all cached rows and both lookup maps.
	 */
	private void clearData() {
		data.clear();
		dataMapByR.clear();
		dataMapByCol.clear();
	}

	/**
	 * React to provider changes: structural changes rebuild everything, pure
	 * modifications only request cell updates for the affected rows.
	 */
	private void handleContentChange(@NonNull ContentChangeType type, @NonNull List<@NonNull R> values) {
		switch (type) {
		case ADD:
		case REMOVE:
			resetContent();
			break;
		case MODIFY:
			for (R element : values) {
				for (@NonNull
				XGridColumn<@NonNull R, @Nullable ?> col : grid.getColumns()) {
					col.requestUpdate(element);
				}
			}
			break;
		default:
			break;
		}
	}

	/**
	 * create a {@link GridItem} for each row where no {@link GridItem} exist an
	 * which is not excluded by current filter settings
	 */
	private void createGridItems() {
		// Suspend redraw while bulk-creating items.
		grid.getNebulaGrid().setRedraw(false);
		try {
			for (@NonNull
			Wrapper element : data) {
				// BUGFIX: dataMapByR is keyed by the wrapper's hash code
				// (Integer), so looking it up with the Wrapper itself could
				// never match and duplicate items were created. Look up by
				// the same key that put()/get(R) use.
				if (dataMapByR.get(Integer.valueOf(element.hashCode())) == null) {
					boolean visible = true;
					for (@NonNull
					XGridColumn<@NonNull R, @Nullable ?> xcol : grid.getColumns()) {
						SWTGridColumn<R, ?> col = (SWTGridColumn<R, ?>) xcol;
						visible = col.matchesColumnFilter(element.get());
						if (!visible) {
							break;
						}
					}
					if (visible) {
						final GridItem item = new GridItem(grid.getNebulaGrid(), SWT.NONE);
						// Integer.valueOf instead of deprecated new Integer(..)
						dataMapByR.put(Integer.valueOf(element.hashCode()), item);
						dataMapByCol.put(item, element);
						for (XGridColumn<@NonNull R, @Nullable ?> col : grid.getColumns()) {
							col.requestUpdate(element.get());
						}
					}
				}
			}
		} finally {
			grid.getNebulaGrid().setRedraw(true);
		}
	}

	/**
	 * @param item
	 *            grid item
	 * @return corresponding row value
	 */
	public R get(@NonNull GridItem item) {
		return dataMapByCol.get(item).get();
	}

	/**
	 * @param r
	 *            row value
	 * @return corresponding GridItem
	 */
	public @Nullable GridItem get(@NonNull R r) {
		// Lookup uses the same wrapper-hash key written by createGridItems().
		return dataMapByR.get(Integer.valueOf(new Wrapper(r).hashCode()));
	}

	/**
	 * @return the defaultSortProperty
	 */
	public @NonNull Property<@Nullable Comparator<@NonNull R>> defaultSortProperty() {
		return defaultSortProperty;
	}

	/**
	 * @return the sortProperty
	 */
	public Property<@Nullable SWTGridColumn<R, ?>> sortColumnProperty() {
		return sortColumnProperty;
	}

	/**
	 * @return list of visible elements
	 */
	@SuppressWarnings("null")
	public @NonNull List<@NonNull R> getVisibleElements() {
		return dataMapByCol.values().stream().map((x) -> x.get()).collect(Collectors.<@NonNull R> toList());
	}

	/**
	 * @param allRows
	 *            export all elements if <code>true</code>, otherwise exclude
	 *            filtered items
	 * @return export data
	 */
	@SuppressWarnings("null")
	public @NonNull Object[][] getData(boolean allRows) {
		if (allRows) {
			return getData(data);
		} else {
			// Keep only rows that currently have a GridItem (not filtered out).
			List<@NonNull Wrapper> unFilteredElements = new ArrayList<>();
			for (Wrapper w : data) {
				if (get(w.get()) != null) {
					unFilteredElements.add(w);
				}
			}
			return getData(unFilteredElements);
		}
	}

	/**
	 * @return one Object row per wrapper, one cell per column, using each
	 *         column's export value
	 */
	private Object[][] getData(List<@NonNull Wrapper> rows) {
		Object[][] exportData = new Object[rows.size()][grid.getColumns().size()];
		int rowIndex = 0;
		for (Wrapper element : rows) {
			int colIndex = 0;
			for (XGridColumn<R, ?> column : grid.getColumns()) {
				exportData[rowIndex][colIndex] = column.getExportValue(element.get());
				colIndex++;
			}
			rowIndex++;
		}
		return exportData;
	}

	/**
	 * dispose content handler.
	 * <p>
	 * remove listeners &amp; do necessary cleanup
	 * </p>
	 */
	public void dispose() {
		defaultSortProperty.removeChangeListener(defaultSortChangeListener);
		sortColumnProperty.removeChangeListener(sortColumnChangeListener);
		@Nullable
		XGridContentProvider<R> cp = contentProvider;
		if (cp != null) {
			cp.removeChangeListener(contentChangeListener);
		}
		grid.contentProviderProperty().removeChangeListener(contentProviderChangeListener);
	}

	/**
	 * content wrapper delegating equals/hashCode to the grid's element
	 * comparer, so map/list lookups follow the grid's identity semantics
	 */
	private class Wrapper {
		private final @NonNull R object;

		public Wrapper(@NonNull R object) {
			this.object = object;
		}

		public @NonNull R get() {
			return object;
		}

		@Override
		public int hashCode() {
			return grid.elementComparerProperty().get().hashCode(object);
		}

		@Override
		public boolean equals(Object obj) {
			if (obj != null && obj instanceof SWTGridContentHandler.Wrapper) {
				@SuppressWarnings("unchecked")
				Wrapper w = (Wrapper) obj;
				return grid.elementComparerProperty().get().equals(object, w.get());
			}
			return false;
		}
	}
}
/* * Copyright 2021 Red Hat, Inc. and/or its affiliates. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.optaplanner.core.impl.score.stream.drools.common; import static java.util.Collections.singletonList; import static org.drools.model.DSL.exists; import static org.drools.model.DSL.not; import static org.drools.model.PatternDSL.betaIndexedBy; import static org.drools.model.PatternDSL.pattern; import java.math.BigDecimal; import java.util.Arrays; import java.util.List; import java.util.Objects; import java.util.function.Function; import java.util.function.IntFunction; import java.util.stream.Stream; import org.drools.model.BetaIndex4; import org.drools.model.DSL; import org.drools.model.PatternDSL; import org.drools.model.Variable; import org.drools.model.functions.Function4; import org.drools.model.functions.Predicate5; import org.drools.model.functions.accumulate.AccumulateFunction; import org.drools.model.view.ViewItem; import org.optaplanner.core.api.function.PentaPredicate; import org.optaplanner.core.api.function.QuadFunction; import org.optaplanner.core.api.function.QuadPredicate; import org.optaplanner.core.api.function.ToIntQuadFunction; import org.optaplanner.core.api.function.ToLongQuadFunction; import org.optaplanner.core.api.score.stream.penta.PentaJoiner; import org.optaplanner.core.api.score.stream.quad.QuadConstraintCollector; import org.optaplanner.core.impl.score.stream.common.JoinerType; import 
org.optaplanner.core.impl.score.stream.drools.DroolsVariableFactory; import org.optaplanner.core.impl.score.stream.penta.AbstractPentaJoiner; import org.optaplanner.core.impl.score.stream.penta.FilteringPentaJoiner; import org.optaplanner.core.impl.score.stream.penta.NonePentaJoiner; import org.optaplanner.core.impl.score.stream.tri.NoneTriJoiner; /** * Represents the left hand side of a Drools rule, the result of which are four variables. * For more, see {@link UniLeftHandSide} and {@link BiLeftHandSide}. * * @param <A> generic type of the first resulting variable * @param <B> generic type of the second resulting variable * @param <C> generic type of the third resulting variable * @param <D> generic type of the fourth resulting variable */ public final class QuadLeftHandSide<A, B, C, D> extends AbstractLeftHandSide { private final PatternVariable<A, ?, ?> patternVariableA; private final PatternVariable<B, ?, ?> patternVariableB; private final PatternVariable<C, ?, ?> patternVariableC; private final PatternVariable<D, ?, ?> patternVariableD; private final QuadRuleContext<A, B, C, D> ruleContext; protected QuadLeftHandSide(Variable<A> variableA, Variable<B> variableB, Variable<C> variableC, PatternVariable<D, ?, ?> patternVariableD, DroolsVariableFactory variableFactory) { this(new DetachedPatternVariable<>(variableA), new DetachedPatternVariable<>(variableB), new DetachedPatternVariable<>(variableC), patternVariableD, variableFactory); } protected QuadLeftHandSide(PatternVariable<A, ?, ?> patternVariableA, PatternVariable<B, ?, ?> patternVariableB, PatternVariable<C, ?, ?> patternVariableC, PatternVariable<D, ?, ?> patternVariableD, DroolsVariableFactory variableFactory) { super(variableFactory); this.patternVariableA = Objects.requireNonNull(patternVariableA); this.patternVariableB = Objects.requireNonNull(patternVariableB); this.patternVariableC = Objects.requireNonNull(patternVariableC); this.patternVariableD = Objects.requireNonNull(patternVariableD); 
this.ruleContext = buildRuleContext(); } private QuadRuleContext<A, B, C, D> buildRuleContext() { ViewItem<?>[] viewItems = Stream.of(patternVariableA, patternVariableB, patternVariableC, patternVariableD) .flatMap(variable -> variable.build().stream()) .toArray((IntFunction<ViewItem<?>[]>) ViewItem[]::new); return new QuadRuleContext<>(patternVariableA.getPrimaryVariable(), patternVariableB.getPrimaryVariable(), patternVariableC.getPrimaryVariable(), patternVariableD.getPrimaryVariable(), viewItems); } public QuadLeftHandSide<A, B, C, D> andFilter(QuadPredicate<A, B, C, D> predicate) { return new QuadLeftHandSide<>(patternVariableA, patternVariableB, patternVariableC, patternVariableD.filter(predicate, patternVariableA.getPrimaryVariable(), patternVariableB.getPrimaryVariable(), patternVariableC.getPrimaryVariable()), variableFactory); } private <E> QuadLeftHandSide<A, B, C, D> applyJoiners(Class<E> otherFactType, AbstractPentaJoiner<A, B, C, D, E> joiner, PentaPredicate<A, B, C, D, E> predicate, boolean shouldExist) { Variable<E> toExist = variableFactory.createVariable(otherFactType, "toExist"); PatternDSL.PatternDef<E> existencePattern = pattern(toExist); if (joiner == null) { return applyFilters(existencePattern, predicate, shouldExist); } JoinerType[] joinerTypes = joiner.getJoinerTypes(); for (int mappingIndex = 0; mappingIndex < joinerTypes.length; mappingIndex++) { JoinerType joinerType = joinerTypes[mappingIndex]; QuadFunction<A, B, C, D, Object> leftMapping = joiner.getLeftMapping(mappingIndex); Function<E, Object> rightMapping = joiner.getRightMapping(mappingIndex); Predicate5<E, A, B, C, D> joinPredicate = (e, a, b, c, d) -> joinerType.matches(leftMapping.apply(a, b, c, d), rightMapping.apply(e)); BetaIndex4<E, A, B, C, D, ?> index = betaIndexedBy(Object.class, getConstraintType(joinerType), mappingIndex, rightMapping::apply, leftMapping::apply, Object.class); existencePattern = existencePattern.expr("Join using joiner #" + mappingIndex + " in " + 
joiner, patternVariableA.getPrimaryVariable(), patternVariableB.getPrimaryVariable(), patternVariableC.getPrimaryVariable(), patternVariableD.getPrimaryVariable(), joinPredicate, index); } return applyFilters(existencePattern, predicate, shouldExist); } private <E> QuadLeftHandSide<A, B, C, D> applyFilters(PatternDSL.PatternDef<E> existencePattern, PentaPredicate<A, B, C, D, E> predicate, boolean shouldExist) { PatternDSL.PatternDef<E> possiblyFilteredExistencePattern = predicate == null ? existencePattern : existencePattern.expr("Filter using " + predicate, patternVariableA.getPrimaryVariable(), patternVariableB.getPrimaryVariable(), patternVariableC.getPrimaryVariable(), patternVariableD.getPrimaryVariable(), (e, a, b, c, d) -> predicate.test(a, b, c, d, e)); ViewItem<?> existenceExpression = exists(possiblyFilteredExistencePattern); if (!shouldExist) { existenceExpression = not(possiblyFilteredExistencePattern); } return new QuadLeftHandSide<>(patternVariableA, patternVariableB, patternVariableC, patternVariableD.addDependentExpression(existenceExpression), variableFactory); } private <E> QuadLeftHandSide<A, B, C, D> existsOrNot(Class<E> dClass, PentaJoiner<A, B, C, D, E>[] joiners, boolean shouldExist) { int indexOfFirstFilter = -1; // Prepare the joiner and filter that will be used in the pattern AbstractPentaJoiner<A, B, C, D, E> finalJoiner = null; PentaPredicate<A, B, C, D, E> finalFilter = null; for (int i = 0; i < joiners.length; i++) { AbstractPentaJoiner<A, B, C, D, E> joiner = (AbstractPentaJoiner<A, B, C, D, E>) joiners[i]; boolean hasAFilter = indexOfFirstFilter >= 0; if (joiner instanceof NonePentaJoiner && joiners.length > 1) { throw new IllegalStateException("If present, " + NoneTriJoiner.class + " must be the only joiner, got " + Arrays.toString(joiners) + " instead."); } else if (!(joiner instanceof FilteringPentaJoiner)) { if (hasAFilter) { throw new IllegalStateException("Indexing joiner (" + joiner + ") must not follow a filtering joiner (" + 
joiners[indexOfFirstFilter] + ")."); } else { // Merge this Joiner with the existing Joiners. finalJoiner = finalJoiner == null ? joiner : AbstractPentaJoiner.merge(finalJoiner, joiner); } } else { if (!hasAFilter) { // From now on, we only allow filtering joiners. indexOfFirstFilter = i; } // Merge all filters into one to avoid paying the penalty for lack of indexing more than once. finalFilter = finalFilter == null ? joiner.getFilter() : finalFilter.and(joiner.getFilter()); } } return applyJoiners(dClass, finalJoiner, finalFilter, shouldExist); } public <E> QuadLeftHandSide<A, B, C, D> andExists(Class<E> dClass, PentaJoiner<A, B, C, D, E>[] joiners) { return existsOrNot(dClass, joiners, true); } public <E> QuadLeftHandSide<A, B, C, D> andNotExists(Class<E> dClass, PentaJoiner<A, B, C, D, E>[] joiners) { return existsOrNot(dClass, joiners, false); } public <NewA> UniLeftHandSide<NewA> andGroupBy(QuadConstraintCollector<A, B, C, D, ?, NewA> collector) { Variable<NewA> accumulateOutput = variableFactory.createVariable("collected"); ViewItem<?> outerAccumulatePattern = buildAccumulate(createAccumulateFunction(collector, accumulateOutput)); return new UniLeftHandSide<>(accumulateOutput, singletonList(outerAccumulatePattern), variableFactory); } public <NewA, NewB> BiLeftHandSide<NewA, NewB> andGroupBy(QuadConstraintCollector<A, B, C, D, ?, NewA> collectorA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB) { Variable<NewA> accumulateOutputA = variableFactory.createVariable("collectedA"); Variable<NewB> accumulateOutputB = variableFactory.createVariable("collectedB"); ViewItem<?> outerAccumulatePattern = buildAccumulate(createAccumulateFunction(collectorA, accumulateOutputA), createAccumulateFunction(collectorB, accumulateOutputB)); return new BiLeftHandSide<>(accumulateOutputA, new DirectPatternVariable<>(accumulateOutputB, outerAccumulatePattern), variableFactory); } public <NewA, NewB, NewC> TriLeftHandSide<NewA, NewB, NewC> andGroupBy( 
QuadConstraintCollector<A, B, C, D, ?, NewA> collectorA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB, QuadConstraintCollector<A, B, C, D, ?, NewC> collectorC) { Variable<NewA> accumulateOutputA = variableFactory.createVariable("collectedA"); Variable<NewB> accumulateOutputB = variableFactory.createVariable("collectedB"); Variable<NewC> accumulateOutputC = variableFactory.createVariable("collectedC"); ViewItem<?> outerAccumulatePattern = buildAccumulate(createAccumulateFunction(collectorA, accumulateOutputA), createAccumulateFunction(collectorB, accumulateOutputB), createAccumulateFunction(collectorC, accumulateOutputC)); return new TriLeftHandSide<>(accumulateOutputA, accumulateOutputB, new DirectPatternVariable<>(accumulateOutputC, outerAccumulatePattern), variableFactory); } public <NewA, NewB, NewC, NewD> QuadLeftHandSide<NewA, NewB, NewC, NewD> andGroupBy( QuadConstraintCollector<A, B, C, D, ?, NewA> collectorA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB, QuadConstraintCollector<A, B, C, D, ?, NewC> collectorC, QuadConstraintCollector<A, B, C, D, ?, NewD> collectorD) { Variable<NewA> accumulateOutputA = variableFactory.createVariable("collectedA"); Variable<NewB> accumulateOutputB = variableFactory.createVariable("collectedB"); Variable<NewC> accumulateOutputC = variableFactory.createVariable("collectedC"); Variable<NewD> accumulateOutputD = variableFactory.createVariable("collectedD"); ViewItem<?> outerAccumulatePattern = buildAccumulate(createAccumulateFunction(collectorA, accumulateOutputA), createAccumulateFunction(collectorB, accumulateOutputB), createAccumulateFunction(collectorC, accumulateOutputC), createAccumulateFunction(collectorD, accumulateOutputD)); return new QuadLeftHandSide<>(accumulateOutputA, accumulateOutputB, accumulateOutputC, new DirectPatternVariable<>(accumulateOutputD, outerAccumulatePattern), variableFactory); } /** * Creates a Drools accumulate function based on a given collector. 
The accumulate function will take one * {@link Variable} as input and return its result into another {@link Variable}. * * @param <Out> type of the accumulate result * @param collector collector to use in the accumulate function * @param out variable in which to store accumulate result * @return Drools accumulate function */ private <Out> AccumulateFunction createAccumulateFunction(QuadConstraintCollector<A, B, C, D, ?, Out> collector, Variable<Out> out) { Variable<A> variableA = patternVariableA.getPrimaryVariable(); Variable<B> variableB = patternVariableB.getPrimaryVariable(); Variable<C> variableC = patternVariableC.getPrimaryVariable(); Variable<D> variableD = patternVariableD.getPrimaryVariable(); return new AccumulateFunction(null, () -> new QuadAccumulator<>(variableA, variableB, variableC, variableD, collector)) .with(variableA, variableB, variableC, variableD) .as(out); } public <NewA> UniLeftHandSide<NewA> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMapping) { Variable<NewA> groupKey = variableFactory.createVariable("groupKey"); ViewItem<?> groupByPattern = buildGroupBy(groupKey, keyMapping::apply); return new UniLeftHandSide<>(groupKey, singletonList(groupByPattern), variableFactory); } public <NewA, NewB> BiLeftHandSide<NewA, NewB> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMappingA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB) { Variable<NewA> groupKey = variableFactory.createVariable("groupKey"); Variable<NewB> accumulateOutput = variableFactory.createVariable("output"); ViewItem<?> groupByPattern = buildGroupBy(groupKey, keyMappingA::apply, createAccumulateFunction(collectorB, accumulateOutput)); return new BiLeftHandSide<>(groupKey, new DirectPatternVariable<>(accumulateOutput, groupByPattern), variableFactory); } public <NewA, NewB, NewC> TriLeftHandSide<NewA, NewB, NewC> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMappingA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB, QuadConstraintCollector<A, B, C, D, ?, NewC> 
collectorC) { Variable<NewA> groupKey = variableFactory.createVariable("groupKey"); Variable<NewB> accumulateOutputB = variableFactory.createVariable("outputB"); Variable<NewC> accumulateOutputC = variableFactory.createVariable("outputC"); ViewItem<?> groupByPattern = buildGroupBy(groupKey, keyMappingA::apply, createAccumulateFunction(collectorB, accumulateOutputB), createAccumulateFunction(collectorC, accumulateOutputC)); return new TriLeftHandSide<>(groupKey, accumulateOutputB, new DirectPatternVariable<>(accumulateOutputC, groupByPattern), variableFactory); } public <NewA, NewB, NewC, NewD> QuadLeftHandSide<NewA, NewB, NewC, NewD> andGroupBy( QuadFunction<A, B, C, D, NewA> keyMappingA, QuadConstraintCollector<A, B, C, D, ?, NewB> collectorB, QuadConstraintCollector<A, B, C, D, ?, NewC> collectorC, QuadConstraintCollector<A, B, C, D, ?, NewD> collectorD) { Variable<NewA> groupKey = variableFactory.createVariable("groupKey"); Variable<NewB> accumulateOutputB = variableFactory.createVariable("outputB"); Variable<NewC> accumulateOutputC = variableFactory.createVariable("outputC"); Variable<NewD> accumulateOutputD = variableFactory.createVariable("outputD"); ViewItem<?> groupByPattern = buildGroupBy(groupKey, keyMappingA::apply, createAccumulateFunction(collectorB, accumulateOutputB), createAccumulateFunction(collectorC, accumulateOutputC), createAccumulateFunction(collectorD, accumulateOutputD)); return new QuadLeftHandSide<>(groupKey, accumulateOutputB, accumulateOutputC, new DirectPatternVariable<>(accumulateOutputD, groupByPattern), variableFactory); } public <NewA, NewB> BiLeftHandSide<NewA, NewB> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB) { Variable<BiTuple<NewA, NewB>> groupKey = variableFactory.createVariable(BiTuple.class, "groupKey"); ViewItem<?> groupByPattern = buildGroupBy(groupKey, createCompositeBiGroupKey(keyMappingA, keyMappingB)); Variable<NewA> newA = variableFactory.createVariable("newA"); 
// NOTE(review): this excerpt begins mid-method. The statements below are the tail of the
// two-key-mapping andGroupBy(keyMappingA, keyMappingB) overload, whose signature and
// opening statements precede this chunk. They unpack the composite BiTuple group key
// into two fresh variables and expose them as a BiLeftHandSide.
Variable<NewB> newB = variableFactory.createVariable("newB");
IndirectPatternVariable<NewB, BiTuple<NewA, NewB>> bPatternVar =
        decompose(groupKey, groupByPattern, newA, newB);
return new BiLeftHandSide<>(newA, bPatternVar, variableFactory);
}

/**
 * Takes group key mappings and merges them in such a way that the result is a single composite key.
 * This is necessary because Drools groupBy can only take a single key - therefore multiple variables need to be
 * converted into a singular composite variable.
 *
 * @param keyMappingA mapping for the first variable
 * @param keyMappingB mapping for the second variable
 * @param <NewA> generic type of the first variable
 * @param <NewB> generic type of the second variable
 * @return never null, Drools function to convert the keys to a singular composite key
 */
private <NewA, NewB> Function4<A, B, C, D, BiTuple<NewA, NewB>> createCompositeBiGroupKey(
        QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB) {
    return (a, b, c, d) -> new BiTuple<>(keyMappingA.apply(a, b, c, d), keyMappingB.apply(a, b, c, d));
}

// Group by two key mappings plus one collector: the composite BiTuple key is built by
// createCompositeBiGroupKey(), and the collector's result is bound to a separate output variable.
public <NewA, NewB, NewC> TriLeftHandSide<NewA, NewB, NewC> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMappingA,
        QuadFunction<A, B, C, D, NewB> keyMappingB, QuadConstraintCollector<A, B, C, D, ?, NewC> collectorC) {
    Variable<BiTuple<NewA, NewB>> groupKey = variableFactory.createVariable(BiTuple.class, "groupKey");
    Variable<NewC> accumulateOutput = variableFactory.createVariable("output");
    ViewItem<?> groupByPattern = buildGroupBy(groupKey, createCompositeBiGroupKey(keyMappingA, keyMappingB),
            createAccumulateFunction(collectorC, accumulateOutput));
    Variable<NewA> newA = variableFactory.createVariable("newA");
    Variable<NewB> newB = variableFactory.createVariable("newB");
    DirectPatternVariable<NewC> cPatternVar =
            decomposeWithAccumulate(groupKey, groupByPattern, newA, newB, accumulateOutput);
    return new TriLeftHandSide<>(newA, newB, cPatternVar, variableFactory);
}

// Group by two key mappings plus two collectors; both accumulate outputs are bound alongside
// the decomposed composite key. The final pattern variable carries the last output (NewD).
public <NewA, NewB, NewC, NewD> QuadLeftHandSide<NewA, NewB, NewC, NewD> andGroupBy(
        QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB,
        QuadConstraintCollector<A, B, C, D, ?, NewC> collectorC,
        QuadConstraintCollector<A, B, C, D, ?, NewD> collectorD) {
    Variable<BiTuple<NewA, NewB>> groupKey = variableFactory.createVariable(BiTuple.class, "groupKey");
    Variable<NewC> accumulateOutputC = variableFactory.createVariable("outputC");
    Variable<NewD> accumulateOutputD = variableFactory.createVariable("outputD");
    ViewItem<?> groupByPattern = buildGroupBy(groupKey, createCompositeBiGroupKey(keyMappingA, keyMappingB),
            createAccumulateFunction(collectorC, accumulateOutputC),
            createAccumulateFunction(collectorD, accumulateOutputD));
    Variable<NewA> newA = variableFactory.createVariable("newA");
    Variable<NewB> newB = variableFactory.createVariable("newB");
    DirectPatternVariable<NewD> dPatternVar =
            decomposeWithAccumulate(groupKey, groupByPattern, newA, newB, accumulateOutputD);
    return new QuadLeftHandSide<>(newA, newB, accumulateOutputC, dPatternVar, variableFactory);
}

/**
 * Takes group key mappings and merges them in such a way that the result is a single composite key.
 * This is necessary because Drools groupBy can only take a single key - therefore multiple variables need to be
 * converted into a singular composite variable.
 *
 * @param keyMappingA mapping for the first variable
 * @param keyMappingB mapping for the second variable
 * @param keyMappingC mapping for the third variable
 * @param <NewA> generic type of the first variable
 * @param <NewB> generic type of the second variable
 * @param <NewC> generic type of the third variable
 * @return never null, Drools function to convert the keys to a singular composite key
 */
private <NewA, NewB, NewC> Function4<A, B, C, D, TriTuple<NewA, NewB, NewC>> createCompositeTriGroupKey(
        QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB,
        QuadFunction<A, B, C, D, NewC> keyMappingC) {
    return (a, b, c, d) -> new TriTuple<>(keyMappingA.apply(a, b, c, d), keyMappingB.apply(a, b, c, d),
            keyMappingC.apply(a, b, c, d));
}

// Group by three key mappings, no collectors; all three components of the composite
// TriTuple key are decomposed back into separate variables.
public <NewA, NewB, NewC> TriLeftHandSide<NewA, NewB, NewC> andGroupBy(QuadFunction<A, B, C, D, NewA> keyMappingA,
        QuadFunction<A, B, C, D, NewB> keyMappingB, QuadFunction<A, B, C, D, NewC> keyMappingC) {
    Variable<TriTuple<NewA, NewB, NewC>> groupKey = variableFactory.createVariable(TriTuple.class, "groupKey");
    ViewItem<?> groupByPattern = buildGroupBy(groupKey,
            createCompositeTriGroupKey(keyMappingA, keyMappingB, keyMappingC));
    Variable<NewA> newA = variableFactory.createVariable("newA");
    Variable<NewB> newB = variableFactory.createVariable("newB");
    Variable<NewC> newC = variableFactory.createVariable("newC");
    IndirectPatternVariable<NewC, TriTuple<NewA, NewB, NewC>> cPatternVar =
            decompose(groupKey, groupByPattern, newA, newB, newC);
    return new TriLeftHandSide<>(newA, newB, cPatternVar, variableFactory);
}

// Group by three key mappings plus one collector.
public <NewA, NewB, NewC, NewD> QuadLeftHandSide<NewA, NewB, NewC, NewD> andGroupBy(
        QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB,
        QuadFunction<A, B, C, D, NewC> keyMappingC, QuadConstraintCollector<A, B, C, D, ?, NewD> collectorD) {
    Variable<TriTuple<NewA, NewB, NewC>> groupKey = variableFactory.createVariable(TriTuple.class, "groupKey");
    Variable<NewD> accumulateOutputD = variableFactory.createVariable("outputD");
    ViewItem<?> groupByPattern = buildGroupBy(groupKey,
            createCompositeTriGroupKey(keyMappingA, keyMappingB, keyMappingC),
            createAccumulateFunction(collectorD, accumulateOutputD));
    Variable<NewA> newA = variableFactory.createVariable("newA");
    Variable<NewB> newB = variableFactory.createVariable("newB");
    Variable<NewC> newC = variableFactory.createVariable("newC");
    DirectPatternVariable<NewD> dPatternVar =
            decomposeWithAccumulate(groupKey, groupByPattern, newA, newB, newC, accumulateOutputD);
    return new QuadLeftHandSide<>(newA, newB, newC, dPatternVar, variableFactory);
}

/**
 * Takes group key mappings and merges them in such a way that the result is a single composite key.
 * This is necessary because Drools groupBy can only take a single key - therefore multiple variables need to be
 * converted into a singular composite variable.
 *
 * @param keyMappingA mapping for the first variable
 * @param keyMappingB mapping for the second variable
 * @param keyMappingC mapping for the third variable
 * @param keyMappingD mapping for the fourth variable
 * @param <NewA> generic type of the first variable
 * @param <NewB> generic type of the second variable
 * @param <NewC> generic type of the third variable
 * @param <NewD> generic type of the fourth variable
 * @return never null, Drools function to convert the keys to a singular composite key
 */
private <NewA, NewB, NewC, NewD> Function4<A, B, C, D, QuadTuple<NewA, NewB, NewC, NewD>>
        createCompositeQuadGroupKey(QuadFunction<A, B, C, D, NewA> keyMappingA,
                QuadFunction<A, B, C, D, NewB> keyMappingB, QuadFunction<A, B, C, D, NewC> keyMappingC,
                QuadFunction<A, B, C, D, NewD> keyMappingD) {
    return (a, b, c, d) -> new QuadTuple<>(keyMappingA.apply(a, b, c, d), keyMappingB.apply(a, b, c, d),
            keyMappingC.apply(a, b, c, d), keyMappingD.apply(a, b, c, d));
}

// Group by four key mappings, no collectors; the QuadTuple composite key is fully decomposed.
public <NewA, NewB, NewC, NewD> QuadLeftHandSide<NewA, NewB, NewC, NewD> andGroupBy(
        QuadFunction<A, B, C, D, NewA> keyMappingA, QuadFunction<A, B, C, D, NewB> keyMappingB,
        QuadFunction<A, B, C, D, NewC> keyMappingC, QuadFunction<A, B, C, D, NewD> keyMappingD) {
    Variable<QuadTuple<NewA, NewB, NewC, NewD>> groupKey = variableFactory.createVariable(QuadTuple.class, "groupKey");
    ViewItem<?> groupByPattern = buildGroupBy(groupKey,
            createCompositeQuadGroupKey(keyMappingA, keyMappingB, keyMappingC, keyMappingD));
    Variable<NewA> newA = variableFactory.createVariable("newA");
    Variable<NewB> newB = variableFactory.createVariable("newB");
    Variable<NewC> newC = variableFactory.createVariable("newC");
    Variable<NewD> newD = variableFactory.createVariable("newD");
    IndirectPatternVariable<NewD, QuadTuple<NewA, NewB, NewC, NewD>> dPatternVar =
            decompose(groupKey, groupByPattern, newA, newB, newC, newD);
    return new QuadLeftHandSide<>(newA, newB, newC, dPatternVar, variableFactory);
}

// Maps the (A, B, C, D) tuple to a single new value; the mapped variable depends on all
// four primary variables and inherits all of their pattern prerequisites.
public <NewA> UniLeftHandSide<NewA> andMap(QuadFunction<A, B, C, D, NewA> mapping) {
    Variable<NewA> newA = variableFactory.createVariable("mapped", patternVariableA.getPrimaryVariable(),
            patternVariableB.getPrimaryVariable(), patternVariableC.getPrimaryVariable(),
            patternVariableD.getPrimaryVariable(), mapping);
    List<ViewItem<?>> allPrerequisites = mergeViewItems(patternVariableA, patternVariableB, patternVariableC,
            patternVariableD);
    DirectPatternVariable<NewA> newPatternVariableA = new DirectPatternVariable<>(newA, allPrerequisites);
    return new UniLeftHandSide<>(newPatternVariableA, variableFactory);
}

// Flattens the last fact (D) into individual NewD facts, keeping A, B and C unchanged.
public <NewD> QuadLeftHandSide<A, B, C, NewD> andFlattenLast(Function<D, Iterable<NewD>> mapping) {
    Variable<D> source = patternVariableD.getPrimaryVariable();
    Variable<NewD> newD = variableFactory.createFlattenedVariable("flattened", source, mapping);
    List<ViewItem<?>> allPrerequisites = mergeViewItems(patternVariableA, patternVariableB, patternVariableC,
            patternVariableD);
    PatternVariable<NewD, ?, ?> newPatternVariableD = new DirectPatternVariable<>(newD, allPrerequisites);
    return new QuadLeftHandSide<>(patternVariableA.getPrimaryVariable(), patternVariableB.getPrimaryVariable(),
            patternVariableC.getPrimaryVariable(), newPatternVariableD, variableFactory);
}

// Terminates the stream without a match weight (constraint weight applies as-is).
public <Solution_> RuleBuilder<Solution_> andTerminate() {
    return ruleContext.newRuleBuilder();
}

// Terminates the stream with an int match weight.
public <Solution_> RuleBuilder<Solution_> andTerminate(ToIntQuadFunction<A, B, C, D> matchWeighter) {
    return ruleContext.newRuleBuilder(matchWeighter);
}

// Terminates the stream with a long match weight.
public <Solution_> RuleBuilder<Solution_> andTerminate(ToLongQuadFunction<A, B, C, D> matchWeighter) {
    return ruleContext.newRuleBuilder(matchWeighter);
}

// Terminates the stream with a BigDecimal match weight.
public <Solution_> RuleBuilder<Solution_> andTerminate(QuadFunction<A, B, C, D, BigDecimal> matchWeighter) {
    return ruleContext.newRuleBuilder(matchWeighter);
}

// Joins all four pattern variables into a single inner pattern and wraps it in an
// accumulate with the given functions; delegates to the shared overload.
private ViewItem<?> buildAccumulate(AccumulateFunction... accFunctions) {
    ViewItem<?> innerAccumulatePattern = joinViewItemsWithLogicalAnd(patternVariableA, patternVariableB,
            patternVariableC, patternVariableD);
    return buildAccumulate(innerAccumulatePattern, accFunctions);
}

// Builds the Drools groupBy view item over all four input variables, applying the
// composite key extractor and any accumulate functions.
private <GroupKey_> ViewItem<?> buildGroupBy(Variable<GroupKey_> groupKey,
        Function4<A, B, C, D, GroupKey_> groupKeyExtractor, AccumulateFunction... accFunctions) {
    Variable<A> inputA = patternVariableA.getPrimaryVariable();
    Variable<B> inputB = patternVariableB.getPrimaryVariable();
    Variable<C> inputC = patternVariableC.getPrimaryVariable();
    Variable<D> inputD = patternVariableD.getPrimaryVariable();
    ViewItem<?> innerGroupByPattern = joinViewItemsWithLogicalAnd(patternVariableA, patternVariableB,
            patternVariableC, patternVariableD);
    return DSL.groupBy(innerGroupByPattern, inputA, inputB, inputC, inputD, groupKey, groupKeyExtractor,
            accFunctions);
}

}
/*
 * Copyright 2013 The Closure Compiler Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.javascript.jscomp;

import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;

import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.javascript.jscomp.newtypes.Declaration;
import com.google.javascript.jscomp.newtypes.DeclaredFunctionType;
import com.google.javascript.jscomp.newtypes.DeclaredTypeRegistry;
import com.google.javascript.jscomp.newtypes.EnumType;
import com.google.javascript.jscomp.newtypes.FunctionNamespace;
import com.google.javascript.jscomp.newtypes.JSType;
import com.google.javascript.jscomp.newtypes.JSTypeCreatorFromJSDoc;
import com.google.javascript.jscomp.newtypes.JSTypes;
import com.google.javascript.jscomp.newtypes.Namespace;
import com.google.javascript.jscomp.newtypes.NamespaceLit;
import com.google.javascript.jscomp.newtypes.QualifiedName;
import com.google.javascript.jscomp.newtypes.RawNominalType;
import com.google.javascript.jscomp.newtypes.Typedef;
import com.google.javascript.rhino.JSDocInfo;
import com.google.javascript.rhino.Node;
import com.google.javascript.rhino.TypeIEnv;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

/**
 * A single lexical scope (the top level or a function) used by the new type inference.
 * Tracks the locals, formals, externs, typedefs, enums, namespaces and nested function
 * definitions declared in the scope, and resolves names through the parent chain.
 *
 * @author blickly@google.com (Ben Lickly)
 * @author dimvar@google.com (Dimitris Vardoulakis)
 */
final class NTIScope implements DeclaredTypeRegistry, Serializable, TypeIEnv<JSType> {
  private final NTIScope parent;
  private final Node root;
  // Name on the function AST node; null for top scope & anonymous functions
  private final String name;
  private final JSTypes commonTypes;
  // Becomes true after freezeScope is run; so it's true during NTI.
  private boolean isFrozen = false;
  // A local w/out declared type is mapped to null, not to this.commonTypes.UNKNOWN.
  private final Map<String, JSType> locals = new LinkedHashMap<>();
  private final Map<String, JSType> externs;
  private final Set<String> constVars = new LinkedHashSet<>();
  private final List<String> formals;
  // Variables that are defined in this scope and used in inner scopes.
  private Set<String> escapedVars = new LinkedHashSet<>();
  // outerVars are the variables that appear free in this scope
  // and are defined in an outer scope.
  private final Set<String> outerVars = new LinkedHashSet<>();
  // When a function is also used as a namespace, we add entries to both
  // localFunDefs and localNamespaces. After freezeScope (when NTI runs),
  // the function has an entry in localFunDefs, and in locals or externs.
  private final Map<String, NTIScope> localFunDefs = new LinkedHashMap<>();
  // Names that were goog.forwardDeclare'd (or similar); only meaningful on the top scope.
  private ImmutableSet<String> unknownTypeNames = ImmutableSet.of();
  private Map<String, Typedef> localTypedefs = new LinkedHashMap<>();
  // Typedefs defined inside this scope, but on a namespace, not as local variables
  private Set<Typedef> namespaceTypedefs = new LinkedHashSet<>();
  private Map<String, Namespace> localNamespaces = new LinkedHashMap<>();
  // The namespace map that we preserve post-finalization, purely for use
  // in GlobalTypeInfo for symbol table purposes.
  private Map<String, Namespace> preservedNamespaces;
  // The set localEnums is used for enum resolution, and then discarded.
  private Set<EnumType> localEnums = new LinkedHashSet<>();
  // For top level, the DeclaredFunctionType just includes a type for THIS.
  // For functions, the DeclaredFunctionType is never null, even those without jsdoc.
  // Any inferred parameters or return will be set to null individually.
  private DeclaredFunctionType declaredType;
  // This field is used to typecheck the body of a function that uses TTL.
  // We instantiate the TTL variables to ?.
  // If a function does not use TTL, this field has the same value as declaredType.
  // TODO(dimvar): instead, instantiate the non-TTL generics to ? and evaluate the TTL variables.
  private DeclaredFunctionType declaredTypeForOwnBody;

  NTIScope(Node root, NTIScope parent, List<String> formals, JSTypes commonTypes) {
    checkNotNull(commonTypes);
    if (parent == null) {
      // Top-level scope: anonymous, and externs are collected in a mutable map.
      this.name = null;
      this.externs = new LinkedHashMap<>();
    } else {
      // Function scope: name comes from the function AST node; only the top
      // scope owns externs, so child scopes get an empty immutable map.
      String nameOnAst = root.getFirstChild().getString();
      this.name = nameOnAst.isEmpty() ? null : nameOnAst;
      this.externs = ImmutableMap.of();
    }
    this.root = root;
    this.parent = parent;
    this.formals = formals;
    this.commonTypes = commonTypes;
  }

  Node getRoot() {
    return this.root;
  }

  NTIScope getParent() {
    return this.parent;
  }

  Node getBody() {
    checkArgument(root.isFunction());
    return NodeUtil.getFunctionBody(root);
  }

  /** Used only for error messages; null for top scope */
  String getReadableName() {
    // TODO(dimvar): don't return null for anonymous functions
    return isTopLevel() ? null : NodeUtil.getName(root);
  }

  String getName() {
    return name;
  }

  @Override
  public JSTypes getCommonTypes() {
    return this.commonTypes;
  }

  // Records the declared type; also primes declaredTypeForOwnBody, which is
  // re-instantiated below when the function uses type transformations (TTL).
  void setDeclaredType(DeclaredFunctionType declaredType) {
    checkNotNull(declaredType);
    this.declaredType = this.declaredTypeForOwnBody = declaredType;
    // In NTI, we set the type of a function node after we create the summary.
    // NTI doesn't analyze externs, so we set the type for extern functions here.
    if (this.root.isFromExterns()) {
      this.root.setTypeI(this.commonTypes.fromFunctionType(declaredType.toFunctionType()));
    }
    if (!declaredType.getTypeParameters().getTypeTransformations().isEmpty()) {
      this.declaredTypeForOwnBody = declaredType.instantiateGenericsWithUnknown();
    }
  }

  @Override
  public DeclaredFunctionType getDeclaredFunctionType() {
    return this.declaredType;
  }

  public DeclaredFunctionType getDeclaredTypeForOwnBody() {
    return this.declaredTypeForOwnBody;
  }

  boolean isFunction() {
    return root.isFunction();
  }

  boolean isTopLevel() {
    return parent == null;
  }

  boolean isConstructor() {
    JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(root);
    return isFunction() && jsdoc != null && jsdoc.isConstructor();
  }

  boolean isInterface() {
    JSDocInfo jsdoc = NodeUtil.getBestJSDocInfo(root);
    return isFunction() && jsdoc != null && jsdoc.isInterface();
  }

  boolean isPrototypeMethod() {
    checkArgument(root != null);
    return NodeUtil.isPrototypeMethod(root);
  }

  void addUnknownTypeNames(Set<String> names) {
    // TODO(dimvar): if sm uses a goog.forwardDeclare in a local scope, give
    // an error instead of crashing.
    checkState(this.isTopLevel());
    this.unknownTypeNames = ImmutableSet.copyOf(names);
  }

  // Registers a nested (simple-named, not yet locally defined) function definition.
  void addLocalFunDef(String name, NTIScope scope) {
    checkArgument(!name.isEmpty());
    checkArgument(!name.contains("."));
    checkArgument(!isDefinedLocally(name, false));
    localFunDefs.put(name, scope);
  }

  boolean isFormalParam(String name) {
    return formals.contains(name);
  }

  boolean isFormalParamInAnyAncestorScope(String name) {
    return isFormalParam(name)
        || (this.parent != null && this.parent.isFormalParamInAnyAncestorScope(name));
  }

  boolean isLocalFunDef(String name) {
    return localFunDefs.containsKey(name);
  }

  // True if the name resolves to a function that is also used as a namespace.
  // Only valid after freezeScope (checkState(isFrozen)).
  boolean isFunctionNamespace(String name) {
    checkArgument(!name.contains("."));
    checkState(isFrozen);
    Declaration d = getDeclaration(name, false);
    if (d == null || d.getFunctionScope() == null || d.getTypeOfSimpleDecl() == null) {
      return false;
    }
    return d.getTypeOfSimpleDecl().isNamespace();
  }

  // In other languages, type names and variable names are in distinct
  // namespaces and don't clash.
  // But because our typedefs and enums are var declarations, they are in the
  // same namespace as other variables.
  boolean isDefinedLocally(String name, boolean includeTypes) {
    checkNotNull(name);
    checkState(!name.contains("."));
    if (locals.containsKey(name)
        || formals.contains(name)
        || localNamespaces.containsKey(name)
        || localFunDefs.containsKey(name)
        || "this".equals(name)
        || externs.containsKey(name)
        || localTypedefs.containsKey(name)) {
      return true;
    }
    if (includeTypes) {
      return unknownTypeNames.contains(name)
          || (declaredType != null && declaredType.isTypeVariableDefinedLocally(name));
    }
    return false;
  }

  // For variables it is the same as isDefinedLocally; for properties it looks
  // for a definition in any scope.
  boolean isDefined(Node qnameNode) {
    checkArgument(qnameNode.isQualifiedName());
    if (qnameNode.isName()) {
      return isDefinedLocally(qnameNode.getString(), false);
    } else if (qnameNode.isThis()) {
      return true;
    }
    QualifiedName qname = QualifiedName.fromNode(qnameNode);
    String leftmost = qname.getLeftmostName();
    if (isNamespace(leftmost)) {
      return getNamespace(leftmost).isDefined(qname.getAllButLeftmost());
    }
    return parent == null ? false : parent.isDefined(qnameNode);
  }

  boolean isNamespace(Node expr) {
    if (expr.isName()) {
      return isNamespace(expr.getString());
    }
    if (!expr.isGetProp()) {
      return false;
    }
    return isNamespace(QualifiedName.fromNode(expr));
  }

  boolean isNamespace(QualifiedName qname) {
    if (qname == null) {
      return false;
    }
    String leftmost = qname.getLeftmostName();
    return isNamespace(leftmost)
        && (qname.isIdentifier()
            || getNamespace(leftmost).hasSubnamespace(qname.getAllButLeftmost()));
  }

  boolean isNamespace(String name) {
    checkArgument(!name.contains("."));
    Declaration decl = getDeclaration(name, false);
    if (decl == null) {
      return false;
    }
    JSType simpleType = decl.getTypeOfSimpleDecl();
    return decl.getNamespace() != null || (simpleType != null && simpleType.isNamespace());
  }

  boolean isVisibleInScope(String name) {
    checkArgument(!name.contains("."));
    return isDefinedLocally(name, false)
        || name.equals(this.name)
        || (parent != null && parent.isVisibleInScope(name));
  }

  boolean isConstVar(String name) {
    checkArgument(!name.contains("."));
    Declaration decl = getDeclaration(name, false);
    return decl != null && decl.isConstant();
  }

  // An "outer var" candidate: not local here but visible in an enclosing scope.
  boolean isOuterVarEarly(String name) {
    checkArgument(!name.contains("."));
    return !isDefinedLocally(name, false) && parent != null && parent.isVisibleInScope(name);
  }

  boolean isGlobalVar(String varName) {
    NTIScope s = this;
    while (s.parent != null) {
      if (s.isDefinedLocally(varName, false)) {
        return false;
      }
      s = s.parent;
    }
    return true;
  }

  boolean isUndeclaredFormal(String name) {
    checkArgument(!name.contains("."));
    return formals.contains(name) && getDeclaredTypeOf(name) == null;
  }

  List<String> getFormals() {
    return new ArrayList<>(formals);
  }

  Set<String> getOuterVars() {
    return new LinkedHashSet<>(outerVars);
  }

  Set<String> getLocalFunDefs() {
    return ImmutableSet.copyOf(localFunDefs.keySet());
  }

  boolean isOuterVar(String name) {
    return outerVars.contains(name);
  }

  boolean isUndeclaredOuterVar(String name) {
    return outerVars.contains(name) && getDeclaredTypeOf(name) == null;
  }

  boolean isEscapedVar(String name) {
    return this.escapedVars.contains(name);
  }

  boolean hasThis() {
    DeclaredFunctionType dft = this.declaredType;
    // dft is null early during GlobalTypeInfo
    return dft != null && dft.getThisType() != null;
  }

  // Resolves the declared type of a simple name: "this", then the local
  // declaration, then the function's own name, then the parent chain.
  @Override
  public JSType getDeclaredTypeOf(String name) {
    checkArgument(!name.contains("."));
    if ("this".equals(name)) {
      if (!hasThis()) {
        return null;
      }
      return getDeclaredTypeForOwnBody().getThisType();
    }
    Declaration decl = getLocalDeclaration(name, false);
    if (decl != null) {
      if (decl.getTypeOfSimpleDecl() != null) {
        Preconditions.checkState(!decl.getTypeOfSimpleDecl().isBottom(), "%s was bottom", name);
        return decl.getTypeOfSimpleDecl();
      }
      NTIScope funScope = (NTIScope) decl.getFunctionScope();
      if (funScope != null) {
        Preconditions.checkNotNull(
            funScope.getDeclaredFunctionType(), "decl=%s, funScope=%s", decl, funScope);
        return this.commonTypes.fromFunctionType(
            funScope.getDeclaredFunctionType().toFunctionType());
      }
      checkState(decl.getNamespace() == null);
      return null;
    }
    // When a function is a namespace, the parent scope has a better type.
    // NOTE(review): when name equals this.name, this is a named function scope, so
    // parent is presumably non-null here — confirm against scope construction.
    if (name.equals(this.name) && !parent.isFunctionNamespace(name)) {
      return this.commonTypes.fromFunctionType(getDeclaredFunctionType().toFunctionType());
    }
    if (parent != null) {
      return parent.getDeclaredTypeOf(name);
    }
    return null;
  }

  boolean hasUndeclaredFormalsOrOuters() {
    for (String formal : formals) {
      if (getDeclaredTypeOf(formal) == null) {
        return true;
      }
    }
    for (String outer : outerVars) {
      JSType declType = getDeclaredTypeOf(outer);
      if (declType == null
          // Undeclared functions have a non-null declared type,
          // but they always have a return type of unknown
          || (declType.getFunType() != null
              && !declType.getFunType().isSomeConstructorOrInterface()
              && declType.getFunType().getReturnType().isUnknown())) {
        return true;
      }
    }
    return false;
  }

  private NTIScope getScopeHelper(QualifiedName qname) {
    Declaration decl = getDeclaration(qname, false);
    return decl == null ? null : (NTIScope) decl.getFunctionScope();
  }

  boolean isKnownFunction(String fnName) {
    checkArgument(!fnName.contains("."));
    return getScopeHelper(new QualifiedName(fnName)) != null;
  }

  boolean isKnownFunction(QualifiedName qname) {
    return getScopeHelper(qname) != null;
  }

  boolean isExternalFunction(String fnName) {
    // NOTE(review): getScopeHelper may return null for an unknown name, which would
    // NPE here; callers are presumably expected to pass a known function — confirm.
    NTIScope s = getScopeHelper(new QualifiedName(fnName));
    return s.root.isFromExterns();
  }

  NTIScope getScope(String fnName) {
    NTIScope s = getScopeHelper(new QualifiedName(fnName));
    checkState(s != null);
    return s;
  }

  Set<String> getLocals() {
    return ImmutableSet.copyOf(locals.keySet());
  }

  Set<String> getExterns() {
    return ImmutableSet.copyOf(externs.keySet());
  }

  // We don't check for duplicates here, mainly because we add some
  // intentionally during the two phases of GlobalTypeInfo.
  // If a variable is declared many times in a scope, the last definition
  // overwrites the previous ones. For correctness, we rely on the fact that
  // the var-check passes run before type checking.
  void addLocal(String name, JSType declType, boolean isConstant, boolean isFromExterns) {
    checkArgument(!name.contains("."));
    if (isConstant) {
      constVars.add(name);
    }
    if (isFromExterns) {
      externs.put(name, declType);
    } else {
      locals.put(name, declType);
    }
  }

  // Walks up from s and marks name as escaped in the scope that defines it
  // (i.e. the variable is used by an inner scope).
  static void mayRecordEscapedVar(NTIScope s, String name) {
    if (s.isDefinedLocally(name, false)) {
      return;
    }
    while (s != null) {
      if (s.isDefinedLocally(name, false)) {
        s.escapedVars.add(name);
        return;
      }
      s = s.parent;
    }
  }

  RawNominalType getNominalType(QualifiedName qname) {
    Declaration decl = getDeclaration(qname, false);
    return decl == null ? null : decl.getNominal();
  }

  Typedef getTypedef(String name) {
    QualifiedName qname = QualifiedName.fromQualifiedString(name);
    Declaration decl;
    if (qname.isIdentifier()) {
      decl = getDeclaration(qname, true);
    } else {
      decl = getNamespace(qname.getLeftmostName()).getDeclaration(qname.getAllButLeftmost());
    }
    return decl == null ? null : decl.getTypedef();
  }

  EnumType getEnum(QualifiedName qname) {
    Declaration decl = getDeclaration(qname, false);
    return decl == null ? null : decl.getEnum();
  }

  Namespace getNamespace(String name) {
    checkArgument(!name.contains("."));
    Declaration decl = getDeclaration(name, false);
    return decl == null ? null : decl.getNamespace();
  }

  // Marks a function as also being a namespace, either as a local simple name
  // or as a property on an existing namespace.
  void addFunNamespace(Node qnameNode) {
    if (qnameNode.isName()) {
      String varName = qnameNode.getString();
      checkArgument(isDefinedLocally(varName, false));
      checkState(!this.localNamespaces.containsKey(varName));
      NTIScope s = checkNotNull(this.localFunDefs.get(varName));
      this.localNamespaces.put(
          varName, new FunctionNamespace(this.commonTypes, varName, s, qnameNode));
    } else {
      checkArgument(!isNamespace(qnameNode));
      QualifiedName qname = QualifiedName.fromNode(qnameNode);
      Namespace ns = getNamespace(qname.getLeftmostName());
      NTIScope s = (NTIScope) ns.getDeclaration(qname).getFunctionScope();
      ns.addNamespace(
          qname.getAllButLeftmost(),
          new FunctionNamespace(this.commonTypes, qname.toString(), s, qnameNode));
    }
  }

  void addNamespaceLit(Node qnameNode) {
    addNamespace(
        qnameNode, new NamespaceLit(this.commonTypes, qnameNode.getQualifiedName(), qnameNode));
  }

  // Updates the type of an already-declared variable, searching up the scope chain.
  void updateType(String name, JSType newDeclType) {
    if (isDefinedLocally(name, false)) {
      locals.put(name, newDeclType);
    } else if (parent != null) {
      parent.updateType(name, newDeclType);
    } else {
      throw new RuntimeException("Cannot update type of unknown variable: " + name);
    }
  }

  void addOuterVar(String name) {
    outerVars.add(name);
  }

  void addTypedef(Node qnameNode, Typedef td) {
    if (qnameNode.isName()) {
      checkState(!localTypedefs.containsKey(qnameNode.getString()));
      localTypedefs.put(qnameNode.getString(), td);
    } else {
      checkState(!isDefined(qnameNode));
      QualifiedName qname = QualifiedName.fromNode(qnameNode);
      Namespace ns = getNamespace(qname.getLeftmostName());
      ns.addTypedef(qname.getAllButLeftmost(), td);
      namespaceTypedefs.add(td);
    }
  }

  void addNamespace(Node qnameNode, Namespace ns) {
    if (ns instanceof EnumType) {
      this.localEnums.add((EnumType) ns);
    }
    if (qnameNode.isName()) {
      String varName = qnameNode.getString();
      Preconditions.checkState(
          !this.localNamespaces.containsKey(varName), "Namespace %s already defined.", varName);
      this.localNamespaces.put(varName, ns);
      if (qnameNode.isFromExterns() && !this.externs.containsKey(varName)) {
        // We don't know the full type of a namespace until after we see all
        // its properties. But we want to add it to the externs, otherwise it
        // is treated as a local and initialized to the wrong thing in NTI.
        this.externs.put(qnameNode.getString(), null);
      }
    } else {
      checkState(!isDefined(qnameNode));
      QualifiedName qname = QualifiedName.fromNode(qnameNode);
      Namespace rootns = getNamespace(qname.getLeftmostName());
      rootns.addNamespace(qname.getAllButLeftmost(), ns);
    }
  }

  Namespace getNamespace(QualifiedName qname) {
    Namespace ns = getNamespace(qname.getLeftmostName());
    return (ns == null || qname.isIdentifier())
        ? ns
        : ns.getSubnamespace(qname.getAllButLeftmost());
  }

  // Builds a Declaration for a simple name defined in THIS scope only, or null.
  // The lookup order mirrors isDefinedLocally's membership checks.
  private Declaration getLocalDeclaration(String name, boolean includeTypes) {
    checkArgument(!name.contains("."));
    if (!isDefinedLocally(name, includeTypes)) {
      return null;
    }
    DeclaredFunctionType declaredType = getDeclaredTypeForOwnBody();
    JSType type = null;
    boolean isTypeVar = false;
    if ("this".equals(name)) {
      type = getDeclaredTypeOf("this");
    } else if (locals.containsKey(name)) {
      type = locals.get(name);
    } else if (formals.contains(name)) {
      int formalIndex = formals.indexOf(name);
      if (declaredType != null && formalIndex != -1) {
        JSType formalType = declaredType.getFormalType(formalIndex);
        if (formalType != null && !formalType.isBottom()) {
          type = formalType;
        }
      }
    } else if (localFunDefs.containsKey(name)) {
      // After finalization, the externs contain the correct type for
      // external function namespaces, don't rely on localFunDefs
      if (isFrozen && externs.containsKey(name)) {
        type = externs.get(name);
      }
    } else if (localTypedefs.containsKey(name) || localNamespaces.containsKey(name)) {
      // Any further declarations are shadowed
    } else if (declaredType != null && declaredType.isTypeVariableDefinedLocally(name)) {
      isTypeVar = true;
      type = JSType.fromTypeVar(this.commonTypes, declaredType.getTypeVariableDefinedLocally(name));
    } else if (externs.containsKey(name)) {
      type = externs.get(name);
    }
    Namespace ns = null;
    if (localNamespaces.containsKey(name)) {
      ns = localNamespaces.get(name);
    } else if (preservedNamespaces != null) {
      ns = preservedNamespaces.get(name);
    }
    return new Declaration(
        type, localTypedefs.get(name), ns, localFunDefs.get(name), isTypeVar,
        constVars.contains(name));
  }

  @Override
  public Declaration getDeclaration(QualifiedName qname, boolean includeTypes) {
    if (qname.isIdentifier()) {
      return getDeclaration(qname.getLeftmostName(), includeTypes);
    }
    Namespace ns = getNamespace(qname.getLeftmostName());
    if (ns == null) {
      return maybeGetForwardDeclaration(qname.toString());
    }
    Declaration decl = ns.getDeclaration(qname.getAllButLeftmost());
    return decl != null ? decl : maybeGetForwardDeclaration(qname.toString());
  }

  // Forward-declared names (tracked on the top scope) resolve to UNKNOWN.
  private Declaration maybeGetForwardDeclaration(String qname) {
    NTIScope globalScope = this;
    while (globalScope.parent != null) {
      globalScope = globalScope.parent;
    }
    if (globalScope.unknownTypeNames.contains(qname)) {
      return new Declaration(this.commonTypes.UNKNOWN, null, null, null, false, false);
    }
    return null;
  }

  public Declaration getDeclaration(String name, boolean includeTypes) {
    checkArgument(!name.contains("."));
    Declaration decl = getLocalDeclaration(name, includeTypes);
    if (decl != null) {
      return decl;
    }
    return parent == null ? null : parent.getDeclaration(name, includeTypes);
  }

  private Namespace getNamespaceAfterFreezing(String typeName) {
    checkNotNull(preservedNamespaces, "Failed to preserve namespaces post-finalization");
    QualifiedName qname = QualifiedName.fromQualifiedString(typeName);
    Namespace ns = preservedNamespaces.get(qname.getLeftmostName());
    if (ns != null && !qname.isIdentifier()) {
      ns = ns.getSubnamespace(qname.getAllButLeftmost());
    }
    return ns;
  }

  /**
   * Given the name of a namespace that is a nominal type, returns an instance of that type.
   * Given the name of another namespace, returns the namespace type.
   */
  public JSType getType(String typeName) {
    Namespace ns = getNamespaceAfterFreezing(typeName);
    if (ns == null) {
      return null;
    }
    return ns instanceof RawNominalType
        ? ((RawNominalType) ns).getInstanceAsJSType()
        : ns.toJSType();
  }

  @Override
  public JSType getNamespaceOrTypedefType(String typeName) {
    Namespace ns = getNamespaceAfterFreezing(typeName);
    if (ns != null) {
      return ns.toJSType();
    }
    Typedef td = getTypedef(typeName);
    return td == null ? null : td.getType();
  }

  @Override
  public JSDocInfo getJsdocOfTypeDeclaration(String typeName) {
    JSType t = getType(typeName);
    if (t != null) {
      Node defSite = t.getSource();
      if (defSite != null) {
        return NodeUtil.getBestJSDocInfo(defSite);
      }
    }
    return null;
  }

  // Resolves local and namespace-attached typedefs; the namespace set is discarded after.
  void resolveTypedefs(JSTypeCreatorFromJSDoc typeParser) {
    for (Typedef td : this.localTypedefs.values()) {
      typeParser.resolveTypedef(td, this);
    }
    for (Typedef td : this.namespaceTypedefs) {
      typeParser.resolveTypedef(td, this);
    }
    this.namespaceTypedefs = null;
  }

  // Resolves local enums; the set is discarded after resolution.
  void resolveEnums(JSTypeCreatorFromJSDoc typeParser) {
    for (EnumType e : this.localEnums) {
      typeParser.resolveEnum(e, this);
    }
    this.localEnums = null;
  }

  // Finalizes this scope for the NTI pass: materializes namespace types into
  // locals/externs, propagates outer vars, and swaps mutable maps for immutable ones.
  void freezeScope() {
    Preconditions.checkNotNull(this.declaredType, "No declared type for scope: %s", this.root);
    unknownTypeNames = ImmutableSet.of();
    // For now, we put types of namespaces directly into the locals.
    // Alternatively, we could move this into NewTypeInference.initEdgeEnvs
    for (Map.Entry<String, Namespace> entry : localNamespaces.entrySet()) {
      String name = entry.getKey();
      Namespace ns = entry.getValue();
      JSType t;
      if (ns instanceof NamespaceLit) {
        constVars.add(name);
        NamespaceLit nslit = (NamespaceLit) ns;
        // The argument to maybeSetWindowInstance should only be non-null for
        // window, but we don't check here to avoid hard-coding the name.
        // Enforced in GlobalTypeInfo.
        nslit.maybeSetWindowInstance(externs.get(name));
        t = nslit.toJSType();
      } else {
        t = ns.toJSType();
      }
      if (externs.containsKey(name)) {
        externs.put(name, t);
      } else {
        locals.put(name, t);
      }
    }
    for (String typedefName : localTypedefs.keySet()) {
      locals.put(typedefName, this.commonTypes.UNDEFINED);
    }
    copyOuterVarsTransitively(this);
    preservedNamespaces = localNamespaces;
    localNamespaces = ImmutableMap.of();
    escapedVars = ImmutableSet.of();
    isFrozen = true;
  }

  // A scope must know about the free variables used in outer scopes,
  // otherwise we end up with invalid type envs.
  private static void copyOuterVarsTransitively(NTIScope s) {
    if (s.isTopLevel()) {
      return;
    }
    NTIScope parent = s.parent;
    Set<String> outerVars = s.outerVars;
    while (parent.isFunction()) {
      boolean copiedOneVar = false;
      for (String v : outerVars) {
        if (!parent.isDefinedLocally(v, false)) {
          copiedOneVar = true;
          parent.addOuterVar(v);
        }
      }
      if (!copiedOneVar) {
        break;
      }
      outerVars = parent.outerVars;
      parent = parent.parent;
    }
  }

  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    if (isTopLevel()) {
      sb.append("<TOP SCOPE>");
    } else {
      sb.append(getReadableName());
      sb.append('(');
      Joiner.on(',').appendTo(sb, formals);
      sb.append(')');
    }
    sb.append(" with root: ");
    sb.append(root);
    return sb.toString();
  }
}
/*
 * Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
 *
 * Licensed under the Apache License, Version 2.0 (the "License").
 * You may not use this file except in compliance with the License.
 * A copy of the License is located at
 *
 *  http://aws.amazon.com/apache2.0
 *
 * or in the "license" file accompanying this file. This file is distributed
 * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
 * express or implied. See the License for the specific language governing
 * permissions and limitations under the License.
 */
package com.amazonaws.services.cloudfront.model;

import java.io.Serializable;

/**
 * <p>
 * CloudFront origin access identity.
 * </p>
 */
public class CloudFrontOriginAccessIdentity implements Serializable, Cloneable {

    /** The ID for the origin access identity. For example: E74FTE3AJFJ256A. */
    private String id;

    /**
     * The Amazon S3 canonical user ID for the origin access identity, used
     * when giving the identity read permission to an object in Amazon S3.
     */
    private String s3CanonicalUserId;

    /** The current configuration information for the identity. */
    private CloudFrontOriginAccessIdentityConfig cloudFrontOriginAccessIdentityConfig;

    /**
     * Default constructor. Callers should use the setters or the fluent
     * with... methods to initialize the object after creating it.
     */
    public CloudFrontOriginAccessIdentity() {
    }

    /**
     * Convenience constructor that initializes the identity's ID and Amazon
     * S3 canonical user ID. Use the setters or fluent with... methods to
     * initialize any additional members.
     *
     * @param id the ID for the origin access identity, e.g. E74FTE3AJFJ256A
     * @param s3CanonicalUserId the Amazon S3 canonical user ID for the
     *        origin access identity, used when giving the identity read
     *        permission to an object in Amazon S3
     */
    public CloudFrontOriginAccessIdentity(String id, String s3CanonicalUserId) {
        setId(id);
        setS3CanonicalUserId(s3CanonicalUserId);
    }

    /**
     * Returns the ID for the origin access identity. For example: E74FTE3AJFJ256A.
     *
     * @return the ID for the origin access identity
     */
    public String getId() {
        return this.id;
    }

    /**
     * Sets the ID for the origin access identity. For example: E74FTE3AJFJ256A.
     *
     * @param id the ID for the origin access identity
     */
    public void setId(String id) {
        this.id = id;
    }

    /**
     * Fluent variant of {@link #setId(String)}.
     *
     * @param id the ID for the origin access identity
     * @return this object, so method calls can be chained together
     */
    public CloudFrontOriginAccessIdentity withId(String id) {
        setId(id);
        return this;
    }

    /**
     * Returns the Amazon S3 canonical user ID for the origin access identity,
     * used when giving the identity read permission to an object in Amazon S3.
     *
     * @return the Amazon S3 canonical user ID for the origin access identity
     */
    public String getS3CanonicalUserId() {
        return this.s3CanonicalUserId;
    }

    /**
     * Sets the Amazon S3 canonical user ID for the origin access identity,
     * used when giving the identity read permission to an object in Amazon S3.
     *
     * @param s3CanonicalUserId the Amazon S3 canonical user ID
     */
    public void setS3CanonicalUserId(String s3CanonicalUserId) {
        this.s3CanonicalUserId = s3CanonicalUserId;
    }

    /**
     * Fluent variant of {@link #setS3CanonicalUserId(String)}.
     *
     * @param s3CanonicalUserId the Amazon S3 canonical user ID
     * @return this object, so method calls can be chained together
     */
    public CloudFrontOriginAccessIdentity withS3CanonicalUserId(String s3CanonicalUserId) {
        setS3CanonicalUserId(s3CanonicalUserId);
        return this;
    }

    /**
     * Returns the current configuration information for the identity.
     *
     * @return the current configuration information for the identity
     */
    public CloudFrontOriginAccessIdentityConfig getCloudFrontOriginAccessIdentityConfig() {
        return this.cloudFrontOriginAccessIdentityConfig;
    }

    /**
     * Sets the current configuration information for the identity.
     *
     * @param cloudFrontOriginAccessIdentityConfig the current configuration
     *        information for the identity
     */
    public void setCloudFrontOriginAccessIdentityConfig(CloudFrontOriginAccessIdentityConfig cloudFrontOriginAccessIdentityConfig) {
        this.cloudFrontOriginAccessIdentityConfig = cloudFrontOriginAccessIdentityConfig;
    }

    /**
     * Fluent variant of
     * {@link #setCloudFrontOriginAccessIdentityConfig(CloudFrontOriginAccessIdentityConfig)}.
     *
     * @param cloudFrontOriginAccessIdentityConfig the current configuration
     *        information for the identity
     * @return this object, so method calls can be chained together
     */
    public CloudFrontOriginAccessIdentity withCloudFrontOriginAccessIdentityConfig(CloudFrontOriginAccessIdentityConfig cloudFrontOriginAccessIdentityConfig) {
        setCloudFrontOriginAccessIdentityConfig(cloudFrontOriginAccessIdentityConfig);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and
     * debugging. Only non-null members are included.
     *
     * @return a string representation of this object
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getId() != null) {
            sb.append("Id: ").append(getId()).append(',');
        }
        if (getS3CanonicalUserId() != null) {
            sb.append("S3CanonicalUserId: ").append(getS3CanonicalUserId()).append(',');
        }
        if (getCloudFrontOriginAccessIdentityConfig() != null) {
            sb.append("CloudFrontOriginAccessIdentityConfig: ").append(getCloudFrontOriginAccessIdentityConfig());
        }
        return sb.append('}').toString();
    }

    @Override
    public int hashCode() {
        // Standard 31-based chain over the three members (null hashes to 0).
        final int prime = 31;
        int result = 1;
        result = prime * result + nullSafeHash(getId());
        result = prime * result + nullSafeHash(getS3CanonicalUserId());
        result = prime * result + nullSafeHash(getCloudFrontOriginAccessIdentityConfig());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof also rejects null.
        if (!(obj instanceof CloudFrontOriginAccessIdentity)) {
            return false;
        }
        CloudFrontOriginAccessIdentity that = (CloudFrontOriginAccessIdentity) obj;
        return nullSafeEquals(getId(), that.getId())
                && nullSafeEquals(getS3CanonicalUserId(), that.getS3CanonicalUserId())
                && nullSafeEquals(getCloudFrontOriginAccessIdentityConfig(), that.getCloudFrontOriginAccessIdentityConfig());
    }

    /** Null-tolerant equality check used by {@link #equals(Object)}. */
    private static boolean nullSafeEquals(Object a, Object b) {
        return (a == null) ? (b == null) : a.equals(b);
    }

    /** Null-tolerant hash used by {@link #hashCode()}; null maps to 0. */
    private static int nullSafeHash(Object o) {
        return (o == null) ? 0 : o.hashCode();
    }

    @Override
    public CloudFrontOriginAccessIdentity clone() {
        try {
            return (CloudFrontOriginAccessIdentity) super.clone();
        } catch (CloneNotSupportedException e) {
            // Cannot happen: this class implements Cloneable.
            throw new IllegalStateException(
                    "Got a CloneNotSupportedException from Object.clone() "
                            + "even though we're Cloneable!", e);
        }
    }
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package pdfpositional;

import java.io.IOException;
import org.apache.pdfbox.cos.COSDictionary;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.font.PDType0Font;
import org.apache.pdfbox.pdmodel.font.PDType1Font;
import org.apache.pdfbox.text.TextPosition;
import org.apache.pdfbox.util.Matrix;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;

/**
 * Unit tests for {@code PdfCharacter}: coordinate/size conversion, word and
 * line grouping, whitespace detection, ligature normalization, and
 * punctuation / soft-break / apostrophe classification.
 *
 * @author jonny
 */
public class PdfCharacterTest {

    // Object under test; (re)built in setUp() around tPos1 and conversion1.
    private PdfCharacter instance;
    // Two scale factors used to verify that positions/sizes scale linearly.
    private Float conversion1;
    private Float conversion2;
    // Fixture glyph positions: tPos2 is adjacent to tPos1 on the same line,
    // tPos3 is further along the same line, tPos4 is on a different line.
    private TextPosition tPos1;
    private TextPosition tPos2;
    private TextPosition tPos3;
    private TextPosition tPos4;
    private String[] nonWhitespace = {"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z", "0", "1", "2", "3", "4", "5", "6", "7", "8", "9"};
    private String[] whitespace = {"_", " ", "\r", "\n", "\t"};
    // Unicode code points of the ff/fi/fl/ffi/ffl/st ligatures and the
    // multi-character strings they should normalize to (parallel arrays).
    private int[] ligaturesCode = {64256, 64257, 64258, 64259, 64260, 64261};
    private String[] ligaturesConv = {"ff", "fi", "fl", "ffi", "ffl", "st"};

    /**
     * Seeds the Mapping* singletons with the character mappings the tests
     * rely on (soft breaks, substitutions, punctuation). Note these are
     * process-wide singletons, so state persists across tests.
     */
    public void prepSingletons() {
        // Soft hyphen (173), hyphen-minus (45) and hyphen (8208) all map to "-".
        Long[] sbKeys = {173L, 45L, 8208L};
        String[] sbValues = {"-", "-", "-"};
        MappingSoftBreak.getInstance().addItems(sbKeys, sbValues);
        // Accented capitals map to plain ASCII substitutions.
        Long[] subKeys = {192L, 198L, 199L};
        String[] subValues = {"A", "AE", "C"};
        MappingSubstitution.getInstance().addItems(subKeys, subValues);
        // '.' (46) is registered as ending punctuation.
        Long[] punKeys = {46L};
        String[] punValues = {"."};
        MappingPunctuation.getInstance().addItems(punKeys, punValues);
    }

    public PdfCharacterTest() {
    }

    @BeforeClass
    public static void setUpClass() {
    }

    @AfterClass
    public static void tearDownClass() {
    }

    /**
     * Builds the shared fixture: four glyphs (see field comments) and a
     * PdfCharacter wrapping tPos1 with a 1.0 conversion factor.
     */
    @Before
    public void setUp() throws IOException {
        prepSingletons();
        tPos1 = createTextPosition("a", 10, 10, 8, 12);
        tPos2 = createTextPosition("b", 20, 10, 8, 12);
        tPos3 = createTextPosition("c", 40, 10, 8, 12);
        tPos4 = createTextPosition("d", 40, 30, 8, 12);
        conversion1 = 1.0f;
        conversion2 = 2.0f;
        instance = new PdfCharacter(tPos1, conversion1);
    }

    @After
    public void tearDown() {
    }

    /**
     * Builds a PDFBox TextPosition for a single glyph at the given
     * coordinates. The text matrices place the glyph's start/end at
     * (xDirAdj, yDirAdj + height) and (xDirAdj + width, yDirAdj + height).
     * NOTE(review): the TextPosition constructor takes 14 positional
     * arguments; argument order here matches the PDFBox 2.x signature —
     * confirm against the PDFBox version on the classpath before changing.
     */
    public TextPosition createTextPosition(String character, float xDirAdj, float yDirAdj, float width, float height) throws IOException {
        PDPage page = new PDPage();
        float[] individualWidths = {};
        float spaceWidth = 4.0f;
        float fontSizeValue = 12f;
        int fontSizeInPt = 10;
        float ws = 4f;
        int[] charCodes = {(int)character.charAt(0)};
        Matrix textPositionSt = new Matrix(fontSizeValue, 0.0f, 0.0f, fontSizeValue, xDirAdj, yDirAdj + height);
        Matrix textPositionEnd = new Matrix(fontSizeValue, 0.0f, 0.0f, fontSizeValue, xDirAdj + width, yDirAdj + height);
        org.apache.pdfbox.text.TextPosition tp;
        tp = new org.apache.pdfbox.text.TextPosition(fontSizeInPt, spaceWidth, height, textPositionSt, ws, ws, height, spaceWidth, spaceWidth, character, charCodes, PDType1Font.TIMES_ROMAN, fontSizeValue, fontSizeInPt);
        return tp;
    }

//    public TextPosition createTextPosition(String character, float xDirAdj, float yDirAdj, float width, float height) {
//        PDPage page = new PDPage();
//        Matrix textPositionSt = new Matrix();
//        Matrix textPositionEnd = new Matrix();
//        float[] individualWidths = {};
//        float spaceWidth = 4.0f;
//        float fontSizeValue = 12f;
//        int fontSizeInPt = 10;
//        float ws = 4f;
//
//        return new TextPosition(page, textPositionSt, textPositionEnd,
//                12f, individualWidths, spaceWidth, character, new PDType0Font(),
//                fontSizeValue, fontSizeInPt, ws){
//            @Override
//            public float getXDirAdj() {
//                return xDirAdj;
//            }
//            @Override
//            public float getYDirAdj() {
//                return yDirAdj;
//            }
//            @Override
//            public float getWidthDirAdj() {
//                return width;
//            }
//            @Override
//            public float getHeightDir() {
//                return height;
//            }
//        };
//    }

    /**
     * Test of getxPos method, of class PdfCharacter.
     * x position scales linearly with the conversion factor.
     */
    @Test
    public void testGetxPos() {
        assertEquals(10.0F, instance.getxPos(), 0.0);
        instance.setConversion(conversion2);
        assertEquals(20.0F, instance.getxPos(), 0.0);
    }

    /**
     * Test of getyPos method, of class PdfCharacter.
     * y position is negated relative to the fixture's yDirAdj and scales
     * with the conversion factor.
     */
    @Test
    public void testGetyPos() {
        assertEquals(-10.0F, instance.getyPos(), 0.0);
        instance.setConversion(conversion2);
        assertEquals(-20.0F, instance.getyPos(), 0.0);
    }

    /**
     * Test of getWidth method, of class PdfCharacter.
     * Note the fixture is built with width 8 but 6.0 is expected here —
     * presumably PdfCharacter derives width from the glyph metrics rather
     * than the raw matrix extent; verify against PdfCharacter.getWidth().
     */
    @Test
    public void testGetWidth() {
        assertEquals(6.0f, instance.getWidth(), 0.0);
        instance.setConversion(conversion2);
        assertEquals(12.0f, instance.getWidth(), 0.0);
    }

    /**
     * Test of getHeight method, of class PdfCharacter.
     * Height scales linearly with the conversion factor.
     */
    @Test
    public void testGetHeight() {
        assertEquals(12.0f, instance.getHeight(), 0.0);
        instance.setConversion(conversion2);
        assertEquals(24.0f, instance.getHeight(), 0.0);
    }

    /**
     * Test of getPosition method, of class PdfCharacter.
     */
    @Test
    public void testGetPosition() {
        assertEquals(tPos1, instance.getPosition());
    }

    /**
     * Test of setPosition method, of class PdfCharacter.
     */
    @Test
    public void testSetPosition() {
        assertEquals(tPos1, instance.getPosition());
        instance.setPosition(tPos2);
        assertEquals(tPos2, instance.getPosition());
        instance.setPosition(tPos3);
        assertEquals(tPos3, instance.getPosition());
    }

    /**
     * Test of getConversion method, of class PdfCharacter.
     */
    @Test
    public void testGetConversion() {
        assertEquals(conversion1, instance.getConversion());
    }

    /**
     * Test of setConversion method, of class PdfCharacter.
     */
    @Test
    public void testSetConversion() {
        instance.setConversion(conversion1);
        assertEquals(conversion1, instance.getConversion());
        instance.setConversion(conversion2);
        assertEquals(conversion2, instance.getConversion());
    }

    /**
     * Test of isSameWord method, of class PdfCharacter.
     * tPos2 is adjacent (same word); tPos3 is too far away; tPos4 is on
     * another line.
     */
    @Test
    public void testIsSameWord() {
        assertTrue(instance.isSameWord(tPos2));
        assertFalse(instance.isSameWord(tPos3));
        assertFalse(instance.isSameWord(tPos4));
    }

    /**
     * Test of isSameLine method, of class PdfCharacter.
     * Same y (tPos2, tPos3) is the same line; different y (tPos4) is not.
     */
    @Test
    public void testIsSameLine() {
        assertTrue(instance.isSameLine(tPos2));
        assertTrue(instance.isSameLine(tPos3));
        assertFalse(instance.isSameLine(tPos4));
    }

    /**
     * Test of isWithinPermittedSpacing method, of class PdfCharacter.
     */
    @Test
    public void testIsWithinPermittedSpacing() {
        assertTrue(instance.isWithinPermittedSpacing(tPos2));
        assertFalse(instance.isWithinPermittedSpacing(tPos3));
        assertFalse(instance.isWithinPermittedSpacing(tPos4));
    }

    /**
     * Test of getMaxNextWordXpos method, of class PdfCharacter.
     * The threshold tracks the current position's x extent.
     */
    @Test
    public void testGetMaxNextWordXpos() {
        assertEquals(20f, instance.getMaxNextWordXpos(), 0.0);
        instance.setPosition(tPos2);
        assertEquals(40f, instance.getMaxNextWordXpos(), 0.0);
        instance.setPosition(tPos3);
        assertEquals(80f, instance.getMaxNextWordXpos(), 0.0);
        instance.setPosition(tPos4);
        assertEquals(80f, instance.getMaxNextWordXpos(), 0.0);
    }

    /**
     * Test of compareFloat method, of class PdfCharacter.
     * Equality is tolerance-based: 10.1 vs 10.15 compares equal, 10.1 vs
     * 10.2 does not.
     */
    @Test
    public void testCompareFloat() {
        assertTrue(instance.compareFloat(10f, 10f));
        assertTrue(instance.compareFloat(-10f, -10f));
        assertFalse(instance.compareFloat(10f, 11f));
        assertTrue(instance.compareFloat(10.1f, 10.15f));
        assertFalse(instance.compareFloat(10.1f, 10.2f));
    }

    /**
     * Test of isWhiteSpace method, of class PdfCharacter.
     * Note "_" is treated as whitespace by this implementation (it is in
     * the whitespace fixture array).
     */
    @Test
    public void testIsWhiteSpace() throws IOException {
        for (int i = 0; i < nonWhitespace.length; i++) {
            instance.setPosition(this.createTextPosition(nonWhitespace[i], 0, 0, 0, 0));
            assertFalse(instance.isWhiteSpace());
        }
        for (int i = 0; i < whitespace.length; i++) {
            instance.setPosition(this.createTextPosition(whitespace[i], 0, 0, 0, 0));
            assertTrue(instance.isWhiteSpace());
        }
    }

    /**
     * Test of getNormalizedCharacter method, of class PdfCharacter.
     * Plain characters pass through unchanged; ligature code points expand
     * to their multi-character equivalents via MappingSubstitution.
     */
    @Test
    public void testGetNormalizedCharacter() throws IOException {
        Long[] keys = {64256L, 64257L, 64258L, 64259L, 64260L, 64261L, 8217L, 39L};
        String[] values = {"ff", "fi", "fl", "ffi", "ffl", "st", "'", "'"};
        MappingSubstitution.getInstance().addItems(keys, values);
        for (int i = 0; i < nonWhitespace.length; i++) {
            instance.setPosition(this.createTextPosition(nonWhitespace[i], 0, 0, 0, 0));
            assertEquals(nonWhitespace[i], instance.getNormalizedCharacter());
        }
        for (int i = 0; i < ligaturesCode.length; i++) {
            instance.setPosition(this.createTextPosition(Character.toString((char)ligaturesCode[i]), 0, 0, 0, 0));
            assertEquals(ligaturesConv[i], instance.getNormalizedCharacter());
        }
    }

    // ASCII apostrophe (39) is recognized; a letter is not.
    @Test
    public void testIsApostrophe() throws IOException {
        TextPosition textPosition = createTextPosition("a", 0, 0, 0, 0);
        PdfCharacter inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isApostrophe());
        textPosition = createTextPosition(Character.toString((char)39), 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertTrue(inst.isApostrophe());
    }

    // '.' is ending punctuation (registered in prepSingletons); 'a' is not.
    @Test
    public void testIsPuctuationEnding() throws IOException {
        TextPosition textPosition = createTextPosition("a", 0, 0, 0, 0);
        PdfCharacter inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isPuctuationEnding());
        textPosition = createTextPosition(".", 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertTrue(inst.isPuctuationEnding());
    }

    // '-' is a soft word break (registered in prepSingletons); 'a' is not.
    @Test
    public void testIsSoftWordBreak() throws IOException {
        TextPosition textPosition = createTextPosition("a", 0, 0, 0, 0);
        PdfCharacter inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isSoftWordBreak());
        textPosition = createTextPosition("-", 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertTrue(inst.isSoftWordBreak());
    }

    // A word may not start with a soft break, ending punctuation, or an
    // apostrophe; ordinary letters are valid starts.
    @Test
    public void testIsWordStartCompatible() throws IOException {
        TextPosition textPosition = createTextPosition("a", 0, 0, 0, 0);
        PdfCharacter inst = new PdfCharacter(textPosition, conversion1);
        assertTrue(inst.isWordStartCompatible());
        // soft breaks are not valid start chars
        textPosition = createTextPosition("-", 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isWordStartCompatible());
        // punctuation is not a valid start value
        textPosition = createTextPosition(".", 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isWordStartCompatible());
        // apostrophe is not a valid start value
        textPosition = createTextPosition("'", 0, 0, 0, 0);
        inst = new PdfCharacter(textPosition, conversion1);
        assertFalse(inst.isWordStartCompatible());
    }
}
/* * Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with * the License. A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR * CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions * and limitations under the License. */ package com.amazonaws.services.elasticmapreduce.model; import java.io.Serializable; import javax.annotation.Generated; /** * * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/elasticmapreduce-2009-03-31/ListInstanceFleets" * target="_top">AWS API Documentation</a> */ @Generated("com.amazonaws:aws-java-sdk-code-generator") public class ListInstanceFleetsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable { /** * <p> * The list of instance fleets for the cluster and given filters. * </p> */ private com.amazonaws.internal.SdkInternalList<InstanceFleet> instanceFleets; /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> */ private String marker; /** * <p> * The list of instance fleets for the cluster and given filters. * </p> * * @return The list of instance fleets for the cluster and given filters. */ public java.util.List<InstanceFleet> getInstanceFleets() { if (instanceFleets == null) { instanceFleets = new com.amazonaws.internal.SdkInternalList<InstanceFleet>(); } return instanceFleets; } /** * <p> * The list of instance fleets for the cluster and given filters. * </p> * * @param instanceFleets * The list of instance fleets for the cluster and given filters. 
*/ public void setInstanceFleets(java.util.Collection<InstanceFleet> instanceFleets) { if (instanceFleets == null) { this.instanceFleets = null; return; } this.instanceFleets = new com.amazonaws.internal.SdkInternalList<InstanceFleet>(instanceFleets); } /** * <p> * The list of instance fleets for the cluster and given filters. * </p> * <p> * <b>NOTE:</b> This method appends the values to the existing list (if any). Use * {@link #setInstanceFleets(java.util.Collection)} or {@link #withInstanceFleets(java.util.Collection)} if you want * to override the existing values. * </p> * * @param instanceFleets * The list of instance fleets for the cluster and given filters. * @return Returns a reference to this object so that method calls can be chained together. */ public ListInstanceFleetsResult withInstanceFleets(InstanceFleet... instanceFleets) { if (this.instanceFleets == null) { setInstanceFleets(new com.amazonaws.internal.SdkInternalList<InstanceFleet>(instanceFleets.length)); } for (InstanceFleet ele : instanceFleets) { this.instanceFleets.add(ele); } return this; } /** * <p> * The list of instance fleets for the cluster and given filters. * </p> * * @param instanceFleets * The list of instance fleets for the cluster and given filters. * @return Returns a reference to this object so that method calls can be chained together. */ public ListInstanceFleetsResult withInstanceFleets(java.util.Collection<InstanceFleet> instanceFleets) { setInstanceFleets(instanceFleets); return this; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @param marker * The pagination token that indicates the next set of results to retrieve. */ public void setMarker(String marker) { this.marker = marker; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @return The pagination token that indicates the next set of results to retrieve. 
*/ public String getMarker() { return this.marker; } /** * <p> * The pagination token that indicates the next set of results to retrieve. * </p> * * @param marker * The pagination token that indicates the next set of results to retrieve. * @return Returns a reference to this object so that method calls can be chained together. */ public ListInstanceFleetsResult withMarker(String marker) { setMarker(marker); return this; } /** * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be * redacted from this string using a placeholder value. * * @return A string representation of this object. * * @see java.lang.Object#toString() */ @Override public String toString() { StringBuilder sb = new StringBuilder(); sb.append("{"); if (getInstanceFleets() != null) sb.append("InstanceFleets: ").append(getInstanceFleets()).append(","); if (getMarker() != null) sb.append("Marker: ").append(getMarker()); sb.append("}"); return sb.toString(); } @Override public boolean equals(Object obj) { if (this == obj) return true; if (obj == null) return false; if (obj instanceof ListInstanceFleetsResult == false) return false; ListInstanceFleetsResult other = (ListInstanceFleetsResult) obj; if (other.getInstanceFleets() == null ^ this.getInstanceFleets() == null) return false; if (other.getInstanceFleets() != null && other.getInstanceFleets().equals(this.getInstanceFleets()) == false) return false; if (other.getMarker() == null ^ this.getMarker() == null) return false; if (other.getMarker() != null && other.getMarker().equals(this.getMarker()) == false) return false; return true; } @Override public int hashCode() { final int prime = 31; int hashCode = 1; hashCode = prime * hashCode + ((getInstanceFleets() == null) ? 0 : getInstanceFleets().hashCode()); hashCode = prime * hashCode + ((getMarker() == null) ? 
0 : getMarker().hashCode()); return hashCode; } @Override public ListInstanceFleetsResult clone() { try { return (ListInstanceFleetsResult) super.clone(); } catch (CloneNotSupportedException e) { throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e); } } }
/** * Copyright 2009 The Apache Software Foundation * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.mapreduce; import java.io.IOException; import java.io.UnsupportedEncodingException; import java.net.URI; import java.net.URISyntaxException; import java.net.URLDecoder; import java.net.URLEncoder; import java.util.ArrayList; import java.util.Collection; import java.util.List; import java.util.Map; import java.util.TreeMap; import java.util.TreeSet; import java.util.UUID; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.filecache.DistributedCache; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.hfile.CacheConfig; import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.HFile; import 
org.apache.hadoop.hbase.regionserver.StoreFile;
import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Partitioner;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Writes HFiles. Passed KeyValues must arrive in order.
 * Currently, can only write files to a single column family at a
 * time. Multiple column families requires coordinating keys cross family.
 * Writes current time as the sequence id for the file. Sets the major compacted
 * attribute on created hfiles. Calling write(null,null) will forceably roll
 * all HFiles being written.
 * @see KeyValueSortReducer
 */
public class HFileOutputFormat extends FileOutputFormat<ImmutableBytesWritable, KeyValue> {
  static Log LOG = LogFactory.getLog(HFileOutputFormat.class);
  // Config key under which per-family compression settings are serialized.
  static final String COMPRESSION_CONF_KEY = "hbase.hfileoutputformat.families.compression";
  // Tracks the min/max timestamps seen across all appended KeyValues; the
  // aggregate range is written into each HFile's metadata on close.
  TimeRangeTracker trt = new TimeRangeTracker();

  /**
   * Creates a RecordWriter that writes each incoming KeyValue into an HFile
   * under a per-column-family subdirectory of the task's work path.
   * Writers are rolled (closed and replaced) once a family's HFile reaches
   * the configured max size, but only on a row boundary so a row never
   * spans two files. Passing write(null, null) forces a roll of all writers.
   *
   * @param context the task attempt context supplying configuration and output path
   * @return a writer mapping (row, KeyValue) pairs to HFiles
   * @throws IOException if the filesystem or writers cannot be set up
   */
  public RecordWriter<ImmutableBytesWritable, KeyValue> getRecordWriter(final TaskAttemptContext context)
  throws IOException, InterruptedException {
    // Get the path of the temporary output file
    final Path outputPath = FileOutputFormat.getOutputPath(context);
    final Path outputdir = new FileOutputCommitter(outputPath, context).getWorkPath();
    final Configuration conf = context.getConfiguration();
    final FileSystem fs = outputdir.getFileSystem(conf);
    // These configs. are from hbase-*.xml
    final long maxsize = conf.getLong(HConstants.HREGION_MAX_FILESIZE,
        HConstants.DEFAULT_MAX_FILE_SIZE);
    final int blocksize = conf.getInt("hbase.mapreduce.hfileoutputformat.blocksize",
        HFile.DEFAULT_BLOCKSIZE);
    // Invented config.  Add to hbase-*.xml if other than default compression.
    final String defaultCompression = conf.get("hfile.compression",
        Compression.Algorithm.NONE.getName());
    final boolean compactionExclude = conf.getBoolean(
        "hbase.mapreduce.hfileoutputformat.compaction.exclude", false);

    // create a map from column family to the compression algorithm
    final Map<byte[], String> compressionMap = createFamilyCompressionMap(conf);

    return new RecordWriter<ImmutableBytesWritable, KeyValue>() {
      // Map of families to writers and how much has been output on the writer.
      private final Map<byte [], WriterLength> writers =
        new TreeMap<byte [], WriterLength>(Bytes.BYTES_COMPARATOR);
      private byte [] previousRow = HConstants.EMPTY_BYTE_ARRAY;
      // Timestamp applied to every KeyValue whose stamp is "latest";
      // captured once when the writer is created.
      private final byte [] now = Bytes.toBytes(System.currentTimeMillis());
      // Set when any family's file exceeds maxsize; acted on at the next
      // row boundary so a single row never straddles two HFiles.
      private boolean rollRequested = false;

      public void write(ImmutableBytesWritable row, KeyValue kv)
      throws IOException {
        // null input == user explicitly wants to flush
        if (row == null && kv == null) {
          rollWriters();
          return;
        }

        byte [] rowKey = kv.getRow();
        long length = kv.getLength();
        byte [] family = kv.getFamily();
        WriterLength wl = this.writers.get(family);

        // If this is a new column family, verify that the directory exists
        if (wl == null) {
          fs.mkdirs(new Path(outputdir, Bytes.toString(family)));
        }

        // If any of the HFiles for the column families has reached
        // maxsize, we need to roll all the writers
        if (wl != null && wl.written + length >= maxsize) {
          this.rollRequested = true;
        }

        // This can only happen once a row is finished though
        if (rollRequested && Bytes.compareTo(this.previousRow, rowKey) != 0) {
          rollWriters();
        }

        // create a new HLog writer, if necessary
        if (wl == null || wl.writer == null) {
          wl = getNewWriter(family, conf);
        }

        // we now have the proper HLog writer. full steam ahead
        kv.updateLatestStamp(this.now);
        trt.includeTimestamp(kv);
        wl.writer.append(kv);
        wl.written += length;

        // Copy the row so we know when a row transition.
        this.previousRow = rowKey;
      }

      // Closes every open writer (logging the bytes written), resets the
      // per-family bookkeeping, and clears the pending roll request.
      private void rollWriters() throws IOException {
        for (WriterLength wl : this.writers.values()) {
          if (wl.writer != null) {
            LOG.info("Writer=" + wl.writer.getPath() +
                ((wl.written == 0)? "": ", wrote=" + wl.written));
            close(wl.writer);
          }
          wl.writer = null;
          wl.written = 0;
        }
        this.rollRequested = false;
      }

      /* Create a new HFile.Writer.
       * @param family
       * @return A WriterLength, containing a new HFile.Writer.
       * @throws IOException
       */
      private WriterLength getNewWriter(byte[] family, Configuration conf)
          throws IOException {
        WriterLength wl = new WriterLength();
        Path familydir = new Path(outputdir, Bytes.toString(family));
        String compression = compressionMap.get(family);
        // Fall back to the file-wide default when no per-family algorithm
        // was configured.
        compression = compression == null ? defaultCompression : compression;
        wl.writer =
          HFile.getWriterFactoryNoCache(conf).createWriter(fs,
          StoreFile.getUniqueFile(fs, familydir), blocksize,
          compression, KeyValue.KEY_COMPARATOR);
        this.writers.put(family, wl);
        return wl;
      }

      // Stamps bulk-load metadata (load time, task id, major-compaction and
      // compaction-exclusion flags, aggregate time range) into the HFile
      // before closing it. Null-safe.
      private void close(final HFile.Writer w) throws IOException {
        if (w != null) {
          w.appendFileInfo(StoreFile.BULKLOAD_TIME_KEY,
              Bytes.toBytes(System.currentTimeMillis()));
          w.appendFileInfo(StoreFile.BULKLOAD_TASK_KEY,
              Bytes.toBytes(context.getTaskAttemptID().toString()));
          w.appendFileInfo(StoreFile.MAJOR_COMPACTION_KEY,
              Bytes.toBytes(true));
          w.appendFileInfo(StoreFile.EXCLUDE_FROM_MINOR_COMPACTION_KEY,
              Bytes.toBytes(compactionExclude));
          w.appendFileInfo(StoreFile.TIMERANGE_KEY,
              WritableUtils.toByteArray(trt));
          w.close();
        }
      }

      public void close(TaskAttemptContext c)
      throws IOException, InterruptedException {
        for (WriterLength wl: this.writers.values()) {
          close(wl.writer);
        }
      }
    };
  }

  /*
   * Data structure to hold a Writer and amount of data written on it.
   */
  static class WriterLength {
    long written = 0;
    HFile.Writer writer = null;
  }

  /**
   * Return the start keys of all of the regions in this table,
   * as a list of ImmutableBytesWritable.
   */
  private static List<ImmutableBytesWritable> getRegionStartKeys(HTable table)
  throws IOException {
    byte[][] byteKeys = table.getStartKeys();
    ArrayList<ImmutableBytesWritable> ret =
      new ArrayList<ImmutableBytesWritable>(byteKeys.length);
    for (byte[] byteKey : byteKeys) {
      ret.add(new ImmutableBytesWritable(byteKey));
    }
    return ret;
  }

  /**
   * Write out a SequenceFile that can be read by TotalOrderPartitioner
   * that contains the split points in startKeys.
   * @param partitionsPath output path for SequenceFile
   * @param startKeys the region start keys
   * @throws IllegalArgumentException if startKeys is empty or its first
   *         region does not have an empty start key
   */
  private static void writePartitions(Configuration conf, Path partitionsPath,
      List<ImmutableBytesWritable> startKeys) throws IOException {
    if (startKeys.isEmpty()) {
      throw new IllegalArgumentException("No regions passed");
    }

    // We're generating a list of split points, and we don't ever
    // have keys < the first region (which has an empty start key)
    // so we need to remove it. Otherwise we would end up with an
    // empty reducer with index 0
    TreeSet<ImmutableBytesWritable> sorted =
      new TreeSet<ImmutableBytesWritable>(startKeys);

    ImmutableBytesWritable first = sorted.first();
    if (!first.equals(HConstants.EMPTY_BYTE_ARRAY)) {
      throw new IllegalArgumentException(
          "First region of table should have empty start key. Instead has: "
          + Bytes.toStringBinary(first.get()));
    }
    sorted.remove(first);

    // Write the actual file
    FileSystem fs = partitionsPath.getFileSystem(conf);
    SequenceFile.Writer writer = SequenceFile.createWriter(fs,
        conf, partitionsPath, ImmutableBytesWritable.class, NullWritable.class);

    try {
      for (ImmutableBytesWritable startKey : sorted) {
        writer.append(startKey, NullWritable.get());
      }
    } finally {
      writer.close();
    }
  }

  /**
   * Configure a MapReduce Job to perform an incremental load into the given
   * table. This
   * <ul>
   *   <li>Inspects the table to configure a total order partitioner</li>
   *   <li>Uploads the partitions file to the cluster and adds it to the DistributedCache</li>
   *   <li>Sets the number of reduce tasks to match the current number of regions</li>
   *   <li>Sets the output key/value class to match HFileOutputFormat's requirements</li>
   *   <li>Sets the reducer up to perform the appropriate sorting (either KeyValueSortReducer or
   *   PutSortReducer)</li>
   * </ul>
   * The user should be sure to set the map output value class to either KeyValue or Put before
   * running this function.
   */
  public static void configureIncrementalLoad(Job job, HTable table)
  throws IOException {
    Configuration conf = job.getConfiguration();
    Class<? extends Partitioner> topClass;
    try {
      topClass = getTotalOrderPartitionerClass();
    } catch (ClassNotFoundException e) {
      throw new IOException("Failed getting TotalOrderPartitioner", e);
    }
    job.setPartitionerClass(topClass);
    job.setOutputKeyClass(ImmutableBytesWritable.class);
    job.setOutputValueClass(KeyValue.class);
    job.setOutputFormatClass(HFileOutputFormat.class);

    // Based on the configured map output class, set the correct reducer to properly
    // sort the incoming values.
    // TODO it would be nice to pick one or the other of these formats.
    if (KeyValue.class.equals(job.getMapOutputValueClass())) {
      job.setReducerClass(KeyValueSortReducer.class);
    } else if (Put.class.equals(job.getMapOutputValueClass())) {
      job.setReducerClass(PutSortReducer.class);
    } else {
      LOG.warn("Unknown map output value type:" + job.getMapOutputValueClass());
    }

    LOG.info("Looking up current regions for table " + table);
    List<ImmutableBytesWritable> startKeys = getRegionStartKeys(table);
    LOG.info("Configuring " + startKeys.size() + " reduce partitions " +
        "to match current region count");
    job.setNumReduceTasks(startKeys.size());

    // Partition boundaries are written under a unique name in the job's
    // working directory and distributed to tasks via the DistributedCache.
    Path partitionsPath = new Path(job.getWorkingDirectory(),
        "partitions_" + UUID.randomUUID());
    LOG.info("Writing partition information to " + partitionsPath);

    FileSystem fs = partitionsPath.getFileSystem(conf);
    writePartitions(conf, partitionsPath, startKeys);
    partitionsPath.makeQualified(fs);

    URI cacheUri;
    try {
      // Below we make explicit reference to the bundled TOP.  Its cheating.
      // We are assume the define in the hbase bundled TOP is as it is in
      // hadoop (whether 0.20 or 0.22, etc.)
      cacheUri = new URI(partitionsPath.toString() + "#" +
        org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner.DEFAULT_PATH);
    } catch (URISyntaxException e) {
      throw new IOException(e);
    }
    DistributedCache.addCacheFile(cacheUri, conf);
    DistributedCache.createSymlink(conf);

    // Set compression algorithms based on column families
    configureCompression(table, conf);

    LOG.info("Incremental table output configured.");
  }

  /**
   * If > hadoop 0.20, then we want to use the hadoop TotalOrderPartitioner.
   * If 0.20, then we want to use the TOP that we have under hadoopbackport.
   * This method is about hbase being able to run on different versions of
   * hadoop.  In 0.20.x hadoops, we have to use the TOP that is bundled with
   * hbase.  Otherwise, we use the one in Hadoop.
   * @return Instance of the TotalOrderPartitioner class
   * @throws ClassNotFoundException If can't find a TotalOrderPartitioner.
   */
  private static Class<?
extends Partitioner> getTotalOrderPartitionerClass() throws ClassNotFoundException { Class<? extends Partitioner> clazz = null; try { clazz = (Class<? extends Partitioner>) Class.forName("org.apache.hadoop.mapreduce.lib.partition.TotalOrderPartitioner"); } catch (ClassNotFoundException e) { clazz = (Class<? extends Partitioner>) Class.forName("org.apache.hadoop.hbase.mapreduce.hadoopbackport.TotalOrderPartitioner"); } return clazz; } /** * Run inside the task to deserialize column family to compression algorithm * map from the * configuration. * * Package-private for unit tests only. * * @return a map from column family to the name of the configured compression * algorithm */ static Map<byte[], String> createFamilyCompressionMap(Configuration conf) { Map<byte[], String> compressionMap = new TreeMap<byte[], String>(Bytes.BYTES_COMPARATOR); String compressionConf = conf.get(COMPRESSION_CONF_KEY, ""); for (String familyConf : compressionConf.split("&")) { String[] familySplit = familyConf.split("="); if (familySplit.length != 2) { continue; } try { compressionMap.put(URLDecoder.decode(familySplit[0], "UTF-8").getBytes(), URLDecoder.decode(familySplit[1], "UTF-8")); } catch (UnsupportedEncodingException e) { // will not happen with UTF-8 encoding throw new AssertionError(e); } } return compressionMap; } /** * Serialize column family to compression algorithm map to configuration. * Invoked while configuring the MR job for incremental load. * * Package-private for unit tests only. 
* * @throws IOException * on failure to read column family descriptors */ static void configureCompression(HTable table, Configuration conf) throws IOException { StringBuilder compressionConfigValue = new StringBuilder(); HTableDescriptor tableDescriptor = table.getTableDescriptor(); if(tableDescriptor == null){ // could happen with mock table instance return; } Collection<HColumnDescriptor> families = tableDescriptor.getFamilies(); int i = 0; for (HColumnDescriptor familyDescriptor : families) { if (i++ > 0) { compressionConfigValue.append('&'); } compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getNameAsString(), "UTF-8")); compressionConfigValue.append('='); compressionConfigValue.append(URLEncoder.encode(familyDescriptor.getCompression().getName(), "UTF-8")); } // Get rid of the last ampersand conf.set(COMPRESSION_CONF_KEY, compressionConfigValue.toString()); } }
/* * Copyright (c) 2005, 2011, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. * */ package sun.jvm.hotspot.ui; import java.io.*; import java.awt.*; import java.awt.event.*; import javax.swing.*; import javax.swing.event.*; import javax.swing.text.*; import sun.jvm.hotspot.CommandProcessor; import sun.jvm.hotspot.debugger.*; import sun.jvm.hotspot.utilities.*; /** A JPanel subclass containing a scrollable text area displaying the debugger's console, if it has one. This should not be created for a debugger which does not have a console. 
*/
public class CommandProcessorPanel extends JPanel {

    // Command processor that actually executes the typed commands.
    private CommandProcessor commands;
    // Console text area; its EditableAtEndDocument only permits edits after the mark.
    private JTextArea editor;
    // Re-entrancy guard: true while we are programmatically mutating the document.
    private boolean updating;
    private int mark;
    private String curText;  // handles multi-line input via '\'

    // Don't run the "main" method of this class unless this flag is set to true first
    // NOTE(review): this flag is not consulted anywhere in the visible code — confirm intent.
    private static final boolean DEBUGGING = false;

    // Captures command output so it can be appended to the text area after execution.
    ByteArrayOutputStream baos = new ByteArrayOutputStream(10240);

    public CommandProcessorPanel(CommandProcessor cp) {
        commands = cp;

        setLayout(new BorderLayout());

        editor = new JTextArea();
        editor.setDocument(new EditableAtEndDocument());
        editor.setFont(GraphicsUtilities.lookupFont("Courier"));
        JScrollPane scroller = new JScrollPane();
        scroller.getViewport().add(editor);
        add(scroller, BorderLayout.CENTER);

        // Set up out
        PrintStream o = new PrintStream(baos, true);
        cp.setOutput(o);
        cp.setErr(o);

        // Watch for a newline in user input; when a complete (non-continued)
        // command line is present, execute it off the current event.
        editor.getDocument().addDocumentListener(new DocumentListener() {
            public void changedUpdate(DocumentEvent e) {
            }

            public void insertUpdate(DocumentEvent e) {
                if (updating) return;
                beginUpdate();
                editor.setCaretPosition(editor.getDocument().getLength());
                if (insertContains(e, '\n')) {
                    String cmd = getMarkedText();
                    // Handle multi-line input
                    if ((cmd.length() == 0) || (cmd.charAt(cmd.length() - 1) != '\\')) {
                        // Trim "\\n" combinations
                        final String ln = trimContinuations(cmd);
                        // Defer execution so the document mutation in progress
                        // completes before we append command output.
                        SwingUtilities.invokeLater(new Runnable() {
                            public void run() {
                                beginUpdate();
                                try {
                                    commands.executeCommand(ln, true);
                                    commands.printPrompt();
                                    Document d = editor.getDocument();
                                    try {
                                        d.insertString(d.getLength(), baos.toString(), null);
                                    } catch (BadLocationException ble) {
                                        ble.printStackTrace();
                                    }
                                    baos.reset();
                                    editor.setCaretPosition(editor.getDocument().getLength());
                                    setMark();
                                } finally {
                                    endUpdate();
                                }
                            }
                        });
                    }
                } else {
                    endUpdate();
                }
            }

            public void removeUpdate(DocumentEvent e) {
            }
        });

        // This is a bit of a hack but is probably better than relying on
        // the JEditorPane to update the caret's position precisely the
        // size of the insertion
        editor.addCaretListener(new CaretListener() {
            public void caretUpdate(CaretEvent e) {
                int len = editor.getDocument().getLength();
                if (e.getDot() > len) {
                    editor.setCaretPosition(len);
                }
            }
        });

        Box hbox = Box.createHorizontalBox();
        hbox.add(Box.createGlue());
        JButton button = new JButton("Clear Saved Text");
        button.addActionListener(new ActionListener() {
            public void actionPerformed(ActionEvent e) {
                clear();
            }
        });
        hbox.add(button);
        hbox.add(Box.createGlue());
        add(hbox, BorderLayout.SOUTH);

        clear();
    }

    public void requestFocus() {
        editor.requestFocus();
    }

    /** Wipe the console, show a fresh prompt and reset the input mark. */
    public void clear() {
        EditableAtEndDocument d = (EditableAtEndDocument) editor.getDocument();
        d.clear();
        // Empty command just makes the processor print its prompt.
        commands.executeCommand("", false);
        setMark();
        editor.requestFocus();
    }

    public void setMark() {
        ((EditableAtEndDocument) editor.getDocument()).setMark();
    }

    /** Text typed since the mark, with trailing newlines stripped; null on error. */
    public String getMarkedText() {
        try {
            String s = ((EditableAtEndDocument) editor.getDocument()).getMarkedText();
            int i = s.length();
            while ((i > 0) && (s.charAt(i - 1) == '\n')) {
                i--;
            }
            return s.substring(0, i);
        } catch (BadLocationException e) {
            e.printStackTrace();
            return null;
        }
    }

    //--------------------------------------------------------------------------------
    // Internals only below this point
    //

    private void beginUpdate() {
        updating = true;
    }

    private void endUpdate() {
        updating = false;
    }

    // True if the inserted span described by the event contains character c.
    private boolean insertContains(DocumentEvent e, char c) {
        String s = null;
        try {
            s = editor.getText(e.getOffset(), e.getLength());
            for (int i = 0; i < e.getLength(); i++) {
                if (s.charAt(i) == c) {
                    return true;
                }
            }
        } catch (BadLocationException ex) {
            ex.printStackTrace();
        }
        return false;
    }

    // Remove every backslash-newline continuation pair from the input.
    private String trimContinuations(String text) {
        int i;
        while ((i = text.indexOf("\\\n")) >= 0) {
            text = text.substring(0, i) + text.substring(i+2, text.length());
        }
        return text;
    }

    public static void main(String[] args) {
        JFrame frame = new JFrame();
        frame.getContentPane().setLayout(new BorderLayout());
        // NOTE(review): passing null makes the constructor throw NPE at
        // cp.setOutput(o) — presumably the DEBUGGING flag was meant to guard
        // this; confirm before running standalone.
        CommandProcessorPanel panel = new CommandProcessorPanel(null);
        frame.getContentPane().add(panel, BorderLayout.CENTER);
        frame.addWindowListener(new WindowAdapter() {
            public void windowClosing(WindowEvent e) {
                System.exit(0);
            }
        });
        frame.setSize(500, 500);
        frame.setVisible(true);
        panel.requestFocus();
    }
}
/* * Copyright (c) 2010-2013 Evolveum * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.evolveum.midpoint.prism; import com.evolveum.midpoint.prism.delta.ContainerDelta; import com.evolveum.midpoint.prism.delta.ItemDelta; import com.evolveum.midpoint.prism.path.ItemPath; import com.evolveum.midpoint.prism.path.NameItemPathSegment; import com.evolveum.midpoint.prism.schema.PrismSchema; import com.evolveum.midpoint.util.DOMUtil; import com.evolveum.midpoint.util.DebugDumpable; import com.evolveum.midpoint.util.DebugUtil; import com.evolveum.midpoint.util.QNameUtil; import javax.xml.namespace.QName; import org.apache.commons.lang.StringUtils; import java.util.*; /** * Definition of a property container. * <p/> * Property container groups properties into logical blocks. The reason for * grouping may be as simple as better understandability of data structure. But * the group usually means different meaning, source or structure of the data. * For example, the property container is frequently used to hold properties * that are dynamic, not fixed by a static schema. Such grouping also naturally * translates to XML and helps to "quarantine" such properties to avoid Unique * Particle Attribute problems. * <p/> * Property Container contains a set of (potentially multi-valued) properties. * The order of properties is not significant, regardless of the fact that it * may be fixed in the XML representation. 
In the XML representation, each
 * element inside Property Container must be either Property or a Property
 * Container.
 * <p/>
 * This class represents schema definition for property container. See
 * {@link Definition} for more details.
 *
 * @author Radovan Semancik
 */
public class PrismContainerDefinition<V extends Containerable> extends ItemDefinition {

    private static final long serialVersionUID = -5068923696147960699L;

    // Static definition of this container's content; null for dynamic
    // (xsd:any-style) containers.
    protected ComplexTypeDefinition complexTypeDefinition;
    // Compile-time Java class for this container's values, if one exists.
    protected Class<V> compileTimeClass;

    /**
     * The constructors should be used only occasionally (if used at all).
     * Use the factory methods in the ResourceObjectDefintion instead.
     */
    public PrismContainerDefinition(QName name, ComplexTypeDefinition complexTypeDefinition, PrismContext prismContext) {
        this(name, complexTypeDefinition, prismContext, null);
    }

    public PrismContainerDefinition(QName name, ComplexTypeDefinition complexTypeDefinition, PrismContext prismContext,
            Class<V> compileTimeClass) {
        super(name, determineTypeName(complexTypeDefinition), prismContext);
        this.complexTypeDefinition = complexTypeDefinition;
        if (complexTypeDefinition == null) {
            // No static type available: the container is a runtime (dynamic) one.
            isRuntimeSchema = true;
            super.setDynamic(true);
        } else {
            isRuntimeSchema = complexTypeDefinition.isXsdAnyMarker();
            super.setDynamic(isRuntimeSchema);
        }
        this.compileTimeClass = compileTimeClass;
    }

    private static QName determineTypeName(ComplexTypeDefinition complexTypeDefinition) {
        if (complexTypeDefinition == null) {
            // Property container without type: xsd:any
            // FIXME: this is kind of hack, but it works now
            return DOMUtil.XSD_ANY;
        }
        return complexTypeDefinition.getTypeName();
    }

    // Explicit class wins; otherwise fall back to the complex type's class.
    public Class<V> getCompileTimeClass() {
        if (compileTimeClass != null) {
            return compileTimeClass;
        }
        if (complexTypeDefinition == null) {
            return null;
        }
        return (Class<V>) complexTypeDefinition.getCompileTimeClass();
    }

    public void setCompileTimeClass(Class<V> compileTimeClass) {
        this.compileTimeClass = compileTimeClass;
    }

    protected String getSchemaNamespace() {
        return getName().getNamespaceURI();
    }

    public ComplexTypeDefinition getComplexTypeDefinition() {
        return complexTypeDefinition;
    }

    public void setComplexTypeDefinition(ComplexTypeDefinition complexTypeDefinition) {
        this.complexTypeDefinition = complexTypeDefinition;
    }

    @Override
    public boolean isAbstract() {
        if (super.isAbstract()) {
            return true;
        }
        // A container is also abstract when its complex type is abstract.
        if (complexTypeDefinition != null && complexTypeDefinition.isAbstract()) {
            return true;
        }
        return false;
    }

    /**
     * Returns true if the definition does not define specific items but it is just
     * a "wildcard" for any kind of item (usually represented as xsd:any type).
     */
    public boolean isWildcard() {
        if (getTypeName().equals(DOMUtil.XSD_ANY)) {
            return true;
        }
//        if (complexTypeDefinition != null && complexTypeDefinition.isXsdAnyMarker()) {
//            return true;
//        }
        return false;
    }

    // Re-attach a deserialized definition to a live prism context (no-op if
    // already attached); propagates to the complex type definition.
    @Override
    void revive(PrismContext prismContext) {
        if (this.prismContext != null) {
            return;
        }
        this.prismContext = prismContext;
        if (complexTypeDefinition != null) {
            complexTypeDefinition.revive(prismContext);
        }
    }

    /**
     * Finds an item definition of the given name and class directly in this
     * container's complex type. Returns null for dynamic (xsd:any) containers.
     *
     * @throws IllegalArgumentException if name or clazz is null
     */
    public <D extends ItemDefinition> D findItemDefinition(QName name, Class<D> clazz) {
        if (clazz == null) {
            throw new IllegalArgumentException("type not specified while searching for " + name + " in " + this);
        }
        if (name == null) {
            throw new IllegalArgumentException("name not specified while searching in " + this);
        }
        if (complexTypeDefinition == null) {
            // xsd:any and similar dynamic definitions
            return null;
        }
        return complexTypeDefinition.findItemDefinition(name, clazz);
    }

    /**
     * Follows the path (skipping non-name segments) down the definition tree.
     * Returns this definition when the path is empty, null when not found.
     */
    public <T extends ItemDefinition> T findItemDefinition(ItemPath path, Class<T> clazz) {
        while (!path.isEmpty() && !(path.first() instanceof NameItemPathSegment)) {
            path = path.rest();
        }
        if (path.isEmpty()) {
            return (T) this;
        }
        QName firstName = ((NameItemPathSegment)path.first()).getName();

        // we need to be compatible with older versions..soo if the path does
        // not contains qnames with namespaces defined (but the prefix was
        // specified) match definition according to the local name
        if (StringUtils.isEmpty(firstName.getNamespaceURI())) {
            for (ItemDefinition def : getDefinitions()){
                if (QNameUtil.match(firstName, def.getName())){
                    return def.findItemDefinition(path.rest(), clazz);
                }
            }
        }

        for (ItemDefinition def : getDefinitions()) {
            if (firstName.equals(def.getName())) {
                return def.findItemDefinition(path.rest(), clazz);
            }
        }
        return null;
    }

    public ItemDefinition findItemDefinition(QName name) {
        return findItemDefinition(name, ItemDefinition.class);
    }

    public ItemDefinition findItemDefinition(ItemPath path) {
        return findItemDefinition(path, ItemDefinition.class);
    }

    /**
     * Finds a PropertyDefinition by looking at the property name.
     * <p/>
     * Returns null if nothing is found.
     *
     * @param name property definition name
     * @return found property definition or null
     */
    public PrismPropertyDefinition findPropertyDefinition(QName name) {
        return findItemDefinition(name, PrismPropertyDefinition.class);
    }

    /**
     * Finds a property definition by path, recursing through subcontainers.
     *
     * @throws IllegalArgumentException if the path is empty or an intermediate
     *     subcontainer does not exist
     */
    public PrismPropertyDefinition findPropertyDefinition(ItemPath path) {
        while (!path.isEmpty() && !(path.first() instanceof NameItemPathSegment)) {
            path = path.rest();
        }
        if (path.isEmpty()) {
            throw new IllegalArgumentException("Property path is empty while searching for property definition in " + this);
        }
        QName firstName = ((NameItemPathSegment)path.first()).getName();
        if (path.size() == 1) {
            return findPropertyDefinition(firstName);
        }
        PrismContainerDefinition pcd = findContainerDefinition(firstName);
        if (pcd == null) {
            throw new IllegalArgumentException("There is no " + firstName + " subcontainer in " + this);
        }
        return pcd.findPropertyDefinition(path.rest());
    }

    public PrismReferenceDefinition findReferenceDefinition(QName name) {
        return findItemDefinition(name, PrismReferenceDefinition.class);
    }

    /**
     * Finds an inner PropertyContainerDefinition by looking at the property container name.
     * <p/>
     * Returns null if nothing is found.
     *
     * @param name property container definition name
     * @return found property container definition or null
     */
    public <X extends Containerable> PrismContainerDefinition<X> findContainerDefinition(QName name) {
        return findItemDefinition(name, PrismContainerDefinition.class);
    }

    // NOTE(review): qualifies with getNamespace() while the create* helpers
    // use getSchemaNamespace() — confirm both resolve to the same namespace.
    public <X extends Containerable> PrismContainerDefinition<X> findContainerDefinition(String name) {
        return findContainerDefinition(new QName(getNamespace(), name));
    }

    /**
     * Finds an inner PropertyContainerDefinition by following the property container path.
     * <p/>
     * Returns null if nothing is found.
     *
     * @param path property container path
     * @return found property container definition or null
     */
    public PrismContainerDefinition findContainerDefinition(ItemPath path) {
        return findItemDefinition(path, PrismContainerDefinition.class);
    }

    /**
     * Returns set of property definitions.
     * <p/>
     * WARNING: This may return definitions from the associated complex type.
     * Therefore changing the returned set may influence also the complex type definition.
     * <p/>
     * The set contains all property definitions of all types that were parsed.
     * Order of definitions is insignificant.
     *
     * @return set of definitions
     */
    public List<? extends ItemDefinition> getDefinitions() {
        if (complexTypeDefinition == null) {
            // e.g. for xsd:any containers
            // FIXME
            return new ArrayList<ItemDefinition>();
        }
        return complexTypeDefinition.getDefinitions();
    }

    /**
     * Returns set of property definitions.
     * <p/>
     * The set contains all property definitions of all types that were parsed.
     * Order of definitions is insignificant.
     * <p/>
     * The returned set is immutable! All changes may be lost.
     * <p/>
     * NOTE(review): unlike getDefinitions(), this dereferences
     * complexTypeDefinition without a null check — confirm callers never
     * invoke it on a dynamic (xsd:any) container.
     *
     * @return set of definitions
     */
    public List<PrismPropertyDefinition> getPropertyDefinitions() {
        List<PrismPropertyDefinition> props = new ArrayList<PrismPropertyDefinition>();
        for (ItemDefinition def : complexTypeDefinition.getDefinitions()) {
            if (def instanceof PrismPropertyDefinition) {
                props.add((PrismPropertyDefinition) def);
            }
        }
        return props;
    }

    /**
     * Create property container instance with a default name.
     * <p/>
     * This is a preferred way how to create property container.
     */
    @Override
    public PrismContainer<V> instantiate() {
        return instantiate(getName());
    }

    /**
     * Create property container instance with a specified name and element.
     * <p/>
     * This is a preferred way how to create property container.
     */
    @Override
    public PrismContainer<V> instantiate(QName elementName) {
        return new PrismContainer<V>(elementName, this, prismContext);
    }

    @Override
    public ContainerDelta<V> createEmptyDelta(ItemPath path) {
        return new ContainerDelta(path, this);
    }

    /**
     * Shallow clone
     */
    @Override
    public PrismContainerDefinition<V> clone() {
        PrismContainerDefinition<V> clone = new PrismContainerDefinition<V>(name, complexTypeDefinition, prismContext, compileTimeClass);
        copyDefinitionData(clone);
        return clone;
    }

    protected void copyDefinitionData(PrismContainerDefinition<V> clone) {
        super.copyDefinitionData(clone);
        clone.complexTypeDefinition = this.complexTypeDefinition;
        clone.isRuntimeSchema = this.isRuntimeSchema;
        clone.compileTimeClass = this.compileTimeClass;
    }

    // Clone this definition with one item's definition replaced; clones the
    // complex type too so the original definition is left untouched.
    public PrismContainerDefinition<V> cloneWithReplacedDefinition(QName itemName, ItemDefinition newDefinition) {
        PrismContainerDefinition<V> clone = clone();
        ComplexTypeDefinition originalComplexTypeDefinition = getComplexTypeDefinition();
        ComplexTypeDefinition cloneComplexTypeDefinition = originalComplexTypeDefinition.clone();
        clone.setComplexTypeDefinition(cloneComplexTypeDefinition);
        cloneComplexTypeDefinition.replaceDefinition(itemName, newDefinition);
        return clone;
    }

    /**
     * Creates new instance of property definition and adds it to the container.
     * <p/>
     * This is the preferred method of creating a new definition.
     *
     * @param name     name of the property (element name)
     * @param typeName XSD type of the property
     * @return created property definition
     */
    public PrismPropertyDefinition createPropertyDefinition(QName name, QName typeName) {
        PrismPropertyDefinition propDef = new PrismPropertyDefinition(name, typeName, prismContext);
        addDefinition(propDef);
        return propDef;
    }

    private void addDefinition(ItemDefinition itemDef) {
        // Raw cast: getDefinitions() is declared read-only but is backed by
        // the complex type's mutable list (see its WARNING above).
        ((Collection)getDefinitions()).add(itemDef);
    }

    /**
     * Creates new instance of property definition and adds it to the container.
     * <p/>
     * This is the preferred method of creating a new definition.
     *
     * @param name      name of the property (element name)
     * @param typeName  XSD type of the property
     * @param minOccurs minimal number of occurrences
     * @param maxOccurs maximal number of occurrences (-1 means unbounded)
     * @return created property definition
     */
    public PrismPropertyDefinition createPropertyDefinition(QName name, QName typeName,
            int minOccurs, int maxOccurs) {
        PrismPropertyDefinition propDef = new PrismPropertyDefinition(name, typeName, prismContext);
        propDef.setMinOccurs(minOccurs);
        propDef.setMaxOccurs(maxOccurs);
        addDefinition(propDef);
        return propDef;
    }

    // Creates reference to other schema
    // TODO: maybe check if the name is in different namespace
    // TODO: maybe create entirely new concept of property reference?
    public PrismPropertyDefinition createPropertyDefinition(QName name) {
        PrismPropertyDefinition propDef = new PrismPropertyDefinition(name, null, prismContext);
        addDefinition(propDef);
        return propDef;
    }

    /**
     * Creates new instance of property definition and adds it to the container.
     * <p/>
     * This is the preferred method of creating a new definition.
     *
     * @param localName name of the property (element name) relative to the schema namespace
     * @param typeName  XSD type of the property
     * @return created property definition
     */
    public PrismPropertyDefinition createPropertyDefinition(String localName, QName typeName) {
        QName name = new QName(getSchemaNamespace(), localName);
        return createPropertyDefinition(name, typeName);
    }

    /**
     * Creates new instance of property definition and adds it to the container.
     * <p/>
     * This is the preferred method of creating a new definition.
     *
     * @param localName     name of the property (element name) relative to the schema namespace
     * @param localTypeName XSD type of the property
     * @return created property definition
     */
    public PrismPropertyDefinition createPropertyDefinition(String localName, String localTypeName) {
        QName name = new QName(getSchemaNamespace(), localName);
        QName typeName = new QName(getSchemaNamespace(), localTypeName);
        return createPropertyDefinition(name, typeName);
    }

    /**
     * Creates new instance of property definition and adds it to the container.
     * <p/>
     * This is the preferred method of creating a new definition.
     *
     * @param localName     name of the property (element name) relative to the schema namespace
     * @param localTypeName XSD type of the property
     * @param minOccurs     minimal number of occurrences
     * @param maxOccurs     maximal number of occurrences (-1 means unbounded)
     * @return created property definition
     */
    public PrismPropertyDefinition createPropertyDefinition(String localName, String localTypeName,
            int minOccurs, int maxOccurs) {
        QName name = new QName(getSchemaNamespace(), localName);
        QName typeName = new QName(getSchemaNamespace(), localTypeName);
        PrismPropertyDefinition propertyDefinition = createPropertyDefinition(name, typeName);
        propertyDefinition.setMinOccurs(minOccurs);
        propertyDefinition.setMaxOccurs(maxOccurs);
        return propertyDefinition;
    }

    public PrismContainerDefinition createContainerDefinition(QName name, QName typeName) {
        return createContainerDefinition(name, typeName, 1, 1);
    }

    /**
     * Creates a child container definition whose complex type is looked up in
     * the schema registry by the type's namespace.
     *
     * @throws IllegalArgumentException if the namespace or type is not known
     */
    public PrismContainerDefinition createContainerDefinition(QName name, QName typeName,
            int minOccurs, int maxOccurs) {
        PrismSchema typeSchema = prismContext.getSchemaRegistry().findSchemaByNamespace(typeName.getNamespaceURI());
        if (typeSchema == null) {
            throw new IllegalArgumentException("Schema for namespace "+typeName.getNamespaceURI()+" is not known in the prism context");
        }
        ComplexTypeDefinition typeDefinition = typeSchema.findComplexTypeDefinition(typeName);
        if (typeDefinition == null) {
            throw new IllegalArgumentException("Type "+typeName+" is not known in the schema");
        }
        return createContainerDefinition(name, typeDefinition, minOccurs, maxOccurs);
    }

    public PrismContainerDefinition<V> createContainerDefinition(QName name,
            ComplexTypeDefinition complexTypeDefinition, int minOccurs, int maxOccurs) {
        PrismContainerDefinition<V> def = new PrismContainerDefinition<V>(name, complexTypeDefinition, prismContext);
        def.setMinOccurs(minOccurs);
        def.setMaxOccurs(maxOccurs);
        addDefinition(def);
        return def;
    }

    public PrismContainerValue<V> createValue() {
        return new PrismContainerValue<V>();
    }

    @Override
    public String debugDump(int indent) {
        StringBuilder sb = new StringBuilder();
        DebugUtil.indentDebugDump(sb, indent);
        sb.append(toString());
        if (isRuntimeSchema()) {
            sb.append(" dynamic");
        }
        for (Definition def : getDefinitions()) {
            sb.append("\n");
            if (def == this) {
                // Not perfect loop protection, but works for now
                DebugUtil.indentDebugDump(sb, indent);
                sb.append("<itself>");
            } else {
                sb.append(def.debugDump(indent + 1));
            }
        }
        return sb.toString();
    }

    // NOTE(review): dereferences complexTypeDefinition without a null check —
    // throws NPE for dynamic (xsd:any) containers; confirm callers guard this.
    public boolean isEmpty() {
        return complexTypeDefinition.isEmpty();
    }

    /**
     * Return a human readable name of this class suitable for logs.
     */
    @Override
    protected String getDebugDumpClassName() {
        return "PCD";
    }

    @Override
    public String getDocClassName() {
        return "container";
    }

    @Override
    protected void extendToString(StringBuilder sb) {
        super.extendToString(sb);
        if (isRuntimeSchema) {
            sb.append(",runtime");
        }
    }
}
/*
 * Copyright 2000-2017 JetBrains s.r.o.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.intellij.ide.ui.laf.darcula;

import com.intellij.ide.IdeEventQueue;
import com.intellij.ide.ui.laf.IntelliJLaf;
import com.intellij.ide.ui.laf.darcula.ui.DarculaEditorTextFieldBorder;
import com.intellij.openapi.editor.event.EditorMouseAdapter;
import com.intellij.openapi.editor.event.EditorMouseEvent;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.ui.*;
import com.intellij.ui.components.panels.Wrapper;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.MacUIUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.NotNull;

import javax.swing.*;
import javax.swing.plaf.InsetsUIResource;
import javax.swing.text.JTextComponent;
import javax.swing.text.Position;
import java.awt.*;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;

import static com.intellij.ide.ui.laf.darcula.ui.TextFieldWithPopupHandlerUI.isSearchFieldWithHistoryPopup;
import static com.intellij.ide.ui.laf.intellij.WinIntelliJTextFieldUI.HOVER_PROPERTY;
import static com.intellij.util.ui.MacUIUtil.MAC_FILL_BORDER;
import static javax.swing.SwingConstants.EAST;
import static javax.swing.SwingConstants.WEST;

/**
 * Shared painting helpers for the Darcula / IntelliJ look-and-feels: focus
 * rings, error/warning outline borders, and the Mac/Windows flavors of the
 * editor text field border.
 *
 * @author Konstantin Bulenkov
 */
public class DarculaUIUtil {
  // Base color of the blue focus glow (light theme value, dark theme value).
  private static final Color GLOW_COLOR = new JBColor(new Color(31, 121, 212), new Color(96, 175, 255));

  // Error/warning outline colors. The MAC_* constants carry an alpha channel
  // (0x80 prefix) and are used only under the default macOS theme; the
  // DEFAULT_* constants are the cross-platform fallbacks.
  @SuppressWarnings("UseJBColor")
  private static final Color MAC_ACTIVE_ERROR_COLOR = new Color(0x80f53b3b, true);
  private static final Color DEFAULT_ACTIVE_ERROR_COLOR = new JBColor(0xe53e4d, 0x8b3c3c);

  @SuppressWarnings("UseJBColor")
  private static final Color MAC_INACTIVE_ERROR_COLOR = new Color(0x80f7bcbc, true);
  private static final Color DEFAULT_INACTIVE_ERROR_COLOR = new JBColor(0xebbcbc, 0x725252);

  @SuppressWarnings("UseJBColor")
  private static final Color MAC_ACTIVE_WARNING_COLOR = new Color(0x80f5f510, true);
  private static final Color DEFAULT_ACTIVE_WARNING_COLOR = JBColor.YELLOW;

  @SuppressWarnings("UseJBColor")
  private static final Color MAC_INACTIVE_WARNING_COLOR = new Color(0x80f7f770, true);
  private static final Color DEFAULT_INACTIVE_WARNING_COLOR = JBColor.YELLOW.brighter();

  // Theme-sensitive wrappers: each resolves to the MAC_* variant under the
  // default macOS theme and to the DEFAULT_* variant otherwise, at paint time.
  public static final Color ACTIVE_ERROR_COLOR = new JBColor(() -> UIUtil.isUnderDefaultMacTheme() ? MAC_ACTIVE_ERROR_COLOR : DEFAULT_ACTIVE_ERROR_COLOR);
  public static final Color INACTIVE_ERROR_COLOR = new JBColor(() -> UIUtil.isUnderDefaultMacTheme() ? MAC_INACTIVE_ERROR_COLOR : DEFAULT_INACTIVE_ERROR_COLOR);
  public static final Color ACTIVE_WARNING_COLOR = new JBColor(() -> UIUtil.isUnderDefaultMacTheme() ? MAC_ACTIVE_WARNING_COLOR : DEFAULT_ACTIVE_WARNING_COLOR);
  public static final Color INACTIVE_WARNING_COLOR = new JBColor(() -> UIUtil.isUnderDefaultMacTheme() ? MAC_INACTIVE_WARNING_COLOR : DEFAULT_INACTIVE_WARNING_COLOR);

  // macOS focus ring fill colors for the "Blue" and "Graphite" appearances.
  @SuppressWarnings("UseJBColor")
  private static final Color MAC_REGULAR_COLOR = new Color(0x80479cfc, true);

  @SuppressWarnings("UseJBColor")
  private static final Color MAC_GRAPHITE_COLOR = new Color(0x8099979d, true);

  /**
   * Validation severity for outline borders; each constant knows how to pick
   * its paint color depending on whether the component is focused.
   */
  public enum Outline {
    error {
      public void setGraphicsColor(Graphics2D g, boolean focused) {
        g.setColor(focused ? ACTIVE_ERROR_COLOR : INACTIVE_ERROR_COLOR);
      }
    },

    warning {
      public void setGraphicsColor(Graphics2D g, boolean focused) {
        g.setColor(focused ? ACTIVE_WARNING_COLOR : INACTIVE_WARNING_COLOR);
      }
    };

    abstract public void setGraphicsColor(Graphics2D g, boolean focused);
  }

  /** Paints the standard glow focus ring around {@code bounds}. */
  public static void paintFocusRing(Graphics g, Rectangle bounds) {
    MacUIUtil.paintFocusRing((Graphics2D)g, GLOW_COLOR, bounds);
  }

  /** Paints an oval glow focus ring inside the given rectangle. */
  public static void paintFocusOval(Graphics g, int x, int y, int width, int height) {
    MacUIUtil.paintFocusRing((Graphics2D)g, GLOW_COLOR, new Rectangle(x, y, width, height), true);
  }

  /** Paints a search-field focus ring with the arc size derived from the bounds. */
  public static void paintSearchFocusRing(Graphics2D g, Rectangle bounds, Component component) {
    paintSearchFocusRing(g, bounds, component, -1);
  }

  /**
   * Paints a multi-layer, rounded focus ring for search fields by drawing five
   * concentric round rectangles with decreasing alpha.
   *
   * @param maxArcSize upper bound for the corner arc; pass a non-positive
   *                   value to derive the arc from the ring height
   */
  public static void paintSearchFocusRing(Graphics2D g, Rectangle bounds, Component component, int maxArcSize) {
    // Under Aqua/Darcula the glow is dimmed by reducing each layer's alpha.
    int correction = UIUtil.isUnderAquaLookAndFeel() ? 30 : UIUtil.isUnderDarcula() ? 50 : 0;
    final Color[] colors = new Color[]{
      ColorUtil.toAlpha(GLOW_COLOR, 180 - correction),
      ColorUtil.toAlpha(GLOW_COLOR, 120 - correction),
      ColorUtil.toAlpha(GLOW_COLOR, 70 - correction),
      ColorUtil.toAlpha(GLOW_COLOR, 100 - correction),
      ColorUtil.toAlpha(GLOW_COLOR, 50 - correction)
    };

    // Remember the hints so they can be restored after painting.
    final Object oldAntialiasingValue = g.getRenderingHint(RenderingHints.KEY_ANTIALIASING);
    final Object oldStrokeControlValue = g.getRenderingHint(RenderingHints.KEY_STROKE_CONTROL);

    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, MacUIUtil.USE_QUARTZ ? RenderingHints.VALUE_STROKE_PURE : RenderingHints.VALUE_STROKE_NORMALIZE);

    // The ring extends 3px beyond the component bounds on every side.
    final Rectangle r = new Rectangle(bounds.x - 3, bounds.y - 3, bounds.width + 6, bounds.height + 6);

    int arcSize = r.height - 1;
    if (maxArcSize>0) arcSize = Math.min(maxArcSize, arcSize);
    if (arcSize %2 == 1) arcSize--; // keep the arc even so the ring is symmetric

    g.setColor(component.getBackground());
    g.fillRoundRect(r.x + 2, r.y + 2, r.width - 5, r.height - 5, arcSize - 4, arcSize - 4);

    // Five concentric outlines, innermost first, fading outward then inward.
    g.setColor(colors[0]);
    g.drawRoundRect(r.x + 2, r.y + 2, r.width - 5, r.height - 5, arcSize-4, arcSize-4);

    g.setColor(colors[1]);
    g.drawRoundRect(r.x + 1, r.y + 1, r.width - 3, r.height - 3, arcSize-2, arcSize-2);

    g.setColor(colors[2]);
    g.drawRoundRect(r.x, r.y, r.width - 1, r.height - 1, arcSize, arcSize);

    g.setColor(colors[3]);
    g.drawRoundRect(r.x+3, r.y+3, r.width - 7, r.height - 7, arcSize-6, arcSize-6);

    g.setColor(colors[4]);
    g.drawRoundRect(r.x+4, r.y+4, r.width - 9, r.height - 9, arcSize-8, arcSize-8);

    // restore rendering hints
    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, oldAntialiasingValue);
    g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, oldStrokeControlValue);
  }

  /** @deprecated use {@link #paintOutlineBorder} with {@link Outline#error} instead. */
  @Deprecated
  public static void paintErrorBorder(Graphics2D g, int width, int height, int arc, boolean symmetric, boolean hasFocus) {
    paintOutlineBorder(g, width, height, arc, symmetric, hasFocus, Outline.error);
  }

  /** Paints a validation (error/warning) outline in the severity's color. */
  public static void paintOutlineBorder(Graphics2D g, int width, int height, float arc, boolean symmetric, boolean hasFocus, Outline type) {
    type.setGraphicsColor(g, hasFocus);
    doPaint(g, width, height, arc, symmetric);
  }

  public static void paintFocusBorder(Graphics2D g, int width, int height, int arc, boolean symmetric) {
    paintFocusBorder(g, width, height, (float)arc, symmetric);
  }

  /** Paints the focus border, honoring the macOS "Graphite" appearance. */
  public static void paintFocusBorder(Graphics2D g, int width, int height, float arc, boolean symmetric) {
    g.setPaint(IntelliJLaf.isGraphite() ? MAC_GRAPHITE_COLOR : MAC_REGULAR_COLOR);
    doPaint(g, width, height, arc, symmetric);
  }

  /**
   * Fills the ring-shaped area between an outer and an inner rounded rectangle
   * using the even-odd winding rule. {@code symmetric} controls whether the
   * right edge uses the same arc as the left edge.
   */
  @SuppressWarnings("SuspiciousNameCombination")
  private static void doPaint(Graphics2D g, int width, int height, float arc, boolean symmetric) {
    // bw: hairline offset for the macOS theme (half a pixel on Retina);
    // lw: the visible line width of the ring.
    double bw = UIUtil.isUnderDefaultMacTheme() ? JBUI.scale(UIUtil.isRetina(g) ? 0.5f : 1.0f) : 0.0;
    double lw = JBUI.scale(UIUtil.isUnderDefaultMacTheme() ? 3 : 2);

    g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
    g.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, MacUIUtil.USE_QUARTZ ? RenderingHints.VALUE_STROKE_PURE : RenderingHints.VALUE_STROKE_NORMALIZE);

    double outerArc = arc > 0 ? arc + lw - JBUI.scale(2f) : lw;
    double rightOuterArc = symmetric ? outerArc : JBUI.scale(6f);

    // Outer boundary of the ring (clockwise from the top-right corner).
    Path2D outerRect = new Path2D.Double(Path2D.WIND_EVEN_ODD);
    outerRect.moveTo(width - rightOuterArc, 0);
    outerRect.quadTo(width, 0, width, rightOuterArc);
    outerRect.lineTo(width, height - rightOuterArc);
    outerRect.quadTo(width, height, width - rightOuterArc, height);
    outerRect.lineTo(outerArc, height);
    outerRect.quadTo(0, height, 0, height - outerArc);
    outerRect.lineTo(0, outerArc);
    outerRect.quadTo(0, 0, outerArc, 0);
    outerRect.closePath();

    lw += bw;

    double rightInnerArc = symmetric ? outerArc : JBUI.scale(7f);

    // Inner boundary, inset by the (adjusted) line width.
    Path2D innerRect = new Path2D.Double(Path2D.WIND_EVEN_ODD);
    innerRect.moveTo(width - rightInnerArc, lw);
    innerRect.quadTo(width - lw, lw , width - lw, rightInnerArc);
    innerRect.lineTo(width - lw, height - rightInnerArc);
    innerRect.quadTo(width - lw, height - lw, width - rightInnerArc, height - lw);
    innerRect.lineTo(outerArc, height - lw);
    innerRect.quadTo(lw, height - lw, lw, height - outerArc);
    innerRect.lineTo(lw, outerArc);
    innerRect.quadTo(lw, lw, outerArc, lw);
    innerRect.closePath();

    // Even-odd fill of both paths leaves only the ring between them painted.
    Path2D path = new Path2D.Double(Path2D.WIND_EVEN_ODD);
    path.append(outerRect, false);
    path.append(innerRect, false);
    g.fill(path);
  }

  /** @return true if the event currently being dispatched has Shift pressed. */
  public static boolean isCurrentEventShiftDownEvent() {
    AWTEvent event = IdeEventQueue.getInstance().getTrueCurrentEvent();
    return (event instanceof KeyEvent && ((KeyEvent)event).isShiftDown());
  }

  /**
   * @see javax.swing.plaf.basic.BasicTextUI#getNextVisualPositionFrom(JTextComponent, int, Position.Bias, int, Position.Bias[])
   * @return -1 if visual position shouldn't be patched, otherwise selection start or selection end
   */
  public static int getPatchedNextVisualPositionFrom(JTextComponent t, int pos, int direction) {
    if (!isCurrentEventShiftDownEvent()) {
      // Plain arrow keys collapse an existing selection to its near edge.
      if (direction == WEST && t.getSelectionStart() < t.getSelectionEnd() && t.getSelectionEnd() == pos) {
        return t.getSelectionStart();
      }
      if (direction == EAST && t.getSelectionStart() < t.getSelectionEnd() && t.getSelectionStart() == pos) {
        return t.getSelectionEnd();
      }
    }
    return -1;
  }

  /**
   * macOS-style border for {@link EditorTextField}: flat gray outline with the
   * native blue/graphite focus ring or a validation outline on top.
   */
  public static class MacEditorTextFieldBorder extends DarculaEditorTextFieldBorder {
    public MacEditorTextFieldBorder(EditorTextField editorTextField, EditorEx editor) {
      super(editorTextField, editor);
    }

    @Override
    public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
      // Combo box editors get no border of their own; just clear the area.
      if (isComboBoxEditor(c)) {
        g.setColor(c.getBackground());
        g.fillRect(x, y, width, height);
        return;
      }

      EditorTextField editorTextField = UIUtil.getParentOfType(EditorTextField.class, c);
      if (editorTextField == null) return;

      Graphics2D g2 = (Graphics2D)g.create();
      try {
        // Fill the border area with the panel background when the component
        // is opaque or explicitly requests it via MAC_FILL_BORDER.
        if (c.isOpaque() || (c instanceof JComponent && ((JComponent)c).getClientProperty(MAC_FILL_BORDER) == Boolean.TRUE)) {
          g2.setColor(UIUtil.getPanelBackground());
          g2.fillRect(x, y, width, height);
        }

        // The field itself is inset 3 scaled pixels on every side.
        Rectangle2D rect = new Rectangle2D.Double(x + JBUI.scale(3), y + JBUI.scale(3), width - JBUI.scale(3)*2, height - JBUI.scale(3)*2);

        g2.setColor(c.getBackground());
        g2.fill(rect);

        if (!editorTextField.isEnabled()) {
          // Dim the border for disabled fields.
          g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.4f));
        }

        // Hairline gray outline: fill the 1px (0.5px on Retina) frame between
        // the rect and its inset copy, using even-odd winding.
        double bw = UIUtil.isRetina(g2) ? 0.5 : 1.0;
        Path2D outline = new Path2D.Double(Path2D.WIND_EVEN_ODD);
        outline.append(rect, false);
        outline.append(new Rectangle2D.Double(rect.getX() + bw, rect.getY() + bw, rect.getWidth() - 2*bw, rect.getHeight() - 2*bw), false);
        g2.setColor(Gray.xBC);
        g2.fill(outline);

        g2.translate(x, y);

        boolean hasFocus = editorTextField.getFocusTarget().hasFocus();
        Object op = editorTextField.getClientProperty("JComponent.outline");
        if (op != null) {
          // A validation outline takes precedence over the focus ring.
          paintOutlineBorder(g2, width, height, 0, true, hasFocus, Outline.valueOf(op.toString()));
        }
        else if (editorTextField.isEnabled() && editorTextField.isVisible() && hasFocus) {
          paintFocusBorder(g2, width, height, 0, true);
        }
      }
      finally {
        g2.dispose();
      }
    }

    @Override
    public Insets getBorderInsets(Component c) {
      return isComboBoxEditor(c) ? new InsetsUIResource(1, 3, 2, 3) : new InsetsUIResource(6, 7, 6, 7);
    }
  }

  /**
   * Windows-style border for {@link EditorTextField}: a flat 1px line whose
   * color reflects focus / hover / default state, or a 2px validation outline.
   * Hover state is tracked via the {@code HOVER_PROPERTY} client property.
   */
  public static class WinEditorTextFieldBorder extends DarculaEditorTextFieldBorder {
    public WinEditorTextFieldBorder(EditorTextField editorTextField, EditorEx editor) {
      super(editorTextField, editor);
      // Track mouse enter/exit on the editor to drive the hover border color.
      editor.addEditorMouseListener(new EditorMouseAdapter() {
        @Override
        public void mouseEntered(EditorMouseEvent e) {
          editorTextField.putClientProperty(HOVER_PROPERTY, Boolean.TRUE);
          editorTextField.repaint();
        }

        @Override
        public void mouseExited(EditorMouseEvent e) {
          editorTextField.putClientProperty(HOVER_PROPERTY, Boolean.FALSE);
          editorTextField.repaint();
        }
      });
    }

    @Override
    public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
      // Combo box editors get no border of their own; just clear the area.
      if (isComboBoxEditor(c)) {
        g.setColor(c.getBackground());
        g.fillRect(x, y, width, height);
        return;
      }

      EditorTextField editorTextField = UIUtil.getParentOfType(EditorTextField.class, c);
      if (editorTextField == null) return;

      Graphics2D g2 = (Graphics2D)g.create();
      try {
        Rectangle r = new Rectangle(x, y, width, height);

        // Search fields with a history popup are tightened vertically.
        if (UIUtil.getParentOfType(Wrapper.class, c) != null && isSearchFieldWithHistoryPopup(c)) {
          JBInsets.removeFrom(r, JBUI.insets(2, 0));
        }

        g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);

        // Fill background area of border
        if (isBorderOpaque() || c.getParent() != null) {
          g2.setColor(c.getParent().getBackground());

          Path2D borderArea = new Path2D.Double(Path2D.WIND_EVEN_ODD);
          borderArea.append(r, false);

          Rectangle innerRect = new Rectangle(r);
          JBInsets.removeFrom(innerRect, JBUI.insets(2));
          borderArea.append(innerRect, false);
          g2.fill(borderArea);
        }

        // draw border itself
        boolean hasFocus = editorTextField.getFocusTarget().hasFocus();
        int bw = 1; // border line width; widened to 2 for validation outlines

        Object op = editorTextField.getClientProperty("JComponent.outline");
        if (op != null) {
          Outline.valueOf(op.toString()).setGraphicsColor(g2, c.hasFocus());
          bw = 2;
        } else {
          if (hasFocus) {
            g2.setColor(UIManager.getColor("TextField.focusedBorderColor"));
          } else if (editorTextField.isEnabled() &&
                     editorTextField.getClientProperty(HOVER_PROPERTY) == Boolean.TRUE) {
            g2.setColor(UIManager.getColor("TextField.hoverBorderColor"));
          } else {
            g2.setColor(UIManager.getColor("TextField.borderColor"));
          }
          JBInsets.removeFrom(r, JBUI.insets(1));
        }

        if (!editorTextField.isEnabled()) {
          // Dim the border for disabled fields.
          g2.setComposite(AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.47f));
        }

        // Fill the frame between the rectangle and its bw-inset copy.
        Path2D border = new Path2D.Double(Path2D.WIND_EVEN_ODD);
        border.append(r, false);

        Rectangle innerRect = new Rectangle(r);
        JBInsets.removeFrom(innerRect, JBUI.insets(bw));
        border.append(innerRect, false);

        g2.fill(border);
      } finally {
        g2.dispose();
      }
    }

    @Override
    public Insets getBorderInsets(Component c) {
      if (UIUtil.getParentOfType(ComboBoxCompositeEditor.class, c) != null) {
        return JBUI.emptyInsets().asUIResource();
      } else {
        return isComboBoxEditor(c) ?
               JBUI.insets(1, 6).asUIResource() :
               JBUI.insets(4, 6).asUIResource();
      }
    }
  }

  /**
   * Mouse listener that mirrors enter/exit into a boolean client property and
   * repaints the given component, so borders can render a hover state.
   */
  public static class MouseHoverPropertyTrigger extends MouseAdapter {
    private final JComponent repaintComponent;
    private final String hoverProperty;

    public MouseHoverPropertyTrigger(@NotNull JComponent repaintComponent, @NotNull String hoverProperty) {
      this.repaintComponent = repaintComponent;
      this.hoverProperty = hoverProperty;
    }

    @Override
    public void mouseEntered(MouseEvent e) {
      setHover((JComponent)e.getComponent(), Boolean.TRUE);
    }

    @Override
    public void mouseExited(MouseEvent e) {
      setHover((JComponent)e.getComponent(), Boolean.FALSE);
    }

    private void setHover(JComponent c, Boolean value) {
      // Hover feedback is only meaningful for enabled components.
      if (c.isEnabled()) {
        c.putClientProperty(hoverProperty, value);
        repaintComponent.repaint();
      }
    }
  }
}
package com.kevelbreh.steamchat.steam2.handler;

import android.content.Context;
import android.content.Intent;
import android.os.Bundle;
import android.provider.Settings;

import com.google.protobuf.ByteString;
import com.kevelbreh.steamchat.SteamChat;
import com.kevelbreh.steamchat.account.SteamAccount;
import com.kevelbreh.steamchat.activity.AuthenticationActivity;
import com.kevelbreh.steamchat.steam.SteamID;
import com.kevelbreh.steamchat.steam.language.Language;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientAccountInfo;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientLogon;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientLogonResponse;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientSessionToken;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientChangeStatus;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientUpdateMachineAuth;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientUpdateMachineAuthResponse;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientNewLoginKey;
import com.kevelbreh.steamchat.steam.proto.SteamMessagesClientServerProto.CMsgClientNewLoginKeyAccepted;
import com.kevelbreh.steamchat.steam.security.Cryptography;
import com.kevelbreh.steamchat.steam.util.BinaryReader;
import com.kevelbreh.steamchat.steam2.SteamConnection;
import com.kevelbreh.steamchat.steam2.SteamEventBus;
import com.kevelbreh.steamchat.steam2.SteamService;
import com.kevelbreh.steamchat.steam2.packet.ProtoPacket;
import com.nostra13.universalimageloader.utils.IoUtils;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;

/**
 * Event-bus handlers for the Steam user session: logging on (with either
 * stored credentials or first-time authentication data), responding to logon
 * results, and keeping the Steam Guard sentry file / login key up to date.
 */
public class UserHandler {

    /**
     * User requested a login: stash the credential bundle on the account and
     * reset the connection so the login flow starts from a fresh channel.
     */
    @SuppressWarnings("unused")
    @SteamEventBus.UserEvent(event = SteamService.EVENT_STEAM_USER_LOGIN)
    public static void doSteamLogin(SteamService service, final Bundle data) {
        service.getSteamAccount().setData(data);
        service.resetSteamConnection();
    }

    /**
     * The encrypted channel is ready: build and send the CLIENT_LOG_ON packet,
     * using the stored account when one exists, otherwise the temporary data
     * captured by the authentication activity.
     */
    @SuppressWarnings("unused")
    @SteamEventBus.UserEvent(event = SteamService.EVENT_STEAM_CHANNEL_READY)
    public static void onSteamChannelReady(SteamService service, final Bundle data) {
        final ProtoPacket<CMsgClientLogon.Builder> request = new ProtoPacket<CMsgClientLogon.Builder>
                (CMsgClientLogon.class, Language.Message.CLIENT_LOG_ON);
        final SteamID user = new SteamID(0, 1, 1, Language.Account.INDIVIDUAL); // get proper universe.

        // If there is null data and there is no account, open up the authentication activity so that the
        // user can try log in.
        SteamAccount account = service.getSteamAccount();
        if (account.getData() == null && !account.hasAccount()) {
            SteamChat.debug("directing logging in to authentication activity.");
            Intent intent = new Intent(service, AuthenticationActivity.class);
            intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            service.startActivity(intent);
            service.getSteamConnection().close();
            return;
        }

        // If the user has an account then start logging in the user with the credentials saved within
        // the found account.
        // todo: Set the username and password found in the account or something
        if (account.hasAccount()) {
            SteamChat.debug("Logging in using account");
            request.getBody().setAccountName(account.getExtra("username"));
            request.getBody().setPassword(account.getExtra("password"));
            //packet.getBody().setLoginKey(account.getExtra("login_key"));
            request.getBody().setEresultSentryfile(Language.Result.OK);
            request.getBody().setShaSentryfile(getSentryHash(service,
                    service.getSteamAccount().getExtra("sentry")));
        }
        // Else if there is no account found but there is at least temp data set; attempt to then log
        // in the user for the first time using the temp data.
        else {
            SteamChat.debug("Logging in using temp data from authentication");
            final Bundle temp = service.getSteamAccount().getData();
            request.getBody().setAccountName(temp.getString("username"));
            request.getBody().setPassword(temp.getString("password"));

            // Steam Guard code and machine name are only present on a re-try
            // after the user received a guard email.
            final String guard = temp.getString("guard", null);
            final String machine = temp.getString("machine", null);
            if (guard != null && machine != null) {
                request.getBody().setAuthCode(guard);
                request.getBody().setMachineName(machine);
            }
            request.getBody().clearShaSentryfile();
            request.getBody().setEresultSentryfile(Language.Result.FILE_NOT_FOUND);
        }

        request.setSessionId(0);
        request.setSteamId(user.getLong());
        // Steam expects the private IP obfuscated with this constant.
        request.getBody().setObfustucatedPrivateIp(getIPAddress() ^ 0xBAADF00D);
        request.getBody().setProtocolVersion(65579); // current protocol
        request.getBody().setClientOsType(-203); // linux unknown
        request.getBody().setClientLanguage("english");
        request.getBody().setSteam2TicketRequest(false);
        request.getBody().setClientPackageVersion(1771);
        request.getBody().setMachineId(getDeviceId(service));
        service.getSteamConnection().send(request);
    }

    /**
     * Handle the logon result: on success adopt the session/steam id, start
     * the heartbeat and persist a new account; on failure close and wipe.
     * First-time logins are forwarded to the authentication activity with the
     * e-result so it can show the outcome.
     */
    @SuppressWarnings("unused")
    @SteamEventBus.SteamEvent(event = Language.Message.CLIENT_LOG_ON_RESPONSE)
    public static void onClientLogOnResponse(SteamService service, final byte[] data) {
        final ProtoPacket<CMsgClientLogonResponse.Builder> response = new ProtoPacket<CMsgClientLogonResponse.Builder>
                (CMsgClientLogonResponse.class, Language.Message.CLIENT_LOG_ON_RESPONSE);
        try {
            response.setData(data);
            response.deserialize();

            if (response.getBody().getEresult() == Language.Result.OK) {
                final long steam_id = response.getHeader().getSteamid();
                final int session_id = response.getHeader().getClientSessionid();
                final int heartbeat_interval = response.getBody().getOutOfGameHeartbeatSeconds();

                service.getSteamConnection().setSteamId(steam_id);
                service.getSteamConnection().setSessionId(session_id);
                service.getSteamConnection().startHeartbeat(heartbeat_interval);

                if (!service.getSteamAccount().hasAccount()) {
                    service.getSteamAccount().setCredentials();
                    service.getSteamAccount().setExtra("account_flags",
                            String.valueOf(response.getBody().getAccountFlags()));
                    service.getSteamAccount().setExtra("steam_id", String.valueOf(steam_id));
                }
                else {
                    // Existing account: nothing more to do, and no activity to notify.
                    return;
                }
            }
            else {
                service.getSteamConnection().close();
                SteamAccount.delete(service);
                SteamChat.debug("E-RESULT LOGIN: " + response.getBody().getEresult());
            }

            // Send a new intent to the authenticator activity with a result from the login. This will get
            // skipped if the user previously had a steam account.
            Intent intent = new Intent(service, AuthenticationActivity.class);
            intent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
            intent.putExtra("result", response.getBody().getEresult());
            service.startActivity(intent);
        }
        catch(final IOException e) {
            SteamChat.debug("onClientLogOnResponse", e.getMessage(), e);
        }
    }

    /**
     * Steam logged us off: close the connection and drop the stored account so
     * the next start goes through authentication again.
     */
    @SuppressWarnings("unused")
    @SteamEventBus.SteamEvent(event = Language.Message.CLIENT_LOGGED_OFF)
    public static void onClientLoggedOff(SteamService service, final byte[] data) {
        SteamChat.debug("onClientLoggedOff");
        service.getSteamConnection().close();
        SteamAccount.delete(service);
    }

    /**
     * Steam Guard sentry update: persist the sentry bytes to local storage and
     * acknowledge with the SHA-1 of what was written.
     */
    @SuppressWarnings("unused")
    @SteamEventBus.SteamEvent(event = Language.Message.CLIENT_UPDATE_MACHINE_AUTH)
    public static void onClientUpdateMachineAuth(SteamService service, final byte[] data) {
        ProtoPacket<CMsgClientUpdateMachineAuth.Builder> request =
                new ProtoPacket<CMsgClientUpdateMachineAuth.Builder>(CMsgClientUpdateMachineAuth.class);
        ProtoPacket<CMsgClientUpdateMachineAuthResponse.Builder> response =
                new ProtoPacket<CMsgClientUpdateMachineAuthResponse.Builder>
                        (CMsgClientUpdateMachineAuthResponse.class,
                                Language.Message.CLIENT_UPDATE_MACHINE_AUTH_RESPONSE);
        try {
            request.setData(data);
            request.deserialize();

            final String filename = request.getBody().getFilename();
            final byte[] sentry = request.getBody().getBytes().toByteArray();
            final byte[] hash = Cryptography.SHAHash(sentry);

            service.getSteamAccount().setExtra("sentry", filename);
            setSentryFile(service, filename, sentry);

            response.setTargetJobId(request.getHeader().getJobidSource());
            response.getBody().setCubwrote(request.getBody().getCubtowrite());
            response.getBody().setEresult(Language.Result.OK);
            response.getBody().setFilename(filename);
            response.getBody().setFilesize(sentry.length);
            response.getBody().setGetlasterror(0);
            response.getBody().setOffset(request.getBody().getOffset());
            response.getBody().setShaFile(ByteString.copyFrom(hash));
            response.getBody().setOtpIdentifier(request.getBody().getOtpIdentifier());
            response.getBody().setOtpType(request.getBody().getOtpType());
            response.getBody().setOtpValue(0);
            service.getSteamConnection().send(response);
        }
        catch(final IOException e) {
            // Failed to update the auth machine. Perhaps we should dispatch a notification to the user
            // that authenticating this machine failed. Then they know why they have to log in and out the
            // whole time.
            SteamChat.debug("onClientUpdateMachineAuth", e.getMessage(), e);
        }
    }

    /**
     * Acknowledge a new login key from Steam.
     * NOTE(review): the key itself (getLoginKey()) is accepted but never
     * persisted, so key-based re-login cannot work yet — confirm intent.
     */
    @SteamEventBus.SteamEvent(event = Language.Message.CLIENT_NEW_LOGIN_KEY)
    public static void onClientNewLoginKey(SteamService service, final byte[] data) {
        ProtoPacket<CMsgClientNewLoginKey.Builder> request =
                new ProtoPacket<CMsgClientNewLoginKey.Builder>(CMsgClientNewLoginKey.class);
        ProtoPacket<CMsgClientNewLoginKeyAccepted.Builder> response =
                new ProtoPacket<CMsgClientNewLoginKeyAccepted.Builder>(CMsgClientNewLoginKeyAccepted.class,
                        Language.Message.CLIENT_NEW_LOGIN_KEY_ACCEPTED);
        try {
            request.setData(data);
            request.deserialize();

            response.setTargetJobId(request.getSourceJobId());
            response.getBody().setUniqueId(request.getBody().getUniqueId());
            service.getSteamConnection().send(response);
        }
        catch(final IOException e) {
            // Failed to accept a new login key for the user. Next time the user the TCP connections tries to
            // connect it would most likely fail and request the user to use their username and password.
            SteamChat.debug("onClientNewLoginKey", e.getMessage(), e);
        }
    }

    /**
     * Account info arrived: echo the persona name back and go online.
     */
    @SteamEventBus.SteamEvent(event = Language.Message.CLIENT_ACCOUNT_INFO)
    public static void onClientAccountInfo(SteamService service, final byte[] data) throws IOException {
        ProtoPacket<CMsgClientAccountInfo.Builder> packet =
                new ProtoPacket<CMsgClientAccountInfo.Builder>(CMsgClientAccountInfo.class);
        ProtoPacket<CMsgClientChangeStatus.Builder> request =
                new ProtoPacket<CMsgClientChangeStatus.Builder>(CMsgClientChangeStatus.class,
                        Language.Message.CLIENT_CHANGE_STATUS);

        packet.setData(data);
        packet.deserialize();

        request.getBody().setPlayerName(packet.getBody().getPersonaName());
        request.getBody().setPersonaState(Language.PersonaState.ONLINE);
        service.getSteamConnection().send(request);
    }

    /**
     * @return a steam suitable device id for this device. The device id will only change once the
     * user does a factory restore on their phone.
     */
    private static ByteString getDeviceId(Context context) {
        final String android_id = Settings.Secure.getString(context.getContentResolver(),
                Settings.Secure.ANDROID_ID);
        return ByteString.copyFrom(android_id.getBytes());
    }

    /**
     * @return an integer representing the IP address for this device. This performs a network activity
     * so can not be called on the main thread.
     */
    private static int getIPAddress() {
        try {
            final ByteBuffer buff = ByteBuffer.wrap(InetAddress.getLocalHost().getAddress());
            return (int) (buff.getInt() & 0xFFFFFFFFL);
        }
        catch(UnknownHostException e) {
            SteamChat.debug(e.toString());
            return 0;
        }
    }

    /**
     * Once a user has given access for this device to steam guard, a sentry file is created containing
     * some random data to identify this machine. Thereon after this hash needs to be sent to steam
     * on every login to avoid steam guard from being nasty.
     *
     * @param context of the application.
     * @param filename of the sentry file.
     * @return an SHA1 hash of the steam sentry file contents, or null if the file could not be read.
     */
    private static ByteString getSentryHash(Context context, String filename) {
        File file = context.getFileStreamPath(filename);
        byte[] data = new byte[(int) file.length()];
        BufferedInputStream stream = null;
        try {
            stream = new BufferedInputStream(new FileInputStream(file));
            // FIX: InputStream.read() may return fewer bytes than requested; the
            // previous single read() could hash a partially-filled buffer. Loop
            // until the buffer is full (or EOF) so the hash covers the whole file.
            int offset = 0;
            while (offset < data.length) {
                final int count = stream.read(data, offset, data.length - offset);
                if (count < 0) {
                    break; // file shrank since length() was sampled; hash what we have
                }
                offset += count;
            }
            return ByteString.copyFrom(Cryptography.SHAHash(data));
        }
        catch(final IOException e) {
            SteamChat.debug(e.toString());
            return null;
        }
        finally {
            // FIX: the stream previously leaked when read() threw.
            if (stream != null) {
                try {
                    stream.close();
                }
                catch(final IOException ignored) {
                    // best-effort close; nothing useful to do here
                }
            }
        }
    }

    /**
     * Save the sentry data received from a machine auth update request. This allows the user to log in
     * and bypassing steam guard as the device will be white listed with the steam service.
     *
     * @param context of the application.
     * @param filename used for the sentry file.
     * @param data to be written to file.
     * @throws IOException is thrown if there are any errors while trying to create / write to file.
     */
    private static void setSentryFile(Context context, String filename, final byte[] data) throws IOException {
        FileOutputStream stream = context.openFileOutput(filename, Context.MODE_PRIVATE);
        try {
            stream.write(data);
        }
        finally {
            // FIX: close even when write() throws (previously leaked).
            stream.close();
        }
    }

    /**
     * Helpful little method to dump the contents of a byte array. This was created mainly to see if the sentry
     * file contents and hash matched.
     *
     * @param name to be used for logging.
     * @param data to be dumped.
     */
    @SuppressWarnings("unused")
    private static void dumpBytes(String name, byte[] data) {
        StringBuilder sb = new StringBuilder();
        for (byte b : data) {
            sb.append(String.format("%02X ", b));
        }
        SteamChat.debug(name, sb.toString());
    }
}
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.ignite.internal; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.UUID; import java.util.concurrent.atomic.AtomicInteger; import org.apache.ignite.IgniteCheckedException; import org.apache.ignite.IgniteException; import org.apache.ignite.compute.ComputeJobSibling; import org.apache.ignite.compute.ComputeTaskSessionAttributeListener; import org.apache.ignite.compute.ComputeTaskSessionScope; import org.apache.ignite.internal.managers.deployment.GridDeployment; import org.apache.ignite.internal.util.future.GridFutureAdapter; import org.apache.ignite.internal.util.future.IgniteFutureImpl; import org.apache.ignite.internal.util.typedef.F; import org.apache.ignite.internal.util.typedef.internal.A; import org.apache.ignite.internal.util.typedef.internal.S; import org.apache.ignite.internal.util.typedef.internal.U; import org.apache.ignite.lang.IgniteFuture; import org.apache.ignite.lang.IgniteUuid; import org.jetbrains.annotations.Nullable; /** * Task session. 
 * Grid task session implementation. Holds per-task state shared between the task and its
 * jobs: attributes (with blocking waits), job siblings, checkpoints, SPI names and the
 * map future. Attribute/checkpoint features are only available when the session has
 * "full support" enabled; see {@link #checkFullSupport()}.
 */
public class GridTaskSessionImpl implements GridTaskSessionInternal {
    /** Task name. */
    private final String taskName;

    /** Deployment the task class was deployed with ({@code null} if not explicitly deployed). */
    private final GridDeployment dep;

    /** Task class name (may be {@code null} if task was not explicitly deployed). */
    private final String taskClsName;

    /** Unique session ID. */
    private final IgniteUuid sesId;

    /** Task execution start time. */
    private final long startTime;

    /** Task execution end time; also caps how long attribute waits may block. */
    private final long endTime;

    /** ID of the node that originated the task. */
    private final UUID taskNodeId;

    /** Kernal context. */
    private final GridKernalContext ctx;

    /** Job siblings (stored as an unmodifiable view); guarded by {@link #mux}. */
    private Collection<ComputeJobSibling> siblings;

    /** Session attributes; guarded by {@link #mux}. */
    private Map<Object, Object> attrs;

    /** Attribute listeners; replaced copy-on-write so notification can happen outside the lock. */
    private List<ComputeTaskSessionAttributeListener> lsnrs;

    /** Task class loader; guarded by {@link #mux}. */
    private ClassLoader clsLdr;

    /** Closed flag; volatile so the fast path in {@link #onClosed()} can check it without the lock. */
    private volatile boolean closed;

    /** Checkpoint SPI name. */
    private volatile String cpSpi;

    /** Failover SPI name. */
    private volatile String failSpi;

    /** Load balancing SPI name. */
    private volatile String loadSpi;

    /** Mutex guarding mutable state; also used for attribute wait/notify. */
    private final Object mux = new Object();

    /** Usage (reference) count; starts at 1 for the creator. */
    private final AtomicInteger usage = new AtomicInteger(1);

    /** Whether session attributes and checkpoints are enabled for this task. */
    private final boolean fullSup;

    /** Topology (node IDs) for the task, or {@code null} to use the whole grid. */
    private final Collection<UUID> top;

    /** Subject ID. */
    private final UUID subjId;

    /** Future completed when the task has been mapped (also completed on task finish — see {@link #onDone()}). */
    private final IgniteFutureImpl mapFut;

    /**
     * Creates a task session.
     *
     * @param taskNodeId Task node ID.
     * @param taskName Task name.
     * @param dep Deployment.
     * @param taskClsName Task class name.
     * @param sesId Task session ID.
     * @param top Topology.
     * @param startTime Task execution start time.
     * @param endTime Task execution end time.
     * @param siblings Collection of siblings.
     * @param attrs Session attributes.
     * @param ctx Grid Kernal Context.
     * @param fullSup Session full support enabled flag.
     * @param subjId Subject ID.
     */
    public GridTaskSessionImpl(
        UUID taskNodeId,
        String taskName,
        @Nullable GridDeployment dep,
        String taskClsName,
        IgniteUuid sesId,
        @Nullable Collection<UUID> top,
        long startTime,
        long endTime,
        Collection<ComputeJobSibling> siblings,
        @Nullable Map<Object, Object> attrs,
        GridKernalContext ctx,
        boolean fullSup,
        UUID subjId) {
        assert taskNodeId != null;
        assert taskName != null;
        assert sesId != null;
        assert ctx != null;

        this.taskNodeId = taskNodeId;
        this.taskName = taskName;
        this.dep = dep;
        this.top = top;

        // Note that class name might be null here if task was not explicitly
        // deployed.
        this.taskClsName = taskClsName;
        this.sesId = sesId;
        this.startTime = startTime;
        this.endTime = endTime;
        this.siblings = siblings != null ? Collections.unmodifiableCollection(siblings) : null;
        this.ctx = ctx;

        // Copy the initial attributes so later session mutations do not leak to the caller's map.
        if (attrs != null && !attrs.isEmpty()) {
            this.attrs = new HashMap<>(attrs.size(), 1.0f);

            this.attrs.putAll(attrs);
        }

        this.fullSup = fullSup;
        this.subjId = subjId;

        mapFut = new IgniteFutureImpl(new GridFutureAdapter());
    }

    /** {@inheritDoc} */
    @Override public boolean isFullSupport() {
        return fullSup;
    }

    /** {@inheritDoc} */
    @Override public UUID subjectId() {
        return subjId;
    }

    /**
     * Ensures that session attributes and checkpoints are enabled for this task.
     *
     * @throws IllegalStateException If the task does not have full session support.
     */
    protected void checkFullSupport() {
        if (!fullSup)
            throw new IllegalStateException("Sessions attributes and checkpoints are disabled by default " +
                "for better performance (to enable, annotate task class with " +
                "@ComputeTaskSessionFullSupport annotation).");
    }

    /**
     * Increments the usage count unless the session has already been fully released.
     *
     * @return {@code True} if session was acquired.
     */
    public boolean acquire() {
        // CAS loop: never resurrect a session whose count has dropped to zero.
        while (true) {
            int cur = usage.get();

            if (cur == 0)
                return false;

            if (usage.compareAndSet(cur, cur + 1))
                return true;
        }
    }

    /**
     * Decrements the usage count.
     *
     * @return {@code True} if session cannot be acquired any more.
     */
    public boolean release() {
        while (true) {
            int cur = usage.get();

            assert cur > 0;

            if (usage.compareAndSet(cur, cur - 1))
                // CASed to 0.
                return cur == 1;
        }
    }

    /** {@inheritDoc} */
    @Override public GridTaskSessionInternal session() {
        return this;
    }

    /** {@inheritDoc} */
    @Nullable @Override public IgniteUuid getJobId() {
        // Task-level session is not bound to any particular job.
        return null;
    }

    /** {@inheritDoc} */
    @Override public void onClosed() {
        // Lock-free fast path, then double-check under the lock.
        if (closed)
            return;

        synchronized (mux) {
            if (closed)
                return;

            closed = true;

            // Wake up any threads blocked in waitForAttribute*() so they can observe the close.
            if (fullSup)
                mux.notifyAll();
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isClosed() {
        return closed;
    }

    /**
     * @return Task node ID.
     */
    @Override public UUID getTaskNodeId() {
        return taskNodeId;
    }

    /** {@inheritDoc} */
    @Override public long getStartTime() {
        return startTime;
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public <K, V> V waitForAttribute(K key, long timeout) throws InterruptedException {
        A.notNull(key, "key");

        checkFullSupport();

        if (timeout == 0)
            timeout = Long.MAX_VALUE;

        long now = U.currentTimeMillis();

        // Prevent overflow.
        long end = now + timeout < 0 ? Long.MAX_VALUE : now + timeout;

        // Don't wait longer than session timeout.
        if (end > endTime)
            end = endTime;

        synchronized (mux) {
            while (!closed && (attrs == null || !attrs.containsKey(key)) && now < end) {
                mux.wait(end - now);

                now = U.currentTimeMillis();
            }

            if (closed)
                throw new InterruptedException("Session was closed: " + this);

            return attrs != null ? (V)attrs.get(key) : null;
        }
    }

    /** {@inheritDoc} */
    @Override public boolean waitForAttribute(Object key, Object val, long timeout) throws InterruptedException {
        A.notNull(key, "key");

        checkFullSupport();

        if (timeout == 0)
            timeout = Long.MAX_VALUE;

        long now = U.currentTimeMillis();

        // Prevent overflow.
        long end = now + timeout < 0 ? Long.MAX_VALUE : now + timeout;

        // Don't wait longer than session timeout.
        if (end > endTime)
            end = endTime;

        synchronized (mux) {
            boolean isFound = false;

            while (!closed && !(isFound = isAttributeSet(key, val)) && now < end) {
                mux.wait(end - now);

                now = U.currentTimeMillis();
            }

            if (closed)
                throw new InterruptedException("Session was closed: " + this);

            return isFound;
        }
    }

    /** {@inheritDoc} */
    @Override public Map<?, ?> waitForAttributes(Collection<?> keys, long timeout) throws InterruptedException {
        A.notNull(keys, "keys");

        checkFullSupport();

        if (keys.isEmpty())
            return Collections.emptyMap();

        if (timeout == 0)
            timeout = Long.MAX_VALUE;

        long now = U.currentTimeMillis();

        // Prevent overflow.
        long end = now + timeout < 0 ? Long.MAX_VALUE : now + timeout;

        // Don't wait longer than session timeout.
        if (end > endTime)
            end = endTime;

        synchronized (mux) {
            // Block until every requested key is present, the session closes, or the deadline passes.
            while (!closed && (attrs == null || !attrs.keySet().containsAll(keys)) && now < end) {
                mux.wait(end - now);

                now = U.currentTimeMillis();
            }

            if (closed)
                throw new InterruptedException("Session was closed: " + this);

            Map<Object, Object> retVal = new HashMap<>(keys.size(), 1.0f);

            if (attrs != null)
                for (Object key : keys)
                    retVal.put(key, attrs.get(key));

            return retVal;
        }
    }

    /** {@inheritDoc} */
    @Override public boolean waitForAttributes(Map<?, ?> attrs, long timeout) throws InterruptedException {
        A.notNull(attrs, "attrs");

        checkFullSupport();

        if (attrs.isEmpty())
            return true;

        if (timeout == 0)
            timeout = Long.MAX_VALUE;

        long now = U.currentTimeMillis();

        // Prevent overflow.
        long end = now + timeout < 0 ? Long.MAX_VALUE : now + timeout;

        // Don't wait longer than session timeout.
        if (end > endTime)
            end = endTime;

        synchronized (mux) {
            boolean isFound = false;

            // Wait until all key/value pairs match, the session closes, or the deadline passes.
            while (!closed && now < end) {
                isFound = this.attrs != null && this.attrs.entrySet().containsAll(attrs.entrySet());

                if (isFound)
                    break;

                mux.wait(end - now);

                now = U.currentTimeMillis();
            }

            if (closed)
                throw new InterruptedException("Session was closed: " + this);

            return isFound;
        }
    }

    /** {@inheritDoc} */
    @Override public String getTaskName() {
        return taskName;
    }

    /**
     * Returns task class name.
     *
     * @return Task class name.
     */
    public String getTaskClassName() {
        return taskClsName;
    }

    /** {@inheritDoc} */
    @Override public IgniteUuid getId() {
        return sesId;
    }

    /** {@inheritDoc} */
    @Override public long getEndTime() {
        return endTime;
    }

    /**
     * @return Task version (empty string when there is no deployment).
     */
    public String getUserVersion() {
        return dep == null ? "" : dep.userVersion();
    }

    /** {@inheritDoc} */
    @Override public ClassLoader getClassLoader() {
        synchronized (mux) {
            return clsLdr;
        }
    }

    /**
     * @param clsLdr Class loader.
     */
    public void setClassLoader(ClassLoader clsLdr) {
        assert clsLdr != null;

        synchronized (mux) {
            this.clsLdr = clsLdr;
        }
    }

    /** {@inheritDoc} */
    @Override public boolean isTaskNode() {
        return taskNodeId.equals(ctx.discovery().localNode().id());
    }

    /** {@inheritDoc} */
    @Override public Collection<ComputeJobSibling> refreshJobSiblings() {
        // Base implementation has no remote state to refresh; just return the current siblings.
        return getJobSiblings();
    }

    /** {@inheritDoc} */
    @Override public Collection<ComputeJobSibling> getJobSiblings() {
        synchronized (mux) {
            return siblings;
        }
    }

    /**
     * @param siblings Siblings.
     */
    public void setJobSiblings(Collection<ComputeJobSibling> siblings) {
        synchronized (mux) {
            this.siblings = Collections.unmodifiableCollection(siblings);
        }
    }

    /**
     * Appends siblings to the existing collection (task node only).
     *
     * @param siblings Siblings.
     */
    public void addJobSiblings(Collection<ComputeJobSibling> siblings) {
        assert isTaskNode();

        synchronized (mux) {
            // Rebuild the collection since the stored one is unmodifiable.
            Collection<ComputeJobSibling> tmp = new ArrayList<>(this.siblings.size() + siblings.size());

            tmp.addAll(this.siblings);
            tmp.addAll(siblings);

            this.siblings = Collections.unmodifiableCollection(tmp);
        }
    }

    /** {@inheritDoc} */
    @Override public ComputeJobSibling getJobSibling(IgniteUuid jobId) {
        A.notNull(jobId, "jobId");

        Collection<ComputeJobSibling> tmp = getJobSiblings();

        for (ComputeJobSibling sibling : tmp)
            if (sibling.getJobId().equals(jobId))
                return sibling;

        return null;
    }

    /** {@inheritDoc} */
    @Override public void setAttribute(Object key, Object val) {
        A.notNull(key, "key");

        checkFullSupport();

        setAttributes(Collections.singletonMap(key, val));
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public <K, V> V getAttribute(K key) {
        A.notNull(key, "key");

        checkFullSupport();

        synchronized (mux) {
            return attrs != null ? (V)attrs.get(key) : null;
        }
    }

    /** {@inheritDoc} */
    @Override public void setAttributes(Map<?, ?> attrs) {
        A.notNull(attrs, "attrs");

        checkFullSupport();

        if (attrs.isEmpty())
            return;

        // Note that there is no mux notification in this block.
        // The reason is that we wait for ordered attributes to
        // come back from task prior to notification. The notification
        // will happen in 'setInternal(...)' method.
        synchronized (mux) {
            if (this.attrs == null)
                this.attrs = new HashMap<>(attrs.size(), 1.0f);

            this.attrs.putAll(attrs);
        }

        // Propagate the attributes to the jobs via the task processor.
        if (isTaskNode()) {
            try {
                ctx.task().setAttributes(this, attrs);
            }
            catch (IgniteCheckedException e) {
                throw U.convertException(e);
            }
        }
    }

    /** {@inheritDoc} */
    @Override public Map<Object, Object> getAttributes() {
        checkFullSupport();

        synchronized (mux) {
            return attrs == null || attrs.isEmpty() ? Collections.emptyMap() : U.sealMap(attrs);
        }
    }

    /**
     * Applies attributes received from the task, waking up waiters and firing listeners.
     *
     * @param attrs Attributes to set.
     */
    public void setInternal(Map<?, ?> attrs) {
        A.notNull(attrs, "attrs");

        checkFullSupport();

        if (attrs.isEmpty())
            return;

        List<ComputeTaskSessionAttributeListener> lsnrs;

        synchronized (mux) {
            if (this.attrs == null)
                this.attrs = new HashMap<>(attrs.size(), 1.0f);

            this.attrs.putAll(attrs);

            lsnrs = this.lsnrs;

            // Unblock waitForAttribute*() callers.
            mux.notifyAll();
        }

        // Notify listeners outside of the lock to avoid deadlocks with listener code.
        if (lsnrs != null)
            for (Map.Entry<?, ?> entry : attrs.entrySet())
                for (ComputeTaskSessionAttributeListener lsnr : lsnrs)
                    lsnr.onAttributeSet(entry.getKey(), entry.getValue());
    }

    /** {@inheritDoc} */
    @Override public void addAttributeListener(ComputeTaskSessionAttributeListener lsnr, boolean rewind) {
        A.notNull(lsnr, "lsnr");

        checkFullSupport();

        Map<Object, Object> attrs = null;

        List<ComputeTaskSessionAttributeListener> lsnrs;

        synchronized (mux) {
            // Copy-on-write so readers never see a list being mutated.
            lsnrs = this.lsnrs != null ?
                new ArrayList<ComputeTaskSessionAttributeListener>(this.lsnrs.size() + 1) :
                new ArrayList<ComputeTaskSessionAttributeListener>(1);

            if (this.lsnrs != null)
                lsnrs.addAll(this.lsnrs);

            lsnrs.add(lsnr);

            this.lsnrs = lsnrs;

            // Snapshot current attributes so the rewind happens outside of the lock.
            if (rewind && this.attrs != null)
                attrs = new HashMap<>(this.attrs);
        }

        // Replay existing attributes to the new listener if requested.
        if (attrs != null)
            for (Map.Entry<Object, Object> entry : attrs.entrySet())
                for (ComputeTaskSessionAttributeListener l : lsnrs)
                    l.onAttributeSet(entry.getKey(), entry.getValue());
    }

    /** {@inheritDoc} */
    @Override public boolean removeAttributeListener(ComputeTaskSessionAttributeListener lsnr) {
        A.notNull(lsnr, "lsnr");

        checkFullSupport();

        synchronized (mux) {
            if (lsnrs == null)
                return false;

            // Copy-on-write removal; drop the list entirely when it becomes empty.
            List<ComputeTaskSessionAttributeListener> lsnrs = new ArrayList<>(this.lsnrs);

            boolean rmv = lsnrs.remove(lsnr);

            this.lsnrs = lsnrs.isEmpty() ? null : lsnrs;

            return rmv;
        }
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state) {
        saveCheckpoint(key, state, ComputeTaskSessionScope.SESSION_SCOPE, 0);
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state, ComputeTaskSessionScope scope, long timeout) {
        saveCheckpoint(key, state, scope, timeout, true);
    }

    /** {@inheritDoc} */
    @Override public void saveCheckpoint(String key, Object state, ComputeTaskSessionScope scope, long timeout,
        boolean overwrite) {
        saveCheckpoint0(this, key, state, scope, timeout, overwrite);
    }

    /**
     * Stores a checkpoint via the checkpoint processor.
     *
     * @param ses Session.
     * @param key Key.
     * @param state State.
     * @param scope Scope.
     * @param timeout Timeout.
     * @param overwrite Overwrite.
     * @throws IgniteException If failed.
     */
    protected void saveCheckpoint0(GridTaskSessionInternal ses, String key, Object state, ComputeTaskSessionScope scope,
        long timeout, boolean overwrite) throws IgniteException {
        assert ses != null; // Internal call, so assert should be enough.

        A.notNull(key, "key");
        A.ensure(timeout >= 0, "timeout >= 0");

        if (closed)
            throw new IgniteException("Failed to save checkpoint (session closed): " + ses);

        checkFullSupport();

        try {
            ctx.checkpoint().storeCheckpoint(ses, key, state, scope, timeout, overwrite);
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }

    /** {@inheritDoc} */
    @SuppressWarnings("unchecked")
    @Override public <T> T loadCheckpoint(String key) {
        return loadCheckpoint0(this, key);
    }

    /**
     * Loads a checkpoint via the checkpoint processor.
     *
     * @param ses Session.
     * @param key Key.
     * @return Checkpoint.
     * @throws IgniteException If failed.
     */
    protected <T> T loadCheckpoint0(GridTaskSessionInternal ses, String key) {
        assert ses != null; // Internal call, so assert should be enough.

        A.notNull(key, "key");

        if (closed)
            throw new IgniteException("Failed to load checkpoint (session closed): " + ses);

        checkFullSupport();

        try {
            return (T) ctx.checkpoint().loadCheckpoint(ses, key);
        }
        catch (IgniteCheckedException e) {
            throw U.convertException(e);
        }
    }

    /** {@inheritDoc} */
    @Override public boolean removeCheckpoint(String key) {
        return removeCheckpoint0(this, key);
    }

    /**
     * Removes a checkpoint via the checkpoint processor.
     *
     * @param ses Session.
     * @param key Key.
     * @return {@code True} if removed.
     * @throws IgniteException If failed.
     */
    protected boolean removeCheckpoint0(GridTaskSessionInternal ses, String key) throws IgniteException {
        assert ses != null; // Internal call, so assert should be enough.

        A.notNull(key, "key");

        if (closed)
            throw new IgniteException("Failed to remove checkpoint (session closed): " + ses);

        checkFullSupport();

        return ctx.checkpoint().removeCheckpoint(ses, key);
    }

    /** {@inheritDoc} */
    @Override public Collection<UUID> getTopology() {
        // Fall back to all discovered nodes when no explicit topology was given.
        return top != null ? top : F.nodeIds(ctx.discovery().allNodes());
    }

    /**
     * Checks whether the given key is mapped to the given value (must be called under {@link #mux}).
     *
     * @param key Key.
     * @param val Value.
     * @return {@code true} if key/value pair was set.
     */
    private boolean isAttributeSet(Object key, Object val) {
        assert Thread.holdsLock(mux);
        assert fullSup;

        if (attrs != null && attrs.containsKey(key)) {
            Object stored = attrs.get(key);

            if (val == null && stored == null)
                return true;

            if (val != null && stored != null)
                return val.equals(stored);
        }

        return false;
    }

    /** {@inheritDoc} */
    @Override public String getCheckpointSpi() {
        return cpSpi;
    }

    /**
     * @param cpSpi Checkpoint SPI name.
     */
    public void setCheckpointSpi(String cpSpi) {
        this.cpSpi = cpSpi;
    }

    /**
     * @return Failover SPI name.
     */
    public String getFailoverSpi() {
        return failSpi;
    }

    /**
     * @param failSpi Failover SPI name.
     */
    public void setFailoverSpi(String failSpi) {
        this.failSpi = failSpi;
    }

    /**
     * @return Load balancing SPI name.
     */
    public String getLoadBalancingSpi() {
        return loadSpi;
    }

    /**
     * @param loadSpi Load balancing SPI name.
     */
    public void setLoadBalancingSpi(String loadSpi) {
        this.loadSpi = loadSpi;
    }

    /**
     * @return Task internal version (0 when there is no deployment).
     */
    public long getSequenceNumber() {
        return dep == null ? 0 : dep.sequenceNumber();
    }

    /**
     * @return Deployment.
     */
    public GridDeployment deployment() {
        return dep;
    }

    /**
     * Task map callback. Completes the map future.
     */
    public void onMapped() {
        ((GridFutureAdapter)mapFut.internalFuture()).onDone();
    }

    /**
     * Finish task callback. Also completes the map future so that waiters are
     * released even if the task finished before (or without) being mapped.
     */
    public void onDone() {
        ((GridFutureAdapter)mapFut.internalFuture()).onDone();
    }

    /** {@inheritDoc} */
    @Override public IgniteFuture<?> mapFuture() {
        return mapFut;
    }

    /** {@inheritDoc} */
    @Override public String toString() {
        return S.toString(GridTaskSessionImpl.class, this);
    }
}
/* * TopStack (c) Copyright 2012-2013 Transcend Computing, Inc. * * Licensed under the Apache License, Version 2.0 (the License); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an AS IS BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /* * Copyright 2010 Amazon.com, Inc. or its affiliates. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"). * You may not use this file except in compliance with the License. * A copy of the License is located at * * http://aws.amazon.com/apache2.0 * * or in the "license" file accompanying this file. This file is distributed * on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either * express or implied. See the License for the specific language governing * permissions and limitations under the License. */ package com.amazonaws.services.autoscaling.model; import com.amazonaws.AmazonWebServiceRequest; import com.amazonaws.services.cloudwatch.model.Dimension; /** * Container for the parameters to the * {@link com.amazonaws.services.autoscaling.AmazonAutoScaling#createOrUpdateScalingTrigger(CreateOrUpdateScalingTriggerRequest) * CreateOrUpdateScalingTrigger operation}. * <p> * Sets the parameters that govern how and when to scale an AutoScaling group. * </p> * * @see com.amazonaws.services.autoscaling.AmazonAutoScaling#createOrUpdateScalingTrigger(CreateOrUpdateScalingTriggerRequest) */ public class CreateOrUpdateScalingTriggerRequest extends AmazonWebServiceRequest { /** * The name for the trigger. 
 * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String triggerName; /** * The name of the <a>AutoScalingGroup</a> to be associated with the * trigger. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 100<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String autoScalingGroupName; /** * The measure name associated with the metric used by the trigger to * determine when to fire. For more information, please see <a * href="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch product * documentation</a>. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String measureName; /** * The statistic that the trigger uses when fetching metrics to examine. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>(Average|Sum|Minimum|Maximum)<br/> */ private String statistic; /** * A list of dimensions associated with the metric used by the trigger to * determine whether to fire. For more information, please see <a * href="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch product * documentation</a>. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> */ private java.util.List<Dimension> dimensions; /** * The period, in seconds, associated with the metric statistics. */ private Integer period; /** * * <p> * <b>Constraints:</b><br/> * <b>Pattern: * </b>(Seconds|Bytes|Bits|Percent|Count|Bytes/Second|Bits/Second * |Count/Second|None)<br/> */ private String unit; /** * The standard unit of measurement for a given measure that the trigger * uses when fetching metrics to examine. 
* <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String customUnit; /** * * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 100<br/> * <b>Pattern: </b>[ * -\u0039\u003b-\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]*<br/> */ private String namespace; /** * The lower limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed the upper * threshold, the trigger activates. */ private Double lowerThreshold; /** * The incremental amount to use when performing scaling activities after * the lower threshold has been breached. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String lowerBreachScaleIncrement; /** * The upper limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed the upper * threshold, the trigger activates. */ private Double upperThreshold; /** * The incremental amount to use when performing scaling activities after * the upper threshold has been breached. <note> If only a positive or * negative number is specified, then the AutoScaling group will increase or * decrease by the specified number of actual instances. If positive or * negative number followed by a percent sign is specified, the AutoScaling * group will increase or decrease as a percentage. </note> * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> */ private String upperBreachScaleIncrement; /** * The amount of time, in seconds, used to evaluate and determine if a * breach is occurring. The service will look at data between the current * time and the number of seconds specified in this parameter to see if a * breach has occurred. 
* <p> * <b>Constraints:</b><br/> * <b>Range: </b>120 - 86400<br/> */ private Integer breachDuration; /** * The name of the <a>AutoScalingGroup</a> to be associated with the * trigger. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 100<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The name of the <a>AutoScalingGroup</a> to be associated with the * trigger. */ public String getAutoScalingGroupName() { return autoScalingGroupName; } /** * The amount of time, in seconds, used to evaluate and determine if a * breach is occurring. The service will look at data between the current * time and the number of seconds specified in this parameter to see if a * breach has occurred. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>120 - 86400<br/> * * @return The amount of time, in seconds, used to evaluate and determine if * a breach is occurring. The service will look at data between the * current time and the number of seconds specified in this * parameter to see if a breach has occurred. */ public Integer getBreachDuration() { return breachDuration; } /** * The standard unit of measurement for a given measure that the trigger * uses when fetching metrics to examine. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The standard unit of measurement for a given measure that the * trigger uses when fetching metrics to examine. */ public String getCustomUnit() { return customUnit; } /** * A list of dimensions associated with the metric used by the trigger to * determine whether to fire. For more information, please see <a * hlink="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch product * documentation</a>. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * * @return A list of dimensions associated with the metric used by the * trigger to determine whether to fire. 
For more information, * please see <a hlink="http://aws.amazon.com/cloudwatch/"> Amazon * CloudWatch product documentation</a>. */ public java.util.List<Dimension> getDimensions() { if (dimensions == null) { dimensions = new java.util.ArrayList<Dimension>(); } return dimensions; } /** * The incremental amount to use when performing scaling activities after * the lower threshold has been breached. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The incremental amount to use when performing scaling activities * after the lower threshold has been breached. */ public String getLowerBreachScaleIncrement() { return lowerBreachScaleIncrement; } /** * The lower limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed the upper * threshold, the trigger activates. * * @return The lower limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed * the upper threshold, the trigger activates. */ public Double getLowerThreshold() { return lowerThreshold; } /** * The measure name associated with the metric used by the trigger to * determine when to fire. For more information, please see <a * hlink="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch product * documentation</a>. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The measure name associated with the metric used by the trigger * to determine when to fire. For more information, please see <a * hlink="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch * product documentation</a>. 
*/ public String getMeasureName() { return measureName; } /** * * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 100<br/> * <b>Pattern: </b>[ * -\u0039\u003b-\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]*<br/> * * @return */ public String getNamespace() { return namespace; } /** * The period, in seconds, associated witht he metric statistics. * * @return The period, in seconds, associated witht he metric statistics. */ public Integer getPeriod() { return period; } /** * The statistic that the trigger when fetching metrics to examine. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>(Average|Sum|Minimum|Maximum)<br/> * * @return The statistic that the trigger when fetching metrics to examine. */ public String getStatistic() { return statistic; } /** * The name for the trigger. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The name for the trigger. */ public String getTriggerName() { return triggerName; } /** * * <p> * <b>Constraints:</b><br/> * <b>Pattern: * </b>(Seconds|Bytes|Bits|Percent|Count|Bytes/Second|Bits/Second * |Count/Second|None)<br/> * * @return */ public String getUnit() { return unit; } /** * The incremental amount to use when performing scaling activities after * the upper threshold has been breached. <note> If only a positive or * negative number is specified, then the AutoScaling group will increase or * decrease by the specified number of actual instances. If positive or * negative number followed by a percent sign is specified, the AutoScaling * group will increase or decrease as a percentage. </note> * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @return The incremental amount to use when performing scaling activities * after the upper threshold has been breached. 
<note> If only a * positive or negative number is specified, then the AutoScaling * group will increase or decrease by the specified number of actual * instances. If positive or negative number followed by a percent * sign is specified, the AutoScaling group will increase or * decrease as a percentage. </note> */ public String getUpperBreachScaleIncrement() { return upperBreachScaleIncrement; } /** * The upper limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed the upper * threshold, the trigger activates. * * @return The upper limit for the metric. If all data points in the last * BreachDuration seconds fall below the lower threshold or exceed * the upper threshold, the trigger activates. */ public Double getUpperThreshold() { return upperThreshold; } /** * The name of the <a>AutoScalingGroup</a> to be associated with the * trigger. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 100<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @param autoScalingGroupName * The name of the <a>AutoScalingGroup</a> to be associated with * the trigger. */ public void setAutoScalingGroupName(String autoScalingGroupName) { this.autoScalingGroupName = autoScalingGroupName; } /** * The amount of time, in seconds, used to evaluate and determine if a * breach is occurring. The service will look at data between the current * time and the number of seconds specified in this parameter to see if a * breach has occurred. * <p> * <b>Constraints:</b><br/> * <b>Range: </b>120 - 86400<br/> * * @param breachDuration * The amount of time, in seconds, used to evaluate and determine * if a breach is occurring. The service will look at data * between the current time and the number of seconds specified * in this parameter to see if a breach has occurred. 
*/ public void setBreachDuration(Integer breachDuration) { this.breachDuration = breachDuration; } /** * The standard unit of measurement for a given measure that the trigger * uses when fetching metrics to examine. * <p> * <b>Constraints:</b><br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @param customUnit * The standard unit of measurement for a given measure that the * trigger uses when fetching metrics to examine. */ public void setCustomUnit(String customUnit) { this.customUnit = customUnit; } /** * A list of dimensions associated with the metric used by the trigger to * determine whether to fire. For more information, please see <a * hlink="http://aws.amazon.com/cloudwatch/"> Amazon CloudWatch product * documentation</a>. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - <br/> * * @param dimensions * A list of dimensions associated with the metric used by the * trigger to determine whether to fire. For more information, * please see <a hlink="http://aws.amazon.com/cloudwatch/"> * Amazon CloudWatch product documentation</a>. */ public void setDimensions(java.util.Collection<Dimension> dimensions) { java.util.List<Dimension> dimensionsCopy = new java.util.ArrayList<Dimension>(); if (dimensions != null) { dimensionsCopy.addAll(dimensions); } this.dimensions = dimensionsCopy; } /** * The incremental amount to use when performing scaling activities after * the lower threshold has been breached. * <p> * <b>Constraints:</b><br/> * <b>Length: </b>1 - 255<br/> * <b>Pattern: </b>[ -\uD7FF\uE000-\uFFFD\uD800\uDC00-\uDBFF\uDFFF\r\n\t]* * <br/> * * @param lowerBreachScaleIncrement * The incremental amount to use when performing scaling * activities after the lower threshold has been breached. */ public void setLowerBreachScaleIncrement(String lowerBreachScaleIncrement) { this.lowerBreachScaleIncrement = lowerBreachScaleIncrement; } /** * The lower limit for the metric. 
If all data points in the last
 * BreachDuration seconds fall below the lower threshold or exceed the upper
 * threshold, the trigger activates.
 *
 * @param lowerThreshold
 *            The lower limit for the metric. If all data points in the
 *            last BreachDuration seconds fall below the lower threshold
 *            or exceed the upper threshold, the trigger activates.
 */
public void setLowerThreshold(Double lowerThreshold) {
    this.lowerThreshold = lowerThreshold;
}

/**
 * The measure name associated with the metric used by the trigger to
 * determine when to fire. For more information, please see <a
 * href="http://aws.amazon.com/cloudwatch/">Amazon CloudWatch product
 * documentation</a>.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param measureName
 *            The measure name associated with the metric used by the
 *            trigger to determine when to fire.
 */
public void setMeasureName(String measureName) {
    this.measureName = measureName;
}

/**
 * The namespace of the metric.
 * <p>
 * <b>Constraints:</b> Length 1 - 100.
 *
 * @param namespace the namespace of the metric
 */
public void setNamespace(String namespace) {
    this.namespace = namespace;
}

/**
 * The period, in seconds, associated with the metric statistics.
 *
 * @param period
 *            The period, in seconds, associated with the metric
 *            statistics.
 */
public void setPeriod(Integer period) {
    this.period = period;
}

/**
 * The statistic that the trigger uses when fetching metrics to examine.
 * <p>
 * <b>Constraints:</b> One of (Average|Sum|Minimum|Maximum).
 *
 * @param statistic
 *            The statistic that the trigger uses when fetching metrics
 *            to examine.
 */
public void setStatistic(String statistic) {
    this.statistic = statistic;
}

/**
 * The name for the trigger.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param triggerName
 *            The name for the trigger.
 */
public void setTriggerName(String triggerName) {
    this.triggerName = triggerName;
}

/**
 * The standard unit associated with the metric.
 * <p>
 * <b>Constraints:</b> One of
 * (Seconds|Bytes|Bits|Percent|Count|Bytes/Second|Bits/Second|Count/Second|None).
 *
 * @param unit the standard unit associated with the metric
 */
public void setUnit(String unit) {
    this.unit = unit;
}

/**
 * The incremental amount to use when performing scaling activities after
 * the upper threshold has been breached. <note> If only a positive or
 * negative number is specified, then the AutoScaling group will increase
 * or decrease by the specified number of actual instances. If a positive
 * or negative number followed by a percent sign is specified, the
 * AutoScaling group will increase or decrease as a percentage. </note>
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param upperBreachScaleIncrement
 *            The incremental amount to use when performing scaling
 *            activities after the upper threshold has been breached.
 */
public void setUpperBreachScaleIncrement(String upperBreachScaleIncrement) {
    this.upperBreachScaleIncrement = upperBreachScaleIncrement;
}

/**
 * The upper limit for the metric. If all data points in the last
 * BreachDuration seconds fall below the lower threshold or exceed the
 * upper threshold, the trigger activates.
 *
 * @param upperThreshold
 *            The upper limit for the metric.
 */
public void setUpperThreshold(Double upperThreshold) {
    this.upperThreshold = upperThreshold;
}

/**
 * Returns a string representation of this object; useful for testing and
 * debugging.
 *
 * @return A string representation of this object.
 *
 * @see java.lang.Object#toString()
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("{");
    sb.append("TriggerName: " + triggerName + ", ");
    sb.append("AutoScalingGroupName: " + autoScalingGroupName + ", ");
    sb.append("MeasureName: " + measureName + ", ");
    sb.append("Statistic: " + statistic + ", ");
    sb.append("Dimensions: " + dimensions + ", ");
    sb.append("Period: " + period + ", ");
    sb.append("Unit: " + unit + ", ");
    sb.append("CustomUnit: " + customUnit + ", ");
    sb.append("Namespace: " + namespace + ", ");
    sb.append("LowerThreshold: " + lowerThreshold + ", ");
    sb.append("LowerBreachScaleIncrement: " + lowerBreachScaleIncrement + ", ");
    sb.append("UpperThreshold: " + upperThreshold + ", ");
    sb.append("UpperBreachScaleIncrement: " + upperBreachScaleIncrement + ", ");
    // Last field appended without a separator: the original code emitted a
    // dangling ", " immediately before the closing brace.
    sb.append("BreachDuration: " + breachDuration);
    sb.append("}");
    return sb.toString();
}

/**
 * The name of the <a>AutoScalingGroup</a> to be associated with the
 * trigger.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 100.
 *
 * @param autoScalingGroupName
 *            The name of the <a>AutoScalingGroup</a> to be associated
 *            with the trigger.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withAutoScalingGroupName(
        String autoScalingGroupName) {
    this.autoScalingGroupName = autoScalingGroupName;
    return this;
}

/**
 * The amount of time, in seconds, used to evaluate and determine if a
 * breach is occurring. The service will look at data between the current
 * time and the number of seconds specified in this parameter to see if a
 * breach has occurred.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Range 120 - 86400.
 *
 * @param breachDuration
 *            The amount of time, in seconds, used to evaluate and
 *            determine if a breach is occurring.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withBreachDuration(
        Integer breachDuration) {
    this.breachDuration = breachDuration;
    return this;
}

/**
 * The standard unit of measurement for a given measure that the trigger
 * uses when fetching metrics to examine.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 *
 * @param customUnit
 *            The standard unit of measurement for a given measure that
 *            the trigger uses when fetching metrics to examine.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withCustomUnit(String customUnit) {
    this.customUnit = customUnit;
    return this;
}

/**
 * A list of dimensions associated with the metric used by the trigger to
 * determine whether to fire. For more information, please see <a
 * href="http://aws.amazon.com/cloudwatch/">Amazon CloudWatch product
 * documentation</a>.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together. This variant appends to the existing dimension list.
 *
 * @param dimensions
 *            A list of dimensions associated with the metric used by the
 *            trigger to determine whether to fire.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withDimensions(
        Dimension... dimensions) {
    for (Dimension value : dimensions) {
        getDimensions().add(value);
    }
    return this;
}

/**
 * A list of dimensions associated with the metric used by the trigger to
 * determine whether to fire. For more information, please see <a
 * href="http://aws.amazon.com/cloudwatch/">Amazon CloudWatch product
 * documentation</a>.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together. This variant replaces the dimension list with a defensive
 * copy of the supplied collection (null is treated as empty).
 *
 * @param dimensions
 *            A list of dimensions associated with the metric used by the
 *            trigger to determine whether to fire.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withDimensions(
        java.util.Collection<Dimension> dimensions) {
    java.util.List<Dimension> dimensionsCopy = new java.util.ArrayList<Dimension>();
    if (dimensions != null) {
        dimensionsCopy.addAll(dimensions);
    }
    this.dimensions = dimensionsCopy;
    return this;
}

/**
 * The incremental amount to use when performing scaling activities after
 * the lower threshold has been breached.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param lowerBreachScaleIncrement
 *            The incremental amount to use when performing scaling
 *            activities after the lower threshold has been breached.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withLowerBreachScaleIncrement(
        String lowerBreachScaleIncrement) {
    this.lowerBreachScaleIncrement = lowerBreachScaleIncrement;
    return this;
}

/**
 * The lower limit for the metric. If all data points in the last
 * BreachDuration seconds fall below the lower threshold or exceed the
 * upper threshold, the trigger activates.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 *
 * @param lowerThreshold
 *            The lower limit for the metric.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withLowerThreshold(
        Double lowerThreshold) {
    this.lowerThreshold = lowerThreshold;
    return this;
}

/**
 * The measure name associated with the metric used by the trigger to
 * determine when to fire. For more information, please see <a
 * href="http://aws.amazon.com/cloudwatch/">Amazon CloudWatch product
 * documentation</a>.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param measureName
 *            The measure name associated with the metric used by the
 *            trigger to determine when to fire.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withMeasureName(
        String measureName) {
    this.measureName = measureName;
    return this;
}

/**
 * The namespace of the metric.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 100.
 *
 * @param namespace the namespace of the metric
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withNamespace(String namespace) {
    this.namespace = namespace;
    return this;
}

/**
 * The period, in seconds, associated with the metric statistics.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 *
 * @param period
 *            The period, in seconds, associated with the metric
 *            statistics.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withPeriod(Integer period) {
    this.period = period;
    return this;
}

/**
 * The statistic that the trigger uses when fetching metrics to examine.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> One of (Average|Sum|Minimum|Maximum).
 *
 * @param statistic
 *            The statistic that the trigger uses when fetching metrics
 *            to examine.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withStatistic(String statistic) {
    this.statistic = statistic;
    return this;
}

/**
 * The name for the trigger.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param triggerName
 *            The name for the trigger.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withTriggerName(
        String triggerName) {
    this.triggerName = triggerName;
    return this;
}

/**
 * The standard unit associated with the metric.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> One of
 * (Seconds|Bytes|Bits|Percent|Count|Bytes/Second|Bits/Second|Count/Second|None).
 *
 * @param unit the standard unit associated with the metric
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withUnit(String unit) {
    this.unit = unit;
    return this;
}

/**
 * The incremental amount to use when performing scaling activities after
 * the upper threshold has been breached. <note> If only a positive or
 * negative number is specified, then the AutoScaling group will increase
 * or decrease by the specified number of actual instances. If a positive
 * or negative number followed by a percent sign is specified, the
 * AutoScaling group will increase or decrease as a percentage. </note>
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 * <p>
 * <b>Constraints:</b> Length 1 - 255.
 *
 * @param upperBreachScaleIncrement
 *            The incremental amount to use when performing scaling
 *            activities after the upper threshold has been breached.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withUpperBreachScaleIncrement(
        String upperBreachScaleIncrement) {
    this.upperBreachScaleIncrement = upperBreachScaleIncrement;
    return this;
}

/**
 * The upper limit for the metric. If all data points in the last
 * BreachDuration seconds fall below the lower threshold or exceed the
 * upper threshold, the trigger activates.
 * <p>
 * Returns a reference to this object so that method calls can be chained
 * together.
 *
 * @param upperThreshold
 *            The upper limit for the metric.
 *
 * @return A reference to this updated object so that method calls can be
 *         chained together.
 */
public CreateOrUpdateScalingTriggerRequest withUpperThreshold(
        Double upperThreshold) {
    this.upperThreshold = upperThreshold;
    return this;
}
}
package com.airbnb.airpal.modules;

import com.airbnb.airlift.http.client.OldJettyHttpClient;
import com.airbnb.airpal.AirpalConfiguration;
import com.airbnb.airpal.api.output.PersistentJobOutputFactory;
import com.airbnb.airpal.api.output.builders.OutputBuilderFactory;
import com.airbnb.airpal.api.output.persistors.CSVPersistorFactory;
import com.airbnb.airpal.api.output.persistors.PersistorFactory;
import com.airbnb.airpal.core.AirpalUserFactory;
import com.airbnb.airpal.core.execution.ExecutionClient;
import com.airbnb.airpal.core.health.PrestoHealthCheck;
import com.airbnb.airpal.core.store.files.ExpiringFileStore;
import com.airbnb.airpal.core.store.history.JobHistoryStore;
import com.airbnb.airpal.core.store.history.JobHistoryStoreDAO;
import com.airbnb.airpal.core.store.jobs.ActiveJobsStore;
import com.airbnb.airpal.core.store.jobs.InMemoryActiveJobsStore;
import com.airbnb.airpal.core.store.queries.QueryStore;
import com.airbnb.airpal.core.store.queries.QueryStoreDAO;
import com.airbnb.airpal.core.store.usage.CachingUsageStore;
import com.airbnb.airpal.core.store.usage.SQLUsageStore;
import com.airbnb.airpal.core.store.usage.UsageStore;
import com.airbnb.airpal.presto.ClientSessionFactory;
import com.airbnb.airpal.presto.QueryInfoClient;
import com.airbnb.airpal.presto.metadata.ColumnCache;
import com.airbnb.airpal.presto.metadata.PreviewTableCache;
import com.airbnb.airpal.presto.metadata.SchemaCache;
import com.airbnb.airpal.resources.ExecuteResource;
import com.airbnb.airpal.resources.FilesResource;
import com.airbnb.airpal.resources.HealthResource;
import com.airbnb.airpal.resources.PingResource;
import com.airbnb.airpal.resources.QueryResource;
import com.airbnb.airpal.resources.ResultsPreviewResource;
import com.airbnb.airpal.resources.S3FilesResource;
import com.airbnb.airpal.resources.SessionResource;
import com.airbnb.airpal.resources.TablesResource;
import com.airbnb.airpal.resources.sse.SSEEventSourceServlet;
import com.airbnb.airpal.sql.beans.TableRow;
import com.airbnb.airpal.sql.jdbi.QueryStoreMapper;
import com.airbnb.airpal.sql.jdbi.URIArgumentFactory;
import com.airbnb.airpal.sql.jdbi.UUIDArgumentFactory;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Strings;
import com.google.common.eventbus.AsyncEventBus;
import com.google.common.eventbus.EventBus;
import com.google.inject.AbstractModule;
import com.google.inject.Provider;
import com.google.inject.Provides;
import com.google.inject.Scopes;
import com.google.inject.Singleton;
import com.google.inject.name.Names;
import io.airlift.configuration.ConfigurationFactory;
import io.airlift.http.client.AsyncHttpClient;
import io.airlift.http.client.HttpClientConfig;
import io.airlift.units.DataSize;
import io.airlift.units.Duration;
import io.dropwizard.jdbi.DBIFactory;
import io.dropwizard.setup.Environment;
import lombok.extern.slf4j.Slf4j;
import org.apache.shiro.web.env.EnvironmentLoaderListener;
import org.skife.jdbi.v2.DBI;

import javax.inject.Named;
import java.net.URI;
import java.util.Collections;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;

import static com.airbnb.airpal.presto.QueryRunner.QueryRunnerFactory;

/**
 * Main Guice module for the Airpal application.
 *
 * Binds the JAX-RS resources, health checks, and execution machinery as
 * singletons, and exposes @Provides factory methods for the Presto client
 * stack (session factory, query runner, metadata caches), the JDBI-backed
 * stores, S3 output persistence, and the executor services used by each
 * subsystem. All configuration values come from the injected
 * {@link AirpalConfiguration}; the Dropwizard {@link Environment} is used
 * to build the DBI instance.
 */
@Slf4j
public class AirpalModule extends AbstractModule
{
    // Dropwizard configuration supplying Presto, S3, and datasource settings.
    private final AirpalConfiguration config;
    // Dropwizard environment; needed by DBIFactory for lifecycle/metrics wiring.
    private final Environment environment;

    public AirpalModule(AirpalConfiguration config, Environment environment)
    {
        this.config = config;
        this.environment = environment;
    }

    /**
     * Declares the eager singleton bindings: HTTP resources, the SSE
     * servlet, named configuration strings, and the job-history store
     * implementation.
     */
    @Override
    protected void configure()
    {
        bind(TablesResource.class).in(Scopes.SINGLETON);
        bind(ExecuteResource.class).in(Scopes.SINGLETON);
        bind(QueryResource.class).in(Scopes.SINGLETON);
        bind(HealthResource.class).in(Scopes.SINGLETON);
        bind(PingResource.class).in(Scopes.SINGLETON);
        bind(SessionResource.class).in(Scopes.SINGLETON);
        bind(SSEEventSourceServlet.class).in(Scopes.SINGLETON);
        bind(FilesResource.class).in(Scopes.SINGLETON);
        bind(ResultsPreviewResource.class).in(Scopes.SINGLETON);
        bind(S3FilesResource.class).in(Scopes.SINGLETON);
        bind(EnvironmentLoaderListener.class).in(Scopes.SINGLETON);
        bind(String.class).annotatedWith(Names.named("createTableDestinationSchema")).toInstance(config.getCreateTableDestinationSchema());
        // nullToEmpty: the S3 bucket is optional, but Guice cannot bind a null instance.
        bind(String.class).annotatedWith(Names.named("s3Bucket")).toInstance(Strings.nullToEmpty(config.getS3Bucket()));
        bind(PrestoHealthCheck.class).in(Scopes.SINGLETON);
        bind(ExecutionClient.class).in(Scopes.SINGLETON);
        bind(PersistentJobOutputFactory.class).in(Scopes.SINGLETON);
        bind(JobHistoryStore.class).to(JobHistoryStoreDAO.class).in(Scopes.SINGLETON);
    }

    /**
     * Builds the shared JDBI instance against the configured "mysql"
     * datasource and registers the row mappers and argument factories the
     * Airpal stores rely on.
     */
    @Singleton
    @Provides
    public DBI provideDBI(ObjectMapper objectMapper)
            throws ClassNotFoundException
    {
        final DBIFactory factory = new DBIFactory();
        final DBI dbi = factory.build(environment, config.getDataSourceFactory(), "mysql");
        dbi.registerMapper(new TableRow.TableRowMapper(objectMapper));
        dbi.registerMapper(new QueryStoreMapper(objectMapper));
        dbi.registerArgumentFactory(new UUIDArgumentFactory());
        dbi.registerArgumentFactory(new URIArgumentFactory());
        return dbi;
    }

    /** Airlift configuration factory with no property overrides. */
    @Singleton
    @Provides
    public ConfigurationFactory provideConfigurationFactory()
    {
        return new ConfigurationFactory(Collections.<String, String>emptyMap());
    }

    /** HTTP client used by the query runner; 10-second connect timeout. */
    @Singleton
    @Named("query-runner-http-client")
    @Provides
    public AsyncHttpClient provideQueryRunnerHttpClient()
    {
        final HttpClientConfig httpClientConfig = new HttpClientConfig().setConnectTimeout(new Duration(10, TimeUnit.SECONDS));
        return new OldJettyHttpClient(httpClientConfig);
    }

    // Note: intentionally NOT a singleton, so the coordinator URI can be
    // re-read from configuration via a Provider on each lookup.
    @Named("coordinator-uri")
    @Provides
    public URI providePrestoCoordinatorURI()
    {
        return config.getPrestoCoordinator();
    }

    /** The catalog queries run against when none is specified. */
    @Singleton
    @Named("default-catalog")
    @Provides
    public String provideDefaultCatalog()
    {
        return config.getPrestoCatalog();
    }

    /**
     * Factory for Presto client sessions, parameterized by the configured
     * user, source, catalog, schema, and debug flag.
     */
    @Provides
    @Singleton
    public ClientSessionFactory provideClientSessionFactory(@Named("coordinator-uri") Provider<URI> uriProvider)
    {
        return new ClientSessionFactory(uriProvider,
                config.getPrestoUser(),
                config.getPrestoSource(),
                config.getPrestoCatalog(),
                config.getPrestoSchema(),
                config.isPrestoDebug());
    }

    @Provides
    public QueryRunnerFactory provideQueryRunner(ClientSessionFactory sessionFactory,
            @Named("query-runner-http-client") AsyncHttpClient httpClient)
    {
        return new QueryRunnerFactory(sessionFactory, httpClient);
    }

    @Provides
    public QueryInfoClient provideQueryInfoClient()
    {
        return QueryInfoClient.create();
    }

    /**
     * Schema metadata cache, eagerly pre-populated for the configured
     * catalog at injection time.
     */
    @Singleton
    @Provides
    public SchemaCache provideSchemaCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        final SchemaCache cache = new SchemaCache(queryRunnerFactory, executorService);
        cache.populateCache(config.getPrestoCatalog());
        return cache;
    }

    /** Column metadata cache: 5-minute refresh, 60-minute expiry. */
    @Singleton
    @Provides
    public ColumnCache provideColumnCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        return new ColumnCache(queryRunnerFactory,
                new Duration(5, TimeUnit.MINUTES),
                new Duration(60, TimeUnit.MINUTES),
                executorService);
    }

    /** Table preview cache: 20-minute expiry, 100 preview rows. */
    @Singleton
    @Provides
    public PreviewTableCache providePreviewTableCache(QueryRunnerFactory queryRunnerFactory,
            @Named("presto") ExecutorService executorService)
    {
        return new PreviewTableCache(queryRunnerFactory,
                new Duration(20, TimeUnit.MINUTES),
                executorService,
                100);
    }

    /** Daemon-thread pool backing the async event bus. */
    @Singleton
    @Named("event-bus")
    @Provides
    public ExecutorService provideEventBusExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("event-bus-%d"));
    }

    /** Daemon-thread pool for Presto metadata/cache work. */
    @Singleton
    @Named("presto")
    @Provides
    public ExecutorService provideCompleterExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("presto-%d"));
    }

    /** Single-threaded scheduler used to refresh the Hive table cache. */
    @Singleton
    @Named("hive")
    @Provides
    public ScheduledExecutorService provideTableCacheUpdater()
    {
        return Executors.newSingleThreadScheduledExecutor();
    }

    /** Daemon-thread pool serving server-sent-event streams. */
    @Singleton
    @Named("sse")
    @Provides
    public ExecutorService provideSSEExecutorService()
    {
        return Executors.newCachedThreadPool(SchemaCache.daemonThreadsNamed("sse-%d"));
    }

    @Singleton
    @Provides
    public EventBus provideEventBus(@Named("event-bus") ExecutorService executor)
    {
        return new AsyncEventBus(executor);
    }

    /**
     * AWS credentials from configuration. Returns null when either key is
     * absent; provideAmazonS3Client treats null as "use the default
     * credential chain".
     */
    @Provides
    public AWSCredentials provideAWSCredentials()
    {
        if ((config.getS3AccessKey() == null) || (config.getS3SecretKey() == null)) {
            return null;
        }
        else {
            return new BasicAWSCredentials(config.getS3AccessKey(),
                    config.getS3SecretKey());
        }
    }

    @Singleton
    @Provides
    public AmazonS3 provideAmazonS3Client(AWSCredentials awsCredentials)
    {
        if (awsCredentials == null) {
            // Fall back to the AWS SDK's default credential provider chain.
            return new AmazonS3Client();
        }

        return new AmazonS3Client(awsCredentials);
    }

    /** SQL-backed usage store wrapped in a 6-minute cache. */
    @Singleton
    @Provides
    public UsageStore provideUsageCache(DBI dbi)
    {
        UsageStore delegate = new SQLUsageStore(config.getUsageWindow(), dbi);
        return new CachingUsageStore(delegate, io.dropwizard.util.Duration.minutes(6));
    }

    @Provides
    public QueryStore provideQueryStore(DBI dbi)
    {
        return dbi.onDemand(QueryStoreDAO.class);
    }

    /** User factory: 15-minute session duration, "default" schema fallback. */
    @Provides
    @Singleton
    public AirpalUserFactory provideAirpalUserFactory()
    {
        return new AirpalUserFactory(config.getPrestoSchema(), org.joda.time.Duration.standardMinutes(15), "default");
    }

    @Provides
    @Singleton
    public ActiveJobsStore provideActiveJobsStore()
    {
        return new InMemoryActiveJobsStore();
    }

    /** In-memory result-file store capped at 100 MB. */
    @Provides
    @Singleton
    public ExpiringFileStore provideExpiringFileStore()
    {
        return new ExpiringFileStore(new DataSize(100, DataSize.Unit.MEGABYTE));
    }

    /** CSV persistor that writes to S3 or the local file store per config. */
    @Provides
    @Singleton
    public CSVPersistorFactory provideCSVPersistorFactory(ExpiringFileStore fileStore,
            AmazonS3 s3Client,
            @Named("s3Bucket") String s3Bucket)
    {
        return new CSVPersistorFactory(config.isUseS3(), s3Client, s3Bucket, fileStore);
    }

    @Provides
    @Singleton
    public PersistorFactory providePersistorFactory(CSVPersistorFactory csvPersistorFactory)
    {
        return new PersistorFactory(csvPersistorFactory);
    }

    /** Output builder capped at the configured max output size, in bytes. */
    @Provides
    @Singleton
    public OutputBuilderFactory provideOutputBuilderFactory()
    {
        long maxFileSizeInBytes = Math.round(Math.floor(config.getMaxOutputSize().getValue(DataSize.Unit.BYTE)));
        return new OutputBuilderFactory(maxFileSizeInBytes);
    }
}
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.fs;

import static org.apache.hadoop.fs.CommonConfigurationKeys.*;

import java.io.DataOutputStream;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;

/**
 * This class tests commands from Trash.
 */
public class TestTrash extends TestCase {

  // Scratch directory for all trash tests; spaces in the path are replaced
  // with '+' so the URI stays usable as a Path.
  private final static Path TEST_DIR =
    new Path(new File(System.getProperty("test.build.data","/tmp")
          ).toURI().toString().replace(' ', '+'), "testTrash");

  // Creates a small file at f and asserts it exists; returns f.
  protected static Path writeFile(FileSystem fs, Path f) throws IOException {
    DataOutputStream out = fs.create(f);
    out.writeBytes("dhruba: " + f);
    out.close();
    assertTrue(fs.exists(f));
    return f;
  }

  // Creates directory p (and parents) and asserts it exists as a directory.
  protected static Path mkdir(FileSystem fs, Path p) throws IOException {
    assertTrue(fs.mkdirs(p));
    assertTrue(fs.exists(p));
    assertTrue(fs.getFileStatus(p).isDirectory());
    return p;
  }

  // check that the specified file is in Trash
  protected static void checkTrash(FileSystem fs, Path trashRoot,
      Path path) throws IOException {
    Path p = new Path(trashRoot+"/"+ path.toUri().getPath());
    assertTrue(fs.exists(p));
  }

  // counts how many instances of the file are in the Trash
  // they all are in format fileName*
  protected static int countSameDeletedFiles(FileSystem fs,
      Path trashDir, Path fileName) throws IOException {
    final String prefix = fileName.getName();
    System.out.println("Counting " + fileName + " in " + trashDir.toString());

    // filter that matches all the files that start with fileName*
    PathFilter pf = new PathFilter() {
      public boolean accept(Path file) {
        return file.getName().startsWith(prefix);
      }
    };
    // run the filter
    FileStatus [] fss = fs.listStatus(trashDir, pf);

    // listStatus may return null for a missing directory; treat as zero.
    return fss==null? 0 : fss.length;
  }

  // check that the specified file is not in Trash
  static void checkNotInTrash(FileSystem fs, Path trashRoot, String pathname)
                              throws IOException {
    Path p = new Path(trashRoot+"/"+ new Path(pathname).getName());
    assertTrue(!fs.exists(p));
  }

  /**
   * Drives the full FsShell trash scenario against fs under base:
   * -rm/-rmr into trash, expunge, -skipTrash, protection of the trash
   * root's parent, and repeated deletion of the same file name. The exact
   * ordering of the sub-scenarios matters (later steps reuse trashRoot
   * captured by the first -rm).
   */
  protected static void trashShell(final FileSystem fs, final Path base)
  throws IOException {
    Configuration conf = new Configuration();
    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
    conf.set("fs.default.name", fs.getUri().toString());
    FsShell shell = new FsShell();
    shell.setConf(conf);
    Path trashRoot = null;

    // First create a new directory with mkdirs
    Path myPath = new Path(base, "test/mkdirs");
    mkdir(fs, myPath);

    // Second, create a file in that directory.
    Path myFile = new Path(base, "test/mkdirs/myFile");
    writeFile(fs, myFile);

    // Verify that expunge without Trash directory
    // won't throw Exception
    {
      String[] args = new String[1];
      args[0] = "-expunge";
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
    }

    // Verify that we succeed in removing the file we created.
    // This should go into Trash.
    {
      String[] args = new String[2];
      args[0] = "-rm";
      args[1] = myFile.toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);

      // Capture the trash root for all later checks in this method.
      trashRoot = shell.getCurrentTrashDir();
      checkTrash(fs, trashRoot, myFile);
    }

    // Verify that we can recreate the file
    writeFile(fs, myFile);

    // Verify that we succeed in removing the file we re-created
    {
      String[] args = new String[2];
      args[0] = "-rm";
      args[1] = new Path(base, "test/mkdirs/myFile").toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
    }

    // Verify that we can recreate the file
    writeFile(fs, myFile);

    // Verify that we succeed in removing the whole directory
    // along with the file inside it.
    {
      String[] args = new String[2];
      args[0] = "-rmr";
      args[1] = new Path(base, "test/mkdirs").toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
    }

    // recreate directory
    mkdir(fs, myPath);

    // Verify that we succeed in removing the whole directory
    {
      String[] args = new String[2];
      args[0] = "-rmr";
      args[1] = new Path(base, "test/mkdirs").toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
    }

    // Check that we can delete a file from the trash
    {
      Path toErase = new Path(trashRoot, "toErase");
      int retVal = -1;
      writeFile(fs, toErase);
      try {
        retVal = shell.run(new String[] {"-rm", toErase.toString()});
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(retVal == 0);
      // Deleting a file already inside trash removes it outright; neither
      // the original nor a renamed ".1" copy should remain in trash.
      checkNotInTrash (fs, trashRoot, toErase.toString());
      checkNotInTrash (fs, trashRoot, toErase.toString()+".1");
    }

    // simulate Trash removal
    {
      String[] args = new String[1];
      args[0] = "-expunge";
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
    }

    // verify that after expunging the Trash, it really goes away
    checkNotInTrash(fs, trashRoot, new Path(base, "test/mkdirs/myFile").toString());

    // recreate directory and file
    mkdir(fs, myPath);
    writeFile(fs, myFile);

    // remove file first, then remove directory
    {
      String[] args = new String[2];
      args[0] = "-rm";
      args[1] = myFile.toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
      checkTrash(fs, trashRoot, myFile);

      args = new String[2];
      args[0] = "-rmr";
      args[1] = myPath.toString();
      val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);
      checkTrash(fs, trashRoot, myPath);
    }

    // attempt to remove parent of trash
    {
      String[] args = new String[2];
      args[0] = "-rmr";
      args[1] = trashRoot.getParent().getParent().toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      // Removing an ancestor of the trash must fail and leave trash intact.
      assertTrue(val == -1);
      assertTrue(fs.exists(trashRoot));
    }

    // Verify skip trash option really works

    // recreate directory and file
    mkdir(fs, myPath);
    writeFile(fs, myFile);

    // Verify that skip trash option really skips the trash for files (rm)
    {
      String[] args = new String[3];
      args[0] = "-rm";
      args[1] = "-skipTrash";
      args[2] = myFile.toString();
      int val = -1;
      try {
        // Clear out trash
        assertEquals(0, shell.run(new String [] { "-expunge" } ));

        val = shell.run(args);

      }catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
            e.getLocalizedMessage());
      }

      assertFalse(fs.exists(trashRoot)); // No new Current should be created
      assertFalse(fs.exists(myFile));
      assertTrue(val == 0);
    }

    // recreate directory and file
    mkdir(fs, myPath);
    writeFile(fs, myFile);

    // Verify that skip trash option really skips the trash for rmr
    {
      String[] args = new String[3];
      args[0] = "-rmr";
      args[1] = "-skipTrash";
      args[2] = myPath.toString();
      int val = -1;
      try {
        // Clear out trash
        assertEquals(0, shell.run(new String [] { "-expunge" } ));

        val = shell.run(args);

      }catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
            e.getLocalizedMessage());
      }

      assertFalse(fs.exists(trashRoot)); // No new Current should be created
      assertFalse(fs.exists(myPath));
      assertFalse(fs.exists(myFile));
      assertTrue(val == 0);
    }

    // deleting same file multiple times
    {
      int val = -1;
      mkdir(fs, myPath);

      try {
        assertEquals(0, shell.run(new String [] { "-expunge" } ));
      } catch (Exception e) {
        System.err.println("Exception raised from fs expunge " +
            e.getLocalizedMessage());
      }

      // create a file in that directory.
      myFile = new Path(base, "test/mkdirs/myFile");
      String [] args = new String[] {"-rm", myFile.toString()};
      int num_runs = 10;

      for(int i=0;i<num_runs; i++) {

        //create file
        writeFile(fs, myFile);

        // delete file
        try {
          val = shell.run(args);
        } catch (Exception e) {
          System.err.println("Exception raised from Trash.run " +
              e.getLocalizedMessage());
        }
        assertTrue(val==0);
      }

      // current trash directory
      Path trashDir = new Path(trashRoot.toUri().getPath() +
          myFile.getParent().toUri().getPath());

      System.out.println("Deleting same myFile: myFile.parent=" +
          myFile.getParent().toUri().getPath() +
          "; trashroot="+trashRoot.toUri().getPath() +
          "; trashDir=" + trashDir.toUri().getPath());

      // Each deletion of the same name should produce a distinct
      // "myFile*" entry in trash (myFile, myFile.1, ...).
      int count = countSameDeletedFiles(fs, trashDir, myFile);
      System.out.println("counted " + count + " files " +
          myFile.getName() + "* in " + trashDir);
      assertTrue(count==num_runs);
    }
  }

  /**
   * Verifies that Trash works against a FileSystem other than the
   * configured default (conf points at an invalid default FS; the local
   * FS is used explicitly).
   */
  public static void trashNonDefaultFS(Configuration conf) throws IOException {
    conf.set(FS_TRASH_INTERVAL_KEY, "10"); // 10 minute
    // attempt non-default FileSystem trash
    {
      final FileSystem lfs = FileSystem.getLocal(conf);
      Path p = TEST_DIR;
      Path f = new Path(p, "foo/bar");
      if (lfs.exists(p)) {
        lfs.delete(p, true);
      }
      try {
        f = writeFile(lfs, f);

        FileSystem.closeAll();
        FileSystem localFs = FileSystem.get(URI.create("file:///"), conf);
        Trash lTrash = new Trash(localFs, conf);
        lTrash.moveToTrash(f.getParent());
        checkTrash(localFs, lTrash.getCurrentTrashDir(), f);
      } finally {
        if (lfs.exists(p)) {
          lfs.delete(p, true);
        }
      }
    }
  }

  public void testTrash() throws IOException {
    Configuration conf = new Configuration();
    conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
    trashShell(FileSystem.getLocal(conf), TEST_DIR);
  }

  public void testNonDefaultFS() throws IOException {
    Configuration conf = new Configuration();
    conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
    // Deliberately-invalid default FS: Trash must still work on the
    // explicitly-chosen local FS.
    conf.set("fs.default.name", "invalid://host/bar/foo");
    trashNonDefaultFS(conf);
  }

  /**
   * Runs the background trash Emptier with short intervals and checks
   * that checkpoints are created and old ones are reaped.
   */
  public void testTrashEmptier() throws Exception {
    Configuration conf = new Configuration();
    // Trash with 12 second deletes and 6 seconds checkpoints
    conf.set(FS_TRASH_INTERVAL_KEY, "0.2"); // 12 seconds
    conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);
    conf.set(FS_TRASH_CHECKPOINT_INTERVAL_KEY, "0.1"); // 6 seconds

    Trash trash = new Trash(conf);

    // Start Emptier in background
    Runnable emptier = trash.getEmptier();
    Thread emptierThread = new Thread(emptier);
    emptierThread.start();

    FileSystem fs = FileSystem.getLocal(conf);
    conf.set("fs.default.name", fs.getUri().toString());
    FsShell shell = new FsShell();
    shell.setConf(conf);
    shell.init();
    // First create a new directory with mkdirs
    Path myPath = new Path(TEST_DIR, "test/mkdirs");
    mkdir(fs, myPath);
    int fileIndex = 0;
    Set<String> checkpoints = new HashSet<String>();
    while (true)  {
      // Create a file with a new name
      Path myFile = new Path(TEST_DIR, "test/mkdirs/myFile" + fileIndex++);
      writeFile(fs, myFile);

      // Delete the file to trash
      String[] args = new String[2];
      args[0] = "-rm";
      args[1] = myFile.toString();
      int val = -1;
      try {
        val = shell.run(args);
      } catch (Exception e) {
        System.err.println("Exception raised from Trash.run " +
                           e.getLocalizedMessage());
      }
      assertTrue(val == 0);

      Path trashDir = shell.getCurrentTrashDir();
      FileStatus files[] = fs.listStatus(trashDir.getParent());
      // Scan files in .Trash and add them to set of checkpoints
      for (FileStatus file : files) {
        String fileName = file.getPath().getName();
        checkpoints.add(fileName);
      }
      // If checkpoints has 4 objects it is Current + 3 checkpoint directories
      if (checkpoints.size() == 4) {
        // The actual contents should be smaller since the last checkpoint
        // should've been deleted and Current might not have been recreated yet
        assertTrue(checkpoints.size() > files.length);
        break;
      }
      Thread.sleep(5000);
    }
    emptierThread.interrupt();
    emptierThread.join();
  }

  /**
   * @see TestCase#tearDown()
   */
  @Override
  protected void tearDown() throws IOException {
    File trashDir = new File(TEST_DIR.toUri().getPath());
    if (trashDir.exists() && !FileUtil.fullyDelete(trashDir)) {
      throw new IOException("Cannot remove data directory: " + trashDir);
    }
  }

  // Local FS whose home directory is redirected under TEST_DIR, so the
  // per-user trash location falls inside the test scratch area.
  static class TestLFS extends LocalFileSystem {
    Path home;
    TestLFS() {
      this(new Path(TEST_DIR, "user/test"));
    }
    TestLFS(Path home) {
      super();
      this.home = home;
    }
    public Path getHomeDirectory() {
      return home;
    }
  }

  /**
   * test same file deletion - multiple time
   * this is more of a performance test - shouldn't be run as a unit test
   * @throws IOException
   */
  public static void performanceTestDeleteSameFile() throws IOException{
    Path base = TEST_DIR;
    Configuration conf = new Configuration();
    conf.setClass("fs.file.impl", TestLFS.class, FileSystem.class);

    FileSystem fs = FileSystem.getLocal(conf);
    conf.set("fs.default.name", fs.getUri().toString());
    conf.set(FS_TRASH_INTERVAL_KEY, "10"); //minutes..
    FsShell shell = new FsShell();
    shell.setConf(conf);
    //Path trashRoot = null;

    Path myPath = new Path(base, "test/mkdirs");
    mkdir(fs, myPath);

    // create a file in that directory.
Path myFile; long start; long first = 0; int retVal = 0; int factor = 10; // how much slower any of subsequent deletion can be myFile = new Path(base, "test/mkdirs/myFile"); String [] args = new String[] {"-rm", myFile.toString()}; int iters = 1000; for(int i=0;i<iters; i++) { writeFile(fs, myFile); start = System.currentTimeMillis(); try { retVal = shell.run(args); } catch (Exception e) { System.err.println("Exception raised from Trash.run " + e.getLocalizedMessage()); throw new IOException(e.getMessage()); } assertTrue(retVal == 0); long iterTime = System.currentTimeMillis() - start; // take median of the first 10 runs if(i<10) { if(i==0) { first = iterTime; } else { first = (first + iterTime)/2; } } // we don't want to print every iteration - let's do every 10th int print_freq = iters/10; if(i>10) { if((i%print_freq) == 0) System.out.println("iteration="+i+";res =" + retVal + "; start=" + start + "; iterTime = " + iterTime + " vs. firstTime=" + first); long factoredTime = first*factor; assertTrue(iterTime<factoredTime); //no more then twice of median first 10 } } } public static void main(String [] arg) throws IOException{ // run performance piece as a separate test performanceTestDeleteSameFile(); } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/videointelligence/v1beta2/video_intelligence.proto
// NOTE(review): machine-generated protobuf message class. Do not hand-edit the code;
// regenerate from the .proto file instead. Only comments were added in this review.

package com.google.cloud.videointelligence.v1beta2;

/**
 * <pre>
 * Video segment.
 * </pre>
 *
 * Protobuf type {@code google.cloud.videointelligence.v1beta2.VideoSegment}
 */
public final class VideoSegment extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1beta2.VideoSegment)
    VideoSegmentOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use VideoSegment.newBuilder() to construct.
  private VideoSegment(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private VideoSegment() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new VideoSegment();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor: reads tags 10 (start_time_offset) and
  // 18 (end_time_offset); unrecognized fields are preserved in unknownFields.
  private VideoSegment(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              com.google.protobuf.Duration.Builder subBuilder = null;
              if (startTimeOffset_ != null) {
                subBuilder = startTimeOffset_.toBuilder();
              }
              startTimeOffset_ =
                  input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(startTimeOffset_);
                startTimeOffset_ = subBuilder.buildPartial();
              }
              break;
            }
          case 18:
            {
              com.google.protobuf.Duration.Builder subBuilder = null;
              if (endTimeOffset_ != null) {
                subBuilder = endTimeOffset_.toBuilder();
              }
              endTimeOffset_ =
                  input.readMessage(com.google.protobuf.Duration.parser(), extensionRegistry);
              if (subBuilder != null) {
                subBuilder.mergeFrom(endTimeOffset_);
                endTimeOffset_ = subBuilder.buildPartial();
              }
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1beta2_VideoSegment_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1beta2_VideoSegment_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1beta2.VideoSegment.class,
            com.google.cloud.videointelligence.v1beta2.VideoSegment.Builder.class);
  }

  public static final int START_TIME_OFFSET_FIELD_NUMBER = 1;
  private com.google.protobuf.Duration startTimeOffset_;

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the start of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration start_time_offset = 1;</code>
   *
   * @return Whether the startTimeOffset field is set.
   */
  @java.lang.Override
  public boolean hasStartTimeOffset() {
    return startTimeOffset_ != null;
  }

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the start of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration start_time_offset = 1;</code>
   *
   * @return The startTimeOffset.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getStartTimeOffset() {
    return startTimeOffset_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : startTimeOffset_;
  }

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the start of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration start_time_offset = 1;</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getStartTimeOffsetOrBuilder() {
    return getStartTimeOffset();
  }

  public static final int END_TIME_OFFSET_FIELD_NUMBER = 2;
  private com.google.protobuf.Duration endTimeOffset_;

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the end of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration end_time_offset = 2;</code>
   *
   * @return Whether the endTimeOffset field is set.
   */
  @java.lang.Override
  public boolean hasEndTimeOffset() {
    return endTimeOffset_ != null;
  }

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the end of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration end_time_offset = 2;</code>
   *
   * @return The endTimeOffset.
   */
  @java.lang.Override
  public com.google.protobuf.Duration getEndTimeOffset() {
    return endTimeOffset_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : endTimeOffset_;
  }

  /**
   * <pre>
   * Time-offset, relative to the beginning of the video,
   * corresponding to the end of the segment (inclusive).
   * </pre>
   *
   * <code>.google.protobuf.Duration end_time_offset = 2;</code>
   */
  @java.lang.Override
  public com.google.protobuf.DurationOrBuilder getEndTimeOffsetOrBuilder() {
    return getEndTimeOffset();
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (startTimeOffset_ != null) {
      output.writeMessage(1, getStartTimeOffset());
    }
    if (endTimeOffset_ != null) {
      output.writeMessage(2, getEndTimeOffset());
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (startTimeOffset_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStartTimeOffset());
    }
    if (endTimeOffset_ != null) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getEndTimeOffset());
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1beta2.VideoSegment)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1beta2.VideoSegment other =
        (com.google.cloud.videointelligence.v1beta2.VideoSegment) obj;

    if (hasStartTimeOffset() != other.hasStartTimeOffset()) return false;
    if (hasStartTimeOffset()) {
      if (!getStartTimeOffset().equals(other.getStartTimeOffset())) return false;
    }
    if (hasEndTimeOffset() != other.hasEndTimeOffset()) return false;
    if (hasEndTimeOffset()) {
      if (!getEndTimeOffset().equals(other.getEndTimeOffset())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Generator-chosen prime multipliers; memoized after first computation.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasStartTimeOffset()) {
      hash = (37 * hash) + START_TIME_OFFSET_FIELD_NUMBER;
      hash = (53 * hash) + getStartTimeOffset().hashCode();
    }
    if (hasEndTimeOffset()) {
      hash = (37 * hash) + END_TIME_OFFSET_FIELD_NUMBER;
      hash = (53 * hash) + getEndTimeOffset().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1beta2.VideoSegment prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   * <pre>
   * Video segment.
   * </pre>
   *
   * Protobuf type {@code google.cloud.videointelligence.v1beta2.VideoSegment}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1beta2.VideoSegment)
      com.google.cloud.videointelligence.v1beta2.VideoSegmentOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_VideoSegment_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_VideoSegment_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1beta2.VideoSegment.class,
              com.google.cloud.videointelligence.v1beta2.VideoSegment.Builder.class);
    }

    // Construct using com.google.cloud.videointelligence.v1beta2.VideoSegment.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      if (startTimeOffsetBuilder_ == null) {
        startTimeOffset_ = null;
      } else {
        startTimeOffset_ = null;
        startTimeOffsetBuilder_ = null;
      }
      if (endTimeOffsetBuilder_ == null) {
        endTimeOffset_ = null;
      } else {
        endTimeOffset_ = null;
        endTimeOffsetBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1beta2.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1beta2_VideoSegment_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.VideoSegment getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1beta2.VideoSegment.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.VideoSegment build() {
      com.google.cloud.videointelligence.v1beta2.VideoSegment result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1beta2.VideoSegment buildPartial() {
      com.google.cloud.videointelligence.v1beta2.VideoSegment result =
          new com.google.cloud.videointelligence.v1beta2.VideoSegment(this);
      if (startTimeOffsetBuilder_ == null) {
        result.startTimeOffset_ = startTimeOffset_;
      } else {
        result.startTimeOffset_ = startTimeOffsetBuilder_.build();
      }
      if (endTimeOffsetBuilder_ == null) {
        result.endTimeOffset_ = endTimeOffset_;
      } else {
        result.endTimeOffset_ = endTimeOffsetBuilder_.build();
      }
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.videointelligence.v1beta2.VideoSegment) {
        return mergeFrom((com.google.cloud.videointelligence.v1beta2.VideoSegment) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.videointelligence.v1beta2.VideoSegment other) {
      if (other == com.google.cloud.videointelligence.v1beta2.VideoSegment.getDefaultInstance())
        return this;
      if (other.hasStartTimeOffset()) {
        mergeStartTimeOffset(other.getStartTimeOffset());
      }
      if (other.hasEndTimeOffset()) {
        mergeEndTimeOffset(other.getEndTimeOffset());
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.videointelligence.v1beta2.VideoSegment parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage =
            (com.google.cloud.videointelligence.v1beta2.VideoSegment) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private com.google.protobuf.Duration startTimeOffset_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        startTimeOffsetBuilder_;

    /**
     * Time-offset from the beginning of the video to the segment start (inclusive).
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     *
     * @return Whether the startTimeOffset field is set.
     */
    public boolean hasStartTimeOffset() {
      return startTimeOffsetBuilder_ != null || startTimeOffset_ != null;
    }

    /**
     * Time-offset from the beginning of the video to the segment start (inclusive).
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     *
     * @return The startTimeOffset.
     */
    public com.google.protobuf.Duration getStartTimeOffset() {
      if (startTimeOffsetBuilder_ == null) {
        return startTimeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : startTimeOffset_;
      } else {
        return startTimeOffsetBuilder_.getMessage();
      }
    }

    /**
     * Sets the segment start offset.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public Builder setStartTimeOffset(com.google.protobuf.Duration value) {
      if (startTimeOffsetBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        startTimeOffset_ = value;
        onChanged();
      } else {
        startTimeOffsetBuilder_.setMessage(value);
      }
      return this;
    }

    /**
     * Sets the segment start offset from a builder.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public Builder setStartTimeOffset(com.google.protobuf.Duration.Builder builderForValue) {
      if (startTimeOffsetBuilder_ == null) {
        startTimeOffset_ = builderForValue.build();
        onChanged();
      } else {
        startTimeOffsetBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Merges into the segment start offset.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public Builder mergeStartTimeOffset(com.google.protobuf.Duration value) {
      if (startTimeOffsetBuilder_ == null) {
        if (startTimeOffset_ != null) {
          startTimeOffset_ =
              com.google.protobuf.Duration.newBuilder(startTimeOffset_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          startTimeOffset_ = value;
        }
        onChanged();
      } else {
        startTimeOffsetBuilder_.mergeFrom(value);
      }
      return this;
    }

    /**
     * Clears the segment start offset.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public Builder clearStartTimeOffset() {
      if (startTimeOffsetBuilder_ == null) {
        startTimeOffset_ = null;
        onChanged();
      } else {
        startTimeOffset_ = null;
        startTimeOffsetBuilder_ = null;
      }
      return this;
    }

    /**
     * Returns a builder for the segment start offset.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public com.google.protobuf.Duration.Builder getStartTimeOffsetBuilder() {
      onChanged();
      return getStartTimeOffsetFieldBuilder().getBuilder();
    }

    /**
     * Returns the segment start offset message or its builder.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    public com.google.protobuf.DurationOrBuilder getStartTimeOffsetOrBuilder() {
      if (startTimeOffsetBuilder_ != null) {
        return startTimeOffsetBuilder_.getMessageOrBuilder();
      } else {
        return startTimeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : startTimeOffset_;
      }
    }

    /**
     * Lazily creates the single-field builder for start_time_offset.
     *
     * <code>.google.protobuf.Duration start_time_offset = 1;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getStartTimeOffsetFieldBuilder() {
      if (startTimeOffsetBuilder_ == null) {
        startTimeOffsetBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getStartTimeOffset(), getParentForChildren(), isClean());
        startTimeOffset_ = null;
      }
      return startTimeOffsetBuilder_;
    }

    private com.google.protobuf.Duration endTimeOffset_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        endTimeOffsetBuilder_;

    /**
     * Time-offset from the beginning of the video to the segment end (inclusive).
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     *
     * @return Whether the endTimeOffset field is set.
     */
    public boolean hasEndTimeOffset() {
      return endTimeOffsetBuilder_ != null || endTimeOffset_ != null;
    }

    /**
     * Time-offset from the beginning of the video to the segment end (inclusive).
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     *
     * @return The endTimeOffset.
     */
    public com.google.protobuf.Duration getEndTimeOffset() {
      if (endTimeOffsetBuilder_ == null) {
        return endTimeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : endTimeOffset_;
      } else {
        return endTimeOffsetBuilder_.getMessage();
      }
    }

    /**
     * Sets the segment end offset.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public Builder setEndTimeOffset(com.google.protobuf.Duration value) {
      if (endTimeOffsetBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        endTimeOffset_ = value;
        onChanged();
      } else {
        endTimeOffsetBuilder_.setMessage(value);
      }
      return this;
    }

    /**
     * Sets the segment end offset from a builder.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public Builder setEndTimeOffset(com.google.protobuf.Duration.Builder builderForValue) {
      if (endTimeOffsetBuilder_ == null) {
        endTimeOffset_ = builderForValue.build();
        onChanged();
      } else {
        endTimeOffsetBuilder_.setMessage(builderForValue.build());
      }
      return this;
    }

    /**
     * Merges into the segment end offset.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public Builder mergeEndTimeOffset(com.google.protobuf.Duration value) {
      if (endTimeOffsetBuilder_ == null) {
        if (endTimeOffset_ != null) {
          endTimeOffset_ =
              com.google.protobuf.Duration.newBuilder(endTimeOffset_)
                  .mergeFrom(value)
                  .buildPartial();
        } else {
          endTimeOffset_ = value;
        }
        onChanged();
      } else {
        endTimeOffsetBuilder_.mergeFrom(value);
      }
      return this;
    }

    /**
     * Clears the segment end offset.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public Builder clearEndTimeOffset() {
      if (endTimeOffsetBuilder_ == null) {
        endTimeOffset_ = null;
        onChanged();
      } else {
        endTimeOffset_ = null;
        endTimeOffsetBuilder_ = null;
      }
      return this;
    }

    /**
     * Returns a builder for the segment end offset.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public com.google.protobuf.Duration.Builder getEndTimeOffsetBuilder() {
      onChanged();
      return getEndTimeOffsetFieldBuilder().getBuilder();
    }

    /**
     * Returns the segment end offset message or its builder.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    public com.google.protobuf.DurationOrBuilder getEndTimeOffsetOrBuilder() {
      if (endTimeOffsetBuilder_ != null) {
        return endTimeOffsetBuilder_.getMessageOrBuilder();
      } else {
        return endTimeOffset_ == null
            ? com.google.protobuf.Duration.getDefaultInstance()
            : endTimeOffset_;
      }
    }

    /**
     * Lazily creates the single-field builder for end_time_offset.
     *
     * <code>.google.protobuf.Duration end_time_offset = 2;</code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>
        getEndTimeOffsetFieldBuilder() {
      if (endTimeOffsetBuilder_ == null) {
        endTimeOffsetBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.Duration,
                com.google.protobuf.Duration.Builder,
                com.google.protobuf.DurationOrBuilder>(
                getEndTimeOffset(), getParentForChildren(), isClean());
        endTimeOffset_ = null;
      }
      return endTimeOffsetBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1beta2.VideoSegment)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1beta2.VideoSegment)
  private static final com.google.cloud.videointelligence.v1beta2.VideoSegment DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1beta2.VideoSegment();
  }

  public static com.google.cloud.videointelligence.v1beta2.VideoSegment getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<VideoSegment> PARSER =
      new com.google.protobuf.AbstractParser<VideoSegment>() {
        @java.lang.Override
        public VideoSegment parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new VideoSegment(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<VideoSegment> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<VideoSegment> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.videointelligence.v1beta2.VideoSegment getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.cache.distributed.near;

import java.io.Serializable;
import java.util.List;
import java.util.Random;
import java.util.concurrent.Callable;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicIntegerArray;
import javax.cache.CacheException;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.cache.affinity.AffinityKey;
import org.apache.ignite.cache.affinity.rendezvous.RendezvousAffinityFunction;
import org.apache.ignite.cache.query.QueryCancelledException;
import org.apache.ignite.cache.query.SqlFieldsQuery;
import org.apache.ignite.cache.query.annotations.QuerySqlField;
import org.apache.ignite.cluster.ClusterTopologyException;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.DataRegionConfiguration;
import org.apache.ignite.configuration.DataStorageConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteEx;
import org.apache.ignite.internal.IgniteFutureTimeoutCheckedException;
import org.apache.ignite.internal.IgniteInternalFuture;
import org.apache.ignite.internal.IgniteInterruptedCheckedException;
import org.apache.ignite.internal.util.GridRandom;
import org.apache.ignite.internal.util.typedef.CAX;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import org.apache.ignite.transactions.TransactionException;
import org.apache.ignite.transactions.TransactionTimeoutException;
import org.junit.Ignore;
import org.junit.Test;

import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheMode.REPLICATED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
import static org.apache.ignite.cache.CacheWriteSynchronizationMode.FULL_SYNC;

/**
 * Test for distributed queries with node restarts.
 */
public class IgniteCacheQueryNodeRestartSelfTest2 extends GridCommonAbstractTest {
    /** Distributed join over the partitioned caches; expected to return a stable result. */
    private static final String PARTITIONED_QRY = "select co.id, count(*) cnt\n" +
        "from \"pe\".Person pe, \"pr\".Product pr, \"co\".Company co, \"pu\".Purchase pu\n" +
        "where pe.id = pu.personId and pu.productId = pr.id and pr.companyId = co.id \n" +
        "group by co.id order by cnt desc, co.id";

    /** Join over the replicated caches only. */
    private static final String REPLICATED_QRY = "select pr.id, co.id\n" +
        "from \"pr\".Product pr, \"co\".Company co\n" +
        "where pr.companyId = co.id\n" +
        "order by co.id, pr.id ";

    /** Total grid count: query threads + restart threads must not exceed this. */
    private static final int GRID_CNT = 6;

    /** Number of Person entries loaded into cache "pe". */
    private static final int PERS_CNT = 600;

    /** Number of Purchase entries loaded into cache "pu". */
    private static final int PURCHASE_CNT = 6000;

    /** Number of Company entries loaded into cache "co". */
    private static final int COMPANY_CNT = 25;

    /** Number of Product entries loaded into cache "pr". */
    private static final int PRODUCT_CNT = 100;

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration c = super.getConfiguration(igniteInstanceName);

        DataStorageConfiguration memCfg = new DataStorageConfiguration().setDefaultDataRegionConfiguration(
            new DataRegionConfiguration().setMaxSize(50L * 1024 * 1024));

        c.setDataStorageConfiguration(memCfg);

        int i = 0;

        CacheConfiguration<?, ?>[] ccs = new CacheConfiguration[4];

        // "pe" and "pu" are partitioned with 2 backups so data survives single-node restarts.
        for (String name : F.asList("pe", "pu")) {
            CacheConfiguration<?, ?> cc = defaultCacheConfiguration();

            cc.setName(name);
            cc.setCacheMode(PARTITIONED);
            cc.setBackups(2);
            cc.setWriteSynchronizationMode(FULL_SYNC);
            cc.setAtomicityMode(TRANSACTIONAL);
            cc.setRebalanceMode(SYNC);
            cc.setAffinity(new RendezvousAffinityFunction(false, 60));

            if (name.equals("pe")) {
                cc.setIndexedTypes(
                    Integer.class, Person.class
                );
            }
            else if (name.equals("pu")) {
                cc.setIndexedTypes(
                    AffinityKey.class, Purchase.class
                );
            }

            ccs[i++] = cc;
        }

        // "co" and "pr" are replicated so every node holds a full copy.
        for (String name : F.asList("co", "pr")) {
            CacheConfiguration<?, ?> cc = defaultCacheConfiguration();

            cc.setName(name);
            cc.setCacheMode(REPLICATED);
            cc.setWriteSynchronizationMode(FULL_SYNC);
            cc.setAtomicityMode(TRANSACTIONAL);
            cc.setRebalanceMode(SYNC);
            cc.setAffinity(new RendezvousAffinityFunction(false, 50));

            if (name.equals("co")) {
                cc.setIndexedTypes(
                    Integer.class, Company.class
                );
            }
            else if (name.equals("pr")) {
                cc.setIndexedTypes(
                    Integer.class, Product.class
                );
            }

            ccs[i++] = cc;
        }

        c.setCacheConfiguration(ccs);

        return c;
    }

    /**
     * Populates all four caches with deterministic sizes (links between entities are random).
     * Purchases are collocated with their person via {@link AffinityKey}.
     */
    private void fillCaches() {
        IgniteCache<Integer, Company> co = grid(0).cache("co");

        for (int i = 0; i < COMPANY_CNT; i++)
            co.put(i, new Company(i));

        IgniteCache<Integer, Product> pr = grid(0).cache("pr");

        Random rnd = new GridRandom();

        for (int i = 0; i < PRODUCT_CNT; i++)
            pr.put(i, new Product(i, rnd.nextInt(COMPANY_CNT)));

        IgniteCache<Integer, Person> pe = grid(0).cache("pe");

        for (int i = 0; i < PERS_CNT; i++)
            pe.put(i, new Person(i));

        IgniteCache<AffinityKey<Integer>, Purchase> pu = grid(0).cache("pu");

        for (int i = 0; i < PURCHASE_CNT; i++) {
            int persId = rnd.nextInt(PERS_CNT);
            int prodId = rnd.nextInt(PRODUCT_CNT);

            pu.put(new AffinityKey<>(i, persId), new Purchase(persId, prodId));
        }
    }

    /**
     * Runs concurrent query threads and node restart threads against the same grid and
     * asserts query results stay consistent. Each thread claims a node via the {@code locks}
     * array (1 = querying, -1 = restarting, 0 = free) so a node is never queried while it
     * is being restarted.
     *
     * @throws Exception If failed.
     */
    @Ignore("https://issues.apache.org/jira/browse/IGNITE-10917")
    @Test
    public void testRestarts() throws Exception {
        int duration = 90 * 1000;
        int qryThreadNum = 4;
        int restartThreadsNum = 2; // 4 + 2 = 6 nodes
        final int nodeLifeTime = 2 * 1000;
        final int logFreq = 10;

        startGridsMultiThreaded(GRID_CNT);

        final AtomicIntegerArray locks = new AtomicIntegerArray(GRID_CNT);

        fillCaches();

        // Reference results computed on a stable topology; all later queries must match these.
        final List<List<?>> pRes = grid(0).cache("pu").query(new SqlFieldsQuery(PARTITIONED_QRY)).getAll();

        Thread.sleep(3000);

        assertEquals(pRes, grid(0).cache("pu").query(new SqlFieldsQuery(PARTITIONED_QRY)).getAll());

        final List<List<?>> rRes = grid(0).cache("co").query(new SqlFieldsQuery(REPLICATED_QRY)).getAll();

        assertFalse(pRes.isEmpty());
        assertFalse(rRes.isEmpty());

        final AtomicInteger qryCnt = new AtomicInteger();

        final AtomicBoolean qrysDone = new AtomicBoolean();

        IgniteInternalFuture<?> fut1 = multithreadedAsync(new CAX() {
            @Override public void applyx() throws IgniteCheckedException {
                final GridRandom rnd = new GridRandom();

                while (!qrysDone.get()) {
                    int g;

                    // Spin until we claim a node that is not being restarted.
                    do {
                        g = rnd.nextInt(locks.length());
                    }
                    while (!locks.compareAndSet(g, 0, 1));

                    try {
                        final IgniteEx grid = grid(g);

                        if (rnd.nextBoolean()) { // Partitioned query.
                            final IgniteCache<?,?> cache = grid.cache("pu");

                            final SqlFieldsQuery qry = new SqlFieldsQuery(PARTITIONED_QRY);

                            boolean smallPageSize = rnd.nextBoolean();

                            if (smallPageSize)
                                qry.setPageSize(3);

                            final IgniteCache<Integer, Company> co = grid.cache("co");

                            try {
                                runQuery(grid, new Runnable() {
                                    @Override public void run() {
                                        if (rnd.nextBoolean())
                                            co.get(rnd.nextInt(COMPANY_CNT)); // Get lock run test with open transaction.

                                        assertEquals(pRes, cache.query(qry).getAll());
                                    }
                                });
                            }
                            catch (CacheException e) {
                                // Interruptions are expected here.
                                if (e.getCause() instanceof IgniteInterruptedCheckedException ||
                                    e.getCause() instanceof InterruptedException ||
                                    e.getCause() instanceof ClusterTopologyException ||
                                    e.getCause() instanceof TransactionTimeoutException ||
                                    e.getCause() instanceof TransactionException)
                                    continue;

                                if (e.getCause() instanceof QueryCancelledException)
                                    fail("Retry is expected");

                                if (!smallPageSize)
                                    U.error(grid.log(), "On large page size must retry.", e);

                                assertTrue("On large page size must retry.", smallPageSize);

                                boolean failedOnRemoteFetch = false;
                                boolean failedOnInterruption = false;

                                // Walk the cause chain looking for the expected failure modes.
                                for (Throwable th = e; th != null; th = th.getCause()) {
                                    if (th instanceof InterruptedException) {
                                        failedOnInterruption = true;

                                        break;
                                    }

                                    if (!(th instanceof CacheException))
                                        continue;

                                    if (th.getMessage() != null &&
                                        th.getMessage().startsWith("Failed to fetch data from node:")) {
                                        failedOnRemoteFetch = true;

                                        break;
                                    }
                                }

                                // Interruptions are expected here.
                                if (failedOnInterruption)
                                    continue;

                                if (!failedOnRemoteFetch) {
                                    U.error(grid.log(), "Must fail inside of GridResultPage.fetchNextPage or subclass.", e);

                                    fail("Must fail inside of GridResultPage.fetchNextPage or subclass.");
                                }
                            }
                        }
                        else { // Replicated query.
                            IgniteCache<?, ?> cache = grid.cache("co");

                            assertEquals(rRes, cache.query(new SqlFieldsQuery(REPLICATED_QRY)).getAll());
                        }
                    }
                    finally {
                        // Clearing lock in final handler to avoid endless loop if exception is thrown.
                        locks.set(g, 0);

                        int c = qryCnt.incrementAndGet();

                        if (c % logFreq == 0)
                            info("Executed queries: " + c);
                    }
                }
            }
        }, qryThreadNum, "query-thread");

        final AtomicInteger restartCnt = new AtomicInteger();

        final AtomicBoolean restartsDone = new AtomicBoolean();

        IgniteInternalFuture<?> fut2 = multithreadedAsync(new Callable<Object>() {
            @SuppressWarnings({"BusyWait"})
            @Override public Object call() throws Exception {
                GridRandom rnd = new GridRandom();

                while (!restartsDone.get()) {
                    int g;

                    // Spin until we claim a node that no query thread holds.
                    do {
                        g = rnd.nextInt(locks.length());
                    }
                    while (!locks.compareAndSet(g, 0, -1));

                    try {
                        log.info("Stop node: " + g);

                        stopGrid(g);

                        Thread.sleep(rnd.nextInt(nodeLifeTime));

                        log.info("Start node: " + g);

                        startGrid(g);

                        Thread.sleep(rnd.nextInt(nodeLifeTime));
                    }
                    finally {
                        locks.set(g, 0);

                        int c = restartCnt.incrementAndGet();

                        if (c % logFreq == 0)
                            info("Node restarts: " + c);
                    }
                }

                return true;
            }
        }, restartThreadsNum, "restart-thread");

        Thread.sleep(duration);

        info("Stopping..");

        restartsDone.set(true);

        try {
            fut2.get(20_000);
        }
        catch (IgniteFutureTimeoutCheckedException e) {
            U.dumpThreads(log);

            fail("Stopping restarts timeout.");
        }

        info("Restarts stopped.");

        qrysDone.set(true);

        // Query thread can stuck in next page waiting loop because all nodes are left.
        try {
            fut1.get(5_000);
        }
        catch (IgniteFutureTimeoutCheckedException ignored) {
            fut1.cancel();
        }

        info("Queries stopped.");
    }

    /**
     * Run query closure. Subclasses may wrap the execution (e.g. in a transaction).
     *
     * @param grid Grid.
     * @param qryRunnable Query runnable.
     */
    protected void runQuery(IgniteEx grid, Runnable qryRunnable) {
        qryRunnable.run();
    }

    /** Queryable person entity stored in cache "pe". */
    private static class Person implements Serializable {
        /** Indexed person ID. */
        @QuerySqlField(index = true)
        int id;

        /**
         * @param id Person ID.
         */
        Person(int id) {
            this.id = id;
        }
    }

    /** Queryable purchase entity stored in cache "pu", collocated with its person. */
    private static class Purchase implements Serializable {
        /** Indexed person ID. */
        @QuerySqlField(index = true)
        int personId;

        /** Indexed product ID. */
        @QuerySqlField(index = true)
        int productId;

        /**
         * @param personId Person ID.
         * @param productId Product ID.
         */
        Purchase(int personId, int productId) {
            this.personId = personId;
            this.productId = productId;
        }
    }

    /** Queryable company entity stored in replicated cache "co". */
    private static class Company implements Serializable {
        /** Indexed company ID. */
        @QuerySqlField(index = true)
        int id;

        /**
         * @param id ID.
         */
        Company(int id) {
            this.id = id;
        }
    }

    /** Queryable product entity stored in replicated cache "pr". */
    private static class Product implements Serializable {
        /** Indexed product ID. */
        @QuerySqlField(index = true)
        int id;

        /** Indexed ID of the owning company. */
        @QuerySqlField(index = true)
        int companyId;

        /**
         * @param id ID.
         * @param companyId Company ID.
         */
        Product(int id, int companyId) {
            this.id = id;
            this.companyId = companyId;
        }
    }
}
/* Copyright 2017 Alfa Financial Software
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *    http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.alfasoftware.morf.jdbc.sqlserver;

import static com.google.common.base.Strings.isNullOrEmpty;
import static org.alfasoftware.morf.metadata.SchemaUtils.namesOfColumns;
import static org.alfasoftware.morf.metadata.SchemaUtils.primaryKeysForTable;
import static org.alfasoftware.morf.metadata.SchemaUtils.table;

import java.sql.Connection;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Optional;
import java.util.Set;

import org.alfasoftware.morf.jdbc.DatabaseType;
import org.alfasoftware.morf.jdbc.SqlDialect;
import org.alfasoftware.morf.jdbc.SqlScriptExecutor;
import org.alfasoftware.morf.metadata.Column;
import org.alfasoftware.morf.metadata.DataType;
import org.alfasoftware.morf.metadata.Index;
import org.alfasoftware.morf.metadata.Table;
import org.alfasoftware.morf.metadata.View;
import org.alfasoftware.morf.sql.Hint;
import org.alfasoftware.morf.sql.OptimiseForRowCount;
import org.alfasoftware.morf.sql.SelectFirstStatement;
import org.alfasoftware.morf.sql.SelectStatement;
import org.alfasoftware.morf.sql.UpdateStatement;
import org.alfasoftware.morf.sql.UseImplicitJoinOrder;
import org.alfasoftware.morf.sql.UseIndex;
import org.alfasoftware.morf.sql.element.AliasedField;
import org.alfasoftware.morf.sql.element.Cast;
import org.alfasoftware.morf.sql.element.ConcatenatedField;
import org.alfasoftware.morf.sql.element.FieldLiteral;
import org.alfasoftware.morf.sql.element.FieldReference;
import org.alfasoftware.morf.sql.element.Function;
import org.alfasoftware.morf.sql.element.WindowFunction;
import org.apache.commons.lang3.StringUtils;
import org.joda.time.LocalDate;

import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableList.Builder;
import com.google.common.collect.ImmutableSet;

/**
 * Provides SQL Server specific SQL statements.
 *
 * @author Copyright (c) Alfa Financial Software 2010
 */
class SqlServerDialect extends SqlDialect {

  /*
   * This SQL came from http://stackoverflow.com/questions/8641954/how-to-drop-column-with-constraint
   * It loops, dropping every default constraint attached to {column} of {table},
   * because the constraint name is system-generated and cannot be predicted.
   */
  @VisibleForTesting
  static final String dropDefaultForColumnSql = "DECLARE @sql NVARCHAR(MAX) \n" +
      "WHILE 1=1\n" +
      "BEGIN\n" +
      " SELECT TOP 1 @sql = N'alter table {table} drop constraint ['+dc.NAME+N']'\n" +
      " from sys.default_constraints dc\n" +
      " JOIN sys.columns c\n" +
      " ON c.default_object_id = dc.object_id\n" +
      " WHERE\n" +
      " dc.parent_object_id = OBJECT_ID('{table}')\n" +
      " AND c.name = N'{column}'\n" +
      " IF @@ROWCOUNT = 0 BREAK\n" +
      " EXEC (@sql)\n" +
      "END";

  /**
   * Used to force collation to be case-sensitive.
   */
  private static final String COLLATE = "COLLATE SQL_Latin1_General_CP1_CS_AS";

  /**
   * The hint types supported.
   */
  private static final Set<Class<? extends Hint>> SUPPORTED_HINTS = ImmutableSet.of(UseIndex.class, OptimiseForRowCount.class, UseImplicitJoinOrder.class);

  /**
   * Creates an instance of MS SQL Server dialect.
   *
   * @param schemaName The database schema name.
   */
  public SqlServerDialect(String schemaName) {
    super(schemaName);
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#tableDeploymentStatements(org.alfasoftware.morf.metadata.Table)
   */
  @Override
  public Collection<String> internalTableDeploymentStatements(Table table) {
    List<String> statements = new ArrayList<>();

    // Create the table deployment statement
    StringBuilder createTableStatement = new StringBuilder();
    createTableStatement.append("CREATE ");
    createTableStatement.append("TABLE ");
    createTableStatement.append(schemaNamePrefix());
    createTableStatement.append(table.getName());
    createTableStatement.append(" (");

    boolean first = true;

    for (Column column : table.columns()) {
      if (!first) {
        createTableStatement.append(", ");
      }

      // Column names are bracketed to survive reserved words.
      createTableStatement.append(String.format("[%s] ", column.getName()));
      createTableStatement.append(sqlRepresentationOfColumnType(table, column, false));
      if (column.isAutoNumbered()) {
        // -1 means "no explicit start"; SQL Server identity seeds default to 1.
        int autoNumberStart = column.getAutoNumberStart() == -1 ? 1 : column.getAutoNumberStart();
        createTableStatement.append(" IDENTITY(" + autoNumberStart + ", 1)");
      }

      first = false;
    }

    List<Column> primaryKeys = primaryKeysForTable(table);

    if (!primaryKeys.isEmpty()) {
      createTableStatement.append(", ");
      createTableStatement.append(buildPrimaryKeyConstraint(table.getName(), namesOfColumns(primaryKeys)));
    }

    createTableStatement.append(")");

    statements.add(createTableStatement.toString());

    return statements;
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getFromDummyTable()
   */
  @Override
  protected String getFromDummyTable() {
    return StringUtils.EMPTY; // SQLServer doesn't have a "DUAL" table like oracle etc.
  }

  /**
   * Builds a named primary key constraint clause ({@code CONSTRAINT [<table>_PK] PRIMARY KEY (...)}).
   *
   * @param tableName Name of the table.
   * @param primaryKeys List of the primary keys on the table.
   */
  private String buildPrimaryKeyConstraint(String tableName, List<String> primaryKeys) {
    StringBuilder pkConstraint = new StringBuilder();
    pkConstraint.append("CONSTRAINT [");
    pkConstraint.append(undecorateName(tableName));
    pkConstraint.append("_PK] PRIMARY KEY ([");
    pkConstraint.append(Joiner.on("], [").join(primaryKeys));
    pkConstraint.append("])");

    return pkConstraint.toString();
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#indexDeploymentStatements(org.alfasoftware.morf.metadata.Table,
   *      org.alfasoftware.morf.metadata.Index)
   */
  @Override
  protected Collection<String> indexDeploymentStatements(Table table, Index index) {
    StringBuilder createIndexStatement = new StringBuilder();

    createIndexStatement.append("CREATE ");

    if (index.isUnique()) {
      createIndexStatement.append("UNIQUE NONCLUSTERED ");
    }

    createIndexStatement.append("INDEX ");
    createIndexStatement.append(index.getName());
    createIndexStatement.append(" ON ");
    createIndexStatement.append(schemaNamePrefix());
    createIndexStatement.append(table.getName());
    createIndexStatement.append(" (");

    boolean firstColumn = true;
    for (String columnName : index.columnNames()) {
      if (firstColumn) {
        firstColumn = false;
      } else {
        createIndexStatement.append(", ");
      }

      createIndexStatement.append(String.format("[%s]", columnName));
    }
    createIndexStatement.append(")");

    return Collections.singletonList(createIndexStatement.toString());
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#indexDropStatements(org.alfasoftware.morf.metadata.Table,
   *      org.alfasoftware.morf.metadata.Index)
   */
  @Override
  public Collection<String> indexDropStatements(Table table, Index indexToBeRemoved) {
    return Arrays.asList("DROP INDEX " + indexToBeRemoved.getName() + " ON " + schemaNamePrefix() + table.getName());
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#dropStatements(org.alfasoftware.morf.metadata.View)
   */
  @Override
  public Collection<String> dropStatements(View view) {
    List<String> statements = new ArrayList<>();
    // Guard with IF EXISTS so the drop is a no-op when the view is absent.
    StringBuilder createTableStatement = new StringBuilder();
    createTableStatement.append(String.format("IF EXISTS (SELECT * FROM sys.views WHERE object_id = OBJECT_ID(N'%s%s'))", schemaNamePrefix(), view.getName()));
    createTableStatement.append(String.format(" DROP VIEW %s%s", schemaNamePrefix(), view.getName()));
    statements.add(createTableStatement.toString());
    return statements;
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#preInsertWithPresetAutonumStatements(org.alfasoftware.morf.metadata.Table, boolean)
   */
  @Override
  public Collection<String> preInsertWithPresetAutonumStatements(Table table, boolean insertingUnderAutonumLimit) {
    // IDENTITY_INSERT must be ON to allow explicit values into an identity column.
    if (getAutoIncrementColumnForTable(table) != null) {
      return Arrays.asList("SET IDENTITY_INSERT " + schemaNamePrefix() + table.getName() + " ON");
    } else {
      return SqlDialect.NO_STATEMENTS;
    }
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#postInsertWithPresetAutonumStatements(org.alfasoftware.morf.metadata.Table, boolean)
   */
  @Override
  public void postInsertWithPresetAutonumStatements(Table table, SqlScriptExecutor executor,Connection connection, boolean insertingUnderAutonumLimit) {
    Column autonumber = getAutoIncrementColumnForTable(table);
    if (autonumber == null) {
      return;
    }

    // See http://social.msdn.microsoft.com/Forums/en-US/transactsql/thread/4443e023-b6e9-4d71-9b53-06245b2e95ef
    // After an insert of values lower than the autonumber seed value, the "current" autonumber value is set to the
    // autonumber seed value or the highest value inserted, whichever is higher. However, this differs from the
    // behaviour when the table is first created, when the first record will be created with the autonumber
    // seed value (not the autonumber seed value plus one). This sequence resolves the difference by first forcing
    // the current (next - 1) value to the start value minus one, then lets SQL server correct it to the highest
    // value in the table if it is too low.
    // TODO Alfa internal ref WEB-23969 if we're running on SQL Server 2012, this bug no longer exists so we can
    // just reseed as for an empty table (i.e. autonumber.getAutoNumberStart()). Need to implement this.
    executor.execute(ImmutableList.of(
      "SET IDENTITY_INSERT " + schemaNamePrefix() + table.getName() + " OFF",
      "IF EXISTS (SELECT 1 FROM " + schemaNamePrefix() + table.getName() + ")\n" +
      "BEGIN\n" +
      " DBCC CHECKIDENT (\"" + schemaNamePrefix() + table.getName() + "\", RESEED, " + (autonumber.getAutoNumberStart() - 1) + ")\n" +
      " DBCC CHECKIDENT (\"" + schemaNamePrefix() + table.getName() + "\", RESEED)\n" +
      "END\n" +
      "ELSE\n" +
      "BEGIN\n" +
      " DBCC CHECKIDENT (\"" + schemaNamePrefix() + table.getName() + "\", RESEED, " + autonumber.getAutoNumberStart() + ")\n" +
      "END"
    ),connection);
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getColumnRepresentation(org.alfasoftware.morf.metadata.DataType,
   *      int, int)
   */
  @Override
  protected String getColumnRepresentation(DataType dataType, int width, int scale) {
    // Character types get an explicit case-sensitive collation appended.
    if (needsCollation(dataType)) {
      return String.format("%s %s", getInternalColumnRepresentation(dataType, width, scale), COLLATE);
    }

    return getInternalColumnRepresentation(dataType, width, scale);
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.alfasoftware.morf.sql.element.Cast)
   */
  @Override
  protected String getSqlFrom(Cast cast) {
    DataType dataType = cast.getDataType();
    StringBuilder output = new StringBuilder();

    output.append("CAST(")
          .append(getSqlFrom(cast.getExpression()))
          .append(" AS ")
          .append(getInternalColumnRepresentation(dataType, cast.getWidth(), cast.getScale()))
          .append(")");

    if (needsCollation(dataType)) {
      output.append(" ").append(COLLATE);
    }

    return output.toString();
  }

  /**
   * @param dataType a data type to examine
   * @return true if this data type should have COLLATE set.
   */
  private static boolean needsCollation(DataType dataType) {
    return dataType == DataType.STRING || dataType == DataType.CLOB;
  }

  /**
   * Gets the underlying column representation (e.g. without any COLLATE statements).
   *
   * @param dataType the column datatype.
   * @param width the column width.
   * @param scale the column scale.
   * @return a string representation of the column definition.
   */
  private String getInternalColumnRepresentation(DataType dataType, int width, int scale) {
    switch (dataType) {
      case STRING:
        return String.format("NVARCHAR(%d)", width);

      case DECIMAL:
        return String.format("NUMERIC(%d,%d)", width, scale);

      case DATE:
        return "DATE";

      case BOOLEAN:
        return "BIT";

      case BIG_INTEGER:
        return "BIGINT";

      case INTEGER:
        return "INTEGER";

      case BLOB:
        return "IMAGE";

      case CLOB:
        return "NVARCHAR(MAX)";

      default:
        throw new UnsupportedOperationException("Cannot map column with type [" + dataType + "]");
    }
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#connectionTestStatement()
   */
  @Override
  public String connectionTestStatement() {
    return "select 1";
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getDatabaseType()
   */
  @Override
  public DatabaseType getDatabaseType() {
    return DatabaseType.Registry.findByIdentifier(SqlServer.IDENTIFIER);
  }

  /**
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(ConcatenatedField)
   */
  @Override
  protected String getSqlFrom(ConcatenatedField concatenatedField) {
    List<String> sql = new ArrayList<>();
    // NULLs propagate through '+' in T-SQL, so each operand is coalesced to ''.
    for (AliasedField field : concatenatedField.getConcatenationFields()) {
      sql.add("COALESCE("+getSqlFrom(field)+",'')");
    }
    return StringUtils.join(sql, " + ");
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#decorateTemporaryTableName(java.lang.String)
   */
  @Override
  public String decorateTemporaryTableName(String undecoratedName) {
    return "#" + undecoratedName;
  }

  /**
   * Removes any decoration characters from the name. (# for temp table).
   *
   * @param name name of table
   * @return version of name with any decoration removed.
*/
  public String undecorateName(String name) {
    if (name.startsWith("#")) {
      return name.substring(1);
    } else {
      return name;
    }
  }

  /**
   * {@inheritDoc}
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.alfasoftware.morf.sql.UpdateStatement)
   */
  @Override
  protected String getSqlFrom(UpdateStatement statement) {
    String destinationTableName = statement.getTable().getName();

    if (StringUtils.isBlank(destinationTableName)) {
      throw new IllegalArgumentException(String.format("Cannot create SQL for a blank table [%s]", destinationTableName));
    }

    StringBuilder sqlBuilder = new StringBuilder();

    // Add the preamble
    sqlBuilder.append("UPDATE ");

    // Now add the table to update
    sqlBuilder.append(schemaNamePrefix(statement.getTable()));
    sqlBuilder.append(destinationTableName);

    // Put in the standard fields
    sqlBuilder.append(getUpdateStatementSetFieldSql(statement.getFields()));

    // Add a FROM clause if the table is aliased
    if (!statement.getTable().getAlias().equals("")) {
      sqlBuilder.append(" FROM ");
      sqlBuilder.append(schemaNamePrefix(statement.getTable()));
      sqlBuilder.append(destinationTableName);
      sqlBuilder.append(String.format(" %s", statement.getTable().getAlias()));
    }

    // Now put the where clause in
    if (statement.getWhereCriterion() != null) {
      sqlBuilder.append(" WHERE ");
      sqlBuilder.append(getSqlFrom(statement.getWhereCriterion()));
    }

    return sqlBuilder.toString();
  }

  /**
   * Adds the column and, when the column is part of the primary key, re-creates
   * the primary key constraint so it includes the new column.
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#alterTableAddColumnStatements(org.alfasoftware.morf.metadata.Table, org.alfasoftware.morf.metadata.Column)
   */
  @Override
  public Collection<String> alterTableAddColumnStatements(Table table, Column column) {
    List<String> statements = new ArrayList<>();

    // TODO looks like if we're adding to an existing PK we should drop the PK first here. SQL
    // server is currently hard to test so need to investigate further.

    StringBuilder statement = new StringBuilder()
      .append("ALTER TABLE ")
      .append(schemaNamePrefix())
      .append(table.getName())
      .append(" ADD ") // We don't say COLUMN here for some reason
      .append(column.getName())
      .append(' ')
      .append(sqlRepresentationOfColumnType(table, column, true));

    statements.add(statement.toString());

    // Recreate the primary key if the column is in it
    if (column.isPrimaryKey()) {
      // Add the new column if this is a change and it wasn't part of they key
      // before. Remove it if it now isn't part of the key and it was before
      StringBuilder primaryKeyStatement = new StringBuilder()
        .append("ALTER TABLE ")
        .append(schemaNamePrefix())
        .append(table.getName())
        .append(" ADD ")
        .append(buildPrimaryKeyConstraint(table.getName(), namesOfColumns(primaryKeysForTable(table))));

      statements.add(primaryKeyStatement.toString());
    }

    return statements;
  }

  /**
   * Changes a column definition, working around SQL Server limitations: an identity
   * property cannot be removed in place (the table is rebuilt via SWITCH), and any
   * indexes, default constraints and the primary key touching the column must be
   * dropped and re-created around the ALTER COLUMN.
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#alterTableChangeColumnStatements(org.alfasoftware.morf.metadata.Table, org.alfasoftware.morf.metadata.Column, org.alfasoftware.morf.metadata.Column)
   */
  @Override
  public Collection<String> alterTableChangeColumnStatements(Table table, final Column oldColumn, Column newColumn) {
    List<String> statements = new ArrayList<>();

    // If we are removing the autonumber then we must completely rebuild the table
    // without the autonumber (identity) property before we do anything else
    // PLEASE NOTE - THIS DOES NOT COPY VIEWS OR INDEXES -- See WEB-23759
    if (oldColumn.isAutoNumbered() && !newColumn.isAutoNumbered()) {
      // Create clone of table
      Table clone = table(table.getName() + "Clone").columns(table.columns().toArray(new Column[table.columns().size()]));

      Collection<String> cloneTableStatements = tableDeploymentStatements(clone);

      // Deploy exactly the statements we generated above: the same collection is
      // inspected by containsPrimaryKeyConstraint() below, so generating a second,
      // separate set here would be wasteful and could diverge from what was added.
      statements.addAll(cloneTableStatements);

      // Meta data switch of the data from the original table to the cloned table
      statements.add("ALTER TABLE " + schemaNamePrefix() + table.getName() + " SWITCH TO " + schemaNamePrefix() + clone.getName());

      // Drop original table
      statements.add("DROP TABLE " + schemaNamePrefix() + table.getName());

      // Rename clone to make it look like the original table
      statements.add(String.format("EXECUTE sp_rename '%s%s', '%s%s'",
        schemaNamePrefix(), clone.getName(),
        schemaNamePrefix(), table.getName()
      ));

      if (containsPrimaryKeyConstraint(cloneTableStatements, clone.getName())) {
        statements.add(String.format("EXECUTE sp_rename '%s%s_PK', '%s%s_PK', 'OBJECT'",
          schemaNamePrefix(), clone.getName(),
          schemaNamePrefix(), table.getName()
        ));
      }
    }

    // build the old version of the table
    Table oldTable = oldTableForChangeColumn(table, oldColumn, newColumn);

    // If we are dropping or changing a column, drop indexes containing that column
    for (Index index : oldTable.indexes()) {
      for (String column : index.columnNames()) {
        if (column.equalsIgnoreCase(oldColumn.getName())) {
          statements.addAll(indexDropStatements(oldTable, index));
        }
      }
    }

    // Drop any defaults for the old column
    if (StringUtils.isNotBlank(oldColumn.getDefaultValue()))
      statements.add(dropDefaultForColumn(table, oldColumn));

    // Rename the column if we need to
    if (!oldColumn.getName().equals(newColumn.getName())) {
      statements.add(String.format("EXEC sp_rename '%s%s.%s', '%s', 'COLUMN'",
        schemaNamePrefix(),
        table.getName(),
        oldColumn.getName(),
        newColumn.getName()
      ));
    }

    // Drop and re-create the primary key if either new or old columns are part of the PK
    boolean recreatePrimaryKey = oldColumn.isPrimaryKey() || newColumn.isPrimaryKey();

    // only drop if there actually was a PK though...
    if (recreatePrimaryKey && !primaryKeysForTable(oldTable).isEmpty()) {
      statements.add(dropPrimaryKey(table));
    }

    statements.add(new StringBuilder()
      .append("ALTER TABLE ")
      .append(schemaNamePrefix())
      .append(table.getName())
      .append(" ALTER COLUMN ")
      .append(newColumn.getName())
      .append(' ')
      .append(sqlRepresentationOfColumnType(table, newColumn, true))
      .toString());

    // Create the indexes we dropped previously
    for (Index index : table.indexes()) {
      for (String column : index.columnNames()) {
        if (column.equalsIgnoreCase(newColumn.getName())) {
          statements.addAll(addIndexStatements(table, index));
        }
      }
    }

    List<Column> primaryKeyColumns = primaryKeysForTable(table);

    // Recreate the primary key if necessary
    if (recreatePrimaryKey && !primaryKeyColumns.isEmpty()) {
      statements.add(new StringBuilder()
        .append("ALTER TABLE ").append(schemaNamePrefix()).append(table.getName()).append(" ADD ")
        .append(buildPrimaryKeyConstraint(table.getName(), namesOfColumns(primaryKeyColumns)))
        .toString()
      );
    }

    return statements;
  }

  /**
   * @param statements the statements to check for the primary key constraint
   * @param tableName the table name which is expected to make up the pk constraint name
   * @return true if the statements contain tableName_PK
   */
  private boolean containsPrimaryKeyConstraint(Collection<String> statements, String tableName) {
    for (String s : statements) {
      if (s.contains(tableName + "_PK")) {
        return true;
      }
    }
    return false;
  }

  /**
   * Drops the existing primary key constraint (if any) and adds the new one (if any).
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#changePrimaryKeyColumns(org.alfasoftware.morf.metadata.Table, java.util.List, java.util.List)
   */
  @Override
  public Collection<String> changePrimaryKeyColumns(Table table, List<String> oldPrimaryKeyColumns, List<String> newPrimaryKeyColumns) {
    List<String> statements = new ArrayList<>();

    if (!oldPrimaryKeyColumns.isEmpty()) {
      statements.add(dropPrimaryKey(table));
    }

    if (!newPrimaryKeyColumns.isEmpty()) {
      statements.add(new StringBuilder()
        .append("ALTER TABLE ").append(schemaNamePrefix()).append(table.getName()).append(" ADD ")
        .append(buildPrimaryKeyConstraint(table.getName(), newPrimaryKeyColumns))
        .toString()
      );
    }

    return statements;
  }

  /**
   * Drops the column, first removing any default constraint on it and, when the
   * column is part of the primary key, dropping and re-creating the primary key.
   *
   * @see org.alfasoftware.morf.jdbc.SqlDialect#alterTableDropColumnStatements(org.alfasoftware.morf.metadata.Table, org.alfasoftware.morf.metadata.Column)
   */
  @Override
  public Collection<String> alterTableDropColumnStatements(Table table, final Column column) {
    List<String> statements = new ArrayList<>();

    // Drop any defaults for the old column
    if (StringUtils.isNotBlank(column.getDefaultValue()))
      statements.add(dropDefaultForColumn(table, column));

    // Drop the primary key if the column is part of the primary key and we are dropping the column
    boolean recreatePrimaryKey = column.isPrimaryKey();
    if (recreatePrimaryKey) {
      statements.add(dropPrimaryKey(table));
    }

    // We can't use the superclass method as we need to make sure we
    // modify the correct schema in the database
    StringBuilder statement = new StringBuilder()
      .append("ALTER TABLE ")
      .append(schemaNamePrefix())
      .append(table.getName())
      .append(" DROP COLUMN ")
      .append(column.getName());

    statements.add(statement.toString());

    List<Column> primaryKeyColumns = primaryKeysForTable(table);

    // Recreate the primary key if necessary
    if (recreatePrimaryKey && !primaryKeyColumns.isEmpty()) {
      statements.add(new StringBuilder()
        .append("ALTER TABLE ").append(schemaNamePrefix()).append(table.getName()).append(" ADD ")
        .append(buildPrimaryKeyConstraint(table.getName(), namesOfColumns(primaryKeyColumns)))
        .toString()
      );
    }

    return statements;
  }

  /**
   * Builds the statement dropping the {@code <table>_PK} constraint from the table.
   *
   * @param table the table whose primary key constraint should be dropped.
   * @return the DROP CONSTRAINT statement.
   */
  private String dropPrimaryKey(Table table) {
    StringBuilder dropPkStatement = new StringBuilder();

    dropPkStatement.append("ALTER TABLE ").append(schemaNamePrefix()).append(table.getName()).append(" DROP ");
    dropPkStatement.append("CONSTRAINT [");
    dropPkStatement.append(undecorateName(table.getName()));
    dropPkStatement.append("_PK]");

    return dropPkStatement.toString();
  }
/** * Return the SQL representation for the column on the table. * * @see #sqlRepresentationOfColumnType(Table, Column) * @param table The table * @param column The column * @param includeDefaultWithValues Whether to include the WITH VALUES clause. * This is only applicable on ALTER statements. */ private String sqlRepresentationOfColumnType(Table table, Column column, boolean includeDefaultWithValues) { StringBuilder suffix = new StringBuilder(column.isNullable() ? "" : " NOT NULL"); if (StringUtils.isNotEmpty(column.getDefaultValue())) { suffix.append(" CONSTRAINT " + getColumnDefaultConstraintName(table, column) + " DEFAULT " + getSqlFrom(new FieldLiteral(column.getDefaultValue(), column.getType()))) ; suffix.append(includeDefaultWithValues ? " WITH VALUES" : ""); } return getColumnRepresentation(column.getType(), column.getWidth(), column.getScale()) + suffix; } /** * Returns SQL to drop the DEFAULT constraint for a particular column on a * particular table. * * @param table The name of the table on which the column resides. * @param column The name of the column. * @return SQL to drop the DEFAULT constraint for the specified column on the * specified table. */ private String dropDefaultForColumn(final Table table, final Column column) { // This SQL came from http://stackoverflow.com/questions/8641954/how-to-drop-column-with-constraint return dropDefaultForColumnSql .replace("{table}", table.getName()) .replace("{column}", column.getName()); } /** * Get the name of the DEFAULT constraint for a column. * * @param table The table on which the column exists. * @param column The column to get the name for. * @return The name of the DEFAULT constraint for the column on the table. 
*/ private String getColumnDefaultConstraintName(final Table table, final Column column) { return table.getName() + "_" + column.getName() + "_DF"; } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForYYYYMMDDToDate(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForYYYYMMDDToDate(Function function) { return "CONVERT(date, " + getSqlFrom(function.getArguments().get(0)) + ", 112)"; } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForDateToYyyymmdd(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForDateToYyyymmdd(Function function) { return String.format("CONVERT(VARCHAR(8),%s, 112)", getSqlFrom(function.getArguments().get(0))); }; /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForDateToYyyymmddHHmmss(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForDateToYyyymmddHHmmss(Function function) { return String.format("REPLACE(REPLACE(REPLACE(CONVERT(VARCHAR(19),%s, 120),'-',''), ':', ''), ' ', '')", getSqlFrom(function.getArguments().get(0))); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForNow(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForNow(Function function) { return "GETUTCDATE()"; } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForDaysBetween(org.alfasoftware.morf.sql.element.AliasedField, org.alfasoftware.morf.sql.element.AliasedField) */ @Override protected String getSqlForDaysBetween(AliasedField toDate, AliasedField fromDate) { return String.format("DATEDIFF(DAY, %s, %s)", getSqlFrom(fromDate), getSqlFrom(toDate)); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForMonthsBetween(org.alfasoftware.morf.sql.element.AliasedField, org.alfasoftware.morf.sql.element.AliasedField) */ @Override protected String getSqlForMonthsBetween(AliasedField toDate, AliasedField fromDate) { String toDateStr = getSqlFrom(toDate); String fromDateStr = getSqlFrom(fromDate); return String.format( "CASE 
" + "WHEN %s = %s THEN 0 " + "ELSE " + "DATEDIFF(MONTH, %s, %s) + " + "CASE " + "WHEN %s > %s THEN " + "CASE " + "WHEN DATEPART(day, %s) <= DATEPART(day, %s) OR MONTH(%s) <> MONTH(DATEADD(DAY, 1, %s)) THEN 0 " + "ELSE -1 " + "END " + "ELSE " + "CASE " + "WHEN DATEPART(day, %s) <= DATEPART(day, %s) OR MONTH(%s) <> MONTH(DATEADD(DAY, 1, %s)) THEN 0 " + "ELSE 1 " + "END " + "END " + "END ", fromDateStr, toDateStr, fromDateStr, toDateStr, toDateStr, fromDateStr, fromDateStr, toDateStr, toDateStr, toDateStr, toDateStr, fromDateStr, fromDateStr, fromDateStr ); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForLeftPad(org.alfasoftware.morf.sql.element.AliasedField, org.alfasoftware.morf.sql.element.AliasedField, org.alfasoftware.morf.sql.element.AliasedField) */ @Override protected String getSqlForLeftPad(AliasedField field, AliasedField length, AliasedField character) { String strField = getSqlFrom(field); String strLength = getSqlFrom(length); String strCharacter = getSqlFrom(character); return String.format("CASE " + "WHEN LEN(%s) > %s THEN " + "LEFT(%s, %s) " + "ELSE " + "RIGHT(REPLICATE(%s, %s) + %s, %s) " + "END", strField, strLength, strField, strLength, strCharacter, strLength, strField, strLength); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForOrderByField(org.alfasoftware.morf.sql.element.FieldReference) */ @Override protected String getSqlForOrderByField(FieldReference orderByField) { StringBuilder result = new StringBuilder(); String sqlFromField = getSqlFrom(orderByField); if (orderByField.getNullValueHandling().isPresent()) { switch (orderByField.getNullValueHandling().get()) { case FIRST: result.append("(CASE WHEN ").append(sqlFromField).append(" IS NULL THEN 0 ELSE 1 END), "); break; case LAST: result.append("(CASE WHEN ").append(sqlFromField).append(" IS NULL THEN 1 ELSE 0 END), "); break; case NONE: default: break; } } result.append(sqlFromField); switch (orderByField.getDirection()) { case DESCENDING: result.append(" DESC"); 
break; case ASCENDING: case NONE: default: break; } return result.toString().trim(); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlforLength(Function) */ @Override protected String getSqlforLength(Function function){ return String.format("LEN(%s)", getSqlFrom(function.getArguments().get(0))); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForAddDays(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForAddDays(Function function) { return String.format( "DATEADD(dd, %s, %s)", getSqlFrom(function.getArguments().get(1)), getSqlFrom(function.getArguments().get(0)) ); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForAddMonths(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForAddMonths(Function function) { return String.format( "DATEADD(month, %s, %s)", getSqlFrom(function.getArguments().get(1)), getSqlFrom(function.getArguments().get(0)) ); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForMod(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForMod(Function function) { return String.format("%s %% %s", getSqlFrom(function.getArguments().get(0)), getSqlFrom(function.getArguments().get(1))); } @Override public Collection<String> renameTableStatements(Table fromTable, Table toTable) { String from = fromTable.getName(); String to = toTable.getName(); Builder<String> builder = ImmutableList.<String>builder(); builder.add("IF EXISTS (SELECT 1 FROM sys.objects WHERE OBJECT_ID = OBJECT_ID(N'" + from + "_version_DF') AND type = (N'D')) exec sp_rename N'" + from + "_version_DF', N'" + to + "_version_DF'"); if (!primaryKeysForTable(fromTable).isEmpty()) { builder.add("sp_rename N'" + from + "." 
+ from + "_PK', N'" + to + "_PK', N'INDEX'"); } builder.add("sp_rename N'" + from + "', N'" + to + "'"); return builder.build(); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.alfasoftware.morf.sql.element.FieldLiteral) */ @Override protected String getSqlFrom(FieldLiteral field) { switch (field.getDataType()) { case DATE: // SQL server does not support ISO standard date literals. return String.format("'%s'", field.getValue()); default: return super.getSqlFrom(field); } } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.joda.time.LocalDate) */ @Override protected String getSqlFrom(LocalDate literalValue) { // SQL server does not support ISO standard date literals. return String.format("'%s'", literalValue.toString("yyyy-MM-dd")); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#renameIndexStatements(org.alfasoftware.morf.metadata.Table, java.lang.String, java.lang.String) */ @Override public Collection<String> renameIndexStatements(Table table, String fromIndexName, String toIndexName) { return ImmutableList.of(String.format("sp_rename N'%s%s.%s', N'%s', N'INDEX'", schemaNamePrefix(), table.getName(), fromIndexName, toIndexName)); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForRandomString(org.alfasoftware.morf.sql.element.Function) */ @Override protected String getSqlForRandomString(Function function) { return String.format("SUBSTRING(REPLACE(CONVERT(varchar(255),NEWID()),'-',''), 1, %s)",getSqlFrom(function.getArguments().get(0))); } /** * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.alfasoftware.morf.sql.SelectFirstStatement) */ @Override protected String getSqlFrom(SelectFirstStatement stmt) { StringBuilder result = new StringBuilder("SELECT TOP 1 "); // Start by adding the field result.append(getSqlFrom(stmt.getFields().get(0))); appendFrom(result, stmt); appendJoins(result, stmt, innerJoinKeyword(stmt)); appendWhere(result, stmt); appendOrderBy(result, stmt); return result.toString().trim(); } 
/**
 * Renders this dialect's supported query hints as an OPTION(...) clause.
 *
 * NOTE(review): when there are no hints this returns
 * super.selectStatementPreFieldDirectives(selectStatement) rather than the
 * POST-statement variant - this looks like a copy/paste slip; confirm the
 * superclass returns the same value for both before relying on it.
 */
@Override
protected String selectStatementPostStatementDirectives(SelectStatement selectStatement) {
  if (selectStatement.getHints().isEmpty()) {
    return super.selectStatementPreFieldDirectives(selectStatement);
  }

  String option = " OPTION(";
  boolean comma = false;
  StringBuilder hintsBuilder = new StringBuilder();

  for (Hint hint : selectStatement.getHints()) {
    // A comma separator is only managed for hints registered in SUPPORTED_HINTS;
    // the instanceof branches below append their text unconditionally.
    if (SUPPORTED_HINTS.contains(hint.getClass())) {
      if (comma) {
        hintsBuilder.append(", ");
      } else {
        comma = true;
      }
    }
    if (hint instanceof OptimiseForRowCount) {
      hintsBuilder.append("FAST " + ((OptimiseForRowCount)hint).getRowCount());
    }
    if (hint instanceof UseIndex) {
      UseIndex useIndex = (UseIndex) hint;
      hintsBuilder.append("TABLE HINT(")
        // Includes schema name - see https://msdn.microsoft.com/en-us/library/ms181714.aspx
        .append(StringUtils.isEmpty(useIndex.getTable().getAlias()) ? schemaNamePrefix(useIndex.getTable()) + useIndex.getTable().getName() : useIndex.getTable().getAlias())
        .append(", INDEX(" + useIndex.getIndexName() + ")")
        .append(")");
    }
    if (hint instanceof UseImplicitJoinOrder) {
      hintsBuilder.append("FORCE ORDER");
    }
  }

  option += hintsBuilder.toString() + ")";

  // If no hint produced any text, emit nothing rather than an empty OPTION() clause
  return isNullOrEmpty(hintsBuilder.toString()) ? "" : option;
}


/**
 * SQL server places a shared lock on a record when it is selected without doing anything else (no MVCC)
 * so no need to specify a lock mode.
 *
 * @see org.alfasoftware.morf.jdbc.SqlDialect#getForUpdateSql()
 * @see http://stackoverflow.com/questions/10935850/when-to-use-select-for-update
 */
@Override
protected String getForUpdateSql() {
  return StringUtils.EMPTY;
}


/**
 * @see org.alfasoftware.morf.jdbc.SqlDialect#supportsWindowFunctions()
 */
@Override
public boolean supportsWindowFunctions() {
  return false; // SqlServer does not have full support for window functions before 2012
}


/**
 * Always throws: window functions are not supported by this dialect.
 *
 * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlFrom(org.alfasoftware.morf.sql.element.WindowFunction)
 */
@Override
protected String getSqlFrom(final WindowFunction windowFunctionField) {
  throw new UnsupportedOperationException(this.getClass().getSimpleName()+" does not support window functions.");
}


/**
 * Rolls forward to the first instant of the next month, then steps back one
 * second to land within the last day of the given date's month.
 *
 * @see org.alfasoftware.morf.jdbc.SqlDialect#getSqlForLastDayOfMonth
 */
@Override
protected String getSqlForLastDayOfMonth(AliasedField date) {
  return "DATEADD(s,-1,DATEADD(mm, DATEDIFF(m,0," + getSqlFrom(date) + ")+1,0))";
}


/**
 * @see SqlDialect#getDeleteLimitPreFromClause(int)
 */
@Override
protected Optional<String> getDeleteLimitPreFromClause(int limit) {
  return Optional.of("TOP (" + limit + ")");
}
}
import android.content.Context; import android.content.Intent; import android.content.res.Resources; import android.database.Cursor; import android.os.Bundle; import android.text.TextUtils; import android.util.Log; import android.view.LayoutInflater; import android.view.MenuItem; import android.view.View; import android.view.ViewGroup; import com.google.android.libraries.social.stream.legacy.views.StreamGridView; import com.google.android.libraries.social.ui.views.fab.FloatingActionButton; import java.util.ArrayList; import java.util.List; public final class dsy extends cge implements bx, dtl, gqk, kxa, kxo, kxp, kyd, kye, lfm, lfs, lht, luu { private final lfk Z = new lfk(this, this.bp, this); String a; private Boolean aQ; private Boolean aR; private String aS; private long aT; private hqr aU; private lfr aV; private kxi aW; private kwz aX; private hum aY; private dsr aZ; private final lfz aa = new lfz(this, this.bp); private final lfi ab = new lfi(this, this.bp); private final kza ac = new kza(this, this.bp, this, this); private final lin ad = new lin(this, this.bp); private final iic ae; private kzu af; private String ag; private String ah; private boolean ai; private boolean aj; private boolean ak; private List<lhr> al; private int am; private boolean an; private boolean ao; private boolean ap; private Boolean aq; String b; String c; dti d; public dsy() { iic localiic = new iic(this, this.bp, new dta(this)); localiic.e = new dsz(this); localiic.f = true; this.ae = localiic; this.am = 0; this.ao = true; this.ap = true; new gzc(this, new dtc(this)); } private final void P() { int i; hqr localhqr; if (!TextUtils.isEmpty(this.d.b)) { i = 1; localhqr = null; if (i != 0) { localhqr = new hqr(new String[] { "_id" }, 3); Integer[] arrayOfInteger1 = new Integer[1]; arrayOfInteger1[0] = Integer.valueOf(0); localhqr.a(arrayOfInteger1); if (this.al.size() > 1) { Integer[] arrayOfInteger4 = new Integer[1]; arrayOfInteger4[0] = Integer.valueOf(1); 
localhqr.a(arrayOfInteger4); } if (!Z()) { break label127; } Integer[] arrayOfInteger3 = new Integer[1]; arrayOfInteger3[0] = Integer.valueOf(2); localhqr.a(arrayOfInteger3); } } for (;;) { this.aU = localhqr; return; i = 0; break; label127: if (this.ak) { Integer[] arrayOfInteger2 = new Integer[1]; arrayOfInteger2[0] = Integer.valueOf(3); localhqr.a(arrayOfInteger2); } } } private final void Q() { Boolean localBoolean; if (this.aL != null) { localBoolean = this.aR; if (localBoolean != null) { break label30; } } label30: for (boolean bool = false; bool; bool = localBoolean.booleanValue()) { this.aL.a(); return; } cyh localcyh = this.aL; localcyh.f = true; localcyh.b(); } private final void R() { int i = this.at.c(); Intent localIntent = efj.j(this.bn, i); localIntent.putExtra("square_embed", h(true)); localIntent.putExtra("disable_location", true); localIntent.putExtra("clear_acl", true); localIntent.putExtra("circle_usage_type", 16); localIntent.putExtra("category_display_mode", 2); localIntent.putExtra("filter_null_gaia_ids", true); a(localIntent); } private final void S() { int i = this.at.c(); Intent localIntent = efj.j(this.bn, i); localIntent.putExtra("square_embed", h(false)); localIntent.putExtra("disable_location", true); a(localIntent); } private final void a(String paramString1, String paramString2) { if ((this.b == null) && (paramString1 == null)) { return; } this.b = paramString1; this.ah = paramString2; dti localdti = this.d; if (paramString1 != null) {} for (boolean bool = true;; bool = false) { localdti.p = bool; this.aI = true; this.aB = null; this.ak = false; P(); M_(); this.m.putString("stream_id", this.b); k().b(2, null, this); this.aK = true; Q(); super.W(); return; } } private final void g(boolean paramBoolean) { Bundle localBundle = gxy.a("extra_square_id", this.a); localBundle.putByteArray("extra_notification_volume_change", qat.a(gxy.a(4, this.d.c().c, this.d.c().e, this.d.c().c, paramBoolean))); int i = this.at.c(); gxx localgxx = 
(gxx)this.bo.a(gxx.class); gxw localgxw = new gxw(this.bn, i); localgxw.c = gxz.ae; if (localBundle != null) { localgxw.h.putAll(localBundle); } localgxx.a(localgxw); } private final lmb h(boolean paramBoolean) { String str1 = this.d.d; String str2 = this.d.c; if (this.d.r != null) {} for (boolean bool = true;; bool = false) { return new lmb(this.a, this.c, str1, str2, paramBoolean, bool); } } protected final String D() { return "android_communities_gmh"; } protected final gzf F() { mbf localmbf = this.bn; int i = this.at.c(); String str1 = this.a; String str2 = this.b; if (this.az != null) {} for (String[] arrayOfString = this.az.u();; arrayOfString = null) { return bnd.a(localmbf, i, 4, null, str1, str2, arrayOfString, null, this.aC, ab()); } } protected final boolean G() { return false; } public final void K_() { int i = this.at.c(); Intent localIntent = ((kyh)mbb.a(this.bn, kyh.class)).a(i, this.a, this.d.g, this.d.i, null); ay.a(f(), localIntent, ((gvj)this.bo.a(gvj.class)).a()); } protected final boolean L_() { return (this.d.isEmpty()) && (super.L_()); } public final void M() { b(efj.k(this.bn, this.at.c())); } protected final void M_() { this.aH = efj.a(null, null, false, this.a, this.b, 0, null); } public final String N() { return this.a; } public final int O() { return this.d.g; } public final void W() { super.W(); am().b(new kzg(this.bn, this.at.c(), this.a, this.an)); N_(); } protected final boolean Z() { return (this.aQ != null) && (!this.aQ.booleanValue()); } public final boolean Z_() { int m; if ((!super.Z_()) && (!this.ao)) { gzj localgzj = am(); if (localgzj.c == null) { break label100; } gzy localgzy = localgzj.b; efj.k(); int j = localgzj.c.b; int k = localgzy.c.size(); m = 0; if (m >= k) { break label100; } if (((gzf)localgzy.c.get(m)).h != j) { break label94; } } label94: label100: for (int i = 1;; i = 0) { if (i == 0) { break label105; } return true; m++; break; } label105: return false; } public final View a(LayoutInflater 
paramLayoutInflater, ViewGroup paramViewGroup, Bundle paramBundle) { View localView = super.a(paramLayoutInflater, paramViewGroup, paramBundle); this.d = ((dti)this.az); this.d.v = this.aq; dti localdti = this.d; if (this.b != null) {} for (boolean bool = true;; bool = false) { localdti.p = bool; cyh localcyh = this.aL; localcyh.f = true; localcyh.b(); return localView; } } protected final dfz a(Context paramContext, StreamGridView paramStreamGridView, hfa paramhfa, int paramInt, dat paramdat, dge paramdge, cyh paramcyh, lob paramlob) { return new dti(paramContext, paramStreamGridView, paramhfa, paramInt, paramdat, paramdge, paramcyh, paramlob); } public final fu<Cursor> a(int paramInt, Bundle paramBundle) { switch (paramInt) { default: return super.a(paramInt, paramBundle); } kzo localkzo = new kzo(this.bn, this.at.c(), this.a, kzq.d); localkzo.r = false; return localkzo; } public final void a(int paramInt, Bundle paramBundle, String paramString) {} public final void a(int paramInt, String paramString1, String paramString2) { if (this.am == paramInt) { return; } this.am = paramInt; this.d.x = this.am; ad(); a(paramString1, paramString2); } public final void a(int paramInt, boolean paramBoolean, Bundle paramBundle, String paramString) {} public final void a(Bundle paramBundle) { super.a(paramBundle); this.al = new ArrayList(); if (paramBundle != null) { if (paramBundle.containsKey("square_expanded")) { this.aq = Boolean.valueOf(paramBundle.getBoolean("square_expanded")); } if (paramBundle.containsKey("square_name")) { this.c = paramBundle.getString("square_name"); } if (paramBundle.containsKey("square_stream_name")) { this.ah = paramBundle.getString("square_stream_name"); } this.ai = paramBundle.getBoolean("square_is_restricted"); } for (this.an = false;; this.an = true) { k().a(100, null, this); Intent localIntent = f().getIntent(); this.aS = localIntent.getStringExtra("com.google.android.libraries.social.notifications.notif_id"); this.aT = 
localIntent.getLongExtra("com.google.android.libraries.social.notifications.updated_version", 0L); this.aV = new lfr(this.bn, this, this.at.c()); this.aV.c = this; return; } } public final void a(Bundle paramBundle, String paramString) { if ("report_invite_abuse".equals(paramString)) { gzj localgzj = am(); bnt localbnt = new bnt(this.bn, this.at.c(), this.aS, this.aT); localgzj.d.a(localbnt, false); localgzj.b(localbnt); } } public final void a(fu<Cursor> paramfu, Cursor paramCursor) { boolean bool6; boolean bool7; label209: boolean bool8; label238: boolean bool9; label267: boolean bool10; label288: boolean bool11; label309: boolean bool16; label446: boolean bool12; label475: boolean bool13; label498: boolean bool14; label532: int j; label608: int k; Object localObject1; label656: int n; label711: int i6; label835: Object localObject2; int i2; int i3; label953: String str2; Object localObject3; int i4; switch (paramfu.i) { default: super.a(paramfu, paramCursor); return; case 100: this.ao = false; if ((paramCursor != null) && (paramCursor.moveToFirst())) { dti localdti1 = this.d; localdti1.a = paramCursor.getString(0); localdti1.b = paramCursor.getString(1); localdti1.c = paramCursor.getString(3); localdti1.d = paramCursor.getString(2); localdti1.e = paramCursor.getString(4); localdti1.f = paramCursor.getInt(6); localdti1.g = paramCursor.getInt(7); if (paramCursor.getInt(8) != 0) { bool6 = true; localdti1.m = bool6; localdti1.h = paramCursor.getInt(10); localdti1.i = paramCursor.getInt(5); if (paramCursor.getInt(11) == 0) { break label1048; } bool7 = true; localdti1.j = bool7; if ((!localdti1.m) || (paramCursor.getInt(16) == 0)) { break label1054; } bool8 = true; localdti1.k = bool8; if ((!localdti1.m) || (paramCursor.getInt(15) == 0)) { break label1060; } bool9 = true; localdti1.l = bool9; if (paramCursor.getInt(17) == 0) { break label1066; } bool10 = true; localdti1.n = bool10; if (paramCursor.getInt(26) == 0) { break label1072; } bool11 = true; localdti1.o = 
bool11; localdti1.q = paramCursor.getInt(28); localdti1.r = paramCursor.getString(30); localdti1.s = jht.c(paramCursor.getBlob(31)); localdti1.u = ((qek)hqv.a(new qek(), paramCursor.getBlob(32))); localdti1.t.clear(); if (!paramCursor.isNull(33)) { localdti1.t.addAll(mab.d(paramCursor.getBlob(33))); } if (localdti1.v == null) { if (localdti1.m) { break label1078; } bool16 = true; localdti1.v = Boolean.valueOf(bool16); } localdti1.notifyDataSetChanged(); if (paramCursor.getInt(12) == 0) { break label1084; } bool12 = true; this.aQ = Boolean.valueOf(bool12); if (paramCursor.getInt(8) == 0) { break label1090; } bool13 = true; this.aR = Boolean.valueOf(bool13); this.c = paramCursor.getString(1); if (paramCursor.getString(30) == null) { break label1096; } bool14 = true; this.ai = bool14; kwz localkwz = this.aX; kxb localkxb = new kxb(this.bn, ehr.ao, ehr.aq, ehr.ap, ehr.an, ehr.am); localkwz.a(localkxb.a, null, localkxb); kzc[] arrayOfkzc = kzc.a(paramCursor.getBlob(18)); if (arrayOfkzc == null) { break label1102; } j = arrayOfkzc.length; if ((arrayOfkzc == null) && (this.aQ.booleanValue())) { break label1235; } if (j != 1) { break label1108; } this.ag = arrayOfkzc[0].a; String str5 = arrayOfkzc[0].b; k = 0; localObject1 = str5; int m = Math.max(0, -1 + this.al.size()); if ((!this.ap) && (k == m) && ((this.ag == null) || (TextUtils.equals(this.ag, this.b)))) { break label1123; } n = 1; if (Log.isLoggable("HostedSquareStreamFrag", 3)) { boolean bool15 = this.ap; String str3 = this.ag; String str4 = this.b; new StringBuilder(92 + String.valueOf(str3).length() + String.valueOf(str4).length()).append("populatePrimarySpinner firstLoad=").append(bool15).append(" numStreams=").append(k).append(" old=").append(m).append(" streamId=").append(str3).append(" old=").append(str4); } this.ap = false; if (n == 0) { i6 = 0; if (i6 < k) { if (TextUtils.equals(arrayOfkzc[i6].a, ((lhr)this.al.get(i6 + 1)).b)) { break label1129; } n = 1; } } if (n == 0) { break label1235; } int i1 = -1; 
this.al.clear(); localObject2 = this.ag; if (k > 1) { List localList = this.al; int i5 = efj.Bs; localList.add(new lhr(g().getString(i5), null)); i1 = 0; } i2 = 0; i3 = i1; if (i2 >= k) { break label1135; } kzc localkzc = arrayOfkzc[i2]; str2 = localkzc.a; localObject3 = localkzc.b; this.al.add(new lhr((String)localObject3, str2)); if (!TextUtils.equals(this.b, str2)) { break label1507; } i4 = i2 + 1; } } break; } for (Object localObject4 = str2;; localObject4 = localObject2) { i2++; i3 = i4; localObject2 = localObject4; localObject1 = localObject3; break label953; bool6 = false; break; label1048: bool7 = false; break label209; label1054: bool8 = false; break label238; label1060: bool9 = false; break label267; label1066: bool10 = false; break label288; label1072: bool11 = false; break label309; label1078: bool16 = false; break label446; label1084: bool12 = false; break label475; label1090: bool13 = false; break label498; label1096: bool14 = false; break label532; label1102: j = 0; break label608; label1108: this.ag = null; k = j; localObject1 = null; break label656; label1123: n = 0; break label711; label1129: i6++; break label835; label1135: this.am = i3; this.d.x = i3; if (this.al.size() > 1) { dti localdti2 = this.d; localdti2.w = this.al; if (localdti2.y != null) { localdti2.y.a(localdti2.w); } this.d.x = this.am; P(); } if (!TextUtils.isEmpty(this.ag)) { localObject2 = null; } a((String)localObject2, (String)localObject1); label1235: Q(); this.as.a(); hwq localhwq = this.aw; hwt localhwt = hwt.b; if (localhwt == null) { throw new NullPointerException(); } localhwq.g = ((hwt)localhwt); localhwq.a(); if (Log.isLoggable("HostedSquareStreamFrag", 4)) { String str1 = String.valueOf(this.c); if (str1.length() == 0) { break label1341; } "- setSquareData name=".concat(str1); } for (;;) { P(); k().b(2, null, this); N_(); return; label1341: new String("- setSquareData name="); } int i; if (!this.ak) { gzj localgzj = am(); boolean bool2 = localgzj.a("prefetch_newposts"); 
boolean bool3 = localgzj.a("fetch_older"); boolean bool4 = localgzj.a("fetch_newer"); boolean bool5 = localgzj.a("GetSquareTask"); if ((bool3) || (bool4) || (bool5) || (bool2)) { i = 0; if (i == 0) { break label1502; } } } else { if ((this.d.aN[2].c == null) || (this.d.e_(2) != 0) || (paramCursor.getCount() != 0)) { break label1502; } } label1502: for (boolean bool1 = true;; bool1 = false) { this.ak = bool1; if (this.ak) { P(); } super.a(paramfu, paramCursor); return; i = 1; break; } label1507: localObject3 = localObject1; i4 = i3; } } public final void a(gpr paramgpr) { int i = 1; gqe localgqe = (gqe)paramgpr.a(eyg.G); localgqe.b = Math.max(i, localgqe.b); if (Z_()) { localgqe.a = i; } paramgpr.a(this.c); int j; label111: int k; label169: int i1; label286: int i3; if ((this.aR == null) || (this.d == null)) { j = i; if (j == 0) { if (!this.d.k) { break label441; } paramgpr.b(eyg.p); if (this.d.l) { paramgpr.b(eyg.L); } if (this.d.l) { int i6 = eyg.K; gqj localgqj = new gqj(this); localgqj.a = efj.Bz; paramgpr.a(i6, localgqj); } if (this.d.g != i) { break label464; } k = i; if ((k == 0) && (((git)mbb.a(this.bn, git.class)).f().c("is_google_plus"))) { paramgpr.b(eyg.H); } if (!this.aR.booleanValue()) { break label513; } paramgpr.b(eyg.M); paramgpr.b(eyg.E); if (!this.d.n) { break label470; } paramgpr.b(eyg.w); int n = this.d.g; if ((n != 2) && (n != i)) { break label662; } i1 = i; if (i1 != 0) { i3 = eyg.B; paramgpr.a(i3, eyg.O, 0, efj.BC); if (this.d.g != i) { break label668; } } } } for (;;) { if (i != 0) { paramgpr.a(i3, eyg.N, 0, efj.BB); } if (this.ae.a(this.at.c())) { int i2 = eyg.s; iic localiic = this.ae; gxr localgxr = new gxr(); gxq localgxq = new gxq(pjo.c); localgxr.b.add(localgxq); paramgpr.a(i2, new iig(localiic, localgxr.a(this.bn))); } return; j = 0; break; label441: if (!this.d.l) { break label111; } paramgpr.b(eyg.q); break label111; label464: k = 0; break label169; label470: MenuItem localMenuItem = paramgpr.b(eyg.z); if (!this.d.o) {} int i5; for 
(int i4 = i;; i5 = 0) { localMenuItem.setEnabled(i4); break; } label513: dti localdti = this.d; kxg localkxg = efj.e(localdti.g, localdti.i); switch (dtb.a[localkxg.ordinal()]) { default: label572: if (this.d.g != 5) { break; } } for (int m = i; m != 0; m = 0) { paramgpr.b(eyg.v); break; paramgpr.b(eyg.D); break label572; paramgpr.b(eyg.n); break label572; paramgpr.b(eyg.t); break label572; paramgpr.b(eyg.I); break label572; } label662: i1 = 0; break label286; label668: i = 0; } } public final void a(String paramString, hae paramhae, haa paramhaa) { boolean bool = true; if ((paramhae != null) && (kdd.a(paramhae.c, "notFound"))) { this.aj = bool; } super.a(paramString, paramhae, paramhaa); N_(); if (hae.a(paramhae)) { if ("GetSquareTask".equals(paramString)) { if (!this.aj) { break label135; } localhwq3 = this.aw; j = efj.Bv; localhwq3.d = null; localhwq3.c = j; localhwq3.e(); localhwq4 = this.aw; localhwt2 = hwt.c; if (localhwt2 == null) { throw new NullPointerException(); } localhwq4.g = ((hwt)localhwt2); localhwq4.a(); paramhaa.c = false; } } label135: label229: while (!"NotificationsReportAbuseTask".equals(paramString)) { for (;;) { hwq localhwq3; int j; hwq localhwq4; hwt localhwt2; return; this.aJ = bool; if (!this.aj) { super.af(); } if (!TextUtils.isEmpty(this.d.b)) {} hwq localhwq2; hwt localhwt1; while (!bool) { hwq localhwq1 = this.aw; int i = efj.Bl; localhwq1.d = null; localhwq1.c = i; localhwq1.e(); localhwq2 = this.aw; localhwt1 = hwt.c; if (localhwt1 != null) { break label229; } throw new NullPointerException(); bool = false; } continue; localhwq2.g = ((hwt)localhwt1); localhwq2.a(); } } f().finish(); } public final void a(String paramString, kxg paramkxg) { this.d.notifyDataSetChanged(); } public final void a(kxb paramkxb) { if (this.ax != null) { lvz locallvz = this.ax; int[] arrayOfInt = new int[1]; arrayOfInt[0] = paramkxb.b; locallvz.a(arrayOfInt); } Boolean localBoolean = this.aR; boolean bool = false; if (localBoolean == null) {} for (;;) { if 
((bool) && (this.N != null)) { ((FloatingActionButton)this.N.findViewById(eyg.u)).a(paramkxb.b, paramkxb.c); } return; bool = localBoolean.booleanValue(); } } public final void a(tp paramtp) { super.a(paramtp); efj.a(paramtp, true); } protected final void a(boolean paramBoolean) { if (Log.isLoggable("HostedSquareStreamFrag", 4)) { new StringBuilder(26).append("fetchContent - newer=").append(paramBoolean); } if (Z()) {} while ((!paramBoolean) && (this.aM)) { return; } if (paramBoolean) { this.aB = null; } while (L_()) { hwq localhwq1 = this.aw; int j = efj.Bo; localhwq1.f = null; localhwq1.e = j; localhwq1.e(); hwq localhwq2 = this.aw; hwt localhwt = hwt.a; if (localhwt == null) { throw new NullPointerException(); if (this.aB != null) { break; } } else { localhwq2.g = ((hwt)localhwt); localhwq2.a(); } } String[] arrayOfString; bnd localbnd; if (((paramBoolean) && (!am().a("fetch_newer"))) || ((!paramBoolean) && (!am().a("fetch_older")))) { mbf localmbf = this.bn; int i = this.at.c(); String str1 = this.a; String str2 = this.b; String str3 = this.aB; if (this.az == null) { break label280; } arrayOfString = this.az.u(); localbnd = bnd.a(localmbf, i, 4, null, str1, str2, str3, null, arrayOfString, null, false, false, this.aC, null); if (!paramBoolean) { break label286; } } label280: label286: for (String str4 = "fetch_newer";; str4 = "fetch_older") { localbnd.f = str4; am().b(localbnd); N_(); return; arrayOfString = null; break; } } public final boolean a(MenuItem paramMenuItem) { int i = paramMenuItem.getItemId(); if (i == eyg.p) { R(); mbf localmbf15 = this.bn; gxr localgxr15 = new gxr(); gxq localgxq15 = new gxq(pjo.ax); localgxr15.b.add(localgxq15); new gwz(4, localgxr15.a(this.bn)).b(localmbf15); return true; } if (i == eyg.q) { S(); mbf localmbf14 = this.bn; gxr localgxr14 = new gxr(); gxq localgxq14 = new gxq(pjo.aN); localgxr14.b.add(localgxq14); new gwz(4, localgxr14.a(this.bn)).b(localmbf14); return true; } if (i == eyg.C) { R(); mbf localmbf13 = this.bn; gxr 
localgxr13 = new gxr(); gxq localgxq13 = new gxq(pjo.ax); localgxr13.b.add(localgxq13); new gwz(4, localgxr13.a(this.bn)).b(localmbf13); } for (;;) { return super.a(paramMenuItem); if (i == eyg.L) { S(); mbf localmbf12 = this.bn; gxr localgxr12 = new gxr(); gxq localgxq12 = new gxq(pjo.aN); localgxr12.b.add(localgxq12); new gwz(4, localgxr12.a(this.bn)).b(localmbf12); } else if (i == eyg.D) { kxg localkxg4 = kxg.d; this.aV.a(this.a, localkxg4); mbf localmbf11 = this.bn; gxr localgxr11 = new gxr(); gxq localgxq11 = new gxq(pjo.az); localgxr11.b.add(localgxq11); new gwz(4, localgxr11.a(this.bn)).b(localmbf11); } else if (i == eyg.n) { kxg localkxg3 = kxg.e; this.aV.a(this.a, localkxg3); mbf localmbf10 = this.bn; gxr localgxr10 = new gxr(); gxq localgxq10 = new gxq(pjo.b); localgxr10.b.add(localgxq10); new gwz(4, localgxr10.a(this.bn)).b(localmbf10); } else if (i == eyg.t) { kxg localkxg2 = kxg.g; this.aV.a(this.a, localkxg2); mbf localmbf9 = this.bn; gxr localgxr9 = new gxr(); gxq localgxq9 = new gxq(pjo.j); localgxr9.b.add(localgxq9); new gwz(4, localgxr9.a(this.bn)).b(localmbf9); } else if (i == eyg.I) { kxg localkxg1 = kxg.f; this.aV.a(this.a, localkxg1); mbf localmbf8 = this.bn; gxr localgxr8 = new gxr(); gxq localgxq8 = new gxq(pjo.g); localgxr8.b.add(localgxq8); new gwz(4, localgxr8.a(this.bn)).b(localmbf8); } else if (i == eyg.v) { this.ab.b(this.a); mbf localmbf7 = this.bn; gxr localgxr7 = new gxr(); gxq localgxq7 = new gxq(pjo.T); localgxr7.b.add(localgxq7); new gwz(4, localgxr7.a(this.bn)).b(localmbf7); } else if (i == eyg.z) { this.ad.a(this.a, Boolean.valueOf(true), null); g(true); mbf localmbf6 = this.bn; gxr localgxr6 = new gxr(); gxq localgxq6 = new gxq(pjo.aO); localgxr6.b.add(localgxq6); new gwz(4, localgxr6.a(this.bn)).b(localmbf6); } else if (i == eyg.w) { this.ad.a(this.a, Boolean.valueOf(false), null); g(false); mbf localmbf5 = this.bn; gxr localgxr5 = new gxr(); gxq localgxq5 = new gxq(pjo.aR); localgxr5.b.add(localgxq5); new gwz(4, 
localgxr5.a(this.bn)).b(localmbf5); } else if (i == eyg.M) { a(this.aZ.a(this.at.c(), this.a), 3); mbf localmbf4 = this.bn; gxr localgxr4 = new gxr(); gxq localgxq4 = new gxq(pjo.A); localgxr4.b.add(localgxq4); new gwz(4, localgxr4.a(this.bn)).b(localmbf4); } else if (i == eyg.E) { this.aa.a(this.a, this.d.h); mbf localmbf3 = this.bn; gxr localgxr3 = new gxr(); gxq localgxq3 = new gxq(pjo.aD); localgxr3.b.add(localgxq3); new gwz(4, localgxr3.a(this.bn)).b(localmbf3); } else { if (i == eyg.H) { if (this.aS != null) { Bundle localBundle1 = gxy.a("extra_notification_id", this.aS); gxx localgxx2 = (gxx)this.bo.a(gxx.class); gxw localgxw2 = new gxw(this.bn); localgxw2.c = gxz.Z; if (localBundle1 != null) { localgxw2.h.putAll(localBundle1); } localgxx2.a(localgxw2); int k = efj.Bp; String str1 = g().getString(k); int m = efj.Br; String str2 = g().getString(m); int n = efj.Bq; String str3 = g().getString(n); int i1 = efj.Bj; lut locallut = lut.a(str1, str2, str3, g().getString(i1), 0, 0); locallut.n = this; locallut.p = 0; Bundle localBundle2 = locallut.m; localBundle2.putString("com.google.android.libraries.social.notifications.notif_id", this.aS); localBundle2.putLong("com.google.android.libraries.social.notifications.updated_version", this.aT); locallut.a(this.x, "report_invite_abuse"); } for (;;) { mbf localmbf2 = this.bn; gxr localgxr2 = new gxr(); gxq localgxq2 = new gxq(pjo.aM); localgxr2.b.add(localgxq2); new gwz(4, localgxr2.a(this.bn)).b(localmbf2); break; kza localkza = this.ac; new kyy().a(localkza.a.h(), null); gxx localgxx1 = (gxx)this.bo.a(gxx.class); gxw localgxw1 = new gxw(this.bn); localgxw1.c = gxz.Z; localgxx1.a(localgxw1); } } if (i == eyg.O) { a(((kya)this.bo.a(kya.class)).a(this.at.c(), this.a)); mbf localmbf1 = this.bn; gxr localgxr1 = new gxr(); gxq localgxq1 = new gxq(pjo.aq); localgxr1.b.add(localgxq1); new gwz(4, localgxr1.a(this.bn)).b(localmbf1); } else if (i == eyg.N) { kzu localkzu = this.af; kyb localkyb = localkzu.d; Context localContext 
= localkzu.c; int j = eu.e; Object[] arrayOfObject = new Object[1]; arrayOfObject[0] = localkzu.b.d(); localkyb.a(localContext.getString(j, arrayOfObject), "", localkzu.c.getString(eu.d), localkzu.c.getString(eu.f), localkzu.c.getString(eu.a)).a(localkzu.a.h(), "delete_square_mixin_tag"); } } } } public final boolean aa() { return true; } protected final void af() { if (this.aj) { return; } super.af(); } protected final void b(Intent paramIntent) { boolean bool = TextUtils.isEmpty(this.c); gnb localgnb = null; String str1; String str2; String str3; if (!bool) { if (this.b == null) { break label93; } str1 = this.b; str2 = this.a; str3 = this.c; if (this.b != null) { break label102; } } label93: label102: for (String str4 = "";; str4 = this.ah) { localgnb = new gnb(new lhp(str2, str3, str1, str4, this.ai)); paramIntent.putExtra("extra_acl", localgnb); super.b(paramIntent); return; str1 = this.ag; break; } } public final void b(Bundle paramBundle) { super.b(paramBundle); paramBundle.putString("extra_square_id", this.a); } public final void b(Bundle paramBundle, String paramString) {} public final void b(boolean paramBoolean) { this.aq = Boolean.valueOf(paramBoolean); this.d.v = Boolean.valueOf(paramBoolean); } public final void c(int paramInt) { int i = this.at.c(); String str1; int j; hum localhum; mbf localmbf; String str2; if (efj.o(paramInt)) { Bundle localBundle = this.m; str1 = localBundle.getString("suggestion_id"); j = localBundle.getInt("suggestion_ui", -1); if ((!TextUtils.isEmpty(str1)) && (j != -1)) { localhum = this.aY; localmbf = this.bn; str2 = String.valueOf(this.a); if (str2.length() == 0) { break label114; } } } label114: for (String str3 = "g:".concat(str2);; str3 = new String("g:")) { localhum.a(localmbf, i, str3, str1, j, 230); return; } } protected final void c(Bundle paramBundle) { super.c(paramBundle); this.aX = new kwz(this.bn); this.aX.a(this); this.af = new kzu(this, this.bp, this, this); mbb localmbb = this.bo; localmbb.a(gii.class, 
this.ac); localmbb.a(lfy.class, this.aa); localmbb.a(lfh.class, this.ab); localmbb.a(lfi.class, this.ab); localmbb.a(lfk.class, this.Z); localmbb.a(kwz.class, this.aX); localmbb.a(lht.class, this); localmbb.a(kxp.class, this); localmbb.a(kxo.class, this); localmbb.a(dtl.class, this); localmbb.a(kxx.class, this.af); this.aY = ((hum)this.bo.a(hum.class)); kxq localkxq = (kxq)this.bo.a(kxq.class); this.aW = localkxq.a(this.bn).a(f()); lvi locallvi1 = this.au; lvh locallvh1 = localkxq.a(); locallvi1.a.add(locallvh1); lvi locallvi2 = this.au; lvh locallvh2 = localkxq.b(); locallvi2.a.add(locallvh2); lvi locallvi3 = this.au; kxi localkxi = this.aW; locallvi3.a.add(localkxi); this.aZ = ((dsr)this.bo.a(dsr.class)); } public final void c(Bundle paramBundle, String paramString) {} public final String d() { return this.c; } public final void d(int paramInt) { if ((paramInt == 3) || (paramInt == 2)) { this.ap = true; super.W(); } } protected final void e(Intent paramIntent) { paramIntent.putExtra("square_membership", this.d.g); paramIntent.putExtra("square_joinability", this.d.i); paramIntent.putExtra("refresh", true); super.e(paramIntent); } public final void e(Bundle paramBundle) { super.e(paramBundle); if (this.c != null) { paramBundle.putString("square_name", this.c); } if (this.ah != null) { paramBundle.putString("square_stream_name", this.ah); } if (this.aq != null) { paramBundle.putBoolean("square_expanded", this.aq.booleanValue()); } paramBundle.putBoolean("square_is_restricted", this.ai); } public final void f_() { super.f_(); this.aW.a(null); this.aX.c.remove(this); } public final void m() { super.m(); if (this.an) { super.W(); am().b(new kzg(this.bn, this.at.c(), this.a, this.an)); N_(); this.an = false; } } public final gya r_() { return gya.E; } public final gxq v() { return new kuw(pjo.C, this.a); } protected final hqr w() { return this.aU; } public final CharSequence z() { int i = efj.BA; Object[] arrayOfObject = new Object[1]; arrayOfObject[0] = this.a; return 
g().getString(i, arrayOfObject); } } /* Location: F:\apktool\apktool\com.google.android.apps.plus\classes-dex2jar.jar * Qualified Name: dsy * JD-Core Version: 0.7.0.1 */
package com.medicapital.client.core.entities; import static com.medicapital.client.log.Tracer.tracer; import java.util.ArrayList; import java.util.Collection; import java.util.LinkedHashMap; import java.util.Map; import java.util.Set; import com.google.gwt.event.shared.EventBus; import com.medicapital.client.core.PageablePresenter; import com.medicapital.client.core.Presenter; import com.medicapital.client.core.RegistrationList; import com.medicapital.client.core.commands.DisplayCommandFactory; import com.medicapital.client.core.entities.EntitiesView; import com.medicapital.client.dao.CommandRespBroadcastHandler; import com.medicapital.client.event.ClientEvent; import com.medicapital.common.commands.CommandResp; import com.medicapital.common.commands.entity.SelectCommand; import com.medicapital.common.commands.entity.SelectCommandResp; import com.medicapital.common.commands.entity.SelectCountCommand; import com.medicapital.common.commands.entity.SelectCountCommandResp; import com.medicapital.common.dao.ResponseHandler; import com.medicapital.common.dao.ServiceAccess; import com.medicapital.common.entities.SerializableEntity; /** * Presenter helps to display data in dynamic table. It manages paging of the * table so next, previous, first and last actions are possible to handle. It * uses generic view interface which allows to display elements in proper * manner. View interface can be extended so new actions and features can be * added very easily. 
* * @author michal * * @param <E> * E - element kept by presenter */ abstract public class EntitiesPresenter<E extends SerializableEntity> extends PageablePresenter implements Presenter { private final EntitiesView<E> display; private final EventBus eventBus; private final Map<Integer, E> displayedElements = new LinkedHashMap<Integer, E>(); private final RegistrationList listHandlers = new RegistrationList(); private final Class<E> entityClass; private ServiceAccess serviceAccess; private DisplayCommandFactory<E> displayCommandFactory; public EntitiesPresenter(Class<E> entityClass, final EntitiesView<E> display, final EventBus eventBus) { super(display); this.display = display; this.eventBus = eventBus; this.entityClass = entityClass; registerEventBusHandlers(eventBus, listHandlers); updateNavigationBar(); } /** * Register handlers in event bus * * @param registrationList * @param eventBus */ protected void registerEventBusHandlers(final EventBus eventBus, final RegistrationList registrationList) { tracer(this).debug("Registering handlers for action events from event bus..."); registrationList.add(eventBus.addHandler(ClientEvent.TYPE, new CommandRespBroadcastHandler<E>(this) { @Override protected void handleCreatedEntity(E createdEntity) { EntitiesPresenter.this.handleCreatedEntity(createdEntity); } @Override protected void handleDeletedEntities(Set<Integer> deletedEntitiesIds) { EntitiesPresenter.this.handleDeletedEntities(deletedEntitiesIds); } @Override protected void handleUpdatedEntity(E updatedEntity) { EntitiesPresenter.this.handleUpdatedEntity(updatedEntity); } @Override protected Class<E> getEntityClass() { return entityClass; } })); } /** * Handle created entity * * @param createdEntity */ protected void handleCreatedEntity(E createdEntity) { // ignore } /** * Handle deleted entities * * @param deletedEntitiesIds */ protected void handleDeletedEntities(Set<Integer> deletedEntitiesIds) { if (deletedEntitiesIds != null) { for (int deletedEntityId : 
deletedEntitiesIds) { if (displayedElements.containsKey(deletedEntityId)) { tracer(this).debug("Entity was deleted: " + deletedEntityId); displayedElements.remove(deletedEntityId); setTotalRows(getTotalRows() - 1); } } refreshDisplay(false); } } /** * Handle updated entity * * @param updatedEntity */ protected void handleUpdatedEntity(E updatedEntity) { if (displayedElements.containsKey(updatedEntity.getId())) { tracer(this).debug("Entity was updated: " + updatedEntity); displayedElements.put(updatedEntity.getId(), updatedEntity); refreshDisplay(false); } } /** * Display data in list * * @param data */ final public void display(final Collection<E> data) { displayedElements.clear(); display.setViewVisible(true); clearView(); if (data != null) { tracer(this).debug("Displaying data, elements count: " + data.size()); for (E element : data) { displayedElements.put(element.getId(), element); } displayDataOnView(data); } } @Override protected void displayCurrentPageData() { validatePresenter(); displayedElements.clear(); clearView(); getElements(); getDataBaseMaxElementsCount(); } /** * Get elements from service access */ private void getElements() { tracer(this).debug("Getting elements: startRow: " + getStartRow() + ", rows count: " + getPageSize()); SelectCommand<E> selectCommand = displayCommandFactory.createSelectCommand(getStartRow(), getPageSize()); serviceAccess.execute(selectCommand, new ResponseHandler<E>() { @Override public void handle(CommandResp<E> response) { if (response instanceof SelectCommandResp) { SelectCommandResp<E> selectCommandResp = (SelectCommandResp<E>) response; display(selectCommandResp.getData()); } } @Override public void handleException(Throwable throwable) { // ignore } }); } /** * Display data o view * * @param data */ abstract protected void displayDataOnView(final Collection<E> data); /** * Clear view */ protected void clearView() { tracer(this).debug("Clearing view"); display.clear(); } /** * Check number of element in data base */ 
public void getDataBaseMaxElementsCount() { tracer(this).debug("Getting max data base elements"); validatePresenter(); SelectCountCommand<E> countCommand = displayCommandFactory.createCountCommand(); serviceAccess.execute(countCommand, new ResponseHandler<E>() { @Override public void handle(CommandResp<E> response) { if (response instanceof SelectCountCommandResp) { SelectCountCommandResp<E> selectCommandResp = (SelectCountCommandResp<E>) response; setTotalRows(selectCommandResp.getCount()); } } @Override public void handleException(Throwable throwable) { // ignore } }); } /** * Clear state and handlers of the presenter */ @Override public void clearPresenter() { tracer(this).debug("Clearing presenter..."); listHandlers.clear(); } /** * Re-download data and refresh display */ final public void refreshDisplay() { refreshDisplay(true); } /** * Refresh display * * @param redownloadData * if true data will be get from server again. Otherwise only * data in list will be repainted * @throws IllegalArgumentException * when re-download is true but no display command factory is * set */ public void refreshDisplay(boolean redownloadData) throws IllegalArgumentException { tracer(this).debug("Refreshing display - redownloadingData=" + redownloadData); if (redownloadData) { displayCurrentPageData(); } else { display(new ArrayList<E>(displayedElements.values())); } } final protected Map<Integer, E> getDisplayedElements() { return displayedElements; } final public DisplayCommandFactory<E> getDisplayCommandFactory() { return displayCommandFactory; } final public void setDisplayCommandFactory(DisplayCommandFactory<E> displayCommandFactory) { this.displayCommandFactory = displayCommandFactory; } final public void setServiceAccess(ServiceAccess serviceAccess) { this.serviceAccess = serviceAccess; } final public ServiceAccess getServiceAccess() { return serviceAccess; } /** * Validate state of presenter * * @throws IllegalArgumentException */ protected void validatePresenter() throws 
IllegalArgumentException { tracer(this).debug("Validating presenter..."); if (getServiceAccess() == null) { throw new IllegalArgumentException("Service access not set"); } else if (displayCommandFactory == null) { throw new IllegalArgumentException("Display command factory not set"); } } final public Class<E> getEntityClass() { return entityClass; } final protected RegistrationList getListHandlers() { return listHandlers; } final protected EventBus getEventBus() { return eventBus; } final public EntitiesView<E> getDisplay() { return display; } }
// Copyright 2000-2019 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.maven.importing;

import com.intellij.openapi.actionSystem.ActionManager;
import com.intellij.openapi.actionSystem.AnAction;
import com.intellij.openapi.actionSystem.KeyboardShortcut;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.keymap.Keymap;
import com.intellij.openapi.keymap.KeymapManager;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.idea.maven.MavenImportingTestCase;
import org.jetbrains.idea.maven.project.MavenProject;
import org.jetbrains.idea.maven.tasks.MavenKeymapExtension;
import org.jetbrains.idea.maven.tasks.MavenShortcutsManager;

import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;

/**
 * Verifies that {@link MavenShortcutsManager} keeps goal-execution keymap actions in sync with the
 * set of imported Maven projects: actions with assigned shortcuts survive re-import and plugin
 * resolution, and disappear when the owning project is deleted or ignored.
 */
public class MavenShortcutsManagerTest extends MavenImportingTestCase {
  private MavenShortcutsManager myShortcutsManager;

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // The manager is initialized explicitly because tests run outside the normal IDE startup flow.
    myShortcutsManager = MavenShortcutsManager.getInstance(myProject);
    myShortcutsManager.doInit(myProject);
    initProjectsManager(true);
  }

  @Override
  public void tearDown() throws Exception {
    try {
      // Unregister keymap actions created during the test so they do not leak into other tests.
      MavenKeymapExtension.clearActions(myProject);
    }
    catch (Throwable e) {
      addSuppressedException(e);
    }
    finally {
      myShortcutsManager = null;
      super.tearDown();
    }
  }

  public void testRefreshingActionsOnImport() {
    // Nothing imported yet, so no Maven goal actions should exist.
    assertTrue(getProjectActions().isEmpty());
    VirtualFile p1 = createModulePom("p1", "<groupId>test</groupId>" +
                                           "<artifactId>p1</artifactId>" +
                                           "<version>1</version>");
    VirtualFile p2 = createModulePom("p2", "<groupId>test</groupId>" +
                                           "<artifactId>p2</artifactId>" +
                                           "<version>1</version>");
    importProjects(p1, p2);
    // Import alone registers no actions - actions only exist for goals with assigned shortcuts.
    assertEmptyKeymap();
  }

  public void testRefreshingOnProjectRead() {
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>");
    assertEmptyKeymap();
    String goal = "clean";
    assignShortcut(myProjectPom, goal, "alt shift X");
    // Re-reading the project must keep the action for the goal that has a shortcut.
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>" +

                  "<build>" +
                  " <plugins>" +
                  " <plugin>" +
                  " <groupId>org.apache.maven.plugins</groupId>" +
                  " <artifactId>maven-surefire-plugin</artifactId>" +
                  " </plugin>" +
                  " </plugins>" +
                  "</build>");
    assertKeymapContains(myProjectPom, goal);
  }

  public void testRefreshingOnPluginResolve() {
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>");
    assertEmptyKeymap();
    // Fully-qualified goal of a plugin that is not resolved yet.
    String goal = "org.apache.maven.plugins:maven-surefire-plugin:2.4.3:test";
    assignShortcut(myProjectPom, goal, "alt shift X");
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>" +

                  "<build>" +
                  " <plugins>" +
                  " <plugin>" +
                  " <groupId>org.apache.maven.plugins</groupId>" +
                  " <artifactId>maven-surefire-plugin</artifactId>" +
                  " <version>2.4.3</version>" +
                  " </plugin>" +
                  " </plugins>" +
                  "</build>");
    resolvePlugins();
    assertKeymapContains(myProjectPom, goal);
  }

  public void testActionWhenSeveralSimilarPlugins() {
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>");
    // The same plugin is declared twice; the action for its goal must still be registered once.
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>" +

                  "<build>" +
                  " <plugins>" +
                  " <plugin>" +
                  " <groupId>org.apache.maven.plugins</groupId>" +
                  " <artifactId>maven-surefire-plugin</artifactId>" +
                  " <version>2.4.3</version>" +
                  " </plugin>" +
                  " <plugin>" +
                  " <groupId>org.apache.maven.plugins</groupId>" +
                  " <artifactId>maven-surefire-plugin</artifactId>" +
                  " <version>2.4.3</version>" +
                  " </plugin>" +
                  " </plugins>" +
                  "</build>");
    String goal = "org.apache.maven.plugins:maven-surefire-plugin:2.4.3:test";
    assignShortcut(myProjectPom, goal, "alt shift X");
    resolvePlugins();
    assertKeymapContains(myProjectPom, goal);
  }

  public void testRefreshingOnProjectAddition() {
    importProject("<groupId>test</groupId>" +
                  "<artifactId>project</artifactId>" +
                  "<version>1</version>");
    VirtualFile m = createModulePom("module", "<groupId>test</groupId>" +
                                              "<artifactId>module</artifactId>" +
                                              "<version>1</version>");
    String goal = "clean";
    // The module is not yet part of the imported project tree.
    assertKeymapDoesNotContain(m, goal);
    createProjectPom("<groupId>test</groupId>" +
                     "<artifactId>project</artifactId>" +
                     "<version>1</version>" +

                     "<modules>" +
                     " <module>module</module>" +
                     "</modules>");
    importProject();
    assertEmptyKeymap();
    // After the module is imported, a shortcut can be assigned to its goals.
    assignShortcut(m, goal, "alt shift X");
    assertKeymapContains(m, goal);
  }

  public void testDeletingActionOnProjectRemoval() throws IOException {
    final VirtualFile p1 = createModulePom("p1", "<groupId>test</groupId>" +
                                                 "<artifactId>p1</artifactId>" +
                                                 "<version>1</version>");
    VirtualFile p2 = createModulePom("p2", "<groupId>test</groupId>" +
                                           "<artifactId>p2</artifactId>" +
                                           "<version>1</version>");
    importProjects(p1, p2);
    assertEmptyKeymap();
    String goal = "clean";
    assignShortcut(p1, goal, "alt shift X");
    assignShortcut(p2, goal, "alt shift Y");
    assertKeymapContains(p1, goal);
    assertKeymapContains(p2, goal);
    // Deleting a pom file requires a write action on the EDT.
    WriteCommandAction.writeCommandAction(myProject).run(() -> p1.delete(this));
    configConfirmationForYesAnswer();
    importProjects(p1, p2);
    // Only the removed project's action disappears; the sibling keeps its shortcut.
    assertKeymapDoesNotContain(p1, goal);
    assertKeymapContains(p2, goal);
  }

  public void testRefreshingActionsOnChangingIgnoreFlag() {
    VirtualFile p1 = createModulePom("p1", "<groupId>test</groupId>" +
                                           "<artifactId>p1</artifactId>" +
                                           "<version>1</version>");
    VirtualFile p2 = createModulePom("p2", "<groupId>test</groupId>" +
                                           "<artifactId>p2</artifactId>" +
                                           "<version>1</version>");
    importProjects(p1, p2);
    assertEmptyKeymap();
    String goal = "clean";
    assignShortcut(p1, goal, "alt shift X");
    assignShortcut(p2, goal, "alt shift Y");
    assertKeymapContains(p1, goal);
    assertKeymapContains(p2, goal);
    // Ignoring a project must remove its actions...
    myProjectsManager.setIgnoredState(Collections.singletonList(myProjectsManager.findProject(p1)), true);
    assertKeymapDoesNotContain(p1, goal);
    assertKeymapContains(p2, goal);
    // ...and un-ignoring it must bring them back.
    myProjectsManager.setIgnoredState(Collections.singletonList(myProjectsManager.findProject(p1)), false);
    assertKeymapContains(p1, goal);
    assertKeymapContains(p2, goal);
  }

  /** Asserts that an action for the given pom/goal pair is registered. */
  private void assertKeymapContains(VirtualFile pomFile, String goal) {
    String id = myShortcutsManager.getActionId(pomFile.getPath(), goal);
    assertContain(getProjectActions(), id);
  }

  /** Asserts that no Maven goal actions are registered for this project. */
  private void assertEmptyKeymap() {
    assertEmpty(getProjectActions());
  }

  /** Asserts that no action for the given pom/goal pair is registered. */
  private void assertKeymapDoesNotContain(VirtualFile pomFile, String goal) {
    String id = myShortcutsManager.getActionId(pomFile.getPath(), goal);
    assertDoNotContain(getProjectActions(), id);
  }

  /**
   * Registers (if needed) the goal-execution action for the given pom and binds a keyboard
   * shortcut to it in the active keymap.
   */
  private void assignShortcut(VirtualFile pomFile, String goal, String shortcut) {
    MavenProject mavenProject = myProjectsManager.findProject(pomFile);
    assert mavenProject != null;
    String actionId = myShortcutsManager.getActionId(mavenProject.getPath(), goal);
    assert actionId != null;
    AnAction action = ActionManager.getInstance().getAction(actionId);
    if (action == null) {
      MavenKeymapExtension.getOrRegisterAction(mavenProject, actionId, goal);
    }
    Keymap activeKeymap = KeymapManager.getInstance().getActiveKeymap();
    activeKeymap.addShortcut(actionId, KeyboardShortcut.fromString(shortcut));
  }

  /** All action ids registered under this project's Maven action prefix. */
  private List<String> getProjectActions() {
    String prefix = MavenKeymapExtension.getActionPrefix(myProject, null);
    return Arrays.asList(ActionManager.getInstance().getActionIds(prefix));
  }
}
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.state.api; import org.apache.flink.api.common.RuntimeExecutionMode; import org.apache.flink.api.common.functions.RichFlatMapFunction; import org.apache.flink.api.common.functions.RichMapFunction; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.client.program.ClusterClient; import org.apache.flink.configuration.Configuration; import org.apache.flink.contrib.streaming.state.EmbeddedRocksDBStateBackend; import org.apache.flink.runtime.jobgraph.JobGraph; import org.apache.flink.runtime.jobgraph.SavepointRestoreSettings; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.runtime.state.StateBackend; import org.apache.flink.runtime.state.hashmap.HashMapStateBackend; import org.apache.flink.state.api.functions.BroadcastStateBootstrapFunction; import 
org.apache.flink.state.api.functions.KeyedStateBootstrapFunction; import org.apache.flink.state.api.functions.StateBootstrapFunction; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction; import org.apache.flink.streaming.api.functions.sink.DiscardingSink; import org.apache.flink.streaming.util.StreamCollector; import org.apache.flink.test.util.AbstractTestBase; import org.apache.flink.util.AbstractID; import org.apache.flink.util.Collector; import org.apache.flink.util.SerializedThrowable; import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashSet; import java.util.List; import java.util.Objects; import java.util.Optional; import java.util.Set; import java.util.concurrent.CompletableFuture; /** IT test for writing savepoints. 
*/
public class SavepointWriterITCase extends AbstractTestBase {

    /** Uid of the keyed-state operator that holds the account totals. */
    private static final String ACCOUNT_UID = "accounts";

    /** Uid of the broadcast-state operator; removed again when the savepoint is modified. */
    private static final String CURRENCY_UID = "currency";

    /** Uid of the union-list-state operator added by the modification step. */
    private static final String MODIFY_UID = "numbers";

    private static final MapStateDescriptor<String, Double> descriptor =
            new MapStateDescriptor<>("currency-rate", Types.STRING, Types.DOUBLE);

    private static final Collection<Account> accounts =
            Arrays.asList(new Account(1, 100.0), new Account(2, 100.0), new Account(3, 100.0));

    private static final Collection<CurrencyRate> currencyRates =
            Arrays.asList(new CurrencyRate("USD", 1.0), new CurrencyRate("EUR", 1.3));

    @Rule public StreamCollector collector = new StreamCollector();

    @Test
    public void testDefaultStateBackend() throws Exception {
        // null = use the cluster's default state backend.
        testStateBootstrapAndModification(null);
    }

    @Test
    public void testHashMapStateBackend() throws Exception {
        testStateBootstrapAndModification(new HashMapStateBackend());
    }

    @Test
    public void testEmbeddedRocksDBStateBackend() throws Exception {
        testStateBootstrapAndModification(new EmbeddedRocksDBStateBackend());
    }

    /**
     * End-to-end scenario: bootstrap a savepoint, restore a job from it, rewrite the savepoint,
     * and restore again from the rewritten one.
     *
     * @param backend state backend under test; {@code null} means the default backend
     */
    public void testStateBootstrapAndModification(StateBackend backend) throws Exception {
        final String savepointPath = getTempDirPath(new AbstractID().toHexString());
        bootstrapState(backend, savepointPath);
        validateBootstrap(backend, savepointPath);
        final String modifyPath = getTempDirPath(new AbstractID().toHexString());
        modifySavepoint(backend, savepointPath, modifyPath);
        validateModification(backend, modifyPath);
    }

    /** Writes a fresh savepoint containing keyed account state and broadcast currency state. */
    private void bootstrapState(StateBackend backend, String savepointPath) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        StateBootstrapTransformation<Account> transformation =
                OperatorTransformation.bootstrapWith(env.fromCollection(accounts))
                        .keyBy(acc -> acc.id)
                        .transform(new AccountBootstrapper());
        StateBootstrapTransformation<CurrencyRate> broadcastTransformation =
                OperatorTransformation.bootstrapWith(env.fromCollection(currencyRates))
                        .transform(new CurrencyBootstrapFunction());
        // 128 is the max parallelism of the newly created savepoint (SavepointWriter API).
        SavepointWriter writer =
                backend == null
                        ? SavepointWriter.newSavepoint(128)
                        : SavepointWriter.newSavepoint(backend, 128);
        writer.withOperator(ACCOUNT_UID, transformation)
                .withOperator(CURRENCY_UID, broadcastTransformation)
                .write(savepointPath);
        env.execute("Bootstrap");
    }

    /** Restores a streaming job from the bootstrapped savepoint and verifies the restored state. */
    private void validateBootstrap(StateBackend backend, String savepointPath) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        if (backend != null) {
            env.setStateBackend(backend);
        }
        DataStream<Account> stream =
                env.fromCollection(accounts)
                        .keyBy(acc -> acc.id)
                        .flatMap(new UpdateAndGetAccount())
                        .uid(ACCOUNT_UID);
        CompletableFuture<Collection<Account>> results = collector.collect(stream);
        env.fromCollection(currencyRates)
                .connect(env.fromCollection(currencyRates).broadcast(descriptor))
                .process(new CurrencyValidationFunction())
                .uid(CURRENCY_UID)
                .addSink(new DiscardingSink<>());
        JobGraph jobGraph = env.getStreamGraph().getJobGraph();
        jobGraph.setSavepointRestoreSettings(
                SavepointRestoreSettings.forPath(savepointPath, false));
        ClusterClient<?> client = MINI_CLUSTER_RESOURCE.getClusterClient();
        // Submit and wait for the result; any failure inside the job surfaces here.
        Optional<SerializedThrowable> serializedThrowable =
                client.submitJob(jobGraph)
                        .thenCompose(client::requestJobResult)
                        .get()
                        .getSerializedThrowable();
        serializedThrowable.ifPresent(
                t -> {
                    throw new AssertionError("Unexpected exception during bootstrapping", t);
                });
        Assert.assertEquals("Unexpected output", 3, results.get().size());
    }

    /** Rewrites the savepoint: drops the currency operator and adds union-list number state. */
    private void modifySavepoint(StateBackend backend, String savepointPath, String modifyPath) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        StateBootstrapTransformation<Integer> transformation =
                OperatorTransformation.bootstrapWith(env.fromElements(1, 2, 3))
                        .transform(new ModifyProcessFunction());
        SavepointWriter writer =
                backend == null
                        ? SavepointWriter.fromExistingSavepoint(savepointPath)
                        : SavepointWriter.fromExistingSavepoint(savepointPath, backend);
        writer.removeOperator(CURRENCY_UID)
                .withOperator(MODIFY_UID, transformation)
                .write(modifyPath);
        env.execute("Modifying");
    }

    /** Restores from the modified savepoint and checks both the kept and the newly added state. */
    private void validateModification(StateBackend backend, String savepointPath) throws Exception {
        StreamExecutionEnvironment sEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        if (backend != null) {
            sEnv.setStateBackend(backend);
        }
        DataStream<Account> stream =
                sEnv.fromCollection(accounts)
                        .keyBy(acc -> acc.id)
                        .flatMap(new UpdateAndGetAccount())
                        .uid(ACCOUNT_UID);
        CompletableFuture<Collection<Account>> results = collector.collect(stream);
        // StatefulOperator asserts (in initializeState) that the bootstrapped numbers restored.
        stream.map(acc -> acc.id)
                .map(new StatefulOperator())
                .uid(MODIFY_UID)
                .addSink(new DiscardingSink<>());
        JobGraph jobGraph = sEnv.getStreamGraph().getJobGraph();
        jobGraph.setSavepointRestoreSettings(
                SavepointRestoreSettings.forPath(savepointPath, false));
        ClusterClient<?> client = MINI_CLUSTER_RESOURCE.getClusterClient();
        Optional<SerializedThrowable> serializedThrowable =
                client.submitJob(jobGraph)
                        .thenCompose(client::requestJobResult)
                        .get()
                        .getSerializedThrowable();
        Assert.assertFalse(serializedThrowable.isPresent());
        Assert.assertEquals("Unexpected output", 3, results.get().size());
    }

    /** A simple pojo. */
    @SuppressWarnings("WeakerAccess")
    public static class Account {
        Account(int id, double amount) {
            this.id = id;
            this.amount = amount;
            this.timestamp = 1000L;
        }

        public int id;
        public double amount;
        public long timestamp;

        // NOTE(review): timestamp is deliberately excluded from equals/hashCode below.
        @Override
        public boolean equals(Object obj) {
            return obj instanceof Account
                    && ((Account) obj).id == id
                    && ((Account) obj).amount == amount;
        }

        @Override
        public int hashCode() {
            return Objects.hash(id, amount);
        }
    }

    /** A simple pojo.
*/ @SuppressWarnings("WeakerAccess") public static class CurrencyRate { public String currency; public Double rate; CurrencyRate(String currency, double rate) { this.currency = currency; this.rate = rate; } @Override public boolean equals(Object obj) { return obj instanceof CurrencyRate && ((CurrencyRate) obj).currency.equals(currency) && ((CurrencyRate) obj).rate.equals(rate); } @Override public int hashCode() { return Objects.hash(currency, rate); } } /** A savepoint writer function. */ public static class AccountBootstrapper extends KeyedStateBootstrapFunction<Integer, Account> { ValueState<Double> state; @Override public void open(Configuration parameters) { ValueStateDescriptor<Double> descriptor = new ValueStateDescriptor<>("total", Types.DOUBLE); state = getRuntimeContext().getState(descriptor); } @Override public void processElement(Account value, Context ctx) throws Exception { state.update(value.amount); } } /** A streaming function bootstrapped off the state. */ public static class UpdateAndGetAccount extends RichFlatMapFunction<Account, Account> { ValueState<Double> state; @Override public void open(Configuration parameters) throws Exception { super.open(parameters); ValueStateDescriptor<Double> descriptor = new ValueStateDescriptor<>("total", Types.DOUBLE); state = getRuntimeContext().getState(descriptor); } @Override public void flatMap(Account value, Collector<Account> out) throws Exception { Double current = state.value(); if (current != null) { value.amount += current; } state.update(value.amount); out.collect(value); } } /** A bootstrap function. 
*/ public static class ModifyProcessFunction extends StateBootstrapFunction<Integer> { List<Integer> numbers; ListState<Integer> state; @Override public void open(Configuration parameters) { numbers = new ArrayList<>(); } @Override public void processElement(Integer value, Context ctx) { numbers.add(value); } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { state.clear(); state.addAll(numbers); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { state = context.getOperatorStateStore() .getUnionListState(new ListStateDescriptor<>("numbers", Types.INT)); } } /** A streaming function bootstrapped off the state. */ public static class StatefulOperator extends RichMapFunction<Integer, Integer> implements CheckpointedFunction { List<Integer> numbers; ListState<Integer> state; @Override public void open(Configuration parameters) { numbers = new ArrayList<>(); } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { state.clear(); state.addAll(numbers); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { state = context.getOperatorStateStore() .getUnionListState(new ListStateDescriptor<>("numbers", Types.INT)); if (context.isRestored()) { Set<Integer> expected = new HashSet<>(); expected.add(1); expected.add(2); expected.add(3); for (Integer number : state.get()) { Assert.assertTrue("Duplicate state", expected.contains(number)); expected.remove(number); } Assert.assertTrue( "Failed to bootstrap all state elements: " + Arrays.toString(expected.toArray()), expected.isEmpty()); } } @Override public Integer map(Integer value) { return null; } } /** A broadcast bootstrap function. 
*/ public static class CurrencyBootstrapFunction extends BroadcastStateBootstrapFunction<CurrencyRate> { @Override public void processElement(CurrencyRate value, Context ctx) throws Exception { ctx.getBroadcastState(descriptor).put(value.currency, value.rate); } } /** Checks the restored broadcast state. */ public static class CurrencyValidationFunction extends BroadcastProcessFunction<CurrencyRate, CurrencyRate, Void> { @Override public void processElement(CurrencyRate value, ReadOnlyContext ctx, Collector<Void> out) throws Exception { Assert.assertEquals( "Incorrect currency rate", value.rate, ctx.getBroadcastState(descriptor).get(value.currency), 0.0001); } @Override public void processBroadcastElement(CurrencyRate value, Context ctx, Collector<Void> out) { // ignore } } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package org.apache.sysml.test.integration.functions.binary.matrix_full_other;

import java.util.HashMap;

import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.sysml.api.DMLScript.RUNTIME_PLATFORM;
import org.apache.sysml.lops.LopProperties.ExecType;
import org.apache.sysml.lops.MMTSJ.MMTSJType;
import org.apache.sysml.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysml.test.integration.AutomatedTestBase;
import org.apache.sysml.test.integration.TestConfiguration;
import org.apache.sysml.test.utils.TestUtils;

/**
 * Tests transpose-self matrix multiplication (t(X)%*%X and X%*%t(X)) against an
 * R reference implementation, for matrix and vector inputs, dense and sparse,
 * in CP and MR execution modes.
 */
public class FullMatrixMultiplicationTransposeSelfTest extends AutomatedTestBase
{
	private final static String TEST_NAME1 = "TransposeSelfMatrixMultiplication1";
	private final static String TEST_NAME2 = "TransposeSelfMatrixMultiplication2";
	private final static String TEST_DIR = "functions/binary/matrix_full_other/";
	private final static String TEST_CLASS_DIR = TEST_DIR +
		FullMatrixMultiplicationTransposeSelfTest.class.getSimpleName() + "/";
	private final static double eps = 1e-10;

	//for CP
	private final static int rows1 = 3500;
	private final static int cols1 = 1500;
	//for MR
	private final static int rows2 = 7000;
	private final static int cols2 = 750;

	private final static double sparsity1 = 0.7;
	private final static double sparsity2 = 0.1;

	@Override
	public void setUp()
	{
		addTestConfiguration( TEST_NAME1,
			new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "B" }) );
		addTestConfiguration( TEST_NAME2,
			new TestConfiguration(TEST_CLASS_DIR, TEST_NAME2, new String[] { "B" }) );

		if (TEST_CACHE_ENABLED) {
			setOutAndExpectedDeletionDisabled(true);
		}
	}

	@BeforeClass
	public static void init() {
		TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
	}

	@AfterClass
	public static void cleanUp() {
		if (TEST_CACHE_ENABLED) {
			TestUtils.clearDirectory(TEST_DATA_DIR + TEST_CLASS_DIR);
		}
	}

	@Test
	public void testMMLeftDenseCP() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.LEFT, ExecType.CP, false);
	}

	@Test
	public void testMMRightDenseCP() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.RIGHT, ExecType.CP, false);
	}

	@Test
	public void testMMLeftSparseCP() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.LEFT, ExecType.CP, true);
	}

	@Test
	public void testMMRightSparseCP() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.RIGHT, ExecType.CP, true);
	}

	@Test
	public void testMMLeftDenseMR() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.LEFT, ExecType.MR, false);
	}

	@Test
	public void testMMRightDenseMR() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.RIGHT, ExecType.MR, false);
	}

	@Test
	public void testMMLeftSparseMR() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.LEFT, ExecType.MR, true);
	}

	@Test
	public void testMMRightSparseMR() {
		runTransposeSelfMatrixMultiplicationTest(MMTSJType.RIGHT, ExecType.MR, true);
	}

	@Test
	public void testVVLeftDenseCP() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.LEFT, ExecType.CP, false);
	}

	@Test
	public void testVVRightDenseCP() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.RIGHT, ExecType.CP, false);
	}

	@Test
	public void testVVLeftSparseCP() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.LEFT, ExecType.CP, true);
	}

	@Test
	public void testVVRightSparseCP() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.RIGHT, ExecType.CP, true);
	}

	@Test
	public void testVVLeftDenseMR() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.LEFT, ExecType.MR, false);
	}

	@Test
	public void testVVRightDenseMR() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.RIGHT, ExecType.MR, false);
	}

	@Test
	public void testVVLeftSparseMR() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.LEFT, ExecType.MR, true);
	}

	@Test
	public void testVVRightSparseMR() {
		runTransposeSelfVectorMultiplicationTest(MMTSJType.RIGHT, ExecType.MR, true);
	}

	/**
	 * Matrix case: picks the input dimensions by multiplication type and exec type
	 * (LEFT computes t(X)%*%X, RIGHT computes X%*%t(X) with transposed dims), then
	 * delegates to the shared test driver.
	 *
	 * @param type LEFT or RIGHT transpose-self multiplication
	 * @param instType CP or MR execution type
	 * @param sparse whether to use the sparse input sparsity
	 */
	private void runTransposeSelfMatrixMultiplicationTest( MMTSJType type, ExecType instType, boolean sparse )
	{
		final int rows, cols;
		final String testName;
		if( type == MMTSJType.LEFT ) {
			rows = (instType == ExecType.CP) ? rows1 : rows2;
			cols = (instType == ExecType.CP) ? cols1 : cols2;
			testName = TEST_NAME1;
		}
		else { //RIGHT: dimensions swapped relative to LEFT
			rows = (instType == ExecType.CP) ? cols1 : cols2;
			cols = (instType == ExecType.CP) ? rows1 : rows2;
			testName = TEST_NAME2;
		}

		runTransposeSelfTest(testName, rows, cols, sparse, instType);
	}

	/**
	 * Vector case: one dimension is fixed to 1 (column vector for LEFT, row vector
	 * for RIGHT), then delegates to the shared test driver.
	 *
	 * @param type LEFT or RIGHT transpose-self multiplication
	 * @param instType CP or MR execution type
	 * @param sparse whether to use the sparse input sparsity
	 */
	private void runTransposeSelfVectorMultiplicationTest( MMTSJType type, ExecType instType, boolean sparse )
	{
		final int rows, cols;
		final String testName;
		if( type == MMTSJType.LEFT ) {
			rows = (instType == ExecType.CP) ? rows1 : rows2;
			cols = 1;
			testName = TEST_NAME1;
		}
		else {
			rows = 1;
			cols = (instType == ExecType.CP) ? rows1 : rows2;
			testName = TEST_NAME2;
		}

		runTransposeSelfTest(testName, rows, cols, sparse, instType);
	}

	/**
	 * Shared driver (extracted from the previously duplicated matrix/vector runners):
	 * generates a random input matrix, runs the DML script and the R reference
	 * script, and compares the produced result matrices cell by cell.
	 *
	 * @param testName DML/R script base name
	 * @param rows number of input rows
	 * @param cols number of input columns
	 * @param sparse whether to use the sparse input sparsity
	 * @param instType CP or MR execution type (selects the runtime platform)
	 */
	private void runTransposeSelfTest( String testName, int rows, int cols, boolean sparse, ExecType instType )
	{
		double sparsity = sparse ? sparsity2 : sparsity1;

		String TEST_CACHE_DIR = "";
		if (TEST_CACHE_ENABLED) {
			TEST_CACHE_DIR = rows + "_" + cols + "_" + sparsity + "/";
		}

		//rtplatform for MR; restored in finally so later tests are unaffected
		RUNTIME_PLATFORM platformOld = rtplatform;
		rtplatform = (instType==ExecType.MR) ? RUNTIME_PLATFORM.HADOOP : RUNTIME_PLATFORM.HYBRID;

		try
		{
			TestConfiguration config = getTestConfiguration(testName);
			loadTestConfiguration(config, TEST_CACHE_DIR);

			/* This is for running the junit test the new way, i.e., construct the arguments directly */
			String HOME = SCRIPT_DIR + TEST_DIR;
			fullDMLScriptName = HOME + testName + ".dml";
			programArgs = new String[]{"-args", input("A"),
				Integer.toString(rows), Integer.toString(cols), output("B") };

			fullRScriptName = HOME + testName + ".R";
			rCmd = "Rscript" + " " + fullRScriptName + " " + inputDir() + " " + expectedDir();

			//generate actual dataset (fixed seed for reproducibility)
			double[][] A = getRandomMatrix(rows, cols, 0, 1, sparsity, 7);
			writeInputMatrix("A", A, true);

			runTest(true, false, null, -1);
			runRScript(true);

			//compare matrices
			HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromHDFS("B");
			HashMap<CellIndex, Double> rfile = readRMatrixFromFS("B");
			TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
		}
		finally
		{
			rtplatform = platformOld;
		}
	}
}
/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package com.exalttech.trex.ui.views.importPcap;

import com.exalttech.trex.remote.models.profiles.Profile;
import com.exalttech.trex.ui.components.TextFieldTableViewCell;
import com.exalttech.trex.ui.models.PacketInfo;
import com.exalttech.trex.ui.util.TrexAlertBuilder;
import com.exalttech.trex.ui.views.models.ImportPcapTableData;
import com.exalttech.trex.ui.views.streams.builder.PacketBuilderHelper;
import com.exalttech.trex.ui.views.streams.builder.VMInstructionBuilder;
import com.exalttech.trex.ui.views.streams.builder.VMInstructionBuilder.InstructionType;
import com.exalttech.trex.ui.views.streams.viewer.PacketParser;
import com.exalttech.trex.util.TrafficProfile;
import com.exalttech.trex.util.Util;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.collections.FXCollections;
import javafx.collections.ListChangeListener;
import javafx.collections.ObservableList;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.control.*;
import javafx.scene.control.cell.CheckBoxTableCell;
import javafx.scene.control.cell.PropertyValueFactory;
import javafx.scene.layout.AnchorPane;
import javafx.util.Callback;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
import org.pcap4j.core.NotOpenException;
import org.pcap4j.core.PcapHandle;
import org.pcap4j.core.PcapNativeException;
import org.pcap4j.core.Pcaps;
import org.pcap4j.packet.Packet;
import org.testng.util.Strings;

import java.io.EOFException;
import java.io.File;
import java.util.*;
import java.util.concurrent.TimeoutException;

/**
 * Table view listing the streams extracted from an imported pcap file, and
 * exporting the user-selected streams into an existing YAML traffic profile.
 */
public class ImportedPacketTableView extends AnchorPane {

    private static final Logger LOG = Logger.getLogger(ImportedPacketTableView.class.getName());

    @FXML
    TableColumn selectedColumn;
    @FXML
    TableColumn nameColumn;
    @FXML
    TableColumn packetNumColumn;
    @FXML
    TableColumn lengthColumn;
    @FXML
    TableColumn macSrcColumn;
    @FXML
    TableColumn macDstColumn;
    @FXML
    TableColumn ipSrcColumn;
    @FXML
    TableColumn ipDstColumn;
    @FXML
    TableColumn packetTypeColumn;
    @FXML
    TableView<ImportPcapTableData> importedStreamTable;

    List<Profile> profilesList;
    List<String> existingNamesList = new ArrayList<>();
    TrafficProfile trafficProfile;
    String yamlFileName;
    ObservableList<ImportPcapTableData> tableDataList = FXCollections.observableArrayList();
    ObservableList<Integer> highlightRows = FXCollections.observableArrayList();
    CheckBox selectAll;
    HighlightedRowFactory<ImportPcapTableData> highlightedRowFactory;
    Map<String, ImportPcapTableData> duplicateRowNamesMap = new HashMap<>();
    // Cursor shared by setTableData (1-based row numbering) and
    // getNextSelectedPacket (0-based scan position during doImport).
    int index = 0;
    ImportedPacketProperties propertiesBinder;
    ImportPcapTableData firstPacket = null;

    /**
     * Constructor
     *
     * @param profilesList existing profiles the imported streams are appended to
     * @param yamlFileName target YAML profile file
     */
    public ImportedPacketTableView(List<Profile> profilesList, String yamlFileName) {
        this.profilesList = profilesList;
        this.yamlFileName = yamlFileName;
        trafficProfile = new TrafficProfile();
        highlightedRowFactory = new HighlightedRowFactory<>(highlightRows);
        initView();
    }

    /**
     * Initialize view: load the FXML, wire the table columns, and collect the
     * names already used by existing profiles.
     */
    private void initView() {
        try {
            FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("/fxml/ImportedPacketTableView.fxml"));
            fxmlLoader.setRoot(this);
            fxmlLoader.setController(this);
            fxmlLoader.load();
            initTableRowsColumns();
            extractExistingNames();
        } catch (Exception ex) {
            LOG.error("Error setting UI", ex);
        }
    }

    /**
     * Set properties binder model
     *
     * @param propertiesBinder import options (prefix, src/dst rewriting, IPG, ...)
     */
    public void setPropertiesBinder(ImportedPacketProperties propertiesBinder) {
        this.propertiesBinder = propertiesBinder;
        PacketUpdater.getInstance().setImportedProperties(propertiesBinder);
    }

    /**
     * Initialize table rows and columns: checkbox selection column with a
     * select-all header, editable name column, and read-only packet detail columns.
     */
    private void initTableRowsColumns() {
        selectedColumn.setCellValueFactory(new PropertyValueFactory<>("selected"));
        selectedColumn.setCellFactory(CheckBoxTableCell.forTableColumn(selectedColumn));

        selectAll = new CheckBox();
        selectAll.getStyleClass().add("selectAll");
        selectAll.setSelected(true);
        selectAll.selectedProperty().addListener((ObservableValue<? extends Boolean> observable, Boolean oldValue, Boolean newValue) -> {
            selectAllRows();
        });
        selectedColumn.setGraphic(selectAll);

        nameColumn.setCellValueFactory(new PropertyValueFactory<>("name"));
        nameColumn.setCellFactory(new TextFieldTableViewCell());

        packetNumColumn.setCellValueFactory(new PropertyValueFactory<>("index"));
        lengthColumn.setCellValueFactory(new PropertyValueFactory<>("length"));
        macSrcColumn.setCellValueFactory(new PropertyValueFactory<>("macSrc"));
        macDstColumn.setCellValueFactory(new PropertyValueFactory<>("macDst"));
        ipSrcColumn.setCellValueFactory(new PropertyValueFactory<>("ipSrc"));
        ipDstColumn.setCellValueFactory(new PropertyValueFactory<>("ipDst"));
        packetTypeColumn.setCellValueFactory(new PropertyValueFactory<>("packetType"));

        importedStreamTable.setRowFactory(highlightedRowFactory);
    }

    /**
     * Select or deselect all rows, following the header checkbox state.
     */
    public void selectAllRows() {
        for (ImportPcapTableData item : tableDataList) {
            item.setSelected(selectAll.isSelected());
        }
    }

    /**
     * Extract stream names from existing profiles (used for duplicate detection).
     */
    private void extractExistingNames() {
        for (Profile profile : profilesList) {
            existingNamesList.add(profile.getName());
        }
    }

    /**
     * Parse pcap file to get all streams and fill the table.
     *
     * @param pcapFile pcap file to read
     * @return whether all read packets were valid
     */
    public boolean setPcapFile(File pcapFile) throws PcapNativeException, TimeoutException, NotOpenException {
        List<PacketInfo> packetInfoList = new ArrayList<>();
        PacketUpdater.getInstance().reset();
        PcapHandle handler = Pcaps.openOffline(pcapFile.getAbsolutePath());
        // FIX: the handle was previously never closed (native resource leak);
        // close it once reading is done, whether we hit EOF or bail out early.
        try {
            PacketParser parser = new PacketParser();
            Packet packet;
            while (true) {
                try {
                    packet = handler.getNextPacketEx();
                } catch (EOFException e) {
                    // normal end of capture
                    break;
                }
                if (!PacketUpdater.getInstance().validatePacket(packet)) {
                    break;
                }
                PacketInfo packetInfo = new PacketInfo();
                packet = PacketUpdater.getInstance().updatePacketSrcDst(packet);
                packetInfo.setPacket(packet);
                packetInfo.setTimeStamp(handler.getTimestamp().getTime());
                parser.parsePacket(packet, packetInfo);
                packetInfoList.add(packetInfo);
            }
        } finally {
            handler.close();
        }
        setTableData(packetInfoList);
        return PacketUpdater.getInstance().isValidPacket();
    }

    /**
     * Set table data from the parsed packets, generating default stream names
     * ("&lt;prefix&gt;_packet_&lt;n&gt;") with 1-based indexes.
     *
     * @param packetInfoList parsed packets
     */
    private void setTableData(List<PacketInfo> packetInfoList) {
        index = 1;
        tableDataList.clear();
        for (PacketInfo packetInfo : packetInfoList) {
            ImportPcapTableData tableData = new ImportPcapTableData();
            StringBuilder name = new StringBuilder();
            if (!Strings.isNullOrEmpty(propertiesBinder.getPrefix())) {
                name.append(propertiesBinder.getPrefix()).append("_");
            }
            name.append("packet_").append(index);
            tableData.setName(name.toString());
            tableData.setIndex(index);
            tableData.setLength(packetInfo.getPacket().length());
            tableData.setMacSrc(packetInfo.getSrcMac());
            tableData.setMacDst(packetInfo.getDestMac());
            tableData.setIpSrc(packetInfo.getSrcIpv4());
            tableData.setIpDst(packetInfo.getDestIpv4());
            tableData.setPacketType(trafficProfile.getPacketTypeText(packetInfo.getPacket()).getType());
            tableData.setPacket(packetInfo.getPacket());
            tableData.setHasVlan(packetInfo.hasVlan());
            tableData.setTimeStamp(packetInfo.getTimeStamp());

            tableDataList.add(tableData);
            index++;
        }
        importedStreamTable.setItems(tableDataList);
    }

    /**
     * Import pcap to current yaml file: chains the selected streams via
     * {@code next}, derives ISG/PPS from capture timestamps, and writes the
     * merged profile list back to the YAML file.
     *
     * @return true on success, false if validation or saving failed
     */
    public boolean doImport() {
        try {
            if (validateStreamNames()) {
                index = 0;
                ImportPcapTableData current = getNextSelectedPacket();
                firstPacket = current;
                ImportPcapTableData next = null;
                boolean firstStream = true;
                long diffTimeStamp = 1;
                while (index <= tableDataList.size()) {
                    if (current != null) {
                        Profile profile = new Profile();
                        profile.setName(current.getName());
                        profile.getStream().getMode().setType("single_burst");
                        String hexDataString = PacketBuilderHelper.getPacketHex(current.getPacket().getRawData());
                        profile.getStream().getPacket().setBinary(trafficProfile.encodeBinaryFromHexString(hexDataString));

                        // add vm
                        if (propertiesBinder.isDestinationEnabled() || propertiesBinder.isSourceEnabled()) {
                            profile.getStream().setAdditionalProperties(getVm(current));
                        }

                        // update pps/ISG
                        defineISG_PPSValues(profile, getIpg(diffTimeStamp, firstStream));
                        if (!firstStream) {
                            // only the first stream self-starts; the rest are chained
                            profile.getStream().setSelfStart(false);
                        }

                        // get next stream
                        next = getNextSelectedPacket();
                        if (next != null && next != current) {
                            profile.setNext(next.getName());
                            diffTimeStamp = next.getTimeStamp() - current.getTimeStamp();
                        } else if (propertiesBinder.getCount() > 0) {
                            // loop back to the first stream for the requested count
                            profile.setNext(firstPacket.getName());
                            profile.getStream().setActionCount(propertiesBinder.getCount());
                        }
                        if (firstStream) {
                            firstStream = false;
                        }
                        current = next;
                        profilesList.add(profile);
                    }
                }

                // save yaml data
                // NOTE(review): this commons-io overload uses the platform default
                // charset — consider the Charset overload (UTF-8); confirm readers.
                String yamlData = trafficProfile.convertTrafficProfileToYaml(profilesList.toArray(new Profile[profilesList.size()]));
                FileUtils.writeStringToFile(new File(yamlFileName), yamlData);
                return true;
            }
        } catch (Exception ex) {
            LOG.error("Error saving Yaml file", ex);
        }
        return false;
    }

    /**
     * Get next selected stream, advancing the shared {@code index} cursor.
     * Returns null (and pushes the cursor past the end) when no selected row
     * remains.
     *
     * @return next selected row, or null when exhausted
     */
    private ImportPcapTableData getNextSelectedPacket() {
        // FIX: was recursive (one frame per skipped row), which could overflow
        // the stack for large captures with many unselected rows. The iterative
        // scan preserves the cursor semantics: 'index' ends just past the
        // returned row, or past size() when the table is exhausted.
        while (index < tableDataList.size()) {
            ImportPcapTableData tableData = tableDataList.get(index);
            index++;
            if (tableData.isSelected()) {
                return tableData;
            }
        }
        index++;
        return null;
    }

    /**
     * Validate stream names against existing profiles and against the other
     * imported rows, highlighting duplicates and showing an error alert.
     *
     * @return true when all selected names are unique and non-empty
     */
    private boolean validateStreamNames() {
        ObservableList<Integer> duplicateIndexesList = FXCollections.observableArrayList();
        duplicateRowNamesMap.clear();

        // validate saved stream names
        for (ImportPcapTableData tableData : tableDataList) {
            if (tableData.isSelected()) {
                // comparing with existing streams
                if ((existingNamesList.contains(tableData.getName()) || Util.isNullOrEmpty(tableData.getName().trim()))) {
                    addDuplicateIndex(duplicateIndexesList, tableData);
                }
                // comparing with other streams in the list
                if (duplicateRowNamesMap.get(tableData.getName()) != null) {
                    // name is duplicate
                    addDuplicateIndex(duplicateIndexesList, tableData);
                    addDuplicateIndex(duplicateIndexesList, duplicateRowNamesMap.get(tableData.getName()));
                } else {
                    // add existing names in imported table
                    duplicateRowNamesMap.put(tableData.getName(), tableData);
                }
            }
        }
        highlightedRowFactory.getRowsToHighlight().setAll(duplicateIndexesList);
        boolean validNames = true;
        if (!duplicateIndexesList.isEmpty()) {
            validNames = false;
            TrexAlertBuilder.build()
                    .setType(Alert.AlertType.ERROR)
                    // FIX: message typo "exisiting" -> "existing"
                    .setContent("Some packet names (highlighted in red) have the same names of existing packets !")
                    .getAlert()
                    .showAndWait();
        }
        return validNames;
    }

    /**
     * Add index to duplicate list (deduplicated).
     *
     * @param duplicateIndexesList accumulator of duplicate row indexes
     * @param tableData offending row
     */
    private void addDuplicateIndex(ObservableList<Integer> duplicateIndexesList, ImportPcapTableData tableData) {
        if (!duplicateIndexesList.contains(tableData.getIndex())) {
            duplicateIndexesList.add(tableData.getIndex());
        }
    }

    /**
     * Build vm instructions for source/destination ipv4 rewriting.
     *
     * @param packetData row describing the packet
     * @return additional stream properties holding the "vm" body
     */
    public Map<String, Object> getVm(ImportPcapTableData packetData) {
        VMInstructionBuilder vmInstructionBuilder = new VMInstructionBuilder(packetData.hasVlan(),
                packetData.getPacketType().contains("UDP"));
        ArrayList<Object> instructionsList = new ArrayList<>();
        if (propertiesBinder.isDestinationEnabled()) {
            instructionsList.addAll(vmInstructionBuilder.addVmInstruction(getInstructionType(packetData, propertiesBinder.getDstAddress()),
                    propertiesBinder.getDstMode(), propertiesBinder.getDstCount(), "1", propertiesBinder.getDstAddress()));
        }
        if (propertiesBinder.isSourceEnabled()) {
            instructionsList.addAll(vmInstructionBuilder.addVmInstruction(getInstructionType(packetData, propertiesBinder.getSrcAddress()),
                    propertiesBinder.getSrcMode(), propertiesBinder.getSrcCount(), "1", propertiesBinder.getSrcAddress()));
        }

        // add ipv4 checksum instructions only when something was rewritten
        if (instructionsList.size() > 0) {
            instructionsList.addAll(vmInstructionBuilder.addChecksumInstruction());
        }

        Map<String, Object> additionalProperties = new HashMap<>();
        LinkedHashMap<String, Object> vmBody = new LinkedHashMap<>();
        vmBody.put("split_by_var", vmInstructionBuilder.getSplitByVar());
        vmBody.put("instructions", instructionsList);

        // add cache size
        vmInstructionBuilder.addCacheSize(vmBody);

        additionalProperties.put("vm", vmBody);
        return additionalProperties;
    }

    /**
     * Return instruction type according to which side the selected address is on.
     *
     * @param packetData row describing the packet
     * @param ipAddress address chosen in the import options
     * @return IP_SRC when the address matches the packet source, otherwise IP_DST
     */
    private InstructionType getInstructionType(ImportPcapTableData packetData, String ipAddress) {
        if (ipAddress.equals(packetData.getIpSrc())) {
            return InstructionType.IP_SRC;
        }
        return InstructionType.IP_DST;
    }

    /**
     * Define ISG/PPS values: ISG in microseconds from the inter-packet gap, and
     * PPS as the inverse gap (at least 1).
     *
     * @param profile profile being built
     * @param ipg inter-packet gap in milliseconds
     */
    private void defineISG_PPSValues(Profile profile, double ipg) {
        profile.getStream().setIsg(ipg * 1000);
        if (ipg == 0) {
            ipg = 1;
        }
        double ppsValue = Math.max(1.0, 1.0 / ipg);
        profile.getStream().getMode().getRate().setValue(ppsValue);
    }

    /**
     * Calculate and return the IPG value in milliseconds.
     *
     * @param diffTimestamp timestamp delta to the next packet (ms)
     * @param firstStream whether this is the first stream in the chain
     * @return configured IPG when enabled, 1 for the first stream, otherwise the
     *         capture delta scaled by the configured speedup
     */
    private double getIpg(long diffTimestamp, boolean firstStream) {
        if (propertiesBinder.isIPGSelected()) {
            return propertiesBinder.getIpg();
        } else if (firstStream) {
            return 1;
        } else {
            double ipg_usage = (double) diffTimestamp / 1000;
            double ipg = ipg_usage / propertiesBinder.getSpeedup();
            return ipg;
        }
    }

    /**
     * Row factory that re-applies the "highlightedRow" style class whenever the
     * row item or the set of rows-to-highlight changes.
     *
     * @param <T> row item type
     */
    public class HighlightedRowFactory<T> implements Callback<TableView<T>, TableRow<T>> {

        ObservableList<Integer> rowsToHighlight;

        public HighlightedRowFactory(ObservableList<Integer> rowsToHighlight) {
            this.rowsToHighlight = rowsToHighlight;
        }

        public ObservableList<Integer> getRowsToHighlight() {
            return rowsToHighlight;
        }

        @Override
        public TableRow call(TableView param) {
            TableRow row = new TableRow();
            row.itemProperty().addListener(new ChangeListener() {
                @Override
                public void changed(ObservableValue observable, Object oldValue, Object newValue) {
                    updateRowStyle(row);
                }
            });
            rowsToHighlight.addListener(new ListChangeListener<Integer>() {
                @Override
                public void onChanged(ListChangeListener.Change<? extends Integer> c) {
                    updateRowStyle(row);
                }
            });
            return row;
        }

        /**
         * Update row highlighted style.
         *
         * @param row row to restyle
         */
        private void updateRowStyle(TableRow row) {
            if (row != null && row.getItem() != null) {
                row.getStyleClass().remove("highlightedRow");
                int index = ((ImportPcapTableData) row.getItem()).getIndex();
                if (rowsToHighlight.contains(index)) {
                    row.getStyleClass().add("highlightedRow");
                }
            }
        }
    }
}
// Generated from Java.g4 by ANTLR 4.4 package profiler; import org.antlr.v4.runtime.ParserRuleContext; import org.antlr.v4.runtime.misc.NotNull; import org.antlr.v4.runtime.tree.ErrorNode; import org.antlr.v4.runtime.tree.TerminalNode; /** * This class provides an empty implementation of {@link JavaListener}, * which can be extended to create a listener which only needs to handle a subset * of the available methods. */ public class JavaBaseListener implements JavaListener { /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterMemberDeclaration(@NotNull JavaParser.MemberDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitMemberDeclaration(@NotNull JavaParser.MemberDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterDefaultValue(@NotNull JavaParser.DefaultValueContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitDefaultValue(@NotNull JavaParser.DefaultValueContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationTypeElementDeclaration(@NotNull JavaParser.AnnotationTypeElementDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationTypeElementDeclaration(@NotNull JavaParser.AnnotationTypeElementDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterType(@NotNull JavaParser.TypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitType(@NotNull JavaParser.TypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationTypeBody(@NotNull 
JavaParser.AnnotationTypeBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationTypeBody(@NotNull JavaParser.AnnotationTypeBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterGenericInterfaceMethodDeclaration(@NotNull JavaParser.GenericInterfaceMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitGenericInterfaceMethodDeclaration(@NotNull JavaParser.GenericInterfaceMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassBodyDeclaration(@NotNull JavaParser.ClassBodyDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassBodyDeclaration(@NotNull JavaParser.ClassBodyDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterBlock(@NotNull JavaParser.BlockContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitBlock(@NotNull JavaParser.BlockContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEnumBodyDeclarations(@NotNull JavaParser.EnumBodyDeclarationsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnumBodyDeclarations(@NotNull JavaParser.EnumBodyDeclarationsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterForUpdate(@NotNull JavaParser.ForUpdateContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitForUpdate(@NotNull JavaParser.ForUpdateContext ctx) { } /** * {@inheritDoc} * * <p>The 
default implementation does nothing.</p> */ @Override public void enterEnhancedForControl(@NotNull JavaParser.EnhancedForControlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnhancedForControl(@NotNull JavaParser.EnhancedForControlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationConstantRest(@NotNull JavaParser.AnnotationConstantRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationConstantRest(@NotNull JavaParser.AnnotationConstantRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterExplicitGenericInvocation(@NotNull JavaParser.ExplicitGenericInvocationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitExplicitGenericInvocation(@NotNull JavaParser.ExplicitGenericInvocationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterNonWildcardTypeArgumentsOrDiamond(@NotNull JavaParser.NonWildcardTypeArgumentsOrDiamondContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitNonWildcardTypeArgumentsOrDiamond(@NotNull JavaParser.NonWildcardTypeArgumentsOrDiamondContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterExpressionList(@NotNull JavaParser.ExpressionListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitExpressionList(@NotNull JavaParser.ExpressionListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationTypeElementRest(@NotNull JavaParser.AnnotationTypeElementRestContext ctx) { 
} /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationTypeElementRest(@NotNull JavaParser.AnnotationTypeElementRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassOrInterfaceType(@NotNull JavaParser.ClassOrInterfaceTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassOrInterfaceType(@NotNull JavaParser.ClassOrInterfaceTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeBound(@NotNull JavaParser.TypeBoundContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeBound(@NotNull JavaParser.TypeBoundContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterVariableDeclaratorId(@NotNull JavaParser.VariableDeclaratorIdContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitVariableDeclaratorId(@NotNull JavaParser.VariableDeclaratorIdContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPrimary(@NotNull JavaParser.PrimaryContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitPrimary(@NotNull JavaParser.PrimaryContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassCreatorRest(@NotNull JavaParser.ClassCreatorRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassCreatorRest(@NotNull JavaParser.ClassCreatorRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void 
enterInterfaceBodyDeclaration(@NotNull JavaParser.InterfaceBodyDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInterfaceBodyDeclaration(@NotNull JavaParser.InterfaceBodyDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeArguments(@NotNull JavaParser.TypeArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeArguments(@NotNull JavaParser.TypeArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationName(@NotNull JavaParser.AnnotationNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationName(@NotNull JavaParser.AnnotationNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFinallyBlock(@NotNull JavaParser.FinallyBlockContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFinallyBlock(@NotNull JavaParser.FinallyBlockContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeParameters(@NotNull JavaParser.TypeParametersContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeParameters(@NotNull JavaParser.TypeParametersContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterLastFormalParameter(@NotNull JavaParser.LastFormalParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitLastFormalParameter(@NotNull JavaParser.LastFormalParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default 
implementation does nothing.</p> */ @Override public void enterConstructorBody(@NotNull JavaParser.ConstructorBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitConstructorBody(@NotNull JavaParser.ConstructorBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterLiteral(@NotNull JavaParser.LiteralContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitLiteral(@NotNull JavaParser.LiteralContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationMethodOrConstantRest(@NotNull JavaParser.AnnotationMethodOrConstantRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationMethodOrConstantRest(@NotNull JavaParser.AnnotationMethodOrConstantRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCatchClause(@NotNull JavaParser.CatchClauseContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCatchClause(@NotNull JavaParser.CatchClauseContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterVariableDeclarator(@NotNull JavaParser.VariableDeclaratorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitVariableDeclarator(@NotNull JavaParser.VariableDeclaratorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeList(@NotNull JavaParser.TypeListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeList(@NotNull JavaParser.TypeListContext ctx) { } /** * 
{@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEnumConstants(@NotNull JavaParser.EnumConstantsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnumConstants(@NotNull JavaParser.EnumConstantsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassBody(@NotNull JavaParser.ClassBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassBody(@NotNull JavaParser.ClassBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCreatedName(@NotNull JavaParser.CreatedNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCreatedName(@NotNull JavaParser.CreatedNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEnumDeclaration(@NotNull JavaParser.EnumDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnumDeclaration(@NotNull JavaParser.EnumDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFormalParameter(@NotNull JavaParser.FormalParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFormalParameter(@NotNull JavaParser.FormalParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterParExpression(@NotNull JavaParser.ParExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitParExpression(@NotNull JavaParser.ParExpressionContext ctx) { } /** * {@inheritDoc} * * 
<p>The default implementation does nothing.</p> */ @Override public void enterAnnotation(@NotNull JavaParser.AnnotationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotation(@NotNull JavaParser.AnnotationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterVariableInitializer(@NotNull JavaParser.VariableInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitVariableInitializer(@NotNull JavaParser.VariableInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterElementValueArrayInitializer(@NotNull JavaParser.ElementValueArrayInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitElementValueArrayInitializer(@NotNull JavaParser.ElementValueArrayInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCreator(@NotNull JavaParser.CreatorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCreator(@NotNull JavaParser.CreatorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterArrayCreatorRest(@NotNull JavaParser.ArrayCreatorRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitArrayCreatorRest(@NotNull JavaParser.ArrayCreatorRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterExpression(@NotNull JavaParser.ExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitExpression(@NotNull 
JavaParser.ExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterConstantExpression(@NotNull JavaParser.ConstantExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitConstantExpression(@NotNull JavaParser.ConstantExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterQualifiedNameList(@NotNull JavaParser.QualifiedNameListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitQualifiedNameList(@NotNull JavaParser.QualifiedNameListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterConstructorDeclaration(@NotNull JavaParser.ConstructorDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitConstructorDeclaration(@NotNull JavaParser.ConstructorDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterForControl(@NotNull JavaParser.ForControlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitForControl(@NotNull JavaParser.ForControlContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterSuperSuffix(@NotNull JavaParser.SuperSuffixContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitSuperSuffix(@NotNull JavaParser.SuperSuffixContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterVariableDeclarators(@NotNull JavaParser.VariableDeclaratorsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ 
@Override public void exitVariableDeclarators(@NotNull JavaParser.VariableDeclaratorsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCatchType(@NotNull JavaParser.CatchTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCatchType(@NotNull JavaParser.CatchTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassOrInterfaceModifier(@NotNull JavaParser.ClassOrInterfaceModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassOrInterfaceModifier(@NotNull JavaParser.ClassOrInterfaceModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEnumConstantName(@NotNull JavaParser.EnumConstantNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnumConstantName(@NotNull JavaParser.EnumConstantNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterModifier(@NotNull JavaParser.ModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitModifier(@NotNull JavaParser.ModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterInnerCreator(@NotNull JavaParser.InnerCreatorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInnerCreator(@NotNull JavaParser.InnerCreatorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterExplicitGenericInvocationSuffix(@NotNull JavaParser.ExplicitGenericInvocationSuffixContext ctx) { } /** * {@inheritDoc} * * 
<p>The default implementation does nothing.</p> */ @Override public void exitExplicitGenericInvocationSuffix(@NotNull JavaParser.ExplicitGenericInvocationSuffixContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterVariableModifier(@NotNull JavaParser.VariableModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitVariableModifier(@NotNull JavaParser.VariableModifierContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterElementValuePair(@NotNull JavaParser.ElementValuePairContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitElementValuePair(@NotNull JavaParser.ElementValuePairContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterArrayInitializer(@NotNull JavaParser.ArrayInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitArrayInitializer(@NotNull JavaParser.ArrayInitializerContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterElementValue(@NotNull JavaParser.ElementValueContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitElementValue(@NotNull JavaParser.ElementValueContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterConstDeclaration(@NotNull JavaParser.ConstDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitConstDeclaration(@NotNull JavaParser.ConstDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterResource(@NotNull 
JavaParser.ResourceContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitResource(@NotNull JavaParser.ResourceContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterQualifiedName(@NotNull JavaParser.QualifiedNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitQualifiedName(@NotNull JavaParser.QualifiedNameContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterResourceSpecification(@NotNull JavaParser.ResourceSpecificationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitResourceSpecification(@NotNull JavaParser.ResourceSpecificationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFormalParameterList(@NotNull JavaParser.FormalParameterListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFormalParameterList(@NotNull JavaParser.FormalParameterListContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterAnnotationTypeDeclaration(@NotNull JavaParser.AnnotationTypeDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationTypeDeclaration(@NotNull JavaParser.AnnotationTypeDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterCompilationUnit(@NotNull JavaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitCompilationUnit(@NotNull JavaParser.CompilationUnitContext ctx) { } /** * {@inheritDoc} * * <p>The default 
implementation does nothing.</p> */ @Override public void enterAnnotationMethodRest(@NotNull JavaParser.AnnotationMethodRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitAnnotationMethodRest(@NotNull JavaParser.AnnotationMethodRestContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterSwitchBlockStatementGroup(@NotNull JavaParser.SwitchBlockStatementGroupContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitSwitchBlockStatementGroup(@NotNull JavaParser.SwitchBlockStatementGroupContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeParameter(@NotNull JavaParser.TypeParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeParameter(@NotNull JavaParser.TypeParameterContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterInterfaceBody(@NotNull JavaParser.InterfaceBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInterfaceBody(@NotNull JavaParser.InterfaceBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterMethodDeclaration(@NotNull JavaParser.MethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitMethodDeclaration(@NotNull JavaParser.MethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterMethodBody(@NotNull JavaParser.MethodBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitMethodBody(@NotNull 
JavaParser.MethodBodyContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeArgument(@NotNull JavaParser.TypeArgumentContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeDeclaration(@NotNull JavaParser.TypeDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeDeclaration(@NotNull JavaParser.TypeDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterGenericConstructorDeclaration(@NotNull JavaParser.GenericConstructorDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitGenericConstructorDeclaration(@NotNull JavaParser.GenericConstructorDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterClassDeclaration(@NotNull JavaParser.ClassDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitClassDeclaration(@NotNull JavaParser.ClassDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEnumConstant(@NotNull JavaParser.EnumConstantContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEnumConstant(@NotNull JavaParser.EnumConstantContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterStatement(@NotNull JavaParser.StatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> 
*/ @Override public void exitStatement(@NotNull JavaParser.StatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterImportDeclaration(@NotNull JavaParser.ImportDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitImportDeclaration(@NotNull JavaParser.ImportDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPrimitiveType(@NotNull JavaParser.PrimitiveTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitPrimitiveType(@NotNull JavaParser.PrimitiveTypeContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterInterfaceDeclaration(@NotNull JavaParser.InterfaceDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInterfaceDeclaration(@NotNull JavaParser.InterfaceDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterLocalVariableDeclarationStatement(@NotNull JavaParser.LocalVariableDeclarationStatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitLocalVariableDeclarationStatement(@NotNull JavaParser.LocalVariableDeclarationStatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterBlockStatement(@NotNull JavaParser.BlockStatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitBlockStatement(@NotNull JavaParser.BlockStatementContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFieldDeclaration(@NotNull 
JavaParser.FieldDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFieldDeclaration(@NotNull JavaParser.FieldDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterConstantDeclarator(@NotNull JavaParser.ConstantDeclaratorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitConstantDeclarator(@NotNull JavaParser.ConstantDeclaratorContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterResources(@NotNull JavaParser.ResourcesContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitResources(@NotNull JavaParser.ResourcesContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterStatementExpression(@NotNull JavaParser.StatementExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitStatementExpression(@NotNull JavaParser.StatementExpressionContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterInterfaceMethodDeclaration(@NotNull JavaParser.InterfaceMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInterfaceMethodDeclaration(@NotNull JavaParser.InterfaceMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterPackageDeclaration(@NotNull JavaParser.PackageDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitPackageDeclaration(@NotNull JavaParser.PackageDeclarationContext ctx) { } /** * {@inheritDoc} * * 
<p>The default implementation does nothing.</p> */ @Override public void enterElementValuePairs(@NotNull JavaParser.ElementValuePairsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitElementValuePairs(@NotNull JavaParser.ElementValuePairsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterLocalVariableDeclaration(@NotNull JavaParser.LocalVariableDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitLocalVariableDeclaration(@NotNull JavaParser.LocalVariableDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterNonWildcardTypeArguments(@NotNull JavaParser.NonWildcardTypeArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitNonWildcardTypeArguments(@NotNull JavaParser.NonWildcardTypeArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterInterfaceMemberDeclaration(@NotNull JavaParser.InterfaceMemberDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitInterfaceMemberDeclaration(@NotNull JavaParser.InterfaceMemberDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterSwitchLabel(@NotNull JavaParser.SwitchLabelContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitSwitchLabel(@NotNull JavaParser.SwitchLabelContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterForInit(@NotNull JavaParser.ForInitContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does 
nothing.</p> */ @Override public void exitForInit(@NotNull JavaParser.ForInitContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterFormalParameters(@NotNull JavaParser.FormalParametersContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitFormalParameters(@NotNull JavaParser.FormalParametersContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterArguments(@NotNull JavaParser.ArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitArguments(@NotNull JavaParser.ArgumentsContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterGenericMethodDeclaration(@NotNull JavaParser.GenericMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitGenericMethodDeclaration(@NotNull JavaParser.GenericMethodDeclarationContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterTypeArgumentsOrDiamond(@NotNull JavaParser.TypeArgumentsOrDiamondContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitTypeArgumentsOrDiamond(@NotNull JavaParser.TypeArgumentsOrDiamondContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void enterEveryRule(@NotNull ParserRuleContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void exitEveryRule(@NotNull ParserRuleContext ctx) { } /** * {@inheritDoc} * * <p>The default implementation does nothing.</p> */ @Override public void visitTerminal(@NotNull TerminalNode node) { } /** * {@inheritDoc} * * <p>The default implementation does 
nothing.</p> */ @Override public void visitErrorNode(@NotNull ErrorNode node) { } }
/**
 * Copyright Pravega Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.pravega.test.integration;

import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import io.netty.channel.embedded.EmbeddedChannel;
import io.netty.handler.codec.LengthFieldBasedFrameDecoder;
import io.pravega.client.ClientConfig;
import io.pravega.client.admin.impl.StreamManagerImpl;
import io.pravega.client.connection.impl.ConnectionPoolImpl;
import io.pravega.client.connection.impl.RawClient;
import io.pravega.client.connection.impl.SocketConnectionFactoryImpl;
import io.pravega.client.control.impl.Controller;
import io.pravega.client.security.auth.DelegationTokenProviderFactory;
import io.pravega.client.segment.impl.ConditionalOutputStream;
import io.pravega.client.segment.impl.ConditionalOutputStreamFactoryImpl;
import io.pravega.client.segment.impl.Segment;
import io.pravega.client.segment.impl.SegmentOutputStream;
import io.pravega.client.segment.impl.SegmentOutputStreamFactoryImpl;
import io.pravega.client.stream.EventStreamWriter;
import io.pravega.client.stream.EventWriterConfig;
import io.pravega.client.stream.ScalingPolicy;
import io.pravega.client.stream.Serializer;
import io.pravega.client.stream.StreamConfiguration;
import io.pravega.client.stream.impl.ByteBufferSerializer;
import io.pravega.client.stream.impl.ClientFactoryImpl;
import io.pravega.client.stream.impl.JavaSerializer;
import io.pravega.client.stream.impl.PendingEvent;
import io.pravega.client.stream.mock.MockClientFactory;
import io.pravega.client.stream.mock.MockController;
import io.pravega.client.stream.mock.MockStreamManager;
import io.pravega.common.Timer;
import io.pravega.common.concurrent.Futures;
import io.pravega.segmentstore.contracts.StreamSegmentStore;
import io.pravega.segmentstore.contracts.tables.TableStore;
import io.pravega.segmentstore.server.host.handler.AppendProcessor;
import io.pravega.segmentstore.server.host.handler.PravegaConnectionListener;
import io.pravega.segmentstore.server.host.handler.PravegaRequestProcessor;
import io.pravega.segmentstore.server.host.handler.ServerConnectionInboundHandler;
import io.pravega.segmentstore.server.host.handler.TrackedConnection;
import io.pravega.segmentstore.server.store.ServiceBuilder;
import io.pravega.segmentstore.server.store.ServiceBuilderConfig;
import io.pravega.segmentstore.server.store.ServiceConfig;
import io.pravega.segmentstore.server.writer.WriterConfig;
import io.pravega.shared.metrics.MetricNotifier;
import io.pravega.shared.protocol.netty.Append;
import io.pravega.shared.protocol.netty.AppendDecoder;
import io.pravega.shared.protocol.netty.CommandDecoder;
import io.pravega.shared.protocol.netty.CommandEncoder;
import io.pravega.shared.protocol.netty.ExceptionLoggingHandler;
import io.pravega.shared.protocol.netty.PravegaNodeUri;
import io.pravega.shared.protocol.netty.Reply;
import io.pravega.shared.protocol.netty.Request;
import io.pravega.shared.protocol.netty.WireCommand;
import io.pravega.shared.protocol.netty.WireCommands;
import io.pravega.shared.protocol.netty.WireCommands.AppendSetup;
import io.pravega.shared.protocol.netty.WireCommands.CreateSegment;
import io.pravega.shared.protocol.netty.WireCommands.DataAppended;
import io.pravega.shared.protocol.netty.WireCommands.Event;
import io.pravega.shared.protocol.netty.WireCommands.NoSuchSegment;
import io.pravega.shared.protocol.netty.WireCommands.SegmentCreated;
import io.pravega.shared.protocol.netty.WireCommands.SetupAppend;
import io.pravega.test.common.InlineExecutor;
import io.pravega.test.common.LeakDetectorTestSuite;
import io.pravega.test.common.TestUtils;
import java.nio.ByteBuffer;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.Consumer;
import lombok.Cleanup;
import lombok.extern.slf4j.Slf4j;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import static io.pravega.shared.protocol.netty.WireCommands.MAX_WIRECOMMAND_SIZE;
import static io.pravega.shared.protocol.netty.WireCommands.TYPE_PLUS_LENGTH_SIZE;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.mock;

/**
 * Integration tests for the append path of the segment store.
 *
 * <p>The low-level tests drive the wire protocol directly through a Netty
 * {@link EmbeddedChannel} (see {@link #createChannel}); the higher-level tests go
 * through the client stack (segment client, conditional client, event writer)
 * against an in-process {@link PravegaConnectionListener}.
 *
 * <p>Extends {@link LeakDetectorTestSuite}, so every network-facing resource
 * must be closed (via Lombok {@code @Cleanup}) before a test returns.
 */
@Slf4j
public class AppendTest extends LeakDetectorTestSuite {
    // Shared in-memory segment store used by all tests; initialized once per class.
    private static final ServiceBuilder SERVICE_BUILDER = ServiceBuilder.newInMemoryBuilder(ServiceBuilderConfig.builder()
            .include(ServiceConfig.builder().with(ServiceConfig.CONTAINER_COUNT, 1))
            .include(WriterConfig.builder().with(WriterConfig.MAX_ROLLOVER_SIZE, 10485760L))
            .build());
    // No-op seal callback; these tests never expect a segment to be sealed.
    private final Consumer<Segment> segmentSealedCallback = segment -> { };

    @BeforeClass
    public static void setup() throws Exception {
        SERVICE_BUILDER.initialize();
    }

    @AfterClass
    public static void teardown() {
        SERVICE_BUILDER.close();
    }

    /**
     * A SetupAppend for a segment that was never created must be answered with NoSuchSegment.
     */
    @Test(timeout = 10000)
    public void testSetupOnNonExistentSegment() throws Exception {
        String segment = "testSetupOnNonExistentSegment";
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        @Cleanup
        EmbeddedChannel channel = createChannel(store);

        UUID uuid = UUID.randomUUID();
        NoSuchSegment setup = (NoSuchSegment) sendRequest(channel, new SetupAppend(1, uuid, segment, ""));

        assertEquals(segment, setup.getSegment());
    }

    /**
     * Create a segment, set up an append and verify the DataAppended ack fields.
     */
    @Test(timeout = 10000)
    public void sendReceivingAppend() throws Exception {
        String segment = "sendReceivingAppend";
        ByteBuf data = Unpooled.wrappedBuffer("Hello world\n".getBytes());
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        @Cleanup
        EmbeddedChannel channel = createChannel(store);

        SegmentCreated created = (SegmentCreated) sendRequest(channel,
                new CreateSegment(1, segment, CreateSegment.NO_SCALE, 0, "", 1024L));
        assertEquals(segment, created.getSegment());

        UUID uuid = UUID.randomUUID();
        AppendSetup setup = (AppendSetup) sendRequest(channel, new SetupAppend(2, uuid, segment, ""));
        assertEquals(segment, setup.getSegment());
        assertEquals(uuid, setup.getWriterId());

        // Event number doubles as the append's sequence here; first ack has no predecessor.
        DataAppended ack = (DataAppended) sendRequest(channel,
                new Append(segment, uuid, data.readableBytes(), new Event(data), 1L));
        assertEquals(uuid, ack.getWriterId());
        assertEquals(data.readableBytes(), ack.getEventNumber());
        assertEquals(Long.MIN_VALUE, ack.getPreviousEventNumber());
    }

    /**
     * Same as {@link #sendReceivingAppend()} but with a payload of the maximum event size.
     */
    @Test(timeout = 10000)
    public void sendLargeAppend() throws Exception {
        String segment = "sendLargeAppend";
        ByteBuf data = Unpooled.wrappedBuffer(new byte[Serializer.MAX_EVENT_SIZE]);
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        @Cleanup
        EmbeddedChannel channel = createChannel(store);

        SegmentCreated created = (SegmentCreated) sendRequest(channel,
                new CreateSegment(1, segment, CreateSegment.NO_SCALE, 0, "", 1024L));
        assertEquals(segment, created.getSegment());

        UUID uuid = UUID.randomUUID();
        AppendSetup setup = (AppendSetup) sendRequest(channel, new SetupAppend(2, uuid, segment, ""));
        assertEquals(segment, setup.getSegment());
        assertEquals(uuid, setup.getWriterId());

        DataAppended ack = (DataAppended) sendRequest(channel,
                new Append(segment, uuid, data.readableBytes(), new Event(data), 1L));
        assertEquals(uuid, ack.getWriterId());
        assertEquals(data.readableBytes(), ack.getEventNumber());
        assertEquals(Long.MIN_VALUE, ack.getPreviousEventNumber());
    }

    /**
     * Two consecutive appends from the same writer: the second ack must chain to the first
     * via {@code previousEventNumber}.
     */
    @Test(timeout = 10000)
    public void testMultipleAppends() throws Exception {
        String segment = "testMultipleAppends";
        ByteBuf data = Unpooled.wrappedBuffer("Hello world\n".getBytes());
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        @Cleanup
        EmbeddedChannel channel = createChannel(store);

        SegmentCreated created = (SegmentCreated) sendRequest(channel,
                new CreateSegment(1, segment, CreateSegment.NO_SCALE, 0, "", 1024L));
        assertEquals(segment, created.getSegment());

        UUID uuid = UUID.randomUUID();
        AppendSetup setup = (AppendSetup) sendRequest(channel, new SetupAppend(2, uuid, segment, ""));
        assertEquals(segment, setup.getSegment());
        assertEquals(uuid, setup.getWriterId());

        // The buffer is wrapped into two Events; retain once so the second Event
        // still has a live reference after the first append releases it.
        data.retain();
        DataAppended ack = (DataAppended) sendRequest(channel, new Append(segment, uuid, 1, new Event(data), 1L));
        assertEquals(uuid, ack.getWriterId());
        assertEquals(1, ack.getEventNumber());
        assertEquals(Long.MIN_VALUE, ack.getPreviousEventNumber());

        DataAppended ack2 = (DataAppended) sendRequest(channel, new Append(segment, uuid, 2, new Event(data), 1L));
        assertEquals(uuid, ack2.getWriterId());
        assertEquals(2, ack2.getEventNumber());
        assertEquals(1, ack2.getPreviousEventNumber());
    }

    /**
     * Writes a request into the embedded channel and polls for the encoded reply,
     * running pending channel tasks while waiting (up to ~5s).
     *
     * @param channel embedded channel built by {@link #createChannel}
     * @param request wire-protocol request to send
     * @return the decoded reply
     * @throws IllegalStateException if no reply is produced in time
     */
    static Reply sendRequest(EmbeddedChannel channel, Request request) throws Exception {
        channel.writeInbound(request);
        log.info("Request {} sent to Segment store", request);
        Object encodedReply = channel.readOutbound();
        for (int i = 0; encodedReply == null && i < 500; i++) {
            channel.runPendingTasks();
            Thread.sleep(10);
            encodedReply = channel.readOutbound();
        }
        if (encodedReply == null) {
            log.error("Error while try waiting for a response from Segment Store");
            throw new IllegalStateException("No reply to request: " + request);
        }
        WireCommand decoded = CommandDecoder.parseCommand((ByteBuf) encodedReply);
        // The outbound buffer is owned by this method once read; release to avoid a leak.
        ((ByteBuf) encodedReply).release();
        assertNotNull(decoded);
        return (Reply) decoded;
    }

    /**
     * Builds an {@link EmbeddedChannel} with the server-side pipeline (encoder, frame
     * decoder, command/append decoders) wired to an {@link AppendProcessor} backed by
     * the given store.
     */
    static EmbeddedChannel createChannel(StreamSegmentStore store) {
        ServerConnectionInboundHandler lsh = new ServerConnectionInboundHandler();
        EmbeddedChannel channel = new EmbeddedChannel(new ExceptionLoggingHandler(""),
                new CommandEncoder(null, MetricNotifier.NO_OP_METRIC_NOTIFIER),
                new LengthFieldBasedFrameDecoder(MAX_WIRECOMMAND_SIZE, 4, 4),
                new CommandDecoder(),
                new AppendDecoder(),
                lsh);
        lsh.setRequestProcessor(AppendProcessor.defaultBuilder()
                .store(store)
                .connection(new TrackedConnection(lsh))
                .nextRequestProcessor(new PravegaRequestProcessor(store, mock(TableStore.class), lsh))
                .build());
        return channel;
    }

    /**
     * Appends one event through the segment client against a real listener socket.
     */
    @Test(timeout = 10000)
    public void appendThroughSegmentClient() throws Exception {
        String endpoint = "localhost";
        int port = TestUtils.getAvailableListenPort();
        String testString = "Hello world\n";
        String scope = "scope";
        String stream = "appendThroughSegmentClient";
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore,
                SERVICE_BUILDER.getLowPriorityExecutor());
        server.startListening();

        @Cleanup
        SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        @Cleanup
        ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
        @Cleanup
        Controller controller = new MockController(endpoint, port, connectionPool, true);
        controller.createScope(scope);
        controller.createStream(scope, stream, StreamConfiguration.builder().build());

        SegmentOutputStreamFactoryImpl segmentClient = new SegmentOutputStreamFactoryImpl(controller, connectionPool);
        Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream),
                RuntimeException::new).getSegments().iterator().next();
        @Cleanup
        SegmentOutputStream out = segmentClient.createOutputStreamForSegment(segment, segmentSealedCallback,
                EventWriterConfig.builder().build(), DelegationTokenProviderFactory.createWithEmptyToken());
        CompletableFuture<Void> ack = new CompletableFuture<>();
        out.write(PendingEvent.withHeader(null, ByteBuffer.wrap(testString.getBytes()), ack));
        ack.get(5, TimeUnit.SECONDS);
    }

    /**
     * Conditional (offset-checked) append at offset 0 must succeed on a fresh segment.
     */
    @Test(timeout = 10000)
    public void appendThroughConditionalClient() throws Exception {
        String endpoint = "localhost";
        int port = TestUtils.getAvailableListenPort();
        String testString = "Hello world\n";
        String scope = "scope";
        String stream = "appendThroughConditionalClient";
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore,
                SERVICE_BUILDER.getLowPriorityExecutor());
        server.startListening();

        @Cleanup
        SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(ClientConfig.builder().build());
        @Cleanup
        ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(ClientConfig.builder().build(), clientCF);
        @Cleanup
        Controller controller = new MockController(endpoint, port, connectionPool, true);
        controller.createScope(scope);
        controller.createStream(scope, stream, StreamConfiguration.builder().build());

        ConditionalOutputStreamFactoryImpl segmentClient = new ConditionalOutputStreamFactoryImpl(controller, connectionPool);
        Segment segment = Futures.getAndHandleExceptions(controller.getCurrentSegments(scope, stream),
                RuntimeException::new).getSegments().iterator().next();
        @Cleanup
        ConditionalOutputStream out = segmentClient.createConditionalOutputStream(segment,
                DelegationTokenProviderFactory.createWithEmptyToken(), EventWriterConfig.builder().build());
        assertTrue(out.write(ByteBuffer.wrap(testString.getBytes()), 0));
    }

    /**
     * Full write path through {@link EventStreamWriter} with a mock stream manager.
     */
    @Test(timeout = 10000)
    public void appendThroughStreamingClient() throws InterruptedException, ExecutionException, TimeoutException {
        String endpoint = "localhost";
        String streamName = "appendThroughStreamingClient";
        int port = TestUtils.getAvailableListenPort();
        String testString = "Hello world\n";
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore,
                SERVICE_BUILDER.getLowPriorityExecutor());
        server.startListening();

        @Cleanup
        MockStreamManager streamManager = new MockStreamManager("Scope", endpoint, port);
        @Cleanup
        MockClientFactory clientFactory = streamManager.getClientFactory();
        streamManager.createScope("Scope");
        streamManager.createStream("Scope", streamName, null);
        @Cleanup
        EventStreamWriter<String> producer = clientFactory.createEventWriter(streamName, new JavaSerializer<>(),
                EventWriterConfig.builder().build());
        Future<Void> ack = producer.writeEvent(testString);
        ack.get(5, TimeUnit.SECONDS);
    }

    /**
     * Writes 1000 sizeable events in batches and truncates the segment after each batch.
     * Payload size is scaled to the available heap (capped at 1 MiB).
     */
    @Test(timeout = 100000)
    public void appendALotOfData() {
        String endpoint = "localhost";
        String scope = "Scope";
        String streamName = "appendALotOfData";
        int port = TestUtils.getAvailableListenPort();
        long heapSize = Runtime.getRuntime().maxMemory();
        long messageSize = Math.min(1024 * 1024, heapSize / 20000);
        ByteBuffer payload = ByteBuffer.allocate((int) messageSize);
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
        @Cleanup("shutdown")
        InlineExecutor tokenExpiryExecutor = new InlineExecutor();
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore,
                tokenExpiryExecutor);
        server.startListening();

        ClientConfig config = ClientConfig.builder().build();
        // FIX: clientCF and controller were previously leaked (no @Cleanup), unlike
        // the sibling tests in this leak-detecting suite.
        @Cleanup
        SocketConnectionFactoryImpl clientCF = new SocketConnectionFactoryImpl(config);
        @Cleanup
        ConnectionPoolImpl connectionPool = new ConnectionPoolImpl(config, clientCF);
        @Cleanup
        Controller controller = new MockController(endpoint, port, connectionPool, true);
        @Cleanup
        StreamManagerImpl streamManager = new StreamManagerImpl(controller, connectionPool);
        streamManager.createScope(scope);
        @Cleanup
        ClientFactoryImpl clientFactory = new ClientFactoryImpl(scope, controller, config);
        streamManager.createStream(scope, streamName,
                StreamConfiguration.builder().scalingPolicy(ScalingPolicy.fixed(1)).build());
        @Cleanup
        EventStreamWriter<ByteBuffer> producer = clientFactory.createEventWriter(streamName,
                new ByteBufferSerializer(), EventWriterConfig.builder().build());
        @Cleanup
        RawClient rawClient = new RawClient(new PravegaNodeUri(endpoint, port), connectionPool);
        for (int i = 0; i < 10; i++) {
            for (int j = 0; j < 100; j++) {
                producer.writeEvent(payload.slice());
            }
            producer.flush();
            long requestId = rawClient.getFlow().getNextSequenceNumber();
            String scopedName = new Segment(scope, streamName, 0).getScopedName();
            // Truncate everything written so far; each event costs payload + wire header bytes.
            WireCommands.TruncateSegment request = new WireCommands.TruncateSegment(requestId, scopedName,
                    i * 100L * (payload.remaining() + TYPE_PLUS_LENGTH_SIZE), "");
            Reply reply = rawClient.sendRequest(requestId, request).join();
            assertFalse(reply.toString(), reply.isFailure());
        }
        producer.close();
    }

    /**
     * Smoke benchmark of synchronous vs. pipelined writes; fails if either exceeds 10s.
     */
    @Test(timeout = 20000)
    public void miniBenchmark() throws InterruptedException, ExecutionException, TimeoutException {
        String endpoint = "localhost";
        String streamName = "miniBenchmark";
        int port = TestUtils.getAvailableListenPort();
        byte[] testPayload = new byte[1000];
        StreamSegmentStore store = SERVICE_BUILDER.createStreamSegmentService();
        TableStore tableStore = SERVICE_BUILDER.createTableStoreService();
        @Cleanup
        PravegaConnectionListener server = new PravegaConnectionListener(false, port, store, tableStore,
                SERVICE_BUILDER.getLowPriorityExecutor());
        server.startListening();

        @Cleanup
        MockStreamManager streamManager = new MockStreamManager("Scope", endpoint, port);
        @Cleanup
        MockClientFactory clientFactory = streamManager.getClientFactory();
        streamManager.createScope("Scope");
        streamManager.createStream("Scope", streamName, null);
        @Cleanup
        EventStreamWriter<ByteBuffer> producer = clientFactory.createEventWriter(streamName,
                new ByteBufferSerializer(), EventWriterConfig.builder().build());

        long blockingTime = timeWrites(testPayload, 3000, producer, true);
        long nonBlockingTime = timeWrites(testPayload, 60000, producer, false);
        System.out.println("Blocking took: " + blockingTime + "ms.");
        System.out.println("Non blocking took: " + nonBlockingTime + "ms.");
        assertTrue(blockingTime < 10000);
        assertTrue(nonBlockingTime < 10000);
    }

    /**
     * Writes {@code number} events, optionally waiting for each ack, then flushes.
     * Tracks the worst per-event ack latency as a side effect (printed in ms).
     *
     * @param synchronous if true, block up to 5s on every ack before the next write
     * @return total elapsed milliseconds for all writes plus the flush
     */
    private long timeWrites(byte[] testPayload, int number, EventStreamWriter<ByteBuffer> producer, boolean synchronous)
            throws InterruptedException, ExecutionException, TimeoutException {
        Timer timer = new Timer();
        AtomicLong maxLatency = new AtomicLong(0);
        for (int i = 0; i < number; i++) {
            Timer latencyTimer = new Timer();
            CompletableFuture<Void> ack = producer.writeEvent(ByteBuffer.wrap(testPayload));
            ack.thenRun(() -> {
                long elapsed = latencyTimer.getElapsedNanos();
                maxLatency.getAndUpdate(l -> Math.max(elapsed, l));
            });
            if (synchronous) {
                ack.get(5, TimeUnit.SECONDS);
            }
        }
        producer.flush();
        System.out.println("Max latency: " + (maxLatency.get() / 1000000.0));
        return timer.getElapsedMillis();
    }
}
// Copyright (c) 2011 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

package org.chromium.sdk.internal.protocolparser.dynamicimpl;

import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.lang.reflect.Type;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.chromium.sdk.internal.protocolparser.JsonParseMethod;
import org.chromium.sdk.internal.protocolparser.JsonParserRoot;
import org.chromium.sdk.internal.protocolparser.JsonProtocolModelParseException;
import org.chromium.sdk.internal.protocolparser.JsonProtocolParseException;
import org.chromium.sdk.internal.protocolparser.dynamicimpl.JavaCodeGenerator.ClassScope;
import static org.chromium.sdk.util.BasicUtil.*;
import org.json.simple.JSONObject;

/**
 * Dynamic implementation of user 'root' interface to parser.
 * Validates the interface (annotations, signatures, exception declarations) and
 * backs it with a dynamic {@link Proxy} that dispatches each parse method to the
 * {@link TypeHandler} registered for its return type.
 * @param <R> 'root' interface type
 * @see JsonParserRoot
 */
class ParserRootImpl<R> {
  private final Class<R> rootClass;
  private final InvocationHandlerImpl invocationHandler;
  private final R instance;

  /**
   * Parses {@code rootClass} (and its base interfaces) and builds the proxy instance.
   * @throws JsonProtocolModelParseException if the interface violates the parser-root contract
   */
  @SuppressWarnings("unchecked")
  ParserRootImpl(Class<R> rootClass, Map<Class<?>, TypeHandler<?>> type2TypeHandler)
      throws JsonProtocolModelParseException {
    this.rootClass = rootClass;
    ParseInterfaceSession session = new ParseInterfaceSession(type2TypeHandler);
    session.run(rootClass);
    this.invocationHandler = session.createInvocationHandler();
    Object result = Proxy.newProxyInstance(rootClass.getClassLoader(),
        new Class<?>[] { rootClass }, invocationHandler);
    // Safe: the proxy implements rootClass by construction.
    this.instance = (R) result;
  }

  R getInstance() {
    return instance;
  }

  /**
   * One-shot helper that walks the root interface hierarchy and accumulates the
   * method-to-delegate map used by the invocation handler.
   */
  private static class ParseInterfaceSession {
    private final Map<Class<?>, TypeHandler<?>> type2TypeHandler;
    private final Set<Class<?>> visitedInterfaces = new HashSet<Class<?>>(1);
    private final Map<Method, MethodDelegate> methodMap = new HashMap<Method, MethodDelegate>();

    ParseInterfaceSession(Map<Class<?>, TypeHandler<?>> type2TypeHandler) {
      this.type2TypeHandler = type2TypeHandler;
    }

    void run(Class<?> clazz) throws JsonProtocolModelParseException {
      parseInterfaceRecursive(clazz);
      // Route Object methods (toString/equals/hashCode...) back to the handler itself.
      for (Method method : BaseHandlersLibrary.OBJECT_METHODS) {
        methodMap.put(method, new SelfCallDelegate(method));
      }
    }

    /**
     * Validates one interface and all of its base interfaces. Every method must be
     * annotated with {@link JsonParseMethod}, declare exactly
     * {@link JsonProtocolParseException}, have a known return type and take a single
     * JSON argument.
     */
    private void parseInterfaceRecursive(Class<?> clazz) throws JsonProtocolModelParseException {
      if (containsSafe(visitedInterfaces, clazz)) {
        return;
      }
      visitedInterfaces.add(clazz);
      if (!clazz.isInterface()) {
        throw new JsonProtocolModelParseException(
            "Parser root type must be an interface: " + clazz);
      }
      JsonParserRoot jsonParserRoot = clazz.getAnnotation(JsonParserRoot.class);
      if (jsonParserRoot == null) {
        throw new JsonProtocolModelParseException(
            JsonParserRoot.class.getCanonicalName() + " annotation is expected in " + clazz);
      }
      for (Method m : clazz.getMethods()) {
        JsonParseMethod jsonParseMethod = m.getAnnotation(JsonParseMethod.class);
        if (jsonParseMethod == null) {
          throw new JsonProtocolModelParseException(
              JsonParseMethod.class.getCanonicalName() + " annotation is expected in " + clazz);
        }
        Class<?>[] exceptionTypes = m.getExceptionTypes();
        if (exceptionTypes.length > 1) {
          throw new JsonProtocolModelParseException("Too many exception declared in " + m);
        }
        if (exceptionTypes.length < 1 || exceptionTypes[0] != JsonProtocolParseException.class) {
          throw new JsonProtocolModelParseException(
              JsonProtocolParseException.class.getCanonicalName() +
              " exception must be declared in " + m);
        }
        Type returnType = m.getGenericReturnType();
        TypeHandler<?> typeHandler = type2TypeHandler.get(returnType);
        if (typeHandler == null) {
          throw new JsonProtocolModelParseException("Unknown return type in " + m);
        }
        Type[] arguments = m.getGenericParameterTypes();
        if (arguments.length != 1) {
          throw new JsonProtocolModelParseException("Exactly one argument is expected in " + m);
        }
        Type argument = arguments[0];
        MethodDelegate delegate;
        // JSONObject and Object arguments are handled identically (merged branches).
        if (argument == JSONObject.class || argument == Object.class) {
          delegate = new ParseDelegate(typeHandler);
        } else {
          throw new JsonProtocolModelParseException("Unrecognized argument type in " + m);
        }
        methodMap.put(m, delegate);
      }
      for (Type baseType : clazz.getGenericInterfaces()) {
        if (!(baseType instanceof Class)) {
          throw new JsonProtocolModelParseException("Base interface must be class in " + clazz);
        }
        Class<?> baseClass = (Class<?>) baseType;
        parseInterfaceRecursive(baseClass);
      }
    }

    InvocationHandlerImpl createInvocationHandler() {
      return new InvocationHandlerImpl(methodMap);
    }
  }

  /**
   * Proxy invocation handler: looks up the per-method delegate and forwards the call.
   */
  private static class InvocationHandlerImpl implements InvocationHandler {
    private final Map<Method, MethodDelegate> map;

    InvocationHandlerImpl(Map<Method, MethodDelegate> map) {
      this.map = map;
    }

    @Override
    public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
      return getSafe(map, method).invoke(proxy, this, args);
    }

    /** Emits a static Java method per mapped delegate (code generation path). */
    public void writeStaticMethodJava(ClassScope scope) {
      for (Map.Entry<Method, MethodDelegate> en : map.entrySet()) {
        en.getValue().writeStaticMethodJava(scope, en.getKey());
      }
    }
  }

  /** Per-method behavior: runtime dispatch plus the generated-code equivalent. */
  private abstract static class MethodDelegate {
    abstract Object invoke(Object proxy, InvocationHandlerImpl invocationHandlerImpl,
        Object[] args) throws Throwable;
    abstract void writeStaticMethodJava(ClassScope scope, Method key);
  }

  /** Delegate for parse methods: hands the single argument to the type handler. */
  private static class ParseDelegate extends MethodDelegate {
    private final TypeHandler<?> typeHandler;

    ParseDelegate(TypeHandler<?> typeHandler) {
      this.typeHandler = typeHandler;
    }

    @Override
    Object invoke(Object proxy, InvocationHandlerImpl invocationHandlerImpl, Object[] args)
        throws JsonProtocolParseException {
      Object obj = args[0];
      return typeHandler.parseRoot(obj);
    }

    @Override
    void writeStaticMethodJava(ClassScope scope, Method method) {
      MethodHandler.writeMethodDeclarationJava(scope, method, STATIC_METHOD_PARAM_NAME_LIST);
      scope.append(JavaCodeGenerator.Util.THROWS_CLAUSE + " {\n");
      scope.indentRight();
      scope.startLine("return " + scope.getTypeImplReference(typeHandler) + ".parse(" +
          STATIC_METHOD_PARAM_NAME + ");\n");
      scope.indentLeft();
      scope.startLine("}\n");
      scope.append("\n");
    }

    private static final String STATIC_METHOD_PARAM_NAME = "obj";
    private static final List<String> STATIC_METHOD_PARAM_NAME_LIST =
        Collections.singletonList(STATIC_METHOD_PARAM_NAME);
  }

  /**
   * Delegate for Object methods: reflectively invokes them on the invocation
   * handler itself. Emits nothing in generated code.
   */
  private static class SelfCallDelegate extends MethodDelegate {
    private final Method method;

    SelfCallDelegate(Method method) {
      this.method = method;
    }

    @Override
    Object invoke(Object proxy, InvocationHandlerImpl invocationHandlerImpl, Object[] args)
        throws Throwable {
      try {
        return method.invoke(invocationHandlerImpl, args);
      } catch (IllegalAccessException e) {
        throw new RuntimeException(e);
      } catch (InvocationTargetException e) {
        throw new RuntimeException(e);
      }
    }

    @Override
    void writeStaticMethodJava(ClassScope scope, Method method) {
    }
  }

  public Class<R> getType() {
    return rootClass;
  }

  public void writeStaticMethodJava(ClassScope rootClassScope) {
    invocationHandler.writeStaticMethodJava(rootClassScope);
  }
}
/* File: DBConfig.java ; This file is part of Twister. Version: 2.004 Copyright (C) 2012-2013 , Luxoft Authors: Andrei Costachi <acostachi@luxoft.com> Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. */ import javax.swing.JPanel; import java.awt.Dimension; import javax.swing.border.BevelBorder; import javax.swing.BorderFactory; import java.awt.Color; import javax.swing.JLabel; import javax.swing.JTextField; import javax.swing.JButton; import javax.swing.JFileChooser; import java.awt.event.ActionListener; import java.awt.event.ActionEvent; import java.io.FileInputStream; import java.io.File; import java.nio.file.Files; import static java.nio.file.StandardCopyOption.REPLACE_EXISTING; import javax.swing.JPasswordField; import java.io.InputStream; import java.io.ByteArrayOutputStream; import java.io.FileOutputStream; import org.w3c.dom.Document; import org.w3c.dom.Element; import org.w3c.dom.Node; import org.w3c.dom.NodeList; import javax.xml.parsers.DocumentBuilderFactory; import javax.xml.parsers.DocumentBuilder; import javax.xml.transform.Result; import javax.xml.transform.stream.StreamResult; import javax.xml.transform.TransformerFactory; import javax.xml.transform.Transformer; import javax.xml.transform.OutputKeys; import javax.xml.transform.dom.DOMSource; import javax.swing.JOptionPane; import com.twister.CustomDialog; import java.io.BufferedWriter; import java.io.FileWriter; import javax.xml.bind.DatatypeConverter; public class DBConfig extends JPanel{ Document doc; File theone; private JTextField 
tdatabase,tserver,tuser; JPasswordField tpassword; DatabaseInterface databaseinterface; public DBConfig(){ databaseinterface = new DatabaseInterface(); add(databaseinterface); setBorder(BorderFactory.createBevelBorder(BevelBorder.LOWERED)); setBackground(Color.WHITE); JLabel file = new JLabel("File: "); file.setBounds(15,10,50,20); final JTextField tfile = new JTextField(); tfile.setBounds(100,10,170,25); JButton browse = new JButton("Browse"); browse.addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent ev){ JFileChooser chooser = new JFileChooser(); chooser.setFileFilter(new XMLFilter()); chooser.setCurrentDirectory(new java.io.File(".")); chooser.setDialogTitle("Select XML File"); if (chooser.showOpenDialog(RunnerRepository.window) == JFileChooser.APPROVE_OPTION) { File f = chooser.getSelectedFile(); try{tfile.setText(f.getCanonicalPath());} catch(Exception e){e.printStackTrace();}}}}); browse.setBounds(275,13,90,20); JButton upload = new JButton("Upload"); upload.setBounds(375,13,90,20); upload.addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent ev){ boolean saved = true; try{File f = new File(tfile.getText()); FileInputStream stream = new FileInputStream(f); RunnerRepository.uploadRemoteFile(RunnerRepository.REMOTEDATABASECONFIGPATH, stream, f.getName()); Files.copy(f.toPath(), new File(RunnerRepository.getConfigDirectory()+ RunnerRepository.getBar()+f.getName()).toPath(), REPLACE_EXISTING); RunnerRepository.resetDBConf(f.getName(),false);} catch(Exception e){ saved = false; e.printStackTrace();} if(saved){ CustomDialog.showInfo(JOptionPane.INFORMATION_MESSAGE, DBConfig.this, "Successful", "File successfully uploaded");} else{ CustomDialog.showInfo(JOptionPane.WARNING_MESSAGE, DBConfig.this, "Warning", "File could not uploaded");}}}); JLabel database = new JLabel("Database: "); database.setBounds(15,55,90,20); tdatabase = new JTextField(); tdatabase.setBounds(100,55,170,25); JLabel server = new 
JLabel("Server: "); server.setBounds(15,80,90,20); tserver = new JTextField(); tserver.setBounds(100,80,170,25); JLabel user = new JLabel("User: "); user.setBounds(15,105,50,20); tuser = new JTextField(); tuser.setBounds(100,105,170,25); JLabel password = new JLabel("Password: "); password.setBounds(15,130,90,20); tpassword = new JPasswordField(); tpassword.setBounds(100,130,170,25); refresh(); JButton save = new JButton("Save"); save.setBounds(200,155,70,20); save.addActionListener(new ActionListener(){ public void actionPerformed(ActionEvent ev){ if(doc!=null){ if(tpassword.getPassword().length == 0){ CustomDialog.showInfo(JOptionPane.WARNING_MESSAGE, DBConfig.this, "Warning", "Warning, password not set");} boolean saved = true; try{theone = new File(RunnerRepository.temp+RunnerRepository.getBar()+"Twister"+ RunnerRepository.getBar()+"config"+RunnerRepository.getBar()+new File( RunnerRepository.REMOTEDATABASECONFIGFILE).getName()); try{NodeList nodeLst = doc.getElementsByTagName("server"); if(nodeLst.item(0).getChildNodes().getLength()>0)nodeLst. item(0).getChildNodes().item(0).setNodeValue(tserver.getText()); else nodeLst.item(0).appendChild(doc.createTextNode( tserver.getText())); nodeLst = doc.getElementsByTagName("database"); if(nodeLst.item(0).getChildNodes().getLength()>0)nodeLst. item(0).getChildNodes().item(0).setNodeValue(tdatabase. getText()); else nodeLst.item(0).appendChild(doc.createTextNode(tdatabase. getText())); nodeLst = doc.getElementsByTagName("user"); if(nodeLst.item(0).getChildNodes().getLength()>0)nodeLst. item(0).getChildNodes().item(0).setNodeValue(tuser.getText()); else nodeLst.item(0).appendChild(doc.createTextNode(tuser. 
getText())); if(tpassword.getPassword().length != 0 && !(new String( tpassword.getPassword()).equals("****"))){ nodeLst = doc.getElementsByTagName("password"); String p = ""; byte mydata[]=new String(tpassword.getPassword()).getBytes(); try{p = DatatypeConverter.printBase64Binary(mydata);} catch(Exception e){e.printStackTrace();} if(nodeLst.item(0).getChildNodes().getLength()>0)nodeLst. item(0).getChildNodes().item(0).setNodeValue(p); else nodeLst.item(0).appendChild(doc.createTextNode(p));}} catch(Exception e){ saved = false; System.out.println(doc.getDocumentURI()+ " may not be properly formatted");} Result result = new StreamResult(theone); try{DOMSource source = new DOMSource(doc); TransformerFactory transformerFactory = TransformerFactory. newInstance(); Transformer transformer = transformerFactory.newTransformer(); transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "4"); transformer.transform(source, result); FileInputStream input = new FileInputStream(theone); RunnerRepository.uploadRemoteFile(RunnerRepository.REMOTEDATABASECONFIGPATH, input, theone.getName()); } catch(Exception e){ saved = false; e.printStackTrace(); System.out.println("Could not save in file : "+RunnerRepository. 
temp+RunnerRepository.getBar()+"Twister"+RunnerRepository.getBar()+"Config"+ RunnerRepository.getBar()+RunnerRepository.REMOTEDATABASECONFIGFILE+" and send to "+ RunnerRepository.REMOTEDATABASECONFIGPATH);}} catch(Exception e){ saved = false; e.printStackTrace();} if(saved){ CustomDialog.showInfo(JOptionPane.INFORMATION_MESSAGE, DBConfig.this, "Successful", "File successfully saved");} else{ CustomDialog.showInfo(JOptionPane.WARNING_MESSAGE, DBConfig.this, "Warning", "File could not be saved ");}}}}); } private void refresh(){ try{ System.out.println("refreshing database"); tserver.setText(""); tdatabase.setText(""); tpassword.setText(""); tuser.setText(""); theone = new File(RunnerRepository.temp+RunnerRepository.getBar()+"Twister"+RunnerRepository. getBar()+"config"+RunnerRepository.getBar()+ new File(RunnerRepository.REMOTEDATABASECONFIGFILE).getName()); String content = RunnerRepository.getRemoteFileContent(RunnerRepository.REMOTEDATABASECONFIGPATH+RunnerRepository.REMOTEDATABASECONFIGFILE); BufferedWriter writer = new BufferedWriter(new FileWriter(theone)); writer.write(content); writer.close(); try{DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance(); DocumentBuilder db = dbf.newDocumentBuilder(); doc = db.parse(theone); doc.getDocumentElement().normalize(); NodeList nodeLst = doc.getElementsByTagName("server"); tserver.setText(nodeLst.item(0).getChildNodes().item(0).getNodeValue()); nodeLst = doc.getElementsByTagName("database"); tdatabase.setText(nodeLst.item(0).getChildNodes().item(0).getNodeValue()); nodeLst = doc.getElementsByTagName("password"); tpassword.setText(nodeLst.item(0).getChildNodes().item(0).getNodeValue()); if(!tpassword.getPassword().equals(""))tpassword.setText("****"); nodeLst = doc.getElementsByTagName("user"); tuser.setText(nodeLst.item(0).getChildNodes().item(0).getNodeValue());} catch(Exception e){ System.out.println(RunnerRepository.temp+RunnerRepository.getBar()+ 
"Twister"+RunnerRepository.getBar()+"Config"+RunnerRepository.getBar()+new File(RunnerRepository. REMOTEDATABASECONFIGFILE).getName()+" is corrupted or incomplete"); e.printStackTrace();}} catch(Exception e){ //CustomDialog.showInfo(JOptionPane.INFORMATION_MESSAGE, DBConfig.this, "info", e.getMessage()); e.printStackTrace(); System.out.println("Could not refresh dbconfig structure");}}}
/* * Copyright (c) 2013-2015 Cinchapi Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.cinchapi.concourse.util; import java.lang.reflect.Constructor; import java.lang.reflect.Field; import java.lang.reflect.Method; import javax.annotation.Nullable; import com.google.common.base.Throwables; import com.google.common.collect.Lists; /** * A collection of tools for using reflection to access or modify objects. Use * with caution. * * @author Jeff Nelson */ public final class Reflection { /** * Use reflection to call an instance method on {@code obj} with the * specified {@code args}. * * @param obj * @param methodName * @param args * @return the result of calling the method */ @SuppressWarnings("unchecked") public static <T> T call(Object obj, String methodName, Object... args) { // TODO cache method instances try { Class<?> clazz = obj.getClass(); Class<?>[] parameterTypes = new Class<?>[args.length]; Class<?>[] altParameterTypes = new Class<?>[args.length]; for (int i = 0; i < args.length; i++) { parameterTypes[i] = args[i].getClass(); altParameterTypes[i] = unbox(args[i].getClass()); } Method method = null; while (clazz != null && method == null) { try { method = clazz .getDeclaredMethod(methodName, parameterTypes); } catch (NoSuchMethodException e) { try { // Attempt to find a method using the alt param types. 
// This will usually bear fruit in cases where a method // has a primitive type parameter and Java autoboxing // causes the passed in parameters to have a wrapper // type instead of the appropriate primitive type. method = clazz.getDeclaredMethod(methodName, altParameterTypes); } catch (NoSuchMethodException e2) { clazz = clazz.getSuperclass(); } } } if(method != null) { method.setAccessible(true); return (T) method.invoke(obj, args); } else { throw new NoSuchMethodException(); } } catch (ReflectiveOperationException e) { throw Throwables.propagate(e); } } /** * Use reflection to get the value of {@code variableName} from {@code obj}. * This is useful in situations when it is necessary to access an instance * variable that is out of scope. * * @param variableName * @param obj * @return the value of {@code variableName} in {@code obj} if it exists. */ @Nullable @SuppressWarnings("unchecked") public static <T> T get(String variableName, Object obj) { try { Field field = getField(variableName, obj); return (T) field.get(obj); } catch (ReflectiveOperationException e) { throw Throwables.propagate(e); } } /** * Call {@code constructor} with {@code args} and return a new instance of * type {@code T}. * * @param constructor the {@link Constructor} to use for creation * @param args the initialization args to pass to the constructor * @return an instance of the class to which the {@code constructor} belongs */ public static <T> T newInstance(Constructor<? extends T> constructor, Object... args) { try { constructor.setAccessible(true); return constructor.newInstance(args); } catch (ReflectiveOperationException e) { throw Throwables.propagate(e); } } /** * Return a new instance of the specified {@code clazz} by calling the * appropriate constructor with the specified {@code args}. * * @param clazz * @param args * @return the new instance */ @SuppressWarnings("unchecked") public static <T> T newInstance(Class<? extends T> clazz, Object... args) { try { Constructor<? 
extends T> toCall = null; outer: for (Constructor<?> constructor : clazz.getConstructors()) { Class<?>[] paramTypes = constructor.getParameterTypes(); if(paramTypes == null && args == null) { // Handle no arg // constructors toCall = (Constructor<? extends T>) constructor; break; } else if(args == null || paramTypes == null || args.length != paramTypes.length) { continue; } else { for (int i = 0; i < args.length; ++i) { Object arg = args[i]; Class<?> type = paramTypes[i]; if(!type.isAssignableFrom(arg.getClass())) { continue outer; } } toCall = (Constructor<? extends T>) constructor; break; } } if(toCall != null) { toCall.setAccessible(true); return (T) toCall.newInstance(args); } else { throw new NoSuchMethodException("No constructor for " + clazz + " accepts arguments: " + Lists.newArrayList(args)); } } catch (ReflectiveOperationException e) { e.printStackTrace(); throw Throwables.propagate(e); } } /** * Set the value of the field with {@code variableName} to {@code value} in * {@code obj}. * * @param variableName * @param value * @param obj */ public static void set(String variableName, Object value, Object obj) { try { Field field = getField(variableName, obj); field.set(obj, value); } catch (ReflectiveOperationException e) { throw Throwables.propagate(e); } } /** * Return the {@link Field} object} that holds the variable with * {@code name} in {@code obj}, if it exists. Otherwise a * NoSuchFieldException is thrown. * <p> * This method will take care of making the field accessible. 
* </p> * * @param name * @param obj * @return the Field object * @throws NoSuchFieldException */ private static Field getField(String name, Object obj) { try { Class<?> clazz = obj.getClass(); Field field = null; while (clazz != null && field == null) { try { field = clazz.getDeclaredField(name); } catch (NoSuchFieldException e) { // check the parent to see if // the field was defined there clazz = clazz.getSuperclass(); } } if(field != null) { field.setAccessible(true); return field; } else { throw new NoSuchFieldException(); } } catch (ReflectiveOperationException e) { throw Throwables.propagate(e); } } /** * Return the unboxed version of the input {@code clazz}. This is usually * a class that represents a primitive for an autoboxed wrapper class. * Otherwise, the input {@code clazz} is returned. * * @param clazz * @return the alt class */ private static Class<?> unbox(Class<?> clazz) { if(clazz == Integer.class) { return int.class; } else if(clazz == Long.class) { return long.class; } else if(clazz == Byte.class) { return byte.class; } else if(clazz == Short.class) { return short.class; } else if(clazz == Float.class) { return float.class; } else if(clazz == Double.class) { return double.class; } else if(clazz == Boolean.class) { return boolean.class; } else if(clazz == Character.class) { return char.class; } else { return clazz; } } private Reflection() {/* noop */} }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ package org.elasticsearch.ingest.geoip; import com.maxmind.db.InvalidDatabaseException; import org.apache.lucene.search.TotalHits; import org.apache.lucene.util.LuceneTestCase; import org.elasticsearch.Version; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.action.search.SearchResponseSections; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.Metadata; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.node.DiscoveryNodeRole; import org.elasticsearch.cluster.node.DiscoveryNodes; import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.cluster.routing.IndexShardRoutingTable; import org.elasticsearch.cluster.routing.RecoverySource; import org.elasticsearch.cluster.routing.RoutingTable; import org.elasticsearch.cluster.routing.ShardRouting; import org.elasticsearch.cluster.routing.UnassignedInfo; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.hash.MessageDigests; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.core.CheckedConsumer; import org.elasticsearch.core.CheckedRunnable; import org.elasticsearch.core.TimeValue; import org.elasticsearch.index.Index; import org.elasticsearch.index.query.TermQueryBuilder; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.ingest.IngestService; 
import org.elasticsearch.persistent.PersistentTasksCustomMetadata; import org.elasticsearch.search.SearchHit; import org.elasticsearch.search.SearchHits; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.TestThreadPool; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xcontent.XContentBuilder; import org.elasticsearch.xcontent.XContentType; import org.junit.After; import org.junit.Before; import org.mockito.ArgumentCaptor; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.io.UncheckedIOException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Path; import java.security.MessageDigest; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.List; import java.util.Locale; import java.util.Map; import java.util.Set; import java.util.UUID; import java.util.function.Consumer; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; import java.util.zip.GZIPInputStream; import java.util.zip.GZIPOutputStream; import static org.elasticsearch.ingest.geoip.GeoIpProcessorFactoryTests.copyDatabaseFiles; import static org.elasticsearch.persistent.PersistentTasksCustomMetadata.PersistentTask; import static org.elasticsearch.persistent.PersistentTasksCustomMetadata.TYPE; import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.greaterThanOrEqualTo; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; import static org.mockito.Mockito.reset; import static org.mockito.Mockito.times; import static 
org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoMoreInteractions;
import static org.mockito.Mockito.when;

@LuceneTestCase.SuppressFileSystems(value = "ExtrasFS") // Don't randomly add 'extra' files to directory.
public class DatabaseNodeServiceTests extends ESTestCase {

    // Mocked search client; mockSearches() stubs it to serve database chunks.
    private Client client;
    // Node-local scratch dir where downloaded databases are written.
    private Path geoIpTmpDir;
    private ThreadPool threadPool;
    // System under test.
    private DatabaseNodeService databaseNodeService;
    private ResourceWatcherService resourceWatcherService;
    // Mocked; used to verify pipeline-reload interactions.
    private IngestService ingestService;

    // Builds a DatabaseNodeService wired to mocks, with a same-thread
    // executor (Runnable::run) so downloads complete synchronously in tests.
    @Before
    public void setup() throws IOException {
        final Path geoIpConfigDir = createTempDir();
        Files.createDirectories(geoIpConfigDir);
        GeoIpCache cache = new GeoIpCache(1000);
        ConfigDatabases configDatabases = new ConfigDatabases(geoIpConfigDir, cache);
        copyDatabaseFiles(geoIpConfigDir, configDatabases);
        threadPool = new TestThreadPool(ConfigDatabases.class.getSimpleName());
        Settings settings = Settings.builder().put("resource.reload.interval.high", TimeValue.timeValueMillis(100)).build();
        resourceWatcherService = new ResourceWatcherService(settings, threadPool);
        client = mock(Client.class);
        ingestService = mock(IngestService.class);
        geoIpTmpDir = createTempDir();
        databaseNodeService = new DatabaseNodeService(geoIpTmpDir, client, cache, configDatabases, Runnable::run);
        databaseNodeService.initialize("nodeId", resourceWatcherService, ingestService);
    }

    @After
    public void cleanup() {
        resourceWatcherService.close();
        threadPool.shutdownNow();
    }

    // Verifies checkDatabases(): a stale database (lastUpdate in the distant
    // past) is not downloaded; a fresh one is downloaded chunk by chunk and
    // triggers pipeline reloads.
    public void testCheckDatabases() throws Exception {
        String md5 = mockSearches("GeoIP2-City.mmdb", 5, 14);
        String taskId = GeoIpDownloader.GEOIP_DOWNLOADER;
        PersistentTask<?> task = new PersistentTask<>(taskId, GeoIpDownloader.GEOIP_DOWNLOADER, new GeoIpTaskParams(), 1, null);
        // lastUpdate of 10 (ms since epoch) makes the database far older than 30 days.
        task = new PersistentTask<>(task, new GeoIpTaskState(Map.of("GeoIP2-City.mmdb", new GeoIpTaskState.Metadata(10, 5, 14, md5, 10))));
        PersistentTasksCustomMetadata tasksCustomMetadata = new PersistentTasksCustomMetadata(1L, Map.of(taskId, task));
        ClusterState state = ClusterState.builder(new ClusterName("name"))
            .metadata(Metadata.builder().putCustom(TYPE, tasksCustomMetadata).build())
            .nodes(
                new DiscoveryNodes.Builder().add(new DiscoveryNode("_id1", buildNewFakeTransportAddress(), Version.CURRENT))
                    .localNodeId("_id1")
            )
            .routingTable(createIndexRoutingTable())
            .build();
        int numPipelinesToBeReloaded = randomInt(4);
        List<String> pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).collect(Collectors.toList());
        when(ingestService.getPipelineWithProcessorType(any(), any())).thenReturn(pipelineIds);
        assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue());
        // Nothing should be downloaded, since the database is no longer valid (older than 30 days)
        databaseNodeService.checkDatabases(state);
        DatabaseReaderLazyLoader database = databaseNodeService.getDatabase("GeoIP2-City.mmdb");
        assertThat(database, nullValue());
        verify(client, times(0)).search(any());
        verify(ingestService, times(0)).reloadPipeline(anyString());
        try (Stream<Path> files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) {
            assertEquals(0, files.count());
        }
        // Same metadata, but with a current lastUpdate timestamp: now valid.
        task = new PersistentTask<>(
            task,
            new GeoIpTaskState(Map.of("GeoIP2-City.mmdb", new GeoIpTaskState.Metadata(10, 5, 14, md5, System.currentTimeMillis())))
        );
        tasksCustomMetadata = new PersistentTasksCustomMetadata(1L, Map.of(taskId, task));
        state = ClusterState.builder(new ClusterName("name"))
            .metadata(Metadata.builder().putCustom(TYPE, tasksCustomMetadata).build())
            .nodes(
                new DiscoveryNodes.Builder().add(new DiscoveryNode("_id1", buildNewFakeTransportAddress(), Version.CURRENT))
                    .localNodeId("_id1")
            )
            .routingTable(createIndexRoutingTable())
            .build();
        // Database should be downloaded
        databaseNodeService.checkDatabases(state);
        database = databaseNodeService.getDatabase("GeoIP2-City.mmdb");
        assertThat(database, notNullValue());
        // Chunks 5..14 inclusive => 10 search calls.
        verify(client, times(10)).search(any());
        try (Stream<Path> files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) {
            assertThat(files.count(), greaterThanOrEqualTo(1L));
        }
        // First time GeoIP2-City.mmdb is downloaded, so a pipeline reload can happen:
        verify(ingestService, times(numPipelinesToBeReloaded)).reloadPipeline(anyString());
        // 30 days check passed but we mocked mmdb data so parsing will fail
        expectThrows(InvalidDatabaseException.class, database::get);
    }

    // A master-only (non-ingest) node must not download databases at all.
    public void testCheckDatabases_dontCheckDatabaseOnNonIngestNode() throws Exception {
        String md5 = mockSearches("GeoIP2-City.mmdb", 0, 9);
        String taskId = GeoIpDownloader.GEOIP_DOWNLOADER;
        PersistentTask<?> task = new PersistentTask<>(taskId, GeoIpDownloader.GEOIP_DOWNLOADER, new GeoIpTaskParams(), 1, null);
        task = new PersistentTask<>(task, new GeoIpTaskState(Map.of("GeoIP2-City.mmdb", new GeoIpTaskState.Metadata(0L, 0, 9, md5, 10))));
        PersistentTasksCustomMetadata tasksCustomMetadata = new PersistentTasksCustomMetadata(1L, Map.of(taskId, task));
        ClusterState state = ClusterState.builder(new ClusterName("name"))
            .metadata(Metadata.builder().putCustom(TYPE, tasksCustomMetadata).build())
            .nodes(
                new DiscoveryNodes.Builder().add(
                    new DiscoveryNode(
                        "_name1",
                        "_id1",
                        buildNewFakeTransportAddress(),
                        Map.of(),
                        Set.of(DiscoveryNodeRole.MASTER_ROLE),
                        Version.CURRENT
                    )
                ).localNodeId("_id1")
            )
            .routingTable(createIndexRoutingTable())
            .build();
        databaseNodeService.checkDatabases(state);
        assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue());
        verify(client, never()).search(any());
        try (Stream<Path> files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) {
            assertThat(files.collect(Collectors.toList()), empty());
        }
    }

    // Without a routing table entry for the .geoip_databases index, no
    // download should be attempted.
    public void testCheckDatabases_dontCheckDatabaseWhenNoDatabasesIndex() throws Exception {
        String md5 = mockSearches("GeoIP2-City.mmdb", 0, 9);
        String taskId = GeoIpDownloader.GEOIP_DOWNLOADER;
        PersistentTask<?> task = new PersistentTask<>(taskId, GeoIpDownloader.GEOIP_DOWNLOADER, new GeoIpTaskParams(), 1, null);
        task = new PersistentTask<>(task, new GeoIpTaskState(Map.of("GeoIP2-City.mmdb", new GeoIpTaskState.Metadata(0L, 0, 9, md5, 10))));
        PersistentTasksCustomMetadata tasksCustomMetadata = new PersistentTasksCustomMetadata(1L, Map.of(taskId, task));
        ClusterState state = ClusterState.builder(new ClusterName("name"))
            .metadata(Metadata.builder().putCustom(TYPE, tasksCustomMetadata).build())
            .nodes(
                new DiscoveryNodes.Builder().add(new DiscoveryNode("_id1", buildNewFakeTransportAddress(), Version.CURRENT))
                    .localNodeId("_id1")
            )
            .build();
        databaseNodeService.checkDatabases(state);
        assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue());
        verify(client, never()).search(any());
        try (Stream<Path> files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) {
            assertThat(files.collect(Collectors.toList()), empty());
        }
    }

    // Without a geoip downloader persistent task in the cluster state, no
    // download should be attempted.
    public void testCheckDatabases_dontCheckDatabaseWhenGeoIpDownloadTask() throws Exception {
        PersistentTasksCustomMetadata tasksCustomMetadata = new PersistentTasksCustomMetadata(0L, Map.of());
        ClusterState state = ClusterState.builder(new ClusterName("name"))
            .metadata(Metadata.builder().putCustom(TYPE, tasksCustomMetadata).build())
            .nodes(
                new DiscoveryNodes.Builder().add(new DiscoveryNode("_id1", buildNewFakeTransportAddress(), Version.CURRENT))
                    .localNodeId("_id1")
            )
            .routingTable(createIndexRoutingTable())
            .build();
        mockSearches("GeoIP2-City.mmdb", 0, 9);
        databaseNodeService.checkDatabases(state);
        assertThat(databaseNodeService.getDatabase("GeoIP2-City.mmdb"), nullValue());
        verify(client, never()).search(any());
        try (Stream<Path> files = Files.list(geoIpTmpDir.resolve("geoip-databases").resolve("nodeId"))) {
            assertThat(files.collect(Collectors.toList()), empty());
        }
    }

    // Happy path: all 30 chunks are retrieved, each handed to the chunk
    // consumer, and the completion handler fires exactly once.
    public void testRetrieveDatabase() throws Exception {
        String md5 = mockSearches("_name", 0, 29);
        GeoIpTaskState.Metadata metadata = new GeoIpTaskState.Metadata(-1, 0, 29, md5, 10);
        @SuppressWarnings("unchecked")
        CheckedConsumer<byte[], IOException> chunkConsumer = mock(CheckedConsumer.class);
        @SuppressWarnings("unchecked")
        CheckedRunnable<Exception> completedHandler = mock(CheckedRunnable.class);
        @SuppressWarnings("unchecked")
        Consumer<Exception> failureHandler = mock(Consumer.class);
        databaseNodeService.retrieveDatabase("_name", md5, metadata, chunkConsumer, completedHandler, failureHandler);
        verify(failureHandler, never()).accept(any());
        verify(chunkConsumer, times(30)).accept(any());
        verify(completedHandler, times(1)).run();
        verify(client, times(30)).search(any());
    }

    // When the expected md5 does not match the downloaded content, the
    // failure handler must be invoked and completion must not fire.
    public void testRetrieveDatabaseCorruption() throws Exception {
        String md5 = mockSearches("_name", 0, 9);
        String incorrectMd5 = "different";
        GeoIpTaskState.Metadata metadata = new GeoIpTaskState.Metadata(-1, 0, 9, incorrectMd5, 10);
        @SuppressWarnings("unchecked")
        CheckedConsumer<byte[], IOException> chunkConsumer = mock(CheckedConsumer.class);
        @SuppressWarnings("unchecked")
        CheckedRunnable<Exception> completedHandler = mock(CheckedRunnable.class);
        @SuppressWarnings("unchecked")
        Consumer<Exception> failureHandler = mock(Consumer.class);
        databaseNodeService.retrieveDatabase("_name", incorrectMd5, metadata, chunkConsumer, completedHandler, failureHandler);
        ArgumentCaptor<Exception> exceptionCaptor = ArgumentCaptor.forClass(Exception.class);
        verify(failureHandler, times(1)).accept(exceptionCaptor.capture());
        assertThat(exceptionCaptor.getAllValues().size(), equalTo(1));
        assertThat(
            exceptionCaptor.getAllValues().get(0).getMessage(),
            equalTo("expected md5 hash [different], " + "but got md5 hash [" + md5 + "]")
        );
        verify(chunkConsumer, times(10)).accept(any());
        verify(completedHandler, times(0)).run();
        verify(client, times(10)).search(any());
    }

    // The first updateDatabase() for a name reloads matching pipelines;
    // subsequent updates for the same name/md5 must not.
    public void testUpdateDatabase() throws Exception {
        int numPipelinesToBeReloaded = randomInt(4);
        List<String> pipelineIds = IntStream.range(0, numPipelinesToBeReloaded).mapToObj(String::valueOf).collect(Collectors.toList());
        when(ingestService.getPipelineWithProcessorType(any(), any())).thenReturn(pipelineIds);
        databaseNodeService.updateDatabase("_name", "_md5", geoIpTmpDir.resolve("some-file"));
        // Updating the first time may trigger a reload.
        verify(ingestService, times(1)).addIngestClusterStateListener(any());
        verify(ingestService, times(1)).getPipelineWithProcessorType(any(), any());
        verify(ingestService, times(numPipelinesToBeReloaded)).reloadPipeline(anyString());
        verifyNoMoreInteractions(ingestService);
        reset(ingestService);
        // Subsequent updates shouldn't trigger a reload.
        databaseNodeService.updateDatabase("_name", "_md5", geoIpTmpDir.resolve("some-file"));
        verifyNoMoreInteractions(ingestService);
    }

    // Stubs client.search() to serve gzipped tar chunks for the given
    // database, one SearchResponse per chunk id in [firstChunk, lastChunk].
    // Returns the md5 of the served content (computed over all chunks).
    private String mockSearches(String databaseName, int firstChunk, int lastChunk) throws IOException {
        String dummyContent = "test: " + databaseName;
        List<byte[]> data = gzip(databaseName, dummyContent, lastChunk - firstChunk + 1);
        assertThat(gunzip(data), equalTo(dummyContent));
        Map<String, ActionFuture<SearchResponse>> requestMap = new HashMap<>();
        for (int i = firstChunk; i <= lastChunk; i++) {
            byte[] chunk = data.get(i - firstChunk);
            SearchHit hit = new SearchHit(i);
            try (XContentBuilder builder = XContentBuilder.builder(XContentType.SMILE.xContent())) {
                builder.map(Map.of("data", chunk));
                builder.flush();
                ByteArrayOutputStream outputStream = (ByteArrayOutputStream) builder.getOutputStream();
                hit.sourceRef(new BytesArray(outputStream.toByteArray()));
            } catch (IOException ex) {
                throw new UncheckedIOException(ex);
            }
            SearchHits hits = new SearchHits(new SearchHit[] { hit }, new TotalHits(1, TotalHits.Relation.EQUAL_TO), 1f);
            SearchResponse searchResponse = new SearchResponse(
                new SearchResponseSections(hits, null, null, false, null, null, 0),
                null,
                1,
                1,
                0,
                1L,
                null,
                null
            );
            @SuppressWarnings("unchecked")
            ActionFuture<SearchResponse> actionFuture = mock(ActionFuture.class);
            when(actionFuture.actionGet()).thenReturn(searchResponse);
            requestMap.put(databaseName + "_" + i, actionFuture);
        }
        // NOTE(review): the lookup strips the suffix after the last '_' from
        // the queried document id before consulting requestMap — presumably
        // real ids carry an extra trailing component beyond "<name>_<chunk>";
        // confirm against GeoIpDownloader's document id format.
        when(client.search(any())).thenAnswer(invocationOnMock -> {
            SearchRequest req = (SearchRequest) invocationOnMock.getArguments()[0];
            TermQueryBuilder term = (TermQueryBuilder) req.source().query();
            String id = (String) term.value();
            return requestMap.get(id.substring(0, id.lastIndexOf('_')));
        });
        MessageDigest md = MessageDigests.md5();
        data.forEach(md::update);
        return MessageDigests.toHexString(md.digest());
    }

    // Minimal routing table with a single started shard for the geoip
    // databases index, so checkDatabases() considers the index available.
    private static RoutingTable createIndexRoutingTable() {
        Index index = new Index(GeoIpDownloader.DATABASES_INDEX, UUID.randomUUID().toString());
        ShardRouting shardRouting = ShardRouting.newUnassigned(
            new ShardId(index, 0),
            true,
            RecoverySource.ExistingStoreRecoverySource.INSTANCE,
            new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, "")
        );
        String nodeId = ESTestCase.randomAlphaOfLength(8);
        IndexShardRoutingTable table = new IndexShardRoutingTable.Builder(new ShardId(index, 0)).addShard(
            shardRouting.initialize(nodeId, null, shardRouting.getExpectedShardSize()).moveToStarted()
        ).build();
        return RoutingTable.builder().add(IndexRoutingTable.builder(index).addIndexShard(table).build()).build();
    }

    // Builds a gzipped, tar-like payload (512-byte header with name and octal
    // size fields) for `content` and splits it into exactly `chunks` pieces.
    private static List<byte[]> gzip(String name, String content, int chunks) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        GZIPOutputStream gzipOutputStream = new GZIPOutputStream(bytes);
        byte[] header = new byte[512];
        byte[] nameBytes = name.getBytes(StandardCharsets.UTF_8);
        byte[] contentBytes = content.getBytes(StandardCharsets.UTF_8);
        byte[] sizeBytes = String.format(Locale.ROOT, "%1$012o", contentBytes.length).getBytes(StandardCharsets.UTF_8);
        System.arraycopy(nameBytes, 0, header, 0, nameBytes.length);
        System.arraycopy(sizeBytes, 0, header, 124, 12);
        gzipOutputStream.write(header);
        gzipOutputStream.write(contentBytes);
        // NOTE(review): this writes a SINGLE byte whose value is
        // 512 - contentBytes.length; a padding array
        // (new byte[512 - contentBytes.length]) may have been intended —
        // confirm against the tar record-padding expectation.
        gzipOutputStream.write(512 - contentBytes.length);
        gzipOutputStream.write(new byte[512]);
        gzipOutputStream.write(new byte[512]);
        gzipOutputStream.close();
        byte[] all = bytes.toByteArray();
        int chunkSize = all.length / chunks;
        List<byte[]> data = new ArrayList<>();
        for (int from = 0; from < all.length;) {
            int to = from + chunkSize;
            if (to > all.length) {
                to = all.length;
            }
            data.add(Arrays.copyOfRange(all, from, to));
            from = to;
        }
        // Merge trailing remainders so exactly `chunks` pieces remain.
        while (data.size() > chunks) {
            byte[] last = data.remove(data.size() - 1);
            byte[] secondLast = data.remove(data.size() - 1);
            byte[] merged = new byte[secondLast.length + last.length];
            System.arraycopy(secondLast, 0, merged, 0, secondLast.length);
            System.arraycopy(last, 0, merged, secondLast.length, last.length);
            data.add(merged);
        }
        assert data.size() == chunks;
        return data;
    }

    // Inverse of gzip(): reassembles the chunks and extracts the first tar
    // entry's content as a UTF-8 string.
    private static String gunzip(List<byte[]> chunks) throws IOException {
        byte[] gzippedContent = new byte[chunks.stream().mapToInt(value -> value.length).sum()];
        int written = 0;
        for (byte[] chunk : chunks) {
            System.arraycopy(chunk, 0, gzippedContent, written, chunk.length);
            written += chunk.length;
        }
        TarInputStream gzipInputStream = new TarInputStream(new GZIPInputStream(new ByteArrayInputStream(gzippedContent)));
        gzipInputStream.getNextEntry();
        return Streams.readFully(gzipInputStream).utf8ToString();
    }

}
package org.testng.junit;

import org.testng.ITestMethodFinder;
import org.testng.ITestNGMethod;
import org.testng.collections.Lists;
import org.testng.internal.TestNGMethod;
import org.testng.internal.annotations.IAnnotationFinder;
import org.testng.xml.XmlTest;

import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

/**
 * This class locates all test and configuration methods according to JUnit.
 * It is used to change the strategy used by TestRunner to locate its test
 * methods.
 *
 * @author Cedric Beust, May 3, 2004
 *
 */
public class JUnitMethodFinder implements ITestMethodFinder {
  // Name passed to a one-String-argument constructor of the test class, if
  // the class declares one (see instantiate()).
  private String m_testName = null;
  // Used by TestNGMethod to read annotations off each discovered method.
  private IAnnotationFinder m_annotationFinder = null;

  public JUnitMethodFinder(String testName, IAnnotationFinder finder) {
    m_testName = testName;
    m_annotationFinder = finder;
  }

  /**
   * Returns the public constructor of cls matching the given parameter
   * types, or null if there is no such constructor or it is inaccessible.
   * Failures are deliberately swallowed: callers fall back to the no-arg
   * constructor.
   */
  private Constructor findConstructor(Class cls, Class[] parameters) {
    Constructor result = null;
    try {
      result = cls.getConstructor(parameters);
    }
    catch (SecurityException ex) {
      // ignore
    }
    catch (NoSuchMethodException ex) {
      // ignore
    }
    return result;
  }

  @Override
  public ITestNGMethod[] getTestMethods(Class cls, XmlTest xmlTest) {
    // JUnit 3 convention: a test method is parameter-less and its name
    // starts with "test".
    ITestNGMethod[] result = privateFindTestMethods(new INameFilter() {
        @Override
        public boolean accept(Method method) {
          return method.getName().startsWith("test") && method.getParameterTypes().length == 0;
        }
      }, cls);

//    ppp("=====");
//    ppp("FIND TEST METHOD RETURNING ");
//    for (ITestMethod m : result) {
//      ppp("  " + m);
//    }
//    ppp("=====");
    return result;
  }

  /**
   * Walks cls and its superclasses (up to, but not including, Object) and
   * returns every declared method the filter accepts, wrapped as an
   * ITestNGMethod. Each method name is accepted at most once so that an
   * overridden method (e.g. setUp) is not collected twice.
   */
  private ITestNGMethod[] privateFindTestMethods(INameFilter filter, Class cls) {
    List<ITestNGMethod> vResult = Lists.newArrayList();

    // We do not want to walk up the class hierarchy and accept the
    // same method twice (e.g. setUp) which would lead to double-invocation.
    // All relevant JUnit methods are parameter-less so we store accepted
    // method names in a Set to filter out duplicates.
    Set<String> acceptedMethodNames = new HashSet<String>();

    //
    // Collect all methods that start with test
    //
    Class current = cls;
    while(!(current == Object.class)) {
      Method[] allMethods = current.getDeclaredMethods();
      for(Method allMethod : allMethods) {
        ITestNGMethod m = new TestNGMethod(/* allMethods[i].getDeclaringClass(), */ allMethod,
            m_annotationFinder, null, null); /* @@@ */
        Method method = m.getMethod();
        String methodName = method.getName();
        if(filter.accept(method) && !acceptedMethodNames.contains(methodName)) {
//        if (m.getName().startsWith("test")) {
//          ppp("Found JUnit test method: " + tm);
          vResult.add(m);
          acceptedMethodNames.add(methodName);
        }
      }
      // Continue with the superclass so inherited test methods are found too.
      current = current.getSuperclass();
    }

    return vResult.toArray(new ITestNGMethod[vResult.size()]);
  }

  // Debug helper.
  private static void ppp(String s) {
    System.out.println("[JUnitMethodFinder] " + s);
  }

  /**
   * Instantiates cls, preferring a constructor taking the test name as a
   * single String argument (the JUnit 3 style) and falling back to the
   * no-arg constructor. Returns null if instantiation fails; failures are
   * reported on stderr.
   */
  private Object instantiate(Class cls) {
    Object result = null;

    Constructor ctor = findConstructor(cls, new Class[] { String.class });
    try {
      if (null != ctor) {
        result = ctor.newInstance(new Object[] { m_testName });
      }
      else {
        ctor = cls.getConstructor(new Class[0]);
        result = ctor.newInstance(new Object[0]);
      }
    }
    catch (IllegalArgumentException ex) {
      ex.printStackTrace();
    }
    catch (SecurityException ex) {
      ex.printStackTrace();
    }
    catch (InstantiationException ex) {
      System.err.println("Couldn't find a constructor with a String parameter on your JUnit test class.");
      ex.printStackTrace();
    }
    catch (IllegalAccessException ex) {
      ex.printStackTrace();
    }
    catch (InvocationTargetException ex) {
      ex.printStackTrace();
    }
    catch (NoSuchMethodException ex) {
      ex.printStackTrace();
    }

    return result;
  }

  @Override
  public ITestNGMethod[] getBeforeTestMethods(Class cls) {
    // JUnit 3's per-test set-up method is always called "setUp".
    ITestNGMethod[] result = privateFindTestMethods(new INameFilter() {
        @Override
        public boolean accept(Method method) {
          return "setUp".equals(method.getName());
        }
      }, cls);

    return result;
  }

  @Override
  public ITestNGMethod[] getAfterTestMethods(Class cls) {
    // JUnit 3's per-test tear-down method is always called "tearDown".
    ITestNGMethod[] result = privateFindTestMethods(new INameFilter() {
        @Override
        public boolean accept(Method method) {
          return "tearDown".equals(method.getName());
        }
      }, cls);

    return result;
  }

  //
  // JUnit has no equivalent of the following TestNG configuration methods,
  // so they all return empty arrays.
  //

  @Override
  public ITestNGMethod[] getAfterClassMethods(Class cls) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getBeforeClassMethods(Class cls) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getBeforeSuiteMethods(Class cls) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getAfterSuiteMethods(Class cls) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getBeforeTestConfigurationMethods(Class testClass) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getAfterTestConfigurationMethods(Class testClass) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getBeforeGroupsConfigurationMethods(Class testClass) {
    return new ITestNGMethod[0];
  }

  @Override
  public ITestNGMethod[] getAfterGroupsConfigurationMethods(Class testClass) {
    return new ITestNGMethod[0];
  }
}

/////////////

/** Predicate used by JUnitMethodFinder to select methods by reflection. */
interface INameFilter {
  public boolean accept(Method method);
}
/* $This file is distributed under the terms of the license in LICENSE$ */

package edu.cornell.mannlib.vitro.webapp.controller;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import javax.servlet.ServletException;
import javax.servlet.annotation.WebServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import edu.cornell.mannlib.vitro.webapp.utils.JSPPageHandler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntResource;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.shared.Lock;

import edu.cornell.mannlib.vitro.webapp.beans.ApplicationBean;
import edu.cornell.mannlib.vitro.webapp.modelaccess.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;

/**
 * Serves linked-data descriptions of ontology resources requested under
 * /ontology/*. Content negotiation is performed on the Accept header and on
 * format-specific URL suffixes (.rdf, .n3, .ttl); "pretty" URIs are
 * redirected (303 See Other) to their format-specific counterparts.
 */
@WebServlet(name = "ontology", urlPatterns = {"/ontology/*"})
public class OntologyController extends VitroHttpServlet{

    private static final Log log = LogFactory.getLog(OntologyController.class.getName());

    /** POST is handled exactly like GET. */
    @Override
    public void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        doGet(request, response);
    }

    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse res)
            throws ServletException, IOException {
        super.doGet(req, res);

        // Get the URL without hostname or servlet context.
        String url = req.getRequestURI().substring(req.getContextPath().length());

        String redirectURL = checkForRedirect(url, req.getHeader("accept"));
        if (redirectURL != null) {
            doRedirect(req, res, redirectURL);
            return;
        }

        ContentType rdfFormat = checkForLinkedDataRequest(url, req.getHeader("accept"));
        if (rdfFormat != null) {
            doRdf(req, res, rdfFormat);
        }
    }

    // The "." before the extension is escaped: with a bare "." (any char),
    // a URL such as "/ontology/x/foordf" would also have matched as an RDF
    // request.
    private static final Pattern RDF_REQUEST = Pattern.compile("^/ontology/([^/]*)/([^/]*)\\.rdf$");
    private static final Pattern N3_REQUEST = Pattern.compile("^/ontology/([^/]*)/([^/]*)\\.n3$");
    private static final Pattern TTL_REQUEST = Pattern.compile("^/ontology/([^/]*)/([^/]*)\\.ttl$");
    private static final Pattern HTML_REQUEST = Pattern.compile("^/ontology/([^/]*)$");

    /**
     * Determines which RDF serialization, if any, was requested, first via
     * the Accept header, then via a format-specific URL suffix. Never
     * returns null: in the absence of a useful HTML representation the
     * default is RDF/XML.
     */
    protected ContentType checkForLinkedDataRequest(String url, String acceptHeader) {
        try {
            // Check the accept header first.
            if (acceptHeader != null) {
                List<ContentType> actualContentTypes = new ArrayList<ContentType>();
                actualContentTypes.add(new ContentType( XHTML_MIMETYPE ));
                actualContentTypes.add(new ContentType( HTML_MIMETYPE ));
                actualContentTypes.add(new ContentType( RDFXML_MIMETYPE ));
                actualContentTypes.add(new ContentType( N3_MIMETYPE ));
                actualContentTypes.add(new ContentType( TTL_MIMETYPE ));

                ContentType best = ContentType.getBestContentType(acceptHeader, actualContentTypes);
                if (best != null
                        && (RDFXML_MIMETYPE.equals(best.getMediaType())
                            || N3_MIMETYPE.equals(best.getMediaType())
                            || TTL_MIMETYPE.equals(best.getMediaType()))) {
                    return best;
                }
            }

            /*
             * Check for parts of the URL that indicate a request for RDF:
             *   http://vivo.cornell.edu/ontology/(ontologyname)/n23.rdf
             *   http://vivo.cornell.edu/ontology/(ontologyname)/n23.n3
             *   http://vivo.cornell.edu/ontology/(ontologyname)/n23.ttl
             */
            Matcher m = RDF_REQUEST.matcher(url);
            if (m.matches()) {
                return new ContentType(RDFXML_MIMETYPE);
            }
            m = N3_REQUEST.matcher(url);
            if (m.matches()) {
                return new ContentType(N3_MIMETYPE);
            }
            m = TTL_REQUEST.matcher(url);
            if (m.matches()) {
                return new ContentType(TTL_MIMETYPE);
            }
        } catch (Throwable th) {
            log.error("problem while checking accept header ", th);
        }

        // Returning null would default to html in the calling method.
        // But since we don't have a useful html representation yet,
        // we're going to default to returning RDF/XML.
        return new ContentType(RDFXML_MIMETYPE);
    }

    /**
     * Writes a SPARQL DESCRIBE of the requested ontology resource to the
     * response in the requested serialization, or renders a 404 page when
     * the resource is not present in the ontology model.
     */
    private void doRdf(HttpServletRequest req, HttpServletResponse res, ContentType rdfFormat)
            throws IOException, ServletException {
        VitroRequest vreq = new VitroRequest(req);

        // Split the request URL into the ontology URI and the local part,
        // stripping any format extension (.rdf/.n3/.ttl) from the latter.
        int index = vreq.getRequestURL().lastIndexOf("/");
        String ontology = vreq.getRequestURL().substring(0, index);
        String classOrProperty = vreq.getRequestURL().substring(index + 1);
        if (classOrProperty.lastIndexOf(".") != -1) {
            int indexx = classOrProperty.lastIndexOf(".");
            classOrProperty = classOrProperty.substring(0, indexx);
        }
        String url = ontology;

        OntModel ontModel = ModelAccess.on(getServletContext()).getOntModel();

        boolean found = false;
        Model newModel = ModelFactory.createDefaultModel();
        ontModel.enterCriticalSection(Lock.READ);
        try {
            OntResource ontResource = ontModel.getOntResource(url);
            if (ontResource == null) {
                // Ontology URIs sometimes carry a trailing slash; retry with one.
                ontResource = ontModel.getOntResource(url + "/");
            }
            if (ontResource != null) {
                found = true;
                Resource resource = (Resource) ontResource;
                QueryExecution qexec = null;
                try {
                    String queryString = "Describe <" + resource.getURI() + ">";
                    qexec = QueryExecutionFactory.create(QueryFactory.create(queryString), ontModel);
                    newModel = qexec.execDescribe();
                } finally {
                    // Guard against an NPE: if QueryFactory.create() throws,
                    // qexec is still null when this finally block runs.
                    if (qexec != null) {
                        qexec.close();
                    }
                }
            } else {
                found = false;
            }
        } finally {
            ontModel.leaveCriticalSection();
        }

        if (!found) {
            // Respond to HTTP outside of the critical section.
            doNotFound(req, res);
        } else {
            JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
            res.setContentType(rdfFormat.getMediaType());
            String format = "";
            if (RDFXML_MIMETYPE.equals(rdfFormat.getMediaType())) {
                format = "RDF/XML";
            } else if (N3_MIMETYPE.equals(rdfFormat.getMediaType())) {
                format = "N3";
            } else if (TTL_MIMETYPE.equals(rdfFormat.getMediaType())) {
                format = "TTL";
            }
            newModel.write(res.getOutputStream(), format);
        }
    }

    private static final Pattern URI_PATTERN = Pattern.compile("^/ontology/([^/]*)/([^/]*)$");

    // Redirect if the request is for http://hostname/individual/localname
    //   if accept is nothing or text/html redirect to ???
    //   if accept is some RDF thing redirect to the URL for RDF
    private String checkForRedirect(String url, String acceptHeader) {
        ContentType c = checkForLinkedDataRequest(url, acceptHeader);
        Matcher m = URI_PATTERN.matcher(url);
        if (m.matches() && m.groupCount() <= 2) {
            String group2 = "";
            if (m.group(2).indexOf(".") != -1) {
                group2 = m.group(2).substring(0, m.group(2).indexOf("."));
                // Debug output belongs on the logger, not stdout.
                log.debug("group2 " + group2);
                log.debug("group1 " + m.group(1));
            }
            if (c != null && !group2.trim().equals(m.group(1).trim())) {
                String redirectUrl = null;
                // The null check must come before isEmpty(); the original
                // order dereferenced the group before testing it for null.
                if (m.group(2) == null || m.group(2).isEmpty()) {
                    redirectUrl = "/ontology/" + m.group(1) + "/" + m.group(1);
                } else {
                    redirectUrl = "/ontology/" + m.group(1) + "/" + m.group(2) + "/" + m.group(2);
                }
                if (RDFXML_MIMETYPE.equals(c.getMediaType())) {
                    return redirectUrl + ".rdf";
                } else if (N3_MIMETYPE.equals(c.getMediaType())) {
                    return redirectUrl + ".n3";
                } else if (TTL_MIMETYPE.equals(c.getMediaType())) {
                    return redirectUrl + ".ttl";
                } // else send them to html
            }
            // else redirect to HTML representation
            return null;
        } else {
            return null;
        }
    }

    /**
     * Sends a 303 See Other pointing at the given context-relative URL,
     * preserving the scheme (HTTP/HTTPS) of the incoming request.
     */
    private void doRedirect(HttpServletRequest req, HttpServletResponse res, String redirectURL)
            throws IOException {
        // It seems like there must be a more standard way to do a redirect in tomcat.
        String hn = req.getHeader("Host");
        if (req.isSecure()) {
            res.setHeader("Location", res.encodeURL("https://" + hn + req.getContextPath() + redirectURL));
            log.info("doRedirect by using HTTPS");
        } else {
            res.setHeader("Location", res.encodeURL("http://" + hn + req.getContextPath() + redirectURL));
            log.info("doRedirect by using HTTP");
        }
        // SC_SEE_OTHER is a static constant; reference it via the class
        // rather than through the response instance.
        res.setStatus(HttpServletResponse.SC_SEE_OTHER);
    }

    /** Renders the "entity not found" JSP with a 404 status. */
    private void doNotFound(HttpServletRequest req, HttpServletResponse res)
            throws IOException, ServletException {
        VitroRequest vreq = new VitroRequest(req);
        ApplicationBean appBean = vreq.getAppBean();

        // Set title before we do the highlighting so we don't get markup in it.
        req.setAttribute("title", "not found");
        res.setStatus(HttpServletResponse.SC_NOT_FOUND);

        String css = "<link rel=\"stylesheet\" type=\"text/css\" media=\"screen\" href=\""
                + appBean.getThemeDir() + "css/entity.css\"/>"
                + "<script language='JavaScript' type='text/javascript' src='js/toggle.js'></script>";
        req.setAttribute("css", css);

        JSPPageHandler.renderBasicPage(req, res, "/" + Controllers.ENTITY_NOT_FOUND_JSP);
    }
}
package de.danoeh.antennapod.activity; import android.content.DialogInterface; import android.content.Intent; import android.graphics.PixelFormat; import android.media.AudioManager; import android.net.Uri; import android.os.Bundle; import android.support.v7.app.ActionBarActivity; import android.support.v7.app.AlertDialog; import android.util.Log; import android.view.Menu; import android.view.MenuInflater; import android.view.MenuItem; import android.view.View; import android.widget.ImageButton; import android.widget.SeekBar; import android.widget.SeekBar.OnSeekBarChangeListener; import android.widget.TextView; import com.afollestad.materialdialogs.MaterialDialog; import de.danoeh.antennapod.R; import de.danoeh.antennapod.core.feed.FeedItem; import de.danoeh.antennapod.core.feed.FeedMedia; import de.danoeh.antennapod.core.preferences.UserPreferences; import de.danoeh.antennapod.core.service.playback.PlaybackService; import de.danoeh.antennapod.core.storage.DBTasks; import de.danoeh.antennapod.core.util.Converter; import de.danoeh.antennapod.core.util.ShareUtils; import de.danoeh.antennapod.core.util.StorageUtils; import de.danoeh.antennapod.core.util.playback.MediaPlayerError; import de.danoeh.antennapod.core.util.playback.Playable; import de.danoeh.antennapod.core.util.playback.PlaybackController; import de.danoeh.antennapod.dialog.SleepTimerDialog; /** * Provides general features which are both needed for playing audio and video * files. 
*/ public abstract class MediaplayerActivity extends ActionBarActivity implements OnSeekBarChangeListener { private static final String TAG = "MediaplayerActivity"; protected PlaybackController controller; protected TextView txtvPosition; protected TextView txtvLength; protected SeekBar sbPosition; protected ImageButton butPlay; protected ImageButton butRev; protected TextView txtvRev; protected ImageButton butFF; protected TextView txtvFF; private PlaybackController newPlaybackController() { return new PlaybackController(this, false) { @Override public void setupGUI() { MediaplayerActivity.this.setupGUI(); } @Override public void onPositionObserverUpdate() { MediaplayerActivity.this.onPositionObserverUpdate(); } @Override public void onBufferStart() { MediaplayerActivity.this.onBufferStart(); } @Override public void onBufferEnd() { MediaplayerActivity.this.onBufferEnd(); } @Override public void onBufferUpdate(float progress) { MediaplayerActivity.this.onBufferUpdate(progress); } @Override public void handleError(int code) { MediaplayerActivity.this.handleError(code); } @Override public void onReloadNotification(int code) { MediaplayerActivity.this.onReloadNotification(code); } @Override public void onSleepTimerUpdate() { supportInvalidateOptionsMenu(); } @Override public ImageButton getPlayButton() { return butPlay; } @Override public void postStatusMsg(int msg) { MediaplayerActivity.this.postStatusMsg(msg); } @Override public void clearStatusMsg() { MediaplayerActivity.this.clearStatusMsg(); } @Override public boolean loadMediaInfo() { return MediaplayerActivity.this.loadMediaInfo(); } @Override public void onAwaitingVideoSurface() { MediaplayerActivity.this.onAwaitingVideoSurface(); } @Override public void onServiceQueried() { MediaplayerActivity.this.onServiceQueried(); } @Override public void onShutdownNotification() { finish(); } @Override public void onPlaybackEnd() { finish(); } @Override public void onPlaybackSpeedChange() { 
MediaplayerActivity.this.onPlaybackSpeedChange(); } @Override protected void setScreenOn(boolean enable) { super.setScreenOn(enable); MediaplayerActivity.this.setScreenOn(enable); } }; } protected void onPlaybackSpeedChange() { } protected void onServiceQueried() { supportInvalidateOptionsMenu(); } protected void chooseTheme() { setTheme(UserPreferences.getTheme()); } protected void setScreenOn(boolean enable) { } @Override protected void onCreate(Bundle savedInstanceState) { chooseTheme(); super.onCreate(savedInstanceState); Log.d(TAG, "onCreate()"); StorageUtils.checkStorageAvailability(this); setVolumeControlStream(AudioManager.STREAM_MUSIC); orientation = getResources().getConfiguration().orientation; getWindow().setFormat(PixelFormat.TRANSPARENT); } @Override protected void onPause() { super.onPause(); controller.reinitServiceIfPaused(); controller.pause(); } /** * Should be used to switch to another player activity if the mime type is * not the correct one for the current activity. */ protected abstract void onReloadNotification(int notificationCode); /** * Should be used to inform the user that the PlaybackService is currently * buffering. */ protected abstract void onBufferStart(); /** * Should be used to hide the view that was showing the 'buffering'-message. */ protected abstract void onBufferEnd(); protected void onBufferUpdate(float progress) { if (sbPosition != null) { sbPosition.setSecondaryProgress((int) progress * sbPosition.getMax()); } } /** * Current screen orientation. 
*/ protected int orientation; @Override protected void onStart() { super.onStart(); if (controller != null) { controller.release(); } controller = newPlaybackController(); } @Override protected void onStop() { super.onStop(); Log.d(TAG, "onStop()"); if (controller != null) { controller.release(); } } @Override protected void onDestroy() { super.onDestroy(); Log.d(TAG, "onDestroy()"); } @Override public boolean onCreateOptionsMenu(Menu menu) { super.onCreateOptionsMenu(menu); MenuInflater inflater = getMenuInflater(); inflater.inflate(R.menu.mediaplayer, menu); return true; } @Override public boolean onPrepareOptionsMenu(Menu menu) { super.onPrepareOptionsMenu(menu); Playable media = controller.getMedia(); menu.findItem(R.id.support_item).setVisible( media != null && media.getPaymentLink() != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem().getFlattrStatus().flattrable() ); boolean hasWebsiteLink = media != null && media.getWebsiteLink() != null; menu.findItem(R.id.visit_website_item).setVisible(hasWebsiteLink); boolean isItemAndHasLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getItem().getLink() != null; menu.findItem(R.id.share_link_item).setVisible(isItemAndHasLink); menu.findItem(R.id.share_link_with_position_item).setVisible(isItemAndHasLink); boolean isItemHasDownloadLink = media != null && (media instanceof FeedMedia) && ((FeedMedia) media).getDownload_url() != null; menu.findItem(R.id.share_download_url_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_download_url_with_position_item).setVisible(isItemHasDownloadLink); menu.findItem(R.id.share_item).setVisible(hasWebsiteLink || isItemAndHasLink || isItemHasDownloadLink); menu.findItem(R.id.skip_episode_item).setVisible(media != null); boolean sleepTimerSet = controller.sleepTimerActive(); boolean sleepTimerNotSet = controller.sleepTimerNotActive(); menu.findItem(R.id.set_sleeptimer_item).setVisible(sleepTimerNotSet); 
menu.findItem(R.id.disable_sleeptimer_item).setVisible(sleepTimerSet); return true; } @Override public boolean onOptionsItemSelected(MenuItem item) { Playable media = controller.getMedia(); if (item.getItemId() == android.R.id.home) { Intent intent = new Intent(MediaplayerActivity.this, MainActivity.class); intent.addFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP | Intent.FLAG_ACTIVITY_NEW_TASK); startActivity(intent); return true; } else if (media != null) { switch (item.getItemId()) { case R.id.disable_sleeptimer_item: if (controller.serviceAvailable()) { MaterialDialog.Builder stDialog = new MaterialDialog.Builder(this); stDialog.title(R.string.sleep_timer_label); stDialog.content(getString(R.string.time_left_label) + Converter.getDurationStringLong((int) controller .getSleepTimerTimeLeft())); stDialog.positiveText(R.string.disable_sleeptimer_label); stDialog.negativeText(R.string.cancel_label); stDialog.callback(new MaterialDialog.ButtonCallback() { @Override public void onPositive(MaterialDialog dialog) { dialog.dismiss(); controller.disableSleepTimer(); } @Override public void onNegative(MaterialDialog dialog) { dialog.dismiss(); } }); stDialog.build().show(); } break; case R.id.set_sleeptimer_item: if (controller.serviceAvailable()) { SleepTimerDialog td = new SleepTimerDialog(this) { @Override public void onTimerSet(long millis, boolean shakeToReset, boolean vibrate) { controller.setSleepTimer(millis, shakeToReset, vibrate); } }; td.createNewDialog().show(); } break; case R.id.visit_website_item: Uri uri = Uri.parse(media.getWebsiteLink()); startActivity(new Intent(Intent.ACTION_VIEW, uri)); break; case R.id.support_item: if (media instanceof FeedMedia) { DBTasks.flattrItemIfLoggedIn(this, ((FeedMedia) media).getItem()); } break; case R.id.share_link_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem()); } break; case R.id.share_download_url_item: if (media instanceof FeedMedia) { 
ShareUtils.shareFeedItemDownloadLink(this, ((FeedMedia) media).getItem()); } break; case R.id.share_link_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemLink(this, ((FeedMedia) media).getItem(), true); } break; case R.id.share_download_url_with_position_item: if (media instanceof FeedMedia) { ShareUtils.shareFeedItemDownloadLink(this, ((FeedMedia) media).getItem(), true); } break; case R.id.skip_episode_item: sendBroadcast(new Intent( PlaybackService.ACTION_SKIP_CURRENT_EPISODE)); break; default: return false; } return true; } else { return false; } } @Override protected void onResume() { super.onResume(); Log.d(TAG, "onResume()"); StorageUtils.checkStorageAvailability(this); controller.init(); } /** * Called by 'handleStatus()' when the PlaybackService is waiting for * a video surface. */ protected abstract void onAwaitingVideoSurface(); protected abstract void postStatusMsg(int resId); protected abstract void clearStatusMsg(); protected void onPositionObserverUpdate() { if (controller != null) { int currentPosition = controller.getPosition(); int duration = controller.getDuration(); Log.d(TAG, "currentPosition " + Converter .getDurationStringLong(currentPosition)); if (currentPosition != PlaybackService.INVALID_TIME && duration != PlaybackService.INVALID_TIME && controller.getMedia() != null) { txtvPosition.setText(Converter .getDurationStringLong(currentPosition)); txtvLength.setText(Converter.getDurationStringLong(duration)); updateProgressbarPosition(currentPosition, duration); } else { Log.w(TAG, "Could not react to position observer update because of invalid time"); } } } private void updateProgressbarPosition(int position, int duration) { Log.d(TAG, "updateProgressbarPosition(" + position + ", " + duration +")"); float progress = ((float) position) / duration; sbPosition.setProgress((int) (progress * sbPosition.getMax())); } /** * Load information about the media that is going to be played or currently * being played. 
This method will be called when the activity is connected * to the PlaybackService to ensure that the activity has the right * FeedMedia object. */ protected boolean loadMediaInfo() { Log.d(TAG, "loadMediaInfo()"); Playable media = controller.getMedia(); if (media != null) { txtvPosition.setText(Converter.getDurationStringLong((media .getPosition()))); if (media.getDuration() != 0) { txtvLength.setText(Converter.getDurationStringLong(media .getDuration())); float progress = ((float) media.getPosition()) / media.getDuration(); sbPosition.setProgress((int) (progress * sbPosition.getMax())); } return true; } else { return false; } } protected void setupGUI() { setContentView(getContentViewResourceId()); sbPosition = (SeekBar) findViewById(R.id.sbPosition); txtvPosition = (TextView) findViewById(R.id.txtvPosition); txtvLength = (TextView) findViewById(R.id.txtvLength); butPlay = (ImageButton) findViewById(R.id.butPlay); butRev = (ImageButton) findViewById(R.id.butRev); txtvRev = (TextView) findViewById(R.id.txtvRev); if(txtvRev != null) { txtvRev.setText(String.valueOf(UserPreferences.getRewindSecs())); } butFF = (ImageButton) findViewById(R.id.butFF); txtvFF = (TextView) findViewById(R.id.txtvFF); if(txtvFF != null) { txtvFF.setText(String.valueOf(UserPreferences.getFastFowardSecs())); } // SEEKBAR SETUP sbPosition.setOnSeekBarChangeListener(this); // BUTTON SETUP butPlay.setOnClickListener(controller.newOnPlayButtonClickListener()); if (butFF != null) { butFF.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { int curr = controller.getPosition(); controller.seekTo(curr + UserPreferences.getFastFowardSecs() * 1000); } }); butFF.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getFastFowardSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new 
String[values.length]; for(int i=0; i < values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_fast_forward); builder.setSingleChoiceItems(choices, checked, (dialog, which) -> { choice = values[which]; }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, (dialog, which) -> { UserPreferences.setPrefFastForwardSecs(choice); txtvFF.setText(String.valueOf(choice)); }); builder.create().show(); return true; } }); } if (butRev != null) { butRev.setOnClickListener(new View.OnClickListener() { @Override public void onClick(View v) { int curr = controller.getPosition(); controller.seekTo(curr - UserPreferences.getRewindSecs() * 1000); } }); butRev.setOnLongClickListener(new View.OnLongClickListener() { int choice; @Override public boolean onLongClick(View v) { int checked = 0; int rewindSecs = UserPreferences.getRewindSecs(); final int[] values = getResources().getIntArray(R.array.seek_delta_values); final String[] choices = new String[values.length]; for(int i=0; i < values.length; i++) { if (rewindSecs == values[i]) { checked = i; } choices[i] = String.valueOf(values[i]) + " " + getString(R.string.time_seconds); } choice = values[checked]; AlertDialog.Builder builder = new AlertDialog.Builder(MediaplayerActivity.this); builder.setTitle(R.string.pref_rewind); builder.setSingleChoiceItems(choices, checked, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { choice = values[which]; } }); builder.setNegativeButton(R.string.cancel_label, null); builder.setPositiveButton(R.string.confirm_label, new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { 
UserPreferences.setPrefRewindSecs(choice); txtvRev.setText(String.valueOf(choice)); } }); builder.create().show(); return true; } }); } } protected abstract int getContentViewResourceId(); void handleError(int errorCode) { final AlertDialog.Builder errorDialog = new AlertDialog.Builder(this); errorDialog.setTitle(R.string.error_label); errorDialog .setMessage(MediaPlayerError.getErrorString(this, errorCode)); errorDialog.setNeutralButton("OK", new DialogInterface.OnClickListener() { @Override public void onClick(DialogInterface dialog, int which) { dialog.dismiss(); finish(); } } ); errorDialog.create().show(); } float prog; @Override public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if (controller != null) { prog = controller.onSeekBarProgressChanged(seekBar, progress, fromUser, txtvPosition); } } @Override public void onStartTrackingTouch(SeekBar seekBar) { if (controller != null) { controller.onSeekBarStartTrackingTouch(seekBar); } } @Override public void onStopTrackingTouch(SeekBar seekBar) { if (controller != null) { controller.onSeekBarStopTrackingTouch(seekBar, prog); } } }
/*
 * Copyright 2020 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/cloud/compute/v1/compute.proto
// NOTE(review): this whole class is protoc output; regenerate from the .proto
// instead of hand-editing.

package com.google.cloud.compute.v1;

/**
 *
 *
 * <pre>
 * </pre>
 *
 * Protobuf type {@code google.cloud.compute.v1.LocalDisk}
 */
public final class LocalDisk extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.LocalDisk)
    LocalDiskOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use LocalDisk.newBuilder() to construct.
  private LocalDisk(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default instance: only the string field needs a non-null default.
  private LocalDisk() {
    diskType_ = "";
  }

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new LocalDisk();
  }

  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
    return this.unknownFields;
  }

  // Wire-format parsing constructor. The case labels are precomputed tag
  // values (field number << 3 | wire type) for the three known fields;
  // anything else is preserved in unknownFields.
  private LocalDisk(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    int mutable_bitField0_ = 0;
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            // Tag 0 marks end of input.
            done = true;
            break;
          case 744072418:
            {
              // disk_type = 93009052 (string)
              java.lang.String s = input.readStringRequireUtf8();
              bitField0_ |= 0x00000004;
              diskType_ = s;
              break;
            }
          case 1463467880:
            {
              // disk_count = 182933485 (int32)
              bitField0_ |= 0x00000001;
              diskCount_ = input.readInt32();
              break;
            }
          case -1764857416:
            {
              // disk_size_gb = 316263735 (int32); tag overflows into a
              // negative int because of the large field number.
              bitField0_ |= 0x00000002;
              diskSizeGb_ = input.readInt32();
              break;
            }
          default:
            {
              if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
                done = true;
              }
              break;
            }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_LocalDisk_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.compute.v1.Compute
        .internal_static_google_cloud_compute_v1_LocalDisk_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.compute.v1.LocalDisk.class,
            com.google.cloud.compute.v1.LocalDisk.Builder.class);
  }

  // Presence bits: 0x1 = disk_count, 0x2 = disk_size_gb, 0x4 = disk_type.
  private int bitField0_;
  public static final int DISK_COUNT_FIELD_NUMBER = 182933485;
  private int diskCount_;
  /**
   *
   *
   * <pre>
   * Specifies the number of such disks.
   * </pre>
   *
   * <code>optional int32 disk_count = 182933485;</code>
   *
   * @return Whether the diskCount field is set.
   */
  @java.lang.Override
  public boolean hasDiskCount() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Specifies the number of such disks.
   * </pre>
   *
   * <code>optional int32 disk_count = 182933485;</code>
   *
   * @return The diskCount.
   */
  @java.lang.Override
  public int getDiskCount() {
    return diskCount_;
  }

  public static final int DISK_SIZE_GB_FIELD_NUMBER = 316263735;
  private int diskSizeGb_;
  /**
   *
   *
   * <pre>
   * Specifies the size of the disk in base-2 GB.
   * </pre>
   *
   * <code>optional int32 disk_size_gb = 316263735;</code>
   *
   * @return Whether the diskSizeGb field is set.
   */
  @java.lang.Override
  public boolean hasDiskSizeGb() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Specifies the size of the disk in base-2 GB.
   * </pre>
   *
   * <code>optional int32 disk_size_gb = 316263735;</code>
   *
   * @return The diskSizeGb.
   */
  @java.lang.Override
  public int getDiskSizeGb() {
    return diskSizeGb_;
  }

  public static final int DISK_TYPE_FIELD_NUMBER = 93009052;
  // Holds either a String or a ByteString; lazily converted on first access.
  private volatile java.lang.Object diskType_;
  /**
   *
   *
   * <pre>
   * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
   * </pre>
   *
   * <code>optional string disk_type = 93009052;</code>
   *
   * @return Whether the diskType field is set.
   */
  @java.lang.Override
  public boolean hasDiskType() {
    return ((bitField0_ & 0x00000004) != 0);
  }
  /**
   *
   *
   * <pre>
   * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
   * </pre>
   *
   * <code>optional string disk_type = 93009052;</code>
   *
   * @return The diskType.
   */
  @java.lang.Override
  public java.lang.String getDiskType() {
    java.lang.Object ref = diskType_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the String back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      diskType_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
   * </pre>
   *
   * <code>optional string disk_type = 93009052;</code>
   *
   * @return The bytes for diskType.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDiskTypeBytes() {
    java.lang.Object ref = diskType_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      diskType_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }

  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000004) != 0)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 93009052, diskType_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeInt32(182933485, diskCount_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeInt32(316263735, diskSizeGb_);
    }
    unknownFields.writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000004) != 0)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(93009052, diskType_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(182933485, diskCount_);
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(316263735, diskSizeGb_);
    }
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.compute.v1.LocalDisk)) {
      return super.equals(obj);
    }
    com.google.cloud.compute.v1.LocalDisk other = (com.google.cloud.compute.v1.LocalDisk) obj;

    // Presence must match, and when present the values must match.
    if (hasDiskCount() != other.hasDiskCount()) return false;
    if (hasDiskCount()) {
      if (getDiskCount() != other.getDiskCount()) return false;
    }
    if (hasDiskSizeGb() != other.hasDiskSizeGb()) return false;
    if (hasDiskSizeGb()) {
      if (getDiskSizeGb() != other.getDiskSizeGb()) return false;
    }
    if (hasDiskType() != other.hasDiskType()) return false;
    if (hasDiskType()) {
      if (!getDiskType().equals(other.getDiskType())) return false;
    }
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasDiskCount()) {
      hash = (37 * hash) + DISK_COUNT_FIELD_NUMBER;
      hash = (53 * hash) + getDiskCount();
    }
    if (hasDiskSizeGb()) {
      hash = (37 * hash) + DISK_SIZE_GB_FIELD_NUMBER;
      hash = (53 * hash) + getDiskSizeGb();
    }
    if (hasDiskType()) {
      hash = (37 * hash) + DISK_TYPE_FIELD_NUMBER;
      hash = (53 * hash) + getDiskType().hashCode();
    }
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.compute.v1.LocalDisk parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(com.google.cloud.compute.v1.LocalDisk prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * </pre>
   *
   * Protobuf type {@code google.cloud.compute.v1.LocalDisk}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.LocalDisk)
      com.google.cloud.compute.v1.LocalDiskOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocalDisk_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocalDisk_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.compute.v1.LocalDisk.class,
              com.google.cloud.compute.v1.LocalDisk.Builder.class);
    }

    // Construct using com.google.cloud.compute.v1.LocalDisk.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      diskCount_ = 0;
      bitField0_ = (bitField0_ & ~0x00000001);
      diskSizeGb_ = 0;
      bitField0_ = (bitField0_ & ~0x00000002);
      diskType_ = "";
      bitField0_ = (bitField0_ & ~0x00000004);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.compute.v1.Compute
          .internal_static_google_cloud_compute_v1_LocalDisk_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocalDisk getDefaultInstanceForType() {
      return com.google.cloud.compute.v1.LocalDisk.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocalDisk build() {
      com.google.cloud.compute.v1.LocalDisk result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.compute.v1.LocalDisk buildPartial() {
      com.google.cloud.compute.v1.LocalDisk result =
          new com.google.cloud.compute.v1.LocalDisk(this);
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.diskCount_ = diskCount_;
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.diskSizeGb_ = diskSizeGb_;
        to_bitField0_ |= 0x00000002;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        to_bitField0_ |= 0x00000004;
      }
      // diskType_ is copied unconditionally; only the presence bit is gated.
      result.diskType_ = diskType_;
      result.bitField0_ = to_bitField0_;
      onBuilt();
      return result;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.compute.v1.LocalDisk) {
        return mergeFrom((com.google.cloud.compute.v1.LocalDisk) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(com.google.cloud.compute.v1.LocalDisk other) {
      if (other == com.google.cloud.compute.v1.LocalDisk.getDefaultInstance()) return this;
      if (other.hasDiskCount()) {
        setDiskCount(other.getDiskCount());
      }
      if (other.hasDiskSizeGb()) {
        setDiskSizeGb(other.getDiskSizeGb());
      }
      if (other.hasDiskType()) {
        bitField0_ |= 0x00000004;
        diskType_ = other.diskType_;
        onChanged();
      }
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.cloud.compute.v1.LocalDisk parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Keep whatever was parsed before the failure, then rethrow.
        parsedMessage = (com.google.cloud.compute.v1.LocalDisk) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }

    private int bitField0_;

    private int diskCount_;
    /**
     *
     *
     * <pre>
     * Specifies the number of such disks.
     * </pre>
     *
     * <code>optional int32 disk_count = 182933485;</code>
     *
     * @return Whether the diskCount field is set.
     */
    @java.lang.Override
    public boolean hasDiskCount() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Specifies the number of such disks.
     * </pre>
     *
     * <code>optional int32 disk_count = 182933485;</code>
     *
     * @return The diskCount.
     */
    @java.lang.Override
    public int getDiskCount() {
      return diskCount_;
    }
    /**
     *
     *
     * <pre>
     * Specifies the number of such disks.
     * </pre>
     *
     * <code>optional int32 disk_count = 182933485;</code>
     *
     * @param value The diskCount to set.
     * @return This builder for chaining.
     */
    public Builder setDiskCount(int value) {
      bitField0_ |= 0x00000001;
      diskCount_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies the number of such disks.
     * </pre>
     *
     * <code>optional int32 disk_count = 182933485;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDiskCount() {
      bitField0_ = (bitField0_ & ~0x00000001);
      diskCount_ = 0;
      onChanged();
      return this;
    }

    private int diskSizeGb_;
    /**
     *
     *
     * <pre>
     * Specifies the size of the disk in base-2 GB.
     * </pre>
     *
     * <code>optional int32 disk_size_gb = 316263735;</code>
     *
     * @return Whether the diskSizeGb field is set.
     */
    @java.lang.Override
    public boolean hasDiskSizeGb() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Specifies the size of the disk in base-2 GB.
     * </pre>
     *
     * <code>optional int32 disk_size_gb = 316263735;</code>
     *
     * @return The diskSizeGb.
     */
    @java.lang.Override
    public int getDiskSizeGb() {
      return diskSizeGb_;
    }
    /**
     *
     *
     * <pre>
     * Specifies the size of the disk in base-2 GB.
     * </pre>
     *
     * <code>optional int32 disk_size_gb = 316263735;</code>
     *
     * @param value The diskSizeGb to set.
     * @return This builder for chaining.
     */
    public Builder setDiskSizeGb(int value) {
      bitField0_ |= 0x00000002;
      diskSizeGb_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies the size of the disk in base-2 GB.
     * </pre>
     *
     * <code>optional int32 disk_size_gb = 316263735;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDiskSizeGb() {
      bitField0_ = (bitField0_ & ~0x00000002);
      diskSizeGb_ = 0;
      onChanged();
      return this;
    }

    private java.lang.Object diskType_ = "";
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @return Whether the diskType field is set.
     */
    public boolean hasDiskType() {
      return ((bitField0_ & 0x00000004) != 0);
    }
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @return The diskType.
     */
    public java.lang.String getDiskType() {
      java.lang.Object ref = diskType_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        diskType_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @return The bytes for diskType.
     */
    public com.google.protobuf.ByteString getDiskTypeBytes() {
      java.lang.Object ref = diskType_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        diskType_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @param value The diskType to set.
     * @return This builder for chaining.
     */
    public Builder setDiskType(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000004;
      diskType_ = value;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearDiskType() {
      bitField0_ = (bitField0_ & ~0x00000004);
      diskType_ = getDefaultInstance().getDiskType();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Specifies the desired disk type on the node. This disk type must be a local storage type (e.g.: local-ssd). Note that for nodeTemplates, this should be the name of the disk type and not its URL.
     * </pre>
     *
     * <code>optional string disk_type = 93009052;</code>
     *
     * @param value The bytes for diskType to set.
     * @return This builder for chaining.
     */
    public Builder setDiskTypeBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      bitField0_ |= 0x00000004;
      diskType_ = value;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.LocalDisk)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.LocalDisk)
  private static final com.google.cloud.compute.v1.LocalDisk DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.compute.v1.LocalDisk();
  }

  public static com.google.cloud.compute.v1.LocalDisk getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<LocalDisk> PARSER =
      new com.google.protobuf.AbstractParser<LocalDisk>() {
        @java.lang.Override
        public LocalDisk parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          return new LocalDisk(input, extensionRegistry);
        }
      };

  public static com.google.protobuf.Parser<LocalDisk> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<LocalDisk> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.compute.v1.LocalDisk getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/* * Copyright 2000-2014 JetBrains s.r.o. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.intellij.execution.applet; import com.intellij.execution.*; import com.intellij.execution.configurations.*; import com.intellij.execution.junit.RefactoringListeners; import com.intellij.execution.process.OSProcessHandler; import com.intellij.execution.process.ProcessAdapter; import com.intellij.execution.process.ProcessEvent; import com.intellij.execution.runners.ExecutionEnvironment; import com.intellij.execution.util.JavaParametersUtil; import com.intellij.openapi.module.Module; import com.intellij.openapi.options.SettingsEditor; import com.intellij.openapi.project.Project; import com.intellij.openapi.projectRoots.JavaSdk; import com.intellij.openapi.util.Comparing; import com.intellij.openapi.util.InvalidDataException; import com.intellij.openapi.util.WriteExternalException; import com.intellij.openapi.util.io.FileUtil; import com.intellij.openapi.util.text.StringUtil; import com.intellij.psi.PsiClass; import com.intellij.psi.PsiElement; import com.intellij.refactoring.listeners.RefactoringElementListener; import com.intellij.util.SmartList; import com.intellij.util.xmlb.SmartSerializer; import com.intellij.util.xmlb.annotations.Transient; import org.jdom.Element; import org.jetbrains.annotations.NonNls; import org.jetbrains.annotations.NotNull; import java.io.File; import java.io.FileWriter; import java.io.IOException; import java.net.MalformedURLException; import 
java.net.URL; import java.util.Collection; import java.util.List; public class AppletConfiguration extends ModuleBasedConfiguration<JavaRunConfigurationModule> implements SingleClassConfiguration, RefactoringListenerProvider { public String MAIN_CLASS_NAME; public String HTML_FILE_NAME; public boolean HTML_USED; public int WIDTH; public int HEIGHT; public String POLICY_FILE; public String VM_PARAMETERS; private AppletParameter[] myAppletParameters; public boolean ALTERNATIVE_JRE_PATH_ENABLED; public String ALTERNATIVE_JRE_PATH; @NonNls protected static final String NAME_ATTR = "name"; @NonNls protected static final String VALUE_ATTR = "value"; @NonNls protected static final String PARAMETER_ELEMENT_NAME = "parameter"; private final SmartSerializer mySerializer; public AppletConfiguration(@NotNull Project project, ConfigurationFactory factory) { super(new JavaRunConfigurationModule(project, false), factory); mySerializer = new SmartSerializer(!project.isDefault(), true); } @Override public void setMainClass(final PsiClass psiClass) { final Module originalModule = getConfigurationModule().getModule(); setMainClassName(JavaExecutionUtil.getRuntimeQualifiedName(psiClass)); setModule(JavaExecutionUtil.findModule(psiClass)); restoreOriginalModule(originalModule); } @Override public RunProfileState getState(@NotNull final Executor executor, @NotNull final ExecutionEnvironment env) throws ExecutionException { return new JavaCommandLineState(env) { private AppletHtmlFile myHtmlURL = null; @Override protected JavaParameters createJavaParameters() throws ExecutionException { final JavaParameters params = new JavaParameters(); myHtmlURL = getHtmlURL(); if (myHtmlURL != null) { final int classPathType = myHtmlURL.isHttp() ? JavaParameters.JDK_ONLY : JavaParameters.JDK_AND_CLASSES; final RunConfigurationModule runConfigurationModule = getConfigurationModule(); JavaParametersUtil.configureModule(runConfigurationModule, params, classPathType, ALTERNATIVE_JRE_PATH_ENABLED ? 
ALTERNATIVE_JRE_PATH : null); final String policyFileParameter = getPolicyFileParameter(); if (policyFileParameter != null) { params.getVMParametersList().add(policyFileParameter); } params.getVMParametersList().addParametersString(VM_PARAMETERS); params.setMainClass("sun.applet.AppletViewer"); params.getProgramParametersList().add(myHtmlURL.getUrl()); } return params; } @Override @NotNull protected OSProcessHandler startProcess() throws ExecutionException { final OSProcessHandler handler = super.startProcess(); final AppletHtmlFile htmlUrl = myHtmlURL; if (htmlUrl != null) { handler.addProcessListener(new ProcessAdapter() { @Override public void processTerminated(ProcessEvent event) { htmlUrl.deleteFile(); } }); } return handler; } }; } @Override @NotNull public SettingsEditor<? extends RunConfiguration> getConfigurationEditor() { return new AppletConfigurable(getProject()); } @NonNls private String getPolicyFileParameter() { if (POLICY_FILE != null && POLICY_FILE.length() > 0) { return "-Djava.security.policy=" + getPolicyFile(); } return null; } @Transient public String getPolicyFile() { return ExternalizablePath.localPathValue(POLICY_FILE); } public void setPolicyFile(final String localPath) { POLICY_FILE = ExternalizablePath.urlValue(localPath); } public static class AppletParameter { public String myName; public String myValue; public AppletParameter(@NonNls final String name, final String value) { myName = name; myValue = value; } public String getName() { return myName; } public void setName(final String name) { myName = name; } public String getValue() { return myValue; } public void setValue(final String value) { myValue = value; } public boolean equals(final Object obj) { if (!(obj instanceof AppletParameter)) return false; final AppletParameter second = (AppletParameter)obj; return Comparing.equal(myName, second.myName) && Comparing.equal(myValue, second.myValue); } public int hashCode() { return Comparing.hashcode(myName, myValue); } } @Override public 
Collection<Module> getValidModules() { return JavaRunConfigurationModule.getModulesForClass(getProject(), MAIN_CLASS_NAME); } @Override public void readExternal(final Element parentNode) throws InvalidDataException { mySerializer.readExternal(this, parentNode); List<Element> paramList = parentNode.getChildren(PARAMETER_ELEMENT_NAME); if (paramList.isEmpty()) { myAppletParameters = null; } else { List<AppletParameter> parameters = new SmartList<>(); for (Element element : paramList) { parameters.add(new AppletParameter(element.getAttributeValue(NAME_ATTR), element.getAttributeValue(VALUE_ATTR))); } myAppletParameters = parameters.toArray(new AppletParameter[parameters.size()]); } } @Override protected boolean isNewSerializationUsed() { return true; } @Override public void writeExternal(final Element parentNode) throws WriteExternalException { mySerializer.writeExternal(this, parentNode); if (myAppletParameters != null) { for (AppletParameter myAppletParameter : myAppletParameters) { final Element element = new Element(PARAMETER_ELEMENT_NAME); parentNode.addContent(element); element.setAttribute(NAME_ATTR, myAppletParameter.getName()); element.setAttribute(VALUE_ATTR, myAppletParameter.getValue()); } } } @Override public RefactoringElementListener getRefactoringElementListener(final PsiElement element) { if (HTML_USED) return null; return RefactoringListeners.getClassOrPackageListener(element, new RefactoringListeners.SingleClassConfigurationAccessor(this)); } @Override @Transient public PsiClass getMainClass() { return getConfigurationModule().findClass(MAIN_CLASS_NAME); } @Override public String suggestedName() { if (MAIN_CLASS_NAME == null) return null; return ProgramRunnerUtil.shortenName(JavaExecutionUtil.getShortClassName(MAIN_CLASS_NAME), 0); } @Override public void setMainClassName(final String qualifiedName) { MAIN_CLASS_NAME = qualifiedName; } @Override public void checkConfiguration() throws RuntimeConfigurationException { if 
(ALTERNATIVE_JRE_PATH_ENABLED){ if (ALTERNATIVE_JRE_PATH == null || ALTERNATIVE_JRE_PATH.length() == 0 || !JavaSdk.checkForJre(ALTERNATIVE_JRE_PATH)){ throw new RuntimeConfigurationWarning(ExecutionBundle.message("jre.not.valid.error.message", ALTERNATIVE_JRE_PATH)); } } getConfigurationModule().checkForWarning(); if (HTML_USED) { if (HTML_FILE_NAME == null || HTML_FILE_NAME.length() == 0) { throw new RuntimeConfigurationWarning(ExecutionBundle.message("html.file.not.specified.error.message")); } try { new URL(getHtmlURL().getUrl()); } catch (CantRunException ex) { checkUrlIsValid(ex); } catch (MalformedURLException ex) { checkUrlIsValid(ex); } } else { getConfigurationModule().checkClassName(MAIN_CLASS_NAME, ExecutionBundle.message("no.applet.class.specified.error.message")); } } private void checkUrlIsValid(Exception ex) throws RuntimeConfigurationWarning { throw new RuntimeConfigurationWarning("URL " + HTML_FILE_NAME + " is not valid: " + ex.getLocalizedMessage()); } @Transient public AppletParameter[] getAppletParameters() { return myAppletParameters; } public void setAppletParameters(AppletParameter[] appletParameters) { myAppletParameters = appletParameters; } public void setAppletParameters(final List<AppletParameter> parameters) { setAppletParameters(parameters.toArray(new AppletParameter[parameters.size()])); } private AppletHtmlFile getHtmlURL() throws CantRunException { if (HTML_USED) { if (HTML_FILE_NAME == null || HTML_FILE_NAME.length() == 0) { throw new CantRunException(ExecutionBundle.message("html.file.not.specified.error.message")); } return new AppletHtmlFile(HTML_FILE_NAME, null); } else { if (MAIN_CLASS_NAME == null || MAIN_CLASS_NAME.length() == 0) { throw new CantRunException(ExecutionBundle.message("class.not.specified.error.message")); } // generate html try { return generateAppletTempPage(); } catch (IOException ignored) { throw new CantRunException(ExecutionBundle.message("failed.to.generate.wrapper.error.message")); } } } private 
AppletHtmlFile generateAppletTempPage() throws IOException { final File tempFile = FileUtil.createTempFile("AppletPage", ".html"); @NonNls final FileWriter writer = new FileWriter(tempFile); try { writer.write("<html>\n" + "<head>\n" + "<title>" + MAIN_CLASS_NAME + "</title>\n" + "</head>\n" + "<applet codebase=\".\"\n" + "code=\"" + MAIN_CLASS_NAME + "\"\n" + "name=\"" + MAIN_CLASS_NAME + "\"\n" + "width=" + WIDTH + "\n" + "height=" + HEIGHT + "\n" + "align=top>\n"); final AppletParameter[] appletParameters = getAppletParameters(); if (appletParameters != null) { for (final AppletParameter parameter : appletParameters) { writer.write("<param name=\"" + parameter.getName() + "\" value=\"" + parameter.getValue() + "\">\n"); } } writer.write("</applet>\n</body>\n</html>\n"); } finally { writer.close(); } return new AppletHtmlFile(tempFile.getAbsolutePath(), tempFile); } private static class AppletHtmlFile { private final String myHtmlFile; private final File myFileToDelete; @NonNls protected static final String FILE_PREFIX = "file:/"; @NonNls protected static final String HTTP_PREFIX = "http:/"; @NonNls protected static final String HTTPS_PREFIX = "https:/"; protected AppletHtmlFile(final String htmlFile, final File fileToDelete) { myHtmlFile = htmlFile; myFileToDelete = fileToDelete; } public String getUrl() { if (!StringUtil.startsWithIgnoreCase(myHtmlFile, FILE_PREFIX) && !isHttp()) { try { return new File(myHtmlFile).toURL().toString(); } catch (MalformedURLException ignored) { } } return myHtmlFile; } public boolean isHttp() { return StringUtil.startsWithIgnoreCase(myHtmlFile, HTTP_PREFIX) || StringUtil.startsWithIgnoreCase(myHtmlFile, HTTPS_PREFIX); } public void deleteFile() { if (myFileToDelete != null) { myFileToDelete.delete(); } } } }
/*
 * Copyright 2014 The gRPC Authors
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package io.grpc;

import static com.google.common.base.Charsets.US_ASCII;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Throwables.getStackTraceAsString;

import com.google.common.base.MoreObjects;
import com.google.common.base.Objects;
import io.grpc.Metadata.TrustedAsciiMarshaller;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.TreeMap;
import javax.annotation.CheckReturnValue;
import javax.annotation.Nullable;
import javax.annotation.concurrent.Immutable;

/**
 * Defines the status of an operation by providing a standard {@link Code} in conjunction with an
 * optional descriptive message. Instances of {@code Status} are created by starting with the
 * template for the appropriate {@link Status.Code} and supplementing it with additional
 * information: {@code Status.NOT_FOUND.withDescription("Could not find 'important_file.txt'");}
 *
 * <p>For clients, every remote call will return a status on completion. In the case of errors this
 * status may be propagated to blocking stubs as a {@link RuntimeException} or to a listener as an
 * explicit parameter.
 *
 * <p>Similarly servers can report a status by throwing {@link StatusRuntimeException}
 * or by passing the status to a callback.
 *
 * <p>Utility functions are provided to convert a status to an exception and to extract them
 * back out.
 */
@Immutable
@CheckReturnValue
public final class Status {

  /**
   * The set of canonical status codes. If new codes are added over time they must choose
   * a numerical value that does not collide with any previously used value.
   */
  public enum Code {
    /**
     * The operation completed successfully.
     */
    OK(0),

    /**
     * The operation was cancelled (typically by the caller).
     */
    CANCELLED(1),

    /**
     * Unknown error. An example of where this error may be returned is
     * if a Status value received from another address space belongs to
     * an error-space that is not known in this address space. Also
     * errors raised by APIs that do not return enough error information
     * may be converted to this error.
     */
    UNKNOWN(2),

    /**
     * Client specified an invalid argument. Note that this differs
     * from FAILED_PRECONDITION. INVALID_ARGUMENT indicates arguments
     * that are problematic regardless of the state of the system
     * (e.g., a malformed file name).
     */
    INVALID_ARGUMENT(3),

    /**
     * Deadline expired before operation could complete. For operations
     * that change the state of the system, this error may be returned
     * even if the operation has completed successfully. For example, a
     * successful response from a server could have been delayed long
     * enough for the deadline to expire.
     */
    DEADLINE_EXCEEDED(4),

    /**
     * Some requested entity (e.g., file or directory) was not found.
     */
    NOT_FOUND(5),

    /**
     * Some entity that we attempted to create (e.g., file or directory) already exists.
     */
    ALREADY_EXISTS(6),

    /**
     * The caller does not have permission to execute the specified
     * operation. PERMISSION_DENIED must not be used for rejections
     * caused by exhausting some resource (use RESOURCE_EXHAUSTED
     * instead for those errors). PERMISSION_DENIED must not be
     * used if the caller cannot be identified (use UNAUTHENTICATED
     * instead for those errors).
     */
    PERMISSION_DENIED(7),

    /**
     * Some resource has been exhausted, perhaps a per-user quota, or
     * perhaps the entire file system is out of space.
     */
    RESOURCE_EXHAUSTED(8),

    /**
     * Operation was rejected because the system is not in a state
     * required for the operation's execution. For example, directory
     * to be deleted may be non-empty, an rmdir operation is applied to
     * a non-directory, etc.
     *
     * <p>A litmus test that may help a service implementor in deciding
     * between FAILED_PRECONDITION, ABORTED, and UNAVAILABLE:
     * (a) Use UNAVAILABLE if the client can retry just the failing call.
     * (b) Use ABORTED if the client should retry at a higher-level
     * (e.g., restarting a read-modify-write sequence).
     * (c) Use FAILED_PRECONDITION if the client should not retry until
     * the system state has been explicitly fixed. E.g., if an "rmdir"
     * fails because the directory is non-empty, FAILED_PRECONDITION
     * should be returned since the client should not retry unless
     * they have first fixed up the directory by deleting files from it.
     */
    FAILED_PRECONDITION(9),

    /**
     * The operation was aborted, typically due to a concurrency issue
     * like sequencer check failures, transaction aborts, etc.
     *
     * <p>See litmus test above for deciding between FAILED_PRECONDITION,
     * ABORTED, and UNAVAILABLE.
     */
    ABORTED(10),

    /**
     * Operation was attempted past the valid range. E.g., seeking or
     * reading past end of file.
     *
     * <p>Unlike INVALID_ARGUMENT, this error indicates a problem that may
     * be fixed if the system state changes. For example, a 32-bit file
     * system will generate INVALID_ARGUMENT if asked to read at an
     * offset that is not in the range [0,2^32-1], but it will generate
     * OUT_OF_RANGE if asked to read from an offset past the current
     * file size.
     *
     * <p>There is a fair bit of overlap between FAILED_PRECONDITION and OUT_OF_RANGE.
     * We recommend using OUT_OF_RANGE (the more specific error) when it applies
     * so that callers who are iterating through
     * a space can easily look for an OUT_OF_RANGE error to detect when they are done.
     */
    OUT_OF_RANGE(11),

    /**
     * Operation is not implemented or not supported/enabled in this service.
     */
    UNIMPLEMENTED(12),

    /**
     * Internal errors. Means some invariants expected by underlying
     * system has been broken. If you see one of these errors,
     * something is very broken.
     */
    INTERNAL(13),

    /**
     * The service is currently unavailable. This is a most likely a
     * transient condition and may be corrected by retrying with
     * a backoff. Note that it is not always safe to retry
     * non-idempotent operations.
     *
     * <p>See litmus test above for deciding between FAILED_PRECONDITION,
     * ABORTED, and UNAVAILABLE.
     */
    UNAVAILABLE(14),

    /**
     * Unrecoverable data loss or corruption.
     */
    DATA_LOSS(15),

    /**
     * The request does not have valid authentication credentials for the
     * operation.
     */
    UNAUTHENTICATED(16);

    private final int value;
    // Cached ASCII rendering of the numeric value; used verbatim when writing
    // the "grpc-status" trailer so no per-call formatting is needed.
    @SuppressWarnings("ImmutableEnumChecker") // we make sure the byte[] can't be modified
    private final byte[] valueAscii;

    private Code(int value) {
      this.value = value;
      this.valueAscii = Integer.toString(value).getBytes(US_ASCII);
    }

    /**
     * The numerical value of the code.
     */
    public int value() {
      return value;
    }

    /**
     * Returns a {@link Status} object corresponding to this status code.
     */
    public Status toStatus() {
      return STATUS_LIST.get(value);
    }

    private byte[] valueAscii() {
      return valueAscii;
    }
  }

  // Test hook: when this system property is "true", Status.equals() asserts so
  // tests comparing Statuses by equality fail fast (see equals() below).
  private static final String TEST_EQUALS_FAILURE_PROPERTY = "io.grpc.Status.failOnEqualsForTest";
  private static final boolean FAIL_ON_EQUALS_FOR_TEST =
      Boolean.parseBoolean(System.getProperty(TEST_EQUALS_FAILURE_PROPERTY, "false"));

  // Create the canonical list of Status instances indexed by their code values.
  private static final List<Status> STATUS_LIST = buildStatusList();

  // Builds the immutable code-value -> Status list, failing fast on duplicate code values.
  private static List<Status> buildStatusList() {
    TreeMap<Integer, Status> canonicalizer = new TreeMap<>();
    for (Code code : Code.values()) {
      Status replaced = canonicalizer.put(code.value(), new Status(code));
      if (replaced != null) {
        throw new IllegalStateException("Code value duplication between "
            + replaced.getCode().name() + " & " + code.name());
      }
    }
    return Collections.unmodifiableList(new ArrayList<>(canonicalizer.values()));
  }

  // A pseudo-enum of Status instances mapped 1:1 with values in Code. This simplifies construction
  // patterns for derived instances of Status.
  /** The operation completed successfully. */
  public static final Status OK = Code.OK.toStatus();

  /** The operation was cancelled (typically by the caller). */
  public static final Status CANCELLED = Code.CANCELLED.toStatus();

  /** Unknown error. See {@link Code#UNKNOWN}. */
  public static final Status UNKNOWN = Code.UNKNOWN.toStatus();

  /** Client specified an invalid argument. See {@link Code#INVALID_ARGUMENT}. */
  public static final Status INVALID_ARGUMENT = Code.INVALID_ARGUMENT.toStatus();

  /** Deadline expired before operation could complete. See {@link Code#DEADLINE_EXCEEDED}. */
  public static final Status DEADLINE_EXCEEDED = Code.DEADLINE_EXCEEDED.toStatus();

  /** Some requested entity (e.g., file or directory) was not found. */
  public static final Status NOT_FOUND = Code.NOT_FOUND.toStatus();

  /** Some entity that we attempted to create (e.g., file or directory) already exists. */
  public static final Status ALREADY_EXISTS = Code.ALREADY_EXISTS.toStatus();

  /**
   * The caller does not have permission to execute the specified operation. See {@link
   * Code#PERMISSION_DENIED}.
   */
  public static final Status PERMISSION_DENIED = Code.PERMISSION_DENIED.toStatus();

  /** The request does not have valid authentication credentials for the operation.
   */
  public static final Status UNAUTHENTICATED = Code.UNAUTHENTICATED.toStatus();

  /**
   * Some resource has been exhausted, perhaps a per-user quota, or perhaps the entire file system
   * is out of space.
   */
  public static final Status RESOURCE_EXHAUSTED = Code.RESOURCE_EXHAUSTED.toStatus();

  /**
   * Operation was rejected because the system is not in a state required for the operation's
   * execution. See {@link Code#FAILED_PRECONDITION}.
   */
  public static final Status FAILED_PRECONDITION = Code.FAILED_PRECONDITION.toStatus();

  /**
   * The operation was aborted, typically due to a concurrency issue like sequencer check failures,
   * transaction aborts, etc. See {@link Code#ABORTED}.
   */
  public static final Status ABORTED = Code.ABORTED.toStatus();

  /** Operation was attempted past the valid range. See {@link Code#OUT_OF_RANGE}. */
  public static final Status OUT_OF_RANGE = Code.OUT_OF_RANGE.toStatus();

  /** Operation is not implemented or not supported/enabled in this service. */
  public static final Status UNIMPLEMENTED = Code.UNIMPLEMENTED.toStatus();

  /** Internal errors. See {@link Code#INTERNAL}. */
  public static final Status INTERNAL = Code.INTERNAL.toStatus();

  /** The service is currently unavailable. See {@link Code#UNAVAILABLE}. */
  public static final Status UNAVAILABLE = Code.UNAVAILABLE.toStatus();

  /** Unrecoverable data loss or corruption. */
  public static final Status DATA_LOSS = Code.DATA_LOSS.toStatus();

  /**
   * Return a {@link Status} given a canonical error {@link Code} value.
*/ public static Status fromCodeValue(int codeValue) { if (codeValue < 0 || codeValue > STATUS_LIST.size()) { return UNKNOWN.withDescription("Unknown code " + codeValue); } else { return STATUS_LIST.get(codeValue); } } private static Status fromCodeValue(byte[] asciiCodeValue) { if (asciiCodeValue.length == 1 && asciiCodeValue[0] == '0') { return Status.OK; } return fromCodeValueSlow(asciiCodeValue); } @SuppressWarnings("fallthrough") private static Status fromCodeValueSlow(byte[] asciiCodeValue) { int index = 0; int codeValue = 0; switch (asciiCodeValue.length) { case 2: if (asciiCodeValue[index] < '0' || asciiCodeValue[index] > '9') { break; } codeValue += (asciiCodeValue[index++] - '0') * 10; // fall through case 1: if (asciiCodeValue[index] < '0' || asciiCodeValue[index] > '9') { break; } codeValue += asciiCodeValue[index] - '0'; if (codeValue < STATUS_LIST.size()) { return STATUS_LIST.get(codeValue); } break; default: break; } return UNKNOWN.withDescription("Unknown code " + new String(asciiCodeValue, US_ASCII)); } /** * Return a {@link Status} given a canonical error {@link Code} object. */ public static Status fromCode(Code code) { return code.toStatus(); } /** * Key to bind status code to trailing metadata. */ static final Metadata.Key<Status> CODE_KEY = Metadata.Key.of("grpc-status", false /* not pseudo */, new StatusCodeMarshaller()); /** * Marshals status messages for ({@link #MESSAGE_KEY}. gRPC does not use binary coding of * status messages by default, which makes sending arbitrary strings difficult. This marshaller * uses ASCII printable characters by default, and percent encodes (e.g. %0A) all non ASCII bytes. * This leads to normal text being mostly readable (especially useful for debugging), and special * text still being sent. * * <p>By default, the HTTP spec says that header values must be encoded using a strict subset of * ASCII (See RFC 7230 section 3.2.6). 
HTTP/2 HPACK allows use of arbitrary binary headers, but * we do not use them for interoperating with existing HTTP/1.1 code. Since the grpc-message * is encoded to such a header, it needs to not use forbidden characters. * * <p>This marshaller works by converting the passed in string into UTF-8, checking to see if * each individual byte is an allowable byte, and then either percent encoding or passing it * through. When percent encoding, the byte is converted into hexadecimal notation with a '%' * prepended. * * <p>When unmarshalling, bytes are passed through unless they match the "%XX" pattern. If they * do match, the unmarshaller attempts to convert them back into their original UTF-8 byte * sequence. After the input header bytes are converted into UTF-8 bytes, the new byte array is * reinterpretted back as a string. */ private static final TrustedAsciiMarshaller<String> STATUS_MESSAGE_MARSHALLER = new StatusMessageMarshaller(); /** * Key to bind status message to trailing metadata. */ static final Metadata.Key<String> MESSAGE_KEY = Metadata.Key.of("grpc-message", false /* not pseudo */, STATUS_MESSAGE_MARSHALLER); /** * Extract an error {@link Status} from the causal chain of a {@link Throwable}. * If no status can be found, a status is created with {@link Code#UNKNOWN} as its code and * {@code t} as its cause. * * @return non-{@code null} status */ public static Status fromThrowable(Throwable t) { Throwable cause = checkNotNull(t, "t"); while (cause != null) { if (cause instanceof StatusException) { return ((StatusException) cause).getStatus(); } else if (cause instanceof StatusRuntimeException) { return ((StatusRuntimeException) cause).getStatus(); } cause = cause.getCause(); } // Couldn't find a cause with a Status return UNKNOWN.withCause(t); } /** * Extract an error trailers from the causal chain of a {@link Throwable}. * * @return the trailers or {@code null} if not found. 
*/ @Nullable @ExperimentalApi("https://github.com/grpc/grpc-java/issues/4683") public static Metadata trailersFromThrowable(Throwable t) { Throwable cause = checkNotNull(t, "t"); while (cause != null) { if (cause instanceof StatusException) { return ((StatusException) cause).getTrailers(); } else if (cause instanceof StatusRuntimeException) { return ((StatusRuntimeException) cause).getTrailers(); } cause = cause.getCause(); } return null; } static String formatThrowableMessage(Status status) { if (status.description == null) { return status.code.toString(); } else { return status.code + ": " + status.description; } } private final Code code; private final String description; private final Throwable cause; private Status(Code code) { this(code, null, null); } private Status(Code code, @Nullable String description, @Nullable Throwable cause) { this.code = checkNotNull(code, "code"); this.description = description; this.cause = cause; } /** * Create a derived instance of {@link Status} with the given cause. * However, the cause is not transmitted from server to client. */ public Status withCause(Throwable cause) { if (Objects.equal(this.cause, cause)) { return this; } return new Status(this.code, this.description, cause); } /** * Create a derived instance of {@link Status} with the given description. Leading and trailing * whitespace may be removed; this may change in the future. */ public Status withDescription(String description) { if (Objects.equal(this.description, description)) { return this; } return new Status(this.code, description, this.cause); } /** * Create a derived instance of {@link Status} augmenting the current description with * additional detail. Leading and trailing whitespace may be removed; this may change in the * future. 
*/ public Status augmentDescription(String additionalDetail) { if (additionalDetail == null) { return this; } else if (this.description == null) { return new Status(this.code, additionalDetail, this.cause); } else { return new Status(this.code, this.description + "\n" + additionalDetail, this.cause); } } /** * The canonical status code. */ public Code getCode() { return code; } /** * A description of this status for human consumption. */ @Nullable public String getDescription() { return description; } /** * The underlying cause of an error. * Note that the cause is not transmitted from server to client. */ @Nullable public Throwable getCause() { return cause; } /** * Is this status OK, i.e., not an error. */ public boolean isOk() { return Code.OK == code; } /** * Convert this {@link Status} to a {@link RuntimeException}. Use {@link #fromThrowable} * to recover this {@link Status} instance when the returned exception is in the causal chain. */ public StatusRuntimeException asRuntimeException() { return new StatusRuntimeException(this); } /** * Same as {@link #asRuntimeException()} but includes the provided trailers in the returned * exception. */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/4683") public StatusRuntimeException asRuntimeException(@Nullable Metadata trailers) { return new StatusRuntimeException(this, trailers); } /** * Convert this {@link Status} to an {@link Exception}. Use {@link #fromThrowable} * to recover this {@link Status} instance when the returned exception is in the causal chain. */ public StatusException asException() { return new StatusException(this); } /** * Same as {@link #asException()} but includes the provided trailers in the returned exception. */ @ExperimentalApi("https://github.com/grpc/grpc-java/issues/4683") public StatusException asException(@Nullable Metadata trailers) { return new StatusException(this, trailers); } /** A string representation of the status useful for debugging. 
*/ @Override public String toString() { return MoreObjects.toStringHelper(this) .add("code", code.name()) .add("description", description) .add("cause", cause != null ? getStackTraceAsString(cause) : cause) .toString(); } private static final class StatusCodeMarshaller implements TrustedAsciiMarshaller<Status> { @Override public byte[] toAsciiString(Status status) { return status.getCode().valueAscii(); } @Override public Status parseAsciiString(byte[] serialized) { return fromCodeValue(serialized); } } private static final class StatusMessageMarshaller implements TrustedAsciiMarshaller<String> { private static final byte[] HEX = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'}; @Override public byte[] toAsciiString(String value) { byte[] valueBytes = value.getBytes(UTF_8); for (int i = 0; i < valueBytes.length; i++) { byte b = valueBytes[i]; // If there are only non escaping characters, skip the slow path. if (isEscapingChar(b)) { return toAsciiStringSlow(valueBytes, i); } } return valueBytes; } private static boolean isEscapingChar(byte b) { return b < ' ' || b >= '~' || b == '%'; } /** * @param valueBytes the UTF-8 bytes * @param ri The reader index, pointed at the first byte that needs escaping. */ private static byte[] toAsciiStringSlow(byte[] valueBytes, int ri) { byte[] escapedBytes = new byte[ri + (valueBytes.length - ri) * 3]; // copy over the good bytes if (ri != 0) { System.arraycopy(valueBytes, 0, escapedBytes, 0, ri); } int wi = ri; for (; ri < valueBytes.length; ri++) { byte b = valueBytes[ri]; // Manually implement URL encoding, per the gRPC spec. 
if (isEscapingChar(b)) { escapedBytes[wi] = '%'; escapedBytes[wi + 1] = HEX[(b >> 4) & 0xF]; escapedBytes[wi + 2] = HEX[b & 0xF]; wi += 3; continue; } escapedBytes[wi++] = b; } return Arrays.copyOf(escapedBytes, wi); } @SuppressWarnings("deprecation") // Use fast but deprecated String ctor @Override public String parseAsciiString(byte[] value) { for (int i = 0; i < value.length; i++) { byte b = value[i]; if (b < ' ' || b >= '~' || (b == '%' && i + 2 < value.length)) { return parseAsciiStringSlow(value); } } return new String(value, 0); } private static String parseAsciiStringSlow(byte[] value) { ByteBuffer buf = ByteBuffer.allocate(value.length); for (int i = 0; i < value.length;) { if (value[i] == '%' && i + 2 < value.length) { try { buf.put((byte)Integer.parseInt(new String(value, i + 1, 2, US_ASCII), 16)); i += 3; continue; } catch (NumberFormatException e) { // ignore, fall through, just push the bytes. } } buf.put(value[i]); i += 1; } return new String(buf.array(), 0, buf.position(), UTF_8); } } /** * Equality on Statuses is not well defined. Instead, do comparison based on their Code with * {@link #getCode}. The description and cause of the Status are unlikely to be stable, and * additional fields may be added to Status in the future. */ @Override public boolean equals(Object obj) { assert !FAIL_ON_EQUALS_FOR_TEST : "Status.equals called; disable this by setting " + TEST_EQUALS_FAILURE_PROPERTY; return super.equals(obj); } /** * Hash codes on Statuses are not well defined. * * @see #equals */ @Override public int hashCode() { return super.hashCode(); } }
package com.homerours.musiccontrols;

import org.apache.cordova.CordovaInterface;

import java.io.BufferedInputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.File;
import java.net.HttpURLConnection;
import java.net.URL;
import java.util.Random;

import android.util.Log;
import android.R;
import android.content.Context;
import android.app.Activity;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.Intent;
import android.os.Bundle;
import android.os.Build;
import android.graphics.BitmapFactory;
import android.graphics.Bitmap;
import android.net.Uri;

/**
 * Builds and maintains the Android notification exposing the music controls
 * (previous / play-pause / next / close) for the Cordova music-controls plugin.
 * Control taps are delivered as broadcasts with "music-controls-*" actions.
 */
public class MusicControlsNotification {
	// Host Cordova activity; used both as Context and as the tap-through target.
	private Activity cordovaActivity;
	private NotificationManager notificationManager;
	private Notification.Builder notificationBuilder;
	private int notificationID;
	// Last infos pushed via updateNotification(); null until the first update.
	private MusicControlsInfos infos;
	// Cached cover art; refreshed only when the cover URL changes.
	private Bitmap bitmapCover;

	// Public Constructor
	public MusicControlsNotification(Activity cordovaActivity, int id){
		this.notificationID = id;
		this.cordovaActivity = cordovaActivity;
		Context context = cordovaActivity;
		this.notificationManager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
	}

	// Show or update notification
	public void updateNotification(MusicControlsInfos newInfos){
		// Check if the cover has changed; only then re-fetch/decode it.
		if (!newInfos.cover.isEmpty() && (this.infos == null || !newInfos.cover.equals(this.infos.cover))){
			this.getBitmapCover(newInfos.cover);
		}
		this.infos = newInfos;
		this.createBuilder();
		Notification noti = this.notificationBuilder.build();
		this.notificationManager.notify(this.notificationID, noti);
	}

	// Toggle the play/pause button
	public void updateIsPlaying(boolean isPlaying){
		// BUGFIX: guard against being called before the first updateNotification(),
		// which previously dereferenced a null this.infos and crashed with an NPE.
		if (this.infos == null){
			return;
		}
		this.infos.isPlaying = isPlaying;
		this.createBuilder();
		Notification noti = this.notificationBuilder.build();
		this.notificationManager.notify(this.notificationID, noti);
	}

	// Get image from url; remote http(s)/ftp URLs and local file URIs are supported.
	// Best-effort: failures are logged and leave bitmapCover unchanged.
	private void getBitmapCover(String coverURL){
		try{
			if (coverURL.matches("^(https?|ftp)://.*$")){
				// Remote image
				this.bitmapCover = getBitmapFromURL(coverURL);
			}
			else {
				// Local image
				this.bitmapCover = getBitmapFromLocal(coverURL);
			}
		} catch (Exception ex) {
			ex.printStackTrace();
		}
	}

	// get Local image; returns null on any failure
	private Bitmap getBitmapFromLocal(String localURL){
		try {
			Uri uri = Uri.parse(localURL);
			File file = new File(uri.getPath());
			FileInputStream fileStream = new FileInputStream(file);
			BufferedInputStream buf = new BufferedInputStream(fileStream);
			Bitmap myBitmap = BitmapFactory.decodeStream(buf);
			buf.close(); // also closes the underlying FileInputStream
			return myBitmap;
		} catch (Exception ex) {
			ex.printStackTrace();
			return null;
		}
	}

	// get Remote image; returns null on any failure
	private Bitmap getBitmapFromURL(String strURL) {
		HttpURLConnection connection = null;
		try {
			URL url = new URL(strURL);
			connection = (HttpURLConnection) url.openConnection();
			connection.setDoInput(true);
			connection.connect();
			InputStream input = connection.getInputStream();
			try {
				return BitmapFactory.decodeStream(input);
			} finally {
				// BUGFIX: the stream was previously never closed (connection leak).
				input.close();
			}
		} catch (Exception ex) {
			ex.printStackTrace();
			return null;
		} finally {
			// BUGFIX: release the connection whether decoding succeeded or not.
			if (connection != null) {
				connection.disconnect();
			}
		}
	}

	// (Re)creates this.notificationBuilder from the current this.infos.
	private void createBuilder(){
		Context context = cordovaActivity;
		Notification.Builder builder = new Notification.Builder(context);

		// Configure builder
		builder.setContentTitle(infos.track);
		if (!infos.artist.isEmpty()){
			builder.setContentText(infos.artist);
		}
		builder.setWhen(0);

		// set if the notification can be destroyed by swiping
		if (infos.dismissable){
			builder.setOngoing(false);
			Intent dismissIntent = new Intent("music-controls-destroy");
			PendingIntent dismissPendingIntent = PendingIntent.getBroadcast(context, 1, dismissIntent, 0);
			builder.setDeleteIntent(dismissPendingIntent);
		} else {
			builder.setOngoing(true);
		}
		if (!infos.ticker.isEmpty()){
			builder.setTicker(infos.ticker);
		}
		builder.setPriority(Notification.PRIORITY_MAX);

		// If 5.0 >= set the controls to be visible on lockscreen
		if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP){
			builder.setVisibility(Notification.VISIBILITY_PUBLIC);
		}

		// Set SmallIcon
		if (infos.isPlaying){
			builder.setSmallIcon(R.drawable.ic_media_play);
		} else {
			builder.setSmallIcon(R.drawable.ic_media_pause);
		}

		// Set LargeIcon
		if (!infos.cover.isEmpty() && this.bitmapCover != null){
			builder.setLargeIcon(this.bitmapCover);
		}

		// Open app if tapped
		Intent resultIntent = new Intent(context, cordovaActivity.getClass());
		resultIntent.setAction(Intent.ACTION_MAIN);
		resultIntent.addCategory(Intent.CATEGORY_LAUNCHER);
		PendingIntent resultPendingIntent = PendingIntent.getActivity(context, 0, resultIntent, 0);
		builder.setContentIntent(resultPendingIntent);

		// Controls — nbControls counts the actions added so MediaStyle can show them all.
		int nbControls = 0;
		/* Previous */
		if (infos.hasPrev){
			nbControls++;
			Intent previousIntent = new Intent("music-controls-previous");
			PendingIntent previousPendingIntent = PendingIntent.getBroadcast(context, 1, previousIntent, 0);
			builder.addAction(android.R.drawable.ic_media_rew, "", previousPendingIntent);
		}
		if (infos.isPlaying){
			/* Pause */
			nbControls++;
			Intent pauseIntent = new Intent("music-controls-pause");
			PendingIntent pausePendingIntent = PendingIntent.getBroadcast(context, 1, pauseIntent, 0);
			builder.addAction(android.R.drawable.ic_media_pause, "", pausePendingIntent);
		} else {
			/* Play */
			nbControls++;
			Intent playIntent = new Intent("music-controls-play");
			PendingIntent playPendingIntent = PendingIntent.getBroadcast(context, 1, playIntent, 0);
			builder.addAction(android.R.drawable.ic_media_play, "", playPendingIntent);
		}
		/* Next */
		if (infos.hasNext){
			nbControls++;
			Intent nextIntent = new Intent("music-controls-next");
			PendingIntent nextPendingIntent = PendingIntent.getBroadcast(context, 1, nextIntent, 0);
			builder.addAction(android.R.drawable.ic_media_ff, "", nextPendingIntent);
		}
		/* Close */
		if (infos.hasClose){
			nbControls++;
			Intent destroyIntent = new Intent("music-controls-destroy");
			PendingIntent destroyPendingIntent = PendingIntent.getBroadcast(context, 1, destroyIntent, 0);
			builder.addAction(android.R.drawable.ic_menu_close_clear_cancel, "", destroyPendingIntent);
		}

		// If 5.0 >= use MediaStyle so all actions appear in the compact view
		if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP){
			int[] args = new int[nbControls];
			for (int i = 0; i < nbControls; ++i) {
				args[i] = i;
			}
			builder.setStyle(new Notification.MediaStyle().setShowActionsInCompactView(args));
		}
		this.notificationBuilder = builder;
	}

	// Removes the notification entirely.
	public void destroy(){
		this.notificationManager.cancel(this.notificationID);
	}
}
/*
 * Copyright 2014-present Facebook, Inc.
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may
 * not use this file except in compliance with the License. You may obtain
 * a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations
 * under the License.
 */

package com.facebook.buck.android;

import com.facebook.buck.cxx.CxxHeaders;
import com.facebook.buck.cxx.CxxPlatform;
import com.facebook.buck.cxx.CxxPreprocessables;
import com.facebook.buck.cxx.CxxPreprocessorInput;
import com.facebook.buck.cxx.CxxSource;
import com.facebook.buck.cxx.Linker;
import com.facebook.buck.cxx.NativeLinkableInput;
import com.facebook.buck.cxx.NativeLinkables;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargets;
import com.facebook.buck.model.Pair;
import com.facebook.buck.parser.NoSuchBuildTargetException;
import com.facebook.buck.rules.BuildRule;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildRuleType;
import com.facebook.buck.rules.Description;
import com.facebook.buck.rules.PathSourcePath;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.facebook.buck.rules.macros.EnvironmentVariableMacroExpander;
import com.facebook.buck.rules.macros.MacroExpander;
import com.facebook.buck.rules.macros.MacroHandler;
import com.facebook.buck.util.BuckConstant;
import com.facebook.buck.util.Escaper;
import com.facebook.buck.util.MoreIterables;
import com.facebook.buck.util.MoreStrings;
import com.facebook.buck.util.environment.Platform;
import com.facebook.infer.annotation.SuppressFieldNotInitialized;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Functions;
import com.google.common.base.Joiner;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;

import java.io.IOException;
import java.nio.file.FileVisitOption;
import java.nio.file.FileVisitResult;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.SimpleFileVisitor;
import java.nio.file.attribute.BasicFileAttributes;
import java.util.EnumSet;
import java.util.Map;
import java.util.regex.Pattern;

/**
 * {@link Description} for {@code ndk_library} build rules, which compile native code with the
 * Android NDK's ndk-build. It generates a wrapper {@code Android.%s.mk} makefile that injects
 * preprocessor and linker flags collected from transitive C/C++ library dependencies, then
 * creates an {@link NdkLibrary} rule over the sources found under the rule's base path.
 */
public class NdkLibraryDescription implements Description<NdkLibraryDescription.Arg> {

  public static final BuildRuleType TYPE = BuildRuleType.of("ndk_library");

  // Matches files with NDK-relevant extensions: makefiles, headers, and C/C++ sources.
  private static final Pattern EXTENSIONS_REGEX =
      Pattern.compile(
          ".*\\." +
          MoreStrings.regexPatternForAny("mk", "h", "hpp", "c", "cpp", "cc", "cxx") +
          "$");

  // Only the "env" macro is supported in ndk_library args; it expands host environment variables.
  public static final MacroHandler MACRO_HANDLER = new MacroHandler(
      ImmutableMap.<String, MacroExpander>of(
          "env", new EnvironmentVariableMacroExpander(Platform.detect())
      )
  );

  // NDK version pinned by the build config, if any; passed through to the NdkLibrary rule.
  private final Optional<String> ndkVersion;

  // One C/C++ platform per target CPU (ARM, ARMv7, x86, MIPS) used to collect dep flags.
  private final ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> cxxPlatforms;

  public NdkLibraryDescription(
      Optional<String> ndkVersion,
      ImmutableMap<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> cxxPlatforms) {
    this.ndkVersion = ndkVersion;
    this.cxxPlatforms = Preconditions.checkNotNull(cxxPlatforms);
  }

  @Override
  public BuildRuleType getBuildRuleType() {
    return TYPE;
  }

  @Override
  public Arg createUnpopulatedConstructorArg() {
    return new Arg();
  }

  /**
   * Escapes args for safe embedding in the generated makefile: one round of shell escaping
   * followed by four rounds of makefile-value escaping (see comment below for why four).
   */
  private Iterable<String> escapeForMakefile(Iterable<String> args) {
    ImmutableList.Builder<String> escapedArgs = ImmutableList.builder();

    for (String arg : args) {
      String escapedArg = arg;

      // The ndk-build makefiles make heavy use of the "eval" function to propagate variables,
      // which means we need to perform additional makefile escaping for *every* "eval" that
      // gets used.  Turns out there are three "evals", so we escape a total of four times
      // including the initial escaping.  Since the makefiles eventually hand-off these values
      // to the shell, we first perform bash escaping.
      //
      escapedArg = Escaper.escapeAsShellString(escapedArg);
      for (int i = 0; i < 4; i++) {
        escapedArg = Escaper.escapeAsMakefileValueString(escapedArg);
      }

      // We run ndk-build from the root of the NDK, so fixup paths that use the relative path to
      // the buck out directory.
      if (arg.startsWith(BuckConstant.BUCK_OUTPUT_DIRECTORY)) {
        escapedArg = "$(BUCK_PROJECT_DIR)/" + escapedArg;
      }

      escapedArgs.add(escapedArg);
    }

    return escapedArgs.build();
  }

  /**
   * Maps a Buck target CPU type to the corresponding NDK TARGET_ARCH_ABI string.
   * Throws {@link IllegalStateException} for CPU types with no NDK ABI mapping.
   */
  private String getTargetArchAbi(NdkCxxPlatforms.TargetCpuType cpuType) {
    switch (cpuType) {
      case ARM:
        return "armeabi";
      case ARMV7:
        return "armeabi-v7a";
      case X86:
        return "x86";
      case MIPS:
        return "mips";
      default:
        throw new IllegalStateException();
    }
  }

  /** @return the gen-path location of the wrapper makefile for {@code target}. */
  @VisibleForTesting
  protected static Path getGeneratedMakefilePath(BuildTarget target) {
    return BuildTargets.getGenPath(target, "Android.%s.mk");
  }

  /**
   * @return a {@link BuildRule} which generates a Android.mk which pulls in the local Android.mk
   *     file and also appends relevant preprocessor and linker flags to use C/C++ library deps.
   */
  private Pair<String, Iterable<BuildRule>> generateMakefile(
      BuildRuleParams params,
      BuildRuleResolver resolver) throws NoSuchBuildTargetException {

    SourcePathResolver pathResolver = new SourcePathResolver(resolver);

    ImmutableList.Builder<String> outputLinesBuilder = ImmutableList.builder();
    ImmutableSortedSet.Builder<BuildRule> deps = ImmutableSortedSet.naturalOrder();

    // Emit a per-ABI "ifeq ($(TARGET_ARCH_ABI),...)" section for each configured platform.
    for (Map.Entry<NdkCxxPlatforms.TargetCpuType, NdkCxxPlatform> entry : cxxPlatforms.entrySet()) {
      CxxPlatform cxxPlatform = entry.getValue().getCxxPlatform();

      CxxPreprocessorInput cxxPreprocessorInput;
      try {

        // Collect the preprocessor input for all C/C++ library deps.  We search *through* other
        // NDK library rules.
        cxxPreprocessorInput = CxxPreprocessorInput.concat(
            CxxPreprocessables.getTransitiveCxxPreprocessorInput(
                cxxPlatform,
                params.getDeps(),
                Predicates.instanceOf(NdkLibrary.class)));

      } catch (CxxHeaders.ConflictingHeadersException e) {
        // Re-thrown with the offending build target attached so the user can locate it.
        throw e.getHumanReadableExceptionForBuildTarget(params.getBuildTarget());
      }

      // We add any dependencies from the C/C++ preprocessor input to this rule, even though
      // it technically should be added to the top-level rule.
      deps.addAll(
          pathResolver.filterBuildRuleInputs(
              cxxPreprocessorInput.getIncludes().getNameToPathMap().values()));
      deps.addAll(resolver.getAllRules(cxxPreprocessorInput.getRules()));

      // Add in the transitive preprocessor flags contributed by C/C++ library rules into the
      // NDK build.
      // NOTE(review): the "-isystem" group below iterates getIncludeRoots(), the same accessor
      // as the preceding "-I" group — confirm a system-include-roots accessor wasn't intended.
      Iterable<String> ppflags = Iterables.concat(
          cxxPreprocessorInput.getPreprocessorFlags().get(CxxSource.Type.C),
          MoreIterables.zipAndConcat(
              Iterables.cycle("-I"),
              FluentIterable.from(cxxPreprocessorInput.getHeaderMaps())
                  .transform(Functions.toStringFunction())),
          MoreIterables.zipAndConcat(
              Iterables.cycle("-I"),
              FluentIterable.from(cxxPreprocessorInput.getIncludeRoots())
                  .transform(Functions.toStringFunction())),
          MoreIterables.zipAndConcat(
              Iterables.cycle("-isystem"),
              FluentIterable.from(cxxPreprocessorInput.getIncludeRoots())
                  .transform(Functions.toStringFunction())));
      String localCflags = Joiner.on(' ').join(escapeForMakefile(ppflags));

      // Collect the native linkable input for all C/C++ library deps.  We search *through* other
      // NDK library rules.
      NativeLinkableInput nativeLinkableInput =
          NativeLinkables.getTransitiveNativeLinkableInput(
              cxxPlatform,
              params.getDeps(),
              Linker.LinkableDepType.SHARED,
              Predicates.instanceOf(NdkLibrary.class));

      // We add any dependencies from the native linkable input to this rule, even though
      // it technically should be added to the top-level rule.
      deps.addAll(
          FluentIterable.from(nativeLinkableInput.getArgs())
              .transformAndConcat(com.facebook.buck.rules.args.Arg.getDepsFunction(pathResolver)));

      // Add in the transitive native linkable flags contributed by C/C++ library rules into the
      // NDK build.
      String localLdflags =
          Joiner.on(' ').join(
              escapeForMakefile(com.facebook.buck.rules.args.Arg.stringify(
                  nativeLinkableInput.getArgs())));

      // Write the relevant lines to the generated makefile.
      if (!localCflags.isEmpty() || !localLdflags.isEmpty()) {
        NdkCxxPlatforms.TargetCpuType targetCpuType = entry.getKey();
        String targetArchAbi = getTargetArchAbi(targetCpuType);
        outputLinesBuilder.add(String.format("ifeq ($(TARGET_ARCH_ABI),%s)", targetArchAbi));
        if (!localCflags.isEmpty()) {
          outputLinesBuilder.add("BUCK_DEP_CFLAGS=" + localCflags);
        }
        if (!localLdflags.isEmpty()) {
          outputLinesBuilder.add("BUCK_DEP_LDFLAGS=" + localLdflags);
        }
        outputLinesBuilder.add("endif");
        outputLinesBuilder.add("");
      }
    }

    // GCC-only magic that rewrites non-deterministic parts of builds
    String ndksubst = NdkCxxPlatforms.ANDROID_NDK_ROOT;

    outputLinesBuilder.addAll(
        ImmutableList.copyOf(new String[] {
              // We're evaluated once per architecture, but want to add the cflags only once.
              "ifeq ($(BUCK_ALREADY_HOOKED_CFLAGS),)",
              "BUCK_ALREADY_HOOKED_CFLAGS := 1",
              // Only GCC supports -fdebug-prefix-map
              "ifeq ($(filter clang%,$(NDK_TOOLCHAIN_VERSION)),)",
              // Replace absolute paths with machine-relative ones.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(NDK_ROOT)/=" + ndksubst + "/",
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(abspath $(BUCK_PROJECT_DIR))/=./",
              // Replace paths relative to the build rule with paths relative to the
              // repository root.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(BUCK_PROJECT_DIR)/=./",
              "NDK_APP_CFLAGS += -fdebug-prefix-map=./=" +
                  ".$(subst $(abspath $(BUCK_PROJECT_DIR)),,$(abspath $(CURDIR)))/",
              "NDK_APP_CFLAGS += -fno-record-gcc-switches",
              "ifeq ($(filter 4.6,$(TOOLCHAIN_VERSION)),)",
              // Do not let header canonicalization undo the work we just did above.  Note that GCC
              // 4.6 doesn't support this option, but that's okay, because it doesn't canonicalize
              // headers either.
              "NDK_APP_CPPFLAGS += -fno-canonical-system-headers",
              // If we include the -fdebug-prefix-map in the switches, the "from"-parts of which
              // contain machine-specific paths, we lose determinism.  GCC 4.6 didn't include
              // detailed command line argument information anyway.
              "NDK_APP_CFLAGS += -gno-record-gcc-switches",
              "endif", // !GCC 4.6
              "endif", // !clang
              // Rewrite NDK module paths to import managed modules by relative path instead of by
              // absolute path, but only for modules under the project root.
              "BUCK_SAVED_IMPORTS := $(__ndk_import_dirs)",
              "__ndk_import_dirs :=",
              "$(foreach __dir,$(BUCK_SAVED_IMPORTS),\\",
              "$(call import-add-path-optional,\\",
              "$(if $(filter $(abspath $(BUCK_PROJECT_DIR))%,$(__dir)),\\",
              "$(BUCK_PROJECT_DIR)$(patsubst $(abspath $(BUCK_PROJECT_DIR))%,%,$(__dir)),\\",
              "$(__dir))))",
              "endif", // !already hooked
              // Now add a toolchain directory to replace.  GCC's debug path replacement evaluates
              // candidate replaces last-first (because it internally pushes them all onto a stack
              // and scans the stack first-match-wins), so only add them after the more
              // generic paths.
              "NDK_APP_CFLAGS += -fdebug-prefix-map=$(TOOLCHAIN_PREBUILT_ROOT)/=" +
                  "@ANDROID_NDK_ROOT@/toolchains/$(TOOLCHAIN_NAME)/prebuilt/@BUILD_HOST@/",
        }));

    // Finally, delegate to the rule's own Android.mk.
    outputLinesBuilder.add("include Android.mk");

    String contents = Joiner.on(System.lineSeparator()).join(outputLinesBuilder.build());

    return new Pair<String, Iterable<BuildRule>>(contents, deps.build());
  }

  /**
   * Walks the build rule's directory (following symlinks) and collects every file whose
   * extension matches {@link #EXTENSIONS_REGEX} as a {@link PathSourcePath}, with paths
   * expressed relative to the project root via {@code buildRulePath}.
   */
  @VisibleForTesting
  protected ImmutableSortedSet<SourcePath> findSources(
      final ProjectFilesystem filesystem,
      final Path buildRulePath) {
    final ImmutableSortedSet.Builder<SourcePath> srcs = ImmutableSortedSet.naturalOrder();

    try {
      final Path rootDirectory = filesystem.resolve(buildRulePath);
      Files.walkFileTree(
          rootDirectory,
          EnumSet.of(FileVisitOption.FOLLOW_LINKS),
          /* maxDepth */ Integer.MAX_VALUE,
          new SimpleFileVisitor<Path>() {
            @Override
            public FileVisitResult visitFile(Path file, BasicFileAttributes attrs)
                throws IOException {
              if (EXTENSIONS_REGEX.matcher(file.toString()).matches()) {
                srcs.add(
                    new PathSourcePath(
                        filesystem,
                        buildRulePath.resolve(rootDirectory.relativize(file))));
              }
              return super.visitFile(file, attrs);
            }
          });
    } catch (IOException e) {
      // Surface walk failures as unchecked; callers treat this as a fatal configuration error.
      throw new RuntimeException(e);
    }

    return srcs.build();
  }

  @Override
  public <A extends Arg> NdkLibrary createBuildRule(
      TargetGraph targetGraph,
      final BuildRuleParams params,
      BuildRuleResolver resolver,
      A args) throws NoSuchBuildTargetException {
    Pair<String, Iterable<BuildRule>> makefilePair = generateMakefile(params, resolver);
    return new NdkLibrary(
        // Rules discovered while generating the makefile become extra deps of this rule.
        params.appendExtraDeps(
            ImmutableSortedSet.<BuildRule>naturalOrder()
                .addAll(makefilePair.getSecond())
                .build()),
        new SourcePathResolver(resolver),
        getGeneratedMakefilePath(params.getBuildTarget()),
        makefilePair.getFirst(),
        findSources(params.getProjectFilesystem(), params.getBuildTarget().getBasePath()),
        args.flags.get(),
        args.isAsset.or(false),
        ndkVersion,
        MACRO_HANDLER.getExpander(
            params.getBuildTarget(),
            params.getCellRoots(),
            resolver));
  }

  /** Constructor arg for {@code ndk_library} rules; fields populated from the BUCK file. */
  @SuppressFieldNotInitialized
  public static class Arg {
    // Extra flags passed through to ndk-build.
    public Optional<ImmutableList<String>> flags;
    // Whether the library is packaged as an asset rather than a regular native lib.
    public Optional<Boolean> isAsset;
    // Declared dependencies of this rule.
    public Optional<ImmutableSortedSet<BuildTarget>> deps;
  }
}
/* * Copyright 2010 JBoss Inc * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.drools.guvnor.client.asseteditor.drools; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Set; import org.drools.guvnor.client.asseteditor.EditorWidget; import org.drools.guvnor.client.asseteditor.RuleViewer; import org.drools.guvnor.client.common.AssetFormats; import org.kie.uberfirebootstrap.client.widgets.SmallLabel; import org.drools.guvnor.client.explorer.ClientFactory; import org.drools.guvnor.client.moduleeditor.drools.SuggestionCompletionCache; import org.drools.guvnor.client.rpc.Asset; import org.drools.guvnor.client.rpc.WorkingSetConfigData; import org.drools.ide.common.client.modeldriven.SuggestionCompletionEngine; import org.drools.ide.common.client.factconstraints.helper.ConstraintsContainer; import org.drools.ide.common.client.factconstraints.helper.CustomFormsContainer; import com.google.gwt.event.dom.client.ClickEvent; import com.google.gwt.event.dom.client.ClickHandler; import com.google.gwt.event.logical.shared.BeforeSelectionEvent; import com.google.gwt.event.logical.shared.BeforeSelectionHandler; import com.google.gwt.event.shared.EventBus; import com.google.gwt.user.client.ui.Button; import com.google.gwt.user.client.ui.Composite; import com.google.gwt.user.client.ui.Grid; import com.google.gwt.user.client.ui.ListBox; import com.google.gwt.user.client.ui.ScrollPanel; import 
com.google.gwt.user.client.ui.TabPanel; public class WorkingSetEditor extends Composite implements EditorWidget { private Asset workingSet; private ListBox availFacts = new ListBox( true ); private ListBox validFacts = new ListBox( true ); private ConstraintsContainer cc; private CustomFormsContainer cfc; private FactsConstraintsEditorPanel factsConstraintsgEditorPanel; private CustomFormsEditorPanel customFormsEditorPanel; public WorkingSetEditor(Asset asset, RuleViewer viewer, ClientFactory clientFactory, EventBus eventBus) { this(asset); } public WorkingSetEditor(Asset asset) { if ( !AssetFormats.WORKING_SET.equals( asset.getFormat() ) ) { throw new IllegalArgumentException( "asset must a be a workingset not a: " + asset.getFormat() ); } workingSet = asset; WorkingSetConfigData wsData = (WorkingSetConfigData) workingSet.getContent(); cc = new ConstraintsContainer( wsData.constraints ); cfc = new CustomFormsContainer( wsData.customForms ); refreshWidgets(); setWidth( "100%" ); } private void refreshWidgets() { WorkingSetConfigData wsData = (WorkingSetConfigData) workingSet.getContent(); TabPanel tPanel = new TabPanel(); //tPanel.setWidth(800); ScrollPanel pnl = new ScrollPanel(); // pnl.setAutoWidth(true); //pnl.setClosable(false); pnl.setTitle( "WS Definition" ); //TODO {bauna} i18n // pnl.setAutoHeight(true); pnl.add( buildDoubleList( wsData ) ); tPanel.add( pnl, "WS Definition" ); pnl = new ScrollPanel(); // pnl.setAutoWidth(true); //pnl.setClosable(false); //pnl.setTitle("WS Constraints"); //TODO {bauna} i18n // pnl.setAutoHeight(true); this.factsConstraintsgEditorPanel = new FactsConstraintsEditorPanel( this ); pnl.add( this.factsConstraintsgEditorPanel ); tPanel.add( pnl, "WS Constraints" ); pnl = new ScrollPanel(); // pnl.setAutoWidth(true); //pnl.setClosable(false); pnl.setTitle( "WS Custom Forms" ); //TODO {bauna} i18n // pnl.setAutoHeight(true); this.customFormsEditorPanel = new CustomFormsEditorPanel( this ); pnl.add( this.customFormsEditorPanel ); 
tPanel.add( pnl, "WS Custom Forms" ); tPanel.addBeforeSelectionHandler( new BeforeSelectionHandler<java.lang.Integer>() { public void onBeforeSelection(BeforeSelectionEvent<java.lang.Integer> arg0) { factsConstraintsgEditorPanel.fillSelectedFacts(); customFormsEditorPanel.fillSelectedFacts(); } } ); tPanel.selectTab( 0 ); initWidget( tPanel ); } private Grid buildDoubleList(WorkingSetConfigData wsData) { Grid grid = new Grid( 2, 3 ); SuggestionCompletionEngine sce = SuggestionCompletionCache.getInstance().getEngineFromCache( workingSet.getMetaData().getModuleName() ); boolean filteringFact = sce.isFilteringFacts(); sce.setFilteringFacts( false ); try { Set<String> elem = new HashSet<String>(); availFacts.setVisibleItemCount( 10 ); validFacts.setVisibleItemCount( 10 ); if ( wsData.validFacts != null ) { elem.addAll( Arrays.asList( wsData.validFacts ) ); for ( String factName : wsData.validFacts ) { validFacts.addItem( factName ); } } for ( String factName : sce.getFactTypes() ) { if ( !elem.contains( factName ) ) { availFacts.addItem( factName ); } } Grid btnsPanel = new Grid( 2, 1 ); btnsPanel.setWidget( 0, 0, new Button( ">", new ClickHandler() { public void onClick(ClickEvent sender) { copySelected( availFacts, validFacts ); updateAsset( validFacts ); factsConstraintsgEditorPanel.fillSelectedFacts(); customFormsEditorPanel.fillSelectedFacts(); } } ) ); btnsPanel.setWidget( 1, 0, new Button( "&lt;", new ClickHandler() { public void onClick(ClickEvent sender) { copySelected( validFacts, availFacts ); updateAsset( validFacts ); factsConstraintsgEditorPanel.fillSelectedFacts(); customFormsEditorPanel.fillSelectedFacts(); } } ) ); grid.setWidget( 0, 0, new SmallLabel( "Available Facts" ) ); //TODO i18n grid.setWidget( 0, 1, new SmallLabel( "" ) ); grid.setWidget( 0, 2, new SmallLabel( "WorkingSet Facts" ) ); //TODO i18n grid.setWidget( 1, 0, availFacts ); grid.setWidget( 1, 1, btnsPanel ); grid.setWidget( 1, 2, validFacts ); grid.getColumnFormatter().setWidth( 0, 
"45%" ); grid.getColumnFormatter().setWidth( 0, "10%" ); grid.getColumnFormatter().setWidth( 0, "45%" ); return grid; } finally { sce.setFilteringFacts( filteringFact ); } } /** * This will get the save widgets. */ private void updateAsset(ListBox availFacts) { List<String> l = new ArrayList<String>( availFacts.getItemCount() ); for ( int i = 0; i < availFacts.getItemCount(); i++ ) { l.add( availFacts.getItemText( i ) ); } ((WorkingSetConfigData) workingSet.getContent()).validFacts = l.toArray( new String[l.size()] ); } private void copySelected(final ListBox from, final ListBox to) { int selected; while ( (selected = from.getSelectedIndex()) != -1 ) { to.addItem( from.getItemText( selected ) ); from.removeItem( selected ); factsConstraintsgEditorPanel.notifyValidFactsChanged(); customFormsEditorPanel.notifyValidFactsChanged(); } } public ConstraintsContainer getConstraintsConstrainer() { return cc; } public CustomFormsContainer getCustomFormsContainer() { return cfc; } protected Asset getWorkingSet() { return workingSet; } protected ListBox getValidFactsListBox() { return this.validFacts; } }
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.ignite.internal.processors.database;

import java.util.concurrent.CountDownLatch;
import org.apache.ignite.MemoryMetrics;
import org.apache.ignite.configuration.MemoryPolicyConfiguration;
import org.apache.ignite.internal.processors.cache.persistence.MemoryMetricsImpl;
import org.apache.ignite.internal.processors.cache.ratemetrics.HitRateMetrics;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;

import static java.lang.Thread.sleep;

/**
 * Tests for the {@code allocationRate} metric of {@link MemoryMetricsImpl}: the rate is
 * driven by allocator threads incrementing the allocated-pages counter while a watcher
 * thread counts how many times the observed rate drops.
 */
public class MemoryMetricsSelfTest extends GridCommonAbstractTest {
    /** Metrics instance under test; recreated (and enabled) before each test. */
    private MemoryMetricsImpl memMetrics;

    /** Number of allocator threads to start; set per-test before {@link #startAllocationThreads}. */
    private int threadsCnt = 1;

    /** Allocator threads started by {@link #startAllocationThreads}. */
    private Thread[] allocationThreads;

    /** Watcher thread started by {@link #startWatcherThread}; interrupted on shutdown. */
    private Thread watcherThread;

    /** Rate time interval of 5 seconds. */
    private static final int RATE_TIME_INTERVAL_1 = 5_000;

    /** Rate time interval of 10 seconds. */
    private static final int RATE_TIME_INTERVAL_2 = 10_000;

    /** {@inheritDoc} */
    @Override protected void beforeTest() throws Exception {
        MemoryPolicyConfiguration plcCfg = new MemoryPolicyConfiguration();

        memMetrics = new MemoryMetricsImpl(plcCfg);

        memMetrics.enableMetrics();
    }

    /**
     * Test for allocationRate metric in single-threaded mode.
     * @throws Exception if any happens during test.
     */
    public void testAllocationRateSingleThreaded() throws Exception {
        threadsCnt = 1;
        memMetrics.rateTimeInterval(RATE_TIME_INTERVAL_2);

        CountDownLatch startLatch = new CountDownLatch(1);
        startAllocationThreads(startLatch, 340, 50);
        AllocationRateWatcher watcher = startWatcherThread(startLatch, 20);

        alignWithTimeInterval(RATE_TIME_INTERVAL_2, 5);

        startLatch.countDown();

        joinAllThreads();

        // With a 10s interval and ~17s of allocations, expect 4-5 observed rate drops.
        assertTrue(watcher.rateDropsCntr > 3);

        assertTrue(watcher.rateDropsCntr < 6);
    }

    /**
     * Test for allocationRate metric in multi-threaded mode with short silent period in the middle of the test.
     * @throws Exception if any happens during test.
     */
    public void testAllocationRateMultiThreaded() throws Exception {
        threadsCnt = 4;
        memMetrics.rateTimeInterval(RATE_TIME_INTERVAL_1);

        CountDownLatch startLatch = new CountDownLatch(1);

        startAllocationThreads(startLatch, 7_800, 1);

        AllocationRateWatcher watcher = startWatcherThread(startLatch, 20);

        alignWithTimeInterval(RATE_TIME_INTERVAL_1, 5);

        startLatch.countDown();

        joinAllocationThreads();

        assertTrue("4 or 5 rate drops must be observed: " + watcher.rateDropsCntr,
            watcher.rateDropsCntr == 4 || watcher.rateDropsCntr == 5);

        // NOTE(review): 3 ms is a very short "silent period" — confirm seconds weren't intended.
        sleep(3);

        threadsCnt = 8;

        CountDownLatch restartLatch = new CountDownLatch(1);

        startAllocationThreads(restartLatch, 8_000, 1);

        restartLatch.countDown();

        joinAllThreads();

        assertTrue(watcher.rateDropsCntr > 4);
    }

    /**
     * Test verifies that allocationRate calculation algorithm survives setting new values to rateTimeInterval parameter.
     * @throws Exception if any happens during test.
     */
    public void testAllocationRateTimeIntervalConcurrentChange() throws Exception {
        threadsCnt = 5;
        memMetrics.rateTimeInterval(RATE_TIME_INTERVAL_1);

        CountDownLatch startLatch = new CountDownLatch(1);

        startAllocationThreads(startLatch, 10_000, 1);

        AllocationRateWatcher watcher = startWatcherThread(startLatch, 20);

        alignWithTimeInterval(RATE_TIME_INTERVAL_1, 5);

        startLatch.countDown();

        // Change the rate time interval (to 1, 2 or 3 seconds) while allocations are running.
        for (int i = 0; i < 10; i++) {
            Thread.sleep(25);

            memMetrics.rateTimeInterval(((2 + i * 5) % 3 + 1) * 1000);
        }

        joinAllThreads();

        assertTrue(watcher.rateDropsCntr > 4);
    }

    /**
     * Verifies that allocationRate calculation survives concurrent changes of the subintervals count.
     * @throws Exception if any happens during test.
     */
    public void testAllocationRateSubintervalsConcurrentChange() throws Exception {
        threadsCnt = 5;
        memMetrics.rateTimeInterval(RATE_TIME_INTERVAL_1);

        CountDownLatch startLatch = new CountDownLatch(1);

        startAllocationThreads(startLatch, 10_000, 1);

        AllocationRateWatcher watcher = startWatcherThread(startLatch, 20);

        alignWithTimeInterval(RATE_TIME_INTERVAL_1, 5);

        startLatch.countDown();

        // Change the number of subintervals (to 2, 3 or 4) while allocations are running.
        for (int i = 0; i < 10; i++) {
            Thread.sleep(25);

            memMetrics.subIntervals((2 + i * 5) % 3 + 2);
        }

        joinAllThreads();

        assertTrue(watcher.rateDropsCntr > 4);
    }

    /**
     * As rate metrics {@link HitRateMetrics implementation} is tied to absolute time ticks
     * (not related to the first hit) all tests need to align start time with this sequence of ticks.
     *
     * @param rateTimeInterval Rate time interval.
     * @param size Size.
     */
    private void alignWithTimeInterval(int rateTimeInterval, int size) throws InterruptedException {
        int subIntervalLength = rateTimeInterval / size;

        long subIntCurTime = System.currentTimeMillis() % subIntervalLength;

        // Sleep until the start of the next subinterval tick.
        Thread.sleep(subIntervalLength - subIntCurTime);
    }

    /**
     * Starts the watcher thread that polls the allocation rate and counts rate drops.
     *
     * @param startLatch Start latch.
     * @param watchingDelay Watching delay.
     * @return The watcher runnable, so tests can read {@code rateDropsCntr}.
     */
    private AllocationRateWatcher startWatcherThread(CountDownLatch startLatch, int watchingDelay) {
        AllocationRateWatcher watcher = new AllocationRateWatcher(startLatch, memMetrics, watchingDelay);

        watcherThread = new Thread(watcher);

        watcherThread.start();

        return watcher;
    }

    /**
     * Starts {@link #threadsCnt} allocator threads, each blocked on {@code startLatch}.
     *
     * @param startLatch Start latch.
     * @param iterationsCnt Iterations count.
     * @param allocationsDelay Allocations delay.
     */
    private void startAllocationThreads(CountDownLatch startLatch, int iterationsCnt, int allocationsDelay) {
        assert threadsCnt > 0;

        allocationThreads = new Thread[threadsCnt];

        for (int i = 0; i < threadsCnt; i++) {
            AllocationsIncrementer inc = new AllocationsIncrementer(startLatch, memMetrics, iterationsCnt, allocationsDelay);

            Thread incThread = new Thread(inc);

            incThread.start();

            allocationThreads[i] = incThread;
        }
    }

    /**
     * Waits for all allocator threads, then stops the watcher via interrupt and joins it.
     */
    private void joinAllThreads() throws Exception {
        joinAllocationThreads();

        watcherThread.interrupt();
        watcherThread.join();
    }

    /**
     * Waits for every allocator thread started by {@link #startAllocationThreads} to finish.
     */
    private void joinAllocationThreads() throws Exception {
        assert allocationThreads != null;
        assert allocationThreads.length > 0;

        for (Thread allocationThread : allocationThreads)
            allocationThread.join();
    }

    /**
     * Runnable that increments the allocated-pages counter {@code iterationsCnt} times,
     * sleeping {@code delay} ms between increments, after the start latch is released.
     */
    private static class AllocationsIncrementer implements Runnable {
        /** Latch released by the test to start all workers simultaneously. */
        private final CountDownLatch startLatch;

        /** Metrics whose allocated-pages counter is incremented. */
        private final MemoryMetricsImpl memMetrics;

        /** Number of increments to perform. */
        private final int iterationsCnt;

        /** Sleep between increments, in milliseconds. */
        private final int delay;

        /**
         * @param startLatch Start latch.
         * @param memMetrics Mem metrics.
         * @param iterationsCnt Iterations count.
         * @param delay Delay.
         */
        private AllocationsIncrementer(CountDownLatch startLatch, MemoryMetricsImpl memMetrics, int iterationsCnt, int delay) {
            this.startLatch = startLatch;
            this.memMetrics = memMetrics;
            this.iterationsCnt = iterationsCnt;
            this.delay = delay;
        }

        /** {@inheritDoc} */
        @Override public void run() {
            try {
                startLatch.await();

                for (int i = 0; i < iterationsCnt; i++) {
                    memMetrics.incrementTotalAllocatedPages();

                    sleep(delay);
                }
            }
            catch (InterruptedException ignore) {
                // No-op.
            }
            catch (Exception e) {
                e.printStackTrace();
            }
        }
    }

    /**
     * Runnable that polls {@link MemoryMetrics#getAllocationRate()} every {@code delay} ms
     * until interrupted, counting each time the rate is lower than the previous reading.
     */
    private static class AllocationRateWatcher implements Runnable {
        /** Number of observed rate drops; volatile so the test thread sees updates. */
        private volatile int rateDropsCntr;

        /** Latch released by the test to start watching. */
        private final CountDownLatch startLatch;

        /** Metrics being observed. */
        private final MemoryMetrics memMetrics;

        /** Polling delay, in milliseconds. */
        private final int delay;

        /**
         * @param startLatch Start latch.
         * @param memMetrics Mem metrics.
         * @param delay Delay.
         */
        private AllocationRateWatcher(CountDownLatch startLatch, MemoryMetrics memMetrics, int delay) {
            this.startLatch = startLatch;
            this.memMetrics = memMetrics;
            this.delay = delay;
        }

        /** {@inheritDoc} */
        @Override public void run() {
            try {
                startLatch.await();

                float prevRate = 0;

                while (!Thread.currentThread().isInterrupted()) {
                    if (prevRate > memMetrics.getAllocationRate())
                        rateDropsCntr++;

                    prevRate = memMetrics.getAllocationRate();

                    sleep(delay);
                }
            }
            catch (InterruptedException ignore) {
                // No-op.
            }
            catch (Exception e) {
                e.printStackTrace();
            }
        }
    }
}
/* * Copyright (c) 2007 Adobe Systems Incorporated * * Permission is hereby granted, free of charge, to any person obtaining a copy of * this software and associated documentation files (the "Software"), to deal in * the Software without restriction, including without limitation the rights to * use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of * the Software, and to permit persons to whom the Software is furnished to do so, * subject to the following conditions: * * The above copyright notice and this permission notice shall be included in all * copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS * FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR * COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER * IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
* */ package com.adobe.epubcheck.opf; import java.net.URI; import java.net.URISyntaxException; import java.util.HashMap; import java.util.Iterator; import java.util.Set; import com.adobe.epubcheck.api.EPUBLocation; import com.adobe.epubcheck.api.EPUBProfile; import com.adobe.epubcheck.api.FeatureReport.Feature; import com.adobe.epubcheck.bitmap.BitmapCheckerFactory; import com.adobe.epubcheck.css.CSSCheckerFactory; import com.adobe.epubcheck.dict.SearchKeyMapCheckerFactory; import com.adobe.epubcheck.dtbook.DTBookCheckerFactory; import com.adobe.epubcheck.messages.MessageId; import com.adobe.epubcheck.opf.MetadataSet.Metadata; import com.adobe.epubcheck.opf.ResourceCollection.Roles; import com.adobe.epubcheck.ops.OPSCheckerFactory; import com.adobe.epubcheck.overlay.OverlayCheckerFactory; import com.adobe.epubcheck.util.EPUBVersion; import com.adobe.epubcheck.util.FeatureEnum; import com.adobe.epubcheck.util.PathUtil; import com.adobe.epubcheck.vocab.DCMESVocab; import com.adobe.epubcheck.vocab.PackageVocabs; import com.google.common.base.Optional; import com.google.common.base.Predicate; import com.google.common.base.Strings; import com.google.common.collect.Iterables; import com.google.common.io.Files; public class OPFChecker30 extends OPFChecker implements DocumentValidator { public OPFChecker30(ValidationContext context) { super(context); } @Override protected void initContentCheckerFactoryMap() { HashMap<String, ContentCheckerFactory> map = new HashMap<String, ContentCheckerFactory>(); map.put("application/vnd.epub.search-key-map+xml", SearchKeyMapCheckerFactory.getInstance()); map.put("application/smil+xml", OverlayCheckerFactory.getInstance()); map.put("application/xhtml+xml", OPSCheckerFactory.getInstance()); map.put("application/x-dtbook+xml", DTBookCheckerFactory.getInstance()); map.put("image/jpeg", BitmapCheckerFactory.getInstance()); map.put("image/gif", BitmapCheckerFactory.getInstance()); map.put("image/png", BitmapCheckerFactory.getInstance()); 
map.put("image/svg+xml", OPSCheckerFactory.getInstance()); map.put("text/css", CSSCheckerFactory.getInstance()); contentCheckerFactoryMap.clear(); contentCheckerFactoryMap.putAll(map); } @Override public void initHandler() { opfHandler = new OPFHandler30(context, opfParser); } @Override public void runChecks() { super.runChecks(); checkCollectionsContent(); checkPagination(); checkSemantics(); checkNav(); checkSpecifics(); } @Override public boolean validate() { int fatalErrorsSoFar = report.getFatalErrorCount(); int errorsSoFar = report.getErrorCount(); int warningsSoFar = report.getWarningCount(); super.validate(); checkLinkedResources(); checkCollections(); return fatalErrorsSoFar == report.getFatalErrorCount() && errorsSoFar == report.getErrorCount() && warningsSoFar == report.getWarningCount(); } @Override protected void checkItem(OPFItem item, OPFHandler opfHandler) { String mimeType = item.getMimeType(); if (mimeType == null || mimeType.equals("")) { // report.error(path, item.getLineNumber(), item.getColumnNumber(), // "empty media-type attribute"); return; } if (!mimeType.matches("[a-zA-Z0-9!#$&+-^_]+/[a-zA-Z0-9!#$&+-^_]+")) { // report.error(path, item.getLineNumber(), item.getColumnNumber(), // "invalid content for media-type attribute"); return; } // Check preferred media types String preferredMimeType = getPreferredMediaType(mimeType, item.getPath()); if (preferredMimeType != null) { report.message(MessageId.OPF_090, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), preferredMimeType, mimeType); } if ("application/xhtml+xml".equals(mimeType) && !"xhtml".equals(Files.getFileExtension(item.getPath()))) { report.message(MessageId.HTM_014a, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), item.getPath()); } // Note: item fallback existence is checked in schematron, i.e.: // opfHandler.getItemById(item.getFallback().get()).isPresent() == true } @Override protected void checkItemAfterResourceValidation(OPFItem 
item) { XRefChecker xrefChecker = context.xrefChecker.get(); // Check remote resources String mediatype = item.getMimeType(); if (PathUtil.isRemote(item.getPath()) // audio, video, and fonts can be remote resources && !(isAudioType(mediatype) || isVideoType(mediatype) || "application/x-shockwave-flash".equals(mediatype) || isFontType(mediatype))) { // spine items cannot be remote resources // (except, theoretically, for video/audio/fonts) if (item.isInSpine()) { report.message(MessageId.RSC_006, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), item.getPath()); } // if no direct reference to the resource was found, else if (xrefChecker.getTypes(item.getPath()).isEmpty()) { // if may be allowed when if the resource is retrieved from a script if (context.featureReport.hasFeature(FeatureEnum.HAS_SCRIPTS)) { report.message(MessageId.RSC_006b, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), item.getPath()); } // otherwise, still report it as an error, even if not used else { report.message(MessageId.RSC_006, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), item.getPath()); } } } } @Override protected void checkSpineItem(OPFItem item, OPFHandler opfHandler) { String mimeType = item.getMimeType(); if (item.getProperties() .contains(PackageVocabs.ITEM_VOCAB.get(PackageVocabs.ITEM_PROPERTIES.DATA_NAV))) { report.message(MessageId.OPF_077, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber())); } if (isBlessedItemType(mimeType, version)) { return; } if (!item.getFallback().isPresent()) { report.message(MessageId.OPF_043, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), mimeType); } else if (!new FallbackChecker().checkItemFallbacks(item, opfHandler, false)) { report.message(MessageId.OPF_044, EPUBLocation.create(path, item.getLineNumber(), item.getColumnNumber()), mimeType); } } @Override protected void checkBindings() { Set<String> mimeTypes = 
context.xrefChecker.get().getBindingsMimeTypes(); Iterator<String> it = mimeTypes.iterator(); String mimeType; while (it.hasNext()) { mimeType = it.next(); String handlerId = context.xrefChecker.get().getBindingHandlerId(mimeType); OPFItem handler = opfHandler.getItemById(handlerId).get(); if (!handler.isScripted()) { report.message(MessageId.OPF_046, EPUBLocation.create(handler.getPath(), handler.getLineNumber(), handler.getColumnNumber())); } } } // protected boolean checkItemFallbacks(OPFItem item, OPFHandler opfHandler) { // String fallback = item.getFallback(); // if (fallback != null) { // OPFItem fallbackItem = opfHandler.getItemById(fallback); // if (fallbackItem != null) { // String mimeType = fallbackItem.getMimeType(); // if (mimeType != null) { // if (OPFChecker.isBlessedItemType(mimeType, version)) // return true; // if (checkItemFallbacks(fallbackItem, opfHandler)) // return true; // } // } // } // return false; // } private void checkCollections() { for (ResourceCollection collection : ((OPFHandler30) opfHandler).getCollections().asList()) { if (collection.hasRole(ResourceCollection.Roles.DICTIONARY)) { checkDictCollection(collection); } if (collection.hasRole(ResourceCollection.Roles.INDEX)) { checkIndexCollection(collection); } if (collection.hasRole(ResourceCollection.Roles.PREVIEW)) { checkPreviewCollection(collection); } } } private void checkCollectionsContent() { for (ResourceCollection collection : ((OPFHandler30) opfHandler).getCollections().asList()) { if (collection.hasRole(ResourceCollection.Roles.DICTIONARY)) { checkDictCollectionContent(collection); } } } private void checkDictCollection(ResourceCollection collection) { if (collection.hasRole(Roles.DICTIONARY)) { boolean skmFound = false; for (LinkedResource resource : collection.getResources().asList()) { Optional<OPFItem> item = opfHandler.getItemByPath(resource.getPath()); if (!item.isPresent()) { report.message(MessageId.OPF_081, EPUBLocation.create(path), resource.getPath()); } 
else if ("application/vnd.epub.search-key-map+xml".equals(item.get().getMimeType())) { if (skmFound) { // More than one Search Key Map report.message(MessageId.OPF_082, EPUBLocation.create(path)); } skmFound = true; } else if (!"application/xhtml+xml".equals(item.get().getMimeType())) { report.message(MessageId.OPF_084, EPUBLocation.create(path), resource.getPath()); } } if (!skmFound) { // No Search Key Map report.message(MessageId.OPF_083, EPUBLocation.create(path)); } } } private void checkDictCollectionContent(ResourceCollection collection) { if (collection.hasRole(Roles.DICTIONARY)) { boolean dictFound = false; for (LinkedResource resource : collection.getResources().asList()) { final Optional<OPFItem> item = opfHandler.getItemByPath(resource.getPath()); if (!dictFound && item.isPresent() && "application/xhtml+xml".equals(item.get().getMimeType())) { // Search if this resource was reported as DICTIONARY content dictFound = Iterables.tryFind(context.featureReport.getFeature(FeatureEnum.DICTIONARY), new Predicate<Feature>() { @Override public boolean apply(Feature dict) { return item.get().getPath().equals(dict.getLocation().get().getPath()); } }).isPresent(); } } if (!dictFound) { // No Dictionary content report.message(MessageId.OPF_078, EPUBLocation.create(path)); } } } private void checkIndexCollection(ResourceCollection collection) { if (collection.hasRole(Roles.INDEX) || collection.hasRole(Roles.INDEX_GROUP)) { for (LinkedResource resource : collection.getResources().asList()) { Optional<OPFItem> item = opfHandler.getItemByPath(resource.getPath()); if (!item.isPresent() || !"application/xhtml+xml".equals(item.get().getMimeType())) { report.message(MessageId.OPF_071, EPUBLocation.create(path)); } } for (ResourceCollection childCollection : collection.getCollections().asList()) { checkIndexCollection(childCollection); } } } private void checkPreviewCollection(ResourceCollection collection) { if (collection.hasRole(Roles.PREVIEW)) { for (LinkedResource 
resource : collection.getResources().asList()) { Optional<OPFItem> item = opfHandler.getItemByPath(resource.getPath()); if (!item.isPresent() || !("application/xhtml+xml".equals(item.get().getMimeType()) || "image/svg+xml".equals(item.get().getMimeType()))) { report.message(MessageId.OPF_075, EPUBLocation.create(path)); } else { try { URI uri = new URI(resource.getURI()); if (Optional.fromNullable(uri.getFragment()).or("").startsWith("epubcfi(")) { report.message(MessageId.OPF_076, EPUBLocation.create(path)); } } catch (URISyntaxException e) { report.message(MessageId.RSC_020, EPUBLocation.create(path)); } } } } } private void checkLinkedResources() { LinkedResources links = ((OPFHandler30) opfHandler).getLinkedResources(); for (LinkedResource link : links.asList()) { if (opfHandler.getItemByPath(link.getPath()).isPresent()) { report.message(MessageId.OPF_067, EPUBLocation.create(path), link.getPath()); } } } private void checkPagination() { if (context.profile == EPUBProfile.EDUPUB || context.pubTypes.contains(OPFData.DC_TYPE_EDUPUB)) { if (context.featureReport.hasFeature(FeatureEnum.PAGE_BREAK)) { // Check there is a page list if (!context.featureReport.hasFeature(FeatureEnum.PAGE_LIST)) { report.message(MessageId.NAV_003, EPUBLocation.create(path)); } // Search a "dc:source" metadata expression Set<Metadata> dcSourceMetas = ((OPFHandler30) opfHandler).getMetadata() .getPrimary(DCMESVocab.VOCAB.get(DCMESVocab.PROPERTIES.SOURCE)); if (dcSourceMetas.isEmpty()) { report.message(MessageId.OPF_066, EPUBLocation.create(path)); } else { // Search a "source-of : pagination" expression refining a "dc:source" if (!MetadataSet.tryFindInRefines(dcSourceMetas, PackageVocabs.META_VOCAB.get(PackageVocabs.META_PROPERTIES.SOURCE_OF), Optional.of("pagination")).isPresent()) { report.message(MessageId.OPF_066, EPUBLocation.create(path)); } } } } } private void checkSemantics() { if (context.profile == EPUBProfile.EDUPUB || context.pubTypes.contains(OPFData.DC_TYPE_EDUPUB)) { if 
(context.featureReport.hasFeature(FeatureEnum.HAS_MICRODATA) && !context.featureReport.hasFeature(FeatureEnum.HAS_RDFA)) { report.message(MessageId.HTM_051, context.featureReport .getFeature(FeatureEnum.HAS_MICRODATA).iterator().next().getLocation().get()); } } } private void checkNav() { if (context.profile == EPUBProfile.EDUPUB || context.pubTypes.contains(OPFData.DC_TYPE_EDUPUB)) { Set<Feature> sections = context.featureReport.getFeature(FeatureEnum.SECTIONS); Set<Feature> tocLinks = context.featureReport.getFeature(FeatureEnum.TOC_LINKS); if (sections.size() != tocLinks.size()) { report.message(MessageId.NAV_004, tocLinks.isEmpty() ? EPUBLocation.create(path) : tocLinks.iterator().next().getLocation().get()); } if (context.featureReport.hasFeature(FeatureEnum.AUDIO) && !context.featureReport.hasFeature(FeatureEnum.LOA)) { report.message(MessageId.NAV_005, tocLinks.isEmpty() ? EPUBLocation.create(path) : tocLinks.iterator().next().getLocation().get()); } if (context.featureReport.hasFeature(FeatureEnum.FIGURE) && !context.featureReport.hasFeature(FeatureEnum.LOI)) { report.message(MessageId.NAV_006, tocLinks.isEmpty() ? EPUBLocation.create(path) : tocLinks.iterator().next().getLocation().get()); } if (context.featureReport.hasFeature(FeatureEnum.TABLE) && !context.featureReport.hasFeature(FeatureEnum.LOT)) { report.message(MessageId.NAV_007, tocLinks.isEmpty() ? EPUBLocation.create(path) : tocLinks.iterator().next().getLocation().get()); } if (context.featureReport.hasFeature(FeatureEnum.VIDEO) && !context.featureReport.hasFeature(FeatureEnum.LOV)) { report.message(MessageId.NAV_008, tocLinks.isEmpty() ? 
EPUBLocation.create(path) : tocLinks.iterator().next().getLocation().get()); } } } private void checkSpecifics() { if (context.featureReport.hasFeature(FeatureEnum.DICTIONARY) && !context.pubTypes.contains(OPFData.DC_TYPE_DICT)) { report.message(MessageId.OPF_079, context.featureReport.getFeature(FeatureEnum.DICTIONARY) .iterator().next().getLocation().get()); } if (context.profile == EPUBProfile.DICT || context.pubTypes.contains(OPFData.DC_TYPE_DICT)) { if (!context.featureReport.hasFeature(FeatureEnum.DICTIONARY)) { report.message(MessageId.OPF_078, EPUBLocation.create(path)); } } } public static boolean isAudioType(String type) { return type != null && type.startsWith("audio/"); } public static boolean isBlessedAudioType(String type) { return type.equals("audio/mpeg") || type.equals("audio/mp4"); } public static boolean isVideoType(String type) { return type != null && type.startsWith("video/"); } public static boolean isBlessedVideoType(String type) { return isVideoType(type); } public static boolean isCommonVideoType(String type) { return "video/h264".equals(type) || "video/webm".equals(type) || "video/mp4".equals(type); } public static boolean isFontType(String type) { return type.startsWith("font/") || type.startsWith("application/font-") || type.equals("application/vnd.ms-opentype"); } public static boolean isBlessedFontType(String type) { return type.equals("font/otf") || type.equals("font/ttf") || type.equals("font/woff") || type.equals("font/woff2") || type.equals("application/font-sfnt") || type.equals("application/font-woff") || type.equals("application/vnd.ms-opentype") || type.equals("image/svg+xml"); } public static boolean isBlessedScriptType(String type) { return type.equals("text/javascript") || type.equals("application/javascript"); } public static boolean isCoreMediaType(String type) { return isBlessedAudioType(type) || isBlessedVideoType(type) || isBlessedFontType(type) || isBlessedItemType(type, EPUBVersion.VERSION_3) || 
isBlessedImageType(type) || isBlessedScriptType(type) || type.equals("application/pls+xml") || type.equals("application/smil+xml") || type.equals("image/svg+xml"); } public static String getPreferredMediaType(String type, String path) { switch (Strings.nullToEmpty(type)) { case "application/font-sfnt": return (path.endsWith(".ttf")) ? "font/ttf" : (path.endsWith(".otf")) ? "font/otf" : "font/(ttf|otf)"; case "application/vnd.ms-opentype": return "font/otf"; case "application/font-woff": return "font/woff"; case "text/javascript": return "application/javascript"; default: return null; } } }
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.oozie.command.coord;

import java.util.Date;
import java.util.List;

import org.apache.oozie.CoordinatorActionBean;
import org.apache.oozie.CoordinatorJobBean;
import org.apache.oozie.SLAEventBean;
import org.apache.oozie.client.CoordinatorJob;
import org.apache.oozie.client.CoordinatorJob.Timeunit;
import org.apache.oozie.service.Services;
import org.apache.oozie.store.CoordinatorStore;
import org.apache.oozie.store.SLAStore;
import org.apache.oozie.store.StoreException;
import org.apache.oozie.test.XTestCase;
import org.apache.oozie.util.DateUtils;

/**
 * Tests for {@link CoordActionMaterializeCommand}: verifies that coordinator
 * actions are materialized for a job's time window, and that a pause time
 * limits (or suppresses) materialization.
 *
 * Uses the deprecated {@link CoordinatorStore}/{@link SLAStore} APIs, hence
 * the class-level deprecation suppression.
 */
@SuppressWarnings("deprecation")
public class TestCoordActionMaterializeCommand extends XTestCase {
    private Services services;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        services = new Services();
        services.init();
        cleanUpDBTables();
    }

    @Override
    protected void tearDown() throws Exception {
        services.destroy();
        super.tearDown();
    }

    /** Materializes a daily job and checks the first action (and its SLA events) exist. */
    public void testActionMater() throws Exception {
        String jobId = "0000000-" + new Date().getTime() + "-testActionMater-C";
        Date startTime = DateUtils.parseDateOozieTZ("2009-03-06T010:00Z");
        Date endTime = DateUtils.parseDateOozieTZ("2009-03-11T10:00Z");
        addRecordToJobTable(jobId, startTime, endTime);
        new CoordActionMaterializeCommand(jobId, startTime, endTime).call();
        CoordinatorActionBean action = checkCoordAction(jobId + "@1");
        // FIX(review): the returned action was previously assigned but never
        // checked; assert it so a missing action fails the test explicitly.
        assertNotNull(action);
    }

    /** Pause time before the 2nd nominal time: only 1 action materialized. */
    public void testActionMaterWithPauseTime1() throws Exception {
        String jobId = "0000000-" + new Date().getTime() + "-testActionMater-C";
        Date startTime = DateUtils.parseDateOozieTZ("2009-03-06T10:00Z");
        Date endTime = DateUtils.parseDateOozieTZ("2009-03-06T10:14Z");
        Date pauseTime = DateUtils.parseDateOozieTZ("2009-03-06T10:04Z");
        addRecordToJobTable(jobId, startTime, endTime, pauseTime);
        new CoordActionMaterializeCommand(jobId, startTime, endTime).call();
        checkCoordActions(jobId, 1, null);
    }

    /** Pause time before the 3rd nominal time: 2 actions materialized. */
    public void testActionMaterWithPauseTime2() throws Exception {
        String jobId = "0000000-" + new Date().getTime() + "-testActionMater-C";
        Date startTime = DateUtils.parseDateOozieTZ("2009-03-06T10:00Z");
        Date endTime = DateUtils.parseDateOozieTZ("2009-03-06T10:14Z");
        Date pauseTime = DateUtils.parseDateOozieTZ("2009-03-06T10:08Z");
        addRecordToJobTable(jobId, startTime, endTime, pauseTime);
        new CoordActionMaterializeCommand(jobId, startTime, endTime).call();
        checkCoordActions(jobId, 2, null);
    }

    /** Pause time before the start time: nothing materialized, job stays RUNNING. */
    public void testActionMaterWithPauseTime3() throws Exception {
        String jobId = "0000000-" + new Date().getTime() + "-testActionMater-C";
        Date startTime = DateUtils.parseDateOozieTZ("2009-03-06T10:00Z");
        Date endTime = DateUtils.parseDateOozieTZ("2009-03-06T10:14Z");
        Date pauseTime = DateUtils.parseDateOozieTZ("2009-03-06T09:58Z");
        addRecordToJobTable(jobId, startTime, endTime, pauseTime);
        new CoordActionMaterializeCommand(jobId, startTime, endTime).call();
        checkCoordActions(jobId, 0, CoordinatorJob.Status.RUNNING);
    }

    /**
     * Inserts a daily PREMATER coordinator job (with input/output events and
     * SLA info) into the store.
     */
    private void addRecordToJobTable(String jobId, Date startTime, Date endTime) throws StoreException {
        CoordinatorStore store = new CoordinatorStore(false);
        CoordinatorJobBean coordJob = new CoordinatorJobBean();
        coordJob.setId(jobId);
        coordJob.setAppName("testApp");
        coordJob.setStartTime(startTime);
        coordJob.setEndTime(endTime);
        coordJob.setTimeUnit(Timeunit.DAY);
        coordJob.setAppPath("testAppPath");
        coordJob.setStatus(CoordinatorJob.Status.PREMATER);
        coordJob.setCreatedTime(new Date()); // TODO: Do we need that?
        coordJob.setLastModifiedTime(new Date());
        coordJob.setUser("testUser");
        coordJob.setGroup("testGroup");
        coordJob.setTimeZone("America/Los_Angeles");

        String confStr = "<configuration></configuration>";
        coordJob.setConf(confStr);

        String appXml = "<coordinator-app xmlns='uri:oozie:coordinator:0.1' xmlns:sla='uri:oozie:sla:0.1' name='NAME' frequency=\"1\" start='2009-03-06T010:00Z' end='2009-03-11T10:00Z' timezone='America/Los_Angeles' freq_timeunit='DAY' end_of_duration='NONE'>";
        appXml += "<controls>";
        appXml += "<timeout>10</timeout>";
        appXml += "<concurrency>2</concurrency>";
        appXml += "<execution>LIFO</execution>";
        appXml += "</controls>";
        appXml += "<input-events>";
        appXml += "<data-in name='A' dataset='a'>";
        appXml += "<dataset name='a' frequency='7' initial-instance='2009-02-01T01:00Z' timezone='UTC' freq_timeunit='DAY' end_of_duration='NONE'>";
        appXml += "<uri-template>file:///tmp/coord/workflows/${YEAR}/${MONTH}/${DAY}</uri-template>";
        appXml += "</dataset>";
        appXml += "<instance>${coord:current(0)}</instance>";
        appXml += "<instance>${coord:latest(-1)}</instance>";
        //appXml += "<start-instance>${coord:current(-2)}</start-instance>";
        //appXml += "<end-instance>${coord:current(0)}</end-instance>";
        appXml += "</data-in>";
        appXml += "</input-events>";
        appXml += "<output-events>";
        appXml += "<data-out name='LOCAL_A' dataset='local_a'>";
        appXml += "<dataset name='local_a' frequency='7' initial-instance='2009-02-01T01:00Z' timezone='UTC' freq_timeunit='DAY' end_of_duration='NONE'>";
        appXml += "<uri-template>file:///tmp/coord/workflows/${YEAR}/${DAY}</uri-template>";
        appXml += "</dataset>";
        appXml += "<instance>${coord:current(-1)}</instance>";
        appXml += "</data-out>";
        appXml += "</output-events>";
        appXml += "<action>";
        appXml += "<workflow>";
        appXml += "<app-path>hdfs:///tmp/workflows/</app-path>";
        appXml += "<configuration>";
        appXml += "<property>";
        appXml += "<name>inputA</name>";
        appXml += "<value>${coord:dataIn('A')}</value>";
        appXml += "</property>";
        appXml += "<property>";
        appXml += "<name>inputB</name>";
        appXml += "<value>${coord:dataOut('LOCAL_A')}</value>";
        appXml += "</property>";
        appXml += "</configuration>";
        appXml += "</workflow>";
        appXml += " <sla:info>"
                // + " <sla:client-id>axonite-blue</sla:client-id>"
                + " <sla:app-name>test-app</sla:app-name>"
                + " <sla:nominal-time>${coord:nominalTime()}</sla:nominal-time>"
                + " <sla:should-start>5</sla:should-start>"
                + " <sla:should-end>120</sla:should-end>"
                + " <sla:notification-msg>Notifying User for ${coord:nominalTime()} nominal time </sla:notification-msg>"
                + " <sla:alert-contact>abc@example.com</sla:alert-contact>"
                + " <sla:dev-contact>abc@example.com</sla:dev-contact>"
                + " <sla:qa-contact>abc@example.com</sla:qa-contact>"
                + " <sla:se-contact>abc@example.com</sla:se-contact>"
                + "</sla:info>";
        appXml += "</action>";
        appXml += "</coordinator-app>";
        coordJob.setJobXml(appXml);
        coordJob.setLastActionNumber(0);
        coordJob.setFrequency("1");
        try {
            coordJob.setEndTime(DateUtils.parseDateOozieTZ("2009-03-11T10:00Z"));
        }
        catch (Exception e) {
            e.printStackTrace();
            fail("Could not set end time");
        }

        try {
            store.beginTrx();
            store.insertCoordinatorJob(coordJob);
            store.commitTrx();
        }
        catch (StoreException se) {
            se.printStackTrace();
            store.rollbackTrx();
            fail("Unable to insert the test job record to table");
            throw se;
        }
        finally {
            store.closeTrx();
        }
    }

    /**
     * Loads the given coordinator action from the store and verifies that SLA
     * events were registered; fails the test if either is missing.
     *
     * @return the stored action, or never returns normally on failure
     */
    private CoordinatorActionBean checkCoordAction(String actionId) throws StoreException {
        CoordinatorStore store = new CoordinatorStore(false);
        try {
            CoordinatorActionBean action = store.getCoordinatorAction(actionId, false);
            SLAStore slaStore = new SLAStore(store);
            long lastSeqId[] = new long[1];
            List<SLAEventBean> slaEvents = slaStore.getSLAEventListNewerSeqLimited(0, 10, lastSeqId);
            if (slaEvents.size() == 0) {
                fail("Unable to GET any record of sequence id greater than 0");
            }
            return action;
        }
        catch (StoreException se) {
            se.printStackTrace();
            fail("Action ID " + actionId + " was not stored properly in db");
        }
        return null;
    }

    /**
     * Inserts a minute-frequency PREMATER coordinator job with the given
     * pause time into the store (no input/output events).
     */
    private void addRecordToJobTable(String jobId, Date startTime, Date endTime, Date pauseTime) throws StoreException {
        CoordinatorStore store = new CoordinatorStore(false);
        CoordinatorJobBean coordJob = new CoordinatorJobBean();
        coordJob.setId(jobId);
        coordJob.setAppName("testApp");
        coordJob.setStartTime(startTime);
        coordJob.setEndTime(endTime);
        coordJob.setPauseTime(pauseTime);
        coordJob.setTimeUnit(Timeunit.MINUTE);
        coordJob.setAppPath("testAppPath");
        coordJob.setStatus(CoordinatorJob.Status.PREMATER);
        coordJob.setCreatedTime(new Date()); // TODO: Do we need that?
        coordJob.setLastModifiedTime(new Date());
        coordJob.setUser("testUser");
        coordJob.setGroup("testGroup");
        coordJob.setTimeZone("America/Los_Angeles");

        String confStr = "<configuration></configuration>";
        coordJob.setConf(confStr);

        String appXml = "<coordinator-app xmlns='uri:oozie:coordinator:0.1' xmlns:sla='uri:oozie:sla:0.1' name='NAME' frequency=\"5\" start='2009-03-06T010:00Z' end='2009-03-06T10:14Z' timezone='America/Los_Angeles' freq_timeunit='MINUTE' end_of_duration='NONE'>";
        appXml += "<controls>";
        appXml += "<timeout>10</timeout>";
        appXml += "<concurrency>2</concurrency>";
        appXml += "<execution>LIFO</execution>";
        appXml += "</controls>";
        appXml += "<action>";
        appXml += "<workflow>";
        appXml += "<app-path>hdfs:///tmp/workflows/</app-path>";
        appXml += "<configuration>";
        appXml += "</configuration>";
        appXml += "</workflow>";
        appXml += " <sla:info>"
                // + " <sla:client-id>axonite-blue</sla:client-id>"
                + " <sla:app-name>test-app</sla:app-name>"
                + " <sla:nominal-time>${coord:nominalTime()}</sla:nominal-time>"
                + " <sla:should-start>5</sla:should-start>"
                + " <sla:should-end>120</sla:should-end>"
                + " <sla:notification-msg>Notifying User for ${coord:nominalTime()} nominal time </sla:notification-msg>"
                + " <sla:alert-contact>abc@example.com</sla:alert-contact>"
                + " <sla:dev-contact>abc@example.com</sla:dev-contact>"
                + " <sla:qa-contact>abc@example.com</sla:qa-contact>"
                + " <sla:se-contact>abc@example.com</sla:se-contact>"
                + "</sla:info>";
        appXml += "</action>";
        appXml += "</coordinator-app>";
        coordJob.setJobXml(appXml);
        coordJob.setLastActionNumber(0);
        coordJob.setFrequency("5");

        try {
            store.beginTrx();
            store.insertCoordinatorJob(coordJob);
            store.commitTrx();
        }
        catch (StoreException se) {
            se.printStackTrace();
            store.rollbackTrx();
            fail("Unable to insert the test job record to table");
            throw se;
        }
        finally {
            store.closeTrx();
        }
    }

    /**
     * Asserts the number of materialized actions for a job and, optionally,
     * the job's status.
     */
    private void checkCoordActions(String jobId, int number, CoordinatorJob.Status status) throws StoreException {
        CoordinatorStore store = new CoordinatorStore(false);
        try {
            int coordActionsCount = store.getActionsForCoordinatorJob(jobId, false);
            if (coordActionsCount != number) {
                fail("Should have " + number + " actions created for job " + jobId);
            }
            if (status != null) {
                CoordinatorJob job = store.getCoordinatorJob(jobId, false);
                if (job.getStatus() != status) {
                    fail("Job status " + job.getStatus() + " should be " + status);
                }
            }
        }
        catch (StoreException se) {
            se.printStackTrace();
            fail("Job ID " + jobId + " was not stored properly in db");
        }
    }
}
/* The following code was generated by JFlex 1.7.0 tweaked for IntelliJ platform */ package com.intellij.jsonpath.lexer; import com.intellij.lexer.FlexLexer; import com.intellij.psi.tree.IElementType; import com.intellij.jsonpath.psi.JsonPathTypes; import com.intellij.psi.TokenType; import it.unimi.dsi.fastutil.ints.IntArrayList; /** * This class is a scanner generated by * <a href="http://www.jflex.de/">JFlex</a> 1.7.0 * from the specification file <tt>_JsonPathLexer.flex</tt> */ public class _JsonPathLexer implements FlexLexer { /** This character denotes the end of file */ public static final int YYEOF = -1; /** initial size of the lookahead buffer */ private static final int ZZ_BUFFERSIZE = 16384; /** lexical states */ public static final int YYINITIAL = 0; public static final int WILDCARD_EXPECTED = 2; public static final int REGEX_EXPECTED = 4; public static final int SEGMENT_EXPRESSION = 6; public static final int SCRIPT_EXPRESSION = 8; public static final int NESTED_PATH = 10; /** * ZZ_LEXSTATE[l] is the state in the DFA for the lexical state l * ZZ_LEXSTATE[l+1] is the state in the DFA for the lexical state l * at the beginning of a line * l is of the form l = 2*k, k a non negative integer */ private static final int ZZ_LEXSTATE[] = { 0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 0, 0 }; /** * Translates characters to character classes * Chosen bits are [11, 6, 4] * Total runtime size is 14208 bytes */ public static int ZZ_CMAP(int ch) { return ZZ_CMAP_A[(ZZ_CMAP_Y[(ZZ_CMAP_Z[ch>>10]<<6)|((ch>>4)&0x3f)]<<4)|(ch&0xf)]; } /* The ZZ_CMAP_Z table has 1088 entries */ static final char ZZ_CMAP_Z[] = zzUnpackCMap( "\1\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7\1\10\2\11\1\12\1\13\6\14\1\15\23\14\1\16"+ "\1\14\1\17\1\20\12\14\1\21\10\11\1\22\1\23\1\24\1\25\1\26\1\27\1\30\1\31\1"+ "\32\1\33\1\34\1\35\2\11\1\14\1\36\3\11\1\37\10\11\1\40\1\41\5\14\1\42\1\43"+ "\11\11\1\44\2\11\1\45\5\11\1\46\4\11\1\47\1\50\4\11\51\14\1\51\3\14\1\52\1"+ "\53\4\14\1\54\12\11\1\55\u0381\11"); /* The ZZ_CMAP_Y table 
has 2944 entries */ static final char ZZ_CMAP_Y[] = zzUnpackCMap( "\1\0\1\1\1\2\1\3\1\4\1\5\1\6\1\7\1\10\1\1\1\11\1\12\1\13\1\14\1\13\1\14\34"+ "\13\1\15\1\16\1\17\10\1\1\20\1\21\1\13\1\22\4\13\1\23\10\13\1\24\12\13\1\25"+ "\1\13\1\26\1\25\1\13\1\27\4\1\1\13\1\30\1\31\2\1\2\13\1\30\1\1\1\32\1\25\5"+ "\13\1\33\1\34\1\35\1\1\1\36\1\13\1\1\1\37\5\13\1\40\1\41\1\42\1\13\1\30\1"+ "\43\1\13\1\44\1\45\1\1\1\13\1\46\4\1\1\13\1\47\4\1\1\50\2\13\1\51\1\1\1\52"+ "\1\16\1\25\1\53\1\54\1\55\1\56\1\57\1\60\2\16\1\61\1\54\1\55\1\62\1\1\1\63"+ "\1\1\1\64\1\65\1\22\1\55\1\66\1\1\1\67\1\16\1\70\1\71\1\54\1\55\1\66\1\1\1"+ "\60\1\16\1\41\1\72\1\73\1\74\1\75\1\1\1\67\2\1\1\76\1\36\1\55\1\51\1\1\1\77"+ "\1\16\1\1\1\100\1\36\1\55\1\101\1\1\1\57\1\16\1\102\1\76\1\36\1\13\1\103\1"+ "\57\1\104\1\16\1\42\1\105\1\106\1\13\1\107\1\110\3\1\1\25\2\13\1\111\1\110"+ "\3\1\1\112\1\113\1\114\1\115\1\116\1\117\2\1\1\67\3\1\1\120\1\13\1\121\1\1"+ "\1\122\7\1\2\13\1\30\1\123\1\1\1\124\1\125\1\126\1\127\1\1\2\13\1\130\2\13"+ "\1\131\24\13\1\132\1\133\2\13\1\132\2\13\1\134\1\135\1\14\3\13\1\135\3\13"+ "\1\30\2\1\1\13\1\1\5\13\1\136\1\25\45\13\1\137\1\13\1\140\1\30\4\13\1\30\1"+ "\141\1\142\1\16\1\13\1\16\1\13\1\16\1\142\1\67\3\13\1\143\1\1\1\144\4\1\5"+ "\13\1\27\1\145\1\13\1\146\4\13\1\40\1\13\1\147\3\1\1\13\1\150\1\151\2\13\1"+ "\152\1\13\1\75\3\1\1\13\1\110\3\13\1\151\4\1\1\153\5\1\1\105\2\13\1\143\1"+ "\154\3\1\1\155\1\13\1\156\1\42\2\13\1\40\1\1\2\13\1\143\1\1\1\37\1\42\1\13"+ "\1\150\1\46\5\1\1\157\1\160\14\13\4\1\21\13\1\136\2\13\1\136\1\161\1\13\1"+ "\150\3\13\1\162\1\163\1\164\1\121\1\163\1\165\1\1\1\166\2\1\1\167\1\1\1\170"+ "\1\1\1\121\6\1\1\171\1\172\1\173\1\174\1\175\3\1\1\176\147\1\2\13\1\147\2"+ "\13\1\147\10\13\1\177\1\200\2\13\1\130\3\13\1\201\1\1\1\13\1\110\4\202\4\1"+ "\1\123\35\1\1\203\2\1\1\204\1\25\4\13\1\205\1\25\4\13\1\131\1\105\1\13\1\150"+ "\1\25\4\13\1\147\1\1\1\13\1\30\3\1\1\13\40\1\133\13\1\40\4\1\135\13\1\40\2"+ 
"\1\10\13\1\121\4\1\2\13\1\150\20\13\1\121\1\13\1\206\1\1\2\13\1\147\1\123"+ "\1\13\1\150\4\13\1\40\2\1\1\207\1\210\5\13\1\211\1\13\1\147\1\27\3\1\1\207"+ "\1\212\1\13\1\31\1\1\3\13\1\143\1\210\2\13\1\143\3\1\1\213\1\42\1\13\1\40"+ "\1\13\1\110\1\1\1\13\1\121\1\50\2\13\1\31\1\123\1\1\1\214\1\215\2\13\1\46"+ "\1\1\1\216\1\1\1\13\1\217\3\13\1\220\1\221\1\222\1\30\1\64\1\223\1\224\1\202"+ "\2\13\1\131\1\40\7\13\1\31\1\1\72\13\1\143\1\13\1\225\2\13\1\152\20\1\26\13"+ "\1\150\6\13\1\75\2\1\1\110\1\226\1\55\1\227\1\230\6\13\1\16\1\1\1\155\25\13"+ "\1\150\1\1\4\13\1\210\2\13\1\27\2\1\1\152\7\1\1\214\7\13\1\121\2\1\1\25\1"+ "\30\1\25\1\30\1\231\4\13\1\147\1\232\1\233\2\1\1\234\1\13\1\14\1\235\2\150"+ "\2\1\7\13\1\30\30\1\1\13\1\121\3\13\1\67\2\1\2\13\1\1\1\13\1\236\2\13\1\40"+ "\1\13\1\150\2\13\1\237\3\1\11\13\1\150\1\1\2\13\1\237\1\13\1\152\2\13\1\27"+ "\3\13\1\143\11\1\23\13\1\110\1\13\1\40\1\27\11\1\1\240\2\13\1\241\1\13\1\40"+ "\1\13\1\110\1\13\1\147\4\1\1\13\1\242\1\13\1\40\1\13\1\75\4\1\3\13\1\243\4"+ "\1\1\67\1\244\1\13\1\143\2\1\1\13\1\121\1\13\1\121\2\1\1\120\1\13\1\151\1"+ "\1\3\13\1\40\1\13\1\40\1\13\1\31\1\13\1\16\6\1\4\13\1\46\3\1\3\13\1\31\3\13"+ "\1\31\60\1\1\155\2\13\1\27\4\1\1\155\2\13\2\1\1\13\1\46\1\1\1\155\1\13\1\110"+ "\2\1\2\13\1\245\1\155\2\13\1\31\1\246\1\247\2\1\1\13\1\22\1\152\5\1\1\250"+ "\1\251\1\46\2\13\1\147\2\1\1\71\1\54\1\55\1\66\1\1\1\252\1\16\11\1\3\13\1"+ "\151\1\253\3\1\3\13\1\1\1\254\13\1\2\13\1\147\2\1\1\255\2\1\3\13\1\1\1\256"+ "\3\1\2\13\1\30\5\1\1\13\1\75\30\1\4\13\1\1\1\123\34\1\3\13\1\46\20\1\1\55"+ "\1\13\1\147\1\1\1\67\2\1\1\210\1\13\67\1\71\13\1\75\16\1\14\13\1\143\53\1"+ "\2\13\1\147\75\1\44\13\1\110\33\1\43\13\1\46\1\13\1\147\7\1\1\13\1\150\1\1"+ "\3\13\1\1\1\143\1\1\1\155\1\257\1\13\67\1\4\13\1\151\1\67\3\1\1\155\4\1\1"+ "\67\1\1\76\13\1\121\1\1\57\13\1\31\20\1\1\16\77\1\6\13\1\30\1\121\1\46\1\75"+ "\66\1\5\13\1\214\3\13\1\142\1\260\1\261\1\262\3\13\1\263\1\264\1\13\1\265"+ 
"\1\266\1\36\24\13\1\267\1\13\1\36\1\131\1\13\1\131\1\13\1\214\1\13\1\214\1"+ "\147\1\13\1\147\1\13\1\55\1\13\1\55\1\13\1\216\3\1\14\13\1\151\3\1\4\13\1"+ "\143\113\1\1\262\1\13\1\270\1\271\1\272\1\273\1\274\1\275\1\276\1\152\1\277"+ "\1\152\24\1\55\13\1\110\2\1\103\13\1\151\15\13\1\150\150\13\1\16\25\1\41\13"+ "\1\150\36\1"); /* The ZZ_CMAP_A table has 3072 entries */ static final char ZZ_CMAP_A[] = zzUnpackCMap( "\11\0\1\3\1\11\2\3\1\11\22\0\1\3\1\41\1\13\1\0\1\1\1\0\1\47\1\12\1\40\1\23"+ "\1\22\1\20\1\51\1\5\1\16\1\14\1\6\11\7\1\50\1\0\1\45\1\42\1\44\1\52\1\2\4"+ "\15\1\17\25\15\1\21\1\10\1\24\1\0\1\4\1\0\1\34\3\15\1\32\1\33\5\15\1\27\1"+ "\15\1\25\3\15\1\31\1\35\1\30\1\26\5\15\1\36\1\46\1\37\1\43\6\0\1\3\12\0\1"+ "\3\11\0\1\4\12\0\1\4\4\0\1\4\5\0\27\4\1\0\12\4\4\0\14\4\16\0\5\4\7\0\1\4\1"+ "\0\1\4\1\0\5\4\1\0\2\4\2\0\4\4\1\0\1\4\6\0\1\4\1\0\3\4\1\0\1\4\1\0\4\4\1\0"+ "\23\4\1\0\13\4\10\0\6\4\1\0\26\4\2\0\1\4\6\0\10\4\10\0\13\4\5\0\3\4\33\0\6"+ "\4\1\0\1\4\17\0\2\4\7\0\2\4\12\0\3\4\2\0\2\4\1\0\16\4\15\0\11\4\13\0\1\4\30"+ "\0\6\4\4\0\2\4\4\0\1\4\5\0\6\4\4\0\1\4\11\0\1\4\3\0\1\4\7\0\11\4\7\0\5\4\1"+ "\0\10\4\6\0\26\4\3\0\1\4\2\0\1\4\7\0\11\4\4\0\10\4\2\0\2\4\2\0\26\4\1\0\7"+ "\4\1\0\1\4\3\0\4\4\3\0\1\4\20\0\1\4\15\0\2\4\1\0\1\4\5\0\6\4\4\0\2\4\1\0\2"+ "\4\1\0\2\4\1\0\2\4\17\0\4\4\1\0\1\4\3\0\3\4\20\0\11\4\1\0\2\4\1\0\2\4\1\0"+ "\5\4\3\0\1\4\2\0\1\4\30\0\1\4\13\0\10\4\2\0\1\4\3\0\1\4\1\0\6\4\3\0\3\4\1"+ "\0\4\4\3\0\2\4\1\0\1\4\1\0\2\4\3\0\2\4\3\0\3\4\3\0\14\4\13\0\10\4\1\0\2\4"+ "\10\0\3\4\5\0\1\4\4\0\10\4\1\0\6\4\1\0\5\4\3\0\1\4\3\0\2\4\15\0\13\4\2\0\1"+ "\4\6\0\3\4\10\0\1\4\5\0\22\4\3\0\10\4\1\0\11\4\1\0\1\4\2\0\7\4\11\0\1\4\1"+ "\0\2\4\15\0\2\4\1\0\1\4\2\0\2\4\1\0\1\4\2\0\1\4\6\0\4\4\1\0\7\4\1\0\3\4\1"+ "\0\1\4\1\0\1\4\2\0\2\4\1\0\4\4\1\0\2\4\11\0\1\4\2\0\5\4\1\0\1\4\25\0\14\4"+ "\1\0\24\4\13\0\5\4\22\0\7\4\4\0\4\4\3\0\1\4\3\0\2\4\7\0\3\4\4\0\15\4\14\0"+ "\1\4\1\0\6\4\1\0\1\4\5\0\1\4\2\0\13\4\1\0\15\4\1\0\4\4\2\0\7\4\1\0\1\4\1\0"+ 
"\4\4\2\0\1\4\1\0\4\4\2\0\7\4\1\0\1\4\1\0\4\4\2\0\16\4\2\0\6\4\2\0\15\4\2\0"+ "\1\4\1\3\17\4\1\0\10\4\7\0\15\4\1\0\6\4\23\0\1\4\4\0\1\4\3\0\5\4\2\0\22\4"+ "\1\0\1\4\5\0\17\4\1\0\16\4\2\0\5\4\13\0\14\4\13\0\1\4\15\0\7\4\7\0\16\4\15"+ "\0\2\4\11\0\4\4\1\0\4\4\3\0\2\4\11\0\10\4\1\0\1\4\1\0\1\4\1\0\1\4\1\0\6\4"+ "\1\0\7\4\1\0\1\4\3\0\3\4\1\0\7\4\3\0\4\4\2\0\6\4\4\0\13\3\15\0\2\3\5\0\1\3"+ "\17\0\1\3\1\0\1\4\15\0\1\4\2\0\1\4\4\0\1\4\2\0\12\4\1\0\1\4\3\0\5\4\6\0\1"+ "\4\1\0\1\4\1\0\1\4\1\0\4\4\1\0\13\4\2\0\4\4\5\0\5\4\4\0\1\4\4\0\2\4\13\0\5"+ "\4\6\0\4\4\3\0\2\4\14\0\10\4\7\0\10\4\1\0\7\4\1\0\1\3\4\0\2\4\12\0\5\4\5\0"+ "\2\4\3\0\7\4\6\0\3\4\12\0\2\4\13\0\11\4\2\0\27\4\2\0\7\4\1\0\3\4\1\0\4\4\1"+ "\0\4\4\2\0\6\4\3\0\1\4\1\0\1\4\2\0\5\4\1\0\12\4\12\0\5\4\1\0\3\4\1\0\10\4"+ "\4\0\7\4\3\0\1\4\3\0\2\4\1\0\1\4\3\0\2\4\2\0\5\4\2\0\1\4\1\0\1\4\30\0\3\4"+ "\3\0\6\4\2\0\6\4\2\0\6\4\11\0\7\4\4\0\5\4\3\0\5\4\5\0\1\4\1\0\10\4\1\0\5\4"+ "\1\0\1\4\1\0\2\4\1\0\2\4\1\0\12\4\6\0\12\4\2\0\6\4\2\0\6\4\2\0\6\4\2\0\3\4"+ "\3\0\14\4\1\0\16\4\1\0\2\4\1\0\2\4\1\0\10\4\6\0\4\4\4\0\16\4\2\0\1\4\1\0\14"+ "\4\1\0\2\4\3\0\1\4\2\0\4\4\1\0\2\4\12\0\10\4\6\0\6\4\1\0\3\4\1\0\12\4\3\0"+ "\1\4\12\0\4\4\25\0\1\4\1\0\1\4\3\0\7\4\1\0\1\4\1\0\4\4\1\0\17\4\1\0\2\4\14"+ "\0\3\4\7\0\4\4\11\0\2\4\1\0\1\4\20\0\4\4\10\0\1\4\13\0\10\4\5\0\3\4\2\0\1"+ "\4\2\0\2\4\2\0\4\4\1\0\14\4\1\0\1\4\1\0\7\4\1\0\21\4\1\0\4\4\2\0\10\4\1\0"+ "\7\4\1\0\14\4\1\0\4\4\1\0\5\4\1\0\1\4\3\0\14\4\2\0\10\4\1\0\2\4\1\0\1\4\2"+ "\0\1\4\1\0\12\4\1\0\4\4\1\0\1\4\1\0\1\4\6\0\1\4\4\0\1\4\1\0\1\4\1\0\1\4\1"+ "\0\3\4\1\0\2\4\1\0\1\4\2\0\1\4\1\0\1\4\1\0\1\4\1\0\1\4\1\0\1\4\1\0\2\4\1\0"+ "\1\4\2\0\4\4\1\0\7\4\1\0\4\4\1\0\4\4\1\0\1\4\1\0\12\4\1\0\5\4\1\0\3\4\1\0"+ "\5\4\1\0\5\4"); /** * Translates DFA states to action switch labels. 
*/
// NOTE(review): everything below is JFlex-generated (packed DFA tables plus
// run-length decoders). Do not hand-edit the table contents — regenerate from
// the .flex grammar instead. Comments added for readability only.
private static final int [] ZZ_ACTION = zzUnpackAction();

// Run-length encoded action table: pairs of (count, value) characters.
private static final String ZZ_ACTION_PACKED_0 =
  "\5\0\1\1\1\2\1\3\1\4\1\5\1\6\2\7"+
  "\1\10\1\11\1\12\1\13\1\14\1\15\1\16\1\4"+
  "\1\17\1\20\1\21\1\22\1\1\1\23\1\24\2\1"+
  "\1\25\1\26\1\27\1\30\1\31\1\6\1\32\1\33"+
  "\1\34\1\35\3\1\1\36\1\37\1\40\1\41\1\42"+
  "\3\40\1\0\2\43\2\10\2\11\1\44\1\45\1\46"+
  "\1\47\1\50\1\51\1\52\1\53\2\31\1\7\1\54"+
  "\3\0\3\40\1\43\1\55\1\56\3\0\3\40\1\57"+
  "\1\60\1\0\1\57\1\60\1\40\2\61";

private static int [] zzUnpackAction() {
  int [] result = new int[93];
  int offset = 0;
  offset = zzUnpackAction(ZZ_ACTION_PACKED_0, offset, result);
  return result;
}

// Decodes (count, value) pairs from the packed string into result[offset..].
private static int zzUnpackAction(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int count = packed.charAt(i++);
    int value = packed.charAt(i++);
    do result[j++] = value; while (--count > 0);
  }
  return j;
}


/**
 * Translates a state to a row index in the transition table
 */
private static final int [] ZZ_ROWMAP = zzUnpackRowMap();

// Row-map entries are full ints packed as (high char, low char) pairs.
private static final String ZZ_ROWMAP_PACKED_0 =
  "\0\0\0\53\0\126\0\201\0\254\0\327\0\327\0\327"+
  "\0\u0102\0\u012d\0\327\0\u0158\0\u0183\0\u01ae\0\u01d9\0\327"+
  "\0\u0204\0\327\0\327\0\327\0\327\0\327\0\327\0\327"+
  "\0\u022f\0\u025a\0\u0285\0\u02b0\0\u02db\0\u0306\0\327\0\327"+
  "\0\327\0\u0331\0\u035c\0\u0387\0\u03b2\0\327\0\327\0\327"+
  "\0\u03dd\0\u0408\0\u0433\0\327\0\327\0\u045e\0\327\0\327"+
  "\0\u0489\0\u04b4\0\u04df\0\u0158\0\u050a\0\u0535\0\u0560\0\327"+
  "\0\u058b\0\327\0\327\0\u05b6\0\u05e1\0\327\0\327\0\327"+
  "\0\327\0\327\0\u060c\0\u0637\0\u0662\0\327\0\u068d\0\u06b8"+
  "\0\u06e3\0\u070e\0\u0739\0\u0764\0\u078f\0\327\0\327\0\u07ba"+
  "\0\u07e5\0\u0810\0\u083b\0\u0866\0\u0891\0\327\0\327\0\u08bc"+
  "\0\u045e\0\u045e\0\u08e7\0\327\0\u045e";

private static int [] zzUnpackRowMap() {
  int [] result = new int[93];
  int offset = 0;
  offset = zzUnpackRowMap(ZZ_ROWMAP_PACKED_0, offset, result);
  return result;
}

// Rebuilds each int from two chars: high 16 bits then low 16 bits.
private static int zzUnpackRowMap(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int high = packed.charAt(i++) << 16;
    result[j++] = high | packed.charAt(i++);
  }
  return j;
}

/**
 * The transition table of the DFA
 */
private static final int [] ZZ_TRANS = zzUnpackTrans();

// Run-length encoded transition table; values are stored off-by-one so that
// zzUnpackTrans can decrement them (yielding -1 for "no transition").
private static final String ZZ_TRANS_PACKED_0 =
  "\1\6\1\7\1\10\1\11\1\12\1\13\1\14\1\15"+
  "\1\6\1\11\1\16\1\17\1\20\1\12\1\21\1\12"+
  "\1\22\1\23\1\24\1\25\1\6\11\12\1\26\1\27"+
  "\1\30\1\31\1\32\1\6\1\33\1\34\1\35\1\36"+
  "\1\37\1\40\1\41\22\6\1\24\33\6\1\42\5\6"+
  "\1\42\2\6\1\43\41\6\1\42\1\6\1\44\1\14"+
  "\1\15\1\6\1\42\1\16\1\17\1\20\1\6\1\45"+
  "\1\6\1\22\1\46\1\24\1\47\1\50\1\51\2\6"+
  "\1\52\2\6\1\53\2\6\1\26\1\27\1\30\1\31"+
  "\1\32\1\6\1\33\1\34\1\35\1\36\1\37\1\40"+
  "\1\41\1\6\1\54\1\55\1\42\1\56\1\13\1\14"+
  "\1\15\1\6\1\42\1\16\1\17\1\20\1\56\1\45"+
  "\1\56\1\22\1\46\1\57\1\47\1\60\1\61\2\56"+
  "\1\62\2\56\1\63\2\56\1\26\1\27\1\30\1\31"+
  "\1\32\1\6\1\33\1\34\1\35\1\36\1\37\1\40"+
  "\1\41\56\0\1\11\5\0\1\11\45\0\4\12\5\0"+
  "\1\12\1\0\1\12\5\0\11\12\23\0\2\64\6\0"+
  "\1\65\1\66\12\0\1\66\26\0\2\15\6\0\1\65"+
  "\1\66\12\0\1\66\20\0\10\16\1\67\1\0\1\70"+
  "\40\16\10\17\1\71\1\0\1\17\1\72\37\17\6\0"+
  "\2\65\6\0\1\73\76\0\1\74\52\0\1\75\1\76"+
  "\51\0\1\77\52\0\1\100\56\0\1\101\53\0\1\102"+
  "\6\0\1\42\5\0\1\42\41\0\10\43\1\103\3\43"+
  "\1\104\36\43\7\0\1\105\51\0\2\65\6\0\1\106"+
  "\62\0\1\107\55\0\1\110\55\0\1\111\22\0\4\56"+
  "\5\0\1\56\1\0\1\56\5\0\11\56\21\0\4\56"+
  "\5\0\1\56\1\0\1\56\5\0\1\56\1\112\7\56"+
  "\21\0\4\56\5\0\1\56\1\0\1\56\5\0\4\56"+
  "\1\113\4\56\21\0\4\56\5\0\1\56\1\0\1\56"+
  "\5\0\7\56\1\114\1\56\23\0\2\65\7\0\1\66"+
  "\12\0\1\66\25\0\3\115\10\0\1\115\32\0\11\16"+
  "\1\0\41\16\11\17\1\0\41\17\42\0\1\116\52\0"+
  "\1\117\10\0\10\43\1\103\42\43\15\0\1\104\1\0"+
  "\1\104\5\0\11\104\23\0\2\105\72\0\1\120\51\0"+
  "\1\121\53\0\1\122\27\0\4\56\5\0\1\56\1\0"+
  "\1\56\5\0\2\56\1\123\6\56\21\0\4\56\5\0"+
  "\1\56\1\0\1\56\5\0\1\56\1\124\7\56\21\0"+
  "\4\56\5\0\1\56\1\0\1\56\5\0\2\56\1\125"+
  "\6\56\23\0\2\115\72\0\1\126\55\0\1\127\55\0"+
  "\1\130\21\0\4\56\5\0\1\56\1\0\1\56\5\0"+
  "\2\56\1\131\6\56\21\0\4\56\5\0\1\56\1\0"+
  "\1\56\5\0\5\56\1\132\3\56\21\0\4\56\5\0"+
  "\1\56\1\0\1\56\5\0\10\56\1\133\47\0\1\134"+
  "\24\0\4\56\5\0\1\56\1\0\1\56\5\0\5\56"+
  "\1\135\3\56\15\0";

private static int [] zzUnpackTrans() {
  int [] result = new int[2322];
  int offset = 0;
  offset = zzUnpackTrans(ZZ_TRANS_PACKED_0, offset, result);
  return result;
}

// Same RLE decode as zzUnpackAction, but shifts each value down by one.
private static int zzUnpackTrans(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
  while (i < l) {
    int count = packed.charAt(i++);
    int value = packed.charAt(i++);
    value--;
    do result[j++] = value; while (--count > 0);
  }
  return j;
}


/* error codes */
private static final int ZZ_UNKNOWN_ERROR = 0;
private static final int ZZ_NO_MATCH = 1;
private static final int ZZ_PUSHBACK_2BIG = 2;

/* error messages for the codes above */
private static final String[] ZZ_ERROR_MSG = {
  "Unknown internal scanner error",
  "Error: could not match input",
  "Error: pushback value was too large"
};

/**
 * ZZ_ATTRIBUTE[aState] contains the attributes of state <code>aState</code>
 */
private static final int [] ZZ_ATTRIBUTE = zzUnpackAttribute();

// Run-length encoded per-state attribute flags (bit 0 = accepting,
// bit 3 = final/early-exit; see the (zzAttributes & 1)/(& 8) tests in advance()).
private static final String ZZ_ATTRIBUTE_PACKED_0 =
  "\5\0\3\11\2\1\1\11\4\1\1\11\1\1\7\11"+
  "\6\1\3\11\4\1\3\11\3\1\2\11\1\1\2\11"+
  "\3\1\1\0\3\1\1\11\1\1\2\11\2\1\5\11"+
  "\3\1\1\11\3\0\4\1\2\11\3\0\3\1\2\11"+
  "\1\0\3\1\1\11\1\1";

private static int [] zzUnpackAttribute() {
  int [] result = new int[93];
  int offset = 0;
  offset = zzUnpackAttribute(ZZ_ATTRIBUTE_PACKED_0, offset, result);
  return result;
}

// Decodes (count, value) pairs from the packed string into result[offset..].
private static int zzUnpackAttribute(String packed, int offset, int [] result) {
  int i = 0;       /* index in packed string  */
  int j = offset;  /* index in unpacked array */
  int l = packed.length();
while (i < l) {
  int count = packed.charAt(i++);
  int value = packed.charAt(i++);
  do result[j++] = value; while (--count > 0);
}
return j;
}

/** the input device */
private java.io.Reader zzReader;

/** the current state of the DFA */
private int zzState;

/** the current lexical state */
private int zzLexicalState = YYINITIAL;

/** this buffer contains the current text to be matched and is the source of the yytext() string */
private CharSequence zzBuffer = "";

/** the textposition at the last accepting state */
private int zzMarkedPos;

/** the current text position in the buffer */
private int zzCurrentPos;

/** startRead marks the beginning of the yytext() string in the buffer */
private int zzStartRead;

/** endRead marks the last character in the buffer, that has been read from input */
private int zzEndRead;

/**
 * zzAtBOL == true <=> the scanner is currently at the beginning of a line
 */
private boolean zzAtBOL = true;

/** zzAtEOF == true <=> the scanner is at the EOF */
private boolean zzAtEOF;

/** denotes if the user-EOF-code has already been executed */
private boolean zzEOFDone;

/* user code: */
public _JsonPathLexer() {
  this((java.io.Reader)null);
}

// Stack of saved lexical states; nested constructs push here and restore the
// previous state on exit (see pushState/popState).
private final IntArrayList myStateStack = new IntArrayList();

// Clears the saved-state stack; called from zzDoEOF so a reused lexer starts clean.
protected void resetInternal() {
  myStateStack.clear();
}

// Remembers the current state, then switches the lexer to newState.
private void pushState(int newState) {
  myStateStack.add(yystate());
  yybegin(newState);
}

// Restores the most recently pushed state; no-op when the stack is empty.
private void popState() {
  if (myStateStack.isEmpty()) return;
  int state = myStateStack.removeInt(myStateStack.size() - 1);
  yybegin(state);
}

/**
 * Creates a new scanner
 *
 * @param   in  the java.io.Reader to read input from.
 */
public _JsonPathLexer(java.io.Reader in) {
  this.zzReader = in;
}

/**
 * Unpacks the compressed character translation table.
 *
 * @param packed   the packed character translation table
 * @return         the unpacked character translation table
 */
private static char [] zzUnpackCMap(String packed) {
  // First pass: sum the run lengths to size the output array.
  int size = 0;
  for (int i = 0, length = packed.length(); i < length; i += 2) {
    size += packed.charAt(i);
  }
  char[] map = new char[size];
  int i = 0;  /* index in packed string  */
  int j = 0;  /* index in unpacked array */
  while (i < packed.length()) {
    int count = packed.charAt(i++);
    char value = packed.charAt(i++);
    do map[j++] = value; while (--count > 0);
  }
  return map;
}

/** Start offset (inclusive) of the current token within the buffer. */
public final int getTokenStart() {
  return zzStartRead;
}

/** End offset (exclusive) of the current token within the buffer. */
public final int getTokenEnd() {
  return getTokenStart() + yylength();
}

// Re-targets the lexer at buffer[start, end) in the given initial state.
public void reset(CharSequence buffer, int start, int end, int initialState) {
  zzBuffer = buffer;
  zzCurrentPos = zzMarkedPos = zzStartRead = start;
  zzAtEOF = false;
  zzAtBOL = true;
  zzEndRead = end;
  yybegin(initialState);
}

/**
 * Refills the input buffer.
 *
 * @return      {@code false}, iff there was new input.
 *
 * @exception   java.io.IOException  if any I/O-Error occurs
 */
private boolean zzRefill() throws java.io.IOException {
  // Always reports EOF: this lexer works on a pre-set CharSequence (see reset),
  // never on the zzReader stream.
  return true;
}


/**
 * Returns the current lexical state.
 */
public final int yystate() {
  return zzLexicalState;
}


/**
 * Enters a new lexical state
 *
 * @param newState the new lexical state
 */
public final void yybegin(int newState) {
  zzLexicalState = newState;
}


/**
 * Returns the text matched by the current regular expression.
 */
public final CharSequence yytext() {
  return zzBuffer.subSequence(zzStartRead, zzMarkedPos);
}


/**
 * Returns the character at position {@code pos} from the
 * matched text.
 *
 * It is equivalent to yytext().charAt(pos), but faster
 *
 * @param pos the position of the character to fetch.
 *            A value from 0 to yylength()-1.
 *
 * @return the character at position pos
 */
public final char yycharat(int pos) {
  return zzBuffer.charAt(zzStartRead+pos);
}


/**
 * Returns the length of the matched text region.
 */
public final int yylength() {
  return zzMarkedPos-zzStartRead;
}


/**
 * Reports an error that occurred while scanning.
 *
 * In a wellformed scanner (no or only correct usage of
 * yypushback(int) and a match-all fallback rule) this method
 * will only be called with things that "Can't Possibly Happen".
 * If this method is called, something is seriously wrong
 * (e.g. a JFlex bug producing a faulty scanner etc.).
 *
 * Usual syntax/scanner level error handling should be done
 * in error fallback rules.
 *
 * @param   errorCode  the code of the errormessage to display
 */
private void zzScanError(int errorCode) {
  String message;
  try {
    message = ZZ_ERROR_MSG[errorCode];
  }
  catch (ArrayIndexOutOfBoundsException e) {
    // Unknown code: fall back to the generic message.
    message = ZZ_ERROR_MSG[ZZ_UNKNOWN_ERROR];
  }
  // Deliberately an Error, not an Exception: reaching here means the generated
  // scanner itself is broken.
  throw new Error(message);
}


/**
 * Pushes the specified amount of characters back into the input stream.
 *
 * They will be read again by then next call of the scanning method
 *
 * @param number  the number of characters to be read again.
 *                This number must not be greater than yylength()!
 */
public void yypushback(int number)  {
  if ( number > yylength() )
    zzScanError(ZZ_PUSHBACK_2BIG);
  zzMarkedPos -= number;
}


/**
 * Contains user EOF-code, which will be executed exactly once,
 * when the end of file is reached
 */
private void zzDoEOF() {
  if (!zzEOFDone) {
    zzEOFDone = true;
    resetInternal();
  }
}


/**
 * Resumes scanning until the next regular expression is matched,
 * the end of input is encountered or an I/O-Error occurs.
*
 * @return      the next token
 * @exception   java.io.IOException  if any I/O-Error occurs
 */
public IElementType advance() throws java.io.IOException {
  int zzInput;
  int zzAction;

  // cached fields:
  int zzCurrentPosL;
  int zzMarkedPosL;
  int zzEndReadL = zzEndRead;
  CharSequence zzBufferL = zzBuffer;

  int [] zzTransL = ZZ_TRANS;
  int [] zzRowMapL = ZZ_ROWMAP;
  int [] zzAttrL = ZZ_ATTRIBUTE;

  while (true) {
    // Start the next token where the previous match ended.
    zzMarkedPosL = zzMarkedPos;

    zzAction = -1;

    zzCurrentPosL = zzCurrentPos = zzStartRead = zzMarkedPosL;

    zzState = ZZ_LEXSTATE[zzLexicalState];

    // set up zzAction for empty match case:
    int zzAttributes = zzAttrL[zzState];
    if ( (zzAttributes & 1) == 1 ) {
      zzAction = zzState;
    }

    // Run the DFA until it has no transition; zzAction remembers the last
    // accepting state seen (longest-match semantics).
    zzForAction: {
      while (true) {

        if (zzCurrentPosL < zzEndReadL) {
          zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/);
          zzCurrentPosL += Character.charCount(zzInput);
        }
        else if (zzAtEOF) {
          zzInput = YYEOF;
          break zzForAction;
        }
        else {
          // store back cached positions
          zzCurrentPos = zzCurrentPosL;
          zzMarkedPos = zzMarkedPosL;
          boolean eof = zzRefill();
          // get translated positions and possibly new buffer
          zzCurrentPosL = zzCurrentPos;
          zzMarkedPosL = zzMarkedPos;
          zzBufferL = zzBuffer;
          zzEndReadL = zzEndRead;
          if (eof) {
            zzInput = YYEOF;
            break zzForAction;
          }
          else {
            zzInput = Character.codePointAt(zzBufferL, zzCurrentPosL/*, zzEndReadL*/);
            zzCurrentPosL += Character.charCount(zzInput);
          }
        }

        // -1 means no transition for this input class: stop and act on the
        // last accepting state.
        int zzNext = zzTransL[ zzRowMapL[zzState] + ZZ_CMAP(zzInput) ];
        if (zzNext == -1) break zzForAction;
        zzState = zzNext;

        zzAttributes = zzAttrL[zzState];
        if ( (zzAttributes & 1) == 1 ) {
          zzAction = zzState;
          zzMarkedPosL = zzCurrentPosL;
          // Attribute bit 8: this state cannot be left; stop immediately.
          if ( (zzAttributes & 8) == 8 ) break zzForAction;
        }

      }
    }

    // store back cached position
    zzMarkedPos = zzMarkedPosL;

    if (zzInput == YYEOF && zzStartRead == zzCurrentPos) {
      zzAtEOF = true;
      zzDoEOF();
      return null;
    }
    else {
      // Map the accepting state to its rule action; negative zzAction means
      // no rule matched and falls to the default (scanner error).
      switch (zzAction < 0 ? zzAction : ZZ_ACTION[zzAction]) {
        // Unmatched character: bad token at top level, otherwise re-read it in
        // the enclosing state.
        case 1:
          { if (myStateStack.isEmpty()) { return TokenType.BAD_CHARACTER; } yypushback(1); popState();
          }
        // fall through
        case 50: break;
        case 2:
          { return JsonPathTypes.ROOT_CONTEXT;
          }
        // fall through
        case 51: break;
        case 3:
          { return JsonPathTypes.EVAL_CONTEXT;
          }
        // fall through
        case 52: break;
        case 4:
          { if (myStateStack.isEmpty()) { return TokenType.BAD_CHARACTER; } yypushback(1); popState();
          }
        // fall through
        case 53: break;
        case 5:
          { return JsonPathTypes.IDENTIFIER;
          }
        // fall through
        case 54: break;
        case 6:
          { return JsonPathTypes.MINUS_OP;
          }
        // fall through
        case 55: break;
        case 7:
          { return JsonPathTypes.INTEGER_NUMBER;
          }
        // fall through
        case 56: break;
        case 8:
          { return JsonPathTypes.SINGLE_QUOTED_STRING;
          }
        // fall through
        case 57: break;
        case 9:
          { return JsonPathTypes.DOUBLE_QUOTED_STRING;
          }
        // fall through
        case 58: break;
        case 10:
          { return JsonPathTypes.DIVIDE_OP;
          }
        // fall through
        case 59: break;
        case 11:
          { pushState(WILDCARD_EXPECTED); return JsonPathTypes.DOT;
          }
        // fall through
        case 60: break;
        case 12:
          { return JsonPathTypes.PLUS_OP;
          }
        // fall through
        case 61: break;
        case 13:
          { pushState(SEGMENT_EXPRESSION); return JsonPathTypes.LBRACKET;
          }
        // fall through
        case 62: break;
        case 14:
          { return JsonPathTypes.WILDCARD;
          }
        // fall through
        case 63: break;
        case 15:
          { return JsonPathTypes.LBRACE;
          }
        // fall through
        case 64: break;
        case 16:
          { return JsonPathTypes.RBRACE;
          }
        // fall through
        case 65: break;
        case 17:
          { pushState(SCRIPT_EXPRESSION); return JsonPathTypes.LPARENTH;
          }
        // fall through
        case 66: break;
        case 18:
          { return JsonPathTypes.NOT_OP;
          }
        // fall through
        case 67: break;
        case 19:
          { return JsonPathTypes.GT_OP;
          }
        // fall through
        case 68: break;
        case 20:
          { return JsonPathTypes.LT_OP;
          }
        // fall through
        case 69: break;
        case 21:
          { return JsonPathTypes.COLON;
          }
        // fall through
        case 70: break;
        case 22:
          { return JsonPathTypes.COMMA;
          }
        // fall through
        case 71: break;
        case 23:
          { return JsonPathTypes.FILTER_OPERATOR;
          }
        // fall through
        case 72: break;
        case 24:
          { return TokenType.WHITE_SPACE;
          }
        // fall through
        case 73: break;
        case 25:
          { return JsonPathTypes.REGEX_STRING;
          }
        // fall through
        case 74: break;
        case 26:
          { return JsonPathTypes.DOT;
          }
        // fall through
        case 75: break;
        case 27:
          { return JsonPathTypes.LBRACKET;
          }
        // fall through
        case 76: break;
        case 28:
          { popState(); return JsonPathTypes.RPARENTH;
          }
        // fall through
        case 77: break;
        case 29:
          { popState(); return JsonPathTypes.RBRACKET;
          }
        // fall through
        case 78: break;
        case 30:
          { pushState(NESTED_PATH); return JsonPathTypes.ROOT_CONTEXT;
          }
        // fall through
        case 79: break;
        case 31:
          { pushState(NESTED_PATH); return JsonPathTypes.EVAL_CONTEXT;
          }
        // fall through
        case 80: break;
        case 32:
          { return JsonPathTypes.NAMED_OP;
          }
        // fall through
        case 81: break;
        case 33:
          { return JsonPathTypes.MULTIPLY_OP;
          }
        // fall through
        case 82: break;
        case 34:
          { return JsonPathTypes.RBRACKET;
          }
        // fall through
        case 83: break;
        case 35:
          { return JsonPathTypes.DOUBLE_NUMBER;
          }
        // fall through
        case 84: break;
        case 36:
          { pushState(WILDCARD_EXPECTED); return JsonPathTypes.RECURSIVE_DESCENT;
          }
        // fall through
        case 85: break;
        case 37:
          { return JsonPathTypes.NE_OP;
          }
        // fall through
        case 86: break;
        case 38:
          { return JsonPathTypes.EQ_OP;
          }
        // fall through
        case 87: break;
        case 39:
          { pushState(REGEX_EXPECTED); return JsonPathTypes.RE_OP;
          }
        // fall through
        case 88: break;
        case 40:
          { return JsonPathTypes.GE_OP;
          }
        // fall through
        case 89: break;
        case 41:
          { return JsonPathTypes.LE_OP;
          }
        // fall through
        case 90: break;
        case 42:
          { return JsonPathTypes.OR_OP;
          }
        // fall through
        case 91: break;
        case 43:
          { return JsonPathTypes.AND_OP;
          }
        // fall through
        case 92: break;
        case 44:
          { return JsonPathTypes.RECURSIVE_DESCENT;
          }
        // fall through
        case 93: break;
        case 45:
          { return JsonPathTypes.ENE_OP;
          }
        // fall through
        case 94: break;
        case 46:
          { return JsonPathTypes.EEQ_OP;
          }
        // fall through
        case 95: break;
        case 47:
          { return JsonPathTypes.NULL;
          }
        // fall through
        case 96: break;
        case 48:
          { return JsonPathTypes.TRUE;
          }
        // fall through
        case 97: break;
        case 49:
          { return JsonPathTypes.FALSE;
          }
        // fall through
        case 98: break;
        default:
          zzScanError(ZZ_NO_MATCH);
      }
    }
  }
}
}
/****************************************************************************** * Compilation: javac TarjanSCC.java * Execution: Java TarjanSCC V E * Dependencies: Digraph.java Stack.java TransitiveClosure.java StdOut.java * Data files: http://algs4.cs.princeton.edu/42digraph/tinyDG.txt * http://algs4.cs.princeton.edu/42digraph/mediumDG.txt * http://algs4.cs.princeton.edu/42digraph/largeDG.txt * * Compute the strongly-connected components of a digraph using * Tarjan's algorithm. * * Runs in O(E + V) time. * * % java TarjanSCC tinyDG.txt * 5 components * 1 * 0 2 3 4 5 * 9 10 11 12 * 6 8 * 7 * ******************************************************************************/ package algs4; /** * The {@code TarjanSCC} class represents a data type for * determining the strong components in a digraph. * The <em>id</em> operation determines in which strong component * a given vertex lies; the <em>areStronglyConnected</em> operation * determines whether two vertices are in the same strong component; * and the <em>count</em> operation determines the number of strong * components. * The <em>component identifier</em> of a component is one of the * vertices in the strong component: two vertices have the same component * identifier if and only if they are in the same strong component. * <p> * This implementation uses Tarjan's algorithm. * The constructor takes time proportional to <em>V</em> + <em>E</em> * (in the worst case), * where <em>V</em> is the number of vertices and <em>E</em> is the number of edges. * Afterwards, the <em>id</em>, <em>count</em>, and <em>areStronglyConnected</em> * operations take constant time. * For alternate implementations of the same API, see * {@link KosarajuSharirSCC} and {@link GabowSCC}. * <p> * For additional documentation, * see <a href="http://algs4.cs.princeton.edu/42digraph">Section 4.2</a> of * <i>Algorithms, 4th Edition</i> by Robert Sedgewick and Kevin Wayne. 
* * @author Robert Sedgewick * @author Kevin Wayne */ public class TarjanSCC { private boolean[] marked; // marked[v] = has v been visited? private int[] id; // id[v] = id of strong component containing v private int[] low; // low[v] = low number of v private int pre; // preorder number counter private int count; // number of strongly-connected components private Stack<Integer> stack; /** * Computes the strong components of the digraph {@code G}. * @param G the digraph */ public TarjanSCC(Digraph G) { marked = new boolean[G.V()]; stack = new Stack<Integer>(); id = new int[G.V()]; low = new int[G.V()]; for (int v = 0; v < G.V(); v++) { if (!marked[v]) dfs(G, v); } // check that id[] gives strong components assert check(G); } private void dfs(Digraph G, int v) { marked[v] = true; low[v] = pre++; int min = low[v]; stack.push(v); for (int w : G.adj(v)) { if (!marked[w]) dfs(G, w); if (low[w] < min) min = low[w]; } if (min < low[v]) { low[v] = min; return; } int w; do { w = stack.pop(); id[w] = count; low[w] = G.V(); } while (w != v); count++; } /** * Returns the number of strong components. * @return the number of strong components */ public int count() { return count; } /** * Are vertices {@code v} and {@code w} in the same strong component? * @param v one vertex * @param w the other vertex * @return {@code true} if vertices {@code v} and {@code w} are in the same * strong component, and {@code false} otherwise * @throws IllegalArgumentException unless {@code 0 <= v < V} * @throws IllegalArgumentException unless {@code 0 <= w < V} */ public boolean stronglyConnected(int v, int w) { validateVertex(v); validateVertex(w); return id[v] == id[w]; } /** * Returns the component id of the strong component containing vertex {@code v}. 
* @param v the vertex * @return the component id of the strong component containing vertex {@code v} * @throws IllegalArgumentException unless {@code 0 <= v < V} */ public int id(int v) { validateVertex(v); return id[v]; } // does the id[] array contain the strongly connected components? private boolean check(Digraph G) { TransitiveClosure tc = new TransitiveClosure(G); for (int v = 0; v < G.V(); v++) { for (int w = 0; w < G.V(); w++) { if (stronglyConnected(v, w) != (tc.reachable(v, w) && tc.reachable(w, v))) return false; } } return true; } // throw an IllegalArgumentException unless {@code 0 <= v < V} private void validateVertex(int v) { int V = marked.length; if (v < 0 || v >= V) throw new IllegalArgumentException("vertex " + v + " is not between 0 and " + (V-1)); } /** * Unit tests the {@code TarjanSCC} data type. * * @param args the command-line arguments */ public static void main(String[] args) { In in = new In(args[0]); Digraph G = new Digraph(in); TarjanSCC scc = new TarjanSCC(G); // number of connected components int m = scc.count(); StdOut.println(m + " components"); // compute list of vertices in each strong component Queue<Integer>[] components = (Queue<Integer>[]) new Queue[m]; for (int i = 0; i < m; i++) { components[i] = new Queue<Integer>(); } for (int v = 0; v < G.V(); v++) { components[scc.id(v)].enqueue(v); } // print results for (int i = 0; i < m; i++) { for (int v : components[i]) { StdOut.print(v + " "); } StdOut.println(); } } } /****************************************************************************** * Copyright 2002-2016, Robert Sedgewick and Kevin Wayne. * * This file is part of algs4.jar, which accompanies the textbook * * Algorithms, 4th edition by Robert Sedgewick and Kevin Wayne, * Addison-Wesley Professional, 2011, ISBN 0-321-57351-X. 
* http://algs4.cs.princeton.edu * * * algs4.jar is free software: you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation, either version 3 of the License, or * (at your option) any later version. * * algs4.jar is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with algs4.jar. If not, see http://www.gnu.org/licenses. ******************************************************************************/
/** * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.openejb.core.entity; import java.lang.reflect.Method; import java.rmi.NoSuchObjectException; import java.util.Collection; import java.util.Enumeration; import java.util.HashMap; import java.util.Iterator; import java.util.Map; import java.util.Vector; import javax.ejb.EJBAccessException; import javax.ejb.EJBHome; import javax.ejb.EJBLocalHome; import javax.ejb.EJBLocalObject; import javax.ejb.EJBObject; import javax.ejb.EntityBean; import javax.ejb.NoSuchEntityException; import javax.ejb.Timer; import javax.transaction.Transaction; import javax.transaction.TransactionManager; import javax.transaction.TransactionSynchronizationRegistry; import org.apache.openejb.ApplicationException; import org.apache.openejb.ContainerType; import org.apache.openejb.DeploymentInfo; import org.apache.openejb.OpenEJBException; import org.apache.openejb.ProxyInfo; import org.apache.openejb.SystemException; import org.apache.openejb.loader.SystemInstance; import org.apache.openejb.core.BaseContext; import org.apache.openejb.core.CoreDeploymentInfo; import org.apache.openejb.core.Operation; import org.apache.openejb.core.ThreadContext; import org.apache.openejb.core.ExceptionType; import 
org.apache.openejb.core.timer.EjbTimerService; import org.apache.openejb.core.timer.EjbTimerServiceImpl; import org.apache.openejb.core.transaction.TransactionContainer; import org.apache.openejb.core.transaction.TransactionContext; import org.apache.openejb.core.transaction.TransactionPolicy; import org.apache.openejb.spi.SecurityService; import org.apache.openejb.util.LogCategory; import org.apache.openejb.util.Logger; /** * @org.apache.xbean.XBean element="bmpContainer" */ public class EntityContainer implements org.apache.openejb.RpcContainer, TransactionContainer { private EntityInstanceManager instanceManager; private Map<String,CoreDeploymentInfo> deploymentRegistry = new HashMap<String,CoreDeploymentInfo>(); private Object containerID = null; public Logger logger = Logger.getInstance(LogCategory.OPENEJB, "org.apache.openejb.util.resources"); private TransactionManager transactionManager; private SecurityService securityService; /** * Tracks entity instances that have been "entered" so we can throw reentrancy exceptions. 
*/
protected EntrancyTracker entrancyTracker;

/**
 * Creates a BMP entity container.
 *
 * @param id                 unique id of this container
 * @param transactionManager JTA transaction manager backing the container's tx policies
 * @param securityService    used for caller authorization checks
 * @param poolSize           size of the entity bean instance pool
 * @throws OpenEJBException if the instance manager cannot be created
 */
public EntityContainer(Object id, TransactionManager transactionManager, SecurityService securityService, int poolSize) throws OpenEJBException {
    this.containerID = id;
    this.transactionManager = transactionManager;
    this.securityService = securityService;
    entrancyTracker = new EntrancyTracker(SystemInstance.get().getComponent(TransactionSynchronizationRegistry.class));
    instanceManager = new EntityInstanceManager(this, transactionManager, securityService, poolSize);
}

/** Returns a snapshot array of all deployments currently registered with this container. */
public synchronized DeploymentInfo [] deployments() {
    return deploymentRegistry.values().toArray(new DeploymentInfo[deploymentRegistry.size()]);
}

/** Looks up a deployment by its (String) deployment id; null if not registered here. */
public synchronized DeploymentInfo getDeploymentInfo(Object deploymentID) {
    String id = (String) deploymentID;
    return deploymentRegistry.get(id);
}

public ContainerType getContainerType() {
    return ContainerType.BMP_ENTITY;
}

public Object getContainerID() {
    return containerID;
}

/**
 * Registers a deployment with this container, wires the deployment back to the
 * container, hands it to the instance manager, and starts its timer service
 * (if any).
 */
public void deploy(DeploymentInfo info) throws OpenEJBException {
    synchronized (this) {
        CoreDeploymentInfo deploymentInfo = (CoreDeploymentInfo) info;
        deploymentRegistry.put((String)deploymentInfo.getDeploymentID(), deploymentInfo);
        deploymentInfo.setContainer(this);
    }
    instanceManager.deploy(info);

    EjbTimerService timerService = info.getEjbTimerService();
    if (timerService != null) {
        timerService.start();
    }
}

/**
 * Reverses deploy(): stops the timer service first, then releases instances
 * and finally unregisters the deployment.
 */
public void undeploy(DeploymentInfo info) throws OpenEJBException {
    EjbTimerService timerService = info.getEjbTimerService();
    if (timerService != null) {
        timerService.stop();
    }
    instanceManager.undeploy(info);

    synchronized (this) {
        String id = (String) info.getDeploymentID();
        deploymentRegistry.remove(id);
        info.setContainer(null);
    }
}

/**
 * @deprecated use invoke signature without 'securityIdentity' argument.
 */
public Object invoke(Object deployID, Method callMethod, Object[] args, Object primKey, Object securityIdentity) throws OpenEJBException {
    // securityIdentity is ignored; delegates to the current invoke signature.
    return invoke(deployID, callMethod.getDeclaringClass(), callMethod, args, primKey);
}

/**
 * Container entry point: authorizes the caller, then routes the call to the
 * appropriate handler — home create/find/home-business methods, remove, or a
 * plain business method on the bean.
 */
public Object invoke(Object deployID, Class callInterface, Method callMethod, Object [] args, Object primKey) throws org.apache.openejb.OpenEJBException {
    CoreDeploymentInfo deployInfo = (CoreDeploymentInfo) this.getDeploymentInfo(deployID);
    if (deployInfo == null) throw new OpenEJBException("Deployment does not exist in this container. Deployment(id='"+deployID+"'), Container(id='"+containerID+"')");

    ThreadContext callContext = new ThreadContext(deployInfo, primKey);
    ThreadContext oldCallContext = ThreadContext.enter(callContext);
    try {
        boolean authorized = getSecurityService().isCallerAuthorized(callMethod, deployInfo.getInterfaceType(callInterface));
        if (!authorized)
            throw new org.apache.openejb.ApplicationException(new EJBAccessException("Unauthorized Access by Principal Denied"));

        Class declaringClass = callMethod.getDeclaringClass();
        String methodName = callMethod.getName();

        if (EJBHome.class.isAssignableFrom(declaringClass) || EJBLocalHome.class.isAssignableFrom(declaringClass)) {
            if (declaringClass != EJBHome.class && declaringClass != EJBLocalHome.class) {
                // Method declared on the component's own home interface:
                // dispatch by name prefix per the EJB home-method convention.
                if (methodName.startsWith("create")) {
                    return createEJBObject(callMethod, args, callContext);
                } else if (methodName.startsWith("find")) {
                    return findMethod(callMethod, args, callContext);
                } else {
                    return homeMethod(callMethod, args, callContext);
                }
            } else if (methodName.equals("remove")) {
                removeEJBObject(callMethod, args, callContext);
                return null;
            }
        } else if ((EJBObject.class == declaringClass || EJBLocalObject.class == declaringClass) && methodName.equals("remove")) {
            removeEJBObject(callMethod, args, callContext);
            return null;
        }

        // Anything else is a business method on the bean class.
        callContext.setCurrentOperation(Operation.BUSINESS);
        callContext.setCurrentAllowedStates(EntityContext.getStates());
        Method runMethod = deployInfo.getMatchingBeanMethod(callMethod);
        callContext.set(Method.class, runMethod);
        Object retValue = invoke(callMethod, runMethod, args, callContext);
        return retValue;
    } finally {
        ThreadContext.exit(oldCallContext);
    }
}

private SecurityService getSecurityService() {
    return securityService;
}

public EntityInstanceManager getInstanceManager() {
    return instanceManager;
}

/**
 * Invokes a business method on a pooled bean instance under the method's
 * transaction policy, with reentrancy tracking and synthetic ejbLoad/ejbStore
 * calls when no transaction is active.
 */
protected Object invoke(Method callMethod, Method runMethod, Object [] args, ThreadContext callContext) throws org.apache.openejb.OpenEJBException {
    TransactionPolicy txPolicy = callContext.getDeploymentInfo().getTransactionPolicy(callMethod);
    TransactionContext txContext = new TransactionContext(callContext, transactionManager);
    txContext.callContext = callContext;
    EntityBean bean = null;
    txPolicy.beforeInvoke(bean, txContext);

    Object returnValue = null;
    entrancyTracker.enter(callContext.getDeploymentInfo(), callContext.getPrimaryKey());
    try {
        bean = instanceManager.obtainInstance(callContext);

        ejbLoad_If_No_Transaction(callContext, bean);
        returnValue = runMethod.invoke(bean, args);
        ejbStore_If_No_Transaction(callContext, bean);
        instanceManager.poolInstance(callContext, bean, callContext.getPrimaryKey());
    } catch (java.lang.reflect.InvocationTargetException ite) {// handle enterprise bean exceptions
        ExceptionType type = callContext.getDeploymentInfo().getExceptionType(ite.getTargetException());
        if (type == ExceptionType.SYSTEM) {
            /* System Exception ****************************/
            // Instance is NOT returned to the pool on a system exception.
            txPolicy.handleSystemException(ite.getTargetException(), bean, txContext);
        } else {
            /* Application Exception ***********************/
            instanceManager.poolInstance(callContext, bean, callContext.getPrimaryKey());
            txPolicy.handleApplicationException(ite.getTargetException(), type == ExceptionType.APPLICATION_ROLLBACK, txContext);
        }
    } catch (org.apache.openejb.ApplicationException e) {
        txPolicy.handleApplicationException(e.getRootCause(), false, txContext);
    } catch (org.apache.openejb.SystemException se) {
        txPolicy.handleSystemException(se.getRootCause(), bean, txContext);
    } catch (Throwable iae) {// handle reflection exception
        /*
          Any exception thrown by reflection; not by the enterprise bean. Possible
          Exceptions are:
            IllegalAccessException - if the underlying method is inaccessible.
            IllegalArgumentException - if the number of actual and formal parameters differ, or if an unwrapping conversion fails.
            NullPointerException - if the specified object is null and the method is an instance method.
            ExceptionInInitializerError - if the initialization provoked by this method fails.
        */
        txPolicy.handleSystemException(iae, bean, txContext);
    } finally {
        entrancyTracker.exit(callContext.getDeploymentInfo(), callContext.getPrimaryKey());
        txPolicy.afterInvoke(bean, txContext);
    }

    return returnValue;
}

/**
 * Calls bean.ejbLoad() when the current operation is a business or remove call
 * and no JTA transaction is active (BMP beans must see fresh state per call
 * outside a transaction). A missing entity is surfaced as an
 * ApplicationException wrapping NoSuchObjectException and the instance is
 * discarded.
 */
public void ejbLoad_If_No_Transaction(ThreadContext callContext, EntityBean bean) throws Exception {
    Operation orginalOperation = callContext.getCurrentOperation();
    BaseContext.State[] originalAllowedStates = callContext.getCurrentAllowedStates();
    if (orginalOperation == Operation.BUSINESS || orginalOperation == Operation.REMOVE) {

        Transaction currentTx = null;
        try {
            currentTx = getTransactionManager().getTransaction();
        } catch (javax.transaction.SystemException se) {
            throw new org.apache.openejb.SystemException("Transaction Manager failure", se);
        }

        if (currentTx == null) {
            callContext.setCurrentOperation(Operation.LOAD);
            callContext.setCurrentAllowedStates(EntityContext.getStates());
            try {
                bean.ejbLoad();
            } catch (NoSuchEntityException e) {
                instanceManager.discardInstance(callContext, bean);
                throw new ApplicationException(new NoSuchObjectException("Entity not found: " + callContext.getPrimaryKey())/*.initCause(e)*/);
            } catch (Exception e) {
                instanceManager.discardInstance(callContext, bean);
                throw e;
            } finally {
                // Always restore the caller's operation/state context.
                callContext.setCurrentOperation(orginalOperation);
                callContext.setCurrentAllowedStates(originalAllowedStates);
            }
        }

    }
}

private TransactionManager getTransactionManager() {
    return transactionManager;
}

/**
 * Mirror of ejbLoad_If_No_Transaction for ejbStore(): flushes bean state after
 * a business method when no JTA transaction is active. Note: unlike the load
 * variant, this only fires for BUSINESS operations, not REMOVE.
 */
public void ejbStore_If_No_Transaction(ThreadContext callContext, EntityBean bean) throws Exception {
    Operation currentOp = callContext.getCurrentOperation();
    BaseContext.State[] originalAllowedStates = callContext.getCurrentAllowedStates();
    if (currentOp == Operation.BUSINESS) {

        Transaction currentTx = null;
        try {
            currentTx = getTransactionManager().getTransaction();
        } catch (javax.transaction.SystemException se) {
            throw new org.apache.openejb.SystemException("Transaction Manager failure", se);
        }

        if (currentTx == null) {
            callContext.setCurrentOperation(Operation.STORE);
            callContext.setCurrentAllowedStates(EntityContext.getStates());
            try {
                bean.ejbStore();
            } catch (Exception e) {
                instanceManager.discardInstance(callContext, bean);
                throw e;
            } finally {
                // Always restore the caller's operation/state context.
                callContext.setCurrentOperation(currentOp);
                callContext.setCurrentAllowedStates(originalAllowedStates);
            }
        }
    }
}

// Extension hook invoked after a successful ejbCreate; no-op in this container.
protected void didCreateBean(ThreadContext callContext, EntityBean bean) throws org.apache.openejb.OpenEJBException {
}

/**
 * Handles a home interface "create..." call: runs ejbCreate and ejbPostCreate
 * on a pooled instance under a single transaction scope (see the spec note
 * below) and returns a ProxyInfo for the new entity.
 */
protected ProxyInfo createEJBObject(Method callMethod, Object [] args, ThreadContext callContext) throws OpenEJBException {
    CoreDeploymentInfo deploymentInfo = callContext.getDeploymentInfo();

    callContext.setCurrentOperation(Operation.CREATE);
    callContext.setCurrentAllowedStates(EntityContext.getStates());

    TransactionPolicy txPolicy = callContext.getDeploymentInfo().getTransactionPolicy(callMethod);
    TransactionContext txContext = new TransactionContext(callContext, transactionManager);
    txContext.callContext = callContext;

    /*
     * According to section 9.1.5.1 of the EJB 1.1 specification, the "ejbPostCreate(...)
     * method executes in the same transaction context as the previous ejbCreate(...) method."
     *
     * For this reason the TransactionScopeHandler methods usually performed by the invoke( )
     * operation must be handled here along with the call explicitly.
* This ensures that the afterInvoke() is not processed between the ejbCreate and ejbPostCreate methods to * ensure that the ejbPostCreate executes in the same transaction context of the ejbCreate. * This would otherwise not be possible if container-managed transactions were used because * the TransactionScopeManager would attempt to commit the transaction immediately after the ejbCreate * and before the ejbPostCreate had a chance to execute. Once the ejbPostCreate method execute the * super classes afterInvoke( ) method will be executed committing the transaction if its a CMT. */ txPolicy.beforeInvoke(null, txContext); EntityBean bean = null; Object primaryKey = null; try { // Get new ready instance bean = instanceManager.obtainInstance(callContext); // Obtain the proper ejbCreate() method Method ejbCreateMethod = deploymentInfo.getMatchingBeanMethod(callMethod); // invoke the ejbCreate which returns the primary key primaryKey = ejbCreateMethod.invoke(bean, args); didCreateBean(callContext, bean); // determine post create callback method Method ejbPostCreateMethod = deploymentInfo.getMatchingPostCreateMethod(ejbCreateMethod); // create a new context containing the pk for the post create call ThreadContext postCreateContext = new ThreadContext(deploymentInfo, primaryKey); postCreateContext.setCurrentOperation(Operation.POST_CREATE); postCreateContext.setCurrentAllowedStates(EntityContext.getStates()); ThreadContext oldContext = ThreadContext.enter(postCreateContext); try { // Invoke the ejbPostCreate method on the bean instance ejbPostCreateMethod.invoke(bean, args); // According to section 9.1.5.1 of the EJB 1.1 specification, the "ejbPostCreate(...) // method executes in the same transaction context as the previous ejbCreate(...) method." // // The bean is first insterted using db.create( ) and then after ejbPostCreate( ) its // updated using db.update(). This protocol allows for visablity of the bean after ejbCreate // within the current trasnaction. 
} finally { ThreadContext.exit(oldContext); } // update pool instanceManager.poolInstance(callContext, bean, primaryKey); } catch (java.lang.reflect.InvocationTargetException ite) {// handle enterprise bean exceptions ExceptionType type = callContext.getDeploymentInfo().getExceptionType(ite.getTargetException()); if (type == ExceptionType.SYSTEM) { /* System Exception ****************************/ txPolicy.handleSystemException(ite.getTargetException(), bean, txContext); } else { /* Application Exception ***********************/ instanceManager.poolInstance(callContext, bean, callContext.getPrimaryKey()); txPolicy.handleApplicationException(ite.getTargetException(), type == ExceptionType.APPLICATION_ROLLBACK, txContext); } } catch (OpenEJBException e) { txPolicy.handleSystemException(e.getRootCause(), bean, txContext); } catch (Throwable e) {// handle reflection exception /* Any exception thrown by reflection; not by the enterprise bean. Possible Exceptions are: IllegalAccessException - if the underlying method is inaccessible. IllegalArgumentException - if the number of actual and formal parameters differ, or if an unwrapping conversion fails. NullPointerException - if the specified object is null and the method is an instance method. ExceptionInInitializerError - if the initialization provoked by this method fails. 
*/ txPolicy.handleSystemException(e, bean, txContext); } finally { txPolicy.afterInvoke(bean, txContext); } return new ProxyInfo(deploymentInfo, primaryKey); } protected Object findMethod(Method callMethod, Object [] args, ThreadContext callContext) throws OpenEJBException { CoreDeploymentInfo deploymentInfo = callContext.getDeploymentInfo(); callContext.setCurrentOperation(Operation.FIND); callContext.setCurrentAllowedStates(EntityContext.getStates()); Method runMethod = deploymentInfo.getMatchingBeanMethod(callMethod); Object returnValue = invoke(callMethod, runMethod, args, callContext); /* * Find operations return either a single primary key or a collection of primary keys. * The primary keys are converted to ProxyInfo objects. */ if (returnValue instanceof java.util.Collection) { Iterator keys = ((Collection) returnValue).iterator(); Vector<ProxyInfo> proxies = new Vector<ProxyInfo>(); while (keys.hasNext()) { Object primaryKey = keys.next(); proxies.addElement(new ProxyInfo(deploymentInfo, primaryKey)); } returnValue = proxies; } else if (returnValue instanceof java.util.Enumeration) { Enumeration keys = (Enumeration) returnValue; Vector<ProxyInfo> proxies = new Vector<ProxyInfo>(); while (keys.hasMoreElements()) { Object primaryKey = keys.nextElement(); proxies.addElement(new ProxyInfo(deploymentInfo, primaryKey)); } returnValue = new org.apache.openejb.util.ArrayEnumeration(proxies); } else returnValue = new ProxyInfo(deploymentInfo, returnValue); return returnValue; } protected Object homeMethod(Method callMethod, Object [] args, ThreadContext callContext) throws OpenEJBException { org.apache.openejb.core.CoreDeploymentInfo deploymentInfo = callContext.getDeploymentInfo(); callContext.setCurrentOperation(Operation.HOME); callContext.setCurrentAllowedStates(EntityContext.getStates()); Method runMethod = deploymentInfo.getMatchingBeanMethod(callMethod); return invoke(callMethod, runMethod, args, callContext); } protected void didRemove(EntityBean bean, 
ThreadContext threadContext) throws OpenEJBException { cancelTimers(threadContext); } private void cancelTimers(ThreadContext threadContext) { CoreDeploymentInfo deploymentInfo = threadContext.getDeploymentInfo(); Object primaryKey = threadContext.getPrimaryKey(); // if we have a real timerservice, stop all timers. Otherwise, ignore... if (primaryKey != null) { EjbTimerService timerService = deploymentInfo.getEjbTimerService(); if (timerService != null && timerService instanceof EjbTimerServiceImpl) { for (Timer timer : deploymentInfo.getEjbTimerService().getTimers(primaryKey)) { timer.cancel(); } } } } protected void removeEJBObject(Method callMethod, Object [] args, ThreadContext callContext) throws org.apache.openejb.OpenEJBException { callContext.setCurrentOperation(Operation.REMOVE); callContext.setCurrentAllowedStates(EntityContext.getStates()); TransactionPolicy txPolicy = callContext.getDeploymentInfo().getTransactionPolicy(callMethod); TransactionContext txContext = new TransactionContext(callContext, transactionManager); txContext.callContext = callContext; EntityBean bean = null; txPolicy.beforeInvoke(bean, txContext); try { bean = instanceManager.obtainInstance(callContext); ejbLoad_If_No_Transaction(callContext, bean); bean.ejbRemove(); didRemove(bean, callContext); instanceManager.poolInstance(callContext, bean, callContext.getPrimaryKey()); } catch (org.apache.openejb.ApplicationException e) { txPolicy.handleApplicationException(e.getRootCause(), false, txContext); } catch (org.apache.openejb.SystemException se) { txPolicy.handleSystemException(se.getRootCause(), bean, txContext); } catch (Exception e) {// handle reflection exception ExceptionType type = callContext.getDeploymentInfo().getExceptionType(e); if (type == ExceptionType.SYSTEM) { /* System Exception ****************************/ txPolicy.handleSystemException(e, bean, txContext); } else { /* Application Exception ***********************/ instanceManager.poolInstance(callContext, bean, 
callContext.getPrimaryKey()); txPolicy.handleApplicationException(e, type == ExceptionType.APPLICATION_ROLLBACK, txContext); } } finally { txPolicy.afterInvoke(bean, txContext); } } public void discardInstance(Object bean, ThreadContext threadContext) { if (bean != null) { try { instanceManager.discardInstance(threadContext, (EntityBean) bean); } catch (SystemException e) { logger.error("The instance manager encountered an unkown system exception while trying to discard the entity instance with primary key " + threadContext.getPrimaryKey()); } } } }
/* Copyright 2014 Matthew Rogers "BossLetsPlays"
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.redthirddivision.astilade.ai;

import java.util.ArrayList;
import java.util.Collections;

import com.redthirddivision.astilade.ai.heuristic.ClosestHeuristic;

/**
 * <strong>Project:</strong> Kingdom of Astilade-core <br>
 * <strong>File:</strong> AStarPathFinder.java
 *
 * <p>
 * A path finder implementation that uses the AStar heuristic based algorithm
 * to determine a path.
 * </p>
 *
 * @author <a href = "http://redthirddivision.com/team/BossLetsPlays"> Matthew Rogers</a>
 */
public class AStarPathFinder implements PathFinder {

    /** The set of nodes that have been searched through */
    private ArrayList<Node>  closed = new ArrayList<Node>();
    /** The set of nodes that we do not yet consider fully searched */
    private SortedList       open   = new SortedList();

    /** The map being searched */
    private TileBasedMap     map;
    /** The maximum depth of search we're willing to accept before giving up */
    private int              maxSearchDistance;

    /** The complete set of nodes across the map */
    private Node[][]         nodes;
    /** True if we allow diagonal movement */
    private boolean          allowDiagMovement;
    /** The heuristic we're applying to determine which nodes to search first */
    private AStarHeuristic   heuristic;

    /**
     * Create a path finder with the default heuristic - closest to target.
     *
     * @param map The map to be searched
     * @param maxSearchDistance The maximum depth we'll search before giving up
     * @param allowDiagMovement True if the search should try diagonal movement
     */
    public AStarPathFinder(TileBasedMap map, int maxSearchDistance, boolean allowDiagMovement) {
        this(map, maxSearchDistance, allowDiagMovement, new ClosestHeuristic());
    }

    /**
     * Create a path finder
     *
     * @param heuristic The heuristic used to determine the search order of the map
     * @param map The map to be searched
     * @param maxSearchDistance The maximum depth we'll search before giving up
     * @param allowDiagMovement True if the search should try diagonal movement
     */
    public AStarPathFinder(TileBasedMap map, int maxSearchDistance, boolean allowDiagMovement, AStarHeuristic heuristic) {
        this.heuristic = heuristic;
        this.map = map;
        this.maxSearchDistance = maxSearchDistance;
        this.allowDiagMovement = allowDiagMovement;

        // Pre-allocate one Node per tile; nodes are reused across searches.
        nodes = new Node[map.getWidthInTiles()][map.getHeightInTiles()];
        for (int x = 0; x < map.getWidthInTiles(); x++) {
            for (int y = 0; y < map.getHeightInTiles(); y++) {
                nodes[x][y] = new Node(x, y);
            }
        }
    }

    @Override
    public Path findPath(Mover mover, int sx, int sy, int tx, int ty) {
        // easy first check: if the destination is blocked, we can't get there
        if (map.isBlocked(mover, tx, ty)) return null;

        // Initial state for A*: the closed set is empty and only the starting
        // tile is in the open list, since we're already standing on it.
        nodes[sx][sy].cost = 0;
        nodes[sx][sy].depth = 0;
        closed.clear();
        open.clear();
        open.add(nodes[sx][sy]);

        nodes[tx][ty].parent = null;

        // Search while we haven't exceeded our max search depth and there is
        // still something left to evaluate.
        int maxDepth = 0;
        while ((maxDepth < maxSearchDistance) && (open.size() != 0)) {
            // pull out the first node in our open list, this is determined to
            // be the most likely to be the next step based on our heuristic
            Node current = getFirstInOpen();
            if (current == nodes[tx][ty]) break;

            removeFromOpen(current);
            addToClosed(current);

            // search through all the neighbors of the current node evaluating
            // them as next steps
            for (int x = -1; x < 2; x++) {
                for (int y = -1; y < 2; y++) {
                    // not a neighbor, its the current tile
                    if ((x == 0) && (y == 0)) continue;

                    // if we're not allowing diagonal movement then only
                    // one of x or y can be set
                    if (!allowDiagMovement) {
                        if ((x != 0) && (y != 0)) continue;
                    }

                    // determine the location of the neighbour and evaluate it
                    int xp = x + current.x;
                    int yp = y + current.y;

                    if (isValidLocation(mover, sx, sy, xp, yp)) {
                        // The cost to get to this node is the current cost plus
                        // the movement cost to reach this node. The heuristic
                        // value is only used for ordering the open list.
                        float nextStepCost = current.cost + getMovementCost(mover, current.x, current.y, xp, yp);
                        Node neighbour = nodes[xp][yp];
                        map.pathFinderVisited(xp, yp);

                        // If the new cost we've determined for this node is lower
                        // than its previous cost, there may be a better path to
                        // reach it — drop it from both lists so it is re-evaluated.
                        if (nextStepCost < neighbour.cost) {
                            if (inOpenList(neighbour)) {
                                removeFromOpen(neighbour);
                            }
                            if (inClosedList(neighbour)) {
                                removeFromClosed(neighbour);
                            }
                        }

                        // If the node hasn't already been processed and discarded,
                        // reset its cost to our current cost and add it as a next
                        // possible step (i.e. to the open list).
                        if (!inOpenList(neighbour) && !(inClosedList(neighbour))) {
                            neighbour.cost = nextStepCost;
                            neighbour.heuristic = getHeuristicCost(mover, xp, yp, tx, ty);
                            maxDepth = Math.max(maxDepth, neighbour.setParent(current));
                            addToOpen(neighbour);
                        }
                    }
                }
            }
        }

        // since we have run out of search,
        // there was no path. Just return null
        if (nodes[tx][ty].parent == null) return null;

        // At this point we've definitely found a path so we can use the parent
        // references of the nodes to walk from the target location back
        // to the start, recording the nodes on the way.
        Path path = new Path();
        Node target = nodes[tx][ty];
        while (target != nodes[sx][sy]) {
            path.prependStep(target.x, target.y);
            target = target.parent;
        }
        path.prependStep(sx, sy);

        // thats it, we have our path
        return path;
    }

    /**
     * Get the first element from the open list. This is the next
     * one to be searched.
     *
     * @return The first element in the open list
     */
    protected Node getFirstInOpen() {
        return open.first();
    }

    /**
     * Add a node to the open list
     *
     * @param node The node to be added to the open list
     */
    protected void addToOpen(Node node) {
        open.add(node);
    }

    /**
     * Check if a node is in the open list
     *
     * @param node The node to check for
     * @return True if the node given is in the open list
     */
    protected boolean inOpenList(Node node) {
        return open.contains(node);
    }

    /**
     * Remove a node from the open list
     *
     * @param node The node to remove from the open list
     */
    protected void removeFromOpen(Node node) {
        open.remove(node);
    }

    /**
     * Add a node to the closed list
     *
     * @param node The node to add to the closed list
     */
    protected void addToClosed(Node node) {
        closed.add(node);
    }

    /**
     * Check if the node supplied is in the closed list
     *
     * @param node The node to search for
     * @return True if the node specified is in the closed list
     */
    protected boolean inClosedList(Node node) {
        return closed.contains(node);
    }

    /**
     * Remove a node from the closed list
     *
     * @param node The node to remove from the closed list
     */
    protected void removeFromClosed(Node node) {
        closed.remove(node);
    }

    /**
     * Check if a given location is valid for the supplied mover
     *
     * @param mover The mover that would hold a given location
     * @param sx The starting x coordinate
     * @param sy The starting y coordinate
     * @param x The x coordinate of the location to check
     * @param y The y coordinate of the location to check
     * @return True if the location is valid for the given mover
     */
    protected boolean isValidLocation(Mover mover, int sx, int sy, int x, int y) {
        boolean invalid = (x < 0) || (y < 0) || (x >= map.getWidthInTiles()) || (y >= map.getHeightInTiles());
        // The starting tile is always considered valid even if "blocked"
        // (the mover itself may be occupying it).
        if ((!invalid) && ((sx != x) || (sy != y))) invalid = map.isBlocked(mover, x, y);
        return !invalid;
    }

    /**
     * Get the cost to move through a given location
     *
     * @param mover The mover that is being moved
     * @param sx The x coordinate of the tile whose cost is being determined
     * @param sy The y coordinate of the tile whose cost is being determined
     * @param tx The x coordinate of the target location
     * @param ty The y coordinate of the target location
     * @return The cost of movement through the given tile
     */
    public float getMovementCost(Mover mover, int sx, int sy, int tx, int ty) {
        return map.getCost(mover, sx, sy, tx, ty);
    }

    /**
     * Get the heuristic cost for the given location. This determines in which
     * order the locations are processed.
     *
     * @param mover The mover that is being moved
     * @param x The x coordinate of the tile whose cost is being determined
     * @param y The y coordinate of the tile whose cost is being determined
     * @param tx The x coordinate of the target location
     * @param ty The y coordinate of the target location
     * @return The heuristic cost assigned to the tile
     */
    public float getHeuristicCost(Mover mover, int x, int y, int tx, int ty) {
        return heuristic.getCost(map, mover, x, y, tx, ty);
    }

    /**
     * A simple list of nodes kept sorted by their natural (f = g + h) order.
     * FIX: previously used raw types hidden behind @SuppressWarnings("all");
     * now properly generic.
     */
    private class SortedList {

        /** The list of elements */
        private ArrayList<Node> list = new ArrayList<Node>();

        /**
         * Retrieve the first element from the list
         *
         * @return The first element from the list
         */
        public Node first() {
            return list.get(0);
        }

        /** Empty the list */
        public void clear() {
            list.clear();
        }

        /**
         * Add an element to the list - causes sorting
         *
         * @param o The element to add
         */
        public void add(Node o) {
            list.add(o);
            Collections.sort(list);
        }

        /**
         * Remove an element from the list
         *
         * @param o The element to remove
         */
        public void remove(Node o) {
            list.remove(o);
        }

        /**
         * Get the number of elements in the list
         *
         * @return The number of element in the list
         */
        public int size() {
            return list.size();
        }

        /**
         * Check if an element is in the list
         *
         * @param o The element to search for
         * @return True if the element is in the list
         */
        public boolean contains(Node o) {
            return list.contains(o);
        }
    }

    /**
     * A single node in the search graph.
     * FIX: implements Comparable&lt;Node&gt; instead of the raw Comparable.
     */
    private class Node implements Comparable<Node> {

        /** The x coordinate of the node */
        private int   x;
        /** The y coordinate of the node */
        private int   y;
        /** The path cost for this node */
        private float cost;
        /** The parent of this node, how we reached it in the search */
        private Node  parent;
        /** The heuristic cost of this node */
        private float heuristic;
        /** The search depth of this node */
        private int   depth;

        /**
         * Create a new node
         *
         * @param x The x coordinate of the node
         * @param y The y coordinate of the node
         */
        public Node(int x, int y) {
            this.x = x;
            this.y = y;
        }

        /**
         * Set the parent of this node
         *
         * @param parent The parent node which lead us to this node
         * @return The depth we have now reached in searching
         */
        public int setParent(Node parent) {
            depth = parent.depth + 1;
            this.parent = parent;
            return depth;
        }

        /** Orders nodes by f = heuristic + cost, ascending. */
        @Override
        public int compareTo(Node other) {
            float f = heuristic + cost;
            float of = other.heuristic + other.cost;

            if (f < of) return -1;
            if (f > of) return 1;
            return 0;
        }
    }
}
package maritech.tile;

import java.util.ArrayList;

import mariculture.core.lib.MachineRenderedMultiMeta;
import mariculture.core.network.PacketHandler;
import mariculture.core.tile.base.TileMultiBlock;
import maritech.network.PacketCompressor;
import net.minecraft.nbt.NBTTagCompound;
import net.minecraft.tileentity.TileEntity;
import net.minecraftforge.common.util.ForgeDirection;
import cofh.api.energy.EnergyStorage;
import cofh.api.energy.IEnergyReceiver;

/**
 * A 2x2 multiblock air compressor. Energy and compressed air are stored on the
 * master tile; slave tiles forward energy to the master. Consumes 1000 RF per
 * second (every 20 ticks) to produce one unit of stored air, up to {@link #max}.
 */
public class TileAirCompressor extends TileMultiBlock implements IEnergyReceiver {
    /** Counts updateMaster() calls; drives the once-per-N-ticks work cycle. */
    private int machineTick;
    /** Maximum units of compressed air the master can hold. */
    public static final int max = 480;
    /** Units of compressed air currently stored (meaningful on the master). */
    public int storedAir = 0;
    /** RF buffer; only the master's buffer is actually drained. */
    public EnergyStorage energyStorage;

    public TileAirCompressor() {
        energyStorage = new EnergyStorage(10000);
        needsInit = true;
    }

    /**
     * Accepts RF on behalf of the multiblock and forwards it to the master's
     * buffer while the master still has room for more air.
     *
     * FIX: the previous implementation checked this tile's own {@code storedAir}
     * (always stale on slaves) instead of the master's, and always returned 0
     * while still inserting energy — so providers were never debited for the
     * energy actually accepted. Per the IEnergyReceiver contract the return
     * value must be the amount of energy received (or that would be received
     * when simulating).
     *
     * @param from       side the energy arrives from (unused; all sides accepted)
     * @param maxReceive maximum RF offered
     * @param simulate   if true, only report how much would be accepted
     * @return the amount of RF accepted (0 when unformed or full)
     */
    @Override
    public int receiveEnergy(ForgeDirection from, int maxReceive, boolean simulate) {
        TileAirCompressor master = (TileAirCompressor) getMaster();
        if (master != null && master.storedAir < max) {
            return master.energyStorage.receiveEnergy(maxReceive, simulate);
        }
        return 0;
    }

    @Override
    public boolean canConnectEnergy(ForgeDirection from) {
        return true;
    }

    @Override
    public int getEnergyStored(ForgeDirection from) {
        return getMaster() != null ? ((TileAirCompressor) getMaster()).energyStorage.getEnergyStored() : 0;
    }

    @Override
    public int getMaxEnergyStored(ForgeDirection from) {
        return getMaster() != null ? ((TileAirCompressor) getMaster()).energyStorage.getMaxEnergyStored() : 0;
    }

    /** @return units of compressed air stored on this tile. */
    public int getAirStored() {
        return storedAir;
    }

    @Override
    public void readFromNBT(NBTTagCompound nbt) {
        super.readFromNBT(nbt);
        energyStorage.readFromNBT(nbt);
        storedAir = nbt.getInteger("StoredAir");
    }

    @Override
    public void writeToNBT(NBTTagCompound nbt) {
        super.writeToNBT(nbt);
        energyStorage.writeToNBT(nbt);
        nbt.setInteger("StoredAir", storedAir);
    }

    /** @return true every {@code i} ticks of the machine's own counter. */
    public boolean onTick(int i) {
        return machineTick % i == 0;
    }

    @Override
    public boolean canUpdate() {
        return true;
    }

    /**
     * Master-side work cycle: every 20 ticks, burn 1000 RF to compress one unit
     * of air (up to {@link #max}) and sync the new state to nearby clients.
     */
    @Override
    public void updateMaster() {
        machineTick++;
        if (onTick(20)) {
            // Only commit the extraction once we know the full 1000 RF is available.
            if (energyStorage.extractEnergy(1000, true) >= 1000) {
                energyStorage.extractEnergy(1000, false);
                if (storedAir < max) {
                    storedAir++;
                    if (!worldObj.isRemote) {
                        PacketHandler.sendAround(new PacketCompressor(xCoord, yCoord, zCoord, storedAir, getEnergyStored(ForgeDirection.UP)), this);
                    }
                }
            }
        }
    }

    /** @return render angle for the compressor wheel (currently static). */
    public float getWheelAngle() {
        return 0;
    }

    @Override
    public Class getTEClass() {
        return this.getClass();
    }

    //Master Stuff

    /** @return true when the tile at (x,y,z) is a compressor already bound to a master. */
    @Override
    public boolean isPartnered(int x, int y, int z) {
        TileEntity tile = worldObj.getTileEntity(x, y, z);
        return tile instanceof TileAirCompressor && ((TileAirCompressor) tile).master != null;
    }

    public boolean isBaseBlock(int x, int y, int z) {
        return worldObj.getBlock(x, y, z) == getBlockType() && worldObj.getBlockMetadata(x, y, z) == MachineRenderedMultiMeta.COMPRESSOR_BASE;
    }

    public boolean isTopBlock(int x, int y, int z) {
        return worldObj.getBlock(x, y, z) == getBlockType() && worldObj.getBlockMetadata(x, y, z) == MachineRenderedMultiMeta.COMPRESSOR_TOP;
    }

    /** An unpartnered base block, free to join a new multiblock. */
    public boolean isBase(int x, int y, int z) {
        return isBaseBlock(x, y, z) && !isPartnered(x, y, z);
    }

    /** An unpartnered top block, free to join a new multiblock. */
    public boolean isTop(int x, int y, int z) {
        return isTopBlock(x, y, z) && !isPartnered(x, y, z);
    }

    @Override
    public void onBlockPlaced() {
        if (getBlockMetadata() == MachineRenderedMultiMeta.COMPRESSOR_BASE) {
            onBlockPlacedBase(xCoord, yCoord, zCoord);
        } else if (getBlockMetadata() == MachineRenderedMultiMeta.COMPRESSOR_TOP) {
            onBlockPlacedTop(xCoord, yCoord, zCoord);
        }

        worldObj.markBlockForUpdate(xCoord, yCoord, zCoord);
    }

    //Base Setting of Master Block
    /**
     * Tries to form the 2x2 multiblock when a base block was just placed,
     * checking the four possible orientations (X+/X-/Z+/Z-); the first complete
     * arrangement wins and the placed/neighbouring tiles become slaves of it.
     */
    public void onBlockPlacedBase(int x, int y, int z) {
        if (isBase(x, y, z) && isBase(x + 1, y, z) && isTop(x, y + 1, z) && isTop(x + 1, y + 1, z)) {
            MultiPart mstr = new MultiPart(x, y, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x + 1, y, z, ForgeDirection.WEST));
            parts.add(setAsSlave(mstr, x + 1, y + 1, z, ForgeDirection.WEST));
            parts.add(setAsSlave(mstr, x, y + 1, z, ForgeDirection.EAST));
            setAsMaster(mstr, parts, ForgeDirection.EAST);
        }

        if (isBase(x, y, z) && isBase(x - 1, y, z) && isTop(x, y + 1, z) && isTop(x - 1, y + 1, z)) {
            MultiPart mstr = new MultiPart(x - 1, y, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.WEST));
            parts.add(setAsSlave(mstr, x - 1, y + 1, z, ForgeDirection.EAST));
            parts.add(setAsSlave(mstr, x, y + 1, z, ForgeDirection.WEST));
            setAsMaster(mstr, parts, ForgeDirection.EAST);
        }

        if (isBase(x, y, z) && isBase(x, y, z + 1) && isTop(x, y + 1, z) && isTop(x, y + 1, z + 1)) {
            MultiPart mstr = new MultiPart(x, y, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z + 1, ForgeDirection.SOUTH));
            parts.add(setAsSlave(mstr, x, y + 1, z + 1, ForgeDirection.SOUTH));
            parts.add(setAsSlave(mstr, x, y + 1, z, ForgeDirection.NORTH));
            setAsMaster(mstr, parts, ForgeDirection.NORTH);
        }

        if (isBase(x, y, z) && isBase(x, y, z - 1) && isTop(x, y + 1, z) && isTop(x, y + 1, z - 1)) {
            MultiPart mstr = new MultiPart(x, y, z - 1);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.SOUTH));
            parts.add(setAsSlave(mstr, x, y + 1, z - 1, ForgeDirection.NORTH));
            parts.add(setAsSlave(mstr, x, y + 1, z, ForgeDirection.SOUTH));
            setAsMaster(mstr, parts, ForgeDirection.NORTH);
        }
    }

    //Top Setting of Master Block
    /**
     * Mirror of {@link #onBlockPlacedBase} for when the placed block is a top:
     * looks one level down for the base pair in each of the four orientations.
     */
    public void onBlockPlacedTop(int x, int y, int z) {
        if (isBase(x, y - 1, z) && isBase(x + 1, y - 1, z) && isTop(x, y, z) && isTop(x + 1, y, z)) {
            MultiPart mstr = new MultiPart(x, y - 1, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.EAST));
            parts.add(setAsSlave(mstr, x + 1, y, z, ForgeDirection.WEST));
            parts.add(setAsSlave(mstr, x + 1, y - 1, z, ForgeDirection.WEST));
            setAsMaster(mstr, parts, ForgeDirection.EAST);
        }

        if (isBase(x, y - 1, z) && isBase(x - 1, y - 1, z) && isTop(x, y, z) && isTop(x - 1, y, z)) {
            MultiPart mstr = new MultiPart(x - 1, y - 1, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.WEST));
            parts.add(setAsSlave(mstr, x - 1, y, z, ForgeDirection.EAST));
            parts.add(setAsSlave(mstr, x, y - 1, z, ForgeDirection.WEST));
            setAsMaster(mstr, parts, ForgeDirection.EAST);
        }

        if (isBase(x, y - 1, z) && isBase(x, y - 1, z + 1) && isTop(x, y, z) && isTop(x, y, z + 1)) {
            MultiPart mstr = new MultiPart(x, y - 1, z);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z + 1, ForgeDirection.SOUTH));
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.NORTH));
            parts.add(setAsSlave(mstr, x, y - 1, z + 1, ForgeDirection.SOUTH));
            setAsMaster(mstr, parts, ForgeDirection.NORTH);
        }

        if (isBase(x, y - 1, z) && isBase(x, y - 1, z - 1) && isTop(x, y, z) && isTop(x, y, z - 1)) {
            MultiPart mstr = new MultiPart(x, y - 1, z - 1);
            ArrayList<MultiPart> parts = new ArrayList<MultiPart>();
            parts.add(setAsSlave(mstr, x, y, z, ForgeDirection.SOUTH));
            parts.add(setAsSlave(mstr, x, y, z - 1, ForgeDirection.NORTH));
            parts.add(setAsSlave(mstr, x, y - 1, z, ForgeDirection.SOUTH));
            setAsMaster(mstr, parts, ForgeDirection.NORTH);
        }
    }
}