gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.bucket.composite;
import org.elasticsearch.common.ParseField;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.xcontent.ConstructingObjectParser;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.query.QueryShardContext;
import org.elasticsearch.search.aggregations.AbstractAggregationBuilder;
import org.elasticsearch.search.aggregations.AggregationBuilder;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.AggregatorFactory;
import org.elasticsearch.search.aggregations.bucket.nested.NestedAggregatorFactory;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.function.Function;
/**
 * Builder for the {@code composite} aggregation, which paginates over the buckets
 * produced by a list of {@link CompositeValuesSourceBuilder} sources. Results are
 * returned {@code size} buckets at a time; the {@code after} key of a previous
 * response selects the next page.
 */
public class CompositeAggregationBuilder extends AbstractAggregationBuilder<CompositeAggregationBuilder> {
    public static final String NAME = "composite";
    public static final ParseField AFTER_FIELD_NAME = new ParseField("after");
    public static final ParseField SIZE_FIELD_NAME = new ParseField("size");
    public static final ParseField SOURCES_FIELD_NAME = new ParseField("sources");

    // The aggregation's name is only known per request, so the parser is produced by
    // a factory function instead of being a single shared constant.
    private static final Function<String, ConstructingObjectParser<CompositeAggregationBuilder, Void>> PARSER = name -> {
        @SuppressWarnings("unchecked")
        ConstructingObjectParser<CompositeAggregationBuilder, Void> parser = new ConstructingObjectParser<>(NAME, a -> {
            // Constructor args: a[0] = sources (required), a[1] = size, a[2] = after (both optional).
            CompositeAggregationBuilder builder = new CompositeAggregationBuilder(name, (List<CompositeValuesSourceBuilder<?>>)a[0]);
            if (a[1] != null) {
                builder.size((Integer)a[1]);
            }
            if (a[2] != null) {
                builder.aggregateAfter((Map<String, Object>)a[2]);
            }
            return builder;
        });
        parser.declareObjectArray(ConstructingObjectParser.constructorArg(),
            (p, c) -> CompositeValuesSourceParserHelper.fromXContent(p), SOURCES_FIELD_NAME);
        parser.declareInt(ConstructingObjectParser.optionalConstructorArg(), SIZE_FIELD_NAME);
        parser.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, context) -> p.map(), AFTER_FIELD_NAME);
        return parser;
    };

    /** Parses a {@code composite} aggregation named {@code aggregationName} from the XContent stream. */
    public static CompositeAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException {
        return PARSER.apply(aggregationName).parse(parser, null);
    }

    private List<CompositeValuesSourceBuilder<?>> sources;
    // Bucket key to resume pagination after; null means start from the first bucket.
    private Map<String, Object> after;
    private int size = 10;

    public CompositeAggregationBuilder(String name, List<CompositeValuesSourceBuilder<?>> sources) {
        super(name);
        validateSources(sources);
        this.sources = sources;
    }

    /** Clone constructor, used by {@link #shallowCopy}. */
    protected CompositeAggregationBuilder(CompositeAggregationBuilder clone,
                                          AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metaData) {
        super(clone, factoriesBuilder, metaData);
        this.sources = new ArrayList<>(clone.sources);
        this.after = clone.after;
        this.size = clone.size;
    }

    @Override
    protected AggregationBuilder shallowCopy(AggregatorFactories.Builder factoriesBuilder, Map<String, Object> metaData) {
        return new CompositeAggregationBuilder(this, factoriesBuilder, metaData);
    }

    /** Read from a stream. Field order must mirror {@link #doWriteTo} exactly. */
    public CompositeAggregationBuilder(StreamInput in) throws IOException {
        super(in);
        int num = in.readVInt();
        this.sources = new ArrayList<>(num);
        for (int i = 0; i < num; i++) {
            CompositeValuesSourceBuilder<?> builder = CompositeValuesSourceParserHelper.readFrom(in);
            sources.add(builder);
        }
        this.size = in.readVInt();
        if (in.readBoolean()) {
            this.after = in.readMap();
        }
    }

    @Override
    protected void doWriteTo(StreamOutput out) throws IOException {
        // Wire order: source count, each source, size, presence flag + optional after map.
        out.writeVInt(sources.size());
        for (CompositeValuesSourceBuilder<?> builder : sources) {
            CompositeValuesSourceParserHelper.writeTo(builder, out);
        }
        out.writeVInt(size);
        out.writeBoolean(after != null);
        if (after != null) {
            out.writeMap(after);
        }
    }

    @Override
    public String getType() {
        return NAME;
    }

    /**
     * Gets the list of {@link CompositeValuesSourceBuilder} for this aggregation.
     */
    public List<CompositeValuesSourceBuilder<?>> sources() {
        return sources;
    }

    /**
     * Sets the values that indicate which composite bucket this request should "aggregate after".
     * Defaults to {@code null} (start from the first bucket).
     */
    public CompositeAggregationBuilder aggregateAfter(Map<String, Object> afterKey) {
        this.after = afterKey;
        return this;
    }

    /**
     * The number of composite buckets to return. Defaults to {@code 10}.
     */
    public CompositeAggregationBuilder size(int size) {
        this.size = size;
        return this;
    }

    /**
     * @return the number of composite buckets. Defaults to {@code 10}.
     */
    public int size() {
        return size;
    }

    /**
     * Returns {@code null} if the provided factory and its parents are compatible with
     * this aggregator, otherwise returns the instance of the parent's factory that is
     * incompatible with the composite aggregation (only nested parents are allowed).
     */
    private AggregatorFactory checkParentIsNullOrNested(AggregatorFactory factory) {
        if (factory == null) {
            return null;
        } else if (factory instanceof NestedAggregatorFactory) {
            // A nested parent is acceptable; keep walking up the factory chain.
            return checkParentIsNullOrNested(factory.getParent());
        } else {
            return factory;
        }
    }

    /** Rejects a null/empty source list, null sources and duplicate source names. */
    private static void validateSources(List<CompositeValuesSourceBuilder<?>> sources) {
        if (sources == null || sources.isEmpty()) {
            throw new IllegalArgumentException("Composite [" + SOURCES_FIELD_NAME.getPreferredName() + "] cannot be null or empty");
        }
        Set<String> names = new HashSet<>();
        Set<String> duplicates = new HashSet<>();
        sources.forEach(source -> {
            if (source == null) {
                throw new IllegalArgumentException("Composite source cannot be null");
            }
            boolean unique = names.add(source.name());
            if (unique == false) {
                duplicates.add(source.name());
            }
        });
        if (duplicates.size() > 0) {
            throw new IllegalArgumentException("Composite source names must be unique, found duplicates: " + duplicates);
        }
    }

    @Override
    protected AggregatorFactory doBuild(QueryShardContext queryShardContext, AggregatorFactory parent,
                                        AggregatorFactories.Builder subfactoriesBuilder) throws IOException {
        // Composite cannot run under any parent aggregation other than (a chain of) nested.
        AggregatorFactory invalid = checkParentIsNullOrNested(parent);
        if (invalid != null) {
            throw new IllegalArgumentException("[composite] aggregation cannot be used with a parent aggregation of" +
                " type: [" + invalid.getClass().getSimpleName() + "]");
        }
        CompositeValuesSourceConfig[] configs = new CompositeValuesSourceConfig[sources.size()];
        for (int i = 0; i < configs.length; i++) {
            configs[i] = sources.get(i).build(queryShardContext);
            if (configs[i].valuesSource().needsScores()) {
                throw new IllegalArgumentException("[sources] cannot access _score");
            }
        }
        // Translate the raw "after" map into a typed CompositeKey aligned with the sources.
        final CompositeKey afterKey;
        if (after != null) {
            if (after.size() != configs.length) {
                throw new IllegalArgumentException("[after] has " + after.size() +
                    " value(s) but [sources] has " + sources.size());
            }
            Comparable[] values = new Comparable[sources.size()];
            for (int i = 0; i < sources.size(); i++) {
                String sourceName = sources.get(i).name();
                if (after.containsKey(sourceName) == false) {
                    throw new IllegalArgumentException("Missing value for [after." + sources.get(i).name() + "]");
                }
                Object obj = after.get(sourceName);
                if (configs[i].missingBucket() && obj == null) {
                    // A null after-value is only legal when the source emits an explicit missing bucket.
                    values[i] = null;
                } else if (obj instanceof Comparable) {
                    values[i] = (Comparable) obj;
                } else {
                    throw new IllegalArgumentException("Invalid value for [after." + sources.get(i).name() +
                        "], expected comparable, got [" + (obj == null ? "null" : obj.getClass().getSimpleName()) + "]");
                }
            }
            afterKey = new CompositeKey(values);
        } else {
            afterKey = null;
        }
        return new CompositeAggregationFactory(name, queryShardContext, parent, subfactoriesBuilder, metaData, size, configs, afterKey);
    }

    @Override
    protected XContentBuilder internalXContent(XContentBuilder builder, Params params) throws IOException {
        builder.startObject();
        builder.field(SIZE_FIELD_NAME.getPreferredName(), size);
        builder.startArray(SOURCES_FIELD_NAME.getPreferredName());
        for (CompositeValuesSourceBuilder<?> source: sources) {
            CompositeValuesSourceParserHelper.toXContent(source, builder, params);
        }
        builder.endArray();
        if (after != null) {
            CompositeAggregation.buildCompositeMap(AFTER_FIELD_NAME.getPreferredName(), after, builder);
        }
        builder.endObject();
        return builder;
    }

    @Override
    public int hashCode() {
        return Objects.hash(super.hashCode(), sources, size, after);
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) return true;
        if (obj == null || getClass() != obj.getClass()) return false;
        if (super.equals(obj) == false) return false;
        CompositeAggregationBuilder other = (CompositeAggregationBuilder) obj;
        return size == other.size &&
            Objects.equals(sources, other.sources) &&
            Objects.equals(after, other.after);
    }
}
| |
package com.ethlo.time;
/*-
* #%L
* Internet Time Utility
* %%
* Copyright (C) 2017 - 2019 Morten Haraldsen (ethlo)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import static com.ethlo.time.TemporalType.LOCAL_DATE;
import static com.ethlo.time.TemporalType.LOCAL_DATE_TIME;
import static com.ethlo.time.TemporalType.OFFSET_DATE_TIME;
import static com.ethlo.time.TemporalType.YEAR;
import static com.ethlo.time.TemporalType.YEAR_MONTH;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.fail;
import java.time.DateTimeException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.OffsetDateTime;
import java.time.Year;
import java.time.YearMonth;
import java.time.temporal.Temporal;
import org.junit.jupiter.api.Tag;
import org.junit.jupiter.api.Test;
/**
 * Correctness tests for the {@link ITU} facade: parsing, formatting, validation,
 * and {@link TemporalConsumer} dispatch for each supported temporal granularity.
 */
@Tag("CorrectnessTest")
public class ITUTest
{
    private static final OffsetDateTime VALID_DATETIME = OffsetDateTime.parse("2017-05-01T16:23:12Z");

    /**
     * Asserts that {@code input} validates as {@code expected} and as none of the
     * other temporal types. Pass {@code null} to assert that no type accepts it.
     */
    private void assertValidOnlyAs(final String input, final TemporalType expected)
    {
        assertThat(ITU.isValid(input, YEAR)).isEqualTo(expected == YEAR);
        assertThat(ITU.isValid(input, YEAR_MONTH)).isEqualTo(expected == YEAR_MONTH);
        assertThat(ITU.isValid(input, LOCAL_DATE)).isEqualTo(expected == LOCAL_DATE);
        assertThat(ITU.isValid(input, LOCAL_DATE_TIME)).isEqualTo(expected == LOCAL_DATE_TIME);
        assertThat(ITU.isValid(input, OFFSET_DATE_TIME)).isEqualTo(expected == OFFSET_DATE_TIME);
    }

    @Test
    public void parseDateTime()
    {
        final String input = VALID_DATETIME.toString();
        assertThat(ITU.parseDateTime(input)).isNotNull();
    }

    @Test
    public void parseDateTimeWithoutSeconds()
    {
        // Seconds are mandatory in a full date-time, so this must be rejected
        assertThrows(DateTimeException.class, () -> ITU.parseDateTime("2017-12-09T11:23Z"));
    }

    @Test
    void formatUtcWithFractionDigits()
    {
        final String formatted = ITU.formatUtc(VALID_DATETIME, 6);
        assertThat(formatted).isEqualTo("2017-05-01T16:23:12.000000Z");
    }

    @Test
    void formatOffsetDateTimeWithLimitedGranularity()
    {
        final String formatted = ITU.formatUtc(VALID_DATETIME, Field.MINUTE);
        assertThat(formatted).isEqualTo("2017-05-01T16:23Z");
    }

    @Test
    void formatDateTimeWithLimitedGranularity()
    {
        final DateTime dateTime = DateTime.of(2012, 11, 31, 22, 50, TimezoneOffset.UTC);
        assertThat(dateTime.toString(Field.MINUTE)).isEqualTo("2012-11-31T22:50Z");
    }

    @Test
    void formatDateTimeWithFullGranularity()
    {
        final DateTime dateTime = DateTime.of(2012, 11, 31, 22, 50, 46, 123456789, TimezoneOffset.UTC);
        assertThat(dateTime.toString(9)).isEqualTo("2012-11-31T22:50:46.123456789Z");
    }

    @Test
    public void testFormatUtc()
    {
        assertThat(ITU.formatUtc(VALID_DATETIME)).isNotNull();
    }

    @Test
    public void isValidFalse()
    {
        assertThat(ITU.isValid("2017-asddsd")).isFalse();
    }

    @Test
    public void isValidTrue()
    {
        assertThat(ITU.isValid("2017-12-09T11:23:39Z")).isTrue();
    }

    @Test
    public void isValidEmpty()
    {
        assertThat(ITU.isValid("")).isFalse();
    }

    @Test
    public void isValidNull()
    {
        // A null input is a programming error, not merely an invalid date
        assertThrows(NullPointerException.class, () -> ITU.isValid(null));
    }

    @Test
    public void formatUtcMicro()
    {
        assertThat(ITU.formatUtcMicro(VALID_DATETIME)).isNotNull();
    }

    @Test
    public void formatUtcNano()
    {
        assertThat(ITU.formatUtcNano(VALID_DATETIME)).isNotNull();
    }

    @Test
    public void testFormatUtcMilli()
    {
        assertThat(ITU.formatUtcMilli(VALID_DATETIME)).isNotNull();
    }

    @Test
    public void parseLenient()
    {
        assertThat(ITU.parseLenient("2017-01-31")).isNotNull();
    }

    @Test
    public void parseLenient2()
    {
        assertThat(ITU.parseLenient("2017-01-31")).isNotNull();
    }

    @Test
    public void parseLenientConsumer()
    {
        // A plain date must be dispatched to the LocalDate overload, never to fallback
        ITU.parse("2017-01-31", new TemporalConsumer()
        {
            @Override
            public void handle(final LocalDate date)
            {
                assertThat(date).isEqualTo(LocalDate.of(2017, 1, 31));
            }

            @Override
            public void fallback(final Temporal temporal)
            {
                fail("Should not fall back");
            }
        });
    }

    @Test
    public void parseLenientConsumerLocalDate()
    {
        final String input = "2017-01-31";
        assertValidOnlyAs(input, LOCAL_DATE);
        ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final LocalDate date)
            {
                assertThat(date).isEqualTo(LocalDate.of(2017, 1, 31));
            }
        });
    }

    @Test
    public void parseLenientConsumerLocalDateTime()
    {
        final String input = "2017-01-31T14:00";
        assertValidOnlyAs(input, LOCAL_DATE_TIME);
        ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final LocalDateTime dateTime)
            {
                assertThat(dateTime).isEqualTo(LocalDateTime.parse(input));
            }
        });
    }

    @Test
    public void parseLenientConsumerOffsetDateTime()
    {
        final String input = "2017-01-31T15:04:32+04:00";
        assertValidOnlyAs(input, OFFSET_DATE_TIME);
        ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final OffsetDateTime dateTime)
            {
                assertThat(dateTime).isEqualTo(OffsetDateTime.parse(input));
            }
        });
    }

    @Test
    public void parseLenientConsumerYearMonth()
    {
        final String input = "2017-01";
        assertValidOnlyAs(input, YEAR_MONTH);
        ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final YearMonth month)
            {
                assertThat(month).isEqualTo(YearMonth.parse(input));
            }
        });
    }

    @Test
    public void testWorksAsAny()
    {
        // isValid accepts several candidate types; matching any one is sufficient
        final String input = "2018-01";
        assertThat(ITU.isValid(input, YEAR, YEAR_MONTH)).isTrue();
    }

    @Test
    public void parseLenientConsumerYear()
    {
        final String input = "2017";
        assertValidOnlyAs(input, YEAR);
        ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final Year year)
            {
                assertThat(year).isEqualTo(Year.parse(input));
            }
        });
    }

    @Test
    public void parseLenientUnparseable()
    {
        final String input = "2017-03-05G";
        assertValidOnlyAs(input, null);
        final DateTimeException exc = assertThrows(DateTimeException.class, () -> ITU.parse(input, new TemporalConsumer()
        {
            @Override
            public void handle(final Year year)
            {
                assertThat(year).isEqualTo(Year.parse(input));
            }
        }));
        assertThat(exc).hasMessage("Expected character [T, t, ] at position 11 '2017-03-05G'");
    }
}
| |
/*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
/**
* Do not modify this class. It was generated.
* Instead modify LeafRegionEntry.cpp and then run
* bin/generateRegionEntryClasses.sh from the directory
* that contains your build.xml.
*/
package com.pivotal.gemfirexd.internal.engine.store.entry;
import java.util.concurrent.atomic.AtomicIntegerFieldUpdater;
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import com.gemstone.gemfire.internal.cache.Token;
import com.gemstone.gemfire.internal.concurrent.AtomicUpdaterFactory;
import com.gemstone.gemfire.internal.offheap.OffHeapRegionEntryHelper;
import com.gemstone.gemfire.internal.offheap.annotations.Released;
import com.gemstone.gemfire.internal.offheap.annotations.Retained;
import com.gemstone.gemfire.internal.offheap.annotations.Unretained;
import com.gemstone.gemfire.internal.cache.lru.EnableLRU;
import com.gemstone.gemfire.internal.InternalStatisticsDisabledException;
import com.gemstone.gemfire.internal.cache.lru.LRUClockNode;
import com.gemstone.gemfire.internal.cache.lru.NewLRUClockHand;
import com.gemstone.gemfire.internal.concurrent.CustomEntryConcurrentHashMap.HashEntry;
import java.io.DataOutput;
import java.io.IOException;
import com.gemstone.gemfire.internal.cache.LocalRegion;
import com.gemstone.gemfire.internal.cache.RegionEntry;
import com.gemstone.gemfire.internal.cache.RegionEntryContext;
import com.gemstone.gemfire.internal.cache.RegionEntryFactory;
import com.gemstone.gemfire.internal.shared.Version;
import com.gemstone.gemfire.internal.util.ArrayUtils;
import com.pivotal.gemfirexd.internal.engine.sql.catalog.ExtraTableInfo;
import com.pivotal.gemfirexd.internal.engine.store.CompactCompositeKey;
import com.pivotal.gemfirexd.internal.engine.store.GemFireContainer;
import com.pivotal.gemfirexd.internal.engine.store.RegionEntryUtils;
import com.pivotal.gemfirexd.internal.engine.store.RowFormatter;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.services.cache.ClassSize;
import com.pivotal.gemfirexd.internal.iapi.services.io.ArrayInputStream;
import com.pivotal.gemfirexd.internal.iapi.sql.execute.ExecRow;
import com.pivotal.gemfirexd.internal.iapi.types.BooleanDataValue;
import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.DataValueFactory;
import com.pivotal.gemfirexd.internal.iapi.types.RowLocation;
import com.pivotal.gemfirexd.internal.shared.common.StoredFormatIds;
import com.gemstone.gemfire.cache.CacheWriterException;
import com.gemstone.gemfire.cache.EntryNotFoundException;
import com.gemstone.gemfire.cache.TimeoutException;
import com.gemstone.gemfire.internal.cache.CachedDeserializable;
import com.gemstone.gemfire.internal.cache.EntryEventImpl;
import com.gemstone.gemfire.internal.cache.RegionClearedException;
import com.gemstone.gemfire.internal.cache.Token;
import com.gemstone.gemfire.internal.cache.OffHeapRegionEntry;
import com.pivotal.gemfirexd.internal.engine.store.CompactCompositeRegionKey;
import com.pivotal.gemfirexd.internal.engine.store.offheap.OffHeapRegionEntryUtils;
public class VMBucketRowLocationStatsLRURegionEntryOffHeap extends RowLocationStatsLRURegionEntry
implements OffHeapRegionEntry
{
  /**
   * Creates a bucket-scoped, off-heap, stats+LRU region entry. Generated code
   * (see LeafRegionEntry.cpp) — do not hand-edit.
   */
  public VMBucketRowLocationStatsLRURegionEntryOffHeap (RegionEntryContext context, Object key,
      @Retained
      Object value
      ) {
    super(context,
          value
        );
    // Derive table metadata, the owning bucket id and the region key from the
    // context/key/value triple (the key may be embedded in the value bytes).
    this.tableInfo = RegionEntryUtils.entryGetTableInfo(context, key, value);
    this.bucketId = RegionEntryUtils.getBucketId(context);
    this.key = RegionEntryUtils.entryGetRegionKey(key, value);
  }
  // Hash-map plumbing for CustomEntryConcurrentHashMap: cached hash and chain link.
  protected int hash;
  private HashEntry<Object, Object> next;
  // Last-modified timestamp; always accessed through the atomic field updater below.
  @SuppressWarnings("unused")
  private volatile long lastModified;
  private static final AtomicLongFieldUpdater<VMBucketRowLocationStatsLRURegionEntryOffHeap> lastModifiedUpdater
    = AtomicUpdaterFactory.newLongFieldUpdater(VMBucketRowLocationStatsLRURegionEntryOffHeap.class, "lastModified");
  protected long getlastModifiedField() {
    return lastModifiedUpdater.get(this);
  }
  // CAS so concurrent modifications never lose a timestamp update.
  protected final boolean compareAndSetLastModifiedField(long expectedValue,
      long newValue) {
    return lastModifiedUpdater.compareAndSet(this, expectedValue, newValue);
  }
  // Map-entry accessors used by the concurrent hash map (generated).
  @Override
  public final int getEntryHash() {
    return this.hash;
  }
  @Override
  protected final void setEntryHash(int v) {
    this.hash = v;
  }
  @Override
  public final HashEntry<Object, Object> getNextEntry() {
    return this.next;
  }
  @Override
  public final void setNextEntry(final HashEntry<Object, Object> n) {
    this.next = n;
  }
  @Override
  public final void setDelayedDiskId(LocalRegion r) {
    // Intentionally a no-op: this entry variant has no disk id.
  }
  /** Recomputes this entry's LRU size from its current value; returns the size delta. */
  public final synchronized int updateEntrySize(EnableLRU capacityController) {
    return updateEntrySize(capacityController, _getValue());
  }
  /**
   * Recomputes this entry's LRU size from the given value and caches it.
   * Returns newSize - oldSize so the caller can adjust region-level counters.
   */
  public final synchronized int updateEntrySize(EnableLRU capacityController,
      Object value) {
    int oldSize = getEntrySize();
    int newSize = capacityController.entrySize(getRawKey(), value);
    setEntrySize(newSize);
    int delta = newSize - oldSize;
    return delta;
  }
  // Doubly-linked LRU clock-list pointers plus the cached entry size used by eviction.
  private LRUClockNode nextLRU;
  private LRUClockNode prevLRU;
  private int size;
  public final void setNextLRUNode( LRUClockNode next ) {
    this.nextLRU = next;
  }
  public final LRUClockNode nextLRUNode() {
    return this.nextLRU;
  }
  public final void setPrevLRUNode( LRUClockNode prev ) {
    this.prevLRU = prev;
  }
  public final LRUClockNode prevLRUNode() {
    return this.prevLRU;
  }
  public final int getEntrySize() {
    return this.size;
  }
  protected final void setEntrySize(int size) {
    this.size = size;
  }
  /**
   * Records a read of this entry: refreshes the last-accessed time and bumps the
   * hit or miss counter.
   */
  @Override
  public final void updateStatsForGet(boolean hit, long time)
  {
    setLastAccessed(time);
    if (hit) {
      incrementHitCount();
    } else {
      incrementMissCount();
    }
  }
  @Override
  public final void setLastModified(long lastModified) {
    _setLastModified(lastModified);
    if (!DISABLE_ACCESS_TIME_UPDATE_ON_PUT) {
      // By default a modification also counts as an access.
      setLastAccessed(lastModified);
    }
  }
  // Access-statistics state; counters are bumped via atomic field updaters so
  // concurrent reads never lose increments.
  private volatile long lastAccessed;
  private volatile int hitCount;
  private volatile int missCount;
  private static final AtomicIntegerFieldUpdater<VMBucketRowLocationStatsLRURegionEntryOffHeap> hitCountUpdater
    = AtomicUpdaterFactory.newIntegerFieldUpdater(VMBucketRowLocationStatsLRURegionEntryOffHeap.class, "hitCount");
  private static final AtomicIntegerFieldUpdater<VMBucketRowLocationStatsLRURegionEntryOffHeap> missCountUpdater
    = AtomicUpdaterFactory.newIntegerFieldUpdater(VMBucketRowLocationStatsLRURegionEntryOffHeap.class, "missCount");
  @Override
  public final long getLastAccessed() throws InternalStatisticsDisabledException {
    return this.lastAccessed;
  }
  private void setLastAccessed(long lastAccessed) {
    this.lastAccessed = lastAccessed;
  }
  @Override
  public final long getHitCount() throws InternalStatisticsDisabledException {
    // Mask so the int counter is read as an unsigned 32-bit value.
    return this.hitCount & 0xFFFFFFFFL;
  }
  @Override
  public final long getMissCount() throws InternalStatisticsDisabledException {
    return this.missCount & 0xFFFFFFFFL;
  }
  private void incrementHitCount() {
    hitCountUpdater.incrementAndGet(this);
  }
  private void incrementMissCount() {
    missCountUpdater.incrementAndGet(this);
  }
  @Override
  public final void resetCounts() throws InternalStatisticsDisabledException {
    hitCountUpdater.set(this,0);
    missCountUpdater.set(this,0);
  }
  @Override
  public final void txDidDestroy(long currTime) {
    // A transaction destroyed this entry: refresh timestamps and clear counters.
    // NOTE(review): the plain counter writes here bypass the updaters; kept as generated.
    setLastModified(currTime);
    setLastAccessed(currTime);
    this.hitCount = 0;
    this.missCount = 0;
  }
  @Override
  public final boolean hasStats() {
    return true;
  }
  // The region key (set from RegionEntryUtils.entryGetRegionKey in the constructor).
  private Object key;
  @Override
  public final Object getRawKey() {
    return this.key;
  }
  @Override
  protected final void _setRawKey(Object key) {
    this.key = key;
  }
  // Address of the off-heap value. All reads/writes go through ohAddrUpdater (CAS)
  // and the OffHeapRegionEntryHelper so retain/release reference counting stays balanced.
  @Retained @Released private volatile long ohAddress;
  private final static AtomicLongFieldUpdater<VMBucketRowLocationStatsLRURegionEntryOffHeap> ohAddrUpdater =
    AtomicUpdaterFactory.newLongFieldUpdater(VMBucketRowLocationStatsLRURegionEntryOffHeap.class, "ohAddress");
  @Override
  public final boolean isOffHeap() {
    return true;
  }
  @Override
  public final Token getValueAsToken() {
    return OffHeapRegionEntryHelper.getValueAsToken(this);
  }
  @Override
  @Unretained
  protected final Object getValueField() {
    return OffHeapRegionEntryHelper._getValue(this);
  }
  @Override
  protected final void setValueField(@Unretained Object v) {
    OffHeapRegionEntryHelper.setValue(this, v);
  }
  @Override
  @Retained
  public final Object _getValueRetain(RegionEntryContext context,
      boolean decompress) {
    return OffHeapRegionEntryHelper._getValueRetain(this, decompress);
  }
  @Override
  public final long getAddress() {
    return ohAddrUpdater.get(this);
  }
  @Override
  public final boolean setAddress(long expectedAddr, long newAddr) {
    return ohAddrUpdater.compareAndSet(this, expectedAddr, newAddr);
  }
  @Override
  @Released
  public final void release() {
    OffHeapRegionEntryHelper.releaseEntry(this);
  }
  // Cached table metadata for the row in this entry; transient because it is
  // re-derived from the container.
  private transient ExtraTableInfo tableInfo;
  @Override
  public final ExtraTableInfo getTableInfo(GemFireContainer baseContainer) {
    return this.tableInfo;
  }
  @Override
  public final Object getContainerInfo() {
    return this.tableInfo;
  }
  /**
   * Refreshes {@link #tableInfo} for the given value. The container is taken from
   * the owning region's user attribute, or — when {@code owner} is null — from the
   * current tableInfo's RowFormatter. Returns the new tableInfo only when the
   * region key is part of the value bytes, otherwise null.
   */
  @Override
  public final Object setContainerInfo(final LocalRegion owner, final Object val) {
    final GemFireContainer container;
    ExtraTableInfo tabInfo;
    if (owner == null) {
      final RowFormatter rf;
      if ((tabInfo = this.tableInfo) != null
          && (rf = tabInfo.getRowFormatter()) != null) {
        container = rf.container;
      }
      else {
        // No owner and no cached formatter: nothing to refresh from.
        return null;
      }
    }
    else {
      container = (GemFireContainer)owner.getUserAttribute();
    }
    if (container != null && container.isByteArrayStore()) {
      tabInfo = container.getExtraTableInfo(val);
      this.tableInfo = tabInfo;
      if (tabInfo != null && tabInfo.regionKeyPartOfValue()) {
        return tabInfo;
      }
    }
    return null;
  }
  // ---- RowLocation / DataValueDescriptor identity behaviour (generated) ----
  @Override
  public final int estimateMemoryUsage() {
    // Only the reference is accounted; the row itself lives off-heap.
    return ClassSize.refSize;
  }
  @Override
  public final int getTypeFormatId() {
    return StoredFormatIds.ACCESS_MEM_HEAP_ROW_LOCATION_ID;
  }
  @Override
  public final Object cloneObject() {
    // The entry is its own row location; cloning returns the same instance.
    return this;
  }
  @Override
  public final RowLocation getClone() {
    return this;
  }
  @Override
  public final int compare(DataValueDescriptor other) {
    if (this == other) {
      return 0;
    }
    // NOTE(review): hashCode subtraction can overflow and is not a stable total
    // order; kept exactly as generated.
    return this.hashCode() - other.hashCode();
  }
  @Override
  public final DataValueDescriptor recycle() {
    return this;
  }
  @Override
  public final DataValueDescriptor getNewNull() {
    return DataValueFactory.DUMMY;
  }
  @Override
  public final boolean isNull() {
    // The shared DUMMY instance is the only "null" row location.
    return this == DataValueFactory.DUMMY;
  }
  @Override
  public final Object getObject() throws StandardException {
    return this;
  }
  // The remaining DataValueDescriptor operations are not meaningful for a region-entry
  // row location; all of them fail fast so misuse is caught immediately (generated).
  @Override
  public DataValueDescriptor coalesce(DataValueDescriptor[] list,
      DataValueDescriptor returnValue) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public int compare(DataValueDescriptor other, boolean nullsOrderedLow)
      throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public boolean compare(int op, DataValueDescriptor other,
      boolean orderedNulls, boolean unknownRV) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public boolean compare(int op, DataValueDescriptor other,
      boolean orderedNulls, boolean nullsOrderedLow, boolean unknownRV)
      throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue equals(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public int getLengthInBytes(DataTypeDescriptor dtd) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue greaterOrEquals(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue greaterThan(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue in(DataValueDescriptor left,
      DataValueDescriptor[] inList, boolean orderedList)
      throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue isNotNull() {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue isNullOp() {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue lessOrEquals(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public BooleanDataValue lessThan(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public void normalize(DataTypeDescriptor dtd, DataValueDescriptor source)
      throws StandardException {
    // Intentionally a no-op: there is nothing to normalize for a row location.
  }
  @Override
  public BooleanDataValue notEquals(DataValueDescriptor left,
      DataValueDescriptor right) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public void readExternalFromArray(ArrayInputStream ais) throws IOException,
      ClassNotFoundException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public void setValue(DataValueDescriptor theValue) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public int writeBytes(byte[] outBytes, int offset, DataTypeDescriptor dtd) {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public int computeHashCode(int maxWidth, int hash) {
    throw new UnsupportedOperationException("unexpected invocation for " + toString());
  }
  @Override
  public final DataValueDescriptor getKeyColumn(int index) {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public final void getKeyColumns(DataValueDescriptor[] keys) {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public boolean compare(int op, ExecRow row, boolean byteArrayStore,
      int colIdx, boolean orderedNulls, boolean unknownRV)
      throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public boolean compare(int op, CompactCompositeKey key, int colIdx,
      boolean orderedNulls, boolean unknownRV) throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public int equals(RowFormatter rf, byte[] bytes, boolean isKeyBytes,
      int logicalPosition, int keyBytesPos, final DataValueDescriptor[] outDVD)
      throws StandardException {
    throw new UnsupportedOperationException("unexpected invocation");
  }
  @Override
  public byte getTypeId() {
    throw new UnsupportedOperationException("Implement the method for DataType="+ this);
  }
  @Override
  public void writeNullDVD(DataOutput out) throws IOException{
    throw new UnsupportedOperationException("Implement the method for DataType="+ this);
  }
// This entry acts as its own value holder: both accessors hand back the
// entry itself rather than faulting in or unwrapping a value.
@Override
public final Object getValueWithoutFaultInOrOffHeapEntry(LocalRegion owner) {
return this;
}
@Override
public final Object getValueOrOffHeapEntry(LocalRegion owner) {
return this;
}
// Returns the deserialized heap form of the stored value, or null when the
// entry currently holds a token (invalid/removed/not-available) or nothing.
// NOTE(review): _getValueRetain(..., false) is called without a matching
// release here — presumably deserialization copies onto the heap so no
// retain/release pairing is needed; confirm against
// OffHeapRegionEntryHelper's reference-counting contract. The unconditional
// cast to CachedDeserializable assumes every non-token value is stored in
// that wrapper — TODO confirm.
@Override
public final Object getRawValue() {
Object val = OffHeapRegionEntryHelper._getValueRetain(this, false);
if (val != null && !Token.isInvalidOrRemoved(val)
&& val != Token.NOT_AVAILABLE) {
CachedDeserializable storedObject = (CachedDeserializable) val;
return storedObject.getDeserializedValue(null, this);
}
return null;
}
/**
 * Prepares {@code val} for storage in this entry: values that are eligible
 * for GemFireXD off-heap storage are routed to the off-heap preparation
 * helpers (create vs. update variants); everything else falls back to the
 * default preparation in the superclass.
 */
@Override
public final Object prepareValueForCache(RegionEntryContext r, Object val,
    boolean isEntryUpdate, boolean valHasMetadataForGfxdOffHeapUpdate) {
  final boolean offHeapEligible = okToStoreOffHeap(val)
      && OffHeapRegionEntryUtils.isValidValueForGfxdOffHeapStorage(val);
  if (!offHeapEligible) {
    // Not suitable for off-heap storage: defer to the inherited behavior.
    return super.prepareValueForCache(r, val, isEntryUpdate,
        valHasMetadataForGfxdOffHeapUpdate);
  }
  return isEntryUpdate
      ? OffHeapRegionEntryUtils.prepareValueForUpdate(this, r, val,
          valHasMetadataForGfxdOffHeapUpdate)
      : OffHeapRegionEntryUtils.prepareValueForCreate(r, val, false);
}
/**
 * Destroys this entry, first snapshotting the key bytes out of the value
 * when the event key is a {@link CompactCompositeRegionKey} — presumably
 * because the compact key shares its backing bytes with the row being
 * destroyed, so the key must be materialized before the value goes away
 * (TODO confirm against CompactCompositeRegionKey's storage contract).
 */
@Override
public final boolean destroy(LocalRegion region, EntryEventImpl event,
    boolean inTokenMode, boolean cacheWrite, @Unretained Object expectedOldValue,
    boolean forceDestroy, boolean removeRecoveredEntry)
    throws CacheWriterException, EntryNotFoundException, TimeoutException,
    RegionClearedException {
  final Object eventKey = event.getKey();
  if (eventKey instanceof CompactCompositeRegionKey) {
    final byte[] snapshot =
        ((CompactCompositeRegionKey)eventKey).snapshotKeyFromValue(false);
    if (snapshot != null) {
      _setRawKey(snapshot);
    }
  }
  return super.destroy(region, event, inTokenMode, cacheWrite,
      expectedOldValue, forceDestroy, removeRecoveredEntry);
}
// No per-version serialization variants for this entry type.
@Override
public final Version[] getSerializationVersions() {
return null;
}
// Id of the bucket owning this entry; set once at construction (constructor
// not visible in this chunk) and used by all container-based accessors below.
private final int bucketId;
@Override
public final Object getValue(GemFireContainer baseContainer) {
return RegionEntryUtils.getValue(baseContainer, this.bucketId, this);
}
// Container-based accessors: all delegate to RegionEntryUtils, threading
// through the owning bucket id (and, for row forms, the cached tableInfo).
@Override
public final Object getValueWithoutFaultIn(GemFireContainer baseContainer) {
return RegionEntryUtils.getValueWithoutFaultIn(baseContainer,this.bucketId, this);
}
@Override
public final ExecRow getRow(GemFireContainer baseContainer) {
return RegionEntryUtils.getRow(baseContainer, this.bucketId, this, this.tableInfo);
}
@Override
public final ExecRow getRowWithoutFaultIn(GemFireContainer baseContainer) {
return RegionEntryUtils.getRowWithoutFaultIn(baseContainer, this.bucketId, this,
this.tableInfo);
}
@Override
public final int getBucketID() {
return this.bucketId;
}
/**
 * Appends a diagnostic rendering of this entry (key, raw byte source, value,
 * lock state, bucket id) to {@code sb}; used for toString()/logging.
 *
 * Fix: the original built "; byte source = " + value with string
 * concatenation inside append(), allocating a throwaway String; chained
 * appends produce the identical output without the intermediate.
 */
@Override
protected StringBuilder appendFieldsToString(final StringBuilder sb) {
  sb.append("key=");
  final Object k = getKeyCopy();
  final Object val = OffHeapRegionEntryUtils.getHeapRowForInVMValue(this);
  RegionEntryUtils.entryKeyString(k, val, getTableInfo(null), sb);
  sb.append("; byte source = ").append(this._getValue());
  sb.append("; rawValue=");
  ArrayUtils.objectStringNonRecursive(val, sb);
  // Lock state is rendered in hex to match the lock word's bit layout.
  sb.append("; lockState=0x").append(Integer.toHexString(getState()));
  sb.append("; bucketId=").append(this.bucketId);
  return sb;
}
/**
 * Factory producing entries of this concrete off-heap type; also knows how
 * to switch to the versioned and on-heap variants.
 *
 * Fixes: field made {@code final} (it is never reassigned), and
 * {@code @Override} added to createEntry/getEntryClass/makeVersioned for
 * consistency with makeOnHeap, so signature drift in RegionEntryFactory is
 * caught at compile time.
 */
private static final RegionEntryFactory factory = new RegionEntryFactory() {
  @Override
  public final RegionEntry createEntry(RegionEntryContext context, Object key,
      Object value) {
    return new VMBucketRowLocationStatsLRURegionEntryOffHeap(context, key, value);
  }
  @Override
  public final Class<?> getEntryClass() {
    return VMBucketRowLocationStatsLRURegionEntryOffHeap.class;
  }
  @Override
  public RegionEntryFactory makeVersioned() {
    return VersionedBucketRowLocationStatsLRURegionEntryOffHeap.getEntryFactory();
  }
  @Override
  public RegionEntryFactory makeOnHeap() {
    return VMBucketRowLocationStatsLRURegionEntryHeap.getEntryFactory();
  }
};

/** @return the shared factory for this entry class. */
public static RegionEntryFactory getEntryFactory() {
  return factory;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.state.stack.upgrade;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlAttribute;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlEnum;
import javax.xml.bind.annotation.XmlEnumValue;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Objects;
/**
* The {@link ConfigUpgradeChangeDefinition} represents a configuration change. This change can be
* defined with conditional statements that will only set values if a condition
* passes:
* <p/>
*
* <pre>
* {@code
* <definition>
* <condition type="hive-site" key="hive.server2.transport.mode" value="binary">
* <type>hive-site</type>
* <key>hive.server2.thrift.port</key>
* <value>10010</value>
* </condition>
* <condition type="hive-site" key="hive.server2.transport.mode" value="http">
* <type>hive-site</type>
* <key>hive.server2.http.port</key>
* <value>10011</value>
* </condition>
* </definition>
* }
* </pre>
*
* It's also possible to simply set values directly without a precondition
* check.
*
* <pre>
* {@code
* <definition xsi:type="configure">
* <type>hive-site</type>
* <set key="hive.server2.thrift.port" value="10010"/>
* <set key="foo" value="bar"/>
* <set key="foobar" value="baz"/>
* </definition>
* }
* </pre>
*
*/
@XmlRootElement
@XmlAccessorType(XmlAccessType.FIELD)
public class ConfigUpgradeChangeDefinition {

  private static final Logger LOG = LoggerFactory.getLogger(ConfigUpgradeChangeDefinition.class);

  /**
   * The key that represents the configuration type to change (ie hdfs-site).
   */
  public static final String PARAMETER_CONFIG_TYPE = "configure-task-config-type";

  /**
   * Setting key/value pairs can be several per task, so they're passed in as a
   * json-ified list of objects.
   */
  public static final String PARAMETER_KEY_VALUE_PAIRS = "configure-task-key-value-pairs";

  /**
   * Transfers can be several per task, so they're passed in as a json-ified
   * list of objects.
   */
  public static final String PARAMETER_TRANSFERS = "configure-task-transfers";

  /**
   * Replacements can be several per task, so they're passed in as a json-ified
   * list of objects.
   */
  public static final String PARAMETER_REPLACEMENTS = "configure-task-replacements";

  public static final String actionVerb = "Configuring";

  public static final Float DEFAULT_PRIORITY = 1.0f;

  /**
   * An optional brief description of config changes.
   */
  @XmlAttribute(name = "summary")
  public String summary;

  @XmlAttribute(name = "id", required = true)
  public String id;

  @XmlElement(name="type")
  private String configType;

  @XmlElement(name = "set")
  private List<ConfigurationKeyValue> keyValuePairs;

  @XmlElement(name = "transfer")
  private List<Transfer> transfers;

  @XmlElement(name="replace")
  private List<Replace> replacements;

  @XmlElement(name="regex-replace")
  private List<RegexReplace> regexReplacements;

  /**
   * Insert new content into an existing value by either prepending or
   * appending. Each {@link Insert} will only run if:
   * <ul>
   * <li>The key specified by {@link Insert#key} exists.
   * <li>The content specified by {@link Insert#value} is not found in the key's
   * existing content.
   * </ul>
   */
  @XmlElement(name = "insert")
  private List<Insert> inserts;

  /**
   * @return the config type
   */
  public String getConfigType() {
    return configType;
  }

  /**
   * @return the list of {@code <set key=foo value=bar/>} items
   */
  public List<ConfigurationKeyValue> getKeyValuePairs() {
    return keyValuePairs;
  }

  /**
   * @return the list of transfers, checking for appropriate null fields;
   *         invalid transfers are logged and skipped. Never {@code null}.
   */
  public List<Transfer> getTransfers() {
    if (null == transfers) {
      return Collections.emptyList();
    }

    List<Transfer> list = new ArrayList<>();
    for (Transfer t : transfers) {
      switch (t.operation) {
        case COPY:
        case MOVE:
          // copy/move need both endpoints to be meaningful
          if (null != t.fromKey && null != t.toKey) {
            list.add(t);
          } else {
            LOG.warn(String.format("Transfer %s is invalid", t));
          }
          break;
        case DELETE:
          if (null != t.deleteKey) {
            list.add(t);
          } else {
            LOG.warn(String.format("Transfer %s is invalid", t));
          }
          break;
      }
    }
    return list;
  }

  /**
   * @return the replacement tokens, never {@code null}; invalid replacements
   *         are logged and skipped.
   */
  public List<Replace> getReplacements() {
    if (null == replacements) {
      return Collections.emptyList();
    }

    List<Replace> list = new ArrayList<>();
    for (Replace r : replacements) {
      if (null == r.key || null == r.find || null == r.replaceWith) {
        LOG.warn(String.format("Replacement %s is invalid", r));
        continue;
      }
      list.add(r);
    }
    return list;
  }

  /**
   * Resolves each regex replacement against the cluster's current desired
   * config of {@link #configType} and returns the concrete replacements.
   * Entries are skipped (with a warning) when the config, its properties, or
   * the referenced property are missing, or when the pattern does not match
   * with exactly one capturing group.
   *
   * Fix: the original dereferenced the config/properties/content without null
   * checks and relied on the broad {@code catch (Exception)} to swallow the
   * resulting NPE; the nulls are now handled explicitly. The catch remains as
   * a safety net for pattern-compilation and other unexpected errors.
   *
   * @return the replacement tokens, never {@code null}
   */
  public List<Replace> getRegexReplacements(Cluster cluster) {
    if (null == regexReplacements) {
      return Collections.emptyList();
    }

    List<Replace> list = new ArrayList<>();
    for (RegexReplace regexReplaceObj : regexReplacements) {
      if (null == regexReplaceObj.key || null == regexReplaceObj.find
          || null == regexReplaceObj.replaceWith) {
        LOG.warn(String.format("Replacement %s is invalid", regexReplaceObj));
        continue;
      }

      try {
        Config config = cluster.getDesiredConfigByType(configType);
        if (null == config) {
          LOG.warn("No desired config of type {} found; skipping {}",
              configType, regexReplaceObj);
          continue;
        }

        Map<String, String> properties = config.getProperties();
        String content = (null == properties) ? null
            : properties.get(regexReplaceObj.key);
        if (null == content) {
          LOG.warn("Property {} not found in {}; skipping {}",
              regexReplaceObj.key, configType, regexReplaceObj);
          continue;
        }

        Pattern pattern = Pattern.compile(regexReplaceObj.find, Pattern.MULTILINE);
        Matcher matcher = pattern.matcher(content);
        // Only a match with exactly one capturing group is translated; the
        // matched text (group 0) becomes the literal string to find.
        if (matcher.find() && matcher.groupCount() == 1) {
          regexReplaceObj.find = matcher.group();
          Replace rep = regexReplaceObj.copyToReplaceObject();
          list.add(rep);
        }
      } catch (Exception e) {
        String message = "getRegexReplacements : Error while fetching config properties : key - "
            + regexReplaceObj.key + " find - " + regexReplaceObj.find;
        LOG.error(message, e);
      }
    }
    return list;
  }

  /**
   * Gets the insertion directives.
   *
   * @return the inserts, or an empty list (never {@code null}).
   */
  public List<Insert> getInsertions() {
    if (null == inserts) {
      return Collections.emptyList();
    }
    return inserts;
  }

  /**
   * Used for configuration updates that should mask their values from being
   * printed in plain text.
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  public static class Masked {
    @XmlAttribute(name = "mask")
    public boolean mask = false;

    /**
     * The key to read for the if condition.
     */
    @XmlAttribute(name = "if-key")
    public String ifKey;

    /**
     * The config type to read for the if condition.
     */
    @XmlAttribute(name = "if-type")
    public String ifType;

    /**
     * The property value to compare against for the if condition.
     */
    @XmlAttribute(name = "if-value")
    public String ifValue;

    /**
     * The property key state for the if condition
     */
    @XmlAttribute(name = "if-key-state")
    public PropertyKeyState ifKeyState;
  }

  /**
   * A key/value pair to set in the type specified by {@link ConfigUpgradeChangeDefinition#configType}
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlType(name = "set")
  public static class ConfigurationKeyValue extends Masked {
    @XmlAttribute(name = "key")
    public String key;

    @XmlAttribute(name = "value")
    public String value;

    @Override
    public String toString() {
      return Objects.toStringHelper("Set").add("key", key)
          .add("value", value)
          .add("ifKey", ifKey)
          .add("ifType", ifType)
          .add("ifValue",ifValue)
          .add("ifKeyState", ifKeyState).omitNullValues().toString();
    }
  }

  /**
   * A {@code transfer} element will copy, move, or delete the value of one type/key to another type/key.
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlType(name = "transfer")
  public static class Transfer extends Masked {
    /**
     * The type of operation, such as COPY or DELETE.
     */
    @XmlAttribute(name = "operation")
    public TransferOperation operation;

    /**
     * The configuration type to copy or move from.
     */
    @XmlAttribute(name = "from-type")
    public String fromType;

    /**
     * The key to copy or move the configuration from.
     */
    @XmlAttribute(name = "from-key")
    public String fromKey;

    /**
     * The key to copy the configuration value to.
     */
    @XmlAttribute(name = "to-key")
    public String toKey;

    /**
     * The configuration key to delete, or "*" for all.
     */
    @XmlAttribute(name = "delete-key")
    public String deleteKey;

    /**
     * If {@code true}, this will ensure that any changed properties are not
     * removed during a {@link TransferOperation#DELETE}.
     */
    @XmlAttribute(name = "preserve-edits")
    public boolean preserveEdits = false;

    /**
     * A default value to use when the configurations don't contain the
     * {@link #fromKey}.
     */
    @XmlAttribute(name = "default-value")
    public String defaultValue;

    /**
     * A data type to convert the configuration value to when the action is
     * {@link TransferOperation#COPY}.
     */
    @XmlAttribute(name = "coerce-to")
    public TransferCoercionType coerceTo;

    /**
     * The keys to keep when the action is {@link TransferOperation#DELETE}.
     */
    @XmlElement(name = "keep-key")
    public List<String> keepKeys = new ArrayList<>();

    @Override
    public String toString() {
      return Objects.toStringHelper(this).add("operation", operation)
          .add("fromType", fromType)
          .add("fromKey", fromKey)
          .add("toKey", toKey)
          .add("deleteKey", deleteKey)
          .add("preserveEdits",preserveEdits)
          .add("defaultValue", defaultValue)
          .add("coerceTo", coerceTo)
          .add("ifKey", ifKey)
          .add("ifType", ifType)
          .add("ifValue", ifValue)
          .add("ifKeyState", ifKeyState)
          .add("keepKeys", keepKeys).omitNullValues().toString();
    }
  }

  /**
   * Used to replace strings in a key with other strings. More complex
   * scenarios will be possible with regex (when needed)
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlType(name = "replace")
  public static class Replace extends Masked {
    /**
     * The key name
     */
    @XmlAttribute(name="key")
    public String key;

    /**
     * The string to find
     */
    @XmlAttribute(name="find")
    public String find;

    /**
     * The string to replace
     */
    @XmlAttribute(name="replace-with")
    public String replaceWith;

    @Override
    public String toString() {
      return Objects.toStringHelper(this).add("key", key)
          .add("find", find)
          .add("replaceWith", replaceWith)
          .add("ifKey", ifKey)
          .add("ifType", ifType)
          .add("ifValue", ifValue)
          .add("ifKeyState", ifKeyState).omitNullValues().toString();
    }
  }

  /**
   * Used to replace strings in a key with other strings. More complex
   * scenarios are possible with regex.
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlType(name = "regex-replace")
  public static class RegexReplace extends Masked{
    /**
     * The key name
     */
    @XmlAttribute(name="key")
    public String key;

    /**
     * The regular expression to find
     */
    @XmlAttribute(name="find")
    public String find;

    /**
     * The string to replace
     */
    @XmlAttribute(name="replace-with")
    public String replaceWith;

    @Override
    public String toString() {
      return Objects.toStringHelper(this).add("key", key)
          .add("find", find)
          .add("replaceWith",replaceWith)
          .add("ifKey", ifKey)
          .add("ifType", ifType)
          .add("ifValue", ifValue)
          .add("ifKeyState", ifKeyState).omitNullValues().toString();
    }

    /***
     * Copies a RegexReplace type object to Replace object.
     * @return Replace object
     */
    public Replace copyToReplaceObject(){
      Replace rep = new Replace();
      rep.find = find;
      rep.key = key;
      rep.replaceWith = replaceWith;
      rep.ifKey = ifKey;
      rep.ifType = ifType;
      rep.ifValue = ifValue;
      rep.ifKeyState = ifKeyState;
      return rep;
    }
  }

  /**
   * Used to insert strings into an existing value. If the value specified in
   * {@link Insert#value} already exists, then it is not inserted again.
   */
  @XmlAccessorType(XmlAccessType.FIELD)
  @XmlType(name = "insert")
  public static class Insert {
    /**
     * The key name
     */
    @XmlAttribute(name = "key", required = true)
    public String key;

    /**
     * The value to insert.
     */
    @XmlAttribute(name = "value", required = true)
    public String value;

    /**
     * Where to insert the value (prepend or append).
     */
    @XmlAttribute(name = "insert-type", required = true)
    public InsertType insertType = InsertType.APPEND;

    /**
     * {@code true} to insert a new line before inserting the {@link #value}.
     */
    @XmlAttribute(name = "newline-before")
    public boolean newlineBefore = false;

    /**
     * {@code true} to insert a new line after inserting the {@link #value}.
     */
    @XmlAttribute(name = "newline-after")
    public boolean newlineAfter = false;

    /**
     * {@inheritDoc}
     */
    @Override
    public String toString() {
      return Objects.toStringHelper(this).add("insertType", insertType)
          .add("key", key)
          .add("value",value)
          .add("newlineBefore", newlineBefore)
          .add("newlineAfter", newlineAfter).omitNullValues().toString();
    }
  }

  /**
   * The {@link InsertType} defines how to use the {@link Insert} directive.
   */
  @XmlEnum
  public enum InsertType {
    /**
     * Prepend the content.
     */
    @XmlEnumValue("prepend")
    PREPEND,

    /**
     * Append the content.
     */
    @XmlEnumValue("append")
    APPEND
  }
}
| |
package org.metalev.multitouch.controller;
/**
* MultiTouchController.java
*
* Author: Luke Hutchison (luke.hutch@mit.edu)
* Please drop me an email if you use this code so I can list your project here!
*
* Usage:
* <code>
* public class MyMTView extends View implements MultiTouchObjectCanvas<PinchWidgetType> {
*
* private MultiTouchController<PinchWidgetType> multiTouchController = new MultiTouchController<PinchWidgetType>(this);
*
* // Pass touch events to the MT controller
* public boolean onTouchEvent(MotionEvent event) {
* return multiTouchController.onTouchEvent(event);
* }
*
* // ... then implement the MultiTouchObjectCanvas interface here, see details in the comments of that interface.
* }
* </code>
*
* Changelog:
* 2010-06-09 v1.5.1 Some API changes to make it possible to selectively update or not update scale / rotation.
* Fixed anisotropic zoom. Cleaned up rotation code. Added more comments. Better var names. (LH)
* 2010-06-09 v1.4 Added ability to track pinch rotation (Mickael Despesse, author of "Face Frenzy") and anisotropic pinch-zoom (LH)
* 2010-06-09 v1.3.3 Bugfixes for Android-2.1; added optional debug info (LH)
* 2010-06-09 v1.3 Ported to Android-2.2 (handle ACTION_POINTER_* actions); fixed several bugs; refactoring; documentation (LH)
* 2010-05-17 v1.2.1 Dual-licensed under Apache and GPL licenses
* 2010-02-18 v1.2 Support for compilation under Android 1.5/1.6 using introspection (mmin, author of handyCalc)
* 2010-01-08 v1.1.1 Bugfixes to Cyanogen's patch that only showed up in more complex uses of controller (LH)
* 2010-01-06 v1.1 Modified for official level 5 MT API (Cyanogen)
* 2009-01-25 v1.0 Original MT controller, released for hacked G1 kernel (LH)
*
* Planned features:
* - Add inertia (flick-pinch-zoom or flick-scroll)
*
* Known usages:
* - Mickael Despesse's "Face Frenzy" face distortion app, to be published to the Market soon
* - Yuan Chin's fork of ADW Launcher to support multitouch
* - David Byrne's fractal viewing app Fractoid
* - mmin's handyCalc calculator
* - My own "MultiTouch Visualizer 2" in the Market
* - Formerly: The browser in cyanogenmod (and before that, JesusFreke), and other firmwares like dwang5. This usage has been
* replaced with official pinch/zoom in Maps, Browser and Gallery[3D] as of API level 5.
*
* License:
* Dual-licensed under the Apache License v2 and the GPL v2.
*/
import java.lang.reflect.Method;
import android.util.Log;
import android.view.MotionEvent;
/**
* A class that simplifies the implementation of multitouch in applications. Subclass this and read the fields here as needed in subclasses.
*
* @author Luke Hutchison
*/
public class MultiTouchController<T> {
/**
* Time in ms required after a change in event status (e.g. putting down or lifting off the second finger) before events actually do anything --
* helps eliminate noisy jumps that happen on change of status
*/
private static final long EVENT_SETTLE_TIME_INTERVAL = 20;
/**
* The biggest possible abs val of the change in x or y between multitouch events (larger dx/dy events are ignored) -- helps eliminate jumps in
* pointer position on finger 2 up/down.
*/
private static final float MAX_MULTITOUCH_POS_JUMP_SIZE = 30.0f;
/**
* The biggest possible abs val of the change in multitouchWidth or multitouchHeight between multitouch events (larger-jump events are ignored) --
* helps eliminate jumps in pointer position on finger 2 up/down.
*/
private static final float MAX_MULTITOUCH_DIM_JUMP_SIZE = 40.0f;
/** The smallest possible distance between multitouch points (used to avoid div-by-zero errors and display glitches) */
private static final float MIN_MULTITOUCH_SEPARATION = 30.0f;
/** The max number of touch points that can be present on the screen at once */
public static final int MAX_TOUCH_POINTS = 20;
/** Generate tons of log entries for debugging */
public static final boolean DEBUG = false;
// ----------------------------------------------------------------------------------------------------------------------
// Client-supplied callback target that receives drag/pinch updates.
// NOTE(review): package-private — presumably for access by same-package
// helpers; confirm before tightening to private.
MultiTouchObjectCanvas<T> objectCanvas;
/** The current touch point */
private PointInfo mCurrPt;
/** The previous touch point */
private PointInfo mPrevPt;
/** Fields extracted from mCurrPt */
private float mCurrPtX, mCurrPtY, mCurrPtDiam, mCurrPtWidth, mCurrPtHeight, mCurrPtAng;
/**
 * Unpack mCurrPt into the mCurrPt* scalar fields, honoring the update* flags
 * of mCurrXform. Centralized here to avoid duplicating the unpacking at each
 * call site.
 */
private void extractCurrPtInfo() {
  // Only compute the expensive multitouch quantities (diameter, width/height,
  // angle) when the current transform actually wants them updated; otherwise
  // substitute 0, which Math.max clamps to the minimum separation.
  mCurrPtX = mCurrPt.getX();
  mCurrPtY = mCurrPt.getY();
  mCurrPtDiam = Math.max(MIN_MULTITOUCH_SEPARATION * .71f,
      mCurrXform.updateScale ? mCurrPt.getMultiTouchDiameter() : 0.0f);
  mCurrPtWidth = Math.max(MIN_MULTITOUCH_SEPARATION,
      mCurrXform.updateScaleXY ? mCurrPt.getMultiTouchWidth() : 0.0f);
  mCurrPtHeight = Math.max(MIN_MULTITOUCH_SEPARATION,
      mCurrXform.updateScaleXY ? mCurrPt.getMultiTouchHeight() : 0.0f);
  mCurrPtAng = mCurrXform.updateAngle ? mCurrPt.getMultiTouchAngle() : 0.0f;
}
// ----------------------------------------------------------------------------------------------------------------------
/** Whether to handle single-touch events/drags before multi-touch is initiated or not; if not, they are handled by subclasses */
private boolean handleSingleTouchEvents;
/** The object being dragged/stretched */
private T selectedObject = null;
/** Current position and scale of the dragged object */
private PositionAndScale mCurrXform = new PositionAndScale();
/** Drag/pinch start time and time to ignore spurious events until (to smooth over event noise) */
private long mSettleStartTime, mSettleEndTime;
/** Conversion from object coords to screen coords */
private float startPosX, startPosY;
/** Conversion between scale and width, and object angle and start pinch angle */
private float startScaleOverPinchDiam, startAngleMinusPinchAngle;
/** Conversion between X scale and width, and Y scale and height */
private float startScaleXOverPinchWidth, startScaleYOverPinchHeight;
// ----------------------------------------------------------------------------------------------------------------------
// Drag-state machine modes (see multiTouchController()):
/** No touch points down. */
private static final int MODE_NOTHING = 0;
/** One touch point down, dragging an object. */
private static final int MODE_DRAG = 1;
/** Two or more touch points down, stretching/rotating an object using the first two touch points. */
private static final int MODE_PINCH = 2;
/** Current drag mode */
private int mMode = MODE_NOTHING;
/** Constructor that sets handleSingleTouchEvents to true */
public MultiTouchController(MultiTouchObjectCanvas<T> objectCanvas) {
this(objectCanvas, true);
}
/** Full constructor */
public MultiTouchController(MultiTouchObjectCanvas<T> objectCanvas, boolean handleSingleTouchEvents) {
this.mCurrPt = new PointInfo();
this.mPrevPt = new PointInfo();
this.handleSingleTouchEvents = handleSingleTouchEvents;
this.objectCanvas = objectCanvas;
}
// ------------------------------------------------------------------------------------
/**
* Whether to handle single-touch events/drags before multi-touch is initiated or not; if not, they are handled by subclasses. Default: true
*/
protected void setHandleSingleTouchEvents(boolean handleSingleTouchEvents) {
this.handleSingleTouchEvents = handleSingleTouchEvents;
}
/**
* Whether to handle single-touch events/drags before multi-touch is initiated or not; if not, they are handled by subclasses. Default: true
*/
protected boolean getHandleSingleTouchEvents() {
return handleSingleTouchEvents;
}
// ------------------------------------------------------------------------------------
// Reflection-based probing of the level-5+ multitouch API so this class also
// compiles and runs on Android 1.5/1.6, where these MotionEvent methods and
// constants do not exist.
/** True when the level-5 multitouch MotionEvent methods were resolved at class-load time. */
public static final boolean multiTouchSupported;
private static Method m_getPointerCount;
private static Method m_getPointerId;
private static Method m_getPressure;
private static Method m_getHistoricalX;
private static Method m_getHistoricalY;
private static Method m_getHistoricalPressure;
private static Method m_getX;
private static Method m_getY;
// Defaults match the Android 2.2 constant values; overwritten below by
// reflection when the running platform actually defines them.
private static int ACTION_POINTER_UP = 6;
private static int ACTION_POINTER_INDEX_SHIFT = 8;
static {
boolean succeeded = false;
try {
// Android 2.0.1 stuff:
m_getPointerCount = MotionEvent.class.getMethod("getPointerCount");
m_getPointerId = MotionEvent.class.getMethod("getPointerId", Integer.TYPE);
m_getPressure = MotionEvent.class.getMethod("getPressure", Integer.TYPE);
m_getHistoricalX = MotionEvent.class.getMethod("getHistoricalX", Integer.TYPE, Integer.TYPE);
m_getHistoricalY = MotionEvent.class.getMethod("getHistoricalY", Integer.TYPE, Integer.TYPE);
m_getHistoricalPressure = MotionEvent.class.getMethod("getHistoricalPressure", Integer.TYPE, Integer.TYPE);
m_getX = MotionEvent.class.getMethod("getX", Integer.TYPE);
m_getY = MotionEvent.class.getMethod("getY", Integer.TYPE);
succeeded = true;
} catch (Exception e) {
Log.e("MultiTouchController", "static initializer failed", e);
}
multiTouchSupported = succeeded;
if (multiTouchSupported) {
// Android 2.2+ stuff (the original Android 2.2 consts are declared above,
// and these actions aren't used previous to Android 2.2):
try {
ACTION_POINTER_UP = MotionEvent.class.getField("ACTION_POINTER_UP").getInt(null);
ACTION_POINTER_INDEX_SHIFT = MotionEvent.class.getField("ACTION_POINTER_INDEX_SHIFT").getInt(null);
} catch (Exception e) {
// Deliberately ignored: on pre-2.2 platforms these fields are absent and
// the hard-coded defaults above are used instead.
}
}
}
// ------------------------------------------------------------------------------------
// Static scratch buffers reused across every onTouchEvent() call to avoid
// per-event allocation. NOTE(review): being static and unsynchronized, these
// assume all touch events arrive on a single (UI) thread — confirm.
private static final float[] xVals = new float[MAX_TOUCH_POINTS];
private static final float[] yVals = new float[MAX_TOUCH_POINTS];
private static final float[] pressureVals = new float[MAX_TOUCH_POINTS];
private static final int[] pointerIds = new int[MAX_TOUCH_POINTS];
/**
* Process incoming touch events. Reads current and historical pointer data
* (via reflection when the level-5 multitouch API is available), then feeds
* each sample to decodeTouchEvent().
*
* @return true if the event was consumed, false if single-touch events are
* being passed through to the caller or reflection failed.
*/
public boolean onTouchEvent(MotionEvent event) {
try {
int pointerCount = multiTouchSupported ? (Integer) m_getPointerCount.invoke(event) : 1;
if (DEBUG)
Log.i("MultiTouch", "Got here 1 - " + multiTouchSupported + " " + mMode + " " + handleSingleTouchEvents + " " + pointerCount);
if (mMode == MODE_NOTHING && !handleSingleTouchEvents && pointerCount == 1)
// Not handling initial single touch events, just pass them on
return false;
if (DEBUG)
Log.i("MultiTouch", "Got here 2");
// Handle history first (we sometimes get history with ACTION_MOVE events)
int action = event.getAction();
// NOTE(review): getHistorySize() is divided by pointerCount here —
// presumably to get per-pointer history on this API era; confirm, since
// the platform documents getHistorySize() as already per-pointer.
int histLen = event.getHistorySize() / pointerCount;
// Iterate histIdx up to and INCLUDING histLen: indices < histLen read
// history samples, and the final iteration reads the current event.
for (int histIdx = 0; histIdx <= histLen; histIdx++) {
// Read from history entries until histIdx == histLen, then read from current event
boolean processingHist = histIdx < histLen;
if (!multiTouchSupported || pointerCount == 1) {
// Use single-pointer methods -- these are needed as a special case (for some weird reason) even if
// multitouch is supported but there's only one touch point down currently -- event.getX(0) etc. throw
// an exception if there's only one point down.
if (DEBUG)
Log.i("MultiTouch", "Got here 3");
xVals[0] = processingHist ? event.getHistoricalX(histIdx) : event.getX();
yVals[0] = processingHist ? event.getHistoricalY(histIdx) : event.getY();
pressureVals[0] = processingHist ? event.getHistoricalPressure(histIdx) : event.getPressure();
} else {
// Read x, y and pressure of each pointer
if (DEBUG)
Log.i("MultiTouch", "Got here 4");
int numPointers = Math.min(pointerCount, MAX_TOUCH_POINTS);
if (DEBUG && pointerCount > MAX_TOUCH_POINTS)
Log.i("MultiTouch", "Got more pointers than MAX_TOUCH_POINTS");
for (int ptrIdx = 0; ptrIdx < numPointers; ptrIdx++) {
int ptrId = (Integer) m_getPointerId.invoke(event, ptrIdx);
pointerIds[ptrIdx] = ptrId;
// N.B. if pointerCount == 1, then the following methods throw an array index out of range exception,
// and the code above is therefore required not just for Android 1.5/1.6 but also for when there is
// only one touch point on the screen -- pointlessly inconsistent :(
xVals[ptrIdx] = (Float) (processingHist ? m_getHistoricalX.invoke(event, ptrIdx, histIdx) : m_getX.invoke(event, ptrIdx));
yVals[ptrIdx] = (Float) (processingHist ? m_getHistoricalY.invoke(event, ptrIdx, histIdx) : m_getY.invoke(event, ptrIdx));
pressureVals[ptrIdx] = (Float) (processingHist ? m_getHistoricalPressure.invoke(event, ptrIdx, histIdx) : m_getPressure
.invoke(event, ptrIdx));
}
}
// Decode event: history samples are treated as ACTION_MOVE with fingers
// down; the current sample's "down" state is derived by masking out the
// pointer index bits and comparing against the up/cancel actions.
decodeTouchEvent(pointerCount, xVals, yVals, pressureVals, pointerIds, //
/* action = */processingHist ? MotionEvent.ACTION_MOVE : action, //
/* down = */processingHist ? true : action != MotionEvent.ACTION_UP //
&& (action & ((1 << ACTION_POINTER_INDEX_SHIFT) - 1)) != ACTION_POINTER_UP //
&& action != MotionEvent.ACTION_CANCEL, //
processingHist ? event.getHistoricalEventTime(histIdx) : event.getEventTime());
}
return true;
} catch (Exception e) {
// In case any of the introspection stuff fails (it shouldn't)
Log.e("MultiTouchController", "onTouchEvent() failed", e);
return false;
}
}
/**
 * Record one decoded touch sample and run the state machine. The two
 * PointInfo buffers are rotated so the old current point becomes the
 * previous point and its storage is recycled for the incoming sample
 * (avoids allocating per event).
 */
private void decodeTouchEvent(int pointerCount, float[] x, float[] y, float[] pressure, int[] pointerIds, int action, boolean down, long eventTime) {
  if (DEBUG)
    Log.i("MultiTouch", "Got here 5 - " + pointerCount + " " + action + " " + down);
  final PointInfo recycled = mPrevPt;
  mPrevPt = mCurrPt;
  mCurrPt = recycled;
  mCurrPt.set(pointerCount, x, y, pressure, pointerIds, action, down, eventTime);
  multiTouchController();
}
// ------------------------------------------------------------------------------------
/** Start dragging/pinching, or reset drag/pinch to current point if something goes out of range */
private void anchorAtThisPositionAndScale() {
if (selectedObject == null)
return;
// Get selected object's current position and scale
objectCanvas.getPositionAndScale(selectedObject, mCurrXform);
// Figure out the object coords of the drag start point's screen coords.
// All stretching should be around this point in object-coord-space.
// Also figure out the ratio between object scale factor and multitouch
// diameter at beginning of drag; same for angle and optional anisotropic
// scale.
// Guard against scale == 0 (and scale not being tracked) to avoid a
// divide-by-zero when converting screen coords to object coords.
float currScaleInv = 1.0f / (!mCurrXform.updateScale ? 1.0f : mCurrXform.scale == 0.0f ? 1.0f : mCurrXform.scale);
extractCurrPtInfo();
startPosX = (mCurrPtX - mCurrXform.xOff) * currScaleInv;
startPosY = (mCurrPtY - mCurrXform.yOff) * currScaleInv;
// These ratios let performDragOrPinch() turn current pinch measurements
// directly into new scale/angle values.
startScaleOverPinchDiam = mCurrXform.scale / mCurrPtDiam;
startScaleXOverPinchWidth = mCurrXform.scaleX / mCurrPtWidth;
startScaleYOverPinchHeight = mCurrXform.scaleY / mCurrPtHeight;
startAngleMinusPinchAngle = mCurrXform.angle - mCurrPtAng;
}
/**
 * Drag/stretch/rotate the selected object using the current touch position(s)
 * relative to the anchor position(s) captured by anchorAtThisPositionAndScale().
 *
 * Fix: removed the dead empty statement {@code if (!success) ;} (and its
 * now-unused local) left after the call to setPositionAndScale(); the return
 * value is intentionally ignored — nothing is done when the subclass rejects
 * the new transform.
 */
private void performDragOrPinch() {
  // Don't do anything if we're not dragging anything
  if (selectedObject == null)
    return;
  // Calc new position of dragged object; treat scale 0 (or scale not
  // tracked) as 1 to avoid collapsing the position math.
  float currScale = !mCurrXform.updateScale ? 1.0f : mCurrXform.scale == 0.0f ? 1.0f : mCurrXform.scale;
  extractCurrPtInfo();
  float newPosX = mCurrPtX - startPosX * currScale;
  float newPosY = mCurrPtY - startPosY * currScale;
  float newScale = startScaleOverPinchDiam * mCurrPtDiam;
  float newScaleX = startScaleXOverPinchWidth * mCurrPtWidth;
  float newScaleY = startScaleYOverPinchHeight * mCurrPtHeight;
  float newAngle = startAngleMinusPinchAngle + mCurrPtAng;
  // Set the new obj coords, scale, and angle as appropriate (notifying the subclass of the change).
  mCurrXform.set(newPosX, newPosY, newScale, newScaleX, newScaleY, newAngle);
  objectCanvas.setPositionAndScale(selectedObject, mCurrXform, mCurrPt);
}
/** @return {@code true} while a two-finger pinch gesture is in progress. */
public boolean isPinching() {
    return MODE_PINCH == mMode;
}
/**
 * State-based controller tracking transitions between no-touch
 * ({@code MODE_NOTHING}), single-touch drag ({@code MODE_DRAG}) and
 * multi-touch pinch ({@code MODE_PINCH}) situations. Includes logic for
 * cleaning up the event stream, as events around touch up/down are noisy at
 * least on early Synaptics sensors: after any finger transition the
 * controller re-anchors and ignores movement until
 * {@code EVENT_SETTLE_TIME_INTERVAL} has elapsed.
 */
private void multiTouchController() {
    if (DEBUG)
        Log.i("MultiTouch", "Got here 6 - " + mMode + " " + mCurrPt.getNumTouchPoints() + " " + mCurrPt.isDown() + mCurrPt.isMultiTouch());
    switch (mMode) {
    case MODE_NOTHING:
        // Not doing anything currently
        if (mCurrPt.isDown()) {
            // Start a new single-point drag
            selectedObject = objectCanvas.getDraggableObjectAtPoint(mCurrPt);
            if (selectedObject != null) {
                // Started a new single-point drag
                mMode = MODE_DRAG;
                objectCanvas.selectObject(selectedObject, mCurrPt);
                anchorAtThisPositionAndScale();
                // Don't need any settling time if just placing one finger, there is no noise
                mSettleStartTime = mSettleEndTime = mCurrPt.getEventTime();
            }
        }
        break;
    case MODE_DRAG:
        // Currently in a single-point drag
        if (!mCurrPt.isDown()) {
            // First finger was released, stop dragging
            mMode = MODE_NOTHING;
            objectCanvas.selectObject((selectedObject = null), mCurrPt);
        } else if (mCurrPt.isMultiTouch()) {
            // Point 1 was already down and point 2 was just placed down
            mMode = MODE_PINCH;
            // Restart the drag with the new drag position (that is at the midpoint between the touchpoints)
            anchorAtThisPositionAndScale();
            // Need to let events settle before moving things, to help with event noise on touchdown
            mSettleStartTime = mCurrPt.getEventTime();
            mSettleEndTime = mSettleStartTime + EVENT_SETTLE_TIME_INTERVAL;
        } else {
            // Point 1 is still down and point 2 did not change state, just do single-point drag to new location
            if (mCurrPt.getEventTime() < mSettleEndTime) {
                // Ignore the first few events if we just stopped stretching, because if finger 2 was kept down while
                // finger 1 is lifted, then point 1 gets mapped to finger 2. Restart the drag from the new position.
                anchorAtThisPositionAndScale();
            } else {
                // Keep dragging, move to new point
                performDragOrPinch();
            }
        }
        break;
    case MODE_PINCH:
        // Two-point pinch-scale/rotate/translate
        if (!mCurrPt.isMultiTouch() || !mCurrPt.isDown()) {
            // Dropped one or both points, stop stretching
            if (!mCurrPt.isDown()) {
                // Dropped both points, go back to doing nothing
                mMode = MODE_NOTHING;
                objectCanvas.selectObject((selectedObject = null), mCurrPt);
            } else {
                // Just dropped point 2, downgrade to a single-point drag
                mMode = MODE_DRAG;
                // Restart the pinch with the single-finger position
                anchorAtThisPositionAndScale();
                // Ignore the first few events after the drop, in case we dropped finger 1 and left finger 2 down
                mSettleStartTime = mCurrPt.getEventTime();
                mSettleEndTime = mSettleStartTime + EVENT_SETTLE_TIME_INTERVAL;
            }
        } else {
            // Still pinching
            if (Math.abs(mCurrPt.getX() - mPrevPt.getX()) > MAX_MULTITOUCH_POS_JUMP_SIZE
                    || Math.abs(mCurrPt.getY() - mPrevPt.getY()) > MAX_MULTITOUCH_POS_JUMP_SIZE
                    || Math.abs(mCurrPt.getMultiTouchWidth() - mPrevPt.getMultiTouchWidth()) * .5f > MAX_MULTITOUCH_DIM_JUMP_SIZE
                    || Math.abs(mCurrPt.getMultiTouchHeight() - mPrevPt.getMultiTouchHeight()) * .5f > MAX_MULTITOUCH_DIM_JUMP_SIZE) {
                // Jumped too far, probably event noise, reset and ignore events for a bit
                anchorAtThisPositionAndScale();
                mSettleStartTime = mCurrPt.getEventTime();
                mSettleEndTime = mSettleStartTime + EVENT_SETTLE_TIME_INTERVAL;
            } else if (mCurrPt.getEventTime() < mSettleEndTime) {
                // Events have not yet settled, reset. (Consistency fix: this
                // previously read the eventTime field directly; every other
                // call site in this method uses the getEventTime() accessor.)
                anchorAtThisPositionAndScale();
            } else {
                // Stretch to new position and size
                performDragOrPinch();
            }
        }
        break;
    }
    if (DEBUG)
        Log.i("MultiTouch", "Got here 7 - " + mMode + " " + mCurrPt.getNumTouchPoints() + " " + mCurrPt.isDown() + mCurrPt.isMultiTouch());
}
// ------------------------------------------------------------------------------------
/**
 * A class that packages up all MotionEvent information with all derived multitouch
 * information (if available). Instances are recycled between events (see the
 * curr/prev buffer swap in decodeTouchEvent), so callers that want to keep a
 * snapshot must copy one via {@link #set(PointInfo)}.
 */
public static class PointInfo {
    // Multitouch information
    private int numPoints;
    // Per-pointer data; only the first numPoints entries of each array are valid.
    private float[] xs = new float[MAX_TOUCH_POINTS];
    private float[] ys = new float[MAX_TOUCH_POINTS];
    private float[] pressures = new float[MAX_TOUCH_POINTS];
    private int[] pointerIds = new int[MAX_TOUCH_POINTS];
    // Midpoint of pinch operations (or the single touch point when not multitouch)
    private float xMid, yMid, pressureMid;
    // Width/diameter/angle of pinch operations, derived from the first two points only
    private float dx, dy, diameter, diameterSq, angle;
    // Whether or not there is at least one finger down (isDown) and/or at least two fingers down (isMultiTouch)
    private boolean isDown, isMultiTouch;
    // Lazy-computation flags: the expensive derived fields above are only
    // computed on first access after each set(), then cached.
    private boolean diameterSqIsCalculated, diameterIsCalculated, angleIsCalculated;
    // Event action code and event time
    private int action;
    private long eventTime;
    // -------------------------------------------------------------------------------------------------------------------------------------------
    /**
     * Set all point info from raw event data and precompute the cheap derived
     * fields (midpoint, dx/dy). Invalidates the cached diameter/angle values.
     */
    private void set(int numPoints, float[] x, float[] y, float[] pressure, int[] pointerIds, int action, boolean isDown, long eventTime) {
        if (DEBUG)
            // NOTE(review): "+ +numPoints" is a unary plus — harmless, but likely a typo.
            Log.i("MultiTouch", "Got here 8 - " + +numPoints + " " + x[0] + " " + y[0] + " " + (numPoints > 1 ? x[1] : x[0]) + " "
                    + (numPoints > 1 ? y[1] : y[0]) + " " + action + " " + isDown);
        this.eventTime = eventTime;
        this.action = action;
        this.numPoints = numPoints;
        for (int i = 0; i < numPoints; i++) {
            this.xs[i] = x[i];
            this.ys[i] = y[i];
            this.pressures[i] = pressure[i];
            this.pointerIds[i] = pointerIds[i];
        }
        this.isDown = isDown;
        this.isMultiTouch = numPoints >= 2;
        if (isMultiTouch) {
            // Derived pinch geometry uses only the first two touch points.
            xMid = (x[0] + x[1]) * .5f;
            yMid = (y[0] + y[1]) * .5f;
            pressureMid = (pressure[0] + pressure[1]) * .5f;
            dx = Math.abs(x[1] - x[0]);
            dy = Math.abs(y[1] - y[0]);
        } else {
            // Single-touch event
            xMid = x[0];
            yMid = y[0];
            pressureMid = pressure[0];
            dx = dy = 0.0f;
        }
        // Need to re-calculate the expensive params if they're needed
        diameterSqIsCalculated = diameterIsCalculated = angleIsCalculated = false;
    }
    /**
     * Copy all fields from one PointInfo class to another. PointInfo objects are volatile so you should use this if you want to keep track of the
     * last touch event in your own code.
     */
    public void set(PointInfo other) {
        this.numPoints = other.numPoints;
        for (int i = 0; i < numPoints; i++) {
            this.xs[i] = other.xs[i];
            this.ys[i] = other.ys[i];
            this.pressures[i] = other.pressures[i];
            this.pointerIds[i] = other.pointerIds[i];
        }
        this.xMid = other.xMid;
        this.yMid = other.yMid;
        this.pressureMid = other.pressureMid;
        this.dx = other.dx;
        this.dy = other.dy;
        this.diameter = other.diameter;
        this.diameterSq = other.diameterSq;
        this.angle = other.angle;
        this.isDown = other.isDown;
        this.action = other.action;
        this.isMultiTouch = other.isMultiTouch;
        this.diameterIsCalculated = other.diameterIsCalculated;
        this.diameterSqIsCalculated = other.diameterSqIsCalculated;
        this.angleIsCalculated = other.angleIsCalculated;
        this.eventTime = other.eventTime;
    }
    // -------------------------------------------------------------------------------------------------------------------------------------------
    /** True if number of touch points >= 2. */
    public boolean isMultiTouch() {
        return isMultiTouch;
    }
    /** Absolute difference between x coords of touchpoints 0 and 1, or 0.0f if not multitouch. */
    public float getMultiTouchWidth() {
        return isMultiTouch ? dx : 0.0f;
    }
    /** Absolute difference between y coords of touchpoints 0 and 1, or 0.0f if not multitouch. */
    public float getMultiTouchHeight() {
        return isMultiTouch ? dy : 0.0f;
    }
    /** Fast integer sqrt, by Jim Ulery. Much faster than Math.sqrt() for integers. */
    private int julery_isqrt(int val) {
        int temp, g = 0, b = 0x8000, bshft = 15;
        do {
            if (val >= (temp = (((g << 1) + b) << bshft--))) {
                g += b;
                val -= temp;
            }
        } while ((b >>= 1) > 0);
        return g;
    }
    /** Calculate the squared diameter of the multitouch event, and cache it. Use this if you don't need to perform the sqrt. */
    public float getMultiTouchDiameterSq() {
        if (!diameterSqIsCalculated) {
            diameterSq = (isMultiTouch ? dx * dx + dy * dy : 0.0f);
            diameterSqIsCalculated = true;
        }
        return diameterSq;
    }
    /** Calculate the diameter of the multitouch event, and cache it. Uses fast int sqrt but gives accuracy to 1/16px. */
    public float getMultiTouchDiameter() {
        if (!diameterIsCalculated) {
            if (!isMultiTouch) {
                diameter = 0.0f;
            } else {
                // Get 1/16 pixel's worth of subpixel accuracy, works on screens up to 2048x2048
                // before we get overflow (at which point you can reduce or eliminate subpix
                // accuracy, or use longs in julery_isqrt())
                float diamSq = getMultiTouchDiameterSq();
                diameter = (diamSq == 0.0f ? 0.0f : (float) julery_isqrt((int) (256 * diamSq)) / 16.0f);
                // Make sure diameter is never less than dx or dy, for trig purposes
                if (diameter < dx)
                    diameter = dx;
                if (diameter < dy)
                    diameter = dy;
            }
            diameterIsCalculated = true;
        }
        return diameter;
    }
    /**
     * Calculate the angle of a multitouch event, and cache it. Returns the
     * Math.atan2() angle of the line from touchpoint 0 to touchpoint 1, so the
     * range is [-Math.PI, Math.PI]. (An earlier comment claimed [0, Math.PI/2];
     * the code does not fold the angle into that range.)
     */
    public float getMultiTouchAngle() {
        if (!angleIsCalculated) {
            if (!isMultiTouch)
                angle = 0.0f;
            else
                angle = (float) Math.atan2(ys[1] - ys[0], xs[1] - xs[0]);
            angleIsCalculated = true;
        }
        return angle;
    }
    // -------------------------------------------------------------------------------------------------------------------------------------------
    /** Return the total number of touch points */
    public int getNumTouchPoints() {
        return numPoints;
    }
    /** Return the X coord of the first touch point if there's only one, or the midpoint between first and second touch points if two or more. */
    public float getX() {
        return xMid;
    }
    /** Return the array of X coords -- only the first getNumTouchPoints() of these is defined. */
    public float[] getXs() {
        return xs;
    }
    /** Return the Y coord of the first touch point if there's only one, or the midpoint between first and second touch points if two or more. */
    public float getY() {
        return yMid;
    }
    /** Return the array of Y coords -- only the first getNumTouchPoints() of these is defined. */
    public float[] getYs() {
        return ys;
    }
    /**
     * Return the array of pointer ids -- only the first getNumTouchPoints() of these is defined. These don't have to be all the numbers from 0 to
     * getNumTouchPoints()-1 inclusive, numbers can be skipped if a finger is lifted and the touch sensor is capable of detecting that that
     * particular touch point is no longer down. Note that a lot of sensors do not have this capability: when finger 1 is lifted up finger 2
     * becomes the new finger 1. However in theory these IDs can correct for that. Convert back to indices using MotionEvent.findPointerIndex().
     */
    public int[] getPointerIds() {
        return pointerIds;
    }
    /** Return the pressure the first touch point if there's only one, or the average pressure of first and second touch points if two or more. */
    public float getPressure() {
        return pressureMid;
    }
    /** Return the array of pressures -- only the first getNumTouchPoints() of these is defined. */
    public float[] getPressures() {
        return pressures;
    }
    // -------------------------------------------------------------------------------------------------------------------------------------------
    /** True if at least one finger is down. */
    public boolean isDown() {
        return isDown;
    }
    /** Raw MotionEvent action code as passed to set(). */
    public int getAction() {
        return action;
    }
    /** Event timestamp as passed to set(). */
    public long getEventTime() {
        return eventTime;
    }
}
// ------------------------------------------------------------------------------------
/**
 * Stores scroll offsets and scale information for objects managed by the
 * multitouch controller: position (xOff/yOff), isotropic scale, optional
 * anisotropic scale (scaleX/scaleY) and rotation angle, together with flags
 * saying which of those the client wants updated during a pinch.
 */
public static class PositionAndScale {
    private float xOff, yOff, scale, scaleX, scaleY, angle;
    private boolean updateScale, updateScaleXY, updateAngle;

    /** Map a zero scale to 1.0f so the transform never collapses or divides by zero. */
    private static float defaultToUnit(float scaleValue) {
        return scaleValue == 0.0f ? 1.0f : scaleValue;
    }

    /**
     * Set position and optionally scale, anisotropic scale, and/or angle. Where if the corresponding "update" flag is set to false, the field's
     * value will not be changed during a pinch operation. If the value is not being updated *and* the value is not used by the client
     * application, then the value can just be zero. However if the value is not being updated but the value *is* being used by the client
     * application, the value should still be specified and the update flag should be false (e.g. angle of the object being dragged should still
     * be specified even if the program is in "resize" mode rather than "rotate" mode).
     */
    public void set(float xOff, float yOff, boolean updateScale, float scale, boolean updateScaleXY, float scaleX, float scaleY,
            boolean updateAngle, float angle) {
        this.xOff = xOff;
        this.yOff = yOff;
        this.updateScale = updateScale;
        this.scale = defaultToUnit(scale);
        this.updateScaleXY = updateScaleXY;
        this.scaleX = defaultToUnit(scaleX);
        this.scaleY = defaultToUnit(scaleY);
        this.updateAngle = updateAngle;
        this.angle = angle;
    }

    /** Set position and optionally scale, anisotropic scale, and/or angle, without changing the "update" flags. */
    protected void set(float xOff, float yOff, float scale, float scaleX, float scaleY, float angle) {
        this.xOff = xOff;
        this.yOff = yOff;
        this.scale = defaultToUnit(scale);
        this.scaleX = defaultToUnit(scaleX);
        this.scaleY = defaultToUnit(scaleY);
        this.angle = angle;
    }

    public float getXOff() {
        return xOff;
    }

    public float getYOff() {
        return yOff;
    }

    /** Effective scale: 1.0f when the client opted out of scale updates. */
    public float getScale() {
        return updateScale ? scale : 1.0f;
    }

    /** Included in case you want to support anisotropic scaling */
    public float getScaleX() {
        return updateScaleXY ? scaleX : 1.0f;
    }

    /** Included in case you want to support anisotropic scaling */
    public float getScaleY() {
        return updateScaleXY ? scaleY : 1.0f;
    }

    /** Effective angle: 0.0f when the client opted out of angle updates. */
    public float getAngle() {
        return updateAngle ? angle : 0.0f;
    }
}
// ------------------------------------------------------------------------------------
/**
 * Callback interface implemented by client code. The controller uses it to
 * find the draggable object under a touch point, read and write that object's
 * position/scale, and notify the client of selection changes.
 *
 * @param <T> the client's draggable-object type
 */
public static interface MultiTouchObjectCanvas<T> {
    /**
     * See if there is a draggable object at the current point. Returns the object at the point, or null if nothing to drag. To start a multitouch
     * drag/stretch operation, this routine must return some non-null reference to an object. This object is passed into the other methods in this
     * interface when they are called.
     *
     * @param touchPoint
     *            The point being tested (in object coordinates). Return the topmost object under this point, or if dragging/stretching the whole
     *            canvas, just return a reference to the canvas.
     * @return a reference to the object under the point being tested, or null to cancel the drag operation. If dragging/stretching the whole
     *         canvas (e.g. in a photo viewer), always return non-null, otherwise the stretch operation won't work.
     */
    public T getDraggableObjectAtPoint(PointInfo touchPoint);
    /**
     * Get the screen coords of the dragged object's origin, and scale multiplier to convert screen coords to obj coords. The job of this routine
     * is to call the .set() method on the passed PositionAndScale object to record the initial position and scale of the object (in object
     * coordinates) before any dragging/stretching takes place.
     *
     * @param obj
     *            The object being dragged/stretched.
     * @param objPosAndScaleOut
     *            Output parameter: You need to call objPosAndScaleOut.set() to record the current position and scale of obj.
     */
    public void getPositionAndScale(T obj, PositionAndScale objPosAndScaleOut);
    /**
     * Callback to update the position and scale (in object coords) of the currently-dragged object.
     *
     * @param obj
     *            The object being dragged/stretched.
     * @param newObjPosAndScale
     *            The new position and scale of the object, in object coordinates. Use this to move/resize the object before returning.
     * @param touchPoint
     *            Info about the current touch point, including multitouch information and utilities to calculate and cache multitouch pinch
     *            diameter etc. (Note: touchPoint is volatile, if you want to keep any fields of touchPoint, you must copy them before the method
     *            body exits.)
     * @return true if setting the position and scale of the object was successful, or false if the position or scale parameters are out of range
     *         for this object.
     */
    public boolean setPositionAndScale(T obj, PositionAndScale newObjPosAndScale, PointInfo touchPoint);
    /**
     * Select an object at the given point. Can be used to bring the object to top etc. Only called when first touchpoint goes down, not when
     * multitouch is initiated. Also called with null on touch-up.
     *
     * @param obj
     *            The object being selected by single-touch, or null on touch-up.
     * @param touchPoint
     *            The current touch point.
     */
    public void selectObject(T obj, PointInfo touchPoint);
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.authenticator;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import org.junit.Assert;
import org.junit.Test;
import org.apache.tomcat.util.buf.ByteChunk;
import org.apache.tomcat.util.codec.binary.Base64;
/**
* Test the BasicAuthenticator's BasicCredentials inner class and the
* associated Base64 decoder.
*/
public class TestBasicAuthParser {
// Canonical auth-scheme token; RFC 2617 requires it to be matched case-insensitively.
private static final String NICE_METHOD = "Basic";
// Default credentials used to build the test Authorization headers below.
private static final String USER_NAME = "userid";
private static final String PASSWORD = "secret";
/*
 * Test cases with good BASIC Auth credentials. A valid Base64 payload may
 * legitimately carry zero, one or two trailing '=' pad characters, and the
 * password portion of the user-pass token is optional.
 */
@Test
public void testGoodCredentials() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/** A header built without any password must parse to a null password, not "". */
@Test
public void testGoodCredentialsNoPassword() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, null);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertNull(parsed.getPassword());
}

/** Pre-encoded "userid:secret" crib, which ends in two pad characters. */
@Test
public void testGoodCrib() throws Exception {
    final String base64Crib = "dXNlcmlkOnNlY3JldA==";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/** Crib encoding only "userid" (no colon): the password must come back null. */
@Test
public void testGoodCribUserOnly() throws Exception {
    final String base64Crib = "dXNlcmlk";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertNull(parsed.getPassword());
}

/** Crib for "userid:secrets", whose encoding needs exactly one pad character. */
@Test
public void testGoodCribOnePad() throws Exception {
    final String paddedPassword = "secrets";
    final String base64Crib = "dXNlcmlkOnNlY3JldHM=";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(paddedPassword, parsed.getPassword());
}
/*
 * RFC 2045 says a Base64 encoded string should be represented as lines of no
 * more than 76 characters, but RFC 2617 says a base64-user-pass token is not
 * limited to 76 chars/line and that all line breaks, including mandatory
 * ones, should be ignored during decoding. This case embeds a line break in
 * the Base64 string (see also testGoodCribBase64Big below).
 */
@Test
public void testGoodCribLineWrap() throws Exception {
    final String longUserName = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            + "abcdefghijklmnopqrstuvwxyz0123456789+/AAAABBBBCCCC"
            + "DDDD"; // 80 characters
    final String base64Crib = "QUJDREVGR0hJSktMTU5PUFFSU1RVVldY"
            + "WVphYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ejAxMjM0"
            + "\n" + "NTY3ODkrL0FBQUFCQkJCQ0NDQ0REREQ=";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(longUserName, parsed.getUsername());
}

/*
 * Same 80-character token as above but without the embedded line break:
 * the decoder must accept a token longer than 76 characters per line.
 */
@Test
public void testGoodCribBase64Big() throws Exception {
    // Our decoder accepts a long token without complaint.
    final String longUserName = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
            + "abcdefghijklmnopqrstuvwxyz0123456789+/AAAABBBBCCCC"
            + "DDDD"; // 80 characters
    final String base64Crib = "QUJDREVGR0hJSktMTU5PUFFSU1RVVldY"
            + "WVphYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ejAxMjM0"
            + "NTY3ODkrL0FBQUFCQkJCQ0NDQ0REREQ="; // no new line
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(longUserName, parsed.getUsername());
}
/*
 * Verify the parser follows RFC 2617 by treating the auth-scheme token as
 * case-insensitive.
 */
@Test
public void testAuthMethodCaseBasic() throws Exception {
    final String mixedCaseMethod = "bAsIc";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(mixedCaseMethod, USER_NAME, PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/*
 * Confirm the Basic parser rejects an invalid authentication method with an
 * IllegalArgumentException whose message names the header method.
 */
@Test
public void testAuthMethodBadMethod() throws Exception {
    final String badMethod = "BadMethod";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(badMethod, USER_NAME, PASSWORD);
    try {
        @SuppressWarnings("unused")
        BasicAuthenticator.BasicCredentials parsed =
                new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
        Assert.fail("IllegalArgumentException expected");
    } catch (Exception e) {
        Assert.assertTrue(e instanceof IllegalArgumentException);
        Assert.assertTrue(e.getMessage().contains("header method"));
    }
}

/*
 * Confirm the Basic parser tolerates excess white space after the
 * authentication method. RFC 2617 does not define the separation syntax
 * between the auth-scheme and basic-credentials tokens; Tomcat tolerates any
 * amount of white space (within the limits of HTTP header sizes).
 */
@Test
public void testAuthMethodExtraLeadingSpace() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD + " ", USER_NAME, PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}
/*
 * Decoded-credential edge cases: wrong/empty/short values, embedded spaces,
 * and colons in the password (only the first colon separates user from
 * password, so later colons belong to the password).
 */
@Test
public void testWrongPassword() throws Exception {
    final String wrongPassword = "wrong";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, wrongPassword);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertNotSame(PASSWORD, parsed.getPassword());
}

/** An empty username must round-trip as the empty string, not null. */
@Test
public void testMissingUsername() throws Exception {
    final String emptyUserName = "";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, emptyUserName, PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(emptyUserName, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/** Single-character username. */
@Test
public void testShortUsername() throws Exception {
    final String shortUserName = "a";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, shortUserName, PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(shortUserName, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/** Single-character password. */
@Test
public void testShortPassword() throws Exception {
    final String shortPassword = "a";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, shortPassword);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(shortPassword, parsed.getPassword());
}

/** A password containing an embedded space must be preserved verbatim. */
@Test
public void testPasswordHasSpaceEmbedded() throws Exception {
    final String passwordWithSpace = "abc def";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, passwordWithSpace);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(passwordWithSpace, parsed.getPassword());
}

/** Only the first colon is a separator; an embedded colon stays in the password. */
@Test
public void testPasswordHasColonEmbedded() throws Exception {
    final String passwordWithColon = "abc:def";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, passwordWithColon);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(passwordWithColon, parsed.getPassword());
}

/** A leading colon in the password must be preserved. */
@Test
public void testPasswordHasColonLeading() throws Exception {
    final String passwordWithColon = ":abcdef";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, passwordWithColon);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(passwordWithColon, parsed.getPassword());
}

/** A trailing colon in the password must be preserved. */
@Test
public void testPasswordHasColonTrailing() throws Exception {
    final String passwordWithColon = "abcdef:";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, passwordWithColon);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(passwordWithColon, parsed.getPassword());
}
/*
 * Confirm the Basic parser tolerates excess white space after the base64
 * blob. RFC 2617 does not define this case, but asks servers to be tolerant
 * of this kind of client deviation.
 */
@Test
public void testAuthMethodExtraTrailingSpace() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, PASSWORD, " ");
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/*
 * Confirm the Basic parser tolerates excess white space around the username
 * inside the base64 blob. RFC 2617 does not define the separation syntax
 * between the auth-scheme and basic-credentials tokens; Tomcat should
 * tolerate any reasonable amount of white space.
 */
@Test
public void testUserExtraSpace() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, " " + USER_NAME + " ", PASSWORD);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/*
 * Confirm the Basic parser tolerates excess white space around the password
 * inside the base64 blob. (The original comment said "username" here, but
 * this case pads the password.)
 */
@Test
public void testPasswordExtraSpace() throws Exception {
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, USER_NAME, " " + PASSWORD + " ");
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}
/*
 * Invalid base64 string tests. Refer to RFC 2045 section 6.8.
 */

/*
 * A non-trailing "=" should trigger premature termination of the decoder,
 * returning a truncated string that will eventually result in an
 * authentication failure.
 */
@Test
public void testBadBase64InlineEquals() throws Exception {
    final String base64Crib = "dXNlcmlkOnNlY3J=dAo=";
    final String truncatedPassword = "secr";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertNotSame(PASSWORD, parsed.getPassword());
    Assert.assertEquals(truncatedPassword, parsed.getPassword());
}

/*
 * "-" is not a legal base64 character. The RFC says it must be ignored by
 * the decoder. This scrambles the decoded string and eventually results in
 * an authentication failure.
 */
@Test
public void testBadBase64Char() throws Exception {
    final String base64Crib = "dXNlcmlkOnNl-3JldHM=";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertNotSame(PASSWORD, parsed.getPassword());
}

/*
 * "-" is not a legal base64 character and must be ignored by the decoder.
 * This is a strange case because the next character is a pad, which
 * terminates the string normally: the decoded password may or may not be
 * damaged, and subsequent authentication may fail.
 */
@Test
public void testBadBase64LastChar() throws Exception {
    final String base64Crib = "dXNlcmlkOnNlY3JldA-=";
    final String possiblyDamagedPassword = "secret";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(possiblyDamagedPassword, parsed.getPassword());
}

/*
 * A trailing third "=" is illegal, but the decoder must terminate as soon as
 * the first pad is detected, so no error is detected unless the payload has
 * been damaged in some way.
 */
@Test
public void testBadBase64TooManyEquals() throws Exception {
    final String base64Crib = "dXNlcmlkOnNlY3JldA===";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}

/*
 * The encoded length should be a multiple of 4, but the RFC says the decoder
 * should pad the input with zero bits out to the next boundary. An error is
 * only detectable if the payload was damaged - this particular crib is not.
 */
@Test
public void testBadBase64BadLength() throws Exception {
    final String base64Crib = "dXNlcmlkOnNlY3JldA";
    final BasicAuthHeader authHeader =
            new BasicAuthHeader(NICE_METHOD, base64Crib);
    final BasicAuthenticator.BasicCredentials parsed =
            new BasicAuthenticator.BasicCredentials(authHeader.getHeader());
    Assert.assertEquals(USER_NAME, parsed.getUsername());
    Assert.assertEquals(PASSWORD, parsed.getPassword());
}
/*
* Encapsulate the logic to generate an HTTP header
* for BASIC Authentication.
* Note: only used internally, so no need to validate arguments.
*/
private final class BasicAuthHeader {
private final String HTTP_AUTH = "authorization: ";
private final byte[] HEADER =
HTTP_AUTH.getBytes(StandardCharsets.ISO_8859_1);
private ByteChunk authHeader;
private int initialOffset = 0;
/*
* This method creates a valid base64 blob
*/
private BasicAuthHeader(String method, String username,
String password) {
this(method, username, password, null);
}
/*
* This method creates valid base64 blobs with optional trailing data
*/
private BasicAuthHeader(String method, String username,
String password, String extraBlob) {
prefix(method);
String userCredentials =
((password == null) || (password.length() < 1))
? username
: username + ":" + password;
byte[] credentialsBytes =
userCredentials.getBytes(StandardCharsets.ISO_8859_1);
String base64auth = Base64.encodeBase64String(credentialsBytes);
byte[] base64Bytes =
base64auth.getBytes(StandardCharsets.ISO_8859_1);
byte[] extraBytes =
((extraBlob == null) || (extraBlob.length() < 1))
? null :
extraBlob.getBytes(StandardCharsets.ISO_8859_1);
try {
authHeader.append(base64Bytes, 0, base64Bytes.length);
if (extraBytes != null) {
authHeader.append(extraBytes, 0, extraBytes.length);
}
}
catch (IOException ioe) {
throw new IllegalStateException("unable to extend ByteChunk:"
+ ioe.getMessage());
}
// emulate tomcat server - offset points to method in header
authHeader.setOffset(initialOffset);
}
/*
* This method allows injection of cribbed base64 blobs,
* without any validation of the contents
*/
private BasicAuthHeader(String method, String fakeBase64) {
prefix(method);
byte[] fakeBytes = fakeBase64.getBytes(StandardCharsets.ISO_8859_1);
try {
authHeader.append(fakeBytes, 0, fakeBytes.length);
}
catch (IOException ioe) {
throw new IllegalStateException("unable to extend ByteChunk:"
+ ioe.getMessage());
}
// emulate tomcat server - offset points to method in header
authHeader.setOffset(initialOffset);
}
/*
* construct the common authorization header
*/
private void prefix(String method) {
authHeader = new ByteChunk();
authHeader.setBytes(HEADER, 0, HEADER.length);
initialOffset = HEADER.length;
String methodX = method + " ";
byte[] methodBytes = methodX.getBytes(StandardCharsets.ISO_8859_1);
try {
authHeader.append(methodBytes, 0, methodBytes.length);
}
catch (IOException ioe) {
throw new IllegalStateException("unable to extend ByteChunk:"
+ ioe.getMessage());
}
}
private ByteChunk getHeader() {
return authHeader;
}
}
}
| |
package org.leibnizcenter.rechtspraak.nameparser;
import cc.mallet.types.Token;
import org.leibnizcenter.rechtspraak.tagging.crf.features.textpatterns.KnownSurnamesNl;
import org.leibnizcenter.rechtspraak.tagging.crf.features.textpatterns.interfaces.Patterns;
import org.leibnizcenter.rechtspraak.tokens.text.TokenTreeLeaf;
import org.leibnizcenter.util.TextPattern;
import java.util.ArrayList;
import java.util.EnumSet;
import java.util.List;
import java.util.Set;
import java.util.function.Function;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Created by maarten on 14-3-16.
*/
public class Names {
    // Personal titles
    public static final String KNOWN_TITLE =
            "(?:BA\\.?|[BbMm]\\.?[Ss][Cc]\\.?|LLM|MA|MPhil\\.?|P\\.?[Hh]\\.?D\\.?" +
                    "|[Bb]acc\\.?|[Cc]and\\." +
                    "|(?:[Dd]e ?)?[Hh]eer" +
                    "|[Dd][Rr][Ss]\\.?|[Dd]s\\.|[Dd][Hh]?r\\.(?:\\.h\\.c\\.)?|[Dd]?[Hh]r\\.|[Ii]ngr?\\.|[Ii]r\\.|[Kk]and\\.|lic\\." +
                    "|[Mm]eneer|[Mm]evrouw" +
                    "|[mM][Rr][Ss]?\\." +
                    "|[Mm]vr\\.|[Mm]w\\.|[Pp]rof(?:\\.|essor))";
    // Low confidence: any two- or three-letter abbreviation ending in a period
    public static final String TITLELOWCONF = "(?:[\\p{L}]{2,3}\\.)";
    // Person roles
    public static final String ROLE_SINGULAR =
            "(?:[Aa]dv(?:\\.|ocaa?te?)(?:[ -][Gg]eneraal)?"
                    + "|[Aa]mbtenaar(?: [Vv]an(?: [Dd]e)? (?:[Ss]taat|[Gg]emeente|[Pp]rovincie))?"
                    + "|(?:[Ww]aarnemend[ -])?[Gg]riffier"
                    + "|[Gg]emachtigde"
                    + "|[Ll]id(?: [Vv]an [Dd]e(?: (?:enkel|meer)voudige)? kamer)"
                    + "|[Oo]fficier [Vv](?:\\.|an) [Jj]ustitie"
                    + "|[Pp]rocureur(?:[ -][Gg]eneraal)?"
                    + "|[Rr]aads?(?:vrouw|man|heer)"
                    + "|(?:[Rr]echter)(?:[ -][Cc]ommissaris)?" +
                    ")";
    public static final String ROLE_MULTIPLE =
            "(?:[Aa]dv(?:ctn\\.|ocate[sn])(?:[ -][Gg]eneraal)?"
                    + "|[Aa]mbtena(?:ren|ars)(?: [Vv]an(?: [Dd]e)? (?:[Ss]taat|[Gg]emeente|[Pp]rovincie))?"
                    + "|(?:[Ww]aarnemend[ -])?[Gg]riffiers"
                    + "|[Gg]emachtigden"
                    + "|[Ll]eden(?: [Vv]an [Dd]e(?: (?:enkel|meer)voudige)? kamer)"
                    + "|[Oo]fficier(?:s|en) [Vv](?:\\.|an) [Jj]ustitie"
                    + "|[Pp]rocureurs(?:[ -][Gg]eneraal)?"
                    + "|[Rr]aads?(?:vrouwen|mannen|heren)"
                    + "|(?:[Rr]echters)(?:[ -][Cc]ommissaris)?"
                    + "|(?:[Rr]echter(?:[ -]))?(?:[Cc]ommissarissen)"
                    + "|[Vv]oorzitter" +
                    ")";
    /**
     * ex. [A.], [Th.], [C.]
     */
    public static final String INITIAL_WITHOUT_PERIOD = "\\p{Lu}\\p{L}{0,2}";
    public static final String STRICT_INITIAL = INITIAL_WITHOUT_PERIOD + "\\.";
    public static final String STRICT_INITIALS = STRICT_INITIAL + "(?: {0,2}?" + STRICT_INITIAL + "){0,10}";
    public static final String LOOSE_INITIALS = INITIAL_WITHOUT_PERIOD + "(?:[\\. ] ?" + INITIAL_WITHOUT_PERIOD + "){0,10}\\.?";
    // Initials
    // First names
    public static final String TOLERANT_FIRSTNAME_SINGLE = "\\p{Lu}\\p{Ll}{0,10}\\p{Ll}";
    public static final String TOLERANT_FIRSTNAME = TOLERANT_FIRSTNAME_SINGLE
            + "(?:-" + TOLERANT_FIRSTNAME_SINGLE + ")?";
    // Last names
    public static final String VANDE = "[\\p{L}']{1,3}";
    /**
     * ex. van der Laan
     */
    public static final String SINGLETOLERANTLASTNAME = "(?:(?:" + VANDE + " ){0,3}(?:\\p{L}{0,5}\\p{Lu}(?:\\p{Ll}[\\p{L}-]{0,15})?\\p{Ll}))";
    /**
     * ex. 'van der Laan-Wijngaerde'
     * ex. 'de Beer de Laer Dupont'
     */
    public static final String TOLERANTLASTNAME = "(?:(?:" + SINGLETOLERANTLASTNAME + ")+"
            // Arbitrary amount of hyphens
            + "(?:-" + SINGLETOLERANTLASTNAME + "){0,5})";
    //Token.string ==~ ".*[aeiouy].*", //At least one vowel...
    public static final String STRICTINITIALSNAMEWITHMULTIPLEINITIALS = "((" +
            KNOWN_TITLE +
            ")*(" +
            STRICT_INITIAL +
            ")(" +
            STRICT_INITIAL +
            ")+(" +
            TOLERANTLASTNAME +
            "))";
    private static final String TOLERANT_FIRSTNAMES = "(?:\\b" + TOLERANT_FIRSTNAME + "(?: {1,2}" + TOLERANT_FIRSTNAME + "){0,5})";
    public static final String TOLERANT_FIRST_NAME_AND_OR_INITIALS =
            "(?:" + TOLERANT_FIRSTNAMES
                    + "(?: {0,2}\\b" + LOOSE_INITIALS + ")?"
                    + "|" + LOOSE_INITIALS + ")";
    private static final String KNOWN_TITLES = "(?:" + KNOWN_TITLE + "(?: {0,2}" + KNOWN_TITLE + "){0,4})";
    public static final String TOLERANTFULLNAME_STRICT_INITIALS =
            "(?:(" + KNOWN_TITLES + ") {0,2})?"
                    + "(?:(" + STRICT_INITIALS + ") {0,2})"
                    + "(" + TOLERANTLASTNAME + ")";
    /**
     * Ex. [mr. Vox], [mr. A.D.W. de Heyde]
     */
    public static final String TOLERANT_TITLED_NAME =
            "(" + KNOWN_TITLES + ") {0,2}"
                    + "(" + TOLERANT_FIRST_NAME_AND_OR_INITIALS + " {0,2})"
                    + "(" + TOLERANTLASTNAME + ")";
    public static final Pattern TITLED_NAME = Pattern.compile(TOLERANT_TITLED_NAME);
    /**
     * [Vincent Willems]
     */
    public static final String TOLERANTFULLNAME =
            "(?:(" + KNOWN_TITLES + ") {0,2})?"
                    + "(?:(" + TOLERANT_FIRST_NAME_AND_OR_INITIALS + ") {0,2})"
                    + "(" + TOLERANTLASTNAME + ")";
    /**
     * Matches any non-space strings
     */
    public static final Pattern TOKEN_REGEX = Pattern.compile("[^\\s]+");
    public static final String TOLERANTFULLNAME_WITH_OPTIONAL_ROLE = "(?:"
            + TOLERANTFULLNAME
            + "(?:, {0,2}(" + ROLE_SINGULAR + "))?"
            + ")";
    public static final String TOLERANTFULLNAME_2_TO_4 =
            "" + TOLERANTFULLNAME_WITH_OPTIONAL_ROLE + ""
                    + "(?:[,;] {0,2}" + TOLERANTFULLNAME_WITH_OPTIONAL_ROLE + "){0,3}"
                    + "[,;]? {0,2}en[,;]? {0,2}\\b"
                    + TOLERANTFULLNAME_WITH_OPTIONAL_ROLE + "";
    /////////////////////////////////////////
    private static final String VERTEGENWOORDIGD_DOOR = "(?:(?:" +
            "(?:vert(?:eg)?enwoordigd|bijgestaan|laten vertegenwoordigen|laten bijstaan)" +
            "|" +
            "(?:\\p{L}{0,5}ge\\p{L}{3,}(?:en|d|t)$))" +
            " {1,2}door)";
    private static final String ALLEN_ALS = "[,: ]{0,2}(?:alle(?:n|maal)?)?[,: ]{0,2}?(?:als)?";
    private final static String ALS = "(?:[^\\p{L}]{0,2}als[^\\p{L}]{1,3}|,[^\\p{L}]{0,3})";
    ////////////
    //// Rules
    ////////////
    ///////////////////////////////////////////////////
    /* `!Sentence` was added to most macros so that last names don't flow over into new sentences.
     * If we don't provision for this, we get cases like
     * <p>
     * ```
     * w.g [van de Berg
     * Voorzitter] ambtenaar van Staat
     * ```
     * <p>
     * i.e., we should ignore the word 'Voorzitter'
     */
    private static final String INTEGENWOORDIGHEIDVAN = "in(?: (?:het|de))? (?:bijzijn|tegenwoordigheid) van";
    /**
     * Collects every match of the given matcher into a {@link Name}.
     * Assumes the pattern's capture groups are laid out as
     * (1) titles, (2) first name / initials, (3) last name.
     *
     * @param matcher      matcher to exhaust with {@link Matcher#find()}
     * @param checkSurname if true, only keep matches whose last name is a
     *                     known Dutch surname
     * @return all accepted matches, possibly empty
     */
    public static List<Name> getNames(Matcher matcher, boolean checkSurname) {
        List<Name> names = new ArrayList<>();
        while (matcher.find()) {
            String lastName = matcher.group(3);
            if (!checkSurname || KnownSurnamesNl.matchesAnyName.apply(lastName)) {
                names.add(
                        new Name(
                                matcher.start(),
                                matcher.end(),
                                new Span(matcher.start(1), matcher.end(1), matcher.group(1)),
                                new Span(matcher.start(2), matcher.end(2), matcher.group(2)),
                                new Span(matcher.start(3), matcher.end(3), lastName)
                        ));
            }
        }
        return names;
    }
    /**
     * Splits the given string into whitespace-delimited token spans.
     */
    @Deprecated
    public static List<Span> parseTokens(String s) {
        Matcher m = TOKEN_REGEX.matcher(s);
        List<Span> spans = new ArrayList<>(s.length() / 3);
        while (m.find()) {
            spans.add(new Span(m.start(), m.end()));
        }
        return spans;
    }
    /**
     * Returns true if any of the {@link NamePatterns} rules finds at least
     * one name in the given string.
     */
    public static boolean nameFound(String s) {
        for (NamePatterns p : NamePatterns.values()) {
            if (p.getNames(s).size() > 0) {
                return true;
            }
        }
        return false;
    }
    // TODO
    public enum NamePatterns implements Patterns.UnnormalizedTextContains {
        /**
         * All spans that sort-of look like a name
         */
        LowConfidenceName(Pattern.compile(TOLERANTFULLNAME)),
        HighConfidenceInTegenwoordigheidVanAls(Pattern.compile(
                "(" + INTEGENWOORDIGHEIDVAN + ")"
                        + "(" + TOLERANTFULLNAME + ")"
                        + "(als|,)"
                        + ROLE_SINGULAR
        )),
        HighConfidenceInTegenwoordigheidVanDe(Pattern.compile(
                "(" + INTEGENWOORDIGHEIDVAN + " de)"
                        + ROLE_SINGULAR
                        + "(" + TOLERANTFULLNAME + ")")),
        /**
         * Was getekend. FirstName de la LooksLikeLastname
         */
        WasGetekend(Pattern.compile("(?:(?:(?:w(?:as)? {1,2})?(?:get(?:ekend)?\\.?))|w\\.?g\\.?)\\s{0,3}" + TOLERANTFULLNAME + "")),
        /**
         * gewezen door [mr. A.B. van Wiel]
         */
        GewezenDoor((Pattern.compile(
                "(?:deze uitspraak is ge(?:daan|maakt)|gewezen) {0,2}(?:te [\\p{L}]{0,20} )?door" +
                        "[:;, ]{0,3}(?:" + TOLERANTFULLNAME + ")"
        ))),
        /**
         * prof. FirstName de la FamiliarLastname
         */
        TitledNameKnownSurname(TITLED_NAME, true),
        /**
         * prof. S.T. R.I. C.T. de la TolerantLastName.
         */
        StrictInitialsTolerantName(Pattern.compile(TOLERANTFULLNAME_STRICT_INITIALS)),
        /**
         * prof. S.T. R.I. C.T. de la FamiliarLastName.
         */
        StrictInitialsKnownSurname(Pattern.compile(TOLERANTFULLNAME_STRICT_INITIALS), true),
        /**
         * advocaat [mr. A.B. van Wiel]
         */
        AdvocaatPre(Pattern.compile(
                "(?:[Aa]dvocaat)"
                        + "[:; ]{0,3}\\b"
                        + TOLERANTFULLNAME
        )),
        /**
         * HighConfidence bijgestaan door mr. A. B. van der Werf, gemachtigde
         */
        GexxxDoorRolePost((Pattern.compile(
                VERTEGENWOORDIGD_DOOR
                        + "(?:[:;, ]{0,3}" + TOLERANTFULLNAME + ")"
                        + ALS
                        + "(?:[:; ]{0,3}(\\b" + ROLE_SINGULAR + "))")
        ), Constants.MATCHER_TOLERANT_NAME_WITH_ROLE),
        /**
         * HighConfidence: (?:de {1,2})?rechter mr. A. B. van der Werf heeft concludeerde
         */
        DeXConcludeert(Pattern.compile("(" + ROLE_SINGULAR + ")"
                + "[:; ]{0,3}" + TOLERANTFULLNAME_WITH_OPTIONAL_ROLE
                + "(?:[:; ]{0,3}heeft)?"
                + "[:; ]{0,3}(?:ge)?concludeer(?:t|d)e?"), (matcher) -> {
            // group 1 is the leading role; groups 2-4 are titles/first/last
            List<Name> names = new ArrayList<>();
            while (matcher.find()) {
                String lastName = matcher.group(4);
                Name name = new Name(
                        matcher.start(),
                        matcher.end(),
                        new Span(matcher.start(2), matcher.end(2), matcher.group(2)),
                        new Span(matcher.start(3), matcher.end(3), matcher.group(3)),
                        new Span(matcher.start(4), matcher.end(4), lastName),
                        new Span(matcher.start(1), matcher.end(1), matcher.group(1))
                );
                names.add(name);
            }
            return names;
        }),
        /**
         * Medium conf
         */
        RolePre(Pattern.compile(
                "" + ROLE_SINGULAR + ""
                        + "[:; ]{1,3}"
                        // Having two or more initials makes the name more probable, or alternatively a title
                        + TOLERANTFULLNAME),
                true),
        /**
         * high confidence
         * mr A de B als X
         */
        NameAlsX(
                Pattern.compile("(?:(" + ROLE_SINGULAR + ")[;: ]{0,2})?\\b" + TOLERANTFULLNAME_WITH_OPTIONAL_ROLE
                        + "[,:; ]{0,2}" + ALS + "[:; ]{0,2}\\b(" + ROLE_SINGULAR + ")"), (matcher) -> {
            List<Name> names = new ArrayList<>();
            while (matcher.find()) {
                String lastName = matcher.group(4);
                Name name = new Name(
                        matcher.start(),
                        matcher.end(),
                        new Span(matcher.start(2), matcher.end(2), matcher.group(2)),
                        new Span(matcher.start(3), matcher.end(3), matcher.group(3)),
                        new Span(matcher.start(4), matcher.end(4), lastName),
                        new Span(matcher.start(1), matcher.end(1), matcher.group(1)),
                        new Span(matcher.start(5), matcher.end(5), matcher.group(5)),
                        new Span(matcher.start(6), matcher.end(6), matcher.group(6))
                );
                names.add(name);
            }
            return names;
        }),
        // Name groups
        /**
         * mrs. [TOLERANT_NAMES_WITH_OPTIONAL_ROLE]
         */
        Meesters(Pattern.compile(
                "([Mm][Rr][Ss][\\.: ]{0,2})"
                        + "(" + TOLERANTFULLNAME_2_TO_4 + ")"
                        //Followed by the group role possibly
                        + "(?:" + ALLEN_ALS
                        + "[:;, ]{0,2}(" + ROLE_MULTIPLE + "|" + ROLE_SINGULAR + "))?"
        ), Constants.MATCHER_TODO),
        GexxxDoorMultiple(Pattern.compile(
                "" + VERTEGENWOORDIGD_DOOR + ""
                        + "[:;, ]{0,2}(" + TOLERANTFULLNAME_2_TO_4 + ")"
                        + "(?:" + ALLEN_ALS + ""
                        //Followed by the group role possibly
                        + "[:;, ]{0,2}(" + ROLE_MULTIPLE + "|" + ROLE_SINGULAR + "))?"
        ), Constants.MATCHER_TODO),
        RolePreMultiple((Pattern.compile(
                "(" + ROLE_MULTIPLE + ")"
                        + "[:;, ]{0,2}" + TOLERANTFULLNAME_2_TO_4
        )),
                Constants.MATCHER_TODO);
        // //// Ex. [mr. Vox], [mr. A.D.W. de Heyde], [Vincent Willems]
        // public static final String SEMITOLERANTFULLNAME = "((" +
        // KNOWN_TITLE +
        // ")*((" +
        // TOLERANT_FIRSTNAME +
        // ")*|(" +
        // STRICT_INITIAL +
        // ")*)(" +
        // TOLERANTLASTNAME +
        // "))";
        ///////////////////
        // NOTE(review): public and mutable; callers appear to only iterate it
        public static Set<NamePatterns> set = EnumSet.allOf(NamePatterns.class);
        private final TextPattern pattern;
        private final boolean checkSurname;
        private final Function<Matcher, List<Name>> handleMatcher;
        NamePatterns(Pattern pattern) {
            this(pattern, false);
        }
        NamePatterns(Pattern pattern, boolean checkIfSurnameIsKnown) {
            this.checkSurname = checkIfSurnameIsKnown;
            this.handleMatcher = null;
            this.pattern = new NameTextPattern(this.name(), pattern);
        }
        NamePatterns(Pattern pattern, Function<Matcher, List<Name>> handleMatcher) {
            this.pattern = new NameTextPattern(this.name(), pattern);
            this.handleMatcher = handleMatcher;
            this.checkSurname = false;
        }
        /**
         * Sets a feature value of 1.0 on the Mallet token for every
         * pattern that matches the given tree leaf.
         */
        public static void setFeatureValues(Token t, TokenTreeLeaf token) {
            set.forEach((p) -> {
                if (Patterns.matches(p, token))
                    t.setFeatureValue(p.name(), 1.0);
            });
        }
        /**
         * Applies this rule to the given string, either through the rule's
         * custom matcher handler or the default group layout of
         * {@link Names#getNames(Matcher, boolean)}.
         */
        public List<Name> getNames(String s) {
            Matcher matcher = pattern.pattern.matcher(s);
            if (handleMatcher != null) {
                return handleMatcher.apply(matcher);
            } else {
                return Names.getNames(matcher, checkSurname);
            }
        }
        @Override
        public boolean matches(String s) {
            List<Name> names = getNames(s);
            return names.size() > 0;
        }
        private static class Constants {
            public static final Function<Matcher, List<Name>> MATCHER_TODO = (matcher) -> {
                ArrayList<Name> names = new ArrayList<>();
                while (matcher.find()) {
                    // System.out.println(matcher.group(1));
                    // System.out.println(matcher.group(2));
                    // System.out.println(matcher.group(matcher.groupCount()));
                    names.add(new Name(-1, -1, Name.NON_MATCHING_SPAN, Name.NON_MATCHING_SPAN, Name.NON_MATCHING_SPAN));
                }
                return names;//todo
            };
            public static final Function<Matcher, List<Name>> MATCHER_TOLERANT_NAME_WITH_ROLE = (matcher) -> {
                List<Name> names = new ArrayList<>();
                while (matcher.find()) {
                    String lastName = matcher.group(3);
                    Name name = new Name(
                            matcher.start(),
                            matcher.end(),
                            new Span(matcher.start(1), matcher.end(1), matcher.group(1)),
                            new Span(matcher.start(2), matcher.end(2), matcher.group(2)),
                            new Span(matcher.start(3), matcher.end(3), lastName),
                            new Span(matcher.start(4), matcher.end(4), matcher.group(4))
                    );
                    names.add(name);
                }
                return names;
            };
        }
        private static class NameTextPattern extends TextPattern {
            public NameTextPattern(String name, Pattern compile) {
                super(name, compile);
            }
            @Override
            public boolean matches(String text) {
                return super.matches(text);
            }
        }
    }
    @Deprecated
    public enum PreRole {
        adv("adv.", "advctn."),
        advocaat("advocaat", "advocaten"),
        advocaatGeneraal("advocaat-generaal", "advocaten-generaal"),
        griffier("griffier", "griffiers"),
        officier("officier van justitie", "officieren van justitie"),
        procureur("procureur", "procureurs"),
        raadman("raadman", "raadmannen"),
        raadsheer("raadsheer", "raadsheren"),
        raadsman("raadsman", "raadsmannen"),
        raadsvrouw("raadsvrouw", "raadsvrouwen"),
        raadvrouw("raadvrouw", "raadvrouwen"),
        rechterCommissaris("rechter-commissaris", "rechter-commissarissen"),
        rechter("rechter", "rechters"),
        waarnemend("waarnemend-griffier", "waarnemend-griffiers");
        public final String multiple;
        public final String singular;
        PreRole(String singular, String multiple) {
            this.singular = singular;
            this.multiple = multiple;
        }
    }
    @Deprecated
    public enum AlsRole {
        adv("adv.", "advctn."),
        advocaat("advocaat", "advocaten"),
        advocaatGeneraal("advocaat-generaal", "advocaten-generaal"),
        ambtenaar("ambtenaar", "ambtenaren"),
        ambtenaarVanStaat("ambtenaar van Staat", "ambtenaren van Staat"),
        ambtenaarVanGemeente("ambtenaar van de gemeente", "ambtenaren van de gemeente"),
        ambtenaarVanPro("ambtenaar van de provincie", "ambtenaren van de provincie"),
        gemachtigde("gemachtigde", "gemachtigden"),
        griffier("griffier", "griffiers"),
        lid("lid", "leden"),
        lidVanEnkelvoudigeKamer("lid van de enkelvoudige kamer", "leden van de enkelvoudige kamer"),
        lidVanMeervoudigeKamer("lid van de meervoudige kamer", "leden van de meervoudige kamer"),
        officier("officier van justitie", "officieren van justitie"),
        procureur("procureur", "procureurs"),
        // The plural forms of the 'raad*' entries were previously shifted by
        // one entry (e.g. raad -> "raadmannen", raadvrouw -> "raden");
        // realigned so each plural matches its own singular, consistent with
        // the PreRole enum above.
        raad("raad", "raden"),
        raadman("raadman", "raadmannen"),
        raadsheer("raadsheer", "raadsheren"),
        raadsman("raadsman", "raadsmannen"),
        raadsvrouw("raadsvrouw", "raadsvrouwen"),
        raadvrouw("raadvrouw", "raadvrouwen"),
        rechter("rechter", "rechters"),
        rechterCommissaris("rechter-commissaris", "rechter-commissarissen"),
        voorzitter("voorzitter", "voorzitters");
        public final String multiple;
        public final String singular;
        AlsRole(String singular, String multiple) {
            this.singular = singular;
            this.multiple = multiple;
        }
    }
    /**
     * A half-open [start, end) character span with the matched text
     * (the text may be null for deprecated or non-matching spans).
     */
    public static class Span {
        /**
         * inclusive start
         */
        public final int start;
        /**
         * exclusive end
         */
        public final int end;
        public final String string;
        @Deprecated
        public Span(int start, int end) {
            this(start, end, null);
        }
        public Span(int start, int end, String string) {
            this.start = start;
            this.end = end;
            this.string = string;
        }
    }
    /**
     * A parsed person name: titles, first name / initials, last name, plus
     * up to three role spans. Absent parts are {@link #NON_MATCHING_SPAN}.
     */
    public static class Name {
        public static final Span NON_MATCHING_SPAN = new Span(-1, -1, null);
        public final int start;
        public final int end;
        public final Span firstName;
        public final Span titles;
        public final Span lastName;
        public final Span role;
        public final Span role2;
        public final Span role3;
        public Name(int start, int end, Span titles, Span firstName, Span lastName) {
            this(start, end, titles, firstName, lastName, NON_MATCHING_SPAN);
        }
        public Name(int start, int end, Span titles, Span firstName, Span lastName, Span role) {
            this(start, end, titles, firstName, lastName, role, NON_MATCHING_SPAN);
        }
        public Name(int start, int end, Span titles, Span firstName, Span lastName, Span role, Span role2) {
            this(start, end, titles, firstName, lastName, role, role2, NON_MATCHING_SPAN);
        }
        public Name(int start, int end, Span titles, Span firstName, Span lastName, Span role, Span role2, Span role3) {
            this.start = start;
            this.end = end;
            // normalize spans that did not participate in the match
            this.titles = titles.start >= 0 ? titles : NON_MATCHING_SPAN;
            this.firstName = firstName.start >= 0 ? firstName : NON_MATCHING_SPAN;
            this.lastName = lastName.start >= 0 ? lastName : NON_MATCHING_SPAN;
            this.role = role.start >= 0 ? role : NON_MATCHING_SPAN;
            this.role2 = role2.start >= 0 ? role2 : NON_MATCHING_SPAN;
            this.role3 = role3.start >= 0 ? role3 : NON_MATCHING_SPAN;
        }
    }
}
| |
/*
* %CopyrightBegin%
*
* Copyright Ericsson AB 2000-2016. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* %CopyrightEnd%
*/
package com.ericsson.otp.erlang;
/**
* Provides a Java representation of Erlang atoms. Atoms can be created from
* strings whose length is not more than {@link #maxAtomLength maxAtomLength}
* characters.
*/
public class OtpErlangAtom extends OtpErlangObject {
    // don't change this!
    private static final long serialVersionUID = -3204386396807876641L;
    /** The maximum allowed length of an atom, in characters */
    public static final int maxAtomLength = 0xff; // one byte length
    private final String atom;
    /**
     * Create an atom from the given string.
     *
     * @param atom
     *            the string to create the atom from.
     *
     * @exception java.lang.IllegalArgumentException
     *                if the string is null or contains more than
     *                {@link #maxAtomLength maxAtomLength} characters.
     */
    public OtpErlangAtom(final String atom) {
        if (atom == null) {
            throw new java.lang.IllegalArgumentException("null string value");
        }
        // count code points, not chars, so supplementary pairs count once
        if (atom.codePointCount(0, atom.length()) > maxAtomLength) {
            throw new java.lang.IllegalArgumentException("Atom may not exceed "
                    + maxAtomLength + " characters: " + atom);
        }
        this.atom = atom;
    }
    /**
     * Create an atom from a stream containing an atom encoded in Erlang
     * external format.
     *
     * @param buf
     *            the stream containing the encoded atom.
     *
     * @exception OtpErlangDecodeException
     *                if the buffer does not contain a valid external
     *                representation of an Erlang atom.
     */
    public OtpErlangAtom(final OtpInputStream buf)
            throws OtpErlangDecodeException {
        atom = buf.read_atom();
    }
    /**
     * Create an atom whose value is "true" or "false".
     */
    public OtpErlangAtom(final boolean t) {
        atom = String.valueOf(t);
    }
    /**
     * Get the actual string contained in this object.
     *
     * @return the raw string contained in this object, without regard to
     *         Erlang quoting rules.
     *
     * @see #toString
     */
    public String atomValue() {
        return atom;
    }
    /**
     * The boolean value of this atom.
     *
     * @return the value of this atom expressed as a boolean value. If the atom
     *         consists of the characters "true" (independent of case) the
     *         value will be true. For any other values, the value will be
     *         false.
     */
    public boolean booleanValue() {
        // Boolean.parseBoolean is the idiomatic (and allocation-free)
        // equivalent of Boolean.valueOf(...).booleanValue()
        return Boolean.parseBoolean(atomValue());
    }
    /**
     * Get the printname of the atom represented by this object. The difference
     * between this method and {@link #atomValue atomValue()} is that the
     * printname is quoted and escaped where necessary, according to the Erlang
     * rules for atom naming.
     *
     * @return the printname representation of this atom object.
     *
     * @see #atomValue
     */
    @Override
    public String toString() {
        if (atomNeedsQuoting(atom)) {
            return "'" + escapeSpecialChars(atom) + "'";
        }
        return atom;
    }
    /**
     * Determine if two atoms are equal.
     *
     * @param o
     *            the other object to compare to.
     *
     * @return true if the atoms are equal, false otherwise.
     */
    @Override
    public boolean equals(final Object o) {
        if (!(o instanceof OtpErlangAtom)) {
            return false;
        }
        final OtpErlangAtom other = (OtpErlangAtom) o;
        // String.equals is clearer (and cheaper) than compareTo(...) == 0
        return atom.equals(other.atom);
    }
    @Override
    protected int doHashCode() {
        return atom.hashCode();
    }
    /**
     * Convert this atom to the equivalent Erlang external representation.
     *
     * @param buf
     *            an output stream to which the encoded atom should be written.
     */
    @Override
    public void encode(final OtpOutputStream buf) {
        buf.write_atom(atom);
    }
    /* the following four predicates are helpers for the toString() method */
    private boolean isErlangDigit(final char c) {
        return c >= '0' && c <= '9';
    }
    private boolean isErlangUpper(final char c) {
        return c >= 'A' && c <= 'Z' || c == '_';
    }
    private boolean isErlangLower(final char c) {
        return c >= 'a' && c <= 'z';
    }
    private boolean isErlangLetter(final char c) {
        return isErlangLower(c) || isErlangUpper(c);
    }
    // true if the atom should be displayed with quotation marks
    private boolean atomNeedsQuoting(final String s) {
        char c;
        if (s.length() == 0) {
            return true;
        }
        // atoms must start with a lowercase letter...
        if (!isErlangLower(s.charAt(0))) {
            return true;
        }
        // ...and contain only letters, digits and '@' thereafter
        final int len = s.length();
        for (int i = 1; i < len; i++) {
            c = s.charAt(i);
            if (!isErlangLetter(c) && !isErlangDigit(c) && c != '@') {
                return true;
            }
        }
        return false;
    }
    /*
     * Get the atom string, with special characters escaped. Note that this
     * function currently does not consider any characters above 127 to be
     * printable.
     */
    private String escapeSpecialChars(final String s) {
        char c;
        // StringBuilder: a purely local buffer needs no synchronization
        final StringBuilder so = new StringBuilder();
        final int len = s.length();
        for (int i = 0; i < len; i++) {
            c = s.charAt(i);
            /*
             * note that some of these escape sequences are unique to Erlang,
             * which is why the corresponding 'case' values use octal. The
             * resulting string is, of course, in Erlang format.
             */
            switch (c) {
            // some special escape sequences
            case '\b':
                so.append("\\b");
                break;
            case 0177:
                so.append("\\d");
                break;
            case 033:
                so.append("\\e");
                break;
            case '\f':
                so.append("\\f");
                break;
            case '\n':
                so.append("\\n");
                break;
            case '\r':
                so.append("\\r");
                break;
            case '\t':
                so.append("\\t");
                break;
            case 013:
                so.append("\\v");
                break;
            case '\\':
                so.append("\\\\");
                break;
            case '\'':
                so.append("\\'");
                break;
            case '\"':
                so.append("\\\"");
                break;
            default:
                // some other character classes
                if (c < 027) {
                    // control chars show as "\^@", "\^A" etc
                    // NOTE(review): control chars 027-037 (other than ESC)
                    // fall through to the printable branch below — looks
                    // intentional upstream, but worth confirming
                    so.append("\\^" + (char) ('A' - 1 + c));
                } else if (c > 126) {
                    // 8-bit chars show as \345 \344 \366 etc
                    so.append("\\" + Integer.toOctalString(c));
                } else {
                    // character is printable without modification!
                    so.append(c);
                }
            }
        }
        return so.toString();
    }
}
| |
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.lambda.model;
import java.io.Serializable;
/**
* <p>
* Describes mapping between an Amazon Kinesis stream and a Lambda
* function.
* </p>
*/
public class GetEventSourceMappingResult implements Serializable, Cloneable {
    /**
     * The AWS Lambda assigned opaque identifier for the mapping.
     * (The unusual "uUID" casing keeps the generated bean accessors named
     * getUUID()/setUUID().)
     */
    private String uUID;
    /**
     * The largest number of records that AWS Lambda will retrieve from your
     * event source at the time of invoking your function. Your function
     * receives an event with all the retrieved records.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Range: </b>1 - 10000<br/>
     */
    private Integer batchSize;
    /**
     * The Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
     * the source of events.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Pattern: </b>arn:aws:([a-zA-Z0-9\-])+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:(.*)<br/>
     */
    private String eventSourceArn;
    /**
     * The Lambda function to invoke when AWS Lambda detects an event on the
     * stream.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Pattern: </b>arn:aws:lambda:[a-z]{2}-[a-z]+-\d{1}:\d{12}:function:[a-zA-Z0-9-_]+(:(\$LATEST|[a-zA-Z0-9-_]+))?<br/>
     */
    private String functionArn;
    /**
     * The UTC time string indicating the last time the event mapping was
     * updated.
     */
    // NOTE(review): typed java.util.Date although documented as a
    // "UTC time string" — generated SDK code, confirm before changing
    private java.util.Date lastModified;
    /**
     * The result of the last AWS Lambda invocation of your Lambda function.
     */
    private String lastProcessingResult;
    /**
     * The state of the event source mapping. It can be
     * <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
     * <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
     * or <code>Deleting</code>.
     */
    private String state;
    /**
     * The reason the event source mapping is in its current state. It is
     * either user-requested or an AWS Lambda-initiated state transition.
     */
    private String stateTransitionReason;
    /**
     * Returns the AWS Lambda assigned opaque identifier for the mapping.
     *
     * @return The AWS Lambda assigned opaque identifier for the mapping.
     */
    public String getUUID() {
        return uUID;
    }
    /**
     * Sets the AWS Lambda assigned opaque identifier for the mapping.
     *
     * @param uUID The AWS Lambda assigned opaque identifier for the mapping.
     */
    public void setUUID(String uUID) {
        this.uUID = uUID;
    }
/**
* The AWS Lambda assigned opaque identifier for the mapping.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param uUID The AWS Lambda assigned opaque identifier for the mapping.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withUUID(String uUID) {
this.uUID = uUID;
return this;
}
    /**
     * Returns the largest number of records that AWS Lambda will retrieve
     * from your event source at the time of invoking your function. Your
     * function receives an event with all the retrieved records.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Range: </b>1 - 10000<br/>
     *
     * @return The largest number of records that AWS Lambda will retrieve from your
     *         event source at the time of invoking your function. Your function
     *         receives an event with all the retrieved records.
     */
    public Integer getBatchSize() {
        return batchSize;
    }
    /**
     * Sets the largest number of records that AWS Lambda will retrieve from
     * your event source at the time of invoking your function. Your function
     * receives an event with all the retrieved records.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Range: </b>1 - 10000<br/>
     *
     * @param batchSize The largest number of records that AWS Lambda will retrieve from your
     *        event source at the time of invoking your function. Your function
     *        receives an event with all the retrieved records.
     */
    public void setBatchSize(Integer batchSize) {
        this.batchSize = batchSize;
    }
/**
* The largest number of records that AWS Lambda will retrieve from your
* event source at the time of invoking your function. Your function
* receives an event with all the retrieved records.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Range: </b>1 - 10000<br/>
*
* @param batchSize The largest number of records that AWS Lambda will retrieve from your
* event source at the time of invoking your function. Your function
* receives an event with all the retrieved records.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withBatchSize(Integer batchSize) {
this.batchSize = batchSize;
return this;
}
/**
* The Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
* the source of events.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>arn:aws:([a-zA-Z0-9\-])+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:(.*)<br/>
*
* @return The Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
* the source of events.
*/
public String getEventSourceArn() {
return eventSourceArn;
}
    /**
     * Sets the Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
     * the source of events.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Pattern: </b>arn:aws:([a-zA-Z0-9\-])+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:(.*)<br/>
     *
     * @param eventSourceArn the ARN of the Kinesis stream that is the source of events
     */
    public void setEventSourceArn(String eventSourceArn) {
        this.eventSourceArn = eventSourceArn;
    }
/**
* The Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
* the source of events.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>arn:aws:([a-zA-Z0-9\-])+:([a-z]{2}-[a-z]+-\d{1})?:(\d{12})?:(.*)<br/>
*
* @param eventSourceArn The Amazon Resource Name (ARN) of the Amazon Kinesis stream that is
* the source of events.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withEventSourceArn(String eventSourceArn) {
this.eventSourceArn = eventSourceArn;
return this;
}
/**
* The Lambda function to invoke when AWS Lambda detects an event on the
* stream.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>arn:aws:lambda:[a-z]{2}-[a-z]+-\d{1}:\d{12}:function:[a-zA-Z0-9-_]+(:(\$LATEST|[a-zA-Z0-9-_]+))?<br/>
*
* @return The Lambda function to invoke when AWS Lambda detects an event on the
* stream.
*/
public String getFunctionArn() {
return functionArn;
}
    /**
     * Sets the Lambda function to invoke when AWS Lambda detects an event on
     * the stream.
     * <p>
     * <b>Constraints:</b><br/>
     * <b>Pattern: </b>arn:aws:lambda:[a-z]{2}-[a-z]+-\d{1}:\d{12}:function:[a-zA-Z0-9-_]+(:(\$LATEST|[a-zA-Z0-9-_]+))?<br/>
     *
     * @param functionArn the ARN of the Lambda function invoked for stream events
     */
    public void setFunctionArn(String functionArn) {
        this.functionArn = functionArn;
    }
/**
* The Lambda function to invoke when AWS Lambda detects an event on the
* stream.
* <p>
* Returns a reference to this object so that method calls can be chained together.
* <p>
* <b>Constraints:</b><br/>
* <b>Pattern: </b>arn:aws:lambda:[a-z]{2}-[a-z]+-\d{1}:\d{12}:function:[a-zA-Z0-9-_]+(:(\$LATEST|[a-zA-Z0-9-_]+))?<br/>
*
* @param functionArn The Lambda function to invoke when AWS Lambda detects an event on the
* stream.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withFunctionArn(String functionArn) {
this.functionArn = functionArn;
return this;
}
/**
* The UTC time string indicating the last time the event mapping was
* updated.
*
* @return The UTC time string indicating the last time the event mapping was
* updated.
*/
public java.util.Date getLastModified() {
return lastModified;
}
    /**
     * Sets the UTC time indicating the last time the event mapping was
     * updated.
     *
     * @param lastModified the last time the event mapping was updated
     */
    public void setLastModified(java.util.Date lastModified) {
        this.lastModified = lastModified;
    }
/**
* The UTC time string indicating the last time the event mapping was
* updated.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param lastModified The UTC time string indicating the last time the event mapping was
* updated.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withLastModified(java.util.Date lastModified) {
this.lastModified = lastModified;
return this;
}
/**
* The result of the last AWS Lambda invocation of your Lambda function.
*
* @return The result of the last AWS Lambda invocation of your Lambda function.
*/
public String getLastProcessingResult() {
return lastProcessingResult;
}
    /**
     * Sets the result of the last AWS Lambda invocation of your Lambda
     * function.
     *
     * @param lastProcessingResult the result of the last AWS Lambda invocation
     */
    public void setLastProcessingResult(String lastProcessingResult) {
        this.lastProcessingResult = lastProcessingResult;
    }
/**
* The result of the last AWS Lambda invocation of your Lambda function.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param lastProcessingResult The result of the last AWS Lambda invocation of your Lambda function.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withLastProcessingResult(String lastProcessingResult) {
this.lastProcessingResult = lastProcessingResult;
return this;
}
/**
* The state of the event source mapping. It can be
* <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
* <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
* or <code>Deleting</code>.
*
* @return The state of the event source mapping. It can be
* <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
* <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
* or <code>Deleting</code>.
*/
public String getState() {
return state;
}
    /**
     * Sets the state of the event source mapping. It can be
     * <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
     * <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
     * or <code>Deleting</code>.
     *
     * @param state the state of the event source mapping
     */
    public void setState(String state) {
        this.state = state;
    }
/**
* The state of the event source mapping. It can be
* <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
* <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
* or <code>Deleting</code>.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param state The state of the event source mapping. It can be
* <code>Creating</code>, <code>Enabled</code>, <code>Disabled</code>,
* <code>Enabling</code>, <code>Disabling</code>, <code>Updating</code>,
* or <code>Deleting</code>.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withState(String state) {
this.state = state;
return this;
}
/**
* The reason the event source mapping is in its current state. It is
* either user-requested or an AWS Lambda-initiated state transition.
*
* @return The reason the event source mapping is in its current state. It is
* either user-requested or an AWS Lambda-initiated state transition.
*/
public String getStateTransitionReason() {
return stateTransitionReason;
}
    /**
     * Sets the reason the event source mapping is in its current state. It is
     * either user-requested or an AWS Lambda-initiated state transition.
     *
     * @param stateTransitionReason the reason the mapping is in its current state
     */
    public void setStateTransitionReason(String stateTransitionReason) {
        this.stateTransitionReason = stateTransitionReason;
    }
/**
* The reason the event source mapping is in its current state. It is
* either user-requested or an AWS Lambda-initiated state transition.
* <p>
* Returns a reference to this object so that method calls can be chained together.
*
* @param stateTransitionReason The reason the event source mapping is in its current state. It is
* either user-requested or an AWS Lambda-initiated state transition.
*
* @return A reference to this updated object so that method calls can be chained
* together.
*/
public GetEventSourceMappingResult withStateTransitionReason(String stateTransitionReason) {
this.stateTransitionReason = stateTransitionReason;
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getUUID() != null) sb.append("UUID: " + getUUID() + ",");
if (getBatchSize() != null) sb.append("BatchSize: " + getBatchSize() + ",");
if (getEventSourceArn() != null) sb.append("EventSourceArn: " + getEventSourceArn() + ",");
if (getFunctionArn() != null) sb.append("FunctionArn: " + getFunctionArn() + ",");
if (getLastModified() != null) sb.append("LastModified: " + getLastModified() + ",");
if (getLastProcessingResult() != null) sb.append("LastProcessingResult: " + getLastProcessingResult() + ",");
if (getState() != null) sb.append("State: " + getState() + ",");
if (getStateTransitionReason() != null) sb.append("StateTransitionReason: " + getStateTransitionReason() );
sb.append("}");
return sb.toString();
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getUUID() == null) ? 0 : getUUID().hashCode());
hashCode = prime * hashCode + ((getBatchSize() == null) ? 0 : getBatchSize().hashCode());
hashCode = prime * hashCode + ((getEventSourceArn() == null) ? 0 : getEventSourceArn().hashCode());
hashCode = prime * hashCode + ((getFunctionArn() == null) ? 0 : getFunctionArn().hashCode());
hashCode = prime * hashCode + ((getLastModified() == null) ? 0 : getLastModified().hashCode());
hashCode = prime * hashCode + ((getLastProcessingResult() == null) ? 0 : getLastProcessingResult().hashCode());
hashCode = prime * hashCode + ((getState() == null) ? 0 : getState().hashCode());
hashCode = prime * hashCode + ((getStateTransitionReason() == null) ? 0 : getStateTransitionReason().hashCode());
return hashCode;
}
@Override
public boolean equals(Object obj) {
if (this == obj) return true;
if (obj == null) return false;
if (obj instanceof GetEventSourceMappingResult == false) return false;
GetEventSourceMappingResult other = (GetEventSourceMappingResult)obj;
if (other.getUUID() == null ^ this.getUUID() == null) return false;
if (other.getUUID() != null && other.getUUID().equals(this.getUUID()) == false) return false;
if (other.getBatchSize() == null ^ this.getBatchSize() == null) return false;
if (other.getBatchSize() != null && other.getBatchSize().equals(this.getBatchSize()) == false) return false;
if (other.getEventSourceArn() == null ^ this.getEventSourceArn() == null) return false;
if (other.getEventSourceArn() != null && other.getEventSourceArn().equals(this.getEventSourceArn()) == false) return false;
if (other.getFunctionArn() == null ^ this.getFunctionArn() == null) return false;
if (other.getFunctionArn() != null && other.getFunctionArn().equals(this.getFunctionArn()) == false) return false;
if (other.getLastModified() == null ^ this.getLastModified() == null) return false;
if (other.getLastModified() != null && other.getLastModified().equals(this.getLastModified()) == false) return false;
if (other.getLastProcessingResult() == null ^ this.getLastProcessingResult() == null) return false;
if (other.getLastProcessingResult() != null && other.getLastProcessingResult().equals(this.getLastProcessingResult()) == false) return false;
if (other.getState() == null ^ this.getState() == null) return false;
if (other.getState() != null && other.getState().equals(this.getState()) == false) return false;
if (other.getStateTransitionReason() == null ^ this.getStateTransitionReason() == null) return false;
if (other.getStateTransitionReason() != null && other.getStateTransitionReason().equals(this.getStateTransitionReason()) == false) return false;
return true;
}
@Override
public GetEventSourceMappingResult clone() {
try {
return (GetEventSourceMappingResult) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException(
"Got a CloneNotSupportedException from Object.clone() "
+ "even though we're Cloneable!",
e);
}
}
}
| |
/*
* This file is part of dependency-check-core.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* Copyright (c) 2012 Jeremy Long. All Rights Reserved.
*/
package org.owasp.dependencycheck.utils;
import java.io.BufferedOutputStream;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.InvalidAlgorithmParameterException;
import java.util.zip.GZIPInputStream;
import java.util.zip.InflaterInputStream;
import static java.lang.String.format;
import static org.owasp.dependencycheck.utils.Settings.KEYS.DOWNLOADER_QUICK_QUERY_TIMESTAMP;
import static org.owasp.dependencycheck.utils.Settings.getBoolean;
/**
 * A utility to download files from the Internet.
 *
 * @author Jeremy Long
 */
public final class Downloader {

    /**
     * The logger.
     */
    private static final Logger LOGGER = LoggerFactory.getLogger(Downloader.class);
    /**
     * The maximum number of redirects that will be followed when attempting to download a file.
     */
    private static final int MAX_REDIRECT_ATTEMPTS = 5;
    /**
     * The default HTTP request method for query timestamp.
     */
    private static final String HEAD = "HEAD";
    /**
     * The alternate HTTP request method which can be used by query timestamp.
     */
    private static final String GET = "GET";

    /**
     * Private constructor for utility class.
     */
    private Downloader() {
    }

    /**
     * Retrieves a file from a given URL and saves it to the outputPath, using the
     * configured proxy (if any).
     *
     * @param url the URL of the file to download
     * @param outputPath the path to the save the file to
     * @throws DownloadFailedException is thrown if there is an error downloading the file
     */
    public static void fetchFile(URL url, File outputPath) throws DownloadFailedException {
        fetchFile(url, outputPath, true);
    }

    /**
     * Retrieves a file from a given URL and saves it to the outputPath. file:// URLs are
     * copied locally; all other URLs are fetched over HTTP with gzip/deflate support and
     * manual redirect handling.
     *
     * @param url the URL of the file to download
     * @param outputPath the path to the save the file to
     * @param useProxy whether to use the configured proxy when downloading files
     * @throws DownloadFailedException is thrown if there is an error downloading the file
     */
    public static void fetchFile(URL url, File outputPath, boolean useProxy) throws DownloadFailedException {
        if ("file".equalsIgnoreCase(url.getProtocol())) {
            File file;
            try {
                file = new File(url.toURI());
            } catch (URISyntaxException ex) {
                final String msg = format("Download failed, unable to locate '%s'", url.toString());
                // Fix: preserve the underlying exception as the cause.
                throw new DownloadFailedException(msg, ex);
            }
            if (file.exists()) {
                try {
                    org.apache.commons.io.FileUtils.copyFile(file, outputPath);
                } catch (IOException ex) {
                    final String msg = format("Download failed, unable to copy '%s' to '%s'", url.toString(), outputPath.getAbsolutePath());
                    // Fix: preserve the underlying exception as the cause.
                    throw new DownloadFailedException(msg, ex);
                }
            } else {
                final String msg = format("Download failed, file ('%s') does not exist", url.toString());
                throw new DownloadFailedException(msg);
            }
        } else {
            HttpURLConnection conn = null;
            try {
                LOGGER.debug("Attempting download of {}", url.toString());
                conn = URLConnectionFactory.createHttpURLConnection(url, useProxy);
                conn.setRequestProperty("Accept-Encoding", "gzip, deflate");
                conn.connect();
                int status = conn.getResponseCode();
                int redirectCount = 0;
                // Follow redirects manually so each hop goes through the proxy-aware
                // connection factory; capped at MAX_REDIRECT_ATTEMPTS to avoid loops.
                while ((status == HttpURLConnection.HTTP_MOVED_TEMP
                        || status == HttpURLConnection.HTTP_MOVED_PERM
                        || status == HttpURLConnection.HTTP_SEE_OTHER)
                        && MAX_REDIRECT_ATTEMPTS > redirectCount++) {
                    final String location = conn.getHeaderField("Location");
                    try {
                        conn.disconnect();
                    } finally {
                        conn = null;
                    }
                    LOGGER.debug("Download is being redirected from {} to {}", url.toString(), location);
                    conn = URLConnectionFactory.createHttpURLConnection(new URL(location), useProxy);
                    conn.setRequestProperty("Accept-Encoding", "gzip, deflate");
                    conn.connect();
                    status = conn.getResponseCode();
                }
                // NOTE(review): only HTTP 200 is accepted; other 2xx responses are
                // rejected, matching the original behavior.
                if (status != 200) {
                    try {
                        conn.disconnect();
                    } finally {
                        conn = null;
                    }
                    final String msg = format("Error downloading file %s; received response code %s.", url.toString(), status);
                    throw new DownloadFailedException(msg);
                }
            } catch (IOException ex) {
                try {
                    if (conn != null) {
                        conn.disconnect();
                    }
                } finally {
                    conn = null;
                }
                final String msg = format("Error downloading file %s; unable to connect.", url.toString());
                throw new DownloadFailedException(msg, ex);
            }
            final String encoding = conn.getContentEncoding();
            BufferedOutputStream writer = null;
            InputStream reader = null;
            try {
                // Wrap the response stream based on the Content-Encoding header.
                if (encoding != null && "gzip".equalsIgnoreCase(encoding)) {
                    reader = new GZIPInputStream(conn.getInputStream());
                } else if (encoding != null && "deflate".equalsIgnoreCase(encoding)) {
                    reader = new InflaterInputStream(conn.getInputStream());
                } else {
                    reader = conn.getInputStream();
                }
                writer = new BufferedOutputStream(new FileOutputStream(outputPath));
                final byte[] buffer = new byte[4096];
                int bytesRead;
                while ((bytesRead = reader.read(buffer)) > 0) {
                    writer.write(buffer, 0, bytesRead);
                }
                LOGGER.debug("Download of {} complete", url.toString());
            } catch (IOException ex) {
                analyzeException(ex);
                final String msg = format("Error saving '%s' to file '%s'%nConnection Timeout: %d%nEncoding: %s%n",
                        url.toString(), outputPath.getAbsolutePath(), conn.getConnectTimeout(), encoding);
                throw new DownloadFailedException(msg, ex);
            } catch (Throwable ex) {
                final String msg = format("Unexpected exception saving '%s' to file '%s'%nConnection Timeout: %d%nEncoding: %s%n",
                        url.toString(), outputPath.getAbsolutePath(), conn.getConnectTimeout(), encoding);
                throw new DownloadFailedException(msg, ex);
            } finally {
                // Close quietly so a close failure cannot mask a download error.
                if (writer != null) {
                    try {
                        writer.close();
                    } catch (IOException ex) {
                        LOGGER.trace("Error closing the writer in Downloader.", ex);
                    }
                }
                if (reader != null) {
                    try {
                        reader.close();
                    } catch (IOException ex) {
                        LOGGER.trace("Error closing the reader in Downloader.", ex);
                    }
                }
                try {
                    conn.disconnect();
                } finally {
                    conn = null;
                }
            }
        }
    }

    /**
     * Makes an HTTP HEAD (or GET) request to retrieve the last modified date of the given URL.
     * If the file:// protocol is specified, then the lastTimestamp of the file is returned.
     *
     * @param url the URL to retrieve the timestamp from
     * @return an epoch timestamp
     * @throws DownloadFailedException is thrown if an exception occurs making the HTTP request
     */
    public static long getLastModified(URL url) throws DownloadFailedException {
        long timestamp = 0;
        //TODO add the FTP protocol?
        if ("file".equalsIgnoreCase(url.getProtocol())) {
            File lastModifiedFile;
            try {
                lastModifiedFile = new File(url.toURI());
            } catch (URISyntaxException ex) {
                final String msg = format("Unable to locate '%s'", url.toString());
                // Fix: preserve the underlying exception as the cause.
                throw new DownloadFailedException(msg, ex);
            }
            timestamp = lastModifiedFile.lastModified();
        } else {
            final String httpMethod = determineHttpMethod();
            HttpURLConnection conn = null;
            try {
                conn = URLConnectionFactory.createHttpURLConnection(url);
                conn.setRequestMethod(httpMethod);
                conn.connect();
                final int t = conn.getResponseCode();
                if (t >= 200 && t < 300) {
                    timestamp = conn.getLastModified();
                } else {
                    throw new DownloadFailedException(format("%s request returned a non-200 status code", httpMethod));
                }
            } catch (URLConnectionFailureException ex) {
                throw new DownloadFailedException(format("Error creating URL Connection for HTTP %s request.", httpMethod), ex);
            } catch (IOException ex) {
                analyzeException(ex);
                throw new DownloadFailedException(format("Error making HTTP %s request.", httpMethod), ex);
            } finally {
                if (conn != null) {
                    try {
                        conn.disconnect();
                    } finally {
                        conn = null;
                    }
                }
            }
        }
        return timestamp;
    }

    /**
     * Analyzes the IOException; if the cause chain contains an
     * InvalidAlgorithmParameterException (a known Java/cacerts installation issue), logs
     * diagnostic information and throws a DownloadFailedException. Otherwise returns
     * normally so the caller can wrap the original exception itself.
     *
     * @param ex the original exception
     * @throws DownloadFailedException thrown when the known cacerts issue is detected
     */
    protected static void analyzeException(IOException ex) throws DownloadFailedException {
        Throwable cause = ex;
        while (cause != null) {
            if (cause instanceof InvalidAlgorithmParameterException) {
                final String keystore = System.getProperty("javax.net.ssl.keyStore");
                final String version = System.getProperty("java.version");
                final String vendor = System.getProperty("java.vendor");
                LOGGER.info("Error making HTTPS request - InvalidAlgorithmParameterException");
                LOGGER.info("There appears to be an issue with the installation of Java and the cacerts."
                        + "See closed issue #177 here: https://github.com/jeremylong/DependencyCheck/issues/177");
                LOGGER.info("Java Info:\njavax.net.ssl.keyStore='{}'\njava.version='{}'\njava.vendor='{}'",
                        keystore, version, vendor);
                throw new DownloadFailedException("Error making HTTPS request. Please see the log for more details.");
            }
            cause = cause.getCause();
        }
    }

    /**
     * Returns the HEAD or GET HTTP method. HEAD is the default.
     *
     * @return the HTTP method to use
     */
    private static String determineHttpMethod() {
        return isQuickQuery() ? HEAD : GET;
    }

    /**
     * Determines if the HTTP method GET or HEAD should be used to check the timestamp on
     * external resources. Defaults to true (HEAD) if the setting cannot be read.
     *
     * @return true if configured to use HEAD requests
     */
    private static boolean isQuickQuery() {
        boolean quickQuery;
        try {
            quickQuery = getBoolean(DOWNLOADER_QUICK_QUERY_TIMESTAMP, true);
        } catch (InvalidSettingException e) {
            quickQuery = true;
        }
        return quickQuery;
    }
}
| |
/*
* The MIT License
*
* Copyright 2015 Neil McAlister.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package dotaSoundEditor.Controls;
import dotaSoundEditor.Helpers.PortraitFinder;
import dotaSoundEditor.Helpers.ScriptParser;
import dotaSoundEditor.Helpers.Utility;
import dotaSoundEditor.*;
import dotaSoundEditor.Helpers.*;
import info.ata4.vpk.VPKArchive;
import info.ata4.vpk.VPKEntry;
import java.io.File;
import java.io.IOException;
import java.nio.file.*;
import javax.swing.ImageIcon;
import javax.swing.JFileChooser;
import javax.swing.JOptionPane;
import javax.swing.ToolTipManager;
import javax.swing.event.TreeSelectionEvent;
import javax.swing.event.TreeSelectionListener;
import javax.swing.filechooser.FileNameExtensionFilter;
import javax.swing.tree.DefaultMutableTreeNode;
import javax.swing.tree.DefaultTreeModel;
import javax.swing.tree.TreeModel;
import javax.swing.tree.TreeSelectionModel;
import org.apache.commons.io.FilenameUtils;
/**
*
* @author Image 17
*/
public final class ItemPanel extends EditorPanel
{
PortraitFinder portraitFinder;
    /**
     * Creates the panel with its Swing components only; no VPK, cache, or sound
     * player wiring is performed.
     */
    public ItemPanel()
    {
        initComponents();
    }
    /**
     * Creates and fully wires the Items panel: builds the components, loads the
     * item sound list from the game's script file, and installs the tree
     * selection listener.
     *
     * @param _vpkPath    path to the game's VPK archive containing the sound scripts
     * @param _installDir the game's installation directory
     * @param _cm         cache manager used to track extracted script files
     * @param _sp         shared sound player used to preview sounds
     */
    public ItemPanel(String _vpkPath, String _installDir, CacheManager _cm, SoundPlayer _sp)
    {
        vpkPath = _vpkPath;
        installDir = _installDir;
        this.setName("Items");
        initComponents();
        soundPlayer = _sp;
        cacheManager = _cm;
        currentTree = itemTree;
        portraitFinder = Utility.portraitFinder;
        this.populateSoundList();
        initTreeSelectionListener();
        fillImageFrame("default");
        //Keep the help tooltip visible longer and show it immediately.
        //NOTE(review): ToolTipManager is shared, so these settings apply app-wide.
        ToolTipManager.sharedInstance().setDismissDelay(20000);
        ToolTipManager.sharedInstance().setInitialDelay(0);
    }
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents()
{
itemImageLabel = new javax.swing.JLabel();
jScrollPane2 = new javax.swing.JScrollPane();
itemTree = new javax.swing.JTree();
itemLabel = new javax.swing.JLabel();
itemHelpLabel = new javax.swing.JLabel();
itemImageLabel.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
itemImageLabel.setMaximumSize(new java.awt.Dimension(128, 72));
itemImageLabel.setMinimumSize(new java.awt.Dimension(128, 72));
itemImageLabel.setPreferredSize(new java.awt.Dimension(124, 64));
jScrollPane2.setViewportView(itemTree);
itemLabel.setText("Item:");
itemHelpLabel.setForeground(new java.awt.Color(255, 0, 0));
itemHelpLabel.setHorizontalAlignment(javax.swing.SwingConstants.RIGHT);
itemHelpLabel.setText("<html><p align=\"right\">Sounds not working?<br>Mouseover here!</p></html>");
itemHelpLabel.setToolTipText("<html>Item sound replacements are more restricted than other sounds.\n<br>Item sounds must be the same file type (WAV or MP3) as the original. \n<brIf the new sound is longer than the original, it will be cut off.\n<br>They must also match the bitrate (128 kpbs for MP3s, 1141 kbps for WAVs) and frequency (44.1KHz) of the original.</html>");
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addComponent(itemLabel)
.addGap(0, 0, Short.MAX_VALUE))
.addComponent(jScrollPane2)
.addGroup(layout.createSequentialGroup()
.addGap(0, 120, Short.MAX_VALUE)
.addComponent(itemImageLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addGap(0, 38, Short.MAX_VALUE)
.addComponent(itemHelpLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addContainerGap()
.addComponent(itemLabel)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(itemImageLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 64, javax.swing.GroupLayout.PREFERRED_SIZE)
.addComponent(itemHelpLabel, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jScrollPane2, javax.swing.GroupLayout.DEFAULT_SIZE, 383, Short.MAX_VALUE)
.addContainerGap())
);
}// </editor-fold>//GEN-END:initComponents
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JLabel itemHelpLabel;
private javax.swing.JLabel itemImageLabel;
private javax.swing.JLabel itemLabel;
private javax.swing.JTree itemTree;
private javax.swing.JScrollPane jScrollPane2;
// End of variables declaration//GEN-END:variables
    /**
     * Updates the portrait image shown for the currently selected entry. When
     * the selection is a NamedItem, its icon is looked up by icon name;
     * otherwise the "default" portrait is shown.
     *
     * @param _selectedItem the selected tree entry; any Object is accepted
     */
    @Override
    void fillImageFrame(Object _selectedItem)
    {
        NamedItem selectedItem = new NamedItem();
        try
        {
            if (_selectedItem instanceof NamedItem)
            {
                selectedItem = (NamedItem) _selectedItem;
                itemImageLabel.setIcon(new ImageIcon(portraitFinder.getPortrait(selectedItem.getIconName())));
            }
            else
            {
                itemImageLabel.setIcon(new ImageIcon(portraitFinder.getPortrait("default")));
            }
        }
        //NPE used as control flow — presumably getPortrait returns null for an
        //unknown icon name; on failure the image is simply cleared.
        catch (NullPointerException ex)
        {
            System.err.println("Icon not found for item: " + selectedItem.getFriendlyName());
            itemImageLabel.setIcon(new ImageIcon(""));
        }
    }
    //Intentionally empty: this panel doesn't use a dropdown box, so there is
    //nothing to populate.
    @Override
    void populateDropdownBox()
    {
    }
    /**
     * Loads the item sound script into the tree view. On first run the script
     * is extracted from the VPK archive to disk and its CRC recorded in the
     * cache; on later runs the on-disk copy is validated against the archive
     * and, if stale, rewritten with the user's local changes merged back in.
     */
    @Override
    void populateSoundList()
    {
        inAdvancedMode = false;
        currentTree.setEditable(false);
        String scriptKey = "game_sounds_items.txt";
        File scriptFile = new File(getCurrentScriptString());
        VPKEntry entry;
        boolean needsValidation = false;
        if (!scriptFile.isFile())
        {
            //First run: extract the script from the VPK and cache its CRC.
            //NOTE(review): getItemScriptFile() can return null if the VPK fails
            //to open, which would NPE on entry.getCRC32() — confirm intended.
            entry = getItemScriptFile();
            this.writeScriptFileToDisk(entry, false);
            this.updateCache(scriptKey, entry.getCRC32());
            scriptFile = new File(getCurrentScriptString());
        }
        else
        {
            //A local copy exists; check it against the archive below.
            needsValidation = true;
        }
        ScriptParser parser = new ScriptParser(scriptFile);
        TreeModel scriptTree = parser.getTreeModel();
        if (needsValidation)
        {
            boolean isUpToDate = this.validateScriptFile(scriptKey, "scripts/" + scriptKey);
            if (!isUpToDate)
            {
                //The archive changed: rewrite the script, merge local edits, re-cache.
                this.writeScriptFileToDisk(cacheManager.getCachedVpkEntry(), true);
                mergeNewChanges(scriptTree, scriptFile);
                this.updateCache(cacheManager.getCachedVpkEntry().getName() + ".txt", cacheManager.getCachedVpkEntry().getCRC32());
            }
        }
        this.currentTreeModel = scriptTree;
        currentTree.setModel(buildSoundListTree(scriptTree));
        currentTree.setRootVisible(false);
        currentTree.setShowsRootHandles(true);
    }
@Override
String getCurrentScriptString()
{
String scriptPathString = Paths.get(installDir, "/dota/scripts/game_sounds_items.txt").toString();
File scriptFilePath = new File(scriptPathString);
if (scriptFilePath.isFile())
{
return scriptFilePath.getAbsolutePath();
}
else
{
return "";
}
}
    /**
     * Returns the relative directory under which this panel's replacement
     * (custom) sounds are stored.
     *
     * @return the relative custom-sound path for items
     */
    @Override
    String getCustomSoundPathString()
    {
        return "custom/items/";
    }
private VPKEntry getItemScriptFile()
{
String internalScriptPath = "scripts/game_sounds_items.txt";
File vpkFile = new File(vpkPath);
VPKArchive vpk = new VPKArchive();
try
{
vpk.load(vpkFile);
}
catch (Exception ex)
{
JOptionPane.showMessageDialog(this,
"Error: Unable to open VPK file.\nDetails: " + ex.getMessage(),
"Error opening VPK", JOptionPane.ERROR_MESSAGE);
ex.printStackTrace();
return null;
}
VPKEntry entry = vpk.getEntry(internalScriptPath);
return entry;
}
@Override
void updateCache(String scriptKey, long internalCrc)
{
String internalPath = "scripts/game_sounds_items.txt";
cacheManager.putScript(scriptKey, internalPath, internalCrc);
}
private void initTreeSelectionListener()
{
currentTree.getSelectionModel().setSelectionMode(TreeSelectionModel.SINGLE_TREE_SELECTION);
currentTree.addTreeSelectionListener(new TreeSelectionListener()
{
@Override
public void valueChanged(TreeSelectionEvent evt)
{
currentTreeSelectionChanged(evt);
}
});
attachDoubleClickListenerToTree();
}
//Need this for ItemPanel because it's only a single script file, and thus each node requires a different portrait
private void currentTreeSelectionChanged(TreeSelectionEvent evt)
{
    DefaultMutableTreeNode selected = (DefaultMutableTreeNode) currentTree.getLastSelectedPathComponent();
    if (selected == null)
    {
        return;
    }
    // Walk up to the top-level ancestor (the direct child of the root) of the clicked node.
    DefaultMutableTreeNode topLevel = selected;
    while (topLevel.getParent() != null && !topLevel.getParent().equals(topLevel.getRoot()))
    {
        topLevel = (DefaultMutableTreeNode) topLevel.getParent();
    }
    NamedItem clickedItem = new NamedItem(topLevel.getUserObject().toString(), this.getCurrentScriptString());
    fillImageFrame(clickedItem);
    itemLabel.setText("Item: " + clickedItem.getFriendlyName());
}
//For whatever reasons, item sounds replacements REFUSE to work unless the
//filename remains unchanged, so we're overriding the logic here to copy the
//custom sound, rename it and just note what it used to be in a wavestring
//comment. Additionally, it limits the filetypes we're allowed to use,
//because we have to use whatever the original was.
@Override
protected File promptUserForNewFile(String wavePath)
{
    DefaultMutableTreeNode selectedTreeNode = (DefaultMutableTreeNode) getTreeNodeFromWavePath(wavePath);
    String waveString = selectedTreeNode.getUserObject().toString();
    // The replacement must keep the original extension, so restrict the chooser to it.
    String allowedExtension = FilenameUtils.getExtension(waveString).replace("\"", "");
    JFileChooser chooser = new JFileChooser(new File(UserPrefs.getInstance().getWorkingDirectory()));
    FileNameExtensionFilter filter = allowedExtension.equals("wav")
            ? new FileNameExtensionFilter("WAVs", "wav")
            : new FileNameExtensionFilter("MP3s", "mp3");
    chooser.setAcceptAllFileFilterUsed(false);
    chooser.setFileFilter(filter);
    chooser.setMultiSelectionEnabled(false);
    // BUGFIX: parent the dialog on this panel, not on the chooser itself.
    int chooserRetVal = chooser.showOpenDialog(this);
    if (chooserRetVal == JFileChooser.APPROVE_OPTION)
    {
        Path chosenFile = Paths.get(chooser.getSelectedFile().getAbsolutePath());
        int startIndex = -1;
        int endIndex = -1;
        //Get the actual value for the wavestring key-value pair.
        if (waveString.contains("\"wave\""))
        {
            startIndex = Utility.nthOccurrence(selectedTreeNode.getUserObject().toString(), '\"', 2);
            endIndex = Utility.nthOccurrence(selectedTreeNode.getUserObject().toString(), '\"', 3);
        }
        else //Some wavestrings don't have the "wave" at the beginning for some reason
        {
            startIndex = Utility.nthOccurrence(selectedTreeNode.getUserObject().toString(), '\"', 0);
            endIndex = Utility.nthOccurrence(selectedTreeNode.getUserObject().toString(), '\"', 1);
        }
        String waveStringFilePath = waveString.substring(startIndex, endIndex + 1);
        // Normalize the quoted path: drop trailing quote, stray ')' and remaining quotes.
        String waveStringNormalizedFilePath = waveStringFilePath.substring(0, waveStringFilePath.lastIndexOf("\""));
        waveStringNormalizedFilePath = waveStringNormalizedFilePath.replace(")", "");
        waveStringNormalizedFilePath = waveStringNormalizedFilePath.replace("\"", "");
        Path destPath = Paths.get(installDir, "/dota/sound/" + waveStringNormalizedFilePath);
        UserPrefs.getInstance().setWorkingDirectory(chosenFile.getParent().toString());
        try
        {
            // BUGFIX: create only the PARENT directories of the destination. The old code
            // ran mkdirs() on the destination path itself, creating the target as a
            // directory and relying on Files.copy(REPLACE_EXISTING) replacing the empty
            // directory with the copied file.
            Files.createDirectories(destPath.getParent());
            Files.copy(chosenFile, destPath, StandardCopyOption.REPLACE_EXISTING);
            // Strip any previous "//Replaced by" comment before appending the new one.
            if (waveString.contains("//"))
            {
                waveString = waveString.replace(waveString.substring(waveString.indexOf("//"), waveString.length()), "");
            }
            waveString = waveString.replace(waveStringFilePath, "\"" + waveStringNormalizedFilePath + "\" //Replaced by: " + chosenFile.getFileName().toString());
            selectedTreeNode.setUserObject(waveString);
            //Write out modified tree to scriptfile.
            ScriptParser parser = new ScriptParser(this.currentTreeModel);
            String scriptString = getCurrentScriptString();
            Path scriptPath = Paths.get(scriptString);
            parser.writeModelToFile(scriptPath.toString());
            //Update UI
            ((DefaultMutableTreeNode) currentTree.getLastSelectedPathComponent()).setUserObject(waveString);
            ((DefaultTreeModel) currentTree.getModel()).nodeChanged((DefaultMutableTreeNode) currentTree.getLastSelectedPathComponent());
            JOptionPane.showMessageDialog(this, "Sound file successfully replaced.");
        }
        catch (IOException ex)
        {
            JOptionPane.showMessageDialog(null, "Unable to replace sound.\nDetails: " + ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
        }
    }
    // Contract of this override: replacement is done in place, no new File is returned.
    return null;
}
}
| |
/**********************************************************************
Copyright (c) 2006 Andy Jefferson and others. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Contributors:
...
**********************************************************************/
package org.datanucleus.tests.metadata;
import javax.persistence.PostLoad;
import javax.persistence.PostPersist;
import javax.persistence.PrePersist;
import javax.persistence.PreRemove;
import org.datanucleus.ClassLoaderResolver;
import org.datanucleus.ClassLoaderResolverImpl;
import org.datanucleus.NucleusContext;
import org.datanucleus.PersistenceNucleusContextImpl;
import org.datanucleus.api.jpa.metadata.JPAMetaDataManager;
import org.datanucleus.metadata.AbstractMemberMetaData;
import org.datanucleus.metadata.ClassMetaData;
import org.datanucleus.metadata.ColumnMetaData;
import org.datanucleus.metadata.ElementMetaData;
import org.datanucleus.metadata.EventListenerMetaData;
import org.datanucleus.metadata.FieldPersistenceModifier;
import org.datanucleus.metadata.IdentityStrategy;
import org.datanucleus.metadata.IdentityType;
import org.datanucleus.metadata.InheritanceMetaData;
import org.datanucleus.metadata.InheritanceStrategy;
import org.datanucleus.metadata.JoinMetaData;
import org.datanucleus.metadata.KeyMetaData;
import org.datanucleus.metadata.MapMetaData;
import org.datanucleus.metadata.MetaDataManager;
import org.datanucleus.metadata.OrderMetaData;
import org.datanucleus.metadata.PackageMetaData;
import org.datanucleus.metadata.PersistenceUnitMetaData;
import org.datanucleus.metadata.QueryLanguage;
import org.datanucleus.metadata.QueryMetaData;
import org.datanucleus.metadata.QueryResultMetaData;
import org.datanucleus.metadata.RelationType;
import org.datanucleus.metadata.SequenceMetaData;
import org.datanucleus.metadata.TableGeneratorMetaData;
import org.datanucleus.metadata.OrderMetaData.FieldOrder;
import org.datanucleus.metadata.QueryResultMetaData.PersistentTypeMapping;
import org.datanucleus.tests.JPAPersistenceTestCase;
import org.jpox.samples.annotations.abstractclasses.AbstractSimpleBase;
import org.jpox.samples.annotations.abstractclasses.ConcreteSimpleSub1;
import org.jpox.samples.annotations.abstractclasses.ConcreteSimpleSub2;
import org.jpox.samples.annotations.array.ByteArray;
import org.jpox.samples.annotations.idclass.IdClassAccessors;
import org.jpox.samples.annotations.models.company.MyListener;
import org.jpox.samples.annotations.models.company.WebSite;
import org.jpox.samples.annotations.many_many.PetroleumCustomer;
import org.jpox.samples.annotations.many_many.PetroleumSupplier;
import org.jpox.samples.annotations.models.company.Account;
import org.jpox.samples.annotations.models.company.DepartmentPK;
import org.jpox.samples.annotations.models.company.Employee;
import org.jpox.samples.annotations.models.company.Department;
import org.jpox.samples.annotations.models.company.Manager;
import org.jpox.samples.annotations.models.company.Person;
import org.jpox.samples.annotations.models.company.Project;
import org.jpox.samples.annotations.one_many.unidir_2.UserGroup;
import org.jpox.samples.annotations.one_one.bidir.Boiler;
import org.jpox.samples.annotations.one_one.bidir.Timer;
import org.jpox.samples.annotations.one_one.unidir.Login;
import org.jpox.samples.annotations.one_one.unidir.LoginAccount;
import org.jpox.samples.annotations.secondarytable.Printer;
import org.jpox.samples.annotations.types.basic.TypeHolder;
import org.jpox.samples.annotations.types.enums.EnumHolder;
/**
* Tests for the use of JPA annotations and the generation of internal JPOX metadata.
*/
public class AnnotationTest extends JPAPersistenceTestCase
{
/**
 * Constructor.
 * @param name Name of the test case to run.
 */
public AnnotationTest(String name)
{
    super(name);
}
/**
 * Test of basic JPA annotations reading capability.
 * Fixed: all assertEquals calls now pass the EXPECTED value first and the actual
 * value second (JUnit convention), consistent with the other tests in this class.
 */
public void testBasic()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    ClassLoaderResolver clr = new ClassLoaderResolverImpl();

    // Checks for Department
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Department.class.getName(), clr);
    String prefix = cmd1.getFullClassName() + " : ";
    assertEquals(prefix + "detachable is wrong", true, cmd1.isDetachable());
    assertEquals(prefix + "identity-type is wrong", IdentityType.APPLICATION, cmd1.getIdentityType());
    assertEquals(prefix + "embedded-only is wrong", false, cmd1.isEmbeddedOnly());
    assertEquals(prefix + "requires-extent is wrong", true, cmd1.isRequiresExtent());
    assertEquals(prefix + "catalog is wrong", null, cmd1.getCatalog());
    assertEquals(prefix + "schema is wrong", null, cmd1.getSchema());
    assertEquals(prefix + "table is wrong", "JPA_AN_DEPARTMENT", cmd1.getTable());
    assertEquals(prefix + "has incorrect number of persistent fields", 4, cmd1.getNoOfManagedMembers());
    InheritanceMetaData inhmd1 = cmd1.getInheritanceMetaData();
    assertEquals("Inheritance strategy is incorrect", InheritanceStrategy.NEW_TABLE, inhmd1.getStrategy());

    // "projects"
    AbstractMemberMetaData fmd = cmd1.getMetaDataForMember("projects");
    assertNotNull(prefix + "doesnt have required field", fmd);
    assertEquals(prefix + "should be persistent", FieldPersistenceModifier.PERSISTENT, fmd.getPersistenceModifier());
    assertFalse(prefix + "pk is wrong", fmd.isPrimaryKey());
    assertFalse(prefix + "dfg is wrong", fmd.isDefaultFetchGroup());
    assertTrue(prefix + "has no container specified!", fmd.getCollection() != null);
    assertEquals(prefix + "should have collection of Project elements but hasnt",
        Project.class.getName(), fmd.getCollection().getElementType());
    assertEquals(prefix + "shouldnt have collection of serialised elements but has",
        false, fmd.getCollection().isSerializedElement());
    assertEquals(prefix + "shouldnt have collection of dependent elements but has",
        false, fmd.getCollection().isDependentElement());

    // Checks for Project
    ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Project.class.getName(), clr);
    prefix = cmd2.getFullClassName() + " : ";
    assertEquals(prefix + "detachable is wrong", true, cmd2.isDetachable());
    assertEquals(prefix + "identity-type is wrong", IdentityType.APPLICATION, cmd2.getIdentityType());
    assertEquals(prefix + "objectid-class is wrong", "javax.jdo.identity.StringIdentity", cmd2.getObjectidClass());
    assertEquals(prefix + "embedded-only is wrong", false, cmd2.isEmbeddedOnly());
    assertEquals(prefix + "requires-extent is wrong", true, cmd2.isRequiresExtent());
    assertEquals(prefix + "catalog is wrong", null, cmd2.getCatalog());
    assertEquals(prefix + "schema is wrong", null, cmd2.getSchema());
    assertEquals(prefix + "table is wrong", "JPA_AN_PROJECT", cmd2.getTable());
    assertEquals(prefix + "has incorrect number of persistent fields", 2, cmd2.getNoOfManagedMembers());
    InheritanceMetaData inhmd2 = cmd2.getInheritanceMetaData();
    assertEquals("Inheritance strategy is incorrect", InheritanceStrategy.NEW_TABLE, inhmd2.getStrategy());

    // "name"
    fmd = cmd2.getMetaDataForMember("name");
    assertNotNull(prefix + "doesnt have required field", fmd);
    assertTrue(prefix + "pk is wrong", fmd.isPrimaryKey());
    assertTrue(prefix + "dfg is wrong", fmd.isDefaultFetchGroup());
    assertEquals(prefix + "should be persistent", FieldPersistenceModifier.PERSISTENT, fmd.getPersistenceModifier());

    // "budget"
    fmd = cmd2.getMetaDataForMember("budget");
    assertNotNull(prefix + "doesnt have required field", fmd);
    assertEquals(prefix + "has incorrect persistent field", "budget", fmd.getName());
    assertFalse(prefix + "pk is wrong", fmd.isPrimaryKey());
    assertTrue(prefix + "dfg is wrong", fmd.isDefaultFetchGroup());
    assertEquals(prefix + "should be persistent", FieldPersistenceModifier.PERSISTENT, fmd.getPersistenceModifier());
}
/**
 * Test of JPA 1-1 unidir relation.
 * Fixed: assertEquals arguments reordered to expected-first, and the final
 * assertion message corrected to name Login (it previously said LoginAccount).
 */
public void testOneToOneUni()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // Owner side of the relation
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(LoginAccount.class.getName(), clr);
    assertEquals("LoginAccount has incorrect table name", "JPA_AN_LOGINACCOUNT", cmd1.getTable());
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("login");
    assertNotNull("LoginAccount.login is null!", fmd1);
    assertEquals("LoginAccount.login mapped-by is incorrect", null, fmd1.getMappedBy());
    assertEquals("LoginAccount.login relationType is incorrect",
        RelationType.ONE_TO_ONE_UNI, fmd1.getRelationType(clr));
    assertNotNull("LoginAccount.login has no column info", fmd1.getColumnMetaData());
    assertEquals("LoginAccount.login has incorrect number of columns", 1, fmd1.getColumnMetaData().length);
    assertEquals("LoginAccount.login column name is wrong", "LOGIN_ID", fmd1.getColumnMetaData()[0].getName());

    // Related (non-owner) class
    ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Login.class.getName(), clr);
    assertEquals("Login has incorrect table name", "JPA_AN_LOGIN", cmd2.getTable());
}
/**
 * Test of JPA 1-1 bidir relation.
 */
public void testOneToOneBi()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // non-owner side
    ClassMetaData boilerMeta = (ClassMetaData)metaDataMgr.getMetaDataForClass(Boiler.class.getName(), clr);
    assertEquals("Boiler has incorrect table name", "JPA_AN_BOILER", boilerMeta.getTable());
    AbstractMemberMetaData timerMember = boilerMeta.getMetaDataForMember("timer");
    assertNotNull("Boiler.timer is null!", timerMember);
    assertEquals("Boiler.timer mapped-by is incorrect", "boiler", timerMember.getMappedBy());
    assertEquals("Boiler.timer relationType is incorrect",
        RelationType.ONE_TO_ONE_BI, timerMember.getRelationType(clr));

    // owner side
    ClassMetaData timerMeta = (ClassMetaData)metaDataMgr.getMetaDataForClass(Timer.class.getName(), clr);
    assertEquals("Timer has incorrect table name", "JPA_AN_TIMER", timerMeta.getTable());
    AbstractMemberMetaData boilerMember = timerMeta.getMetaDataForMember("boiler");
    assertNotNull("Timer.boiler is null!", boilerMember);
    assertEquals("Timer.boiler mapped-by is incorrect", null, boilerMember.getMappedBy());
    assertEquals("Timer.boiler relationType is incorrect", RelationType.ONE_TO_ONE_BI, boilerMember.getRelationType(clr));
    assertNotNull("Timer.boiler has no column info", boilerMember.getColumnMetaData());
    assertEquals("Timer.boiler has incorrect number of columns", 1, boilerMember.getColumnMetaData().length);
    assertEquals("Timer.boiler column name is wrong", "BOILER_ID", boilerMember.getColumnMetaData()[0].getName());
}
/**
 * Test of JPA 1-N bidir FK relation.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testOneToManyBiFK()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // owner side
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Manager.class.getName(), clr);
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("departments");
    assertNotNull("Manager.departments is null!", fmd1);
    assertEquals("Manager.departments mapped-by is incorrect", "manager", fmd1.getMappedBy());
    assertEquals("Manager.departments relationType is incorrect",
        RelationType.ONE_TO_MANY_BI, fmd1.getRelationType(clr));
    ElementMetaData elemmd = fmd1.getElementMetaData();
    assertNull("Manager.departments has join column info but shouldnt (specified on N side)", elemmd);

    // non-owner side
    ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Department.class.getName(), clr);
    AbstractMemberMetaData fmd2 = cmd2.getMetaDataForMember("manager");
    assertNotNull("Department.manager is null!", fmd2);
    assertEquals("Department.manager mapped-by is incorrect", null, fmd2.getMappedBy());
    assertEquals("Department.manager relationType is incorrect",
        RelationType.MANY_TO_ONE_BI, fmd2.getRelationType(clr));
    ColumnMetaData[] colmds = fmd2.getColumnMetaData();
    assertNotNull("Department.manager has no join column info", colmds);
    assertEquals("Department.manager has incorrect number of joincolumns", 1, colmds.length);
    assertEquals("Department.manager joincolumn name is wrong", "MGR_ID", colmds[0].getName());
}
/**
 * Test of JPA 1-N unidir JoinTable relation.
 * Fixed: the two join-column-name assertions passed the actual value first;
 * reordered to expected-first like the rest of the method.
 */
public void testOneToManyUniJoin()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // owner side
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Department.class.getName(), clr);
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("projects");
    assertNotNull("Department.projects is null!", fmd1);
    assertEquals("Department.projects mapped-by is incorrect", null, fmd1.getMappedBy());
    assertEquals("Department.projects relationType is incorrect", RelationType.ONE_TO_MANY_UNI,
        fmd1.getRelationType(clr));
    assertEquals("Department.projects jointable name is incorrect", "JPA_AN_DEPT_PROJECTS", fmd1.getTable());
    JoinMetaData joinmd = fmd1.getJoinMetaData();
    assertNotNull("Department.projects has no join table!", joinmd);
    assertNotNull("Department.projects has incorrect join columns", joinmd.getColumnMetaData());
    assertEquals("Department.projects has incorrect number of join columns",
        2, joinmd.getColumnMetaData().length);
    assertEquals("Department.projects has incorrect join column name",
        "DEPT_ID", joinmd.getColumnMetaData()[0].getName());
    assertEquals("Department.projects has incorrect join column name",
        "DEPT_ID_STRING", joinmd.getColumnMetaData()[1].getName());
    ElementMetaData elemmd = fmd1.getElementMetaData();
    assertNotNull("Department.projects has no element column info but should", elemmd);
    ColumnMetaData[] colmds = elemmd.getColumnMetaData();
    assertNotNull("Department.projects has incorrect element columns", colmds);
    assertEquals("Department.projects has incorrect number of element columns", 1, colmds.length);
    assertEquals("Department.projects has incorrect element column name", "PROJECT_ID", colmds[0].getName());
}
/**
* Test of JPA 1-N unidir FK relation.
* Really is 1-N uni join since JPA doesnt support 1-N uni FK
*/
/*public void testOneToManyUniFK()
{
NucleusContext nucleusCtx = new NucleusContext(new PersistenceConfiguration(){});
nucleusCtx.setApi("JPA");
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
// owner side
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Site.class.getName(), clr);
AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("offices");
assertNotNull("Site.offices is null!", fmd1);
assertEquals("Site.offices mapped-by is incorrect", fmd1.getMappedBy(), null);
assertEquals("Site.offices relationType is incorrect",
fmd1.getRelationType(clr), Relation.ONE_TO_MANY_UNI);
assertEquals("Site.offices jointable name is incorrect", fmd1.getTable(), null);
assertNotNull("Site.offices should have join but doesnt", fmd1.getJoinMetaData());
ElementMetaData elemmd = fmd1.getElementMetaData();
assertNotNull("Site.offices has no element column info but should", elemmd);
ColumnMetaData[] colmds = elemmd.getColumnMetaData();
assertNotNull("Site.offices has incorrect element columns", colmds);
assertEquals("Site.offices has incorrect number of element columns", colmds.length, 1);
assertEquals("Site.offices has incorrect element column name", colmds[0].getName(), "SITE_ID");
}*/
/**
 * Test of JPA 1-N bidir join relation.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testOneToManyBiJoin()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // owner side
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Manager.class.getName(), clr);
    assertEquals("Manager has incorrect table name", "JPA_AN_MANAGER", cmd1.getTable());
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("subordinates");
    assertNotNull("Manager.subordinates is null!", fmd1);
    assertEquals("Manager.subordinates mapped-by is incorrect", "manager", fmd1.getMappedBy());
    assertEquals("Manager.subordinates relationType is incorrect",
        RelationType.ONE_TO_MANY_BI, fmd1.getRelationType(clr));
    assertEquals("Manager.subordinates jointable name is incorrect", "JPA_AN_MGR_EMPLOYEES", fmd1.getTable());

    // non-owner side
    ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Employee.class.getName(), clr);
    assertEquals("Employee has incorrect table name", "JPA_AN_EMPLOYEE", cmd2.getTable());
    AbstractMemberMetaData fmd2 = cmd2.getMetaDataForMember("manager");
    assertNotNull("Employee.manager is null!", fmd2);
    assertEquals("Employee.manager mapped-by is incorrect", null, fmd2.getMappedBy());
    assertEquals("Employee.manager relationType is incorrect",
        RelationType.MANY_TO_ONE_BI, fmd2.getRelationType(clr));
    assertEquals("Employee.manager jointable name is incorrect", null, fmd2.getTable());

    // join-table
    JoinMetaData joinmd = fmd1.getJoinMetaData();
    assertNotNull("Manager.subordinates has no join table!", joinmd);
    assertNotNull("Manager.subordinates has incorrect join columns", joinmd.getColumnMetaData());
    assertEquals("Manager.subordinates has incorrect number of join columns", 1, joinmd.getColumnMetaData().length);
    assertEquals("Manager.subordinates has incorrect owner join column name",
        "MGR_ID", joinmd.getColumnMetaData()[0].getName());
    ElementMetaData elemmd = fmd1.getElementMetaData();
    assertNotNull("Manager.subordinates has no element column info but should", elemmd);
    assertNotNull("Manager.subordinates has incorrect element columns", elemmd.getColumnMetaData());
    assertEquals("Manager.subordinates has incorrect number of element columns", 1, elemmd.getColumnMetaData().length);
    assertEquals("Manager.subordinates has incorrect element join column name",
        "EMP_ID", elemmd.getColumnMetaData()[0].getName());
}
/**
 * Test of JPA M-N relation.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testManyToMany()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // owner side
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(PetroleumCustomer.class.getName(), clr);
    assertEquals("Customer has incorrect table name", "JPA_AN_PETROL_CUSTOMER", cmd1.getTable());
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("suppliers");
    assertNotNull("Customer.suppliers is null!", fmd1);
    assertEquals("Customer.suppliers mapped-by is incorrect", "customers", fmd1.getMappedBy());
    assertEquals("Customer.suppliers relationType is incorrect",
        RelationType.MANY_TO_MANY_BI, fmd1.getRelationType(clr));
    assertEquals("Customer.suppliers jointable name is incorrect", "JPA_AN_PETROL_CUST_SUPP", fmd1.getTable());

    // non-owner side
    ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(PetroleumSupplier.class.getName(), clr);
    assertEquals("Supplier has incorrect table name", "JPA_AN_PETROL_SUPPLIER", cmd2.getTable());
    AbstractMemberMetaData fmd2 = cmd2.getMetaDataForMember("customers");
    assertNotNull("Supplier.customers is null!", fmd2);
    assertEquals("Supplier.customers mapped-by is incorrect", null, fmd2.getMappedBy());
    assertEquals("Supplier.customers relationType is incorrect",
        RelationType.MANY_TO_MANY_BI, fmd2.getRelationType(clr));
    assertEquals("Supplier.customers jointable name is incorrect", null, fmd2.getTable());

    // join table info
    JoinMetaData joinmd = fmd1.getJoinMetaData();
    assertNotNull("Customer.suppliers has no join table!", joinmd);
    assertNotNull("Customer.suppliers has incorrect join columns", joinmd.getColumnMetaData());
    assertEquals("Customer.suppliers has incorrect number of join columns", 1, joinmd.getColumnMetaData().length);
    assertEquals("Customer.suppliers has incorrect owner join column name",
        "CUSTOMER_ID", joinmd.getColumnMetaData()[0].getName());
    ElementMetaData elemmd = fmd1.getElementMetaData();
    assertNotNull("Customer.suppliers has no element column info but should", elemmd);
    assertNotNull("Customer.suppliers has incorrect element columns", elemmd.getColumnMetaData());
    assertEquals("Customer.suppliers has incorrect number of element columns", 1, elemmd.getColumnMetaData().length);
    assertEquals("Customer.suppliers has incorrect element join column name",
        "SUPPLIER_ID", elemmd.getColumnMetaData()[0].getName());
}
/**
 * Test of JPA 1-N unidir Map relation.
 * Fixed: the member under test belongs to Person, but all assertion messages
 * said "Department.phoneNumbers" — corrected to "Person.phoneNumbers".
 * Also reordered assertEquals arguments to expected-first (JUnit convention).
 */
public void testOneToManyUniMapFK()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    // owner side
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Person.class.getName(), clr);
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("phoneNumbers");
    assertNotNull("Person.phoneNumbers is null!", fmd1);
    assertEquals("Person.phoneNumbers mapped-by is incorrect", null, fmd1.getMappedBy());
    assertEquals("Person.phoneNumbers relationType is incorrect",
        RelationType.ONE_TO_MANY_UNI, fmd1.getRelationType(clr));
    assertEquals("Person.phoneNumbers jointable name is incorrect", null, fmd1.getTable());
    MapMetaData mmd = fmd1.getMap();
    assertNotNull("Person.phoneNumbers has no Map metadata!", mmd);
    KeyMetaData keymd = fmd1.getKeyMetaData();
    assertNotNull("Person.phoneNumbers has no Key metadata!", keymd);
    assertEquals("Person.phoneNumbers has incorrect key mapped-by", "name", keymd.getMappedBy());
}
/**
 * Test of basic JPA @GeneratedValue.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testGeneratedValue()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    // Retrieve the metadata from the MetaDataManager (populates and initialises everything)
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Account.class.getName(), clr);
    AbstractMemberMetaData fmd1 = cmd1.getMetaDataForMember("id");
    assertNotNull("Account has no id field!", fmd1);
    assertEquals("Account has incorrect value strategy", IdentityStrategy.INCREMENT, fmd1.getValueStrategy());
}
/**
 * Test of basic JPA @TableGenerator.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testTableGenerator()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Employee.class.getName(), clr);
    PackageMetaData pmd = cmd1.getPackageMetaData();
    assertEquals("Number of TableGenerators registered for Employee class is wrong", 1, pmd.getNoOfTableGenerators());
    TableGeneratorMetaData tgmd = pmd.getTableGenerators()[0];
    assertEquals("TableGenerator has incorrect name", "EmployeeGenerator", tgmd.getName());
    assertEquals("TableGenerator has incorrect table", "ID_TABLE", tgmd.getTableName());
    assertEquals("TableGenerator has incorrect pk column name", "TYPE", tgmd.getPKColumnName());
    assertEquals("TableGenerator has incorrect value column name", "LATEST_VALUE", tgmd.getValueColumnName());
    assertEquals("TableGenerator has incorrect pk column value", "EMPLOYEE", tgmd.getPKColumnValue());
    assertEquals("TableGenerator has incorrect initial value", 0, tgmd.getInitialValue());
    assertEquals("TableGenerator has incorrect allocation size", 50, tgmd.getAllocationSize());
}
/**
 * Test of basic JPA @SequenceGenerator.
 * Fixed: assertEquals arguments reordered to expected-first (JUnit convention).
 */
public void testSequenceGenerator()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
    metaDataMgr.loadPersistenceUnit(pumd, null);

    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Department.class.getName(), clr);
    PackageMetaData pmd = cmd1.getPackageMetaData();
    assertEquals("Number of Sequences registered for Department class is wrong", 1, pmd.getNoOfSequences());
    SequenceMetaData seqmd = pmd.getSequences()[0];
    assertEquals("SequenceGenerator has incorrect name", "DepartmentGenerator", seqmd.getName());
    assertEquals("SequenceGenerator has incorrect sequence name", "DEPT_SEQ", seqmd.getDatastoreSequence());
    assertEquals("SequenceGenerator has incorrect initial value", 1, seqmd.getInitialValue());
    assertEquals("SequenceGenerator has incorrect allocation size", 50, seqmd.getAllocationSize());
}
/**
 * Test of basic JPA @EmbeddedId.
 */
public void testEmbeddedId()
{
    NucleusContext context = new PersistenceNucleusContextImpl("JPA", null);
    MetaDataManager mgr = new JPAMetaDataManager(context);
    // Retrieving via the MetaDataManager populates and initialises the metadata.
    ClassLoaderResolver resolver = new ClassLoaderResolverImpl();
    ClassMetaData deptMeta = (ClassMetaData)mgr.getMetaDataForClass(Department.class.getName(), resolver);
    // Department declares a single @EmbeddedId primary-key member.
    assertEquals(1, deptMeta.getNoOfPrimaryKeyMembers());
}
/**
 * Test of JPA @Embeddable.
 */
public void testEmbeddable()
{
    NucleusContext context = new PersistenceNucleusContextImpl("JPA", null);
    MetaDataManager mgr = new JPAMetaDataManager(context);
    // Retrieving via the MetaDataManager populates and initialises the metadata.
    ClassLoaderResolver resolver = new ClassLoaderResolverImpl();
    // The embeddable PK class must itself resolve to class metadata.
    ClassMetaData pkMeta = (ClassMetaData)mgr.getMetaDataForClass(DepartmentPK.class.getName(), resolver);
    assertNotNull(pkMeta);
}
/**
 * Test of JPA Byte[] is embedded by default.
 */
public void testByteArrayEmbeddedByDefault()
{
    NucleusContext context = new PersistenceNucleusContextImpl("JPA", null);
    MetaDataManager mgr = new JPAMetaDataManager(context);
    // Retrieving via the MetaDataManager populates and initialises the metadata.
    ClassLoaderResolver resolver = new ClassLoaderResolverImpl();
    ClassMetaData arrayMeta = (ClassMetaData)mgr.getMetaDataForClass(ByteArray.class.getName(), resolver);
    // Byte[] members should default to embedded storage.
    assertTrue(arrayMeta.getMetaDataForMember("array1").isEmbedded());
}
/**
 * Test of JPA column length.
 * Fixed: the first two assertEquals calls passed the actual value first;
 * reordered to expected-first to match the final assertion and JUnit convention.
 */
public void testColumnLength()
{
    NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
    MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
    // Retrieve the metadata from the MetaDataManager (populates and initialises everything)
    ClassLoaderResolver clr = new ClassLoaderResolverImpl();
    ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Printer.class.getName(), clr);
    AbstractMemberMetaData fmd = cmd1.getMetaDataForMember("make");
    assertEquals(1, fmd.getColumnMetaData().length);
    assertEquals("MAKE", fmd.getColumnMetaData()[0].getName());
    assertEquals(40, fmd.getColumnMetaData()[0].getLength().intValue());
}
/**
* Test of EventListeners
*/
public void testEventListeners()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
metaDataMgr.loadPersistenceUnit(pumd, null);
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(WebSite.class.getName(), clr);
// Example callbacks
EventListenerMetaData elmd = cmd1.getListenerForClass(cmd1.getFullClassName());
assertNotNull("Site didnt have its own class registered as an EventListener!", elmd);
assertEquals("Site EventListener has incorrect method for prePersist callback",
elmd.getClassName() + ".prePersist", elmd.getMethodNameForCallbackClass(PrePersist.class.getName()));
assertEquals("Site EventListener has incorrect method for postPersist callback",
elmd.getClassName() + ".postPersist", elmd.getMethodNameForCallbackClass(PostPersist.class.getName()));
assertEquals("Site EventListener has incorrect method for postPersist callback",
elmd.getClassName() + ".load", elmd.getMethodNameForCallbackClass(PostLoad.class.getName()));
assertNull(elmd.getMethodNameForCallbackClass(PreRemove.class.getName()));
// Example listener
elmd = cmd1.getListenerForClass(MyListener.class.getName());
assertNotNull("Site didnt have MyListener registered as an EventListener!", elmd);
assertEquals("Site EventListener has incorrect method for prePersist callback",
elmd.getClassName() + ".register", elmd.getMethodNameForCallbackClass(PostPersist.class.getName()));
assertEquals("Site EventListener has incorrect method for postPersist callback",
elmd.getClassName() + ".deregister", elmd.getMethodNameForCallbackClass(PreRemove.class.getName()));
assertNull(elmd.getMethodNameForCallbackClass(PrePersist.class.getName()));
}
/**
* Test of MappedSuperclass
*/
public void testMappedSuperclass()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
metaDataMgr.loadPersistenceUnit(pumd, null);
// AbstractSimpleBase
ClassMetaData cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(AbstractSimpleBase.class.getName(), clr);
assertNotNull("No MetaData found for AbstractSimpleBase yet is MappedSuperclass", cmd);
assertNotNull("No Inheritance info found for AbstractSimpleBase", cmd.getInheritanceMetaData());
assertEquals("Inheritance for AbstractSimpleBase is incorrect", "subclass-table",
cmd.getInheritanceMetaData().getStrategy().toString());
AbstractMemberMetaData fmd = cmd.getMetaDataForMember("id");
assertNotNull("No field info found for AbstractSimpleBase.id", fmd);
assertNotNull("No column info found for AbstractSimpleBase.id", fmd.getColumnMetaData());
assertEquals("Column name for AbstractSimpleBase.id is wrong", "ID", fmd.getColumnMetaData()[0].getName());
fmd = cmd.getMetaDataForMember("baseField");
assertNotNull("No field info found for AbstractSimpleBase.baseField", fmd);
assertNotNull("No column info found for AbstractSimpleBase.baseField", fmd.getColumnMetaData());
assertEquals("Column name for Product.baseField is wrong", "BASE_FIELD", fmd.getColumnMetaData()[0].getName());
// ConcreteSimpleSub1
cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(ConcreteSimpleSub1.class.getName(), clr);
assertNotNull("No MetaData found for ConcreteSimpleSub1 yet is Entity", cmd);
assertNotNull("No Inheritance info found for ConcreteSimpleSub1", cmd.getInheritanceMetaData());
assertEquals("Inheritance for ConcreteSimpleSub1 is incorrect", "new-table",
cmd.getInheritanceMetaData().getStrategy().toString());
fmd = cmd.getOverriddenMember("baseField");
assertNotNull("No overridden field info found for ConcreteSimpleSub1.baseField", fmd);
assertNotNull("No column info found for ConcreteSimpleSub1.baseField", fmd.getColumnMetaData());
assertEquals("Column name for ConcreteSimpleSub1.baseField is wrong",
"BASE_FIELD_OR", fmd.getColumnMetaData()[0].getName());
fmd = cmd.getMetaDataForMember("sub1Field");
assertNotNull("No field info found for ConcreteSimpleSub1.sub1Field", fmd);
assertNotNull("No column info found for ConcreteSimpleSub1.sub1Field", fmd.getColumnMetaData());
assertEquals("Column name for ConcreteSimpleSub1.sub1Field is wrong",
"SUB1_FIELD", fmd.getColumnMetaData()[0].getName());
// ConcreteSimpleSub2
cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(ConcreteSimpleSub2.class.getName(), clr);
assertNotNull("No MetaData found for ConcreteSimpleSub2 yet is Entity", cmd);
assertNotNull("No Inheritance info found for ConcreteSimpleSub2", cmd.getInheritanceMetaData());
assertEquals("Inheritance for ConcreteSimpleSub2 is incorrect", "new-table",
cmd.getInheritanceMetaData().getStrategy().toString());
fmd = cmd.getOverriddenMember("baseField");
assertNull("Overridden field info found for ConcreteSimpleSub2.baseField!", fmd);
fmd = cmd.getMetaDataForMember("sub2Field");
assertNotNull("No overridden field info found for ConcreteSimpleSub2.sub2Field", fmd);
assertNotNull("No column info found for ConcreteSimpleSub2.sub2Field", fmd.getColumnMetaData());
assertEquals("Column name for ConcreteSimpleSub2.sub2Field is wrong",
"SUB2_FIELD", fmd.getColumnMetaData()[0].getName());
}
/**
* Test of JPA @NamedQuery, @NamedNativeQuery.
*/
public void testNamedQuery()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
metaDataMgr.loadPersistenceUnit(pumd, null);
ClassMetaData cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(LoginAccount.class.getName(), clr);
QueryMetaData[] qmds = cmd.getQueries();
assertNotNull("LoginAccount has no queries!", qmds);
assertEquals("LoginAccount has incorrect number of queries", 2, qmds.length);
QueryMetaData jpqlQuery = null;
QueryMetaData sqlQuery = null;
if (qmds[0].getLanguage().equals(QueryLanguage.JPQL.toString()))
{
jpqlQuery = qmds[0];
}
else if (qmds[1].getLanguage().equals(QueryLanguage.JPQL.toString()))
{
jpqlQuery = qmds[1];
}
if (qmds[0].getLanguage().equals(QueryLanguage.SQL.toString()))
{
sqlQuery = qmds[0];
}
else if (qmds[1].getLanguage().equals(QueryLanguage.SQL.toString()))
{
sqlQuery = qmds[1];
}
if (jpqlQuery == null)
{
fail("No JPQL Query was registered for LoginAccount");
}
if (sqlQuery == null)
{
fail("No SQL Query was registered for LoginAccount");
}
assertEquals("LoginAccount JPQL has incorrect query name", "LoginForJohnSmith", jpqlQuery.getName());
assertEquals("LoginAccount JPQL has incorrect query",
"SELECT a FROM LoginAccount a WHERE a.firstName='John' AND a.lastName='Smith'", jpqlQuery.getQuery());
assertEquals("LoginAccount SQL has incorrect query name", "LoginForJohn", sqlQuery.getName());
assertEquals("LoginAccount SQL has incorrect query",
"SELECT * FROM JPA_AN_LOGIN WHERE FIRSTNAME = 'John'", sqlQuery.getQuery());
}
    /**
     * Test of JPA @SqlResultSetMapping : one mapping returning two entities,
     * one mapping returning two scalar columns.
     */
    public void testSqlResultSetMapping()
    {
        NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
        ClassLoaderResolver clr = nucleusCtx.getClassLoaderResolver(null);
        MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
        // Load the whole persistence-unit so every mapping is registered
        PersistenceUnitMetaData pumd = metaDataMgr.getMetaDataForPersistenceUnit("JPATest");
        metaDataMgr.loadPersistenceUnit(pumd, null);
        ClassMetaData cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(LoginAccount.class.getName(), clr);
        QueryResultMetaData[] queryResultMappings = cmd.getQueryResultMetaData();
        assertNotNull("LoginAccount has no QueryResultMetaData!", queryResultMappings);
        assertEquals("LoginAccount has incorrect number of query result mappings", 4, queryResultMappings.length);
        // Example 1 : Returning 2 entities
        QueryResultMetaData qrmd = null;
        for (int i=0;i<queryResultMappings.length;i++)
        {
            QueryResultMetaData md = queryResultMappings[i];
            if (md.getName().equals("AN_LOGIN_PLUS_ACCOUNT"))
            {
                qrmd = md;
                break;
            }
        }
        if (qrmd == null)
        {
            fail("SQL ResultSet mapping AN_LOGIN_PLUS_ACCOUNT is not present!");
        }
        // An entity-only mapping carries no scalar columns
        String[] scalarCols = qrmd.getScalarColumns();
        assertNull("LoginAccount sql mapping has incorrect scalar cols", scalarCols);
        PersistentTypeMapping[] sqlMappingEntities = qrmd.getPersistentTypeMappings();
        assertNotNull("LoginAccount sql mapping has incorrect entities", sqlMappingEntities);
        assertEquals("LoginAccount sql mapping has incorrect number of entities", 2, sqlMappingEntities.length);
        // LoginAccount
        assertEquals("LoginAccount sql mapping entity 0 has incorrect class",
            LoginAccount.class.getName(), sqlMappingEntities[0].getClassName());
        assertNull("LoginAccount sql mapping entity 0 has incorrect discriminator",
            sqlMappingEntities[0].getDiscriminatorColumn());
        // Login
        assertEquals("LoginAccount sql mapping entity 1 has incorrect class",
            Login.class.getName(), sqlMappingEntities[1].getClassName());
        assertNull("LoginAccount sql mapping entity 1 has incorrect discriminator",
            sqlMappingEntities[1].getDiscriminatorColumn());
        // Example 2 : Returning 2 scalars
        qrmd = null;
        for (int i=0;i<queryResultMappings.length;i++)
        {
            QueryResultMetaData md = queryResultMappings[i];
            if (md.getName().equals("AN_ACCOUNT_NAMES"))
            {
                qrmd = md;
                break;
            }
        }
        if (qrmd == null)
        {
            fail("SQL ResultSet mapping AN_ACCOUNT_NAMES is not present!");
        }
        // A scalar-only mapping exposes column names in declaration order and no entities
        scalarCols = qrmd.getScalarColumns();
        assertNotNull("LoginAccount sql mapping has incorrect scalar cols", scalarCols);
        assertEquals("LoginAccount sql mapping has incorrect column name", "FIRSTNAME", scalarCols[0]);
        assertEquals("LoginAccount sql mapping has incorrect column name", "LASTNAME", scalarCols[1]);
        sqlMappingEntities = qrmd.getPersistentTypeMappings();
        assertNull("LoginAccount sql mapping has incorrect entities", sqlMappingEntities);
    }
/**
* Test for use of annotations for secondary tables, in particular @SecondaryTable.
* Uses Printer class, storing some fields in table "PRINTER" and some in "PRINTER_TONER".
*/
public void testSecondaryTable()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd = (ClassMetaData)metaDataMgr.getMetaDataForClass(Printer.class.getName(), clr);
assertEquals("detachable is wrong", cmd.isDetachable(), true);
assertEquals("identity-type is wrong", cmd.getIdentityType(), IdentityType.APPLICATION);
assertEquals("embedded-only is wrong", cmd.isEmbeddedOnly(), false);
assertEquals("requires-extent is wrong", cmd.isRequiresExtent(), true);
assertNull("catalog is wrong", cmd.getCatalog());
assertNull("schema is wrong", cmd.getSchema());
assertEquals("table is wrong", cmd.getTable(), "JPA_AN_PRINTER");
assertEquals("has incorrect number of persistent fields", cmd.getNoOfManagedMembers(), 5);
// Check JoinMetaData at class-level
JoinMetaData[] joinmds = cmd.getJoinMetaData();
assertNotNull("JoinMetaData at class-level is null!", joinmds);
assertEquals("Number of JoinMetaData at class-level is wrong!", joinmds.length, 1);
assertEquals("Table of JoinMetaData at class-level is wrong", "JPA_AN_PRINTER_TONER", joinmds[0].getTable());
ColumnMetaData[] joinColmds = joinmds[0].getColumnMetaData();
assertEquals("Number of columns with MetaData in secondary table is incorrect", 1, joinColmds.length);
assertEquals("Column of JoinMetaData at class-level is wrong", joinColmds[0].getName(), "PRINTER_ID");
// "model" (stored in primary-table)
AbstractMemberMetaData fmd = cmd.getMetaDataForMember("model");
assertNotNull("Doesnt have required field", fmd);
assertNull("Field 'model' has non-null table!", fmd.getTable());
// "tonerModel" (stored in secondary-table)
fmd = cmd.getMetaDataForMember("tonerModel");
assertNotNull("Doesnt have required field", fmd);
assertEquals("Field 'tonerModel' has non-null table!", fmd.getTable(), "JPA_AN_PRINTER_TONER");
}
/**
* Test of JPA enumerated JDBC type.
*/
public void testEnumeratedJDBCType()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(EnumHolder.class.getName(), clr);
AbstractMemberMetaData mmd1 = cmd1.getMetaDataForMember("colour1");
assertEquals("INTEGER", mmd1.getColumnMetaData()[0].getJdbcType());
assertEquals(FieldPersistenceModifier.PERSISTENT, mmd1.getPersistenceModifier());
AbstractMemberMetaData mmd2 = cmd1.getMetaDataForMember("colour2");
assertEquals("VARCHAR", mmd2.getColumnMetaData()[0].getJdbcType());
assertEquals(FieldPersistenceModifier.PERSISTENT, mmd2.getPersistenceModifier());
}
/**
* Test of string length default to JPA default 255.
*/
public void testStringLength()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Account.class.getName(), clr);
AbstractMemberMetaData mmd1 = cmd1.getMetaDataForMember("username");
assertEquals(255, mmd1.getColumnMetaData()[0].getLength().intValue());
}
/**
* Test of char length default to 1 with JPA.
*/
public void testCharDefaultTo1Length()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(TypeHolder.class.getName(), clr);
assertEquals(1, cmd1.getMetaDataForMember("char1").getColumnMetaData()[0].getLength().intValue());
}
/**
* Test of @OrderBy.
*/
public void testOrderBy()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(UserGroup.class.getName(), clr);
OrderMetaData omd = cmd1.getMetaDataForMember("members").getOrderMetaData();
assertNotNull("UserGroup.members has no OrderMetaData!", omd);
FieldOrder[] orderTerms = omd.getFieldOrders();
assertFalse("UserGroup.members is not marked as using an ordered list", omd.isIndexedList());
assertNotNull("UserGroup.members has null field ordering info", orderTerms);
assertEquals("UserGroup.members has incorrect number of field ordering terms", orderTerms.length, 1);
assertEquals("UserGroup.members has incorrect field ordering field-name", orderTerms[0].getFieldName(), "name");
assertTrue("UserGroup.members has incorrect field ordering direction", orderTerms[0].isForward());
}
    /**
     * Test of JPA @IdClass with pk using accessors.
     */
    public void testIdClassAccessors()
    {
        NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
        MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
        ClassLoaderResolver clr = new ClassLoaderResolverImpl();
        ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(IdClassAccessors.class.getName(), clr);
        assertEquals(1, cmd1.getNoOfPrimaryKeyMembers());
        // "free" must be a managed member of the class
        assertTrue(cmd1.getAbsolutePositionOfMember("free")>=0);
        // Column name of "free" is overridden to "FFFF"
        assertEquals("FFFF",cmd1.getMetaDataForManagedMemberAtAbsolutePosition(cmd1.getRelativePositionOfMember("free")).getColumnMetaData()[0].getName());
    }
/**
* Test of persistent properties using annotations.
*/
/*public void testPersistentProperties()
{
NucleusContext nucleusCtx = new NucleusContext("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
// Retrieve the metadata from the MetaDataManager (populates and initialises everything)
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(JPAGetter.class.getName(), clr);
assertEquals(1, cmd1.getNoOfPrimaryKeyMembers());
}*/
/**
* Test of column name for property instead of field
*/
/*public void testPropertyColumName()
{
NucleusContext nucleusCtx = new NucleusContext("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
// Retrieve the metadata from the MetaDataManager (populates and initialises everything)
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Employee.class.getName(), clr);
// it is valid according JPA to have property accessor instead of field accessors. property accessors are persistent while field not.
assertNotNull("Employee.lastName has no field information", cmd1.getMetaDataForMember("lastName"));
assertNotNull("Employee.lastName has no column information", cmd1.getMetaDataForMember("lastName").getColumnMetaData());
assertEquals("Employee.lastName has incorrect number of columns",
1, cmd1.getMetaDataForMember("lastName").getColumnMetaData().length);
assertEquals("Employee.last has incorrect column spec",
"LASTNAME", cmd1.getMetaDataForMember("lastName").getColumnMetaData()[0].getName());
ClassMetaData cmd2 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Person.class.getName(), clr);
// it is valid according JPA to have property accessor instead of field accessors. property accessors are persistent while field not.
assertNotNull(cmd2.getMetaDataForMember("age"));
assertNotNull("AGE_COL",cmd2.getMetaDataForMember("age").getColumnMetaData()[0].getName());
assertNotNull(cmd2.getMetaDataForMember("maidenName"));
assertEquals(FieldPersistenceModifier.NONE,cmd2.getMetaDataForMember("_maidenName").getPersistenceModifier());
assertEquals(FieldPersistenceModifier.PERSISTENT,cmd2.getMetaDataForMember("maidenName").getPersistenceModifier());
}*/
/**
* Test of JPA @MapKeyColumn.
*/
public void testMapKeyColumn()
{
NucleusContext nucleusCtx = new PersistenceNucleusContextImpl("JPA", null);
MetaDataManager metaDataMgr = new JPAMetaDataManager(nucleusCtx);
// Retrieve the metadata from the MetaDataManager (populates and initialises everything)
ClassLoaderResolver clr = new ClassLoaderResolverImpl();
ClassMetaData cmd1 = (ClassMetaData)metaDataMgr.getMetaDataForClass(Person.class.getName(), clr);
assertEquals("phoneNumbers_key1",cmd1.getMetaDataForMember("phoneNumbers").getKeyMetaData().getColumnMetaData()[0].getName());
}
}
| |
// ========================================================================
// Copyright (c) 2004-2009 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
package org.eclipse.jetty.http;
import org.eclipse.jetty.io.Buffer;
import org.eclipse.jetty.io.Buffers;
import org.eclipse.jetty.io.ThreadLocalBuffers;
import org.eclipse.jetty.util.component.AbstractLifeCycle;
/* ------------------------------------------------------------ */
/** Abstract buffer pool.
 * Simple unbounded pool of buffers for header, request and response sizes.
 * Concrete subclasses supply the buffer factories and the type checks used
 * to recognise header buffers.
 */
public abstract class HttpBuffers extends AbstractLifeCycle
{
    // Default pool sizes (bytes) applied by the constructor.
    private static final int DEFAULT_REQUEST_BUFFER_SIZE = 8 * 1024;
    private static final int DEFAULT_REQUEST_HEADER_SIZE = 6 * 1024;
    private static final int DEFAULT_RESPONSE_BUFFER_SIZE = 12 * 1024;
    private static final int DEFAULT_RESPONSE_HEADER_SIZE = 6 * 1024;

    /** Request-side pool; creation and recognition delegate to the subclass. */
    private final ThreadLocalBuffers _requestBuffers = new ThreadLocalBuffers()
    {
        @Override
        protected Buffer newBuffer(int size) { return newRequestBuffer(size); }

        @Override
        protected Buffer newHeader(int size) { return newRequestHeader(size); }

        @Override
        protected boolean isHeader(Buffer buffer) { return isRequestHeader(buffer); }
    };

    /** Response-side pool; creation and recognition delegate to the subclass. */
    private final ThreadLocalBuffers _responseBuffers = new ThreadLocalBuffers()
    {
        @Override
        protected Buffer newBuffer(int size) { return newResponseBuffer(size); }

        @Override
        protected Buffer newHeader(int size) { return newResponseHeader(size); }

        @Override
        protected boolean isHeader(Buffer buffer) { return isResponseHeader(buffer); }
    };

    public HttpBuffers()
    {
        super();
        _requestBuffers.setBufferSize(DEFAULT_REQUEST_BUFFER_SIZE);
        _requestBuffers.setHeaderSize(DEFAULT_REQUEST_HEADER_SIZE);
        _responseBuffers.setBufferSize(DEFAULT_RESPONSE_BUFFER_SIZE);
        _responseBuffers.setHeaderSize(DEFAULT_RESPONSE_HEADER_SIZE);
    }

    @Override
    protected void doStart()
        throws Exception
    {
        super.doStart();
    }

    /**
     * @return Returns the headerBufferSize.
     * @deprecated use {@link #getRequestHeaderSize()} or {@link #getResponseHeaderSize()}
     */
    @Deprecated
    public int getHeaderBufferSize()
    {
        return _requestBuffers.getHeaderSize();
    }

    /** @return the pool used for request buffers. */
    public Buffers getRequestBuffers()
    {
        return _requestBuffers;
    }

    /** @return the request buffer size in bytes. */
    public int getRequestBufferSize()
    {
        return _requestBuffers.getBufferSize();
    }

    /** @return the request header size in bytes. */
    public int getRequestHeaderSize()
    {
        return _requestBuffers.getHeaderSize();
    }

    /** @return the pool used for response buffers. */
    public Buffers getResponseBuffers()
    {
        return _responseBuffers;
    }

    /** @return the response buffer size in bytes. */
    public int getResponseBufferSize()
    {
        return _responseBuffers.getBufferSize();
    }

    /** @return the response header size in bytes. */
    public int getResponseHeaderSize()
    {
        return _responseBuffers.getHeaderSize();
    }

    protected abstract Buffer newRequestBuffer(int size);

    protected abstract Buffer newRequestHeader(int size);

    protected abstract Buffer newResponseBuffer(int size);

    protected abstract Buffer newResponseHeader(int size);

    /* ------------------------------------------------------------ */
    /**
     * @param buffer
     * @return True if the buffer is the correct type for a request header buffer
     */
    protected abstract boolean isRequestHeader(Buffer buffer);

    /* ------------------------------------------------------------ */
    /**
     * @param buffer
     * @return True if the buffer is the correct type for a response header buffer
     */
    protected abstract boolean isResponseHeader(Buffer buffer);

    /**
     * @param headerBufferSize The headerBufferSize to set.
     * @deprecated use {@link #setRequestHeaderSize(int)} and {@link #setResponseHeaderSize(int)}
     */
    @Deprecated
    public void setHeaderBufferSize( int headerBufferSize )
    {
        setRequestHeaderSize(headerBufferSize);
        setResponseHeaderSize(headerBufferSize);
    }

    /**
     * @param size The requestBufferSize to set.
     */
    public void setRequestBufferSize( int size )
    {
        checkNotStarted();
        _requestBuffers.setBufferSize(size);
    }

    /**
     * @param size The request header size in bytes.
     */
    public void setRequestHeaderSize( int size )
    {
        checkNotStarted();
        _requestBuffers.setHeaderSize(size);
    }

    /**
     * @param size The response buffer size in bytes.
     */
    public void setResponseBufferSize( int size )
    {
        checkNotStarted();
        _responseBuffers.setBufferSize(size);
    }

    /**
     * @param size The response header size in bytes.
     */
    public void setResponseHeaderSize( int size )
    {
        checkNotStarted();
        _responseBuffers.setHeaderSize(size);
    }

    /** Buffer sizes may only be changed before the component is started. */
    private void checkNotStarted()
    {
        if (isStarted())
            throw new IllegalStateException();
    }
}
| |
// Copyright (C) 2008 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.client.account;
import static com.google.gerrit.reviewdb.client.AccountGeneralPreferences.DEFAULT_PAGESIZE;
import static com.google.gerrit.reviewdb.client.AccountGeneralPreferences.PAGESIZE_CHOICES;
import com.google.gerrit.client.Gerrit;
import com.google.gerrit.client.StringListPanel;
import com.google.gerrit.client.config.ConfigServerApi;
import com.google.gerrit.client.extensions.TopMenuItem;
import com.google.gerrit.client.rpc.GerritCallback;
import com.google.gerrit.client.rpc.Natives;
import com.google.gerrit.client.rpc.ScreenLoadCallback;
import com.google.gerrit.client.ui.OnEditEnabler;
import com.google.gerrit.reviewdb.client.AccountGeneralPreferences;
import com.google.gerrit.reviewdb.client.AccountGeneralPreferences.ReviewCategoryStrategy;
import com.google.gwt.core.client.JsArray;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.i18n.client.DateTimeFormat;
import com.google.gwt.i18n.client.LocaleInfo;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.FlowPanel;
import com.google.gwt.user.client.ui.Grid;
import com.google.gwt.user.client.ui.ListBox;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
public class MyPreferencesScreen extends SettingsScreen {
  // Boolean display preferences.
  private CheckBox showSiteHeader;
  private CheckBox useFlashClipboard;
  private CheckBox copySelfOnEmails;
  private CheckBox relativeDateInChangeTable;
  private CheckBox sizeBarInChangeTable;
  private CheckBox legacycidInChangeTable;
  private CheckBox muteCommonPathPrefixes;
  // Enumerated preferences rendered as drop-downs.
  private ListBox maximumPageSize;
  private ListBox dateFormat;
  private ListBox timeFormat;
  private ListBox reviewCategoryStrategy;
  private ListBox diffView;
  // Editor for the user's custom top-menu entries.
  private StringListPanel myMenus;
  // Disabled until a field is edited (see OnEditEnabler in onInitUI).
  private Button save;
  /** Builds the preferences form and wires edit listeners that enable Save. */
  @Override
  protected void onInitUI() {
    super.onInitUI();
    // Boolean preferences.
    showSiteHeader = new CheckBox(Util.C.showSiteHeader());
    useFlashClipboard = new CheckBox(Util.C.useFlashClipboard());
    copySelfOnEmails = new CheckBox(Util.C.copySelfOnEmails());
    // Page-size choices come from the fixed PAGESIZE_CHOICES list.
    maximumPageSize = new ListBox();
    for (final short v : PAGESIZE_CHOICES) {
      maximumPageSize.addItem(Util.M.rowsPerPage(v), String.valueOf(v));
    }
    // One entry per ReviewCategoryStrategy value.
    reviewCategoryStrategy = new ListBox();
    reviewCategoryStrategy.addItem(
        Util.C.messageShowInReviewCategoryNone(),
        AccountGeneralPreferences.ReviewCategoryStrategy.NONE.name());
    reviewCategoryStrategy.addItem(
        Util.C.messageShowInReviewCategoryName(),
        AccountGeneralPreferences.ReviewCategoryStrategy.NAME.name());
    reviewCategoryStrategy.addItem(
        Util.C.messageShowInReviewCategoryEmail(),
        AccountGeneralPreferences.ReviewCategoryStrategy.EMAIL.name());
    reviewCategoryStrategy.addItem(
        Util.C.messageShowInReviewCategoryUsername(),
        AccountGeneralPreferences.ReviewCategoryStrategy.USERNAME.name());
    reviewCategoryStrategy.addItem(
        Util.C.messageShowInReviewCategoryAbbrev(),
        AccountGeneralPreferences.ReviewCategoryStrategy.ABBREV.name());
    diffView = new ListBox();
    diffView.addItem(
        com.google.gerrit.client.changes.Util.C.sideBySide(),
        AccountGeneralPreferences.DiffView.SIDE_BY_SIDE.name());
    diffView.addItem(
        com.google.gerrit.client.changes.Util.C.unifiedDiff(),
        AccountGeneralPreferences.DiffView.UNIFIED_DIFF.name());
    // Date/time entries show "now" rendered in each candidate format.
    Date now = new Date();
    dateFormat = new ListBox();
    for (AccountGeneralPreferences.DateFormat fmt : AccountGeneralPreferences.DateFormat
        .values()) {
      StringBuilder r = new StringBuilder();
      r.append(DateTimeFormat.getFormat(fmt.getShortFormat()).format(now));
      r.append(" ; ");
      r.append(DateTimeFormat.getFormat(fmt.getLongFormat()).format(now));
      dateFormat.addItem(r.toString(), fmt.name());
    }
    timeFormat = new ListBox();
    for (AccountGeneralPreferences.TimeFormat fmt : AccountGeneralPreferences.TimeFormat
        .values()) {
      StringBuilder r = new StringBuilder();
      r.append(DateTimeFormat.getFormat(fmt.getFormat()).format(now));
      timeFormat.addItem(r.toString(), fmt.name());
    }
    // Label/field columns and date-before-time order are mirrored for RTL locales.
    FlowPanel dateTimePanel = new FlowPanel();
    final int labelIdx, fieldIdx;
    if (LocaleInfo.getCurrentLocale().isRTL()) {
      labelIdx = 1;
      fieldIdx = 0;
      dateTimePanel.add(timeFormat);
      dateTimePanel.add(dateFormat);
    } else {
      labelIdx = 0;
      fieldIdx = 1;
      dateTimePanel.add(dateFormat);
      dateTimePanel.add(timeFormat);
    }
    relativeDateInChangeTable = new CheckBox(Util.C.showRelativeDateInChangeTable());
    sizeBarInChangeTable = new CheckBox(Util.C.showSizeBarInChangeTable());
    legacycidInChangeTable = new CheckBox(Util.C.showLegacycidInChangeTable());
    muteCommonPathPrefixes = new CheckBox(Util.C.muteCommonPathPrefixes());
    // Lay the widgets out in a fixed 11-row grid (one row per preference).
    final Grid formGrid = new Grid(11, 2);
    int row = 0;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, showSiteHeader);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, useFlashClipboard);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, copySelfOnEmails);
    row++;
    formGrid.setText(row, labelIdx, Util.C.reviewCategoryLabel());
    formGrid.setWidget(row, fieldIdx, reviewCategoryStrategy);
    row++;
    formGrid.setText(row, labelIdx, Util.C.maximumPageSizeFieldLabel());
    formGrid.setWidget(row, fieldIdx, maximumPageSize);
    row++;
    formGrid.setText(row, labelIdx, Util.C.dateFormatLabel());
    formGrid.setWidget(row, fieldIdx, dateTimePanel);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, relativeDateInChangeTable);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, sizeBarInChangeTable);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, legacycidInChangeTable);
    row++;
    formGrid.setText(row, labelIdx, "");
    formGrid.setWidget(row, fieldIdx, muteCommonPathPrefixes);
    row++;
    formGrid.setText(row, labelIdx, Util.C.diffViewLabel());
    formGrid.setWidget(row, fieldIdx, diffView);
    add(formGrid);
    // Save stays disabled until one of the listened widgets changes.
    save = new Button(Util.C.buttonSaveChanges());
    save.setEnabled(false);
    save.addClickHandler(new ClickHandler() {
      @Override
      public void onClick(final ClickEvent event) {
        doSave();
      }
    });
    myMenus = new MyMenuPanel(save);
    add(myMenus);
    add(save);
    final OnEditEnabler e = new OnEditEnabler(save);
    e.listenTo(showSiteHeader);
    e.listenTo(useFlashClipboard);
    e.listenTo(copySelfOnEmails);
    e.listenTo(maximumPageSize);
    e.listenTo(dateFormat);
    e.listenTo(timeFormat);
    e.listenTo(relativeDateInChangeTable);
    e.listenTo(sizeBarInChangeTable);
    e.listenTo(legacycidInChangeTable);
    e.listenTo(muteCommonPathPrefixes);
    // NOTE(review): reviewCategoryStrategy is not registered with the enabler,
    // so changing only that list box does not enable Save - confirm intended.
    e.listenTo(diffView);
  }
  /** Fetches the caller's preferences from the server, then fills the form. */
  @Override
  protected void onLoad() {
    super.onLoad();
    AccountApi.self().view("preferences")
        .get(new ScreenLoadCallback<Preferences>(this) {
          @Override
          public void preDisplay(Preferences prefs) {
            display(prefs);
          }
        });
  }
  /** Enables or disables every editable widget on the form. */
  private void enable(final boolean on) {
    showSiteHeader.setEnabled(on);
    useFlashClipboard.setEnabled(on);
    copySelfOnEmails.setEnabled(on);
    maximumPageSize.setEnabled(on);
    dateFormat.setEnabled(on);
    timeFormat.setEnabled(on);
    relativeDateInChangeTable.setEnabled(on);
    sizeBarInChangeTable.setEnabled(on);
    legacycidInChangeTable.setEnabled(on);
    muteCommonPathPrefixes.setEnabled(on);
    reviewCategoryStrategy.setEnabled(on);
    diffView.setEnabled(on);
  }
  /** Copies server-supplied preference values into the form widgets. */
  private void display(Preferences p) {
    showSiteHeader.setValue(p.showSiteHeader());
    useFlashClipboard.setValue(p.useFlashClipboard());
    copySelfOnEmails.setValue(p.copySelfOnEmail());
    // List boxes fall back to the given default when the stored value is absent.
    setListBox(maximumPageSize, DEFAULT_PAGESIZE, p.changesPerPage());
    setListBox(dateFormat, AccountGeneralPreferences.DateFormat.STD, //
        p.dateFormat());
    setListBox(timeFormat, AccountGeneralPreferences.TimeFormat.HHMM_12, //
        p.timeFormat());
    relativeDateInChangeTable.setValue(p.relativeDateInChangeTable());
    sizeBarInChangeTable.setValue(p.sizeBarInChangeTable());
    legacycidInChangeTable.setValue(p.legacycidInChangeTable());
    muteCommonPathPrefixes.setValue(p.muteCommonPathPrefixes());
    setListBox(reviewCategoryStrategy,
        AccountGeneralPreferences.ReviewCategoryStrategy.NONE,
        p.reviewCategoryStrategy());
    setListBox(diffView,
        AccountGeneralPreferences.DiffView.SIDE_BY_SIDE,
        p.diffView());
    // Custom top-menu entries are edited in the StringListPanel.
    display(p.my());
  }
private void display(JsArray<TopMenuItem> items) {
List<List<String>> values = new ArrayList<>();
for (TopMenuItem item : Natives.asList(items)) {
values.add(Arrays.asList(item.getName(), item.getUrl()));
}
myMenus.display(values);
}
  /**
   * Selects {@code currentValue} in {@code f}, falling back to
   * {@code defaultValue} when it is not among the options.
   */
  private void setListBox(final ListBox f, final short defaultValue,
      final short currentValue) {
    setListBox(f, String.valueOf(defaultValue), String.valueOf(currentValue));
  }
private <T extends Enum<?>> void setListBox(final ListBox f,
final T defaultValue, final T currentValue) {
setListBox(f,
defaultValue != null ? defaultValue.name() : "",
currentValue != null ? currentValue.name() : "");
}
private void setListBox(final ListBox f, final String defaultValue,
final String currentValue) {
final int n = f.getItemCount();
for (int i = 0; i < n; i++) {
if (f.getValue(i).equals(currentValue)) {
f.setSelectedIndex(i);
return;
}
}
if (!currentValue.equals(defaultValue)) {
setListBox(f, defaultValue, defaultValue);
}
}
private short getListBox(final ListBox f, final short defaultValue) {
final int idx = f.getSelectedIndex();
if (0 <= idx) {
return Short.parseShort(f.getValue(idx));
}
return defaultValue;
}
private <T extends Enum<?>> T getListBox(final ListBox f,
final T defaultValue, T[] all) {
final int idx = f.getSelectedIndex();
if (0 <= idx) {
String v = f.getValue(idx);
if ("".equals(v)) {
return defaultValue;
}
for (T t : all) {
if (t.name().equals(v)) {
return t;
}
}
}
return defaultValue;
}
  /**
   * Collects the current widget state into an {@link AccountGeneralPreferences},
   * locks the form, and saves the preferences plus the custom menu entries via
   * the REST API. On success the saved copy is applied locally and re-displayed;
   * on failure the form is re-enabled so the user can retry.
   */
  private void doSave() {
    final AccountGeneralPreferences p = new AccountGeneralPreferences();
    p.setShowSiteHeader(showSiteHeader.getValue());
    p.setUseFlashClipboard(useFlashClipboard.getValue());
    p.setCopySelfOnEmails(copySelfOnEmails.getValue());
    p.setMaximumPageSize(getListBox(maximumPageSize, DEFAULT_PAGESIZE));
    p.setDateFormat(getListBox(dateFormat,
        AccountGeneralPreferences.DateFormat.STD,
        AccountGeneralPreferences.DateFormat.values()));
    p.setTimeFormat(getListBox(timeFormat,
        AccountGeneralPreferences.TimeFormat.HHMM_12,
        AccountGeneralPreferences.TimeFormat.values()));
    p.setRelativeDateInChangeTable(relativeDateInChangeTable.getValue());
    p.setSizeBarInChangeTable(sizeBarInChangeTable.getValue());
    p.setLegacycidInChangeTable(legacycidInChangeTable.getValue());
    p.setMuteCommonPathPrefixes(muteCommonPathPrefixes.getValue());
    p.setReviewCategoryStrategy(getListBox(reviewCategoryStrategy,
        ReviewCategoryStrategy.NONE,
        ReviewCategoryStrategy.values()));
    p.setDiffView(getListBox(diffView,
        AccountGeneralPreferences.DiffView.SIDE_BY_SIDE,
        AccountGeneralPreferences.DiffView.values()));
    // Lock the UI while the RPC is in flight.
    enable(false);
    save.setEnabled(false);
    // Serialize the editable menu table rows (name, URL) into menu items.
    List<TopMenuItem> items = new ArrayList<>();
    for (List<String> v : myMenus.getValues()) {
      items.add(TopMenuItem.create(v.get(0), v.get(1)));
    }
    AccountApi.self().view("preferences")
        .put(Preferences.create(p, items), new GerritCallback<Preferences>() {
          @Override
          public void onSuccess(Preferences prefs) {
            // Apply the newly saved preferences to the running UI.
            Gerrit.getUserAccount().setGeneralPreferences(p);
            Gerrit.applyUserPreferences();
            enable(true);
            display(prefs);
            Gerrit.refreshMenuBar();
          }

          @Override
          public void onFailure(Throwable caught) {
            // Unlock the form so the user can correct and retry.
            enable(true);
            save.setEnabled(true);
            super.onFailure(caught);
          }
        });
  }
  /**
   * Editable two-column (name, URL) table for the user's custom "My" menu, with
   * a reset button that reloads the server-side default entries.
   */
  private class MyMenuPanel extends StringListPanel {
    MyMenuPanel(Button save) {
      super(Util.C.myMenu(), Arrays.asList(Util.C.myMenuName(),
          Util.C.myMenuUrl()), save, false);

      setInfo(Util.C.myMenuInfo());
      Button resetButton = new Button(Util.C.myMenuReset());
      resetButton.addClickHandler(new ClickHandler() {
        @Override
        public void onClick(ClickEvent event) {
          // Ask the server for its default preferences and redraw the menu
          // table from them (the rest of the form is left untouched).
          ConfigServerApi.defaultPreferences(new GerritCallback<Preferences>() {
            @Override
            public void onSuccess(Preferences p) {
              MyPreferencesScreen.this.display(p.my());
              // NOTE(review): 'widget' appears to be inherited from
              // StringListPanel; presumably this re-enables the save button
              // after the reset — confirm against StringListPanel.
              widget.setEnabled(true);
            }
          });
        }
      });
      buttonPanel.add(resetButton);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.admin.remote;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.lang.reflect.AccessibleObject;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.ConcurrentModificationException;
import java.util.IdentityHashMap;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.apache.geode.InternalGemFireException;
import org.apache.geode.internal.admin.EntryValueNode;
/**
 * This class holds the metadata for a single object field in a value stored in the cache. They are
 * built during construction of {@link ObjectDetailsResponse} instances and returned to the console.
 * This class does not implement {@link org.apache.geode.DataSerializable} since that mechanism gets
 * confused by the often cyclical references between instances of this class.
 */
public class EntryValueNodeImpl implements EntryValueNode, Externalizable {

  /** Wrapper/String value for primitive nodes, or a toString() rendering for composites. */
  private Object primitiveVal;
  /** Display type name, e.g. "java.lang.String" or "array of int". */
  private String type;
  /** Field name, map key/value tag, or "[i]" array index this node describes. */
  private String name;
  /** True when {@link #primitiveVal} is the actual primitive/wrapper/String value. */
  private boolean primitive;
  /** Child nodes (array/collection elements or object fields); may be null for leaves. */
  private EntryValueNodeImpl[] fields;

  /**
   * Per-thread identity map of objects already visited during a single
   * {@link #createFromValueRoot} traversal. Guards against infinite recursion on cyclic object
   * graphs; identity (not equals) semantics are required so distinct-but-equal objects each get
   * their own node.
   */
  private static final ThreadLocal<IdentityHashMap<Object, EntryValueNodeImpl>> recursionSet =
      new ThreadLocal<>();

  /**
   * Builds the node tree describing {@code value}.
   *
   * @param value the cache value to describe; may be null
   * @param logicalInspection when true, Maps/Lists/Collections are expanded by their logical
   *        contents instead of by reflection over their internal fields
   * @return the root node, or null when {@code value} is null
   */
  public static EntryValueNodeImpl createFromValueRoot(Object value, boolean logicalInspection) {
    recursionSet.set(new IdentityHashMap<>());
    try {
      EntryValueNodeImpl retVal = null;
      if (value != null) {
        retVal = createFromObject(constructKeyDisplay(value), value, logicalInspection);
      }
      return retVal;
    } finally {
      // Always drop the per-thread state, even on failure, so pooled threads do not retain the
      // visited map (the previous code leaked it when construction threw).
      recursionSet.remove();
    }
  }

  /** Creates a leaf node for a primitive (wrapper/String) value. */
  private static EntryValueNodeImpl createFromPrimitive(String fieldName, String type,
      Object primitiveWrapper) {
    EntryValueNodeImpl node = new EntryValueNodeImpl();
    node.name = fieldName;
    node.type = type;
    node.primitiveVal = primitiveWrapper;
    node.primitive = true;
    return node;
  }

  /** Creates a leaf node representing a null field of the given declared type. */
  private static EntryValueNodeImpl createFromNullField(String fieldName, Class<?> fieldType) {
    EntryValueNodeImpl node = new EntryValueNodeImpl();
    node.name = fieldName;
    if (fieldType.isArray()) {
      // Fixed: was "array of" with no trailing space, inconsistent with createFromArray.
      node.type = "array of " + fieldType.getComponentType().getName();
    } else {
      node.type = fieldType.getName();
    }
    node.primitiveVal = "null";
    node.primitive = true;
    return node;
  }

  /**
   * Creates a node for an array value with one child per element. Object-element arrays recurse
   * through {@link #createFromObject}; primitive arrays produce primitive leaf children.
   */
  private static EntryValueNodeImpl createFromArray(String fieldName, Object arrayObj,
      Class<?> arrayClass) {
    EntryValueNodeImpl node = new EntryValueNodeImpl();
    // Register before descending so self-referencing Object[] arrays terminate.
    recursionSet.get().put(arrayObj, node);
    node.name = fieldName;
    Class<?> compType = arrayClass.getComponentType();
    String elType = compType.getName();
    node.type = "array of " + elType;
    node.primitiveVal = arrayObj.toString();
    node.primitive = false;
    EntryValueNodeImpl[] children;
    if (arrayObj instanceof Object[]) {
      Object[] array = (Object[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        if (array[i] != null) {
          children[i] = createFromObject("[" + i + "]", array[i], false);
        } else {
          children[i] = createFromNullField("[" + i + "]", compType);
        }
      }
    } else if (arrayObj instanceof int[]) {
      int[] array = (int[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Integer.valueOf(array[i]));
      }
    } else if (arrayObj instanceof boolean[]) {
      boolean[] array = (boolean[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Boolean.valueOf(array[i]));
      }
    } else if (arrayObj instanceof char[]) {
      char[] array = (char[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        // valueOf instead of the deprecated boxing constructors (here and below).
        children[i] = createFromPrimitive("[" + i + "]", elType, Character.valueOf(array[i]));
      }
    } else if (arrayObj instanceof double[]) {
      double[] array = (double[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Double.valueOf(array[i]));
      }
    } else if (arrayObj instanceof long[]) {
      long[] array = (long[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Long.valueOf(array[i]));
      }
    } else if (arrayObj instanceof float[]) {
      float[] array = (float[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Float.valueOf(array[i]));
      }
    } else if (arrayObj instanceof byte[]) {
      byte[] array = (byte[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Byte.valueOf(array[i]));
      }
    } else if (arrayObj instanceof short[]) {
      short[] array = (short[]) arrayObj;
      children = new EntryValueNodeImpl[array.length];
      for (int i = 0; i < array.length; i++) {
        children[i] = createFromPrimitive("[" + i + "]", elType, Short.valueOf(array[i]));
      }
    } else {
      children = null; // unreachable: every array component kind is covered above
    }
    node.fields = children;
    return node;
  }

  /**
   * Creates a node describing {@code obj}, reusing a previously built node when the object was
   * already visited in this traversal (cycle termination).
   */
  private static EntryValueNodeImpl createFromObject(String fieldName, Object obj,
      boolean logicalInspection) {
    IdentityHashMap<Object, EntryValueNodeImpl> map = recursionSet.get();
    EntryValueNodeImpl stored = map.get(obj);
    if (stored != null) {
      return stored;
    }
    Class<?> clazz = obj.getClass();
    if (clazz.isArray()) {
      return createFromArray(fieldName, obj, clazz);
    }
    EntryValueNodeImpl node = new EntryValueNodeImpl();
    map.put(obj, node);
    node.name = fieldName;
    node.type = clazz.getName();
    if (isWrapperOrString(obj)) {
      node.primitiveVal = obj;
      node.primitive = true;
      return node;
    }
    node.primitiveVal = obj.toString();
    node.primitive = false;
    if (logicalInspection && hasLogicalView(obj)) {
      node.fields = inspectLogically(obj, logicalInspection);
    } else {
      node.fields = inspectPhysically(obj, clazz, logicalInspection);
    }
    return node;
  }

  /**
   * Builds child nodes from the logical contents of a {@link Map} or {@link Collection},
   * retrying up to five times if the source is concurrently modified during iteration.
   */
  private static EntryValueNodeImpl[] inspectLogically(Object obj, boolean logicalInspection) {
    int retryCount = 0;
    boolean retry;
    List<EntryValueNodeImpl> elements = new ArrayList<>();
    do {
      retry = false;
      try {
        if (obj instanceof Map) {
          Map<?, ?> theMap = (Map<?, ?>) obj;
          Set<? extends Map.Entry<?, ?>> entries = theMap.entrySet();
          if (entries != null) {
            for (Map.Entry<?, ?> entry : entries) {
              Object key = entry.getKey();
              Object value = entry.getValue();
              if (key != null) {
                elements.add(
                    createFromObject("key->" + constructKeyDisplay(key), key, logicalInspection));
              } else {
                elements
                    .add(createFromNullField("key->" + constructKeyDisplay(key), Object.class));
              }
              if (value != null) {
                elements.add(createFromObject("value->" + constructKeyDisplay(value), value,
                    logicalInspection));
              } else {
                elements.add(
                    createFromNullField("value->" + constructKeyDisplay(value), Object.class));
              }
            }
          }
        } else if (obj instanceof Collection) {
          // Lists and other collections are expanded identically, one child per element, in
          // iteration order.
          for (Object element : (Collection<?>) obj) {
            if (element != null) {
              elements
                  .add(createFromObject(constructKeyDisplay(element), element, logicalInspection));
            } else {
              // Fixed: null elements previously caused an NPE inside createFromObject.
              elements.add(createFromNullField(constructKeyDisplay(element), Object.class));
            }
          }
        }
      } catch (ConcurrentModificationException ex) {
        // The source collection changed under us; discard partial results and retry.
        elements = new ArrayList<>();
        retryCount++;
        if (retryCount <= 5) {
          retry = true;
        }
      }
    } while (retry);
    return elements.toArray(new EntryValueNodeImpl[0]);
  }

  /**
   * Builds child nodes by reflecting over the non-static fields declared directly on
   * {@code clazz} (superclass fields are not visited, matching the original behavior).
   */
  private static EntryValueNodeImpl[] inspectPhysically(Object obj, Class<?> clazz,
      boolean logicalInspection) {
    Field[] declaredFields = clazz.getDeclaredFields();
    try {
      AccessibleObject.setAccessible(declaredFields, true);
    } catch (SecurityException se) {
      throw new InternalGemFireException(
          "Unable to set accessibility of Field objects during cache value display construction",
          se);
    }
    List<EntryValueNodeImpl> fieldList = new ArrayList<>();
    for (Field field : declaredFields) {
      if (Modifier.isStatic(field.getModifiers())) {
        continue; // static fields are not part of the instance's value
      }
      Object fieldVal;
      try {
        fieldVal = field.get(obj);
      } catch (Exception e) {
        throw new InternalGemFireException(
            "Unable to build cache value display",
            e);
      }
      String fieldName = field.getName();
      if (fieldVal == null) {
        fieldList.add(createFromNullField(fieldName, field.getType()));
      } else if (isWrapperOrString(fieldVal)) {
        fieldList.add(createFromPrimitive(fieldName, field.getType().getName(), fieldVal));
      } else {
        fieldList.add(createFromObject(fieldName, fieldVal, logicalInspection));
      }
    }
    return fieldList.toArray(new EntryValueNodeImpl[0]);
  }

  /** @return true when {@code test} is a String or a primitive wrapper. */
  private static boolean isWrapperOrString(Object test) {
    return (test instanceof Number || test instanceof String || test instanceof Boolean
        || test instanceof Character);
  }

  /** @return true when {@code obj} can be expanded by its logical (collection) contents. */
  private static boolean hasLogicalView(Object obj) {
    return (obj instanceof Map || obj instanceof List || obj instanceof Collection);
  }

  @Override
  public boolean isPrimitiveOrString() {
    return primitive;
  }

  @Override
  public String getName() {
    return name;
  }

  @Override
  public String getType() {
    return type;
  }

  @Override
  public EntryValueNode[] getChildren() {
    // Never return null; leaves report an empty array.
    return fields != null ? fields : new EntryValueNodeImpl[0];
  }

  @Override
  public Object getPrimitiveValue() {
    return primitiveVal;
  }

  /**
   * Returns a short display string for {@code toDisplay}: the value itself for Strings and
   * primitive wrappers, otherwise an "a/an &lt;SimpleClassName&gt;" phrase.
   */
  public static String constructKeyDisplay(Object toDisplay) {
    if (toDisplay == null) {
      return "null";
    }
    if (toDisplay instanceof String) {
      return (String) toDisplay;
    }
    if (toDisplay instanceof Number || toDisplay instanceof Character
        || toDisplay instanceof Boolean) {
      return toDisplay.toString();
    }
    String className = toDisplay.getClass().getName();
    className = className.substring(className.lastIndexOf(".") + 1);
    char c = Character.toLowerCase(className.charAt(0));
    boolean vowel = c == 'a' || c == 'e' || c == 'i' || c == 'o' || c == 'u';
    return (vowel ? "an " : "a ") + className;
  }

  /** Serializes this node; field order must match {@link #readExternal}. */
  @Override
  public void writeExternal(ObjectOutput out) throws IOException {
    out.writeObject(primitiveVal);
    out.writeObject(type);
    out.writeObject(name);
    out.writeObject(fields);
    out.writeBoolean(primitive);
  }

  /** Deserializes this node; field order must match {@link #writeExternal}. */
  @Override
  public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
    this.primitiveVal = in.readObject();
    this.type = (String) in.readObject();
    this.name = (String) in.readObject();
    this.fields = (EntryValueNodeImpl[]) in.readObject();
    this.primitive = in.readBoolean();
  }
}
| |
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.store;
import com.github.ambry.utils.ByteBufferInputStream;
import com.github.ambry.utils.Crc32;
import com.github.ambry.utils.CrcInputStream;
import com.github.ambry.utils.Pair;
import com.github.ambry.utils.Utils;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.channels.ReadableByteChannel;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
/**
 * Represents a segment of a log. The segment is represented by its relative position in the log and the generation
 * number of the segment. Each segment knows the segment that "follows" it logically (if such a segment exists) and can
 * transparently redirect operations if required.
 */
class LogSegment implements Read, Write {
  private static final short VERSION = 0;
  private static final int VERSION_HEADER_SIZE = 2;
  private static final int CAPACITY_HEADER_SIZE = 8;
  private static final int CRC_SIZE = 8;

  // On-disk header layout: [version:2][capacity:8][CRC of the preceding 10 bytes:8].
  static final int HEADER_SIZE = VERSION_HEADER_SIZE + CAPACITY_HEADER_SIZE + CRC_SIZE;

  private final FileChannel fileChannel;
  private final File file;
  private final long capacityInBytes;
  private final String name;
  private final Pair<File, FileChannel> segmentView;
  private final StoreMetrics metrics;
  // Offset of the first byte of data, i.e. just past the header when one exists.
  private final long startOffset;
  // Offset (exclusive) up to which this segment holds valid data.
  private final AtomicLong endOffset;
  private final AtomicLong refCount = new AtomicLong(0);
  private final AtomicBoolean open = new AtomicBoolean(true);

  /**
   * Creates a LogSegment abstraction with the given capacity.
   * @param name the desired name of the segment. The name signifies the handle/ID of the LogSegment and may be
   *             different from the filename of the {@code file}.
   * @param file the backing {@link File} for this segment.
   * @param capacityInBytes the intended capacity of the segment
   * @param metrics the {@link StoreMetrics} instance to use.
   * @param writeHeader if {@code true}, headers are written that provide metadata about the segment.
   * @throws IOException if the file cannot be read or created
   */
  LogSegment(String name, File file, long capacityInBytes, StoreMetrics metrics, boolean writeHeader)
      throws IOException {
    if (!file.exists() || !file.isFile()) {
      throw new IllegalArgumentException(file.getAbsolutePath() + " does not exist or is not a file");
    }
    this.file = file;
    this.name = name;
    this.capacityInBytes = capacityInBytes;
    this.metrics = metrics;
    fileChannel = Utils.openChannel(file, true);
    segmentView = new Pair<>(file, fileChannel);
    // externals will set the correct value of end offset.
    endOffset = new AtomicLong(0);
    if (writeHeader) {
      // this will update end offset
      writeHeader(capacityInBytes);
    }
    // Capture startOffset AFTER the (optional) header write so data starts past the header.
    startOffset = endOffset.get();
  }

  /**
   * Creates a LogSegment abstraction with the given file. Obtains capacity from the headers in the file.
   * @param name the desired name of the segment. The name signifies the handle/ID of the LogSegment and may be
   *             different from the filename of the {@code file}.
   * @param file the backing {@link File} for this segment.
   * @param metrics the {@link StoreMetrics} instance to use.
   * @throws IOException if the file cannot be read or the header is unreadable.
   */
  LogSegment(String name, File file, StoreMetrics metrics) throws IOException {
    if (!file.exists() || !file.isFile()) {
      throw new IllegalArgumentException(file.getAbsolutePath() + " does not exist or is not a file");
    }
    // TODO: just because the file exists, it does not mean the headers have been written into it. LogSegment should
    // TODO: be able to handle this situation.
    CrcInputStream crcStream = new CrcInputStream(new FileInputStream(file));
    try (DataInputStream stream = new DataInputStream(crcStream)) {
      // Version dispatch; only version 0 is known today.
      switch (stream.readShort()) {
        case 0:
          capacityInBytes = stream.readLong();
          // CRC must be read AFTER capturing the running CRC of the bytes before it.
          long computedCrc = crcStream.getValue();
          long crcFromFile = stream.readLong();
          if (crcFromFile != computedCrc) {
            throw new IllegalStateException("CRC from the segment file does not match computed CRC of header");
          }
          startOffset = HEADER_SIZE;
          break;
        default:
          throw new IllegalArgumentException("Unknown version in segment [" + file.getAbsolutePath() + "]");
      }
    }
    this.file = file;
    this.name = name;
    this.metrics = metrics;
    fileChannel = Utils.openChannel(file, true);
    segmentView = new Pair<>(file, fileChannel);
    // externals will set the correct value of end offset.
    endOffset = new AtomicLong(startOffset);
  }

  /**
   * {@inheritDoc}
   * <p/>
   * Attempts to write the {@code buffer} in its entirety in this segment. To guarantee that the write is persisted,
   * {@link #flush()} has to be called.
   * <p/>
   * The write is not started if it cannot be completed.
   * @param buffer The buffer from which data needs to be written from
   * @return the number of bytes written.
   * @throws IllegalArgumentException if there is not enough space for {@code buffer}
   * @throws IOException if data could not be written to the file because of I/O errors
   */
  @Override
  public int appendFrom(ByteBuffer buffer) throws IOException {
    int bytesWritten = 0;
    if (endOffset.get() + buffer.remaining() > capacityInBytes) {
      metrics.overflowWriteError.inc();
      throw new IllegalArgumentException(
          "Buffer cannot be written to segment [" + file.getAbsolutePath() + "] because " + "it exceeds the capacity ["
              + capacityInBytes + "]");
    } else {
      // Positional write may be partial; loop until the buffer is drained.
      while (buffer.hasRemaining()) {
        bytesWritten += fileChannel.write(buffer, endOffset.get());
      }
      endOffset.addAndGet(bytesWritten);
    }
    return bytesWritten;
  }

  /**
   * {@inheritDoc}
   * <p/>
   * Attempts to write the {@code channel} in its entirety in this segment. To guarantee that the write is persisted,
   * {@link #flush()} has to be called.
   * <p/>
   * The write is not started if it cannot be completed.
   * @param channel The channel from which data needs to be written from
   * @param size The amount of data in bytes to be written from the channel
   * @throws IllegalArgumentException if there is not enough space for data of size {@code size}.
   * @throws IOException if data could not be written to the file because of I/O errors
   */
  @Override
  public void appendFrom(ReadableByteChannel channel, long size) throws IOException {
    if (endOffset.get() + size > capacityInBytes) {
      metrics.overflowWriteError.inc();
      throw new IllegalArgumentException(
          "Channel cannot be written to segment [" + file.getAbsolutePath() + "] because" + " it exceeds the capacity ["
              + capacityInBytes + "]");
    } else {
      long bytesWritten = 0;
      // transferFrom may move fewer bytes than requested; loop until done.
      while (bytesWritten < size) {
        bytesWritten += fileChannel.transferFrom(channel, endOffset.get() + bytesWritten, size - bytesWritten);
      }
      endOffset.addAndGet(bytesWritten);
    }
  }

  /**
   * {@inheritDoc}
   * <p/>
   * The read is not started if it cannot be completed.
   * @param buffer The buffer into which the data needs to be written
   * @param position The position to start the read from
   * @throws IOException if data could not be written to the file because of I/O errors
   * @throws IndexOutOfBoundsException if {@code position} < header size or >= {@link #getEndOffset()} or if
   * {@code buffer} size is greater than the data available for read.
   */
  @Override
  public void readInto(ByteBuffer buffer, long position) throws IOException {
    if (position < startOffset || position >= getEndOffset()) {
      throw new IndexOutOfBoundsException(
          "Provided position [" + position + "] is out of bounds for the segment [" + file.getAbsolutePath()
              + "] with end offset [" + getEndOffset() + "]");
    }
    if (position + buffer.remaining() > getEndOffset()) {
      metrics.overflowReadError.inc();
      throw new IndexOutOfBoundsException(
          "Cannot read from segment [" + file.getAbsolutePath() + "] from position [" + position + "] for size ["
              + buffer.remaining() + "] because it exceeds the end offset [" + endOffset + "]");
    }
    long bytesRead = 0;
    int size = buffer.remaining();
    // Positional read may be partial; loop until the requested size is read.
    while (bytesRead < size) {
      bytesRead += fileChannel.read(buffer, position + bytesRead);
    }
  }

  /**
   * Writes {@code size} number of bytes from the channel {@code channel} into the segment at {@code offset}.
   * <p/>
   * The write is not started if it cannot be completed.
   * @param channel The channel from which data needs to be written from.
   * @param offset The offset in the segment at which to start writing.
   * @param size The amount of data in bytes to be written from the channel.
   * @throws IOException if data could not be written to the file because of I/O errors
   * @throws IndexOutOfBoundsException if {@code offset} < header size or if there is not enough space for
   * {@code offset } + {@code size} data.
   *
   */
  void writeFrom(ReadableByteChannel channel, long offset, long size) throws IOException {
    if (offset < startOffset || offset >= capacityInBytes) {
      throw new IndexOutOfBoundsException(
          "Provided offset [" + offset + "] is out of bounds for the segment [" + file.getAbsolutePath()
              + "] with capacity [" + capacityInBytes + "]");
    }
    if (offset + size > capacityInBytes) {
      metrics.overflowWriteError.inc();
      throw new IndexOutOfBoundsException(
          "Cannot write to segment [" + file.getAbsolutePath() + "] from offset [" + offset + "] for size [" + size
              + "] because it exceeds the capacity [" + capacityInBytes + "]");
    }
    long bytesWritten = 0;
    while (bytesWritten < size) {
      bytesWritten += fileChannel.transferFrom(channel, offset + bytesWritten, size - bytesWritten);
    }
    // Extend the end offset only when this write reached past the current end.
    if (offset + size > endOffset.get()) {
      endOffset.set(offset + size);
    }
  }

  /**
   * Gets the {@link File} and {@link FileChannel} backing this log segment. Also increments a ref count.
   * <p/>
   * The expectation is that a matching {@link #closeView()} will be called eventually to decrement the ref count.
   * @return the {@link File} and {@link FileChannel} backing this log segment.
   */
  Pair<File, FileChannel> getView() {
    refCount.incrementAndGet();
    return segmentView;
  }

  /**
   * Closes view that was obtained (decrements ref count).
   */
  void closeView() {
    refCount.decrementAndGet();
  }

  /**
   * @return size of the backing file on disk.
   * @throws IOException if the size could not be obtained due to I/O error.
   */
  long sizeInBytes() throws IOException {
    return fileChannel.size();
  }

  /**
   * @return the name of this segment.
   */
  String getName() {
    return name;
  }

  /**
   * Sets the end offset of this segment. This can be lesser than the actual size of the file and represents the offset
   * until which data that is readable is stored (exclusive) and the offset (inclusive) from which the next append will
   * begin.
   * @param endOffset the end offset of this log.
   * @throws IllegalArgumentException if {@code endOffset} < header size or {@code endOffset} > the size of the file.
   * @throws IOException if there is any I/O error.
   */
  void setEndOffset(long endOffset) throws IOException {
    long fileSize = sizeInBytes();
    if (endOffset < startOffset || endOffset > fileSize) {
      throw new IllegalArgumentException(
          file.getAbsolutePath() + ": EndOffset [" + endOffset + "] outside the file size [" + fileSize + "]");
    }
    // Reposition the channel so non-positional operations continue from the new end.
    fileChannel.position(endOffset);
    this.endOffset.set(endOffset);
  }

  /**
   * @return the offset in this log segment from which there is valid data.
   */
  long getStartOffset() {
    return startOffset;
  }

  /**
   * @return the offset in this log segment until which there is valid data.
   */
  long getEndOffset() {
    return endOffset.get();
  }

  /**
   * @return the reference count of this log segment.
   */
  long refCount() {
    return refCount.get();
  }

  /**
   * @return the total capacity, in bytes, of this log segment.
   */
  long getCapacityInBytes() {
    return capacityInBytes;
  }

  /**
   * Flushes the backing file to disk.
   * @throws IOException if there is an I/O error while flushing.
   */
  void flush() throws IOException {
    fileChannel.force(true);
  }

  /**
   * Closes this log segment
   */
  void close() throws IOException {
    // compareAndSet makes close idempotent: only the first call flushes and closes.
    if (open.compareAndSet(true, false)) {
      flush();
      fileChannel.close();
    }
  }

  /**
   * Writes a header describing the segment.
   * @param capacityInBytes the intended capacity of the segment.
   * @throws IOException if there is any I/O error writing to the file.
   */
  private void writeHeader(long capacityInBytes) throws IOException {
    Crc32 crc = new Crc32();
    ByteBuffer buffer = ByteBuffer.allocate(HEADER_SIZE);
    buffer.putShort(VERSION);
    buffer.putLong(capacityInBytes);
    // CRC covers everything before the CRC field itself.
    crc.update(buffer.array(), 0, HEADER_SIZE - CRC_SIZE);
    buffer.putLong(crc.getValue());
    buffer.flip();
    // Route through appendFrom so endOffset advances past the header.
    appendFrom(Channels.newChannel(new ByteBufferInputStream(buffer)), buffer.remaining());
  }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.ui.tabs.impl.singleRow;
import com.intellij.ui.tabs.JBTabsPosition;
import com.intellij.ui.tabs.TabInfo;
import com.intellij.ui.tabs.impl.*;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.lang.ref.WeakReference;
import java.util.List;
public class SingleRowLayout extends TabLayout {
final JBTabsImpl myTabs;
public SingleRowPassInfo myLastSingRowLayout;
private final SingleRowLayoutStrategy myTop;
private final SingleRowLayoutStrategy myLeft;
private final SingleRowLayoutStrategy myBottom;
private final SingleRowLayoutStrategy myRight;
public final MoreTabsIcon myMoreIcon = new MoreTabsIcon() {
@Nullable
protected Rectangle getIconRec() {
return myLastSingRowLayout != null ? myLastSingRowLayout.moreRect : null;
}
@Override
protected int getIconY(Rectangle iconRec) {
final int shift;
switch (myTabs.getTabsPosition()) {
case bottom: shift = myTabs.getActiveTabUnderlineHeight(); break;
case top: shift = -(myTabs.getActiveTabUnderlineHeight() / 2); break;
default: shift = 0;
}
return super.getIconY(iconRec) + shift;
}
};
public JPopupMenu myMorePopup;
public final GhostComponent myLeftGhost = new GhostComponent(RowDropPolicy.first, RowDropPolicy.first);
public final GhostComponent myRightGhost = new GhostComponent(RowDropPolicy.last, RowDropPolicy.first);
private enum RowDropPolicy {
first, last
}
private RowDropPolicy myRowDropPolicy = RowDropPolicy.first;
  /** Delegates to the active position-specific strategy. */
  @Override
  public boolean isSideComponentOnTabs() {
    return getStrategy().isSideComponentOnTabs();
  }
  /** Delegates shape-transform creation to the active position-specific strategy. */
  @Override
  public ShapeTransform createShapeTransform(Rectangle labelRec) {
    return getStrategy().createShapeTransform(labelRec);
  }
  /** Delegates the drag-out decision to the active position-specific strategy. */
  @Override
  public boolean isDragOut(TabLabel tabLabel, int deltaX, int deltaY) {
    return getStrategy().isDragOut(tabLabel, deltaX, deltaY);
  }
  /**
   * Creates the layout and one strategy per tab placement; {@link #getStrategy()}
   * picks the matching one at layout time.
   */
  public SingleRowLayout(final JBTabsImpl tabs) {
    myTabs = tabs;
    myTop = new SingleRowLayoutStrategy.Top(this);
    myLeft = new SingleRowLayoutStrategy.Left(this);
    myBottom = new SingleRowLayoutStrategy.Bottom(this);
    myRight = new SingleRowLayoutStrategy.Right(this);
  }
SingleRowLayoutStrategy getStrategy() {
switch (myTabs.getPresentation().getTabsPosition()) {
case top:
return myTop;
case left:
return myLeft;
case bottom:
return myBottom;
case right:
return myRight;
}
return null;
}
  /**
   * Decides whether tab labels must be laid out again, or whether the cached
   * geometry in {@link #myLastSingRowLayout} can be reused.
   */
  protected boolean checkLayoutLabels(SingleRowPassInfo data) {
    boolean layoutLabels = true;
    // Reuse is only considered when nothing forced a relayout and the cached
    // pass matches the current tab count, component size and scroll offset.
    if (!myTabs.myForcedRelayout &&
        myLastSingRowLayout != null &&
        myLastSingRowLayout.contentCount == myTabs.getTabCount() &&
        myLastSingRowLayout.layoutSize.equals(myTabs.getSize()) &&
        myLastSingRowLayout.scrollOffset == getScrollOffset()) {
      for (TabInfo each : data.myVisibleInfos) {
        final TabLabel eachLabel = myTabs.myInfo2Label.get(each);
        if (!eachLabel.isValid()) {
          // Any invalid label invalidates the cached pass.
          layoutLabels = true;
          break;
        }
        if (myTabs.getSelectedInfo() == each) {
          // The selected tab's label already having real bounds signals the
          // cached layout can be reused.
          if (eachLabel.getBounds().width != 0) {
            layoutLabels = false;
          }
        }
      }
    }
    return layoutLabels;
  }
int getScrollOffset() {
return 0;
}
public void scroll(int units) {
}
public int getScrollUnitIncrement() {
return 0;
}
public void scrollSelectionInView() {
}
/**
 * Performs a full single-row layout pass: tab labels, ghost placeholders, the
 * "more" button and the selected tab's component. The resulting pass data is
 * cached in {@code myLastSingRowLayout} so the next pass can reuse it when
 * nothing changed (see {@link #checkLayoutLabels}).
 */
public LayoutPassInfo layoutSingleRow(List<TabInfo> visibleInfos) {
  SingleRowPassInfo data = new SingleRowPassInfo(this, visibleInfos);
  final boolean layoutLabels = checkLayoutLabels(data);
  if (!layoutLabels) {
    // Previous pass is still valid — reuse its computed geometry.
    data = myLastSingRowLayout;
  }
  final TabInfo selected = myTabs.getSelectedInfo();
  prepareLayoutPassInfo(data, selected);
  myTabs.resetLayout(layoutLabels || myTabs.isHideTabs());
  if (layoutLabels && !myTabs.isHideTabs()) {
    data.position = getStrategy().getStartPosition(data) - getScrollOffset();
    recomputeToLayout(data);
    layoutLabelsAndGhosts(data);
    layoutMoreButton(data);
  }
  if (selected != null) {
    // Weak reference: the cached pass must not pin the tab's component in memory.
    data.comp = new WeakReference<>(selected.getComponent());
    getStrategy().layoutComp(data);
  }
  updateMoreIconVisibility(data);
  // Bounding rectangle spanning from the first to the last laid-out tab label.
  data.tabRectangle = new Rectangle();
  if (data.toLayout.size() > 0) {
    final TabLabel firstLabel = myTabs.myInfo2Label.get(data.toLayout.get(0));
    final TabLabel lastLabel = findLastVisibleLabel(data);
    if (firstLabel != null && lastLabel != null) {
      data.tabRectangle.x = firstLabel.getBounds().x;
      data.tabRectangle.y = firstLabel.getBounds().y;
      data.tabRectangle.width = (int)lastLabel.getBounds().getMaxX() - data.tabRectangle.x;
      data.tabRectangle.height = (int)lastLabel.getBounds().getMaxY() - data.tabRectangle.y;
    }
  }
  myLastSingRowLayout = data;
  return data;
}
/**
 * Returns the label of the last tab in the laid-out set, i.e. the
 * rightmost/bottommost visible one, or {@code null} when it has no label.
 */
@Nullable
protected TabLabel findLastVisibleLabel(SingleRowPassInfo data) {
  final int lastIndex = data.toLayout.size() - 1;
  final TabInfo lastInfo = data.toLayout.get(lastIndex);
  return myTabs.myInfo2Label.get(lastInfo);
}
/**
 * Fills pass-wide inputs: layout insets, the selected tab's toolbar (placed in
 * either the horizontal or the vertical slot) and the total length available
 * for tab labels.
 */
protected void prepareLayoutPassInfo(SingleRowPassInfo data, TabInfo selected) {
  data.insets = myTabs.getLayoutInsets();
  if (myTabs.isHorizontalTabs()) {
    data.insets.left += myTabs.getFirstTabOffset();
  }
  final JBTabsImpl.Toolbar selectedToolbar = myTabs.myInfo2Toolbar.get(selected);
  // Exactly one of hToolbar/vToolbar holds the toolbar, depending on the side
  // setting; weak references keep the cached pass from pinning the component.
  data.hToolbar =
      new WeakReference<>(selectedToolbar != null && myTabs.myHorizontalSide && !selectedToolbar.isEmpty() ? selectedToolbar : null);
  data.vToolbar =
      new WeakReference<>(selectedToolbar != null && !myTabs.myHorizontalSide && !selectedToolbar.isEmpty() ? selectedToolbar : null);
  data.toFitLength = getStrategy().getToFitLength(data);
  if (myTabs.isGhostsAlwaysVisible()) {
    // Permanently visible ghosts (plus their inter-tab gaps) shrink the usable length.
    data.toFitLength -= myTabs.getGhostTabLength() * 2 + (myTabs.getInterTabSpaceLength() * 2);
  }
}
/**
 * Reflects the number of hidden (dropped) tabs on the "more" icon counter.
 */
protected void updateMoreIconVisibility(SingleRowPassInfo data) {
  int hiddenCount = 0;
  for (TabInfo info : data.myVisibleInfos) {
    if (isTabHidden(info)) {
      hiddenCount++;
    }
  }
  myMoreIcon.updateCounter(hiddenCount);
}

/**
 * Computes the "more" button rectangle; it is only needed when at least one
 * tab was dropped from the row.
 */
protected void layoutMoreButton(SingleRowPassInfo data) {
  if (!data.toDrop.isEmpty()) {
    data.moreRect = getStrategy().getMoreRect(data);
  }
}
/**
 * Lays out every tab label for one pass, together with the leading/trailing
 * ghost placeholders, advancing {@code data.position} along the row axis.
 * When ghosts are visible and there is spare room, tabs are stretched evenly
 * by {@code deltaToFit}; the last tab absorbs the rounding remainder.
 *
 * Fix: the original called {@code data.toLayout.indexOf(eachInfo)} on every
 * iteration of a loop over {@code data.toLayout}, making the pass O(n^2);
 * an indexed loop gives the position for free with identical results.
 */
protected void layoutLabelsAndGhosts(final SingleRowPassInfo data) {
  if (data.firstGhostVisible || myTabs.isGhostsAlwaysVisible()) {
    data.firstGhost = getStrategy().getLayoutRect(data, data.position, myTabs.getGhostTabLength());
    myTabs.layout(myLeftGhost, data.firstGhost);
    data.position += getStrategy().getLengthIncrement(data.firstGhost.getSize()) + myTabs.getInterTabSpaceLength();
  }
  // Even per-tab stretch applied only when a ghost is shown and spare room exists.
  int deltaToFit = 0;
  if (data.firstGhostVisible || data.lastGhostVisible) {
    if (data.requiredLength < data.toFitLength && getStrategy().canBeStretched()) {
      deltaToFit = (int)Math.floor((data.toFitLength - data.requiredLength) / (double)data.toLayout.size());
    }
  }
  int totalLength = 0;
  int positionStart = data.position;
  boolean layoutStopped = false;
  final int tabCount = data.toLayout.size();
  for (int i = 0; i < tabCount; i++) {
    final TabInfo eachInfo = data.toLayout.get(i);
    final TabLabel label = myTabs.myInfo2Label.get(eachInfo);
    if (layoutStopped) {
      // Out of room: collapse the remaining labels to zero-size rectangles.
      label.setActionPanelVisible(false);
      final Rectangle rec = getStrategy().getLayoutRect(data, 0, 0);
      myTabs.layout(label, rec);
      continue;
    }
    label.setActionPanelVisible(true);
    final Dimension eachSize = label.getPreferredSize();
    boolean isLast = i == tabCount - 1;
    int length;
    if (!isLast || deltaToFit == 0) {
      length = getStrategy().getLengthIncrement(eachSize) + deltaToFit;
    }
    else {
      // Last stretched tab fills whatever is left so the row ends exactly at toFitLength.
      length = data.toFitLength - totalLength;
    }
    boolean continueLayout = applyTabLayout(data, label, length, deltaToFit);
    data.position = getStrategy().getMaxPosition(label.getBounds());
    data.position += myTabs.getInterTabSpaceLength();
    totalLength = getStrategy().getMaxPosition(label.getBounds()) - positionStart + myTabs.getInterTabSpaceLength();
    if (!continueLayout) {
      layoutStopped = true;
    }
  }
  // Dropped tabs get no geometry at all.
  for (TabInfo eachInfo : data.toDrop) {
    JBTabsImpl.resetLayout(myTabs.myInfo2Label.get(eachInfo));
  }
  if (data.lastGhostVisible || myTabs.isGhostsAlwaysVisible()) {
    data.lastGhost = getStrategy().getLayoutRect(data, data.position, myTabs.getGhostTabLength());
    myTabs.layout(myRightGhost, data.lastGhost);
  }
}
/**
 * Lays out one tab label at the current position with the given length.
 * Returns {@code true} to continue laying out subsequent tabs (scrolling
 * subclasses may return {@code false} to stop).
 */
protected boolean applyTabLayout(SingleRowPassInfo data, TabLabel label, int length, int deltaToFit) {
  final Rectangle rec = getStrategy().getLayoutRect(data, data.position, length);
  myTabs.layout(label, rec);
  // Center the text only when the tab was stretched (or for editor tabs).
  label.setAlignmentToCenter((deltaToFit > 0 || myTabs.isEditorTabs()) && getStrategy().isToCenterTextWhenStretched());
  return true;
}
/**
 * Drops tabs from the layout set (per {@code myRowDropPolicy}) until the
 * remaining ones fit, never dropping the selected tab, then points each ghost
 * at the dropped neighbor of a surviving tab so it can be reselected.
 */
protected void recomputeToLayout(final SingleRowPassInfo data) {
  calculateRequiredLength(data);
  while (true) {
    if (data.requiredLength <= data.toFitLength - data.position) break;
    if (data.toLayout.size() == 0) break;
    final TabInfo first = data.toLayout.get(0);
    final TabInfo last = data.toLayout.get(data.toLayout.size() - 1);
    if (myRowDropPolicy == RowDropPolicy.first) {
      // Prefer dropping from the head of the row, but never the selected tab.
      if (first != myTabs.getSelectedInfo()) {
        processDrop(data, first, true);
      }
      else if (last != myTabs.getSelectedInfo()) {
        processDrop(data, last, false);
      }
      else {
        // Only the selected tab is left — nothing more can be dropped.
        break;
      }
    }
    else {
      // Mirror image: prefer dropping from the tail of the row.
      if (last != myTabs.getSelectedInfo()) {
        processDrop(data, last, false);
      }
      else if (first != myTabs.getSelectedInfo()) {
        processDrop(data, first, true);
      }
      else {
        break;
      }
    }
  }
  // Assign to each ghost the dropped tab adjacent to a surviving one, so
  // clicking the ghost selects that hidden neighbor.
  for (int i = 1; i < data.myVisibleInfos.size() - 1; i++) {
    final TabInfo each = data.myVisibleInfos.get(i);
    final TabInfo prev = data.myVisibleInfos.get(i - 1);
    final TabInfo next = data.myVisibleInfos.get(i + 1);
    if (data.toLayout.contains(each) && data.toDrop.contains(prev)) {
      myLeftGhost.setInfo(prev);
    }
    else if (data.toLayout.contains(each) && data.toDrop.contains(next)) {
      myRightGhost.setInfo(next);
    }
  }
}
/**
 * Sums the preferred length of every visible tab into
 * {@code data.requiredLength} and seeds {@code data.toLayout} with all of them.
 */
protected void calculateRequiredLength(SingleRowPassInfo data) {
  for (TabInfo eachInfo : data.myVisibleInfos) {
    data.requiredLength += getRequiredLength(eachInfo);
    if (myTabs.getTabsPosition() == JBTabsPosition.left || myTabs.getTabsPosition() == JBTabsPosition.right) {
      // Vertical rows overlap adjacent labels by one pixel.
      data.requiredLength -= 1;
    }
    data.toLayout.add(eachInfo);
  }
}
/**
 * Preferred length of the tab's label along the row axis plus, for editor
 * tabs, the inter-tab gap.
 */
protected int getRequiredLength(TabInfo eachInfo) {
  final TabLabel label = myTabs.myInfo2Label.get(eachInfo);
  final Dimension size = label != null ? label.getPreferredSize() : new Dimension();
  int length = getStrategy().getLengthIncrement(size);
  if (myTabs.isEditorTabs()) {
    length += myTabs.getInterTabSpaceLength();
  }
  return length;
}

/**
 * A tab is hidden when the last layout pass dropped it from the visible row.
 */
public boolean isTabHidden(TabInfo tabInfo) {
  final SingleRowPassInfo lastLayout = myLastSingRowLayout;
  return lastLayout != null && lastLayout.toDrop.contains(tabInfo);
}
/**
 * Placeholder label shown at a row end when tabs were dropped there.
 * Clicking it selects the hidden tab it currently represents.
 */
public class GhostComponent extends JLabel {
  private TabInfo myInfo;
  private GhostComponent(final RowDropPolicy before, final RowDropPolicy after) {
    addMouseListener(new MouseAdapter() {
      public void mousePressed(final MouseEvent e) {
        if (JBTabsImpl.isSelectionClick(e, true) && myInfo != null) {
          // Temporarily switch the drop policy so the previously hidden tab
          // ends up visible; restore the policy once selection completes.
          myRowDropPolicy = before;
          myTabs.select(myInfo, true).doWhenDone(() -> myRowDropPolicy = after);
        } else {
          // Not a selection click — forward the event to the tabs component.
          MouseEvent event = SwingUtilities.convertMouseEvent(e.getComponent(), e, myTabs);
          myTabs.processMouseEvent(event);
        }
      }
    });
  }
  public void setInfo(@Nullable final TabInfo info) {
    myInfo = info;
    setToolTipText(info != null ? info.getTooltipText() : null);
  }
  public void reset() {
    JBTabsImpl.resetLayout(this);
    setInfo(null);
  }
}
/**
 * Moves one tab from the layout set to the drop set and adjusts the remaining
 * fit length: once for the "more" button, and once per ghost that becomes
 * visible on the dropped side.
 */
private void processDrop(final SingleRowPassInfo data, final TabInfo info, boolean isFirstSide) {
  data.requiredLength -= getStrategy().getLengthIncrement(myTabs.myInfo2Label.get(info).getPreferredSize());
  data.toDrop.add(info);
  data.toLayout.remove(info);
  if (data.toDrop.size() == 1) {
    // First drop: reserve room for the "more" button exactly once.
    data.toFitLength -= data.moreRectAxisSize;
  }
  if (!data.firstGhostVisible && isFirstSide) {
    // Editor tabs never show ghosts.
    data.firstGhostVisible = !myTabs.isEditorTabs();
    if (!myTabs.isGhostsAlwaysVisible() && !myTabs.isEditorTabs()) {
      // The newly shown leading ghost consumes part of the row.
      data.toFitLength -= myTabs.getGhostTabLength();
    }
  }
  else if (!data.lastGhostVisible && !isFirstSide) {
    data.lastGhostVisible = !myTabs.isEditorTabs();
    if (!myTabs.isGhostsAlwaysVisible() && !myTabs.isEditorTabs()) {
      data.toFitLength -= myTabs.getGhostTabLength();
    }
  }
}
/**
 * Maps a point (e.g. a drag location) to the index at which a dragged tab
 * should be inserted, or -1 when no sensible index exists.
 */
@Override
public int getDropIndexFor(Point point) {
  if (myLastSingRowLayout == null) return -1;
  int result = -1;
  Component c = myTabs.getComponentAt(point);
  if (c instanceof JBTabsImpl) {
    // The point hit the tabs pane itself: check whether it lies in the gap
    // between two adjacent labels and, if so, treat it as the first of them.
    for (int i = 0; i < myLastSingRowLayout.myVisibleInfos.size() - 1; i++) {
      TabLabel first = myTabs.myInfo2Label.get(myLastSingRowLayout.myVisibleInfos.get(i));
      TabLabel second = myTabs.myInfo2Label.get(myLastSingRowLayout.myVisibleInfos.get(i + 1));
      Rectangle firstBounds = first.getBounds();
      Rectangle secondBounds = second.getBounds();
      final boolean between;
      boolean horizontal = getStrategy() instanceof SingleRowLayoutStrategy.Horizontal;
      if (horizontal) {
        between = firstBounds.getMaxX() < point.x
                  && secondBounds.getX() > point.x
                  && firstBounds.y < point.y
                  && secondBounds.getMaxY() > point.y;
      } else {
        between = firstBounds.getMaxY() < point.y
                  && secondBounds.getY() > point.y
                  && firstBounds.x < point.x
                  && secondBounds.getMaxX() > point.x;
      }
      if (between) {
        c = first;
        break;
      }
    }
  }
  if (c instanceof TabLabel) {
    TabInfo info = ((TabLabel)c).getInfo();
    int index = myLastSingRowLayout.myVisibleInfos.indexOf(info);
    boolean isDropTarget = myTabs.isDropTarget(info);
    if (!isDropTarget) {
      // Compensate for a drop-target placeholder appearing before this label.
      for (int i = 0; i <= index; i++) {
        if (myTabs.isDropTarget(myLastSingRowLayout.myVisibleInfos.get(i))) {
          index -= 1;
          break;
        }
      }
      result = index;
    } else if (index < myLastSingRowLayout.myVisibleInfos.size()) {
      result = index;
    }
  } else if (c instanceof GhostComponent) {
    // Ghosts map to the position just outside the tab they represent,
    // or to the row ends when they carry no tab.
    GhostComponent ghost = (GhostComponent)c;
    TabInfo info = ghost.myInfo;
    if (info != null) {
      int index = myLastSingRowLayout.myVisibleInfos.indexOf(info);
      index += myLeftGhost == ghost ? -1 : 1;
      result = index >= 0 && index < myLastSingRowLayout.myVisibleInfos.size() ? index : -1;
    } else {
      if (myLastSingRowLayout.myVisibleInfos.size() == 0) {
        result = 0;
      } else {
        result = myLeftGhost == ghost ? 0 : myLastSingRowLayout.myVisibleInfos.size() - 1;
      }
    }
  }
  return result;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.nio.channels.FileChannel;
import java.util.HashMap;
import java.util.Map;
import java.util.SortedMap;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
import org.apache.hadoop.hdfs.server.namenode.FSEditLogLoader.EditLogValidation;
import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.PathUtils;
import org.apache.log4j.Level;
import org.junit.Test;
import com.google.common.collect.Maps;
import com.google.common.io.Files;
/**
 * Tests for {@code FSEditLogLoader}: error reporting on corrupt edit logs,
 * replication adjustment during replay, the position-tracking stream limiter,
 * edit-log validation against corrupted/truncated files, and opcode mapping.
 */
public class TestFSEditLogLoader {
  static {
    // Verbose logging so failures during edit-log replay are diagnosable.
    ((Log4JLogger)FSImage.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)FSEditLogLoader.LOG).getLogger().setLevel(Level.ALL);
  }
  private static final File TEST_DIR = PathUtils.getTestDir(TestFSEditLogLoader.class);
  // No datanodes are needed for namenode-only edit-log tests.
  private static final int NUM_DATA_NODES = 0;
  /**
   * Corrupts the tail of an edit log and verifies the startup error message
   * reports the replay offset and the recent opcode offsets.
   */
  @Test
  public void testDisplayRecentEditLogOpCodes() throws IOException {
    // start a cluster
    Configuration conf = new HdfsConfiguration();
    MiniDFSCluster cluster = null;
    FileSystem fileSys = null;
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES)
        .enableManagedDfsDirsRedundancy(false).build();
    cluster.waitActive();
    fileSys = cluster.getFileSystem();
    final FSNamesystem namesystem = cluster.getNamesystem();
    FSImage fsimage = namesystem.getFSImage();
    // Generate some edits so the log has content to corrupt.
    for (int i = 0; i < 20; i++) {
      fileSys.mkdirs(new Path("/tmp/tmp" + i));
    }
    StorageDirectory sd = fsimage.getStorage().dirIterator(NameNodeDirType.EDITS).next();
    cluster.shutdown();
    File editFile = FSImageTestUtil.findLatestEditsLog(sd).getFile();
    assertTrue("Should exist: " + editFile, editFile.exists());
    // Corrupt the edits file.
    long fileLen = editFile.length();
    RandomAccessFile rwf = new RandomAccessFile(editFile, "rw");
    rwf.seek(fileLen - 40);
    for (int i = 0; i < 20; i++) {
      rwf.write(FSEditLogOpCodes.OP_DELETE.getOpCode());
    }
    rwf.close();
    // Expected error text: replay offset, expected txid, and recent opcode offsets.
    StringBuilder bld = new StringBuilder();
    bld.append("^Error replaying edit log at offset \\d+. ");
    bld.append("Expected transaction ID was \\d+\n");
    bld.append("Recent opcode offsets: (\\d+\\s*){4}$");
    try {
      // Restart on the corrupted log must fail with the formatted message.
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(NUM_DATA_NODES)
          .enableManagedDfsDirsRedundancy(false).format(false).build();
      fail("should not be able to start");
    } catch (IOException e) {
      assertTrue("error message contains opcodes message",
          e.getMessage().matches(bld.toString()));
    }
  }
  /**
   * Test that, if the NN restarts with a new minimum replication,
   * any files created with the old replication count will get
   * automatically bumped up to the new minimum upon restart.
   */
  @Test
  public void testReplicationAdjusted() throws Exception {
    // start a cluster
    Configuration conf = new HdfsConfiguration();
    // Replicate and heartbeat fast to shave a few seconds off test
    conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_INTERVAL_KEY, 1);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    MiniDFSCluster cluster = null;
    try {
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
          .build();
      cluster.waitActive();
      FileSystem fs = cluster.getFileSystem();
      // Create a file with replication count 1
      Path p = new Path("/testfile");
      DFSTestUtil.createFile(fs, p, 10, /*repl*/ (short)1, 1);
      DFSTestUtil.waitReplication(fs, p, (short)1);
      // Shut down and restart cluster with new minimum replication of 2
      cluster.shutdown();
      cluster = null;
      conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MIN_KEY, 2);
      cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
          .format(false).build();
      cluster.waitActive();
      fs = cluster.getFileSystem();
      // The file should get adjusted to replication 2 when
      // the edit log is replayed.
      DFSTestUtil.waitReplication(fs, p, (short)2);
    } finally {
      if (cluster != null) {
        cluster.shutdown();
      }
    }
  }
  /**
   * Corrupt the byte at the given offset in the given file,
   * by subtracting 1 from it.
   */
  private void corruptByteInFile(File file, long offset)
      throws IOException {
    RandomAccessFile raf = new RandomAccessFile(file, "rw");
    try {
      raf.seek(offset);
      int origByte = raf.read();
      raf.seek(offset);
      raf.writeByte(origByte - 1);
    } finally {
      IOUtils.closeStream(raf);
    }
  }
  /**
   * Truncate the given file to the given length
   */
  private void truncateFile(File logFile, long newLength)
      throws IOException {
    RandomAccessFile raf = new RandomAccessFile(logFile, "rw");
    raf.setLength(newLength);
    raf.close();
  }
  /**
   * Return the length of bytes in the given file after subtracting
   * the trailer of 0xFF (OP_INVALID)s.
   * This seeks to the end of the file and reads chunks backwards until
   * it finds a non-0xFF byte.
   * @throws IOException if the file cannot be read
   */
  private static long getNonTrailerLength(File f) throws IOException {
    final int chunkSizeToRead = 256*1024;
    FileInputStream fis = new FileInputStream(f);
    try {
      byte buf[] = new byte[chunkSizeToRead];
      FileChannel fc = fis.getChannel();
      long size = fc.size();
      // Start at the last chunk-aligned position and walk backwards.
      long pos = size - (size % chunkSizeToRead);
      while (pos >= 0) {
        fc.position(pos);
        int readLen = (int) Math.min(size - pos, chunkSizeToRead);
        IOUtils.readFully(fis, buf, 0, readLen);
        // Scan the chunk from its end for the first non-OP_INVALID byte.
        for (int i = readLen - 1; i >= 0; i--) {
          if (buf[i] != FSEditLogOpCodes.OP_INVALID.getOpCode()) {
            return pos + i + 1; // + 1 since we count this byte!
          }
        }
        pos -= chunkSizeToRead;
      }
      return 0;
    } finally {
      fis.close();
    }
  }
  /**
   * Verifies that PositionTrackingInputStream enforces its read limit and
   * that mark/reset restores both position and remaining limit.
   */
  @Test
  public void testStreamLimiter() throws IOException {
    final File LIMITER_TEST_FILE = new File(TEST_DIR, "limiter.test");
    FileOutputStream fos = new FileOutputStream(LIMITER_TEST_FILE);
    try {
      fos.write(0x12);
      fos.write(0x12);
      fos.write(0x12);
    } finally {
      fos.close();
    }
    FileInputStream fin = new FileInputStream(LIMITER_TEST_FILE);
    BufferedInputStream bin = new BufferedInputStream(fin);
    FSEditLogLoader.PositionTrackingInputStream tracker =
        new FSEditLogLoader.PositionTrackingInputStream(bin);
    try {
      tracker.setLimit(2);
      tracker.mark(100);
      tracker.read();
      tracker.read();
      try {
        // Third single-byte read exceeds the 2-byte limit.
        tracker.read();
        fail("expected to get IOException after reading past the limit");
      } catch (IOException e) {
      }
      tracker.reset();
      tracker.mark(100);
      byte arr[] = new byte[3];
      try {
        // Bulk read past the limit must fail too.
        tracker.read(arr);
        fail("expected to get IOException after reading past the limit");
      } catch (IOException e) {
      }
      tracker.reset();
      arr = new byte[2];
      // Reading exactly up to the limit succeeds.
      tracker.read(arr);
    } finally {
      tracker.close();
    }
  }
  /**
   * Create an unfinalized edit log for testing purposes
   *
   * @param testDir Directory to create the edit log in
   * @param numTx Number of transactions to add to the new edit log
   * @param offsetToTxId A map from transaction IDs to offsets in the
   * edit log file.
   * @return The new edit log file name.
   * @throws IOException
   */
  static private File prepareUnfinalizedTestEditLog(File testDir, int numTx,
      SortedMap<Long, Long> offsetToTxId) throws IOException {
    File inProgressFile = new File(testDir, NNStorage.getInProgressEditsFileName(1));
    FSEditLog fsel = null, spyLog = null;
    try {
      fsel = FSImageTestUtil.createStandaloneEditLog(testDir);
      spyLog = spy(fsel);
      // Normally, the in-progress edit log would be finalized by
      // FSEditLog#endCurrentLogSegment.  For testing purposes, we
      // disable that here.
      doNothing().when(spyLog).endCurrentLogSegment(true);
      spyLog.openForWrite();
      assertTrue("should exist: " + inProgressFile, inProgressFile.exists());
      for (int i = 0; i < numTx; i++) {
        // Record where each transaction starts (past the OP_INVALID trailer).
        long trueOffset = getNonTrailerLength(inProgressFile);
        long thisTxId = spyLog.getLastWrittenTxId() + 1;
        offsetToTxId.put(trueOffset, thisTxId);
        System.err.println("txid " + thisTxId + " at offset " + trueOffset);
        spyLog.logDelete("path" + i, i, false);
        spyLog.logSync();
      }
    } finally {
      if (spyLog != null) {
        spyLog.close();
      } else if (fsel != null) {
        fsel.close();
      }
    }
    return inProgressFile;
  }
  /**
   * A log whose header is overwritten must be reported as having a corrupt header.
   */
  @Test
  public void testValidateEditLogWithCorruptHeader() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptHeader");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    File logFile = prepareUnfinalizedTestEditLog(testDir, 2, offsetToTxId);
    RandomAccessFile rwf = new RandomAccessFile(logFile, "rw");
    try {
      rwf.seek(0);
      rwf.writeLong(42); // corrupt header
    } finally {
      rwf.close();
    }
    EditLogValidation validation = EditLogFileInputStream.validateEditLog(logFile);
    assertTrue(validation.hasCorruptHeader());
  }
  /**
   * Corrupts/truncates each transaction in turn and checks that validation
   * still reports the correct last valid transaction ID.
   */
  @Test
  public void testValidateEditLogWithCorruptBody() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEditLogWithCorruptBody");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    final int NUM_TXNS = 20;
    File logFile = prepareUnfinalizedTestEditLog(testDir, NUM_TXNS,
        offsetToTxId);
    // Back up the uncorrupted log
    File logFileBak = new File(testDir, logFile.getName() + ".bak");
    Files.copy(logFile, logFileBak);
    EditLogValidation validation =
        EditLogFileInputStream.validateEditLog(logFile);
    assertTrue(!validation.hasCorruptHeader());
    // We expect that there will be an OP_START_LOG_SEGMENT, followed by
    // NUM_TXNS opcodes, followed by an OP_END_LOG_SEGMENT.
    assertEquals(NUM_TXNS + 1, validation.getEndTxId());
    // Corrupt each edit and verify that validation continues to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
      long txOffset = entry.getKey();
      long txId = entry.getValue();
      // Restore backup, corrupt the txn opcode
      Files.copy(logFileBak, logFile);
      corruptByteInFile(logFile, txOffset);
      validation = EditLogFileInputStream.validateEditLog(logFile);
      // Corrupting the last txn drops it; earlier corruption is skipped over.
      long expectedEndTxId = (txId == (NUM_TXNS + 1)) ?
          NUM_TXNS : (NUM_TXNS + 1);
      assertEquals("Failed when corrupting txn opcode at " + txOffset,
          expectedEndTxId, validation.getEndTxId());
      assertTrue(!validation.hasCorruptHeader());
    }
    // Truncate right before each edit and verify that validation continues
    // to work
    for (Map.Entry<Long, Long> entry : offsetToTxId.entrySet()) {
      long txOffset = entry.getKey();
      long txId = entry.getValue();
      // Restore backup, corrupt the txn opcode
      Files.copy(logFileBak, logFile);
      truncateFile(logFile, txOffset);
      validation = EditLogFileInputStream.validateEditLog(logFile);
      long expectedEndTxId = (txId == 0) ?
          HdfsServerConstants.INVALID_TXID : (txId - 1);
      assertEquals("Failed when corrupting txid " + txId + " txn opcode " +
          "at " + txOffset, expectedEndTxId, validation.getEndTxId());
      assertTrue(!validation.hasCorruptHeader());
    }
  }
  /**
   * A header-only log must validate cleanly with no end transaction ID.
   */
  @Test
  public void testValidateEmptyEditLog() throws IOException {
    File testDir = new File(TEST_DIR, "testValidateEmptyEditLog");
    SortedMap<Long, Long> offsetToTxId = Maps.newTreeMap();
    File logFile = prepareUnfinalizedTestEditLog(testDir, 0, offsetToTxId);
    // Truncate the file so that there is nothing except the header and
    // layout flags section.
    truncateFile(logFile, 8);
    EditLogValidation validation =
        EditLogFileInputStream.validateEditLog(logFile);
    assertTrue(!validation.hasCorruptHeader());
    assertEquals(HdfsServerConstants.INVALID_TXID, validation.getEndTxId());
  }
  // Reference byte-to-opcode mapping built independently of
  // FSEditLogOpCodes.fromByte, used to cross-check it below.
  private static final Map<Byte, FSEditLogOpCodes> byteToEnum =
      new HashMap<Byte, FSEditLogOpCodes>();
  static {
    for(FSEditLogOpCodes opCode : FSEditLogOpCodes.values()) {
      byteToEnum.put(opCode.getOpCode(), opCode);
    }
  }
  private static FSEditLogOpCodes fromByte(byte opCode) {
    return byteToEnum.get(opCode);
  }
  /**
   * Cross-checks FSEditLogOpCodes.fromByte against the reference map for
   * every opcode and every possible byte value.
   */
  @Test
  public void testFSEditLogOpCodes() throws IOException {
    //try all codes
    for(FSEditLogOpCodes c : FSEditLogOpCodes.values()) {
      final byte code = c.getOpCode();
      assertEquals("c=" + c + ", code=" + code,
          c, FSEditLogOpCodes.fromByte(code));
    }
    //try all byte values
    for(int b = 0; b < (1 << Byte.SIZE); b++) {
      final byte code = (byte)b;
      assertEquals("b=" + b + ", code=" + code,
          fromByte(code), FSEditLogOpCodes.fromByte(code));
    }
  }
}
| |
/*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ratpack.zipkin.internal;
import brave.Span;
import brave.http.HttpClientAdapter;
import brave.http.HttpClientHandler;
import brave.http.HttpSampler;
import brave.http.HttpTracing;
import brave.propagation.CurrentTraceContext;
import brave.propagation.SamplingFlags;
import brave.propagation.ThreadLocalSpan;
import brave.propagation.TraceContext;
import brave.propagation.TraceContextOrSamplingFlags;
import io.netty.buffer.ByteBufAllocator;
import java.net.URI;
import java.time.Duration;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.BiFunction;
import javax.inject.Inject;
import javax.net.ssl.SSLContext;
import ratpack.exec.Promise;
import ratpack.exec.Result;
import ratpack.func.Action;
import ratpack.func.Function;
import ratpack.http.HttpMethod;
import ratpack.http.MutableHeaders;
import ratpack.http.client.HttpClient;
import ratpack.http.client.ReceivedResponse;
import ratpack.http.client.RequestSpec;
import ratpack.http.client.StreamedResponse;
/**
* Decorator that adds Zipkin client logging around {@link HttpClient}.
*/
public final class ZipkinHttpClientImpl implements HttpClient {
  private final HttpClient delegate;
  private final CurrentTraceContext currentTraceContext;
  // Holds the in-flight client span on the calling thread between send and remove.
  private final ThreadLocalSpan threadLocalSpan;
  // Starts the next client span, parented on the given trace context when present.
  private final BiFunction<WrappedRequestSpec, TraceContext, Span> nextThreadLocalSpan;
  private final HttpClientHandler<WrappedRequestSpec, Integer> handler;
  private final TraceContext.Injector<MutableHeaders> injector;
  @Inject
  public ZipkinHttpClientImpl(final HttpClient delegate, final HttpTracing httpTracing) {
    this.delegate = delegate;
    this.threadLocalSpan = ThreadLocalSpan.create(httpTracing.tracing().tracer());
    this.currentTraceContext = httpTracing.tracing().currentTraceContext();
    this.nextThreadLocalSpan = new NextSpan(threadLocalSpan, httpTracing.clientSampler());
    this.handler = HttpClientHandler.create(httpTracing, ADAPTER);
    this.injector = httpTracing.tracing().propagation().injector(MutableHeaders::set);
  }
  // The following accessors delegate untouched — tracing only wraps requests.
  @Override
  public ByteBufAllocator getByteBufAllocator() {
    return delegate.getByteBufAllocator();
  }
  @Override
  public int getPoolSize() {
    return delegate.getPoolSize();
  }
  @Override
  public Duration getReadTimeout() {
    return delegate.getReadTimeout();
  }
  @Override
  public int getMaxContentLength() {
    return delegate.getMaxContentLength();
  }
  @Override
  public void close() {
    delegate.close();
  }
  /**
   * Issues a traced request: the span is started when the request's method is
   * set (see {@link WrappedRequestSpec#method}) and finished when the response
   * arrives on the wiretap.
   */
  @Override
  public Promise<ReceivedResponse> request(URI uri, Action<? super RequestSpec> action) {
    // save off the current span as the parent of a future client span
    TraceContext parent = currentTraceContext.get();
    // this reference is used to manually propagate the span from the request to the response
    // we use this because we cannot assume a thread context exists betweeen them.
    AtomicReference<Span> currentSpan = new AtomicReference<>();
    return delegate.request(uri, (RequestSpec requestSpec) -> {
      try {
        action.execute(new WrappedRequestSpec(requestSpec, parent, currentSpan));
      } finally {
        // moves the span from thread local context to an atomic ref the response can read
        currentSpan.set(threadLocalSpan.remove());
      }
    }).wiretap(response -> responseWithSpan(response, currentSpan.getAndSet(null)));
  }
  /**
   * Streamed variant of {@link #request}; the span is started eagerly here
   * because streamed requests do not go through {@code method(..)}.
   */
  @Override
  public Promise<StreamedResponse> requestStream(URI uri, Action<? super RequestSpec> action) {
    // save off the current span as the parent of a future client span
    TraceContext parent = currentTraceContext.get();
    // this reference is used to manually propagate the span from the request to the response
    // we use this because we cannot assume a thread context exists betweeen them.
    AtomicReference<Span> currentSpan = new AtomicReference<>();
    return delegate.requestStream(uri, (RequestSpec requestSpec) -> {
      // streamed request doesn't set the http method.
      // start span here until a better solution presents itself.
      WrappedRequestSpec captor = new WrappedRequestSpec(requestSpec, parent, currentSpan);
      Span span = nextThreadLocalSpan.apply(captor, parent);
      try {
        handler.handleSend(injector, captor.getHeaders(), captor, span);
        action.execute(new WrappedRequestSpec(requestSpec, parent, currentSpan));
      } finally {
        // moves the span from thread local context to an atomic ref the response can read
        currentSpan.set(threadLocalSpan.remove());
      }
    }).wiretap(response -> streamedResponseWithSpan(response, currentSpan.getAndSet(null)));
  }
  @Override
  public Promise<ReceivedResponse> get(final URI uri, final Action<? super RequestSpec> requestConfigurer) {
    return request(uri, requestConfigurer.prepend(RequestSpec::get));
  }
  @Override
  public Promise<ReceivedResponse> post(final URI uri, final Action<? super RequestSpec> requestConfigurer) {
    return request(uri, requestConfigurer.prepend(RequestSpec::post));
  }
  // Finishes the span for a streamed response; null status when errored/absent.
  private void streamedResponseWithSpan(Result<StreamedResponse> response, Span currentSpan) {
    if (currentSpan == null) return;
    Integer statusCode = (response.isError() || response.getValue() == null)
        ? null : response.getValue().getStatusCode();
    handler.handleReceive(statusCode, response.getThrowable(), currentSpan);
  }
  // Finishes the span for a buffered response; null status when errored/absent.
  private void responseWithSpan(Result<ReceivedResponse> response, Span currentSpan) {
    if (currentSpan == null) return;
    Integer statusCode = (response.isError() || response.getValue() == null)
        ? null : response.getValue().getStatusCode();
    handler.handleReceive(statusCode, response.getThrowable(), currentSpan);
  }
  // Teaches Brave how to extract HTTP data from the wrapped request spec.
  static final HttpClientAdapter<WrappedRequestSpec, Integer> ADAPTER =
      new HttpClientAdapter<WrappedRequestSpec, Integer>() {
        @Override public String method(WrappedRequestSpec request) {
          // Ratpack defaults to GET when no method was explicitly set.
          HttpMethod method = Optional.ofNullable(request.getCapturedMethod()).orElse(HttpMethod.GET);
          return method.getName();
        }
        @Override public String path(WrappedRequestSpec request) {
          return request.getUri().getPath();
        }
        @Override public String url(WrappedRequestSpec request) {
          return request.getUri().toString();
        }
        @Override public String requestHeader(WrappedRequestSpec request, String name) {
          return request.getHeaders().get(name);
        }
        // integer because ReceivedResponse and StreamedResponse share no common interface
        @Override public Integer statusCode(Integer response) {
          return response;
        }
      };
  /**
   * RequestSpec wrapper that captures the method type, sets up redirect handling
   * and starts new spans when a method type is set.
   */
  // not a static type as it shares many references with the enclosing class
  final class WrappedRequestSpec implements RequestSpec {
    private final RequestSpec delegate;
    private final TraceContext parent;
    private final AtomicReference<Span> currentSpan;
    private HttpMethod capturedMethod;
    WrappedRequestSpec(
        RequestSpec delegate,
        TraceContext parent,
        AtomicReference<Span> currentSpan
    ) {
      this.delegate = delegate;
      this.parent = parent;
      this.currentSpan = currentSpan;
      this.delegate.onRedirect(this::redirectHandler);
    }
    /**
     * Default redirect handler that ensures the span is marked as received before
     * a new span is created.
     */
    private Action<? super RequestSpec> redirectHandler(ReceivedResponse response) {
      Span span = currentSpan.getAndSet(null);
      handler.handleReceive(response.getStatusCode(), null, span);
      return (s) -> new WrappedRequestSpec(s, parent, currentSpan);
    }
    @Override
    public RequestSpec redirects(int maxRedirects) {
      this.delegate.redirects(maxRedirects);
      return this;
    }
    @Override
    public RequestSpec onRedirect(Function<? super ReceivedResponse, Action<? super RequestSpec>> function) {
      // Keep our span-closing redirect handling in front of the user's handler.
      Function<? super ReceivedResponse, Action<? super RequestSpec>> wrapped =
          (ReceivedResponse response) -> redirectHandler(response).append(function.apply(response));
      this.delegate.onRedirect(wrapped);
      return this;
    }
    @Override
    public RequestSpec sslContext(SSLContext sslContext) {
      this.delegate.sslContext(sslContext);
      return this;
    }
    @Override
    public MutableHeaders getHeaders() {
      return this.delegate.getHeaders();
    }
    @Override
    public RequestSpec maxContentLength(int numBytes) {
      this.delegate.maxContentLength(numBytes);
      return this;
    }
    @Override
    public RequestSpec headers(Action<? super MutableHeaders> action) throws Exception {
      this.delegate.headers(action);
      return this;
    }
    @Override
    public RequestSpec method(HttpMethod method) {
      // Setting the method is the point where the client span starts and the
      // trace context is injected into the outgoing headers.
      this.capturedMethod = method;
      Span currentSpan = nextThreadLocalSpan.apply(this, parent);
      handler.handleSend(injector, this.getHeaders(), this, currentSpan);
      this.delegate.method(method);
      return this;
    }
    @Override
    public RequestSpec decompressResponse(boolean shouldDecompress) {
      this.delegate.decompressResponse(shouldDecompress);
      return this;
    }
    @Override
    public URI getUri() {
      return this.delegate.getUri();
    }
    @Override
    public RequestSpec connectTimeout(Duration duration) {
      this.delegate.connectTimeout(duration);
      return this;
    }
    @Override
    public RequestSpec readTimeout(Duration duration) {
      this.delegate.readTimeout(duration);
      return this;
    }
    @Override
    public Body getBody() {
      return this.delegate.getBody();
    }
    @Override
    public RequestSpec body(Action<? super Body> action) throws Exception {
      this.delegate.body(action);
      return this;
    }
    public HttpMethod getCapturedMethod() {
      return capturedMethod;
    }
  }
  /** This is a partial function that applies the last parent when creating a new span */
  static class NextSpan implements BiFunction<WrappedRequestSpec, TraceContext, Span> {
    final ThreadLocalSpan span;
    final HttpSampler sampler;
    NextSpan(ThreadLocalSpan span, HttpSampler sampler) {
      this.span = span;
      this.sampler = sampler;
    }
    @Override public Span apply(WrappedRequestSpec req, TraceContext parent) {
      if (parent != null) return span.next(TraceContextOrSamplingFlags.create(parent));
      // No parent: ask the HTTP sampler whether to trace this request at all.
      Boolean sampled = sampler.trySample(ADAPTER, req);
      return span.next(TraceContextOrSamplingFlags.create(
          new SamplingFlags.Builder().sampled(sampled).build())
      );
    }
  }
}
| |
package imj2.zipslideviewer;
import static java.lang.Math.atan2;
import static java.lang.Math.ceil;
import static java.lang.Math.cos;
import static java.lang.Math.log;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.pow;
import static java.lang.Math.sin;
import static java.lang.Math.sqrt;
import static multij.tools.MathTools.square;
import java.awt.Graphics;
import java.awt.Graphics2D;
import java.awt.Point;
import java.awt.event.MouseEvent;
import java.awt.event.MouseWheelEvent;
import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicBoolean;
import javax.swing.JComponent;
import imj2.core.TiledImage2D;
import imj2.pixel3d.MouseHandler;
import imj2.tools.IMJTools;
import imj2.tools.MultiThreadTools;
import imj2.tools.Image2DComponent.Painter;
/**
 * Interactive viewer component for a {@link TiledImage2D}: supports panning (drag),
 * rotation (alt+drag) and zooming (mouse wheel). Tiles are decoded per level of
 * detail (LOD) on background threads and cached per LOD.
 *
 * @author codistmonk (creation 2014-11-23)
 */
public final class View extends JComponent {
	
	private final List<Painter<View>> painters;
	
	private final TiledImage2D image;
	
	// lowest-quality rendition computed up front so something can always be drawn
	private final TiledImage2D initialLODImage;
	
	// lod -> image at that level of detail
	private final Map<Integer, TiledImage2D> lodImages;
	
	// lod -> (tile origin -> decoded tile); inner maps are guarded with synchronized blocks
	private final Map<Integer, Map<Point, BufferedImage>> cache;
	
	// set whenever the viewport changes; consumed by paintComponent to schedule a cache refresh
	private final AtomicBoolean updateNeeded;
	
	// view center in full-resolution image coordinates
	private final Point2D.Double center;
	
	private double scale;
	
	private double angle;
	
	public View(final TiledImage2D image) {
		this.painters = new ArrayList<>();
		this.image = image;
		this.lodImages = Collections.synchronizedMap(new HashMap<>());
		this.cache = Collections.synchronizedMap(new HashMap<>());
		this.updateNeeded = new AtomicBoolean(true);
		this.center = new Point2D.Double(image.getWidth() / 2.0, image.getHeight() / 2.0);
		// initial scale fits the whole image into a single optimal tile
		this.scale = min((double) image.getOptimalTileWidth() / image.getWidth(),
				(double) image.getOptimalTileHeight() / image.getHeight());
		this.initialLODImage = this.getLODImage();
		this.setFocusable(true);
		
		new MouseHandler(null) {
			
			private final Point mouse = new Point();
			
			@Override
			public final void mousePressed(final MouseEvent event) {
				this.mouse.setLocation(event.getX(), event.getY());
			}
			
			@Override
			public final void mouseDragged(final MouseEvent event) {
				if (event.isAltDown()) {
					// alt+drag rotates around the component center
					final double x0 = getWidth() / 2.0;
					final double y0 = getHeight() / 2.0;
					updateAngle(atan2(event.getY() - y0, event.getX() - x0) - atan2(this.mouse.y - y0, this.mouse.x - x0));
				} else {
					// plain drag pans; dividing by the scale converts screen pixels to image pixels
					final double dx = (event.getX() - this.mouse.x) / getScale();
					final double dy = (event.getY() - this.mouse.y) / getScale();
					updateCenter(-dx, -dy);
				}
				
				this.mouse.setLocation(event.getX(), event.getY());
			}
			
			@Override
			public final void mouseWheelMoved(final MouseWheelEvent event) {
				if (event.getWheelRotation() < 0) {
					updateScale(5.0 / 4.0);
				} else {
					updateScale(4.0 / 5.0);
				}
			}
			
			/**
			 * {@value}.
			 */
			private static final long serialVersionUID = -6649302685423143601L;
			
		}.addTo(this);
	}
	
	public final List<Painter<View>> getPainters() {
		return this.painters;
	}
	
	public final TiledImage2D getImage() {
		return this.image;
	}
	
	public final Point2D.Double getCenter() {
		return this.center;
	}
	
	public final double getScale() {
		return this.scale;
	}
	
	public final double getAngle() {
		return this.angle;
	}
	
	@Override
	protected final void paintComponent(final Graphics g) {
		if (this.updateNeeded.getAndSet(false)) {
			MultiThreadTools.getExecutor().submit(this::updateCache);
		}
		
		// draw from coarse to fine: the initial LOD is always available, better
		// LODs are drawn on top as soon as they have been computed
		this.drawTiles((Graphics2D) g, this.initialLODImage);
		
		TiledImage2D lodImage = this.getLODImageOrNull(this.computeLOD() + 2);
		
		if (lodImage != null) {
			this.drawTiles((Graphics2D) g, lodImage);
		}
		
		lodImage = this.getLODImageOrNull();
		
		if (lodImage != null) {
			this.drawTiles((Graphics2D) g, lodImage);
		}
		
		for (final Painter<View> painter : this.getPainters()) {
			painter.paint((Graphics2D) g, this, this.getWidth(), this.getHeight());
		}
	}
	
	/**
	 * Recomputes the tile cache for the current LOD and visible region,
	 * reusing already-decoded tiles, then repaints. Aborts early when the
	 * viewport changes again while it is running.
	 */
	public final void updateCache() {
		synchronized (this.image) {
			final double x0 = this.getWidth() / 2.0;
			final double y0 = this.getHeight() / 2.0;
			// radius of the circumscribed circle: covers the viewport at any rotation angle
			final double r = sqrt(square(x0) + square(y0));
			final TiledImage2D lodImage = this.getLODImage();
			final int lod = lodImage.getLOD();
			final Map<Point, BufferedImage> sharedCache = this.getCache(lod);
			final Map<Point, BufferedImage> cache = new HashMap<>();
			
			synchronized (sharedCache) {
				cache.putAll(sharedCache);
			}
			
			final int lodImageWidth = lodImage.getWidth();
			final int lodImageHeight = lodImage.getHeight();
			// convert view-space scale and center into LOD-image coordinates
			final double lodScale = this.getScale() / pow(2.0, -lod);
			final double lodCenterX = this.getCenter().x * pow(2.0, -lod);
			final double lodCenterY = this.getCenter().y * pow(2.0, -lod);
			final double visibleLeft = max(0.0, lodCenterX - r / lodScale);
			final double visibleTop = max(0.0, lodCenterY - r / lodScale);
			final double visibleWidth = min(lodImageWidth, lodCenterX + r / lodScale) - visibleLeft;
			final double visibleHeight = min(lodImageHeight, lodCenterY + r / lodScale) - visibleTop;
			final int optimalTileWidth = lodImage.getOptimalTileWidth();
			final int optimalTileHeight = lodImage.getOptimalTileHeight();
			final Map<Point, BufferedImage> tmp = new HashMap<>();
			
			for (double y = quantize(visibleTop, optimalTileHeight); y < visibleTop + visibleHeight; y += optimalTileHeight) {
				for (double x = quantize(visibleLeft, optimalTileWidth); x < visibleLeft + visibleWidth; x += optimalTileWidth) {
					if (this.updateNeeded.get()) {
						// viewport changed again: this pass is stale, let the next one take over
						return;
					}
					
					final Point tileXY = new Point((int) x, (int) y);
					final int actualTileWidth = min(lodImageWidth - tileXY.x, optimalTileWidth);
					final int actualTileHeight = min(lodImageHeight - tileXY.y, optimalTileHeight);
					BufferedImage tile = cache.get(tileXY);
					
					if (tile != null) {
						tmp.put(tileXY, tile);
					} else {
						tmp.put(tileXY, tile = IMJTools.awtImage(lodImage, tileXY.x, tileXY.y,
								actualTileWidth, actualTileHeight));
					}
				}
			}
			
			synchronized (sharedCache) {
				sharedCache.clear();
				sharedCache.putAll(tmp);
			}
			
			this.repaint();
		}
	}
	
	final TiledImage2D getLODImageOrNull() {
		return this.getLODImageOrNull(this.computeLOD());
	}
	
	// LOD 0 is full resolution; each additional level halves the resolution
	final int computeLOD() {
		return max(0, (int) (-log(this.getScale()) / log(2.0)));
	}
	
	/**
	 * Returns the image for {@code lod} if it is already available, otherwise
	 * schedules its computation in the background and returns {@code null}.
	 */
	final TiledImage2D getLODImageOrNull(final int lod) {
		if (this.lodImages.containsKey(lod)) {
			return this.lodImages.get(lod);
		}
		
		// BUG FIX: compute the *requested* LOD; the previous code used
		// this::getLODImage, which recomputed computeLOD() instead of lod,
		// so an explicitly requested level (e.g. computeLOD() + 2 in
		// paintComponent) was never produced
		new Thread(() -> this.getLODImage(lod)).start();
		
		return null;
	}
	
	final TiledImage2D getLODImage() {
		return this.getLODImage(this.computeLOD());
	}
	
	// computes and memoizes the image at the given LOD (blocking)
	final TiledImage2D getLODImage(final int lod) {
		return this.lodImages.computeIfAbsent(lod, l -> ZipSlideViewer.tiled(this.image.getLODImage(lod)));
	}
	
	/**
	 * Draws all cached tiles of {@code lodImage} that intersect the (rotated)
	 * viewport; tiles not yet in the cache are simply skipped.
	 */
	final void drawTiles(final Graphics2D g, final TiledImage2D lodImage) {
		final int lod = lodImage.getLOD();
		final Map<Point, BufferedImage> tiles = this.getCacheCopy(lod);
		final double x0 = this.getWidth() / 2.0;
		final double y0 = this.getHeight() / 2.0;
		final double r = sqrt(square(x0) + square(y0));
		final int lodImageWidth = lodImage.getWidth();
		final int lodImageHeight = lodImage.getHeight();
		final double lodScale = this.getScale() / pow(2.0, -lod);
		final double lodCenterX = this.getCenter().x * pow(2.0, -lod);
		final double lodCenterY = this.getCenter().y * pow(2.0, -lod);
		final double visibleLeft = max(0.0, lodCenterX - r / lodScale);
		final double visibleTop = max(0.0, lodCenterY - r / lodScale);
		final double visibleWidth = min(lodImageWidth, lodCenterX + r / lodScale) - visibleLeft;
		final double visibleHeight = min(lodImageHeight, lodCenterY + r / lodScale) - visibleTop;
		final int optimalTileWidth = lodImage.getOptimalTileWidth();
		final int optimalTileHeight = lodImage.getOptimalTileHeight();
		final AffineTransform savedTransform = g.getTransform();
		
		g.rotate(this.getAngle(), x0, y0);
		
		for (double y = quantize(visibleTop, optimalTileHeight); y < visibleTop + visibleHeight; y += optimalTileHeight) {
			for (double x = quantize(visibleLeft, optimalTileWidth); x < visibleLeft + visibleWidth; x += optimalTileWidth) {
				final Point tileXY = new Point((int) x, (int) y);
				final int actualTileWidth = min(lodImageWidth - tileXY.x, optimalTileWidth);
				final int actualTileHeight = min(lodImageHeight - tileXY.y, optimalTileHeight);
				final BufferedImage tile = tiles.get(tileXY);
				
				if (tile != null) {
					g.drawImage(tile,
							(int) (x0 - lodCenterX * lodScale + x * lodScale),
							(int) (y0 - lodCenterY * lodScale + y * lodScale),
							(int) ceil(actualTileWidth * lodScale),
							(int) ceil(actualTileHeight * lodScale),
							null);
				}
			}
		}
		
		g.setTransform(savedTransform);
	}
	
	final Map<Point, BufferedImage> getCache(final int lod) {
		return this.cache.computeIfAbsent(lod, l -> new HashMap<>());
	}
	
	// snapshot of the per-LOD cache, safe to iterate without holding its lock
	final Map<Point, BufferedImage> getCacheCopy(final int lod) {
		final Map<Point, BufferedImage> cache = this.getCache(lod);
		
		synchronized (cache) {
			return new HashMap<>(cache);
		}
	}
	
	final void updateCenter(final double deltaX, final double deltaY) {
		/*
		 * The drag delta is expressed in screen space; apply the inverse rotation
		 * to convert it to image space:
		 *
		 * M = [cos(a) -sin(a)]
		 *     [sin(a)  cos(a)]
		 * 
		 * M^-1 = [ cos(a) sin(a)]
		 *        [-sin(a) cos(a)]
		 * 
		 * M^-1 * [dx] = [ dx * cos(a) + dy * sin(a)]
		 *        [dy]   [-dx * sin(a) + dy * cos(a)]
		 */
		this.center.x += deltaX * cos(this.angle) + deltaY * sin(this.angle);
		this.center.y += -deltaX * sin(this.angle) + deltaY * cos(this.angle);
		this.updateNeeded.set(true);
		this.repaint();
	}
	
	final void setScale(final double scale) {
		this.scale = scale;
		this.updateNeeded.set(true);
	}
	
	final void updateScale(final double factor) {
		this.scale *= factor;
		this.updateNeeded.set(true);
		this.repaint();
	}
	
	final void setAngle(final double angle) {
		this.angle = angle;
		this.updateNeeded.set(true);
	}
	
	final void updateAngle(final double deltaRadians) {
		this.angle += deltaRadians;
		this.updateNeeded.set(true);
		this.repaint();
	}
	
	/**
	 * {@value}.
	 */
	private static final long serialVersionUID = -8101322100984141200L;
	
	/**
	 * Rounds {@code value} down to the nearest multiple of {@code q}
	 * (callers only pass non-negative values).
	 */
	public static final double quantize(final double value, final double q) {
		return q * (int) (value / q);
	}
	
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sling.testing.mock.osgi;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import java.io.File;
import java.util.Collection;
import java.util.Dictionary;
import java.util.Hashtable;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.runners.MockitoJUnitRunner;
import org.osgi.framework.Bundle;
import org.osgi.framework.BundleContext;
import org.osgi.framework.BundleEvent;
import org.osgi.framework.BundleListener;
import org.osgi.framework.Constants;
import org.osgi.framework.Filter;
import org.osgi.framework.InvalidSyntaxException;
import org.osgi.framework.ServiceEvent;
import org.osgi.framework.ServiceFactory;
import org.osgi.framework.ServiceListener;
import org.osgi.framework.ServiceReference;
import org.osgi.framework.ServiceRegistration;
@RunWith(MockitoJUnitRunner.class)
public class MockBundleContextTest {

    private BundleContext bundleContext;

    @Before
    public void setUp() {
        bundleContext = MockOsgi.newBundleContext();
    }

    @After
    public void tearDown() {
        MockOsgi.shutdown(bundleContext);
    }

    @Test
    public void testBundle() {
        assertNotNull(bundleContext.getBundle());
    }

    // Registers three services (two under String, two under Integer) and verifies
    // lookup order: higher ranking first, then lower service id (registration order).
    @Test
    public void testServiceRegistration() throws InvalidSyntaxException {
        // prepare test services
        String[] clazzes1 = new String[] { String.class.getName(), Integer.class.getName() };
        Object service1 = new Object();
        Dictionary<String, Object> properties2 = ranking(null);
        ServiceRegistration reg1 = bundleContext.registerService(clazzes1, service1, properties2);

        String clazz2 = String.class.getName();
        Object service2 = new Object();
        Dictionary<String, Object> properties1 = ranking(null);
        ServiceRegistration reg2 = bundleContext.registerService(clazz2, service2, properties1);

        String clazz3 = Integer.class.getName();
        Object service3 = new Object();
        Dictionary<String, Object> properties3 = ranking(-100);
        ServiceRegistration reg3 = bundleContext.registerService(clazz3, service3, properties3);

        // test get service references
        ServiceReference<?> refString = bundleContext.getServiceReference(String.class.getName());
        assertSame(reg1.getReference(), refString);

        ServiceReference<?> refInteger = bundleContext.getServiceReference(Integer.class.getName());
        assertSame(reg1.getReference(), refInteger);

        ServiceReference<?>[] refsString = bundleContext.getServiceReferences(String.class.getName(), null);
        assertEquals(2, refsString.length);
        assertSame(reg1.getReference(), refsString[0]);
        assertSame(reg2.getReference(), refsString[1]);

        Collection<ServiceReference<String>> refColString = bundleContext.getServiceReferences(String.class, null);
        assertEquals(2, refColString.size());
        assertSame(reg1.getReference(), refColString.iterator().next());

        ServiceReference<?>[] refsInteger = bundleContext.getServiceReferences(Integer.class.getName(), null);
        assertEquals(2, refsInteger.length);
        assertSame(reg1.getReference(), refsInteger[0]);
        // reg3 has ranking -100, so it sorts after the default-ranked reg1
        assertSame(reg3.getReference(), refsInteger[1]);

        ServiceReference<?>[] allRefsString = bundleContext.getAllServiceReferences(String.class.getName(), null);
        assertArrayEquals(refsString, allRefsString);

        // test get services
        assertSame(service1, bundleContext.getService(refsString[0]));
        assertSame(service2, bundleContext.getService(refsString[1]));
        assertSame(service1, bundleContext.getService(refInteger));

        // unget does nothing
        bundleContext.ungetService(refsString[0]);
        bundleContext.ungetService(refsString[1]);
        bundleContext.ungetService(refInteger);
    }

    // Verifies that a ServiceFactory registration resolves to the factory-produced instance.
    @Test
    public void testServiceFactoryRegistration() throws InvalidSyntaxException {
        // prepare test services
        Class<String> clazz = String.class;
        final String service = "abc";
        Dictionary<String, Object> properties1 = ranking(null);
        ServiceRegistration reg = bundleContext.registerService(clazz, new ServiceFactory<String>() {
            @Override
            public String getService(Bundle bundle, ServiceRegistration<String> registration) {
                return service;
            }
            @Override
            public void ungetService(Bundle bundle, ServiceRegistration<String> registration, String service) {
                // do nothing
            }
        }, properties1);

        ServiceReference<String> ref = bundleContext.getServiceReference(clazz);
        assertNotNull(ref);
        assertSame(reg.getReference(), ref);
        assertSame(service, bundleContext.getService(ref));
        bundleContext.ungetService(ref);
    }

    // The array-returning API returns null when nothing matches; the typed
    // collection API returns an empty collection instead.
    @Test
    public void testNoServiceReferences() throws InvalidSyntaxException {
        ServiceReference<?>[] refs = bundleContext.getServiceReferences(String.class.getName(), null);
        assertNull(refs);

        Collection<ServiceReference<String>> refCol = bundleContext.getServiceReferences(String.class, null);
        assertNotNull(refCol);
        assertTrue(refCol.isEmpty());
    }

    @Test
    public void testServiceUnregistration() {
        // prepare test services
        String clazz1 = String.class.getName();
        Object service1 = new Object();
        Dictionary<String, Object> properties1 = ranking(null);
        ServiceRegistration reg1 = bundleContext.registerService(clazz1, service1, properties1);

        assertNotNull(bundleContext.getServiceReference(clazz1));

        reg1.unregister();

        assertNull(bundleContext.getServiceReference(clazz1));
    }

    @Test
    public void testGetBundles() throws Exception {
        assertEquals(0, bundleContext.getBundles().length);
    }

    // Service listeners must be notified on registration.
    @Test
    public void testServiceListener() throws Exception {
        ServiceListener serviceListener = mock(ServiceListener.class);
        bundleContext.addServiceListener(serviceListener);

        // prepare test services
        String clazz1 = String.class.getName();
        Object service1 = new Object();
        bundleContext.registerService(clazz1, service1, null);

        verify(serviceListener).serviceChanged(any(ServiceEvent.class));

        bundleContext.removeServiceListener(serviceListener);
    }

    @Test
    public void testBundleListener() throws Exception {
        BundleListener bundleListener = mock(BundleListener.class);
        BundleEvent bundleEvent = mock(BundleEvent.class);

        bundleContext.addBundleListener(bundleListener);

        MockOsgi.sendBundleEvent(bundleContext, bundleEvent);
        verify(bundleListener).bundleChanged(bundleEvent);

        bundleContext.removeBundleListener(bundleListener);
    }

    @Test
    public void testFrameworkListener() throws Exception {
        // ensure that listeners can be called (although they are not expected
        // to do anything)
        bundleContext.addFrameworkListener(null);
        bundleContext.removeFrameworkListener(null);
    }

    @Test
    public void testGetProperty() {
        assertNull(bundleContext.getProperty("anyProperty"));
    }

    @Test
    public void testObjectClassFilterMatches() throws InvalidSyntaxException {
        Filter filter = bundleContext.createFilter("(" + Constants.OBJECTCLASS + "=" + Integer.class.getName() + ")");
        ServiceRegistration serviceRegistration = bundleContext.registerService(Integer.class.getName(), Integer.valueOf(1), null);
        assertTrue(filter.match(serviceRegistration.getReference()));
    }

    @Test
    public void testObjectClassFilterDoesNotMatch() throws InvalidSyntaxException {
        Filter filter = bundleContext.createFilter("(" + Constants.OBJECTCLASS + "=" + Integer.class.getName() + ")");
        ServiceRegistration serviceRegistration = bundleContext.registerService(Long.class.getName(), Long.valueOf(1), null);
        assertFalse(filter.match(serviceRegistration.getReference()));
    }

    // The mock data area is a real directory tree on disk.
    @Test
    public void testGetDataFile() {
        File rootFile = bundleContext.getDataFile("");
        assertNotNull(rootFile);

        File childFile = bundleContext.getDataFile("child");
        assertNotNull(childFile);

        assertEquals(childFile.getParentFile(), rootFile);
    }

    @Test
    public void testSystemBundleById() {
        Bundle systemBundle = bundleContext.getBundle(Constants.SYSTEM_BUNDLE_ID);
        assertNotNull(systemBundle);
        assertEquals(Constants.SYSTEM_BUNDLE_ID, systemBundle.getBundleId());
        assertEquals(Constants.SYSTEM_BUNDLE_SYMBOLICNAME, systemBundle.getSymbolicName());
        assertEquals(Constants.SYSTEM_BUNDLE_LOCATION, systemBundle.getLocation());
    }

    @Test
    public void testSystemBundleByLocation() {
        Bundle systemBundle = bundleContext.getBundle(Constants.SYSTEM_BUNDLE_LOCATION);
        assertNotNull(systemBundle);
        assertEquals(Constants.SYSTEM_BUNDLE_ID, systemBundle.getBundleId());
        assertEquals(Constants.SYSTEM_BUNDLE_SYMBOLICNAME, systemBundle.getSymbolicName());
        assertEquals(Constants.SYSTEM_BUNDLE_LOCATION, systemBundle.getLocation());
    }

    @Test
    public void testGetServiceOrderWithRanking() {
        bundleContext.registerService(String.class, "service1", ranking(10));
        bundleContext.registerService(String.class, "service2", ranking(20));
        bundleContext.registerService(String.class, "service3", ranking(5));

        // should return service with highest ranking
        ServiceReference<String> ref = bundleContext.getServiceReference(String.class);
        String service = bundleContext.getService(ref);
        assertEquals("service2", service);

        bundleContext.ungetService(ref);
    }

    @Test
    public void testGetServiceOrderWithoutRanking() {
        bundleContext.registerService(String.class, "service1", ranking(null));
        bundleContext.registerService(String.class, "service2", ranking(null));
        bundleContext.registerService(String.class, "service3", ranking(null));

        // should return service with lowest service id = which was registered first
        ServiceReference<String> ref = bundleContext.getServiceReference(String.class);
        String service = bundleContext.getService(ref);
        assertEquals("service1", service);

        bundleContext.ungetService(ref);
    }

    // Builds service properties with the given ranking, or empty properties when null.
    private static Dictionary<String, Object> ranking(final Integer serviceRanking) {
        Dictionary<String, Object> props = new Hashtable<String, Object>();
        if (serviceRanking != null) {
            props.put(Constants.SERVICE_RANKING, serviceRanking);
        }
        return props;
    }

}
| |
package nam.model.transport;
import java.io.Serializable;
import javax.enterprise.context.SessionScoped;
import javax.inject.Inject;
import javax.inject.Named;
import org.apache.commons.lang.StringUtils;
import org.aries.runtime.BeanContext;
import org.aries.ui.AbstractPageManager;
import org.aries.ui.AbstractWizardPage;
import org.aries.ui.Breadcrumb;
import org.aries.util.NameUtil;
import nam.model.Transport;
import nam.model.util.TransportUtil;
import nam.ui.design.SelectionContext;
@SessionScoped
@Named("transportPageManager")
public class TransportPageManager extends AbstractPageManager<Transport> implements Serializable {

	@Inject
	private TransportWizard transportWizard;

	@Inject
	private TransportDataManager transportDataManager;

	@Inject
	private TransportListManager transportListManager;

	@Inject
	private TransportRecord_OverviewSection transportOverviewSection;

	@Inject
	private TransportRecord_IdentificationSection transportIdentificationSection;

	@Inject
	private TransportRecord_ConfigurationSection transportConfigurationSection;

	@Inject
	private TransportRecord_DocumentationSection transportDocumentationSection;

	@Inject
	private SelectionContext selectionContext;

	public TransportPageManager() {
		initializeSections();
		initializeDefaultView();
	}

	/** Refreshes local state and member lists using the default "transport" scope. */
	public void refresh() {
		refresh("transport");
	}

	public void refreshLocal() {
		refreshLocal("transport");
	}

	public void refreshMembers() {
		refreshMembers("transport");
	}

	public void refresh(String scope) {
		refreshLocal(scope);
		refreshMembers(scope);
	}

	public void refreshLocal(String scope) {
		transportDataManager.setScope(scope);
		transportListManager.refresh();
	}

	public void refreshMembers(String scope) {
		transportListManager.refresh();
	}

	// --- page URL accessors ---

	public String getTransportListPage() {
		return "/nam/model/transport/transportListPage.xhtml";
	}

	public String getTransportTreePage() {
		return "/nam/model/transport/transportTreePage.xhtml";
	}

	public String getTransportSummaryPage() {
		return "/nam/model/transport/transportSummaryPage.xhtml";
	}

	public String getTransportRecordPage() {
		return "/nam/model/transport/transportRecordPage.xhtml";
	}

	public String getTransportWizardPage() {
		return "/nam/model/transport/transportWizardPage.xhtml";
	}

	public String getTransportManagementPage() {
		return "/nam/model/transport/transportManagementPage.xhtml";
	}

	// --- page initializers: each sets up breadcrumbs and the selection context,
	// then returns the target page URL for navigation ---

	public String initializeTransportListPage() {
		String pageLevelKey = "transportList";
		clearBreadcrumbs(pageLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportManagementPage()");
		String url = getTransportListPage();
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		sections.clear();
		return url;
	}

	public String initializeTransportTreePage() {
		String pageLevelKey = "transportTree";
		clearBreadcrumbs(pageLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportTreePage()");
		String url = getTransportTreePage();
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		sections.clear();
		return url;
	}

	public String initializeTransportSummaryPage(Transport transport) {
		String pageLevelKey = "transportSummary";
		clearBreadcrumbs(pageLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportSummaryPage()");
		String url = getTransportSummaryPage();
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		sections.clear();
		return url;
	}

	public String initializeTransportRecordPage() {
		// the record page works on the currently selected transport
		Transport transport = selectionContext.getSelection("transport");
		String transportName = TransportUtil.getLabel(transport);
		String pageLevelKey = "transportRecord";
		clearBreadcrumbs(pageLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportManagementPage()");
		addBreadcrumb(pageLevelKey, transportName, "showTransportRecordPage()");
		String url = getTransportRecordPage();
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		initializeDefaultView();
		sections.clear();
		return url;
	}

	/** Prepares the wizard in creation mode (no Overview section for a new record). */
	public String initializeTransportCreationPage(Transport transport) {
		setPageTitle("New "+getTransportLabel(transport));
		setPageIcon("/icons/nam/NewTransport16.gif");
		setSectionTitle("Transport Identification");
		transportWizard.setNewMode(true);
		String pageLevelKey = "transport";
		String wizardLevelKey = "transportWizard";
		clearBreadcrumbs(pageLevelKey);
		clearBreadcrumbs(wizardLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportManagementPage()");
		addBreadcrumb(pageLevelKey, new Breadcrumb("New Transport", "showTransportWizardPage()"));
		transportIdentificationSection.setOwner("transportWizard");
		transportConfigurationSection.setOwner("transportWizard");
		transportDocumentationSection.setOwner("transportWizard");
		sections.clear();
		sections.add(transportIdentificationSection);
		sections.add(transportConfigurationSection);
		sections.add(transportDocumentationSection);
		String url = getTransportWizardPage() + "?section=Identification";
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		//selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		refresh();
		return url;
	}

	/** Prepares the wizard in update mode (includes the Overview section). */
	public String initializeTransportUpdatePage(Transport transport) {
		setPageTitle(getTransportLabel(transport));
		setPageIcon("/icons/nam/Transport16.gif");
		setSectionTitle("Transport Overview");
		String transportName = TransportUtil.getLabel(transport);
		transportWizard.setNewMode(false);
		String pageLevelKey = "transport";
		String wizardLevelKey = "transportWizard";
		clearBreadcrumbs(pageLevelKey);
		clearBreadcrumbs(wizardLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportManagementPage()");
		addBreadcrumb(pageLevelKey, new Breadcrumb(transportName, "showTransportWizardPage()"));
		transportOverviewSection.setOwner("transportWizard");
		transportIdentificationSection.setOwner("transportWizard");
		transportConfigurationSection.setOwner("transportWizard");
		transportDocumentationSection.setOwner("transportWizard");
		sections.clear();
		sections.add(transportOverviewSection);
		sections.add(transportIdentificationSection);
		sections.add(transportConfigurationSection);
		sections.add(transportDocumentationSection);
		String url = getTransportWizardPage() + "?section=Overview";
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		//selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		refresh();
		return url;
	}

	public String initializeTransportManagementPage() {
		setPageTitle("Transports");
		setPageIcon("/icons/nam/Transport16.gif");
		String pageLevelKey = "transportManagement";
		clearBreadcrumbs(pageLevelKey);
		addBreadcrumb(pageLevelKey, "Top", "showMainPage()");
		addBreadcrumb(pageLevelKey, "Transports", "showTransportManagementPage()");
		String url = getTransportManagementPage();
		selectionContext.setCurrentArea("transport");
		selectionContext.setSelectedArea(pageLevelKey);
		selectionContext.setMessageDomain(pageLevelKey);
		selectionContext.resetOrigin();
		selectionContext.setUrl(url);
		initializeDefaultView();
		sections.clear();
		refresh();
		return url;
	}

	public void initializeDefaultView() {
		setSectionType("transport");
		setSectionName("Overview");
		setSectionTitle("Overview of Transports");
		setSectionIcon("/icons/nam/Overview16.gif");
	}

	public String initializeTransportSummaryView(Transport transport) {
		//String viewTitle = getTransportLabel(transport);
		//String currentArea = selectionContext.getCurrentArea();
		setSectionType("transport");
		setSectionName("Summary");
		setSectionTitle("Summary of Transport Records");
		setSectionIcon("/icons/nam/Transport16.gif");
		String viewLevelKey = "transportSummary";
		clearBreadcrumbs(viewLevelKey);
		addBreadcrumb(viewLevelKey, "Top", "showMainPage()");
		addBreadcrumb(viewLevelKey, "Transports", "showTransportManagementPage()");
		selectionContext.setMessageDomain(viewLevelKey);
		sections.clear();
		return null;
	}

	/**
	 * Builds a display label such as "Foo Transport", falling back to the raw
	 * transport name when no label is available.
	 */
	protected String getTransportLabel(Transport transport) {
		String label = "Transport";
		String name = TransportUtil.getLabel(transport);
		// BUG FIX: the fallback used to call TransportUtil.getLabel(transport)
		// again (which had just returned null), making this branch a no-op;
		// the guard shows the raw name was intended as the fallback
		if (name == null && transport.getName() != null)
			name = transport.getName();
		if (name != null && !name.isEmpty())
			label = name + " " + label;
		return label;
	}

	protected void updateState() {
		AbstractWizardPage<Transport> page = transportWizard.getPage();
		if (page != null)
			setSectionTitle("Transport " + page.getName());
	}

	protected void updateState(Transport transport) {
		String transportName = TransportUtil.getLabel(transport);
		setSectionTitle(transportName + " Transport");
	}

}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.editor.impl.softwrap.mapping;
import com.intellij.mock.MockFoldRegion;
import com.intellij.openapi.editor.*;
import com.intellij.openapi.editor.ex.DocumentEx;
import com.intellij.openapi.editor.ex.EditorEx;
import com.intellij.openapi.editor.ex.FoldingModelEx;
import com.intellij.openapi.editor.ex.SoftWrapModelEx;
import com.intellij.openapi.editor.impl.TextChangeImpl;
import com.intellij.openapi.editor.impl.softwrap.*;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.TextRange;
import gnu.trove.TIntHashSet;
import org.jetbrains.annotations.Nullable;
import org.jmock.Expectations;
import org.jmock.Mockery;
import org.jmock.api.Invocation;
import org.jmock.integration.junit4.JUnit4Mockery;
import org.jmock.lib.action.CustomAction;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* @author Denis Zhdanov
* @since 07/07/2010
*/
public class CachingSoftWrapDataMapperTest {
/**
 * Orders expected-data entries by logical position (line first, then column).
 * <p>
 * When exactly one of the two entries lies in folded space and both resolve to the same
 * soft wrap-unaware visual column, the folded entry is ordered after the non-folded one so
 * that lookups by logical position from folded text resolve to the folding region start.
 */
private static final Comparator<DataEntry> LOGICAL_POSITIONS_COMPARATOR = new Comparator<DataEntry>() {
  @Override
  public int compare(DataEntry o1, DataEntry o2) {
    LogicalPosition logical1 = o1.logical;
    LogicalPosition logical2 = o2.logical;
    if (logical1.line != logical2.line) {
      return logical1.line - logical2.line;
    }
    // There is a possible case that multiple logical positions match to the same visual position (e.g. logical
    // positions for folded text match to the same visual position). We want to match to the logical position of
    // folding region start if we search by logical position from folded text.
    // Fixed: the previous condition used '&&' (both folded) with a constant 'return 1' outcome — an
    // inconsistent comparator (compare(a,b) == compare(b,a) == 1) — and compared an asymmetric column
    // expression ('logical2.foldingColumnDiff' without 'logical2.column +').
    if ((o1.foldedSpace ^ o2.foldedSpace)
        && logical1.column + logical1.foldingColumnDiff == logical2.column + logical2.foldingColumnDiff) {
      return o1.foldedSpace ? 1 : -1;
    }
    return logical1.column - logical2.column;
  }
};
/**
 * Orders expected-data entries by document offset, pushing virtual-space and tab-interior
 * entries after their "real" siblings so that lookups by offset resolve to the real symbol.
 */
private static final Comparator<DataEntry> OFFSETS_COMPARATOR = new Comparator<DataEntry>() {
  @Override
  public int compare(DataEntry o1, DataEntry o2) {
    if (o1.offset != o2.offset) {
      return o1.offset - o2.offset;
    }
    // There are numerous situations when multiple visual positions share the same offset (e.g. all soft wrap-introduced virtual
    // spaces share offset with the first document symbol after soft wrap or all virtual spaces after line end share the same offset
    // as the last line symbol). We want to ignore such positions during lookup by offset.
    if (o1.virtualSpace ^ o2.virtualSpace) {
      return o1.virtualSpace ? 1 : -1;
    }
    // Same tie-break for the trailing visual columns of a multi-column tab symbol.
    if (o1.insideTab ^ o2.insideTab) {
      return o1.insideTab ? 1 : -1;
    }
    return 0;
  }
};
// Markup tokens recognized inside test documents: the enclosed characters form a soft wrap
// or a collapsed fold region respectively.
private static final String SOFT_WRAP_START_MARKER = "<WRAP>";
private static final String SOFT_WRAP_END_MARKER = "</WRAP>";
private static final String FOLDING_START_MARKER = "<FOLD>";
private static final String FOLDING_END_MARKER = "</FOLD>";
// Fixed metrics used by the mock editor: tab width in columns, symbol width in pixels and
// the width reserved for the 'after soft wrap' drawing.
private static final int TAB_SIZE = 4;
private static final int SPACE_SIZE = 7;
private static final int SOFT_WRAP_DRAWING_WIDTH = 11;
/** Holds expected mappings between visual and logical positions and offset. */
private final List<DataEntry> myExpectedData = new ArrayList<DataEntry>();
// Start/end offsets of every completed document line, in order.
private final List<TextRange> myLineRanges = new ArrayList<TextRange>();
/** Holds document offsets that are considered to be folded. */
private final TIntHashSet myFoldedOffsets = new TIntHashSet();
// Collapsed fold regions registered while parsing the test document.
private final List<FoldRegion> myFoldRegions = new ArrayList<FoldRegion>();
// Object under test.
private CachingSoftWrapDataMapper myMapper;
// jMock context plus the mocked editor environment fed to the mapper.
private Mockery myMockery;
private EditorEx myEditor;
private DocumentEx myDocument;
private StringBuilder myChars;
private SoftWrapsStorage myStorage;
private SoftWrapModelEx mySoftWrapModel;
private FoldingModelEx myFoldingModel;
private MockEditorTextRepresentationHelper myRepresentationHelper;
/**
 * Creates the mocked editor environment (document, settings, soft wrap model, folding model,
 * painter) and wires every mock call back to the expected data collected while parsing the
 * test document, then instantiates the mapper under test.
 */
@Before
public void setUp() {
  myMockery = new JUnit4Mockery();
  myEditor = myMockery.mock(EditorEx.class);
  myDocument = myMockery.mock(DocumentEx.class);
  myChars = new StringBuilder();
  myStorage = new SoftWrapsStorage();
  mySoftWrapModel = myMockery.mock(SoftWrapModelEx.class);
  myFoldingModel = myMockery.mock(FoldingModelEx.class);
  final EditorSettings settings = myMockery.mock(EditorSettings.class);
  final Project project = myMockery.mock(Project.class);
  final SoftWrapPainter painter = myMockery.mock(SoftWrapPainter.class);
  myRepresentationHelper = new MockEditorTextRepresentationHelper(myChars, SPACE_SIZE, TAB_SIZE);
  myMockery.checking(new Expectations() {{
    // Document
    allowing(myEditor).getDocument(); will(returnValue(myDocument));
    allowing(myDocument).getLineCount(); will(new CustomAction("getLineCount()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myLineRanges.size();
      }
    });
    allowing(myDocument).getLineNumber(with(any(int.class))); will(new CustomAction("getLineNumber()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return getLineNumber((Integer)invocation.getParameter(0));
      }
    });
    allowing(myDocument).getLineStartOffset(with(any(int.class))); will(new CustomAction("getLineStart()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return getLineStartOffset((Integer)invocation.getParameter(0));
      }
    });
    allowing(myDocument).getLineEndOffset(with(any(int.class))); will(new CustomAction("getLineEnd()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return getLineEndOffset((Integer)invocation.getParameter(0));
      }
    });
    // Settings.
    allowing(myEditor).getSettings(); will(returnValue(settings));
    allowing(settings).isUseSoftWraps(); will(returnValue(true));
    allowing(settings).getTabSize(project); will(returnValue(TAB_SIZE));
    allowing(settings).isWhitespacesShown(); will(returnValue(true));
    allowing(myEditor).getProject(); will(returnValue(project));
    // Soft wraps.
    allowing(myEditor).getSoftWrapModel(); will(returnValue(mySoftWrapModel));
    allowing(mySoftWrapModel).getSoftWrap(with(any(int.class))); will(new CustomAction("getSoftWrap") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return getSoftWrap((Integer)invocation.getParameter(0));
      }
    });
    allowing(mySoftWrapModel).getEditorTextRepresentationHelper(); will(returnValue(myRepresentationHelper));
    // Folding.
    allowing(myEditor).getFoldingModel(); will(returnValue(myFoldingModel));
    // Fixed: 'returnValue(true)' was previously a bare statement whose result was discarded
    // because it was not passed to will(), so the expectation returned the default (false).
    allowing(myFoldingModel).isFoldingEnabled(); will(returnValue(true));
    allowing(myFoldingModel).setFoldingEnabled(with(any(boolean.class)));
    allowing(myFoldingModel).isOffsetCollapsed(with(any(int.class))); will(new CustomAction("isOffsetCollapsed()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myFoldedOffsets.contains((Integer)invocation.getParameter(0));
      }
    });
    allowing(myFoldingModel).getCollapsedRegionAtOffset(with(any(int.class))); will(new CustomAction("getCollapsedRegionAtOffset()") {
      @Nullable
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return getCollapsedFoldRegion((Integer)invocation.getParameter(0));
      }
    });
    allowing(myFoldingModel).fetchTopLevel(); will(new CustomAction("fetchTopLevel()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myFoldRegions.toArray(new FoldRegion[myFoldRegions.size()]);
      }
    });
    // Soft wrap-unaware conversions.
    allowing(myEditor).logicalToVisualPosition(with(any(LogicalPosition.class))); will(new CustomAction("logical2visual()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return logicalToVisual((LogicalPosition)invocation.getParameter(0));
      }
    });
    allowing(myEditor).logicalPositionToOffset(with(any(LogicalPosition.class))); will(new CustomAction("logical2offset()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return logicalToOffset((LogicalPosition)invocation.getParameter(0));
      }
    });
    allowing(myEditor).offsetToLogicalPosition(with(any(int.class))); will(new CustomAction("offset2logical()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return offsetToLogical((Integer)invocation.getParameter(0));
      }
    });
    allowing(myEditor).offsetToLogicalPosition(with(any(int.class)), with(equal(false))); will(new CustomAction("offset2logical()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return offsetToSoftWrapUnawareLogical((Integer)invocation.getParameter(0));
      }
    });
    // Soft wrap painter.
    allowing(painter).getMinDrawingWidth(SoftWrapDrawingType.AFTER_SOFT_WRAP); will(returnValue(SOFT_WRAP_DRAWING_WIDTH));
  }});
  myMapper = new CachingSoftWrapDataMapper(myEditor, myStorage);
}
/** Verifies after each test that all jMock expectations were satisfied. */
@After
public void checkExpectations() {
  myMockery.assertIsSatisfied();
}
// -------------------------------------------------------------------------------------------
// Data-driven scenarios. Each document string below is parsed by init(): <WRAP>...</WRAP>
// encloses the characters that form a soft wrap and <FOLD>...</FOLD> encloses a collapsed
// fold region. The parser records every expected offset/logical/visual mapping, and test()
// then checks the mapper under test against those expectations.
// -------------------------------------------------------------------------------------------
@Test
public void noSoftWrapsAndFolding() {
  String document =
    "class Test { \n" +
    " public void foo() {} \n" +
    " \n" +
    "}";
  test(document);
}
@Test
public void singleSoftWrap() {
  String document =
    "class Test { \n" +
    " public void <WRAP>\n" +
    " </WRAP>foo() {\n" +
    " } \n" +
    " \n" +
    "}";
  test(document);
}
@Test
public void multipleSoftWrappedLogicalLines() {
  String document =
    "public class Test {\n" +
    " public void foo(int[] data) {\n" +
    " bar(data[0], <WRAP>\n" +
    " </WRAP>data[1] <WRAP>\n" +
    " </WRAP>data[2] \n" +
    " data[3], \n" +
    " data[4], <WRAP>\n" +
    " </WRAP>data[5] <WRAP>\n" +
    " </WRAP>data[6]); \n" +
    " }\n" +
    " public void bar(int ... i) {\n" +
    " }\n" +
    "}";
  test(document);
}
@Test
public void singleLineFolding() {
  String document =
    "class Test {\n" +
    " Map<String, Integer> map = new HashMap<FOLD><String, Integer></FOLD>(); \n" +
    " \n" +
    "}";
  test(document);
}
@Test
public void twoLinesFolding() {
  String document =
    "class Test {\n" +
    " public void foo() {<FOLD>\n" +
    " }</FOLD>\n" +
    "}";
  test(document);
}
@Test
public void threeLinesFolding() {
  String document =
    "class Test {\n" +
    " public void foo() {<FOLD>\n" +
    " int i = 1; \n" +
    " }</FOLD>\n" +
    "}";
  test(document);
}
@Test
public void softWrappedSingleLineFolding() {
  String document =
    "class Test {<WRAP>\n" +
    " </WRAP><FOLD>public void foo() {}</FOLD> \n" +
    " \n" +
    "}";
  test(document);
}
@Test
public void softWrappedMultiLineLineFolding() {
  String document =
    "class Test {<WRAP>\n" +
    " </WRAP><FOLD>public void foo() {\n" +
    " }</FOLD> \n" +
    "}";
  test(document);
}
@Test
public void multipleFoldRegionsAfterSingleSoftWrap() {
  String document =
    "class Test {<WRAP>\n" +
    " </WRAP><FOLD>public void foo() {\n" +
    " }</FOLD> <FOLD>// comment</FOLD> \n" +
    "}";
  test(document);
}
@Test
public void softWrapAndFoldedLines() {
  String document =
    "public class Test {\n" +
    " public void foo(int[] data) {\n" +
    " bar(data[0] <WRAP>\n" +
    " </WRAP>data[1], <WRAP>\n" +
    " </WRAP>data[2], \n" +
    " data[3], \n" +
    " <FOLD>data[4], \n" +
    " data[5], \n" +
    " data[6], </FOLD> \n" +
    " data[7], \n" +
    " data[8] <WRAP>\n" +
    " </WRAP>data[9], <WRAP>\n" +
    " </WRAP>data[10]); \n" +
    " }\n" +
    " public void bar(int ... i) {\n" +
    " }\n" +
    "}";
  test(document);
}
@Test
public void consecutiveFoldRegions() {
  String document =
    "package org;\n" +
    "\n" +
    "import <FOLD>java.util.List;\n" +
    "import java.util.ArrayList;\n" +
    "import java.util.LinkedList;</FOLD>\n" +
    "<FOLD>/**\n" +
    " * @author Vasiliy\n" +
    " */</FOLD>\n" +
    "public class Test {\n" +
    " <FOLD>/**\n" +
    " * Method-level javadoc\n" +
    " */</FOLD>\n" +
    " public void test() {\n" +
    " }" +
    "}";
  test(document);
}
@Test
public void tabSymbolsBeforeSoftWrap() {
  String document =
    "class Test\t\t{<WRAP>\n" +
    " </WRAP> \n}";
  test(document);
}
@Test
public void tabSymbolsAfterSoftWrap() {
  String document =
    "class Test {<WRAP>\n" +
    " </WRAP> \t\t\n" +
    "}";
  test(document);
}
@Test
public void multipleTabsAndSoftWraps() {
  String document =
    "public class \tTest {\n" +
    " public void foo(int[] data) {\n" +
    " bar(data[0], data[1],\t\t <WRAP>\n" +
    " </WRAP>data[2], data[3], <WRAP>\n" +
    " </WRAP>data[4], data[5],\t \t \n" +
    " data[6], data[7],\t \t \n" +
    " data[8], data[9],\t \t <WRAP>\n" +
    " </WRAP>data[10], data[11], <WRAP>\n" +
    " </WRAP>data[12],\t \t data[13]); \n" +
    " }\n" +
    " public void bar(int ... i) {\n" +
    " }\n" +
    "}";
  test(document);
}
@Test
public void tabBeforeFolding() {
  String document =
    "class Test\t \t <FOLD>{\n" +
    " </FOLD> \t\t\n" +
    "}";
  test(document);
}
@Test
public void multipleTabsAndFolding() {
  String document =
    "public class \tTest {\n" +
    " public void foo(int[] data) {\n" +
    " bar(data[0], data[1],\t\t <FOLD>\n" +
    " \t \t </FOLD>data[2], data[3]\n" +
    " data[4], data[5],\t \t \n" +
    " data[6], data[7],\t \t <FOLD>\n" +
    "\t \t </FOLD>); \n" +
    " }\n" +
    " public void bar(int ... i) {\n" +
    " }\n" +
    "}";
  test(document);
}
/**
 * Mimics {@code Document.getLineNumber()}: returns the index of the completed line whose
 * range contains the given offset. An offset past every stored range belongs to the line
 * currently being built (not yet added to {@code myLineRanges}), whose index equals the
 * number of completed lines.
 */
private int getLineNumber(int offset) {
  int line = 0;
  for (TextRange range : myLineRanges) {
    if (offset >= range.getStartOffset() && offset <= range.getEndOffset()) {
      return line;
    }
    line++;
  }
  // Here 'line == myLineRanges.size()', which is exactly the index of the in-progress line.
  // The previous 'return line + 1' skipped one line for offsets beyond the stored ranges.
  return line;
}
/** Mimics {@code Document.getLineStartOffset()} using the collected line ranges. */
private int getLineStartOffset(int line) {
  checkLine(line);
  TextRange range = myLineRanges.get(line);
  return range.getStartOffset();
}
/** Mimics {@code Document.getLineEndOffset()} using the collected line ranges. */
private int getLineEndOffset(int line) {
  checkLine(line);
  TextRange range = myLineRanges.get(line);
  return range.getEndOffset();
}
/** Fails fast when the given line index lies outside the collected line ranges. */
private void checkLine(int line) {
  if (line >= 0 && line < myLineRanges.size()) {
    return;
  }
  throw new AssertionError(String.format("Can't retrieve target data for the given line (%d). Reason - it's not within allowed "
                                         + "bounds ([0; %d])", line, myLineRanges.size() - 1));
}
/** Mimics {@code SoftWrapModel.getSoftWrap()} by delegating to the backing storage. */
@Nullable
private SoftWrap getSoftWrap(int offset) {
  return myStorage.getSoftWrap(offset);
}
/**
 * Mimics {@code FoldingModel.getCollapsedRegionAtOffset()}: returns the registered fold
 * region covering the given offset (start inclusive, end exclusive) or {@code null}.
 */
@Nullable
private FoldRegion getCollapsedFoldRegion(int offset) {
  for (FoldRegion candidate : myFoldRegions) {
    if (offset < candidate.getStartOffset() || offset >= candidate.getEndOffset()) {
      continue;
    }
    return candidate;
  }
  return null;
}
/** Soft wrap-unaware logical-to-visual conversion backed by the pre-computed expected data. */
private VisualPosition logicalToVisual(LogicalPosition position) {
  DataEntry key = new DataEntry(null, position, 0, false);
  DataEntry match = myExpectedData.get(findIndex(key, LOGICAL_POSITIONS_COMPARATOR));
  return toSoftWrapUnawareVisual(match);
}
/** Logical-to-offset conversion backed by the pre-computed expected data. */
private int logicalToOffset(LogicalPosition position) {
  DataEntry key = new DataEntry(null, position, 0, false);
  DataEntry match = myExpectedData.get(findIndex(key, LOGICAL_POSITIONS_COMPARATOR));
  return match.offset;
}
/** Offset-to-logical conversion backed by the pre-computed expected data. */
private LogicalPosition offsetToLogical(int offset) {
  DataEntry key = new DataEntry(null, null, offset, false);
  DataEntry match = myExpectedData.get(findIndex(key, OFFSETS_COMPARATOR));
  return toVisualPositionUnawareLogical(match);
}
/** Soft wrap-unaware offset-to-logical conversion backed by the pre-computed expected data. */
private LogicalPosition offsetToSoftWrapUnawareLogical(int offset) {
  DataEntry key = new DataEntry(null, null, offset, false);
  DataEntry match = myExpectedData.get(findIndex(key, OFFSETS_COMPARATOR));
  return toVisualPositionUnawareLogical(match);
}
/**
 * Performs a binary search for the given key within the expected data and fails with a
 * descriptive error if no matching entry was pre-configured.
 */
private int findIndex(DataEntry key, Comparator<DataEntry> comparator) {
  int index = Collections.binarySearch(myExpectedData, key, comparator);
  if (index >= 0 && index < myExpectedData.size()) {
    return index;
  }
  throw new AssertionError(String.format("Can't find pre-configured data entry for the given key (%s). "
                                         + "Available data: %s", key, myExpectedData));
}
/**
 * Converts the entry's logical position to the visual position a soft wrap-unaware editor
 * would produce (folding adjustments are still applied).
 */
private static VisualPosition toSoftWrapUnawareVisual(DataEntry dataEntry) {
  LogicalPosition source = dataEntry.logical;
  int visualLine = source.line - source.foldedLines;
  int visualColumn = source.column + source.foldingColumnDiff;
  return new VisualPosition(visualLine, visualColumn);
}
/**
 * Converts the entry's logical position to the logical position a soft wrap-unaware editor
 * would derive from the corresponding visual position.
 */
private static LogicalPosition toSoftWrapUnawareLogicalByVisual(DataEntry dataEntry) {
  LogicalPosition source = dataEntry.logical;
  int line = source.line + source.softWrapLinesBeforeCurrentLogicalLine + source.softWrapLinesOnCurrentLogicalLine;
  return new LogicalPosition(line, source.column);
}
/** Strips all soft wrap/folding adjustment fields, keeping only plain line and column. */
private static LogicalPosition toVisualPositionUnawareLogical(DataEntry dataEntry) {
  LogicalPosition source = dataEntry.logical;
  return new LogicalPosition(source.line, source.column);
}
/**
 * Parses the given marked-up document (see {@code init()}) and then verifies every collected
 * expected mapping against the mapper under test: logical-by-visual and logical-by-offset
 * conversions. Throws {@link AssertionError} with a diagnostic message on the first mismatch.
 */
private void test(String documentText) {
  init(documentText);
  for (DataEntry data : myExpectedData) {
    // Check logical by visual.
    LogicalPosition actualLogicalByVisual = myMapper.visualToLogical(data.visual);
    // We don't want to perform the check for logical positions that correspond to the folded space because all of them relate to
    // the same logical position of the folding start.
    if (!data.foldedSpace && !data.insideTab && !equals(data.logical, actualLogicalByVisual)) {
      throw new AssertionError(
        String.format("Detected unmatched logical position by visual (%s). Expected: '%s', actual: '%s'. Calculation was performed "
                      + "against soft wrap-unaware logical: '%s'",
                      data.visual, data.logical, actualLogicalByVisual, toSoftWrapUnawareLogicalByVisual(data))
      );
    }
    // Check logical by offset.
    LogicalPosition actualLogicalByOffset = myMapper.offsetToLogicalPosition(data.offset);
    // We don't want to perform the check for the data that points to soft wrap location here. The reason is that it shares offset
    // with the first document symbol after soft wrap, hence, examination always fails.
    if (!data.virtualSpace && !data.insideTab && !equals(data.logical, actualLogicalByOffset)) {
      throw new AssertionError(
        String.format("Detected unmatched logical position by offset. Expected: '%s', actual: '%s'. Calculation was performed "
                      + "against offset: '%d' and soft wrap-unaware logical: '%s'",
                      data.logical, actualLogicalByOffset, data.offset, toVisualPositionUnawareLogical(data))
      );
    }
    // Check visual by logical.
    //VisualPosition actualVisualByLogical = myMapper.logicalToVisualPosition(toVisualPositionUnawareLogical(data));
    //if (!data.virtualSpace && !actualVisualByLogical.equals(data.visual)) {
    //  throw new AssertionError(
    //    String.format("Detected unmatched visual position by logical. Expected: '%s', actual: '%s'. Calculation was performed "
    //                  + "against logical position: '%s' and soft wrap-unaware visual: '%s'",
    //                  data.visual, actualVisualByLogical, data.logical, toSoftWrapUnawareVisual(data))
    //  );
    //}
  }
}
/**
 * Deep equality for logical positions: besides line/column (covered by {@code equals()}),
 * all soft wrap and folding adjustment fields must match as well.
 */
private static boolean equals(LogicalPosition expected, LogicalPosition actual) {
  if (!expected.equals(actual)) {
    return false;
  }
  if (expected.softWrapLinesBeforeCurrentLogicalLine != actual.softWrapLinesBeforeCurrentLogicalLine) {
    return false;
  }
  if (expected.softWrapLinesOnCurrentLogicalLine != actual.softWrapLinesOnCurrentLogicalLine) {
    return false;
  }
  if (expected.softWrapColumnDiff != actual.softWrapColumnDiff) {
    return false;
  }
  if (expected.foldedLines != actual.foldedLines) {
    return false;
  }
  return expected.foldingColumnDiff == actual.foldingColumnDiff;
}
/**
 * Parses the marked-up test document: registers document-content expectations, then feeds the
 * text character by character into a {@link TestEditorPosition}, which strips the WRAP/FOLD
 * markers, records soft wraps and fold regions and collects the expected mapping entries.
 */
@SuppressWarnings({"AssignmentToForLoopParameter"})
private void init(final String documentText) {
  final TestEditorPosition context = new TestEditorPosition();
  myMockery.checking(new Expectations() {{
    allowing(myDocument).getCharsSequence(); will(new CustomAction("getCharsSequence()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myChars;
      }
    });
    allowing(myDocument).getText(); will(new CustomAction("getCharsSequence()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myChars.toString();
      }
    });
    allowing(myDocument).getTextLength(); will(new CustomAction("getTextLength()") {
      @Override
      public Object invoke(Invocation invocation) throws Throwable {
        return myChars.length();
      }
    });
  }});
  // Consume the document one character at a time; marker sequences are swallowed here and
  // never reach the emulated document content.
  for (int i = 0; i < documentText.length(); i++) {
    if (isSoftWrapStart(documentText, i)) {
      context.onSoftWrapStart();
      i += SOFT_WRAP_START_MARKER.length() - 1; // Subtract 1 because 'i' is incremented by 1 on every iteration
      continue;
    }
    if (isSoftWrapEnd(documentText, i)) {
      context.onSoftWrapEnd();
      i += SOFT_WRAP_END_MARKER.length() - 1; // Subtract 1 because 'i' is incremented by 1 on every iteration
      continue;
    }
    if (isFoldingStart(documentText, i)) {
      context.onFoldingStart();
      i += FOLDING_START_MARKER.length() - 1; // Subtract 1 because 'i' is incremented by 1 on every iteration
      continue;
    }
    if (isFoldingEnd(documentText, i)) {
      context.onFoldingEnd();
      i += FOLDING_END_MARKER.length() - 1; // Subtract 1 because 'i' is incremented by 1 on every iteration
      continue;
    }
    char c = documentText.charAt(i);
    context.onNewSymbol(c);
  }
  // Register the range of the last (unterminated) document line.
  myLineRanges.add(new TextRange(context.logicalLineStartOffset, myChars.length()));
}
/** @return {@code true} if the soft wrap start marker begins at {@code index}. */
private static boolean isSoftWrapStart(String document, int index) {
  return matches(document, index, SOFT_WRAP_START_MARKER);
}
/** @return {@code true} if the soft wrap end marker begins at {@code index}. */
private static boolean isSoftWrapEnd(String document, int index) {
  return matches(document, index, SOFT_WRAP_END_MARKER);
}
/** @return {@code true} if the folding start marker begins at {@code index}. */
private static boolean isFoldingStart(String document, int index) {
  return matches(document, index, FOLDING_START_MARKER);
}
/** @return {@code true} if the folding end marker begins at {@code index}. */
private static boolean isFoldingEnd(String document, int index) {
  return matches(document, index, FOLDING_END_MARKER);
}
/**
 * Checks whether {@code pattern} occurs in {@code document} starting exactly at {@code index}.
 * <p>
 * Uses {@link String#startsWith(String, int)} instead of the previous
 * {@code substring(...).equals(...)}, which allocated a temporary string on every call;
 * {@code startsWith} also returns {@code false} by itself when the pattern would run past
 * the end of the document, so no explicit length guard is needed.
 */
private static boolean matches(String document, int index, String pattern) {
  return document.startsWith(pattern, index);
}
/**
 * Immutable expected-mapping entry: couples a visual position, a logical position and a
 * document offset together with flags that describe the kind of location they denote.
 */
private static class DataEntry {
  public final VisualPosition visual;
  public final LogicalPosition logical;
  public final int offset;
  /** {@code true} for positions located inside collapsed folded space. */
  public final boolean foldedSpace;
  /** {@code true} for virtual positions (soft wrap space or space after line end). */
  public final boolean virtualSpace;
  /** {@code true} for the non-first visual columns occupied by a single tab symbol. */
  public final boolean insideTab;
  /** Creates an entry that is neither virtual space nor inside a tab. */
  DataEntry(VisualPosition visual, LogicalPosition logical, int offset, boolean foldedSpace) {
    this(visual, logical, offset, foldedSpace, false, false);
  }
  DataEntry(VisualPosition visual, LogicalPosition logical, int offset, boolean foldedSpace, boolean virtualSpace, boolean insideTab) {
    this.visual = visual;
    this.logical = logical;
    this.offset = offset;
    this.foldedSpace = foldedSpace;
    this.virtualSpace = virtualSpace;
    this.insideTab = insideTab;
  }
  @Override
  public String toString() {
    return "offset: " + offset + ", logical: " + logical + ", visual: " + visual + ", folded: " + foldedSpace
           + ", virtual: " + virtualSpace;
  }
}
/**
 * Editor position that replays a marked-up test document: as characters are consumed it keeps
 * logical/visual/offset state in sync, feeds soft wrap and fold events to the mapper under
 * test and records every expected mapping into {@code myExpectedData}.
 * <p>
 * NOTE(review): the mutations below are order-sensitive — the mapper is notified at precise
 * points relative to the field updates — so the code is documented but deliberately unchanged.
 */
private class TestEditorPosition extends EditorPosition {
  /** Accumulates the characters that belong to the soft wrap currently being read. */
  private final StringBuilder mySoftWrapBuffer = new StringBuilder();
  /** Snapshot of the position at the start of the current visual line. */
  EditorPosition lineStartPosition;
  boolean insideSoftWrap;
  boolean insideFolding;
  boolean insideTab;
  int logicalLineStartOffset;
  int softWrapStartOffset;
  int softWrapSymbolsOnCurrentVisualLine;
  int foldingStartOffset;
  int foldingStartVisualLine;
  int foldingStartVisualColumn;
  TestEditorPosition() {
    super(myEditor);
    lineStartPosition = clone();
  }
  /** Enters soft wrap mode; subsequent symbols go to {@code mySoftWrapBuffer}. */
  public void onSoftWrapStart() {
    softWrapStartOffset = offset;
    insideSoftWrap = true;
    myMapper.onVisualLineStart(lineStartPosition);
  }
  /** Leaves soft wrap mode and registers the collected soft wrap in the storage. */
  public void onSoftWrapEnd() {
    myStorage.storeOrReplace(
      new SoftWrapImpl(
        new TextChangeImpl('\n' + mySoftWrapBuffer.toString(), softWrapStartOffset),
        mySoftWrapBuffer.length() + 1/* for 'after soft wrap' drawing */,
        (mySoftWrapBuffer.length() * SPACE_SIZE) + SOFT_WRAP_DRAWING_WIDTH
      ));
    mySoftWrapBuffer.setLength(0);
    insideSoftWrap = false;
    x += SOFT_WRAP_DRAWING_WIDTH;
  }
  /** Enters folding mode, remembering where the collapsed region starts. */
  public void onFoldingStart() {
    foldingStartOffset = offset;
    foldingStartVisualLine = visualLine;
    foldingStartVisualColumn = visualColumn;
    insideFolding = true;
    myMapper.onVisualLineStart(lineStartPosition);
  }
  /** Leaves folding mode, registers the fold region and notifies the mapper. */
  public void onFoldingEnd() {
    visualColumn += 3; // For '...' folding
    foldingColumnDiff += 3;
    int prevX = x;
    x += 3 * SPACE_SIZE;
    insideFolding = false;
    MockFoldRegion foldRegion = new MockFoldRegion(foldingStartOffset, offset);
    myFoldRegions.add(foldRegion);
    myMapper.onCollapsedFoldRegion(foldRegion, myRepresentationHelper.toVisualColumnSymbolsNumber(foldingStartOffset, offset, prevX), foldingStartVisualLine);
  }
  /**
   * Consumes a single document character, updating position state and recording the expected
   * mapping entries. Handles three modes: inside folding, inside soft wrap, and plain text.
   */
  public void onNewSymbol(char c) {
    symbol = c;
    addData();
    if (insideFolding) {
      myFoldedOffsets.add(offset);
      onNonSoftWrapSymbol(c);
      if (c == '\n') {
        foldedLines++;
        offset++;
        x = 0;
        softWrapColumnDiff = 0;
        softWrapLinesBefore += softWrapLinesCurrent;
        softWrapLinesCurrent = 0;
        foldingColumnDiff = foldingStartVisualColumn;
      }
      else if (c == '\t') {
        int tabWidthInColumns = myRepresentationHelper.toVisualColumnSymbolsNumber(c, x);
        x += myRepresentationHelper.charWidth(c, x);
        // There is a possible case that single tabulation symbols is shown in more than one visual column at IJ editor.
        // We store data entry only for the first tab column without 'inside tab' flag then.
        insideTab = true;
        for (int i = tabWidthInColumns - 1; i > 0; i--) {
          logicalColumn++;
          addData(false);
        }
        insideTab = false;
        logicalColumn++;
        offset++;
        foldingColumnDiff -= tabWidthInColumns;
      } else {
        logicalColumn++;
        offset++;
        x += myRepresentationHelper.charWidth(c, x);
        foldingColumnDiff--;
      }
      return;
    }
    // Symbol inside soft wrap.
    if (insideSoftWrap) {
      if (c == '\n') {
        myMapper.beforeSoftWrapLineFeed(this);
        // Emulate the situation when the user works with a virtual space after document line end (add such virtual
        // positions two symbols behind the end).
        visualColumn++;
        softWrapColumnDiff++;
        x += SPACE_SIZE;
        addData(true);
        visualColumn++;
        softWrapColumnDiff++;
        x += SPACE_SIZE;
        addData(true);
        visualLine++;
        x = 0;
        softWrapLinesCurrent++;
        softWrapColumnDiff = -logicalColumn - foldingColumnDiff;
        visualColumn = 0;
        myMapper.afterSoftWrapLineFeed(this);
        visualColumn = 1; // For the column reserved for soft wrap sign.
        softWrapColumnDiff++; // For the column reserved for soft wrap sign.
        softWrapSymbolsOnCurrentVisualLine = 0;
      }
      else {
        mySoftWrapBuffer.append(c);
        visualColumn++;
        softWrapColumnDiff++;
        softWrapSymbolsOnCurrentVisualLine++;
        x += myRepresentationHelper.charWidth(c, x);
      }
      return;
    }
    // Symbol outside soft wrap and folding.
    onNonSoftWrapSymbol(c);
    if (c == '\n') {
      visualLine++;
      visualColumn = 0;
      foldingColumnDiff = 0;
      softWrapColumnDiff = 0;
      x = 0;
      softWrapLinesBefore += softWrapLinesCurrent;
      softWrapLinesCurrent = 0;
      softWrapSymbolsOnCurrentVisualLine = 0;
      foldingColumnDiff = 0;
      offset++;
      lineStartPosition.from(this);
    }
    else if (c == '\t') {
      myMapper.onVisualLineStart(lineStartPosition);
      symbolWidthInColumns = myRepresentationHelper.toVisualColumnSymbolsNumber(c, x);
      myMapper.onTabulation(this, symbolWidthInColumns);
      int oldX = x;
      x += myRepresentationHelper.charWidth(c, x);
      symbolWidthInPixels = x - oldX;
      // There is a possible case that single tabulation symbols is shown in more than one visual column at IJ editor.
      // We store data entry only for the first tab column without 'inside tab' flag then.
      insideTab = true;
      for (int i = symbolWidthInColumns - 1; i > 0; i--) {
        visualColumn++;
        logicalColumn++;
        addData(false);
      }
      insideTab = false;
      visualColumn++;
      logicalColumn++;
      offset++;
    }
    else {
      visualColumn++;
      logicalColumn++;
      offset++;
      x += myRepresentationHelper.charWidth(c, x);
    }
  }
  /**
   * Appends the character to the emulated document content and, on line feed, closes the
   * current line range and (outside folding) records the virtual positions after line end.
   */
  private void onNonSoftWrapSymbol(char c) {
    myChars.append(c);
    if (c == '\n') {
      myLineRanges.add(new TextRange(logicalLineStartOffset, offset));
      // Emulate the situation when the user works with a virtual space after document line end (add such virtual
      // positions two symbols behind the end).
      if (!insideFolding) {
        myMapper.onVisualLineEnd(this);
        visualColumn++;
        logicalColumn++;
        addData(true);
        visualColumn++;
        logicalColumn++;
        addData(true);
      }
      logicalLineStartOffset = offset + 1;
      logicalLine++;
      logicalColumn = 0;
    }
  }
  /** Records the current position; positions inside a soft wrap count as virtual space. */
  private void addData() {
    addData(insideSoftWrap);
  }
  /** Records the current position as an expected mapping entry. */
  private void addData(boolean virtualSpace) {
    myExpectedData.add(new DataEntry(
      buildVisualPosition(), buildLogicalPosition(), offset, insideFolding && offset != foldingStartOffset, virtualSpace, insideTab
    ));
  }
}
}
| |
/**
* Copyright 2012 Ronen Hamias, Anton Kharenko
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*/
package io.scalecube.socketio.serialization;
import static org.junit.Assert.assertEquals;
import io.scalecube.socketio.packets.Packet;
import io.scalecube.socketio.packets.PacketType;
import io.netty.buffer.Unpooled;
import io.netty.util.CharsetUtil;
import org.junit.Test;
import java.io.IOException;
/**
*
* @author Anton Kharenko
*
*/
public class PacketEncoderTest {
@Test
public void testEncodeAckPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.ACK);
packet.setData(Unpooled.copiedBuffer("140", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("6:::140", result);
}
@Test
public void testEncodeAckPacketWithArgs() throws IOException {
// Given
Packet packet = new Packet(PacketType.ACK);
packet.setData(Unpooled.copiedBuffer("12+[\"woot\",\"wa\"]", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("6:::12+[\"woot\",\"wa\"]", result);
}
@Test
public void testEncodeHeartbeatPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.HEARTBEAT);
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("2::", result);
}
@Test
public void testEncodeDisconnectPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.DISCONNECT);
// packet.setEndpoint("/woot");
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
// Assert.assertEquals("0::/woot", result);
assertEquals("0::", result);
}
@Test
public void testEncodeConnectPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.CONNECT);
// packet.setEndpoint("/tobi");
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
// Assert.assertEquals("1::/tobi", result);
assertEquals("1::", result);
}
@Test
public void testEncodeConnectPacketWithQueryString() throws IOException {
// Given
Packet packet = new Packet(PacketType.CONNECT);
// packet.setEndpoint("/test");
packet.setData(Unpooled.copiedBuffer("?test=1", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
// Assert.assertEquals("1::/test:?test=1", result);
assertEquals("1:::?test=1", result);
}
@Test
public void testEncodeErrorPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.ERROR);
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("7::", result);
}
@Test
public void testEncodeErrorPacketWithReason() throws IOException {
// Given
Packet packet = new Packet(PacketType.ERROR);
packet.setData(Unpooled.copiedBuffer("0", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("7:::0", result);
}
@Test
public void testEncodeErrorPacketWithReasonAndAdvice() throws IOException {
// Given
Packet packet = new Packet(PacketType.ERROR);
packet.setData(Unpooled.copiedBuffer("2+0", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("7:::2+0", result);
}
@Test
public void testEncodeErrorPacketWithEndpoint() throws IOException {
// Given
Packet packet = new Packet(PacketType.ERROR);
// packet.setEndpoint("/woot");
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
// Assert.assertEquals("7::/woot", result);
assertEquals("7::", result);
}
@Test
public void testEncodeEventPacket() throws IOException {
// Given
Packet packet = new Packet(PacketType.EVENT);
packet.setData(Unpooled.copiedBuffer("{\"name\":\"woot\"}", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("5:::{\"name\":\"woot\"}", result);
}
@Test
public void testEncodeEventPacketWithMessageIdAndAck() throws IOException {
// Given
Packet packet = new Packet(PacketType.EVENT);
// packet.setId("1+");
packet.setData(Unpooled.copiedBuffer("{\"name\":\"tobi\"}", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
// Assert.assertEquals("5:1+::{\"name\":\"tobi\"}", result);
assertEquals("5:::{\"name\":\"tobi\"}", result);
}
@Test
public void testEncodeEventPacketWithData() throws IOException {
// Given
Packet packet = new Packet(PacketType.EVENT);
packet.setData(Unpooled.copiedBuffer("{\"name\":\"edwald\",\"args\":[{\"a\":\"b\"},2,\"3\"]}", CharsetUtil.UTF_8));
// When
String result = PacketEncoder.encodePacket(packet).toString(CharsetUtil.UTF_8);
// Then
assertEquals("5:::{\"name\":\"edwald\",\"args\":[{\"a\":\"b\"},2,\"3\"]}", result);
}
@Test
public void testEncodeJsonPacket() throws IOException {
    // A JSON packet is framed as "4:::<json>".
    Packet pkt = new Packet(PacketType.JSON);
    pkt.setData(Unpooled.copiedBuffer("\"2\"", CharsetUtil.UTF_8));
    String encoded = PacketEncoder.encodePacket(pkt).toString(CharsetUtil.UTF_8);
    assertEquals("4:::\"2\"", encoded);
}
@Test
public void testEncodeJsonPacketWithMessageIdAndAckData() throws IOException {
    // Message ids are disabled, so the id slot stays empty: "4:::<json>".
    // (With ids enabled the legacy expectation was "4:1+::{\"a\":\"b\"}".)
    Packet pkt = new Packet(PacketType.JSON);
    pkt.setData(Unpooled.copiedBuffer("{\"a\":\"b\"}", CharsetUtil.UTF_8));
    String encoded = PacketEncoder.encodePacket(pkt).toString(CharsetUtil.UTF_8);
    assertEquals("4:::{\"a\":\"b\"}", encoded);
}
@Test
public void testEncodeMessagePacket() throws IOException {
    // A plain MESSAGE packet is framed as "3:::<text>".
    Packet pkt = new Packet(PacketType.MESSAGE);
    pkt.setData(Unpooled.copiedBuffer("woot", CharsetUtil.UTF_8));
    String encoded = PacketEncoder.encodePacket(pkt).toString(CharsetUtil.UTF_8);
    assertEquals("3:::woot", encoded);
}
@Test
public void testEncodeMessagePacketWithIdAndEndpoint() throws IOException {
    // With id and endpoint support disabled, an empty MESSAGE encodes as "3::".
    // (The legacy expectation with both fields set was "3:5:/tobi".)
    Packet pkt = new Packet(PacketType.MESSAGE);
    String encoded = PacketEncoder.encodePacket(pkt).toString(CharsetUtil.UTF_8);
    assertEquals("3::", encoded);
}
}
| |
/**
* Copyright 2017 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ai.api.util;
import android.util.Log;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.ShortBuffer;
/**
 * Energy/zero-crossing based voice activity detector for 16-bit little-endian
 * mono PCM audio. The first {@code NOISE_FRAMES} frames feed a background-noise
 * energy estimate; after that, a frame counts as "active" when its normalized
 * energy exceeds the noise estimate by {@code ENERGY_FACTOR} and its
 * zero-crossing count lies in [{@code MIN_CZ}, {@code MAX_CZ}]. Speech
 * begin/end/cancel transitions are reported through {@link SpeechEventsListener}.
 *
 * <p>Not thread-safe; feed buffers from a single recording thread.
 */
public class VoiceActivityDetector {
    public static final String TAG = VoiceActivityDetector.class.getName();

    /** Bytes per analysis frame (160 16-bit samples). */
    public static final int FRAME_SIZE_IN_BYTES = 320;
    /** Max gap (ms) between active frames still counted as one speech sequence. */
    private static final int SEQUENCE_LENGTH_MILLIS = 30;
    /** Consecutive active frames required before speech-begin is reported. */
    private static final int MIN_SPEECH_SEQUENCE_COUNT = 3;
    private static final long MIN_SILENCE_MILLIS = 800;
    private static final long MAX_SILENCE_MILLIS = 3500;
    private static final long SILENCE_DIFF_MILLIS = MAX_SILENCE_MILLIS - MIN_SILENCE_MILLIS;
    /** Leading frames used only for the noise-energy estimate. */
    private static final int NOISE_FRAMES = 15;
    public static final int NOISE_BYTES = NOISE_FRAMES * FRAME_SIZE_IN_BYTES;
    /** A frame must exceed the noise floor by this factor to be active. */
    private static final double ENERGY_FACTOR = 3.1;
    /** Accepted zero-crossing range for a speech-like frame. */
    private static final int MIN_CZ = 5;
    private static final int MAX_CZ = 15;

    private final int sampleRate;
    private SpeechEventsListener eventsListener;
    private double noiseEnergy = 0.0;
    /** Timestamp (ms) of the last active frame, or -1 before any activity. */
    private long lastActiveTime = -1;
    /**
     * last time active frame hit sequence.
     */
    private long lastSequenceTime = 0;
    /**
     * number of active frame in sequence.
     */
    private int sequenceCounter = 0;
    /**
     * current processed time in millis
     */
    private long time = 0;
    private int frameNumber;
    /** Adaptive silence timeout; shrinks toward MIN_SILENCE_MILLIS while speech continues. */
    private long silenceMillis = MAX_SILENCE_MILLIS;
    private boolean speechActive = false;
    private boolean enabled = true;
    /** Cleared once speech ends/cancels; further buffers are ignored until reset(). */
    private boolean process = true;
    // Accumulators for calculateRms(): sum of squared raw samples and sample count.
    private double sum = 0;
    private int size = 0;

    /**
     * @param sampleRate audio sample rate in Hz used to convert frame counts to milliseconds
     */
    public VoiceActivityDetector(final int sampleRate) {
        this.sampleRate = sampleRate;
    }

    /**
     * Consumes the next chunk of recorded audio, updates the speech state and
     * fires listener callbacks on state transitions. No-op after speech has
     * ended or been cancelled, until {@link #reset()} is called.
     *
     * @param buffer    raw PCM bytes (16-bit little-endian samples)
     * @param bytesRead number of valid bytes in {@code buffer}
     */
    public void processBuffer(final byte[] buffer, final int bytesRead) {
        if (!process) {
            return;
        }
        final ByteBuffer byteBuffer = ByteBuffer.wrap(buffer, 0, bytesRead).order(ByteOrder.LITTLE_ENDIAN);
        final ShortBuffer shorts = byteBuffer.asShortBuffer();
        final boolean active = isFrameActive(shorts);
        final int frameSize = bytesRead / 2; // 16 bit encoding
        // BUG FIX: widen to long before multiplying. The int product
        // frameNumber * frameSize * 1000 overflows after roughly two minutes of
        // audio, which made `time` go negative and broke all timing comparisons.
        time = (long) frameNumber * frameSize * 1000 / sampleRate;
        if (active) {
            if (lastActiveTime >= 0 && (time - lastActiveTime) < SEQUENCE_LENGTH_MILLIS) {
                if (++sequenceCounter >= MIN_SPEECH_SEQUENCE_COUNT) {
                    if (!speechActive) {
                        onSpeechBegin();
                    }
                    lastSequenceTime = time;
                    // Speech confirmed: tighten the end-of-speech silence window.
                    silenceMillis = Math.max(MIN_SILENCE_MILLIS, silenceMillis - SILENCE_DIFF_MILLIS / 4);
                }
            } else {
                sequenceCounter = 1;
            }
            lastActiveTime = time;
        } else {
            if (time - lastSequenceTime > silenceMillis) {
                if (speechActive) {
                    onSpeechEnd();
                } else {
                    onSpeechCancel();
                }
            }
        }
    }

    /**
     * Classifies one frame as speech-like using normalized energy plus the
     * zero-crossing count. The first NOISE_FRAMES frames only feed the noise
     * estimate and are never reported active. Also accumulates the raw squared
     * samples used by {@link #calculateRms()}.
     */
    private boolean isFrameActive(final ShortBuffer frame) {
        int lastSign = 0;
        int czCount = 0;
        double energy = 0.0;
        final int frameSize = frame.limit();
        size += frameSize;
        for (int i = 0; i < frameSize; i++) {
            final short raw = frame.get(i);
            final double amplitude = (double) raw / (double) Short.MAX_VALUE;
            // NOTE(review): the float casts mirror the original tuning; they slightly
            // reduce precision of the energy sum and are kept to preserve behavior.
            energy += (float) amplitude * (float) amplitude / (double) frameSize;
            sum += raw * raw;
            final int sign = (float) amplitude > 0 ? 1 : -1;
            if (lastSign != 0 && sign != lastSign) {
                czCount++;
            }
            lastSign = sign;
        }
        boolean result = false;
        if (++frameNumber < NOISE_FRAMES) {
            noiseEnergy += (energy / (double) NOISE_FRAMES);
        } else {
            if (czCount >= MIN_CZ && czCount <= MAX_CZ) {
                if (energy > noiseEnergy * ENERGY_FACTOR) {
                    result = true;
                }
            }
        }
        return result;
    }

    /**
     * Returns the RMS level of all samples seen since the previous call and
     * resets the accumulators.
     *
     * @return the RMS level, or 0 when no samples were processed since the last
     *         call (previously this divided 0/0 and returned NaN)
     */
    public double calculateRms() {
        if (size == 0) {
            return 0.0;
        }
        final double rms = Math.sqrt(sum / size) / 100;
        sum = 0;
        size = 0;
        return rms;
    }

    /** Restores the detector to its initial state so a new utterance can be processed. */
    public void reset() {
        time = 0;
        frameNumber = 0;
        noiseEnergy = 0.0;
        lastActiveTime = -1;
        lastSequenceTime = 0;
        sequenceCounter = 0;
        silenceMillis = MAX_SILENCE_MILLIS;
        speechActive = false;
        process = true;
    }

    public void setSpeechListener(final SpeechEventsListener eventsListener) {
        this.eventsListener = eventsListener;
    }

    /** Stops processing and notifies the listener that speech finished (if enabled). */
    private void onSpeechEnd() {
        Log.v(TAG, "onSpeechEnd");
        speechActive = false;
        process = false;
        if (enabled) {
            if (eventsListener != null) {
                eventsListener.onSpeechEnd();
            }
        }
    }

    /** Stops processing and notifies the listener that no speech was detected. */
    private void onSpeechCancel() {
        Log.v(TAG, "onSpeechCancel");
        speechActive = false;
        process = false;
        if (eventsListener != null) {
            eventsListener.onSpeechCancel();
        }
    }

    /** Marks speech as started and notifies the listener. */
    private void onSpeechBegin() {
        Log.v(TAG, "onSpeechBegin");
        speechActive = true;
        if (eventsListener != null) {
            eventsListener.onSpeechBegin();
        }
    }

    /**
     * If enabled, voice activity detector fires onSpeechEnd events.
     * This option does not affect onSpeechBegin and onChangeLevel events
     *
     * @param enabled new option values
     */
    public void setEnabled(final boolean enabled) {
        this.enabled = enabled;
    }

    /**
     * Used to notify about speech begin/end events
     */
    public interface SpeechEventsListener {
        void onSpeechBegin();
        void onSpeechCancel();
        void onSpeechEnd();
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.parse;
import org.apache.commons.io.IOUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.metastore.ReplChangeManager;
import org.apache.hadoop.hive.metastore.api.Database;
import org.apache.hadoop.hive.metastore.api.FieldSchema;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.ErrorMsg;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.repl.util.ReplUtils;
import org.apache.hadoop.hive.ql.hooks.ReadEntity;
import org.apache.hadoop.hive.ql.hooks.WriteEntity;
import org.apache.hadoop.hive.ql.metadata.Hive;
import org.apache.hadoop.hive.ql.metadata.Partition;
import org.apache.hadoop.hive.ql.metadata.Table;
import org.apache.hadoop.hive.ql.parse.repl.DumpType;
import org.apache.hadoop.hive.ql.parse.repl.dump.Utils;
import org.apache.hadoop.hive.ql.parse.repl.dump.io.DBSerializer;
import org.apache.hadoop.hive.ql.parse.repl.dump.io.JsonWriter;
import org.apache.hadoop.hive.ql.parse.repl.dump.io.ReplicationSpecSerializer;
import org.apache.hadoop.hive.ql.parse.repl.dump.io.TableSerializer;
import org.apache.hadoop.hive.ql.parse.repl.load.MetaData;
import org.apache.hadoop.hive.ql.parse.repl.load.MetadataJson;
import org.apache.thrift.TException;
import org.json.JSONException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.io.Serializable;
import java.net.URI;
import java.net.URISyntaxException;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
/**
 * EximUtil. Utility methods for the export/import semantic analyzers.
 */
public class EximUtil {
    public static final String METADATA_NAME = "_metadata";
    public static final String FILES_NAME = "_files";
    public static final String DATA_PATH_NAME = "data";
    private static final Logger LOG = LoggerFactory.getLogger(EximUtil.class);

    /**
     * Wrapper class for common BaseSemanticAnalyzer non-static members
     * into static generic methods without having the fn signatures
     * becoming overwhelming, with passing each of these into every function.
     *
     * Note, however, that since this is constructed with args passed in,
     * parts of the context, such as the tasks or inputs, might have been
     * overridden with temporary context values, rather than being exactly
     * 1:1 equivalent to BaseSemanticAnalyzer.getRootTasks() or BSA.getInputs().
     */
    public static class SemanticAnalyzerWrapperContext {
        private HiveConf conf;
        private Hive db;
        private HashSet<ReadEntity> inputs;
        private HashSet<WriteEntity> outputs;
        private List<Task<? extends Serializable>> tasks;
        private Logger LOG;
        private Context ctx;
        private DumpType eventType = DumpType.EVENT_UNKNOWN;
        private Task<? extends Serializable> openTxnTask = null;

        public HiveConf getConf() {
            return conf;
        }

        public Hive getHive() {
            return db;
        }

        public HashSet<ReadEntity> getInputs() {
            return inputs;
        }

        public HashSet<WriteEntity> getOutputs() {
            return outputs;
        }

        public List<Task<? extends Serializable>> getTasks() {
            return tasks;
        }

        public Logger getLOG() {
            return LOG;
        }

        public Context getCtx() {
            return ctx;
        }

        public void setEventType(DumpType eventType) {
            this.eventType = eventType;
        }

        public DumpType getEventType() {
            return eventType;
        }

        public SemanticAnalyzerWrapperContext(HiveConf conf, Hive db,
                                             HashSet<ReadEntity> inputs,
                                             HashSet<WriteEntity> outputs,
                                             List<Task<? extends Serializable>> tasks,
                                             Logger LOG, Context ctx) {
            this.conf = conf;
            this.db = db;
            this.inputs = inputs;
            this.outputs = outputs;
            this.tasks = tasks;
            this.LOG = LOG;
            this.ctx = ctx;
        }

        public Task<? extends Serializable> getOpenTxnTask() {
            return openTxnTask;
        }

        public void setOpenTxnTask(Task<? extends Serializable> openTxnTask) {
            this.openTxnTask = openTxnTask;
        }
    }

    /** Utility class; not instantiable. */
    private EximUtil() {
    }

    /**
     * Initialize the URI where the exported data collection is to be created
     * for export, or is present for import. Relative paths are resolved against
     * the user's home directory (or test.tmp.dir in test mode), a missing
     * authority falls back to the default filesystem's authority, and the
     * scheme must appear on the hive.exim.uri.scheme.whitelist.
     *
     * @throws SemanticException if the scheme is not whitelisted, the URI is
     *         malformed, or the filesystem cannot be reached
     */
    public static URI getValidatedURI(HiveConf conf, String dcPath) throws SemanticException {
        try {
            boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE)
                || conf.getBoolVar(HiveConf.ConfVars.HIVEEXIMTESTMODE);
            URI uri = new Path(dcPath).toUri();
            FileSystem fs = FileSystem.get(uri, conf);
            // Get scheme from FileSystem
            String scheme = fs.getScheme();
            String authority = uri.getAuthority();
            String path = uri.getPath();
            LOG.info("Path before norm :" + path);
            // generate absolute path relative to home directory
            if (!path.startsWith("/")) {
                if (testMode) {
                    path = (new Path(System.getProperty("test.tmp.dir"), path)).toUri().getPath();
                } else {
                    path =
                        (new Path(new Path("/user/" + System.getProperty("user.name")), path)).toUri()
                            .getPath();
                }
            }
            // if scheme is specified but not authority then use the default authority
            if (StringUtils.isEmpty(authority)) {
                URI defaultURI = FileSystem.get(conf).getUri();
                authority = defaultURI.getAuthority();
            }
            LOG.info("Scheme:" + scheme + ", authority:" + authority + ", path:" + path);
            Collection<String> eximSchemes =
                conf.getStringCollection(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname);
            if (!eximSchemes.contains(scheme)) {
                throw new SemanticException(
                    ErrorMsg.INVALID_PATH
                        .getMsg("only the following file systems accepted for export/import : "
                            + conf.get(HiveConf.ConfVars.HIVE_EXIM_URI_SCHEME_WL.varname)));
            }
            try {
                return new URI(scheme, authority, path, null, null);
            } catch (URISyntaxException e) {
                throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
            }
        } catch (IOException e) {
            throw new SemanticException(ErrorMsg.IO_ERROR.getMsg() + ": " + e.getMessage(), e);
        }
    }

    /** Rejects tables that cannot be exported (non-native / storage-handler tables). */
    static void validateTable(org.apache.hadoop.hive.ql.metadata.Table table) throws SemanticException {
        if (table.isNonNative()) {
            throw new SemanticException(ErrorMsg.EXIM_FOR_NON_NATIVE.getMsg());
        }
    }

    /**
     * Qualifies a possibly-relative location into an absolute path string.
     * In test mode, relative paths resolve under test.tmp.dir and a missing
     * scheme defaults to "pfile"; otherwise the location is qualified against
     * its own filesystem.
     */
    public static String relativeToAbsolutePath(HiveConf conf, String location)
        throws SemanticException {
        try {
            boolean testMode = conf.getBoolVar(HiveConf.ConfVars.HIVETESTMODE)
                || conf.getBoolVar(HiveConf.ConfVars.HIVEEXIMTESTMODE);
            if (testMode) {
                URI uri = new Path(location).toUri();
                FileSystem fs = FileSystem.get(uri, conf);
                String scheme = fs.getScheme();
                String authority = uri.getAuthority();
                String path = uri.getPath();
                if (!path.startsWith("/")) {
                    path = (new Path(System.getProperty("test.tmp.dir"), path)).toUri().getPath();
                }
                if (StringUtils.isEmpty(scheme)) {
                    scheme = "pfile";
                }
                try {
                    uri = new URI(scheme, authority, path, null, null);
                } catch (URISyntaxException e) {
                    throw new SemanticException(ErrorMsg.INVALID_PATH.getMsg(), e);
                }
                return uri.toString();
            } else {
                Path path = new Path(location);
                if (path.isAbsolute()) {
                    return location;
                }
                return path.getFileSystem(conf).makeQualified(path).toString();
            }
        } catch (IOException e) {
            throw new SemanticException(ErrorMsg.IO_ERROR.getMsg() + ": " + e.getMessage(), e);
        }
    }

    /* major version number should match for backward compatibility */
    public static final String METADATA_FORMAT_VERSION = "0.2";

    /* If null, then the major version number should match */
    public static final String METADATA_FORMAT_FORWARD_COMPATIBLE_VERSION = null;

    /**
     * Serializes the database object (minus internal replication bookkeeping
     * parameters) to the metadata file, restoring the original parameters on
     * the shared Database object afterwards.
     */
    public static void createDbExportDump(FileSystem fs, Path metadataPath, Database dbObj,
        ReplicationSpec replicationSpec) throws IOException, SemanticException {
        // WARNING NOTE : at this point, createDbExportDump lives only in a world where ReplicationSpec is in replication scope
        // If we later make this work for non-repl cases, analysis of this logic might become necessary. Also, this is using
        // Replv2 semantics, i.e. with listFiles laziness (no copy at export time)

        // Remove all the entries from the parameters which are added by repl tasks internally.
        Map<String, String> parameters = dbObj.getParameters();
        if (parameters != null) {
            Map<String, String> tmpParameters = new HashMap<>(parameters);
            tmpParameters.entrySet()
                .removeIf(e -> e.getKey().startsWith(Utils.BOOTSTRAP_DUMP_STATE_KEY_PREFIX)
                    || e.getKey().equals(ReplUtils.REPL_CHECKPOINT_KEY)
                    || e.getKey().equals(ReplChangeManager.SOURCE_OF_REPLICATION)
                    || e.getKey().equals(ReplUtils.REPL_FIRST_INC_PENDING_FLAG));
            dbObj.setParameters(tmpParameters);
        }
        try (JsonWriter jsonWriter = new JsonWriter(fs, metadataPath)) {
            new DBSerializer(dbObj).writeTo(jsonWriter, replicationSpec);
        }
        // Put the stripped parameters back so the caller's Database is unchanged.
        if (parameters != null) {
            dbObj.setParameters(parameters);
        }
    }

    /**
     * Serializes a table (and its partitions) to the metadata file. A null
     * replicationSpec gets default values; a null tableHandle marks the spec
     * as a no-op dump.
     */
    public static void createExportDump(FileSystem fs, Path metadataPath, Table tableHandle,
        Iterable<Partition> partitions, ReplicationSpec replicationSpec, HiveConf hiveConf)
        throws SemanticException, IOException {
        if (replicationSpec == null) {
            replicationSpec = new ReplicationSpec(); // instantiate default values if not specified
        }
        if (tableHandle == null) {
            replicationSpec.setNoop(true);
        }
        try (JsonWriter writer = new JsonWriter(fs, metadataPath)) {
            if (replicationSpec.isInReplicationScope()) {
                new ReplicationSpecSerializer().writeTo(writer, replicationSpec);
            }
            new TableSerializer(tableHandle, partitions, hiveConf).writeTo(writer, replicationSpec);
        }
    }

    /** Reads and parses the JSON metadata file written by the export dump. */
    public static MetaData readMetaData(FileSystem fs, Path metadataPath)
        throws IOException, SemanticException {
        String message = readAsString(fs, metadataPath);
        try {
            return new MetadataJson(message).getMetaData();
        } catch (TException | JSONException e) {
            throw new SemanticException(ErrorMsg.ERROR_SERIALIZE_METADATA.getMsg(), e);
        }
    }

    /** Reads the whole file at the given path as a UTF-8 string. */
    public static String readAsString(final FileSystem fs, final Path fromMetadataPath)
        throws IOException {
        try (FSDataInputStream stream = fs.open(fromMetadataPath)) {
            return IOUtils.toString(stream, StandardCharsets.UTF_8);
        }
    }

    /**
     * Checks forward and backward compatibility between the consumer
     * ({@code currVersion}) and the metadata producer ({@code version}).
     * Major versions must match for backward compatibility; {@code fcVersion},
     * when present, states the minimum consumer version the producer allows.
     *
     * @throws SemanticException if the versions are incompatible or malformed
     */
    public static void doCheckCompatibility(String currVersion,
        String version, String fcVersion) throws SemanticException {
        if (version == null) {
            throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Version number missing"));
        }
        final int data_major;
        final int code_major;
        final int code_minor;
        try {
            StringTokenizer st = new StringTokenizer(version, ".");
            data_major = Integer.parseInt(st.nextToken());
            StringTokenizer st2 = new StringTokenizer(currVersion, ".");
            code_major = Integer.parseInt(st2.nextToken());
            code_minor = Integer.parseInt(st2.nextToken());
        } catch (NumberFormatException | java.util.NoSuchElementException e) {
            // Surface malformed version strings as a metadata error instead of
            // leaking an unchecked exception to the caller.
            throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(
                "Malformed version string. Producer version " + version
                    + ", Consumer version " + currVersion), e);
        }
        if (code_major > data_major) {
            throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not backward compatible."
                + " Producer version " + version + ", Consumer version " +
                currVersion));
        } else {
            if ((fcVersion == null) || fcVersion.isEmpty()) {
                if (code_major < data_major) {
                    // Note: a separating space was missing after "compatible." in the
                    // original message; fixed for readability.
                    throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
                        + " Producer version " + version + ", Consumer version " +
                        currVersion));
                }
            } else {
                final int fc_major;
                final int fc_minor;
                try {
                    StringTokenizer st3 = new StringTokenizer(fcVersion, ".");
                    fc_major = Integer.parseInt(st3.nextToken());
                    fc_minor = Integer.parseInt(st3.nextToken());
                } catch (NumberFormatException | java.util.NoSuchElementException e) {
                    throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg(
                        "Malformed forward-compatibility version string " + fcVersion), e);
                }
                if ((fc_major > code_major) || ((fc_major == code_major) && (fc_minor > code_minor))) {
                    throw new SemanticException(ErrorMsg.INVALID_METADATA.getMsg("Not forward compatible."
                        + " Minimum version " + fcVersion + ", Consumer version " +
                        currVersion));
                }
            }
        }
    }

    /**
     * Return the partition specification from the specified keys and values
     *
     * @param partCols
     *          the names of the partition keys
     * @param partVals
     *          the values of the partition keys
     *
     * @return the partition specification as a map
     */
    public static Map<String, String> makePartSpec(List<FieldSchema> partCols, List<String> partVals) {
        Map<String, String> partSpec = new LinkedHashMap<String, String>();
        for (int i = 0; i < partCols.size(); ++i) {
            partSpec.put(partCols.get(i).getName(), partVals.get(i));
        }
        return partSpec;
    }

    /**
     * Compares the schemas - names, types and order, but ignoring comments
     *
     * @param newSchema
     *          the new schema
     * @param oldSchema
     *          the old schema
     * @return a boolean indicating match
     */
    public static boolean schemaCompare(List<FieldSchema> newSchema, List<FieldSchema> oldSchema) {
        Iterator<FieldSchema> newColIter = newSchema.iterator();
        for (FieldSchema oldCol : oldSchema) {
            FieldSchema newCol = null;
            if (newColIter.hasNext()) {
                newCol = newColIter.next();
            } else {
                return false;
            }
            // not using FieldSchema.equals as comments can be different
            if (!oldCol.getName().equals(newCol.getName())
                || !oldCol.getType().equals(newCol.getType())) {
                return false;
            }
        }
        // extra trailing columns in the new schema also count as a mismatch
        if (newColIter.hasNext()) {
            return false;
        }
        return true;
    }

    /** Returns a PathFilter that accepts only directories of the given filesystem. */
    public static PathFilter getDirectoryFilter(final FileSystem fs) {
        // TODO : isn't there a prior impl of an isDirectory utility PathFilter so users don't have to write their own?
        return new PathFilter() {
            @Override
            public boolean accept(Path p) {
                try {
                    return fs.isDirectory(p);
                } catch (IOException e) {
                    throw new RuntimeException(e);
                }
            }
        };
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.usecases;
import javax.jms.Connection;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageConsumer;
import javax.jms.MessageProducer;
import javax.jms.Session;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.apache.activemq.TestSupport.PersistenceAdapterChoice;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * Parameterized broker-integration tests for offline durable subscriptions:
 * selector matching, interleaved online/offline consumers, and message
 * retention across broker restarts. Each test is run once per persistence
 * adapter supplied by {@link #getTestParameters()}. The broker, topic and
 * connection helpers come from DurableSubscriptionOfflineTestBase.
 */
@RunWith(value = Parameterized.class)
public class DurableSubscriptionOffline3Test extends DurableSubscriptionOfflineTestBase {
    private static final Logger LOG = LoggerFactory.getLogger(DurableSubscriptionOffline3Test.class);

    // Runs each test against KahaDB, JDBC and (where its native library is
    // available) LevelDB. LevelDB is skipped on AIX and SunOS.
    @Parameterized.Parameters(name = "{0}")
    public static Collection<PersistenceAdapterChoice[]> getTestParameters() {
        String osName = System.getProperty("os.name");
        LOG.debug("Running on [" + osName + "]");
        PersistenceAdapterChoice[] kahaDb = {PersistenceAdapterChoice.KahaDB};
        PersistenceAdapterChoice[] jdbc = {PersistenceAdapterChoice.JDBC};
        List<PersistenceAdapterChoice[]> choices = new ArrayList<>();
        choices.add(kahaDb);
        choices.add(jdbc);
        if (!osName.equalsIgnoreCase("AIX") && !osName.equalsIgnoreCase("SunOS")) {
            PersistenceAdapterChoice[] levelDb = {PersistenceAdapterChoice.LevelDB};
            choices.add(levelDb);
        }
        return choices;
    }

    public DurableSubscriptionOffline3Test(PersistenceAdapterChoice persistenceAdapterChoice) {
        this.defaultPersistenceAdapter = persistenceAdapterChoice;
        LOG.info(">>>> running {} with persistenceAdapterChoice: {}", testName.getMethodName(), this.defaultPersistenceAdapter);
    }

    // An offline durable sub must receive every matching message sent both
    // before and after a second durable sub (same sub name, different client
    // id) briefly goes online and offline in between.
    @Test(timeout = 60 * 1000)
    public void testInterleavedOfflineSubscriptionCanConsume() throws Exception {
        // create durable subscription 1
        Connection con = createConnection("cliId1");
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        session.close();
        con.close();

        // send messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(null);

        int sent = 0;
        for (int i = 0; i < 10; i++) {
            sent++;
            Message message = session.createMessage();
            message.setStringProperty("filter", "true");
            producer.send(topic, message);
        }

        Thread.sleep(1 * 1000);

        // create durable subscription 2
        Connection con2 = createConnection("cliId2");
        Session session2 = con2.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer2 = session2.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        DurableSubscriptionOfflineTestListener listener2 = new DurableSubscriptionOfflineTestListener();
        consumer2.setMessageListener(listener2);
        // closed immediately: nothing should have been dispatched yet
        assertEquals(0, listener2.count);
        session2.close();
        con2.close();

        // send some more
        for (int i = 0; i < 10; i++) {
            sent++;
            Message message = session.createMessage();
            message.setStringProperty("filter", "true");
            producer.send(topic, message);
        }

        Thread.sleep(1 * 1000);
        session.close();
        con.close();

        // sub 2 was created after the first batch, so it only sees the second 10
        con2 = createConnection("cliId2");
        session2 = con2.createSession(false, Session.AUTO_ACKNOWLEDGE);
        consumer2 = session2.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        listener2 = new DurableSubscriptionOfflineTestListener("cliId2");
        consumer2.setMessageListener(listener2);
        // test online subs
        Thread.sleep(3 * 1000);
        assertEquals(10, listener2.count);

        // consume all messages
        con = createConnection("cliId1");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        DurableSubscriptionOfflineTestListener listener = new DurableSubscriptionOfflineTestListener("cliId1");
        consumer.setMessageListener(listener);

        Thread.sleep(3 * 1000);
        session.close();
        con.close();

        assertEquals("offline consumer got all", sent, listener.count);
    }

    // Compound selector shared by testMixOfOnLineAndOfflineSubsGetAllMatched
    // and verified per-message by FilterCheckListener below.
    private static String filter = "$a='A1' AND (($b=true AND $c=true) OR ($d='D1' OR $d='D2'))";

    // Offline durable, online durable and non-durable consumers with the same
    // selector must all receive exactly the matching subset of messages.
    @Test(timeout = 60 * 1000)
    public void testMixOfOnLineAndOfflineSubsGetAllMatched() throws Exception {
        // create offline subs 1
        Connection con = createConnection("offCli1");
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId", filter, true);
        session.close();
        con.close();

        // create offline subs 2
        con = createConnection("offCli2");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId", filter, true);
        session.close();
        con.close();

        // create online subs
        Connection con2 = createConnection("onlineCli1");
        Session session2 = con2.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer2 = session2.createDurableSubscriber(topic, "SubsId", filter, true);
        DurableSubscriptionOfflineTestListener listener2 = new DurableSubscriptionOfflineTestListener();
        consumer2.setMessageListener(listener2);

        // create non-durable consumer
        Connection con4 = createConnection("nondurableCli");
        Session session4 = con4.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer4 = session4.createConsumer(topic, filter, true);
        DurableSubscriptionOfflineTestListener listener4 = new DurableSubscriptionOfflineTestListener();
        consumer4.setMessageListener(listener4);

        // send messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(null);

        boolean hasRelevant = false;
        int filtered = 0;
        // random $d values; only D1/D2 match the selector's $d branch
        for (int i = 0; i < 100; i++) {
            int postf = (int) (Math.random() * 9) + 1;
            String d = "D" + postf;
            if ("D1".equals(d) || "D2".equals(d)) {
                hasRelevant = true;
                filtered++;
            }
            Message message = session.createMessage();
            message.setStringProperty("$a", "A1");
            message.setStringProperty("$d", d);
            producer.send(topic, message);
        }

        // one final message exercising the ($b AND $c) branch of the selector
        Message message = session.createMessage();
        message.setStringProperty("$a", "A1");
        message.setBooleanProperty("$b", true);
        message.setBooleanProperty("$c", hasRelevant);
        producer.send(topic, message);
        if (hasRelevant)
            filtered++;

        Thread.sleep(1 * 1000);
        session.close();
        con.close();

        Thread.sleep(3 * 1000);

        // test non-durable consumer
        session4.close();
        con4.close();
        assertEquals(filtered, listener4.count); // succeeded!

        // test online subs
        session2.close();
        con2.close();
        assertEquals(filtered, listener2.count); // succeeded!

        // test offline 1
        con = createConnection("offCli1");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", filter, true);
        DurableSubscriptionOfflineTestListener listener = new FilterCheckListener();
        consumer.setMessageListener(listener);
        Thread.sleep(3 * 1000);
        session.close();
        con.close();
        assertEquals(filtered, listener.count);

        // test offline 2
        Connection con3 = createConnection("offCli2");
        Session session3 = con3.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer3 = session3.createDurableSubscriber(topic, "SubsId", filter, true);
        DurableSubscriptionOfflineTestListener listener3 = new FilterCheckListener();
        consumer3.setMessageListener(listener3);
        Thread.sleep(3 * 1000);
        session3.close();
        con3.close();
        assertEquals(filtered, listener3.count);
        assertTrue("no unexpected exceptions: " + exceptions, exceptions.isEmpty());
    }

    // Selector-filtered offline subs must still see all matching messages —
    // including those sent after a broker restart without message deletion.
    @Test(timeout = 60 * 1000)
    public void testOfflineSubscriptionWithSelectorAfterRestart() throws Exception {
        if (PersistenceAdapterChoice.LevelDB == defaultPersistenceAdapter) {
            // https://issues.apache.org/jira/browse/AMQ-4296
            return;
        }

        // create offline subs 1
        Connection con = createConnection("offCli1");
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        session.close();
        con.close();

        // create offline subs 2
        con = createConnection("offCli2");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        session.close();
        con.close();

        // send messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageProducer producer = session.createProducer(null);

        int filtered = 0;
        for (int i = 0; i < 10; i++) {
            boolean filter = (int) (Math.random() * 2) >= 1;
            if (filter)
                filtered++;

            Message message = session.createMessage();
            message.setStringProperty("filter", filter ? "true" : "false");
            producer.send(topic, message);
        }

        LOG.info("sent: " + filtered);
        Thread.sleep(1 * 1000);
        session.close();
        con.close();

        // restart broker
        Thread.sleep(3 * 1000);
        broker.stop();
        createBroker(false /*deleteAllMessages*/);

        // send more messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        producer = session.createProducer(null);

        for (int i = 0; i < 10; i++) {
            boolean filter = (int) (Math.random() * 2) >= 1;
            if (filter)
                filtered++;

            Message message = session.createMessage();
            message.setStringProperty("filter", filter ? "true" : "false");
            producer.send(topic, message);
        }

        LOG.info("after restart, total sent with filter='true': " + filtered);
        Thread.sleep(1 * 1000);
        session.close();
        con.close();

        // test offline subs
        con = createConnection("offCli1");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        DurableSubscriptionOfflineTestListener listener = new DurableSubscriptionOfflineTestListener("1>");
        consumer.setMessageListener(listener);

        Connection con3 = createConnection("offCli2");
        Session session3 = con3.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer3 = session3.createDurableSubscriber(topic, "SubsId", "filter = 'true'", true);
        DurableSubscriptionOfflineTestListener listener3 = new DurableSubscriptionOfflineTestListener();
        consumer3.setMessageListener(listener3);

        Thread.sleep(3 * 1000);

        session.close();
        con.close();
        session3.close();
        con3.close();

        assertEquals(filtered, listener.count);
        assertEquals(filtered, listener3.count);
    }

    // A durable sub consumed online, then restarted with the broker, must
    // receive the messages sent while it was offline (listener.count keeps
    // accumulating across both online phases).
    @Test(timeout = 60 * 1000)
    public void testOfflineSubscriptionAfterRestart() throws Exception {
        // create offline subs 1
        Connection con = createConnection("offCli1");
        Session session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        MessageConsumer consumer = session.createDurableSubscriber(topic, "SubsId", null, false);
        DurableSubscriptionOfflineTestListener listener = new DurableSubscriptionOfflineTestListener();
        consumer.setMessageListener(listener);

        // send messages
        MessageProducer producer = session.createProducer(null);

        int sent = 0;
        for (int i = 0; i < 10; i++) {
            sent++;
            Message message = session.createMessage();
            message.setStringProperty("filter", "false");
            producer.send(topic, message);
        }

        LOG.info("sent: " + sent);
        Thread.sleep(5 * 1000);
        session.close();
        con.close();

        assertEquals(sent, listener.count);

        // restart broker
        Thread.sleep(3 * 1000);
        broker.stop();
        createBroker(false /*deleteAllMessages*/);

        // send more messages
        con = createConnection();
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        producer = session.createProducer(null);

        for (int i = 0; i < 10; i++) {
            sent++;
            Message message = session.createMessage();
            message.setStringProperty("filter", "false");
            producer.send(topic, message);
        }

        LOG.info("after restart, sent: " + sent);
        Thread.sleep(1 * 1000);
        session.close();
        con.close();

        // test offline subs
        con = createConnection("offCli1");
        session = con.createSession(false, Session.AUTO_ACKNOWLEDGE);
        consumer = session.createDurableSubscriber(topic, "SubsId", null, true);
        consumer.setMessageListener(listener);
        Thread.sleep(3 * 1000);
        session.close();
        con.close();

        assertEquals(sent, listener.count);
    }

    /**
     * Listener that, besides counting, verifies each delivered message actually
     * satisfies the compound selector: either the ($b AND $c) branch or a
     * $d in {D1, D2}. Verification failures are collected in `exceptions`.
     */
    public class FilterCheckListener extends DurableSubscriptionOfflineTestListener {

        @Override
        public void onMessage(Message message) {
            count++;
            try {
                Object b = message.getObjectProperty("$b");
                if (b != null) {
                    boolean c = message.getBooleanProperty("$c");
                    assertTrue("", c);
                } else {
                    String d = message.getStringProperty("$d");
                    assertTrue("", "D1".equals(d) || "D2".equals(d));
                }
            } catch (JMSException e) {
                e.printStackTrace();
                exceptions.add(e);
            }
        }
    }
}
| |
/*
* Copyright (c) 2008-2020, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.client.replicatedmap;
import com.hazelcast.client.config.ClientConfig;
import com.hazelcast.client.config.ClientNetworkConfig;
import com.hazelcast.client.test.TestHazelcastFactory;
import com.hazelcast.core.HazelcastInstance;
import com.hazelcast.replicatedmap.ReplicatedMap;
import com.hazelcast.cluster.Address;
import com.hazelcast.test.AssertTask;
import com.hazelcast.test.HazelcastParallelClassRunner;
import com.hazelcast.test.HazelcastTestSupport;
import com.hazelcast.test.annotation.ParallelJVMTest;
import com.hazelcast.test.annotation.QuickTest;
import org.junit.After;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runner.RunWith;
import java.lang.reflect.Field;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
/**
 * Tests {@link ReplicatedMap} operations through a "dummy" (non-smart) client:
 * smart routing is disabled and the client connects to a single member only.
 * Each test pins the client-side proxy's target partition (via reflection in
 * {@link #setPartitionId}) so that operations may have to be served for a key
 * owned by the member the client is NOT directly connected to.
 */
@RunWith(HazelcastParallelClassRunner.class)
@Category({QuickTest.class, ParallelJVMTest.class})
public class DummyClientReplicatedMapTest extends HazelcastTestSupport {
    private TestHazelcastFactory hazelcastFactory = new TestHazelcastFactory();
    @After
    public void cleanup() {
        // Shut down every member and client created by the factory.
        hazelcastFactory.terminateAll();
    }
    @Test
    public void testGet() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        // Client connects only to instance1 (non-smart routing).
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        // Key owned by the OTHER member; pin the proxy to its partition.
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance2.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        assertEquals(value, map.get(key));
    }
    @Test
    public void testIsEmpty() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        // Replication is asynchronous; assert eventually rather than immediately.
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertFalse(map.isEmpty());
            }
        });
    }
    @Test
    public void testKeySet() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        final String key = generateKeyOwnedBy(instance2);
        final String value = randomString();
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        map.put(key, value);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                Collection<String> keySet = map.keySet();
                assertEquals(1, keySet.size());
                assertEquals(key, keySet.iterator().next());
            }
        });
    }
    @Test
    public void testEntrySet() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        final String key = generateKeyOwnedBy(instance2);
        final String value = randomString();
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        map.put(key, value);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                Set<Map.Entry<String, String>> entries = map.entrySet();
                assertEquals(1, entries.size());
                Map.Entry<String, String> entry = entries.iterator().next();
                assertEquals(key, entry.getKey());
                assertEquals(value, entry.getValue());
            }
        });
    }
    @Test
    public void testValues() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        final String key = generateKeyOwnedBy(instance2);
        final String value = randomString();
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        map.put(key, value);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                Collection<String> values = map.values();
                assertEquals(1, values.size());
                assertEquals(value, values.iterator().next());
            }
        });
    }
    @Test
    public void testContainsKey() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        final String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        // containsKey is checked synchronously right after the put.
        assertTrue(map.containsKey(key));
    }
    @Test
    public void testContainsValue() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        final String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        final String value = randomString();
        map.put(key, value);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertTrue(map.containsValue(value));
            }
        });
    }
    @Test
    public void testSize() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertEquals(1, map.size());
            }
        });
    }
    @Test
    public void testClear() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        map.clear();
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertEquals(0, map.size());
            }
        });
    }
    @Test
    public void testRemove() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        map.put(key, value);
        map.remove(key);
        assertTrueEventually(new AssertTask() {
            @Override
            public void run() {
                assertEquals(0, map.size());
            }
        });
    }
    @Test
    public void testPutAll() throws Exception {
        HazelcastInstance instance1 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance instance2 = hazelcastFactory.newHazelcastInstance();
        HazelcastInstance client = hazelcastFactory.newHazelcastClient(getClientConfig(instance1));
        final ReplicatedMap<String, String> map = client.getReplicatedMap(randomMapName());
        String key = generateKeyOwnedBy(instance2);
        int partitionId = instance1.getPartitionService().getPartition(key).getPartitionId();
        setPartitionId(map, partitionId);
        String value = randomString();
        HashMap<String, String> m = new HashMap<String, String>();
        m.put(key, value);
        map.putAll(m);
        assertEquals(value, map.get(key));
    }
    /**
     * Builds a client config with smart routing disabled, connected only to
     * the given member's address — i.e. a "dummy" client.
     */
    private ClientConfig getClientConfig(HazelcastInstance instance) {
        Address address = instance.getCluster().getLocalMember().getAddress();
        String addressString = address.getHost() + ":" + address.getPort();
        ClientConfig dummyClientConfig = new ClientConfig();
        ClientNetworkConfig networkConfig = new ClientNetworkConfig();
        networkConfig.setSmartRouting(false);
        networkConfig.addAddress(addressString);
        dummyClientConfig.setNetworkConfig(networkConfig);
        return dummyClientConfig;
    }
    /**
     * Forces the client proxy's target partition via reflection.
     * NOTE(review): relies on the proxy implementation declaring a
     * "targetPartitionId" field — breaks if that internal field is renamed.
     */
    private void setPartitionId(ReplicatedMap<String, String> map, int partitionId) throws Exception {
        Class<?> clazz = map.getClass();
        Field targetPartitionId = clazz.getDeclaredField("targetPartitionId");
        targetPartitionId.setAccessible(true);
        targetPartitionId.setInt(map, partitionId);
    }
}
| |
/*
* Copyright 2013 Square Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.wire;
import com.google.gson.Gson;
import com.google.gson.JsonElement;
import com.google.gson.TypeAdapter;
import com.google.gson.reflect.TypeToken;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonToken;
import com.google.gson.stream.JsonWriter;
import java.io.IOException;
import java.lang.reflect.Type;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import okio.ByteString;
import static com.squareup.wire.Message.Datatype;
import static com.squareup.wire.Message.Label;
import static com.squareup.wire.UnknownFieldMap.UnknownFieldType;
/**
 * Gson {@link TypeAdapter} that serializes Wire {@link Message}s to JSON and
 * back. Regular fields are written by name; extensions by extension name;
 * unknown fields are written under their numeric tag as an array whose first
 * element names the wire type ("varint", "fixed32", "fixed64",
 * "length-delimited") followed by the raw values.
 */
class MessageTypeAdapter<M extends Message> extends TypeAdapter<M> {
  // 2^64, used to convert sint64 values >= 2^63 to unsigned decimal form
  private static final BigInteger POWER_64 = new BigInteger("18446744073709551616");
  private final Wire wire;
  private final Gson gson;
  private final Class<M> type;
  @SuppressWarnings("unchecked")
  public MessageTypeAdapter(Wire wire, Gson gson, TypeToken<M> type) {
    this.wire = wire;
    this.gson = gson;
    this.type = (Class<M>) type.getRawType();
  }
  @SuppressWarnings("unchecked")
  @Override public void write(JsonWriter out, M message) throws IOException {
    if (message == null) {
      out.nullValue();
      return;
    }
    MessageAdapter<M> messageAdapter = wire.messageAdapter((Class<M>) message.getClass());
    out.beginObject();
    // Declared fields: skip unset (null) values entirely.
    for (MessageAdapter.FieldInfo fieldInfo : messageAdapter.getFields()) {
      Object value = messageAdapter.getFieldValue(message, fieldInfo);
      if (value == null) {
        continue;
      }
      out.name(fieldInfo.name);
      emitJson(out, value, fieldInfo.datatype, fieldInfo.label);
    }
    if (message instanceof ExtendableMessage<?>) {
      emitExtensions((ExtendableMessage<?>) message, out);
    }
    // Unknown fields: keyed by tag, emitted as ["<wire type>", v1, v2, ...].
    Collection<List<UnknownFieldMap.FieldValue>> unknownFields = message.unknownFields();
    if (unknownFields != null) {
      for (List<UnknownFieldMap.FieldValue> fieldList : unknownFields) {
        int tag = fieldList.get(0).getTag();
        out.name("" + tag);
        out.beginArray();
        boolean first = true;
        for (UnknownFieldMap.FieldValue unknownField : fieldList) {
          switch (unknownField.getWireType()) {
            case VARINT:
              if (first) out.value("varint");
              out.value(unknownField.getAsLong());
              break;
            case FIXED32:
              if (first) out.value("fixed32");
              out.value(unknownField.getAsInteger());
              break;
            case FIXED64:
              if (first) out.value("fixed64");
              out.value(unknownField.getAsLong());
              break;
            case LENGTH_DELIMITED:
              if (first) out.value("length-delimited");
              out.value(unknownField.getAsBytes().base64());
              break;
            default:
              throw new AssertionError("Unknown wire type " + unknownField.getWireType());
          }
          first = false;
        }
        out.endArray();
      }
    }
    out.endObject();
  }
  /** Writes every set extension of {@code message} as a named JSON member. */
  @SuppressWarnings("unchecked")
  private <M extends ExtendableMessage<?>, E> void emitExtensions(ExtendableMessage<M> message,
      JsonWriter out) throws IOException {
    if (message.extensionMap == null) return;
    for (int i = 0; i < message.extensionMap.size(); i++) {
      Extension<M, E> extension = (Extension<M, E>) message.extensionMap.getExtension(i);
      E value = (E) message.extensionMap.getExtensionValue(i);
      emitExtension(extension, value, out);
    }
  }
  private <M extends ExtendableMessage<?>, E> void emitExtension(Extension<M, E> extension,
      E value, JsonWriter out) throws IOException {
    out.name(extension.getName());
    emitJson(out, value, extension.getDatatype(), extension.getLabel());
  }
  /**
   * Writes one field value. UINT64 needs special handling so negative longs
   * are rendered in unsigned decimal form; everything else defers to Gson.
   */
  @SuppressWarnings("unchecked")
  private void emitJson(JsonWriter out, Object value, Datatype datatype, Label label)
      throws IOException {
    if (datatype == Datatype.UINT64) {
      if (label.isRepeated()) {
        List<Long> longs = (List<Long>) value;
        out.beginArray();
        for (int i = 0, count = longs.size(); i < count; i++) {
          emitUint64(longs.get(i), out);
        }
        out.endArray();
      } else {
        emitUint64((Long) value, out);
      }
    } else {
      gson.toJson(value, value.getClass(), out);
    }
  }
  /** Emits a uint64: negative longs become their unsigned value via 2^64 + v. */
  private void emitUint64(Long value, JsonWriter out) throws IOException {
    if (value < 0) {
      BigInteger unsigned = POWER_64.add(BigInteger.valueOf(value));
      out.value(unsigned);
    } else {
      out.value(value);
    }
  }
  @SuppressWarnings("unchecked")
  @Override public M read(JsonReader in) throws IOException {
    if (in.peek() == JsonToken.NULL) {
      in.nextNull();
      return null;
    }
    MessageAdapter<M> messageAdapter = wire.messageAdapter(type);
    Message.Builder<M> builder = messageAdapter.newBuilder();
    in.beginObject();
    while (in.peek() == JsonToken.NAME) {
      String name = in.nextName();
      MessageAdapter.FieldInfo fieldInfo = messageAdapter.getField(name);
      if (fieldInfo == null) {
        // Not a declared field: try an extension, else treat the name as a
        // numeric tag of an unknown field.
        Extension<ExtendableMessage<?>, ?> extension = messageAdapter.getExtension(name);
        if (extension == null) {
          parseUnknownField(in, builder, Integer.parseInt(name));
        } else {
          Type valueType = getType(extension);
          Object value = parseValue(extension.getLabel(), valueType, parse(in));
          ((ExtendableMessage.ExtendableBuilder) builder).setExtension(extension, value);
        }
      } else {
        Type valueType = getType(fieldInfo);
        Object value = parseValue(fieldInfo.label, valueType, parse(in));
        // Use the builder setter method to ensure proper 'oneof' behavior.
        messageAdapter.setBuilderMethod(builder, fieldInfo, value);
      }
    }
    in.endObject();
    return builder.build();
  }
  private JsonElement parse(JsonReader in) {
    return gson.fromJson(in, JsonElement.class);
  }
  /** Resolves the Java Type a declared field's JSON value should parse into. */
  private Type getType(MessageAdapter.FieldInfo fieldInfo) {
    Type valueType;
    if (fieldInfo.datatype == Datatype.ENUM) {
      valueType = fieldInfo.enumType;
    } else if (fieldInfo.datatype == Datatype.MESSAGE) {
      valueType = fieldInfo.messageType;
    } else {
      valueType = javaType(fieldInfo.datatype);
    }
    return valueType;
  }
  /** Parses a scalar value, or each element of a JSON array for repeated fields. */
  private Object parseValue(Label label, Type valueType, JsonElement valueElement) {
    if (label.isRepeated()) {
      List<Object> valueList = new ArrayList<Object>();
      for (JsonElement element : valueElement.getAsJsonArray()) {
        valueList.add(readJson(valueType, element));
      }
      return valueList;
    } else {
      return readJson(valueType, valueElement);
    }
  }
  /** Resolves the Java Type an extension's JSON value should parse into. */
  private Type getType(Extension<ExtendableMessage<?>, ?> extension) {
    Datatype datatype = extension.getDatatype();
    if (datatype == Datatype.ENUM) {
      return extension.getEnumType();
    } else if (datatype == Datatype.MESSAGE) {
      return extension.getMessageType();
    } else {
      return javaType(datatype);
    }
  }
  /**
   * Reads an unknown field written by {@link #write}: an array whose first
   * element names the wire type, followed by the raw values.
   */
  private void parseUnknownField(JsonReader in, Message.Builder<M> builder, int tag)
      throws IOException {
    in.beginArray();
    UnknownFieldType type = UnknownFieldType.of(in.nextString());
    while (in.peek() != JsonToken.END_ARRAY) {
      switch (type) {
        case VARINT:
          // Varints and fixed64 values are written with getAsLong() above, so
          // they must be read back as longs; nextInt() would fail or truncate
          // for values outside the 32-bit range.
          builder.addVarint(tag, in.nextLong());
          break;
        case FIXED32:
          builder.addFixed32(tag, in.nextInt());
          break;
        case FIXED64:
          builder.addFixed64(tag, in.nextLong());
          break;
        case LENGTH_DELIMITED:
          builder.addLengthDelimited(tag, ByteString.decodeBase64(in.nextString()));
          break;
        default:
          throw new AssertionError("Unknown field type " + type);
      }
    }
    in.endArray();
  }
  private Object readJson(Type valueType, JsonElement element) {
    return gson.fromJson(element, valueType);
  }
  // Returns the Type used to store a given primitive scalar type.
  @SuppressWarnings("unchecked")
  private Type javaType(Datatype datatype) {
    switch (datatype) {
      case INT32:case UINT32:case SINT32:case FIXED32:case SFIXED32:
        return int.class;
      case INT64:case UINT64:case SINT64:case FIXED64:case SFIXED64:
        return long.class;
      case BOOL:
        return boolean.class;
      case FLOAT:
        return float.class;
      case DOUBLE:
        return double.class;
      case STRING:
        return String.class;
      case BYTES:
        return ByteString.class;
      default:
        throw new AssertionError("Unknown datatype: " + datatype);
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jclouds.http;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.jclouds.http.Uris.uriBuilder;
import static org.jclouds.http.utils.Queries.queryParser;
import static org.jclouds.io.Payloads.newUrlEncodedFormPayload;
import java.net.URI;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jclouds.io.Payload;
import org.jclouds.javax.annotation.Nullable;
import com.google.common.base.Objects;
import com.google.common.base.Objects.ToStringHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import com.google.common.collect.Multimaps;
/**
* Represents a request that can be executed within {@link HttpCommandExecutorService}
*/
public class HttpRequest extends HttpMessage {
   // HTTP methods that conventionally carry no request body.
   public static final Set<String> NON_PAYLOAD_METHODS = ImmutableSet
         .of("OPTIONS", "GET", "HEAD", "DELETE", "TRACE", "CONNECT");
   public static Builder<?> builder() {
      return new ConcreteBuilder();
   }
   /** Returns a builder pre-populated from this request's state. */
   public Builder<?> toBuilder() {
      return new ConcreteBuilder().fromHttpRequest(this);
   }
   public abstract static class Builder<T extends Builder<T>> extends HttpMessage.Builder<T> {
      protected String method;
      protected URI endpoint;
      protected ImmutableList.Builder<HttpRequestFilter> filters = ImmutableList.<HttpRequestFilter>builder();
      /**
       * @see HttpRequest#getMethod()
       */
      public T method(String method) {
         this.method = checkNotNull(method, "method");
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T endpoint(URI endpoint) {
         this.endpoint = checkNotNull(endpoint, "endpoint");
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T endpoint(String endpoint) {
         return endpoint(URI.create(checkNotNull(endpoint, "endpoint")));
      }
      /**
       * Appends query parameter values to the current endpoint.
       *
       * @see HttpRequest#getEndpoint()
       */
      public T addQueryParam(String name, Iterable<String> values) {
         endpoint = uriBuilder(endpoint).addQuery(name, values).build();
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T addQueryParam(String name, String... values) {
         endpoint = uriBuilder(endpoint).addQuery(name, values).build();
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T addQueryParams(Multimap<String, String> parameters) {
         endpoint = uriBuilder(endpoint).addQuery(parameters).build();
         return self();
      }
      /**
       * Replaces any existing values for {@code name} in the endpoint's query.
       *
       * @see HttpRequest#getEndpoint()
       */
      public T replaceQueryParam(String name, Iterable<String> values) {
         endpoint = uriBuilder(endpoint).replaceQuery(name, values).build();
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T replaceQueryParam(String name, String... values) {
         endpoint = uriBuilder(endpoint).replaceQuery(name, values).build();
         return self();
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T replaceQueryParams(Map<String, String> parameters) {
         return replaceQueryParams(Multimaps.forMap(parameters));
      }
      /**
       * @see HttpRequest#getEndpoint()
       */
      public T replaceQueryParams(Multimap<String, String> parameters) {
         endpoint = uriBuilder(endpoint).replaceQuery(parameters).build();
         return self();
      }
      /**
       * Replaces the endpoint's path; requires a previously-set endpoint.
       *
       * @see HttpRequest#getEndpoint()
       */
      public T replacePath(String path) {
         checkNotNull(endpoint, "endpoint");
         checkNotNull(path, "path");
         endpoint = uriBuilder(endpoint).path(path).build();
         return self();
      }
      /**
       * @see #addFormParams
       */
      public T addFormParam(String name, String... values) {
         return addFormParams(ImmutableMultimap.<String, String> builder()
               .putAll(checkNotNull(name, "name"), checkNotNull(values, "values of %s", name)).build());
      }
      /**
       * Replaces the current payload with one that is a urlencoded payload including the following
       * parameters and any formerly set.
       *
       * @see HttpRequest#getPayload()
       */
      public T addFormParams(Multimap<String, String> parameters) {
         checkNotNull(endpoint, "endpoint");
         // Re-parse any existing urlencoded payload so previous params survive.
         Multimap<String, String> map = payload != null ? queryParser().apply(payload.getRawContent().toString())
               : LinkedHashMultimap.<String, String> create();
         map.putAll(parameters);
         payload = newUrlEncodedFormPayload(map);
         return self();
      }
      /**
       * @see #replaceFormParams
       */
      public T replaceFormParam(String name, String... values) {
         return replaceFormParams(ImmutableMultimap.<String, String> builder()
               .putAll(checkNotNull(name, "name"), checkNotNull(values, "values of %s", name)).build());
      }
      /**
       * Replaces the current payload with one that is a urlencoded payload including the following
       * parameters and any formerly set.
       *
       * @see HttpRequest#getPayload()
       */
      public T replaceFormParams(Multimap<String, String> parameters) {
         checkNotNull(endpoint, "endpoint");
         Multimap<String, String> map = payload != null ? queryParser().apply(payload.getRawContent().toString())
               : LinkedHashMultimap.<String, String> create();
         // Unlike addFormParams, existing values for each supplied key are replaced.
         for (Map.Entry<String, Collection<String>> entry : parameters.asMap().entrySet()) {
            map.replaceValues(entry.getKey(), entry.getValue());
         }
         payload = newUrlEncodedFormPayload(map);
         return self();
      }
      /**
       * Replaces all filters with the supplied ones.
       *
       * @see HttpRequest#getFilters()
       */
      public T filters(Iterable<HttpRequestFilter> filters) {
         this.filters = ImmutableList.<HttpRequestFilter>builder();
         this.filters.addAll(checkNotNull(filters, "filters"));
         return self();
      }
      /**
       * @see HttpRequest#getFilters()
       */
      public T filter(HttpRequestFilter filter) {
         this.filters.add(checkNotNull(filter, "filter"));
         return self();
      }
      public HttpRequest build() {
         return new HttpRequest(method, endpoint, headers.build(), payload, filters.build());
      }
      public T fromHttpRequest(HttpRequest in) {
         return super.fromHttpMessage(in)
                     .method(in.getMethod())
                     .endpoint(in.getEndpoint())
                     .filters(in.getFilters());
      }
   }
   private static class ConcreteBuilder extends Builder<ConcreteBuilder> {
      @Override
      protected ConcreteBuilder self() {
         return this;
      }
   }
   private final String method;
   private final URI endpoint;
   private final List<HttpRequestFilter> filters;
   protected HttpRequest(String method, URI endpoint, Multimap<String, String> headers, @Nullable Payload payload,
         Iterable<HttpRequestFilter> filters) {
      super(headers, payload);
      this.method = checkNotNull(method, "method");
      this.endpoint = checkNotNull(endpoint, "endpoint");
      checkArgument(endpoint.getHost() != null, String.format("endpoint.getHost() is null for %s", endpoint));
      this.filters = ImmutableList.<HttpRequestFilter> copyOf(checkNotNull(filters, "filters"));
   }
   /** Returns the HTTP request line, e.g. {@code GET /path HTTP/1.1}. */
   public String getRequestLine() {
      return String.format("%s %s HTTP/1.1", getMethod(), getEndpoint().toASCIIString());
   }
   /**
    * We cannot return an enum, as per specification custom methods are allowed. Enums are not
    * extensible.
    *
    * @see <a href="http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.1" >rfc2616</a>
    */
   public String getMethod() {
      return method;
   }
   public URI getEndpoint() {
      return endpoint;
   }
   public List<HttpRequestFilter> getFilters() {
      return filters;
   }
   @Override
   public int hashCode() {
      // Note: filters are deliberately excluded, consistent with equals below.
      return Objects.hashCode(method, endpoint, super.hashCode());
   }
   @Override
   public boolean equals(Object obj) {
      if (this == obj) return true;
      // testing equals by value, not by type
      if (!(obj instanceof HttpRequest)) return false;
      HttpRequest that = HttpRequest.class.cast(obj);
      // Filters do not participate in equality; only method, endpoint and the
      // HttpMessage state compared by super.equals.
      return super.equals(that) && Objects.equal(this.method, that.method)
            && Objects.equal(this.endpoint, that.endpoint);
   }
   @Override
   protected ToStringHelper string() {
      return Objects.toStringHelper("").omitNullValues()
                    .add("method", method)
                    .add("endpoint", endpoint)
                    .add("headers", headers)
                    .add("payload", payload);
   }
}
| |
/*
* PackageMatcher.java May 2007
*
* Copyright (C) 2007, Niall Gallagher <niallg@users.sf.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.simpleframework.xml.transform;
import java.io.File;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.URL;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Currency;
import java.util.Date;
import java.util.GregorianCalendar;
import java.util.Locale;
import java.util.TimeZone;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
/**
* The <code>PackageMatcher</code> object is used to match the stock
* transforms to Java packages. This is used to match useful types
* from the <code>java.lang</code> and <code>java.util</code> packages
* as well as other Java packages. This matcher groups types by their
* package names and attempts to search the stock transforms for a
* suitable match. If no match can be found this throws an exception.
*
* @author Niall Gallagher
*
* @see org.simpleframework.xml.transform.DefaultMatcher
*/
class PackageMatcher implements Matcher {
   /**
    * Constructor for the <code>PackageMatcher</code> object. The
    * package matcher is used to resolve a transform instance to
    * convert object types to an from strings. If a match cannot
    * be found with this matcher then an exception is thrown.
    */
   public PackageMatcher() {
      super(); // stateless: matching is driven entirely by the type argument
   }
/**
* This method attempts to perform a resolution of the transform
* based on its package prefix. This allows this matcher to create
* a logical group of transforms within a single method based on
* the types package prefix. If no transform can be found then
* this will throw an exception.
*
* @param type this is the type to resolve a transform for
*
* @return the transform that is used to transform that type
*/
public Transform match(Class type) throws Exception {
String name = type.getName();
if(name.startsWith("java.lang")) {
return matchLanguage(type);
}
if(name.startsWith("java.util")) {
return matchUtility(type);
}
if(name.startsWith("java.net")) {
return matchURL(type);
}
if(name.startsWith("java.io")) {
return matchFile(type);
}
if(name.startsWith("java.sql")) {
return matchSQL(type);
}
if(name.startsWith("java.math")) {
return matchMath(type);
}
return matchEnum(type);
}
/**
* This is used to resolve <code>Transform</code> implementations
* that are <code>Enum</code> implementations. If the type is not
* an enumeration then this will return null.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchEnum(Class type) {
Class parent = type.getSuperclass();
if(parent != null) {
if(parent.isEnum()) {
return new EnumTransform(type);
}
if(type.isEnum()) {
return new EnumTransform(type);
}
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.lang</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchLanguage(Class type) throws Exception {
if(type == Boolean.class) {
return new BooleanTransform();
}
if(type == Integer.class) {
return new IntegerTransform();
}
if(type == Long.class) {
return new LongTransform();
}
if(type == Double.class) {
return new DoubleTransform();
}
if(type == Float.class) {
return new FloatTransform();
}
if(type == Short.class) {
return new ShortTransform();
}
if(type == Byte.class) {
return new ByteTransform();
}
if(type == Character.class) {
return new CharacterTransform();
}
if(type == String.class) {
return new StringTransform();
}
if(type == Class.class) {
return new ClassTransform();
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.math</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchMath(Class type) throws Exception {
if(type == BigDecimal.class) {
return new BigDecimalTransform();
}
if(type == BigInteger.class) {
return new BigIntegerTransform();
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.util</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchUtility(Class type) throws Exception {
if(type == Date.class) {
return new DateTransform(type);
}
if(type == Locale.class) {
return new LocaleTransform();
}
if(type == Currency.class) {
return new CurrencyTransform();
}
if(type == GregorianCalendar.class) {
return new GregorianCalendarTransform();
}
if(type == TimeZone.class) {
return new TimeZoneTransform();
}
if(type == AtomicInteger.class) {
return new AtomicIntegerTransform();
}
if(type == AtomicLong.class) {
return new AtomicLongTransform();
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.sql</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchSQL(Class type) throws Exception {
if(type == Time.class) {
return new DateTransform(type);
}
if(type == java.sql.Date.class) {
return new DateTransform(type);
}
if(type == Timestamp.class) {
return new DateTransform(type);
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.io</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchFile(Class type) throws Exception {
if(type == File.class) {
return new FileTransform();
}
return null;
}
/**
* This is used to resolve <code>Transform</code> implementations
* that relate to the <code>java.net</code> package. If the type
* does not resolve to a valid transform then this method will
* throw an exception to indicate that no stock transform exists
* for the specified type.
*
* @param type this is the type to resolve a stock transform for
*
* @return this will return a transform for the specified type
*/
private Transform matchURL(Class type) throws Exception {
if(type == URL.class) {
return new URLTransform();
}
return null;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.portable;
import org.apache.ignite.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.portable.mutabletest.*;
import org.apache.ignite.internal.processors.cache.portable.*;
import org.apache.ignite.internal.util.lang.*;
import org.apache.ignite.marshaller.portable.*;
import org.apache.ignite.portable.*;
import org.apache.ignite.testframework.*;
import org.apache.ignite.testframework.junits.common.*;
import com.google.common.collect.*;
import org.junit.*;
import java.lang.reflect.*;
import java.sql.*;
import java.util.*;
import java.util.Date;
import static org.apache.ignite.cache.CacheMode.*;
import static org.apache.ignite.internal.portable.mutabletest.GridPortableTestClasses.*;
/**
 * Additional self tests for {@code PortableBuilderImpl}: reading and
 * overriding fields of all simple types, mutating nested collections,
 * maps and arrays through the builder, cyclic references, raw data,
 * and the portable metadata recorded by builds.
 */
public class GridPortableBuilderAdditionalSelfTest extends GridCommonAbstractTest {
    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(gridName);
        CacheConfiguration cacheCfg = new CacheConfiguration();
        cacheCfg.setCacheMode(REPLICATED);
        cfg.setCacheConfiguration(cacheCfg);
        PortableMarshaller marsh = new PortableMarshaller();
        marsh.setClassNames(Arrays.asList("org.apache.ignite.internal.portable.mutabletest.*"));
        marsh.setConvertStringToBytes(useUtf8());
        cfg.setMarshaller(marsh);
        return cfg;
    }
    /** {@inheritDoc} */
    @Override protected void beforeTestsStarted() throws Exception {
        startGrids(1);
    }
    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        stopAllGrids();
    }
    /** {@inheritDoc} */
    @Override protected void afterTest() throws Exception {
        jcache(0).clear();
    }
    /**
     * @return Whether to use UTF8 strings.
     */
    protected boolean useUtf8() {
        return true;
    }
    /**
     * @return Portables API.
     */
    protected IgnitePortables portables() {
        return grid(0).portables();
    }
    /**
     * Reads every declared field of an all-types object back through the
     * builder and compares it against the original reflective value.
     *
     * @throws Exception If failed.
     */
    public void testSimpleTypeFieldRead() throws Exception {
        TestObjectAllTypes exp = new TestObjectAllTypes();
        exp.setDefaultData();
        PortableBuilder mutPo = wrap(exp);
        for (Field field : TestObjectAllTypes.class.getDeclaredFields()) {
            Object expVal = field.get(exp);
            Object actVal = mutPo.getField(field.getName());
            // Enum, enum-array and map-entry fields are surfaced through
            // builder-specific wrappers, so they need bespoke comparisons.
            switch (field.getName()) {
                case "anEnum":
                    assertEquals(((PortableBuilderEnum)actVal).getOrdinal(), ((Enum)expVal).ordinal());
                    break;
                case "enumArr": {
                    PortableBuilderEnum[] actArr = (PortableBuilderEnum[])actVal;
                    Enum[] expArr = (Enum[])expVal;
                    assertEquals(expArr.length, actArr.length);
                    for (int i = 0; i < actArr.length; i++)
                        assertEquals(expArr[i].ordinal(), actArr[i].getOrdinal());
                    break;
                }
                case "entry":
                    assertEquals(((Map.Entry)expVal).getKey(), ((Map.Entry)actVal).getKey());
                    assertEquals(((Map.Entry)expVal).getValue(), ((Map.Entry)actVal).getValue());
                    break;
                default:
                    assertTrue(field.getName(), Objects.deepEquals(expVal, actVal));
                    break;
            }
        }
    }
    /**
     * Builds a wrapped object without changes and checks the deserialized
     * copy deep-equals the original.
     */
    public void testSimpleTypeFieldSerialize() {
        TestObjectAllTypes exp = new TestObjectAllTypes();
        exp.setDefaultData();
        PortableBuilderImpl mutPo = wrap(exp);
        TestObjectAllTypes res = mutPo.build().deserialize();
        GridTestUtils.deepEquals(exp, res);
    }
    /**
     * Overrides every declared field through the builder and verifies the
     * rebuilt object carries the new values.
     *
     * @throws Exception If any error occurs.
     */
    public void testSimpleTypeFieldOverride() throws Exception {
        TestObjectAllTypes exp = new TestObjectAllTypes();
        exp.setDefaultData();
        PortableBuilderImpl mutPo = wrap(new TestObjectAllTypes());
        for (Field field : TestObjectAllTypes.class.getDeclaredFields())
            mutPo.setField(field.getName(), field.get(exp));
        TestObjectAllTypes res = mutPo.build().deserialize();
        GridTestUtils.deepEquals(exp, res);
    }
    /**
     * Sets every non-primitive field to null through the builder and checks
     * the rebuilt object has null in all of them.
     *
     * @throws Exception If any error occurs.
     */
    public void testSimpleTypeFieldSetNull() throws Exception {
        TestObjectAllTypes exp = new TestObjectAllTypes();
        exp.setDefaultData();
        PortableBuilderImpl mutPo = wrap(exp);
        for (Field field : TestObjectAllTypes.class.getDeclaredFields()) {
            if (!field.getType().isPrimitive())
                mutPo.setField(field.getName(), null);
        }
        TestObjectAllTypes res = mutPo.build().deserialize();
        for (Field field : TestObjectAllTypes.class.getDeclaredFields()) {
            if (!field.getType().isPrimitive())
                assertNull(field.getName(), field.get(res));
        }
    }
    /**
     * Creates a cyclic object graph through nested builders and verifies the
     * cycle survives build/deserialize.
     *
     * @throws IgniteCheckedException If any error occurs.
     */
    public void testMakeCyclicDependency() throws IgniteCheckedException {
        TestObjectOuter outer = new TestObjectOuter();
        outer.inner = new TestObjectInner();
        PortableBuilderImpl mutOuter = wrap(outer);
        PortableBuilderImpl mutInner = mutOuter.getField("inner");
        mutInner.setField("outer", mutOuter);
        mutInner.setField("foo", mutInner);
        TestObjectOuter res = mutOuter.build().deserialize();
        assertEquals(res, res.inner.outer);
        assertEquals(res.inner, res.inner.foo);
    }
    /**
     * Mutates a string array obtained from the builder and compares the
     * source and rebuilt arrays.
     */
    public void testSimpleArrayModification() {
        TestObjectAllTypes obj = new TestObjectAllTypes();
        obj.strArr = new String[]{"a", "a", "a"};
        PortableBuilderImpl mutObj = wrap(obj);
        String[] arr = mutObj.getField("strArr");
        arr[0] = "b";
        TestObjectAllTypes res = mutObj.build().deserialize();
        Assert.assertArrayEquals(obj.strArr, res.strArr);
    }
    /**
     * Checks that mutating an Object[] read from the builder does not leak
     * into the built result. Disabled pending IGNITE-1273.
     */
    public void testModifyObjectArray() {
        fail("https://issues.apache.org/jira/browse/IGNITE-1273");
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = new Object[]{"a"};
        PortableBuilderImpl mutObj = wrap(obj);
        Object[] arr = mutObj.getField("foo");
        Assert.assertArrayEquals(new Object[]{"a"}, arr);
        arr[0] = "b";
        TestObjectContainer res = mutObj.build().deserialize();
        Assert.assertArrayEquals(new Object[] {"a"}, (Object[])res.foo);
    }
    /**
     * Overrides an object-array field with mixed element types, including a
     * self reference, and checks the deserialized array deep-equals it.
     */
    public void testOverrideObjectArrayField() {
        PortableBuilderImpl mutObj = wrap(new TestObjectContainer());
        Object[] createdArr = {mutObj, "a", 1, new String[] {"s", "s"}, new byte[]{1, 2}, new UUID(3, 0)};
        mutObj.setField("foo", createdArr.clone());
        TestObjectContainer res = mutObj.build().deserialize();
        createdArr[0] = res;
        assertTrue(Objects.deepEquals(createdArr, res.foo));
    }
    /**
     * Verifies nested arrays with self references are readable and mutable
     * through the builder and the references survive rebuild.
     */
    public void testDeepArray() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = new Object[]{new Object[]{"a", obj}};
        PortableBuilderImpl mutObj = wrap(obj);
        Object[] arr = (Object[])mutObj.<Object[]>getField("foo")[0];
        assertEquals("a", arr[0]);
        assertSame(mutObj, arr[1]);
        arr[0] = mutObj;
        TestObjectContainer res = mutObj.build().deserialize();
        arr = (Object[])((Object[])res.foo)[0];
        assertSame(arr[0], res);
        assertSame(arr[0], arr[1]);
    }
    /**
     * Reads an ArrayList field (containing a self reference) through the
     * builder and checks its contents.
     */
    public void testArrayListRead() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newArrayList(obj, "a");
        PortableBuilderImpl mutObj = wrap(obj);
        List<Object> list = mutObj.getField("foo");
        assert list.equals(Lists.newArrayList(mutObj, "a"));
    }
    /**
     * Overrides a field with an ArrayList containing a builder reference and
     * a nested list, then verifies the rebuilt value.
     */
    public void testArrayListOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutObj = wrap(obj);
        ArrayList<Object> list = Lists.newArrayList(mutObj, "a", Lists.newArrayList(1, 2));
        mutObj.setField("foo", list);
        TestObjectContainer res = mutObj.build().deserialize();
        list.set(0, res);
        assertNotSame(list, res.foo);
        assertEquals(list, res.foo);
    }
    /**
     * Exercises add/remove/subList/indexOf on an ArrayList obtained from the
     * builder and checks the mutations are reflected in the built object.
     */
    public void testArrayListModification() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newArrayList("a", "b", "c");
        PortableBuilderImpl mutObj = wrap(obj);
        List<String> list = mutObj.getField("foo");
        list.add("!"); // "a", "b", "c", "!"
        list.add(0, "_"); // "_", "a", "b", "c", "!"
        String s = list.remove(1); // "_", "b", "c", "!"
        assertEquals("a", s);
        assertEquals(Arrays.asList("c", "!"), list.subList(2, 4));
        assertEquals(1, list.indexOf("b"));
        assertEquals(1, list.lastIndexOf("b"));
        TestObjectContainer res = mutObj.build().deserialize();
        assertTrue(res.foo instanceof ArrayList);
        assertEquals(Arrays.asList("_", "b", "c", "!"), res.foo);
    }
    /**
     * Clears an ArrayList through the builder and checks the built object
     * contains an empty list.
     */
    public void testArrayListClear() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newArrayList("a", "b", "c");
        PortableBuilderImpl mutObj = wrap(obj);
        List<String> list = mutObj.getField("foo");
        list.clear();
        assertEquals(Collections.emptyList(), mutObj.build().<TestObjectContainer>deserialize().foo);
    }
    /**
     * Serializes an untouched ArrayList field and checks the deserialized
     * copy equals the source.
     */
    public void testArrayListWriteUnmodifiable() {
        TestObjectContainer obj = new TestObjectContainer();
        ArrayList<Object> src = Lists.newArrayList(obj, "a", "b", "c");
        obj.foo = src;
        PortableBuilderImpl mutObj = wrap(obj);
        TestObjectContainer deserialized = mutObj.build().deserialize();
        List<Object> res = (List<Object>)deserialized.foo;
        src.set(0, deserialized);
        assertEquals(src, res);
    }
    /**
     * Reads a LinkedList field (containing a self reference) through the
     * builder and checks its contents.
     */
    public void testLinkedListRead() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newLinkedList(Arrays.asList(obj, "a"));
        PortableBuilderImpl mutObj = wrap(obj);
        List<Object> list = mutObj.getField("foo");
        assert list.equals(Lists.newLinkedList(Arrays.asList(mutObj, "a")));
    }
    /**
     * Overrides a field with a LinkedList containing a builder reference and
     * a nested list, then verifies the rebuilt value.
     */
    public void testLinkedListOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutObj = wrap(obj);
        List<Object> list = Lists.newLinkedList(Arrays.asList(mutObj, "a", Lists.newLinkedList(Arrays.asList(1, 2))));
        mutObj.setField("foo", list);
        TestObjectContainer res = mutObj.build().deserialize();
        list.set(0, res);
        assertNotSame(list, res.foo);
        assertEquals(list, res.foo);
    }
    /**
     * Exercises add/remove/subList/indexOf on a LinkedList obtained from the
     * builder and checks the mutations are reflected in the built object.
     */
    public void testLinkedListModification() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newLinkedList(Arrays.asList("a", "b", "c"));
        PortableBuilderImpl mutObj = wrap(obj);
        List<String> list = mutObj.getField("foo");
        list.add("!"); // "a", "b", "c", "!"
        list.add(0, "_"); // "_", "a", "b", "c", "!"
        String s = list.remove(1); // "_", "b", "c", "!"
        assertEquals("a", s);
        assertEquals(Arrays.asList("c", "!"), list.subList(2, 4));
        assertEquals(1, list.indexOf("b"));
        assertEquals(1, list.lastIndexOf("b"));
        TestObjectContainer res = mutObj.build().deserialize();
        assertTrue(res.foo instanceof LinkedList);
        assertEquals(Arrays.asList("_", "b", "c", "!"), res.foo);
    }
    /**
     * Serializes an untouched LinkedList field and checks the deserialized
     * copy equals the source.
     */
    public void testLinkedListWriteUnmodifiable() {
        TestObjectContainer obj = new TestObjectContainer();
        LinkedList<Object> src = Lists.newLinkedList(Arrays.asList(obj, "a", "b", "c"));
        obj.foo = src;
        PortableBuilderImpl mutObj = wrap(obj);
        TestObjectContainer deserialized = mutObj.build().deserialize();
        List<Object> res = (List<Object>)deserialized.foo;
        src.set(0, deserialized);
        assertEquals(src, res);
    }
    /**
     * Reads a HashSet field (containing a self reference) through the
     * builder and checks its contents.
     */
    public void testHashSetRead() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Sets.newHashSet(obj, "a");
        PortableBuilderImpl mutObj = wrap(obj);
        Set<Object> set = mutObj.getField("foo");
        assert set.equals(Sets.newHashSet(mutObj, "a"));
    }
    /**
     * Overrides a field with a HashSet containing a builder reference and a
     * nested set, then verifies the rebuilt value.
     */
    public void testHashSetOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutObj = wrap(obj);
        Set<Object> c = Sets.newHashSet(mutObj, "a", Sets.newHashSet(1, 2));
        mutObj.setField("foo", c);
        TestObjectContainer res = mutObj.build().deserialize();
        c.remove(mutObj);
        c.add(res);
        assertNotSame(c, res.foo);
        assertEquals(c, res.foo);
    }
    /**
     * Exercises add/remove/contains on a HashSet obtained from the builder
     * and checks the mutations are reflected in the built object.
     */
    public void testHashSetModification() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Sets.newHashSet("a", "b", "c");
        PortableBuilderImpl mutObj = wrap(obj);
        Set<String> set = mutObj.getField("foo");
        set.remove("b");
        set.add("!");
        assertEquals(Sets.newHashSet("a", "!", "c"), set);
        assertTrue(set.contains("a"));
        assertTrue(set.contains("!"));
        TestObjectContainer res = mutObj.build().deserialize();
        assertTrue(res.foo instanceof HashSet);
        assertEquals(Sets.newHashSet("a", "!", "c"), res.foo);
    }
    /**
     * Serializes an untouched HashSet field and checks the deserialized copy
     * equals the source.
     */
    public void testHashSetWriteUnmodifiable() {
        TestObjectContainer obj = new TestObjectContainer();
        Set<Object> src = Sets.newHashSet(obj, "a", "b", "c");
        obj.foo = src;
        TestObjectContainer deserialized = wrap(obj).build().deserialize();
        Set<Object> res = (Set<Object>)deserialized.foo;
        src.remove(obj);
        src.add(deserialized);
        assertEquals(src, res);
    }
    /**
     * Reads a map field with self references as both key and value through
     * the builder and checks its contents.
     */
    public void testMapRead() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Maps.newHashMap(ImmutableMap.of(obj, "a", "b", obj));
        PortableBuilderImpl mutObj = wrap(obj);
        Map<Object, Object> map = mutObj.getField("foo");
        assert map.equals(ImmutableMap.of(mutObj, "a", "b", mutObj));
    }
    /**
     * Overrides a field with a map containing builder references and checks
     * the rebuilt value resolves them to the deserialized object.
     */
    public void testMapOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutObj = wrap(obj);
        Map<Object, Object> map = Maps.newHashMap(ImmutableMap.of(mutObj, "a", "b", mutObj));
        mutObj.setField("foo", map);
        TestObjectContainer res = mutObj.build().deserialize();
        assertEquals(ImmutableMap.of(res, "a", "b", res), res.foo);
    }
    /**
     * Exercises put/remove on a map obtained from the builder and checks the
     * mutations are reflected in the built object.
     */
    public void testMapModification() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Maps.newHashMap(ImmutableMap.of(1, "a", 2, "b"));
        PortableBuilderImpl mutObj = wrap(obj);
        Map<Object, Object> map = mutObj.getField("foo");
        map.put(3, mutObj);
        Object rmv = map.remove(1);
        assertEquals("a", rmv);
        TestObjectContainer res = mutObj.build().deserialize();
        assertEquals(ImmutableMap.of(2, "b", 3, res), res.foo);
    }
    /**
     * Replaces an element of an enum array obtained from the builder and
     * compares the rebuilt array against the expected contents.
     */
    public void testEnumArrayModification() {
        TestObjectAllTypes obj = new TestObjectAllTypes();
        obj.enumArr = new TestObjectEnum[]{TestObjectEnum.A, TestObjectEnum.B};
        PortableBuilderImpl mutObj = wrap(obj);
        PortableBuilderEnum[] arr = mutObj.getField("enumArr");
        arr[0] = new PortableBuilderEnum(mutObj.typeId(), TestObjectEnum.B);
        TestObjectAllTypes res = mutObj.build().deserialize();
        Assert.assertArrayEquals(new TestObjectEnum[] {TestObjectEnum.A, TestObjectEnum.B}, res.enumArr);
    }
    /**
     * Checks that editing a regular field keeps raw data written by the
     * object's own marshalling intact.
     */
    public void testEditObjectWithRawData() {
        GridPortableMarshalerAwareTestClass obj = new GridPortableMarshalerAwareTestClass();
        obj.s = "a";
        obj.sRaw = "aa";
        PortableBuilderImpl mutableObj = wrap(obj);
        mutableObj.setField("s", "z");
        GridPortableMarshalerAwareTestClass res = mutableObj.build().deserialize();
        assertEquals("z", res.s);
        assertEquals("aa", res.sRaw);
    }
    /**
     * Checks the builder preserves the original hash code and honors an
     * explicitly assigned one.
     */
    public void testHashCode() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutableObj = wrap(obj);
        assertEquals(obj.hashCode(), mutableObj.build().hashCode());
        mutableObj.hashCode(25);
        assertEquals(25, mutableObj.build().hashCode());
    }
    /**
     * Round-trips a list containing nested lists, sets and maps and compares
     * it with the deserialized copy.
     */
    public void testCollectionsInCollection() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newArrayList(
            Lists.newArrayList(1, 2),
            Lists.newLinkedList(Arrays.asList(1, 2)),
            Sets.newHashSet("a", "b"),
            Sets.newLinkedHashSet(Arrays.asList("a", "b")),
            Maps.newHashMap(ImmutableMap.of(1, "a", 2, "b")));
        TestObjectContainer deserialized = wrap(obj).build().deserialize();
        assertEquals(obj.foo, deserialized.foo);
    }
    /**
     * Mutates a Map.Entry field through the builder and checks the rebuilt
     * object carries the new value.
     */
    public void testMapEntryModification() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = ImmutableMap.of(1, "a").entrySet().iterator().next();
        PortableBuilderImpl mutableObj = wrap(obj);
        Map.Entry<Object, Object> entry = mutableObj.getField("foo");
        assertEquals(1, entry.getKey());
        assertEquals("a", entry.getValue());
        entry.setValue("b");
        TestObjectContainer res = mutableObj.build().deserialize();
        assertEquals(new GridMapEntry<>(1, "b"), res.foo);
    }
    /**
     * Overrides a field with a Map.Entry value and checks it round-trips.
     */
    public void testMapEntryOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutableObj = wrap(obj);
        mutableObj.setField("foo", new GridMapEntry<>(1, "a"));
        TestObjectContainer res = mutableObj.build().deserialize();
        assertEquals(new GridMapEntry<>(1, "a"), res.foo);
    }
    /**
     * Writes the same new field twice before building and checks the
     * recorded metadata type for it.
     */
    public void testMetadataChangingDoublePut() {
        PortableBuilderImpl mutableObj = wrap(new TestObjectContainer());
        mutableObj.setField("xx567", "a");
        mutableObj.setField("xx567", "b");
        mutableObj.build();
        PortableMetadata metadata = portables().metadata(TestObjectContainer.class);
        assertEquals("String", metadata.fieldTypeName("xx567"));
    }
    /**
     * Same scenario as {@link #testMetadataChangingDoublePut()} executed a
     * second time against the already-registered metadata.
     */
    public void testMetadataChangingDoublePut2() {
        PortableBuilderImpl mutableObj = wrap(new TestObjectContainer());
        mutableObj.setField("xx567", "a");
        mutableObj.setField("xx567", "b");
        mutableObj.build();
        PortableMetadata metadata = portables().metadata(TestObjectContainer.class);
        assertEquals("String", metadata.fieldTypeName("xx567"));
    }
    /**
     * Adds fields of many different types through the builder and checks
     * each one's metadata type name after the build.
     */
    public void testMetadataChanging() {
        TestObjectContainer c = new TestObjectContainer();
        PortableBuilderImpl mutableObj = wrap(c);
        mutableObj.setField("intField", 1);
        mutableObj.setField("intArrField", new int[] {1});
        mutableObj.setField("arrField", new String[] {"1"});
        mutableObj.setField("strField", "1");
        mutableObj.setField("colField", Lists.newArrayList("1"));
        mutableObj.setField("mapField", Maps.newHashMap(ImmutableMap.of(1, "1")));
        mutableObj.setField("enumField", TestObjectEnum.A);
        mutableObj.setField("enumArrField", new Enum[] {TestObjectEnum.A});
        mutableObj.build();
        PortableMetadata metadata = portables().metadata(c.getClass());
        assertTrue(metadata.fields().containsAll(Arrays.asList("intField", "intArrField", "arrField", "strField",
            "colField", "mapField", "enumField", "enumArrField")));
        assertEquals("int", metadata.fieldTypeName("intField"));
        assertEquals("int[]", metadata.fieldTypeName("intArrField"));
        assertEquals("String[]", metadata.fieldTypeName("arrField"));
        assertEquals("String", metadata.fieldTypeName("strField"));
        assertEquals("Collection", metadata.fieldTypeName("colField"));
        assertEquals("Map", metadata.fieldTypeName("mapField"));
        assertEquals("Enum", metadata.fieldTypeName("enumField"));
        assertEquals("Enum[]", metadata.fieldTypeName("enumArrField"));
    }
    /**
     * Checks a java.util.Date stored in an Object field is surfaced by the
     * builder as a Timestamp.
     */
    public void testDateInObjectField() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = new Date();
        PortableBuilderImpl mutableObj = wrap(obj);
        assertEquals(Timestamp.class, mutableObj.getField("foo").getClass());
    }
    /**
     * Checks a java.util.Date stored inside a collection is surfaced by the
     * builder as a Timestamp.
     */
    public void testDateInCollection() {
        TestObjectContainer obj = new TestObjectContainer();
        obj.foo = Lists.newArrayList(new Date());
        PortableBuilderImpl mutableObj = wrap(obj);
        assertEquals(Timestamp.class, ((List<?>)mutableObj.getField("foo")).get(0).getClass());
    }
    /**
     * Overrides a field with a Date[] and checks the array type and contents
     * survive the round trip.
     */
    @SuppressWarnings("AssertEqualsBetweenInconvertibleTypes")
    public void testDateArrayOverride() {
        TestObjectContainer obj = new TestObjectContainer();
        PortableBuilderImpl mutableObj = wrap(obj);
        Date[] arr = {new Date()};
        mutableObj.setField("foo", arr);
        TestObjectContainer res = mutableObj.build().deserialize();
        assertEquals(Date[].class, res.foo.getClass());
        assertTrue(Objects.deepEquals(arr, res.foo));
    }
    /**
     * Removes an element from a nested map-of-lists through the builder and
     * verifies both the key set and the remaining list contents.
     */
    public void testChangeMap() {
        AddressBook addrBook = new AddressBook();
        addrBook.addCompany(new Company(1, "Google inc", 100, new Address("Saint-Petersburg", "Torzhkovskya", 1, 53), "occupation"));
        addrBook.addCompany(new Company(2, "Apple inc", 100, new Address("Saint-Petersburg", "Torzhkovskya", 1, 54), "occupation"));
        addrBook.addCompany(new Company(3, "Microsoft", 100, new Address("Saint-Petersburg", "Torzhkovskya", 1, 55), "occupation"));
        addrBook.addCompany(new Company(4, "Oracle", 100, new Address("Saint-Petersburg", "Nevskiy", 1, 1), "occupation"));
        PortableBuilderImpl mutableObj = wrap(addrBook);
        Map<String, List<PortableBuilderImpl>> map = mutableObj.getField("companyByStreet");
        List<PortableBuilderImpl> list = map.get("Torzhkovskya");
        PortableBuilderImpl company = list.get(0);
        assert "Google inc".equals(company.<String>getField("name"));
        list.remove(0);
        AddressBook res = mutableObj.build().deserialize();
        assertEquals(Arrays.asList("Nevskiy", "Torzhkovskya"), new ArrayList<>(res.getCompanyByStreet().keySet()));
        List<Company> torzhkovskyaCompanies = res.getCompanyByStreet().get("Torzhkovskya");
        assertEquals(2, torzhkovskyaCompanies.size());
        assertEquals("Apple inc", torzhkovskyaCompanies.get(0).name);
    }
    /**
     * Builds from a nested builder (whose data does not start at offset
     * zero) and verifies the cyclic outer/inner references are preserved.
     */
    public void testSavingObjectWithNotZeroStart() {
        TestObjectOuter out = new TestObjectOuter();
        TestObjectInner inner = new TestObjectInner();
        out.inner = inner;
        inner.outer = out;
        PortableBuilderImpl builder = wrap(out);
        PortableBuilderImpl innerBuilder = builder.getField("inner");
        TestObjectInner res = innerBuilder.build().deserialize();
        assertSame(res, res.outer.inner);
    }
    /**
     * Checks a field already holding a portable object stays a portable
     * object after reading and rebuilding.
     */
    public void testPortableObjectField() {
        TestObjectContainer container = new TestObjectContainer(toPortable(new TestObjectArrayList()));
        PortableBuilderImpl wrapper = wrap(container);
        assertTrue(wrapper.getField("foo") instanceof PortableObject);
        TestObjectContainer deserialized = wrapper.build().deserialize();
        assertTrue(deserialized.foo instanceof PortableObject);
    }
    /**
     * Assigns a portable object into a field and checks deserialization
     * yields the underlying user type.
     */
    public void testAssignPortableObject() {
        TestObjectContainer container = new TestObjectContainer();
        PortableBuilderImpl wrapper = wrap(container);
        wrapper.setField("foo", toPortable(new TestObjectArrayList()));
        TestObjectContainer deserialized = wrapper.build().deserialize();
        assertTrue(deserialized.foo instanceof TestObjectArrayList);
    }
    /**
     * Sets then removes a field on a freshly created builder and checks the
     * built object does not contain it.
     */
    public void testRemoveFromNewObject() {
        PortableBuilderImpl wrapper = newWrapper(TestObjectAllTypes.class);
        wrapper.setField("str", "a");
        wrapper.removeField("str");
        assertNull(wrapper.build().<TestObjectAllTypes>deserialize().str);
    }
    /**
     * Removes an existing field from a wrapped portable object and checks
     * the built object does not contain it.
     */
    public void testRemoveFromExistingObject() {
        TestObjectAllTypes obj = new TestObjectAllTypes();
        obj.setDefaultData();
        PortableBuilderImpl wrapper = wrap(toPortable(obj));
        wrapper.removeField("str");
        assertNull(wrapper.build().<TestObjectAllTypes>deserialize().str);
    }
    /**
     * Round-trips mutually referencing arrays. Disabled pending IGNITE-1273.
     */
    public void testCyclicArrays() {
        fail("https://issues.apache.org/jira/browse/IGNITE-1273");
        TestObjectContainer obj = new TestObjectContainer();
        Object[] arr1 = new Object[1];
        Object[] arr2 = new Object[]{arr1};
        arr1[0] = arr2;
        obj.foo = arr1;
        TestObjectContainer res = toPortable(obj).deserialize();
        Object[] resArr = (Object[])res.foo;
        assertSame(((Object[])resArr[0])[0], resArr);
    }
    /**
     * Round-trips mutually referencing lists. Disabled pending IGNITE-1273.
     */
    @SuppressWarnings("TypeMayBeWeakened")
    public void testCyclicArrayList() {
        fail("https://issues.apache.org/jira/browse/IGNITE-1273");
        TestObjectContainer obj = new TestObjectContainer();
        List<Object> arr1 = new ArrayList<>();
        List<Object> arr2 = new ArrayList<>();
        arr1.add(arr2);
        arr2.add(arr1);
        obj.foo = arr1;
        TestObjectContainer res = toPortable(obj).deserialize();
        List<?> resArr = (List<?>)res.foo;
        assertSame(((List<Object>)resArr.get(0)).get(0), resArr);
    }
    /**
     * @param obj Object.
     * @return Object in portable format.
     */
    private PortableObject toPortable(Object obj) {
        return portables().toPortable(obj);
    }
    /**
     * @param obj Object.
     * @return GridMutablePortableObject.
     */
    private PortableBuilderImpl wrap(Object obj) {
        return PortableBuilderImpl.wrap(toPortable(obj));
    }
    /**
     * @param aCls Class.
     * @return Wrapper.
     */
    private PortableBuilderImpl newWrapper(Class<?> aCls) {
        CacheObjectPortableProcessorImpl processor = (CacheObjectPortableProcessorImpl)(
            (IgnitePortablesImpl)portables()).processor();
        return new PortableBuilderImpl(processor.portableContext(), processor.typeId(aCls.getName()),
            aCls.getSimpleName());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.functions.worker;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.distributedlog.AppendOnlyStreamWriter;
import org.apache.distributedlog.DistributedLogConfiguration;
import org.apache.distributedlog.api.DistributedLogManager;
import org.apache.distributedlog.api.namespace.Namespace;
import org.apache.distributedlog.exceptions.ZKException;
import org.apache.distributedlog.impl.metadata.BKDLConfig;
import org.apache.distributedlog.metadata.DLMetadata;
import org.apache.pulsar.client.admin.PulsarAdmin;
import org.apache.pulsar.client.admin.PulsarAdminBuilder;
import org.apache.pulsar.client.api.ClientBuilder;
import org.apache.pulsar.client.api.PulsarClient;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.common.policies.data.FunctionStats;
import org.apache.pulsar.functions.proto.Function;
import org.apache.pulsar.functions.proto.InstanceCommunication;
import org.apache.pulsar.functions.runtime.Runtime;
import org.apache.pulsar.functions.runtime.RuntimeSpawner;
import org.apache.pulsar.functions.utils.FunctionCommon;
import org.apache.pulsar.functions.worker.dlog.DLInputStream;
import org.apache.pulsar.functions.worker.dlog.DLOutputStream;
import org.apache.zookeeper.KeeperException.Code;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;
import java.nio.file.Files;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.stream.Collectors;
import static java.nio.file.StandardCopyOption.REPLACE_EXISTING;
import static org.apache.commons.lang3.StringUtils.isNotBlank;
@Slf4j
public final class WorkerUtils {
    // Static utility holder - not instantiable.
    private WorkerUtils(){}
public static void uploadFileToBookkeeper(String packagePath, File sourceFile, Namespace dlogNamespace) throws IOException {
FileInputStream uploadedInputStream = new FileInputStream(sourceFile);
uploadToBookeeper(dlogNamespace, uploadedInputStream, packagePath);
}
    /**
     * Streams the given input into a distributedlog log at the destination
     * package path, replacing any existing log at that path. The caller
     * retains ownership of {@code uploadedInputStream} and must close it.
     *
     * @param dlogNamespace distributedlog namespace to write into
     * @param uploadedInputStream source of the package bytes (not closed here)
     * @param destPkgPath destination log path
     * @throws IOException if deleting, opening, or writing the log fails
     */
    public static void uploadToBookeeper(Namespace dlogNamespace,
                                         InputStream uploadedInputStream,
                                         String destPkgPath)
            throws IOException {
        // if the destination log already exists, delete it so the new
        // package fully replaces the old one
        if (dlogNamespace.logExists(destPkgPath)) {
            // if the destination file exists, write a log message
            log.info(String.format("Target function file already exists at '%s'. Overwriting it now",
                    destPkgPath));
            dlogNamespace.deleteLog(destPkgPath);
        }
        // copy the topology package to target working directory
        log.info(String.format("Uploading function package to '%s'",
                destPkgPath));
        // nested try-with-resources: the inner DLOutputStream must close
        // before the writer, and the writer before the log manager
        try (DistributedLogManager dlm = dlogNamespace.openLog(destPkgPath)) {
            try (AppendOnlyStreamWriter writer = dlm.getAppendOnlyStreamWriter()){
                try (OutputStream out = new DLOutputStream(dlm, writer)) {
                    int read = 0;
                    byte[] bytes = new byte[1024];
                    while ((read = uploadedInputStream.read(bytes)) != -1) {
                        out.write(bytes, 0, read);
                    }
                    out.flush();
                }
            }
        }
    }
public static void downloadFromBookkeeper(Namespace namespace,
File outputFile,
String packagePath) throws IOException {
downloadFromBookkeeper(namespace, new FileOutputStream(outputFile), packagePath);
}
public static void downloadFromBookkeeper(Namespace namespace,
OutputStream outputStream,
String packagePath) throws IOException {
log.info("Downloading {} from BK...", packagePath);
DistributedLogManager dlm = namespace.openLog(packagePath);
try (InputStream in = new DLInputStream(dlm)) {
int read = 0;
byte[] bytes = new byte[1024];
while ((read = in.read(bytes)) != -1) {
outputStream.write(bytes, 0, read);
}
outputStream.flush();
}
}
public static DistributedLogConfiguration getDlogConf(WorkerConfig workerConfig) {
int numReplicas = workerConfig.getNumFunctionPackageReplicas();
DistributedLogConfiguration conf = new DistributedLogConfiguration()
.setWriteLockEnabled(false)
.setOutputBufferSize(256 * 1024) // 256k
.setPeriodicFlushFrequencyMilliSeconds(0) // disable periodical flush
.setImmediateFlushEnabled(false) // disable immediate flush
.setLogSegmentRollingIntervalMinutes(0) // disable time-based rolling
.setMaxLogSegmentBytes(Long.MAX_VALUE) // disable size-based rolling
.setExplicitTruncationByApplication(true) // no auto-truncation
.setRetentionPeriodHours(Integer.MAX_VALUE) // long retention
.setEnsembleSize(numReplicas) // replica settings
.setWriteQuorumSize(numReplicas)
.setAckQuorumSize(numReplicas)
.setUseDaemonThread(true);
conf.setProperty("bkc.allowShadedLedgerManagerFactoryClass", true);
conf.setProperty("bkc.shadedLedgerManagerFactoryClassPrefix", "dlshade.");
if (isNotBlank(workerConfig.getBookkeeperClientAuthenticationPlugin())) {
conf.setProperty("bkc.clientAuthProviderFactoryClass",
workerConfig.getBookkeeperClientAuthenticationPlugin());
if (isNotBlank(workerConfig.getBookkeeperClientAuthenticationParametersName())) {
conf.setProperty("bkc." + workerConfig.getBookkeeperClientAuthenticationParametersName(),
workerConfig.getBookkeeperClientAuthenticationParameters());
}
}
return conf;
}
public static URI initializeDlogNamespace(String zkServers, String ledgersRootPath) throws IOException {
BKDLConfig dlConfig = new BKDLConfig(zkServers, ledgersRootPath);
DLMetadata dlMetadata = DLMetadata.create(dlConfig);
URI dlogUri = URI.create(String.format("distributedlog://%s/pulsar/functions", zkServers));
try {
dlMetadata.create(dlogUri);
} catch (ZKException e) {
if (e.getKeeperExceptionCode() == Code.NODEEXISTS) {
return dlogUri;
}
throw e;
}
return dlogUri;
}
public static PulsarAdmin getPulsarAdminClient(String pulsarWebServiceUrl) {
return getPulsarAdminClient(pulsarWebServiceUrl, null, null, null, null, null);
}
public static PulsarAdmin getPulsarAdminClient(String pulsarWebServiceUrl, String authPlugin, String authParams,
String tlsTrustCertsFilePath, Boolean allowTlsInsecureConnection,
Boolean enableTlsHostnameVerificationEnable) {
try {
PulsarAdminBuilder adminBuilder = PulsarAdmin.builder().serviceHttpUrl(pulsarWebServiceUrl);
if (isNotBlank(authPlugin) && isNotBlank(authParams)) {
adminBuilder.authentication(authPlugin, authParams);
}
if (isNotBlank(tlsTrustCertsFilePath)) {
adminBuilder.tlsTrustCertsFilePath(tlsTrustCertsFilePath);
}
if (allowTlsInsecureConnection != null) {
adminBuilder.allowTlsInsecureConnection(allowTlsInsecureConnection);
}
if (enableTlsHostnameVerificationEnable != null) {
adminBuilder.enableTlsHostnameVerification(enableTlsHostnameVerificationEnable);
}
return adminBuilder.build();
} catch (PulsarClientException e) {
log.error("Error creating pulsar admin client", e);
throw new RuntimeException(e);
}
}
public static PulsarClient getPulsarClient(String pulsarServiceUrl) {
return getPulsarClient(pulsarServiceUrl, null, null, null,
null, null, null);
}
public static PulsarClient getPulsarClient(String pulsarServiceUrl, String authPlugin, String authParams,
Boolean useTls, String tlsTrustCertsFilePath,
Boolean allowTlsInsecureConnection,
Boolean enableTlsHostnameVerificationEnable) {
try {
ClientBuilder clientBuilder = PulsarClient.builder().serviceUrl(pulsarServiceUrl);
if (isNotBlank(authPlugin)
&& isNotBlank(authParams)) {
clientBuilder.authentication(authPlugin, authParams);
}
if (useTls != null) {
clientBuilder.enableTls(useTls);
}
if (allowTlsInsecureConnection != null) {
clientBuilder.allowTlsInsecureConnection(allowTlsInsecureConnection);
}
if (isNotBlank(tlsTrustCertsFilePath)) {
clientBuilder.tlsTrustCertsFilePath(tlsTrustCertsFilePath);
}
if (enableTlsHostnameVerificationEnable != null) {
clientBuilder.enableTlsHostnameVerification(enableTlsHostnameVerificationEnable);
}
return clientBuilder.build();
} catch (PulsarClientException e) {
log.error("Error creating pulsar client", e);
throw new RuntimeException(e);
}
}
public static FunctionStats.FunctionInstanceStats getFunctionInstanceStats(String fullyQualifiedInstanceName,
FunctionRuntimeInfo functionRuntimeInfo,
int instanceId) {
RuntimeSpawner functionRuntimeSpawner = functionRuntimeInfo.getRuntimeSpawner();
FunctionStats.FunctionInstanceStats functionInstanceStats = new FunctionStats.FunctionInstanceStats();
if (functionRuntimeSpawner != null) {
Runtime functionRuntime = functionRuntimeSpawner.getRuntime();
if (functionRuntime != null) {
try {
InstanceCommunication.MetricsData metricsData = functionRuntime.getMetrics(instanceId).get();
functionInstanceStats.setInstanceId(instanceId);
FunctionStats.FunctionInstanceStats.FunctionInstanceStatsData functionInstanceStatsData
= new FunctionStats.FunctionInstanceStats.FunctionInstanceStatsData();
functionInstanceStatsData.setReceivedTotal(metricsData.getReceivedTotal());
functionInstanceStatsData.setProcessedSuccessfullyTotal(metricsData.getProcessedSuccessfullyTotal());
functionInstanceStatsData.setSystemExceptionsTotal(metricsData.getSystemExceptionsTotal());
functionInstanceStatsData.setUserExceptionsTotal(metricsData.getUserExceptionsTotal());
functionInstanceStatsData.setAvgProcessLatency(metricsData.getAvgProcessLatency() == 0.0 ? null : metricsData.getAvgProcessLatency());
functionInstanceStatsData.setLastInvocation(metricsData.getLastInvocation() == 0 ? null : metricsData.getLastInvocation());
functionInstanceStatsData.oneMin.setReceivedTotal(metricsData.getReceivedTotal1Min());
functionInstanceStatsData.oneMin.setProcessedSuccessfullyTotal(metricsData.getProcessedSuccessfullyTotal1Min());
functionInstanceStatsData.oneMin.setSystemExceptionsTotal(metricsData.getSystemExceptionsTotal1Min());
functionInstanceStatsData.oneMin.setUserExceptionsTotal(metricsData.getUserExceptionsTotal1Min());
functionInstanceStatsData.oneMin.setAvgProcessLatency(metricsData.getAvgProcessLatency1Min() == 0.0 ? null : metricsData.getAvgProcessLatency1Min());
// Filter out values that are NaN
Map<String, Double> statsDataMap = metricsData.getUserMetricsMap().entrySet().stream()
.filter(stringDoubleEntry -> !stringDoubleEntry.getValue().isNaN())
.collect(Collectors.toMap(x -> x.getKey(), x -> x.getValue()));
functionInstanceStatsData.setUserMetrics(statsDataMap);
functionInstanceStats.setMetrics(functionInstanceStatsData);
} catch (InterruptedException | ExecutionException e) {
log.warn("Failed to collect metrics for function instance {}", fullyQualifiedInstanceName, e);
}
}
}
return functionInstanceStats;
}
public static File dumpToTmpFile(final InputStream uploadedInputStream) {
try {
File tmpFile = FunctionCommon.createPkgTempFile();
tmpFile.deleteOnExit();
Files.copy(uploadedInputStream, tmpFile.toPath(), REPLACE_EXISTING);
return tmpFile;
} catch (IOException e) {
throw new RuntimeException("Cannot create a temporary file", e);
}
}
public static boolean isFunctionCodeBuiltin(Function.FunctionDetailsOrBuilder functionDetails) {
if (functionDetails.hasSource()) {
Function.SourceSpec sourceSpec = functionDetails.getSource();
if (!StringUtils.isEmpty(sourceSpec.getBuiltin())) {
return true;
}
}
if (functionDetails.hasSink()) {
Function.SinkSpec sinkSpec = functionDetails.getSink();
if (!StringUtils.isEmpty(sinkSpec.getBuiltin())) {
return true;
}
}
return false;
}
}
| |
package it.unibz.krdb.obda.owlrefplatform.core.reformulation;
/*
* #%L
* ontop-reformulation-core
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.model.Function;
import it.unibz.krdb.obda.model.CQIE;
import it.unibz.krdb.obda.model.Term;
import it.unibz.krdb.obda.model.Predicate;
import it.unibz.krdb.obda.model.Variable;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
/**
* QueryConnectedComponent represents a connected component of a CQ
*
* keeps track of variables (both quantified and free) and edges
*
* a connected component can either be degenerate (if it has no proper edges, i.e., just a loop)
*
* @author Roman Kontchakov
*
*
* types of predicates (as of 1 October 2014)
*
* Constant: NULL (string), TRUE, FALSE (boolean)
*
* NumericalOperationPredicate: MINUS, ADD, SUBSTRACT, MULTIPLY
* BooleanOperationPredicate: AND, NOT, OR, EQ, NEQ, GTE, GT, LTE, LT, IS_NULL, IS_NOT_NULL, IS_TRUE,
* SPARQL_IS_LITERAL_URI, SPARQL_IS_URI, SPARQL_IS_IRI, SPARQL_IS_BLANK, SPARQL_LANGMATCHES,
* SPARQL_REGEX, SPARQL_LIKE
* NonBooleanOperationPredicate: SPARQL_STR, SPARQL_DATATYPE, SPARQL_LANG
* DataTypePredicate: RDFS_LITERAL, RDFS_LITERAL_LANG, XSD_STRING, XSD_INTEGER, XSD_DECIMAL, XSD_DOUBLE, XSD_DATETIME,
* XSD_BOOLEAN, XSD_DATE, XSD_TIME, XSD_YEAR
* Predicate: QUEST_TRIPLE_PRED, QUEST_CAST
* AlgebraOperatorPredicate: SPARQL_JOIN, SPARQL_LEFTJOIN
*
*/
public class QueryConnectedComponent {

    private List<Term> variables;
    private List<Loop> quantifiedVariables;
    private List<Term> freeVariables;

    private final List<Edge> edges;  // a connected component contains a list of edges
    private final Loop loop;         // or a loop if it is degenerate

    private final List<Function> nonDLAtoms;

    private boolean noFreeTerms; // no free variables and no constants
                                 // if true the component can be mapped onto the anonymous part of the canonical model

    /**
     * constructor is private as instances created only by the static method getConnectedComponents
     *
     * @param edges: a list of edges in the connected component
     * @param nonDLAtoms: a list of non-DL atoms in the connected component
     * @param terms: loops for the terms that are covered by the edges
     */
    private QueryConnectedComponent(List<Edge> edges, List<Function> nonDLAtoms, List<Loop> terms) {
        this.edges = edges;
        this.nonDLAtoms = nonDLAtoms;
        // a degenerate component (no proper edges) is represented by its single loop;
        // NOTE(review): assumes terms is non-empty whenever the component is degenerate — holds for all callers below
        this.loop = isDegenerate() ? terms.get(0) : null;

        quantifiedVariables = new ArrayList<Loop>(terms.size());
        variables = new ArrayList<Term>(terms.size());
        freeVariables = new ArrayList<Term>(terms.size());
        noFreeTerms = true;

        // classify each covered term: quantified variable, free variable, or non-variable
        for (Loop l: terms) {
            Term t = l.getTerm();
            if (t instanceof Variable) {
                variables.add(t);
                //if (headterms.contains(t))
                if (l.isExistentialVariable())
                    quantifiedVariables.add(l);
                else {
                    freeVariables.add(t);
                    noFreeTerms = false;
                }
            }
            else
                noFreeTerms = false; // not a variable (i.e., a constant) -- better definition?
        }
    }

    /**
     * returns the loop for a term, creating and registering a fresh one if none exists yet
     *
     * @param t the term
     * @param allLoops map from terms to their loops (updated when a new loop is created)
     * @param headTerms terms occurring in the head of the query (these are never existential)
     * @return the (possibly fresh) loop for t
     */
    public static Loop getLoop(Term t, Map<Term, Loop> allLoops, Set<Term> headTerms) {
        Loop l = allLoops.get(t);
        if (l == null) {
            // a variable is existentially quantified iff it does not occur in the head
            boolean isExistentialVariable = ((t instanceof Variable) && !headTerms.contains(t));
            l = new Loop(t, isExistentialVariable);
            allLoops.put(t, l);
        }
        return l;
    }

    /**
     * extracts the connected component that contains the seed term;
     * every edge, loop and non-DL atom consumed by the component is REMOVED
     * from the corresponding input collection (destructive)
     */
    private static QueryConnectedComponent getConnectedComponent(Map<TermPair, Edge> pairs, Map<Term, Loop> allLoops, List<Function> nonDLAtoms,
                                                                 Term seed) {
        Set<Term> ccTerms = new HashSet<Term>((allLoops.size() * 2) / 3);
        List<Edge> ccEdges = new ArrayList<Edge>(pairs.size());
        List<Function> ccNonDLAtoms = new LinkedList<Function>();
        List<Loop> ccLoops = new ArrayList<Loop>(allLoops.size());

        ccTerms.add(seed);
        Loop seedLoop = allLoops.get(seed);
        if (seedLoop != null) {
            ccLoops.add(seedLoop);
            allLoops.remove(seed);
        }

        // expand the current CC by adding all edges that have at least one of their terms in it;
        // repeat until a full pass adds nothing new (fixpoint)
        boolean expanded = true;
        while (expanded) {
            expanded = false;
            Iterator<Entry<TermPair, Edge>> i = pairs.entrySet().iterator();
            //i = pairs.entrySet().iterator();
            while (i.hasNext()) {
                Edge edge = i.next().getValue();
                Term t0 = edge.getTerm0();
                Term t1 = edge.getTerm1();
                if (ccTerms.contains(t0)) {
                    if (ccTerms.add(t1)) { // t1 is new to the CC -- record its loop
                        ccLoops.add(edge.getLoop1());
                        allLoops.remove(t1); // remove the loops that are covered by the edges in CC
                    }
                }
                else if (ccTerms.contains(t1)) {
                    if (ccTerms.add(t0)) { // t0 is new to the CC -- record its loop
                        ccLoops.add(edge.getLoop0());
                        allLoops.remove(t0); // remove the loops that are covered by the edges in CC
                    }
                }
                else
                    continue; // edge touches neither end of the CC (yet)

                ccEdges.add(edge);
                expanded = true;
                i.remove();
            }

            // non-DL atoms are absorbed into the CC if they share at least one variable with it
            Iterator<Function> ni = nonDLAtoms.iterator();
            while (ni.hasNext()) {
                Function atom = ni.next();
                boolean intersects = false;
                Set<Variable> atomVars = atom.getReferencedVariables();
                for (Variable t : atomVars)
                    if (ccTerms.contains(t)) {
                        intersects = true;
                        break;
                    }

                if (intersects) {
                    ccNonDLAtoms.add(atom);
                    // all of the atom's variables join the CC
                    ccTerms.addAll(atomVars);
                    for (Variable v : atomVars) {
                        allLoops.remove(v);
                    }
                    expanded = true;
                    ni.remove();
                }
            }
        }

        return new QueryConnectedComponent(ccEdges, ccNonDLAtoms, ccLoops);
    }

    /**
     * getConnectedComponents creates a list of connected components of a given CQ
     *
     * @param cqie: CQ to be split into connected components
     * @return list of connected components
     */
    public static List<QueryConnectedComponent> getConnectedComponents(CQIE cqie) {
        Set<Term> headTerms = new HashSet<Term>(cqie.getHead().getTerms());

        // collect all edges and loops
        //      an edge is a binary predicate P(t, t') with t \ne t'
        //      a loop is either a unary predicate A(t) or a binary predicate P(t,t)
        //      a nonDL atom is an atom with a non-data predicate
        Map<TermPair, Edge> pairs = new HashMap<TermPair, Edge>();
        Map<Term, Loop> allLoops = new HashMap<Term, Loop>();
        List<Function> nonDLAtoms = new LinkedList<Function>();

        for (Function a: cqie.getBody()) {
            Predicate p = a.getFunctionSymbol();
            if (p.isDataPredicate() && !p.isTriplePredicate()) { // if DL predicates
                //if (p.isClass() || p.isObjectProperty() || p.isDataProperty()) { // if DL predicate (throws NullPointerException)
                Term t0 = a.getTerm(0);
                if (a.getArity() == 2 && !t0.equals(a.getTerm(1))) {
                    // proper DL edge between two distinct terms;
                    // all atoms over the same unordered term pair share one Edge
                    Term t1 = a.getTerm(1);
                    TermPair pair = new TermPair(t0, t1);
                    Edge edge = pairs.get(pair);
                    if (edge == null) {
                        Loop l0 = getLoop(t0, allLoops, headTerms);
                        Loop l1 = getLoop(t1, allLoops, headTerms);
                        edge = new Edge(l0, l1);
                        pairs.put(pair, edge);
                    }
                    edge.bAtoms.add(a);
                }
                else {
                    // unary atom A(t) or reflexive binary atom P(t,t): attach to the loop of t
                    Loop l0 = getLoop(t0, allLoops, headTerms);
                    l0.atoms.add(a);
                }
            }
            else { // non-DL predicate
                //log.debug("NON-DL ATOM {}", a);
                nonDLAtoms.add(a);
            }
        }

        List<QueryConnectedComponent> ccs = new LinkedList<QueryConnectedComponent>();

        // form the list of connected components from the list of edges
        while (!pairs.isEmpty()) {
            Edge edge = pairs.entrySet().iterator().next().getValue();
            ccs.add(getConnectedComponent(pairs, allLoops, nonDLAtoms, edge.getTerm0()));
        }

        // components seeded by the remaining non-DL atoms;
        // NOTE(review): assumes every non-DL atom references at least one variable — otherwise next() throws
        while (!nonDLAtoms.isEmpty()) {
            //log.debug("NON-DL ATOMS ARE NOT EMPTY: {}", nonDLAtoms);
            Function f = nonDLAtoms.iterator().next();
            Set<Variable> vars = f.getReferencedVariables();
            Variable v = vars.iterator().next();
            ccs.add(getConnectedComponent(pairs, allLoops, nonDLAtoms, v));
        }

        // create degenerate connected components for all remaining loops (which are disconnected from anything else)
        //for (Entry<term, Loop> loop : allLoops.entrySet()) {
        while (!allLoops.isEmpty()) {
            Term seed = allLoops.keySet().iterator().next();
            ccs.add(getConnectedComponent(pairs, allLoops, nonDLAtoms, seed));
            //ccs.add(new QueryConnectedComponent(Collections.EMPTY_LIST, loop.getValue(), Collections.EMPTY_LIST, Collections.singletonList(loop.getValue())));
        }

        return ccs;
    }

    /**
     * @return the single loop of a degenerate component (null if the component has proper edges)
     */
    public Loop getLoop() {
        return loop;
    }

    /**
     * boolean isDegenerate()
     *
     * @return true if the component is degenerate (has no proper edges with two distinct terms)
     */
    public boolean isDegenerate() {
        return edges.isEmpty(); // && nonDLAtoms.isEmpty();
    }

    /**
     * boolean hasNoFreeTerms()
     *
     * @return true if all terms of the connected component are existentially quantified variables
     */
    public boolean hasNoFreeTerms() {
        return noFreeTerms;
    }

    /**
     * List<Edge> getEdges()
     *
     * @return the list of edges in the connected component
     */
    public List<Edge> getEdges() {
        return edges;
    }

    /**
     * List<Term> getVariables()
     *
     * @return the list of variables in the connected components
     */
    public List<Term> getVariables() {
        return variables;
    }

    /**
     * Set<Variable> getQuantifiedVariables()
     *
     * @return the collection of existentially quantified variables
     */
    public Collection<Loop> getQuantifiedVariables() {
        return quantifiedVariables;
    }

    /**
     * List<Term> getFreeVariables()
     *
     * @return the list of free variables in the connected component
     */
    public List<Term> getFreeVariables() {
        return freeVariables;
    }

    /**
     * @return the list of non-DL atoms in the connected component
     */
    public List<Function> getNonDLAtoms() {
        return nonDLAtoms;
    }

    /**
     * Loop: class representing loops of connected components
     *
     * a loop is characterized by a term and a set of atoms involving only that term
     *
     * @author Roman Kontchakov
     *
     */
    static class Loop {
        private final Term term;
        private final List<Function> atoms;
        private final boolean isExistentialVariable;

        public Loop(Term term, boolean isExistentialVariable) {
            this.term = term;
            this.isExistentialVariable = isExistentialVariable;
            this.atoms = new ArrayList<Function>(10);
        }

        public Term getTerm() {
            return term;
        }

        public Collection<Function> getAtoms() {
            return atoms;
        }

        public boolean isExistentialVariable() {
            return isExistentialVariable;
        }

        @Override
        public String toString() {
            return "loop: {" + term + "}" + atoms;
        }

        // equality is by term only (atoms are ignored), consistent with hashCode below
        @Override
        public boolean equals(Object o) {
            if (o instanceof Loop)
                return term.equals(((Loop)o).term);
            return false;
        }

        @Override
        public int hashCode() {
            return term.hashCode();
        }
    }

    /**
     * Edge: class representing edges of connected components
     *
     * an edge is characterized by a pair of terms and a set of atoms involving only those terms
     *
     * @author Roman Kontchakov
     *
     */
    static class Edge {
        private final Loop l0, l1;
        private final List<Function> bAtoms;

        public Edge(Loop l0, Loop l1) {
            this.bAtoms = new ArrayList<Function>(10);
            this.l0 = l0;
            this.l1 = l1;
        }

        public Loop getLoop0() {
            return l0;
        }

        public Loop getLoop1() {
            return l1;
        }

        public Term getTerm0() {
            return l0.term;
        }

        public Term getTerm1() {
            return l1.term;
        }

        public Collection<Function> getBAtoms() {
            return bAtoms;
        }

        /**
         * @return the binary atoms of the edge together with the loop atoms of both endpoints
         */
        public List<Function> getAtoms() {
            List<Function> allAtoms = new ArrayList<Function>(bAtoms.size() + l0.atoms.size() + l1.atoms.size());
            allAtoms.addAll(bAtoms);
            allAtoms.addAll(l0.atoms);
            allAtoms.addAll(l1.atoms);
            return allAtoms;
        }

        @Override
        public String toString() {
            return "edge: {" + l0.term + ", " + l1.term + "}" + bAtoms + l0.atoms + l1.atoms;
        }
    }

    /**
     * TermPair: a simple abstraction of *unordered* pair of Terms (i.e., {t1, t2} and {t2, t1} are equal)
     *
     * @author Roman Kontchakov
     *
     */
    private static class TermPair {
        private final Term t0, t1;

        public TermPair(Term t0, Term t1) {
            this.t0 = t0;
            this.t1 = t1;
        }

        // symmetric equality: both orders are checked
        @Override
        public boolean equals(Object o) {
            if (o instanceof TermPair) {
                TermPair other = (TermPair) o;
                if (this.t0.equals(other.t0) && this.t1.equals(other.t1))
                    return true;
                if (this.t0.equals(other.t1) && this.t1.equals(other.t0))
                    return true;
            }
            return false;
        }

        @Override
        public String toString() {
            return "term pair: {" + t0 + ", " + t1 + "}";
        }

        // XOR is order-independent, matching the unordered equals above
        @Override
        public int hashCode() {
            return t0.hashCode() ^ t1.hashCode();
        }
    }
}
| |
package org.broadinstitute.hellbender.utils;
import htsjdk.samtools.SAMFileHeader;
import htsjdk.samtools.SAMSequenceDictionary;
import htsjdk.samtools.SAMSequenceRecord;
import htsjdk.samtools.util.Locatable;
import htsjdk.tribble.Feature;
import htsjdk.tribble.SimpleFeature;
import htsjdk.variant.variantcontext.Allele;
import htsjdk.variant.variantcontext.VariantContext;
import htsjdk.variant.variantcontext.VariantContextBuilder;
import htsjdk.variant.vcf.VCFFileReader;
import org.broadinstitute.hellbender.exceptions.UserException;
import org.broadinstitute.hellbender.utils.fasta.CachingIndexedFastaSequenceFile;
import org.broadinstitute.hellbender.utils.io.IOUtils;
import org.broadinstitute.hellbender.utils.read.ArtificialReadUtils;
import org.broadinstitute.hellbender.utils.read.GATKRead;
import org.broadinstitute.hellbender.utils.read.ReadUtils;
import org.broadinstitute.hellbender.GATKBaseTest;
import org.testng.Assert;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.assertFalse;
/**
* Test out the functionality of the new genome loc parser
*/
public final class GenomeLocParserUnitTest extends GATKBaseTest {
    // Parser under test, built once per class from the artificial header below.
    private GenomeLocParser genomeLocParser;
    // Artificial SAM header with a single contig "1" of length 10.
    private SAMFileHeader header;

    @BeforeClass
    public void init() {
        // args: number of contigs, starting contig index, contig length
        header = ArtificialReadUtils.createArtificialSamHeader(1, 1, 10);
        genomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
    }
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testGetContigIndex() {
assertEquals(genomeLocParser.getContigIndex("blah"), -1); // should not be in the reference
}
@Test
public void testGetContigIndexValid() {
assertEquals(genomeLocParser.getContigIndex("1"), 0); // should be in the reference
}
@Test(expectedExceptions=UserException.class)
public void testGetContigInfoUnknownContig1() {
assertEquals(null, genomeLocParser.getContigInfo("blah")); // should *not* be in the reference
}
@Test(expectedExceptions=UserException.class)
public void testGetContigInfoUnknownContig2() {
assertEquals(null, genomeLocParser.getContigInfo(null)); // should *not* be in the reference
}
@Test()
public void testHasContigInfoUnknownContig1() {
assertEquals(false, genomeLocParser.contigIsInDictionary("blah")); // should *not* be in the reference
}
@Test()
public void testHasContigInfoUnknownContig2() {
assertEquals(false, genomeLocParser.contigIsInDictionary(null)); // should *not* be in the reference
}
@Test()
public void testHasContigInfoKnownContig() {
assertEquals(true, genomeLocParser.contigIsInDictionary("1")); // should be in the reference
}
@Test
public void testGetContigInfoKnownContig() {
assertEquals(0, "1".compareTo(genomeLocParser.getContigInfo("1").getSequenceName())); // should be in the reference
}
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testParseBadString() {
genomeLocParser.parseGenomeLoc("Bad:0-1");
}
@Test
public void testParseUnknownSequenceLength() {
SAMSequenceDictionary seqDict = new SAMSequenceDictionary();
seqDict.addSequence(new SAMSequenceRecord("1", SAMSequenceRecord.UNKNOWN_SEQUENCE_LENGTH));
Assert.assertEquals(seqDict.getSequence("1").getSequenceLength(), SAMSequenceRecord.UNKNOWN_SEQUENCE_LENGTH);
GenomeLocParser myLocParser = new GenomeLocParser(seqDict);
GenomeLoc genomeLoc = myLocParser.parseGenomeLoc("1:1-99");
Assert.assertEquals(genomeLoc.getEnd(), 99);
}
@Test
public void testContigHasColon() {
SAMFileHeader header = new SAMFileHeader();
header.setSortOrder(htsjdk.samtools.SAMFileHeader.SortOrder.coordinate);
SAMSequenceDictionary dict = new SAMSequenceDictionary();
SAMSequenceRecord rec = new SAMSequenceRecord("c:h:r1", 10);
rec.setSequenceLength(10);
dict.addSequence(rec);
header.setSequenceDictionary(dict);
final GenomeLocParser myGenomeLocParser = new GenomeLocParser(header.getSequenceDictionary());
GenomeLoc loc = myGenomeLocParser.parseGenomeLoc("c:h:r1:4-5");
assertEquals(0, loc.getContigIndex());
assertEquals(loc.getStart(), 4);
assertEquals(loc.getStop(), 5);
}
@Test
public void testParseGoodString() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1-10");
assertEquals(0, loc.getContigIndex());
assertEquals(loc.getStop(), 10);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc1() {
GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1, 100);
assertEquals(0, loc.getContigIndex());
assertEquals(loc.getStop(), 100);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc1point5() { // in honor of VAAL!
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1");
assertEquals(0, loc.getContigIndex());
assertEquals(loc.getStop(), 1);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc2() {
GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1, 100);
assertEquals("1", loc.getContig());
assertEquals(loc.getStop(), 100);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc3() {
GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1);
assertEquals("1", loc.getContig());
assertEquals(loc.getStop(), 1);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc4() {
GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1);
assertEquals(0, loc.getContigIndex());
assertEquals(loc.getStop(), 1);
assertEquals(loc.getStart(), 1);
}
@Test
public void testCreateGenomeLoc5() {
GenomeLoc loc = genomeLocParser.createGenomeLoc("1", 1, 100);
GenomeLoc copy = genomeLocParser.createGenomeLoc(loc.getContig(),loc.getStart(),loc.getStop());
assertEquals(0, copy.getContigIndex());
assertEquals(copy.getStop(), 100);
assertEquals(copy.getStart(), 1);
}
@Test
public void testGenomeLocPlusSign() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1+");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
@Test
public void testGenomeLocParseOnlyChrome() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testGenomeLocParseOnlyBadChrome() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("12");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testGenomeLocBad() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1-");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testGenomeLocBad2() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1-500-0");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
@Test(expectedExceptions=UserException.MalformedGenomeLoc.class)
public void testGenomeLocBad3() {
GenomeLoc loc = genomeLocParser.parseGenomeLoc("1:1--0");
assertEquals(loc.getContigIndex(), 0);
assertEquals(loc.getStop(), 10); // the size
assertEquals(loc.getStart(), 1);
}
    // test out the validating methods (contig "1" has length 10; the 4-arg overload
    // with 'false' disables coordinate validation)
    @Test
    public void testValidationOfGenomeLocs() {
        assertTrue(genomeLocParser.isValidGenomeLoc("1",1,1));
        assertFalse(genomeLocParser.isValidGenomeLoc("2",1,1)); // contig "2" is not in the dictionary
        assertFalse(genomeLocParser.isValidGenomeLoc("1",1,11)); // past the end of the contig
        assertFalse(genomeLocParser.isValidGenomeLoc("1",-1,10)); // bad start
        assertFalse(genomeLocParser.isValidGenomeLoc("1",1,-2)); // bad stop
        assertTrue( genomeLocParser.isValidGenomeLoc("1",-1,2, false)); // bad start, but coordinate validation disabled
        assertFalse(genomeLocParser.isValidGenomeLoc("1",10,11)); // stop past the contig end
        assertTrue( genomeLocParser.isValidGenomeLoc("1",10,11, false)); // stop past end, but coordinate validation disabled
        assertFalse(genomeLocParser.isValidGenomeLoc("1",2,1)); // stop < start
    }
    @Test(expectedExceptions = UserException.MalformedGenomeLoc.class)
    public void testValidateGenomeLoc() {
        // bad contig index: only index 0 exists in the single-contig dictionary
        // NOTE(review): presumably the '2' argument is the contig index — verify against GenomeLocParser.validateGenomeLoc
        genomeLocParser.validateGenomeLoc("1", 1, 1, 2, false);
    }
private static class FlankingGenomeLocTestData extends TestDataProvider {
final GenomeLocParser parser;
final int basePairs;
final GenomeLoc original, flankStart, flankStop;
private FlankingGenomeLocTestData(String name, GenomeLocParser parser, int basePairs, String original, String flankStart, String flankStop) {
super(FlankingGenomeLocTestData.class, name);
this.parser = parser;
this.basePairs = basePairs;
this.original = parse(parser, original);
this.flankStart = flankStart == null ? null : parse(parser, flankStart);
this.flankStop = flankStop == null ? null : parse(parser, flankStop);
}
private static GenomeLoc parse(GenomeLocParser parser, String str) {
return "unmapped".equals(str) ? GenomeLoc.UNMAPPED : parser.parseGenomeLoc(str);
}
}
    @DataProvider(name = "flankingGenomeLocs")
    public Object[][] getFlankingGenomeLocs() {
        // Uses its own long contig (length 10000) instead of the class-level 10bp header.
        int contigLength = 10000;
        SAMFileHeader header = ArtificialReadUtils.createArtificialSamHeader(1, 1, contigLength);
        GenomeLocParser parser = new GenomeLocParser(header.getSequenceDictionary());

        // Each constructor call registers one case via TestDataProvider:
        // (name, parser, flank size, original loc, expected upstream flank, expected downstream flank);
        // null means no flank is expected on that side.
        new FlankingGenomeLocTestData("atStartBase1", parser, 1,
                "1:1", null, "1:2");
        new FlankingGenomeLocTestData("atStartBase50", parser, 50,
                "1:1", null, "1:2-51");
        new FlankingGenomeLocTestData("atStartRange50", parser, 50,
                "1:1-10", null, "1:11-60");
        new FlankingGenomeLocTestData("atEndBase1", parser, 1,
                "1:" + contigLength, "1:" + (contigLength - 1), null);
        new FlankingGenomeLocTestData("atEndBase50", parser, 50,
                "1:" + contigLength, String.format("1:%d-%d", contigLength - 50, contigLength - 1), null);
        new FlankingGenomeLocTestData("atEndRange50", parser, 50,
                String.format("1:%d-%d", contigLength - 10, contigLength),
                String.format("1:%d-%d", contigLength - 60, contigLength - 11),
                null);
        new FlankingGenomeLocTestData("nearStartBase1", parser, 1,
                "1:2", "1:1", "1:3");
        new FlankingGenomeLocTestData("nearStartRange50", parser, 50,
                "1:21-30", "1:1-20", "1:31-80");
        new FlankingGenomeLocTestData("nearEndBase1", parser, 1,
                "1:" + (contigLength - 1), "1:" + (contigLength - 2), "1:" + contigLength);
        new FlankingGenomeLocTestData("nearEndRange50", parser, 50,
                String.format("1:%d-%d", contigLength - 30, contigLength - 21),
                String.format("1:%d-%d", contigLength - 80, contigLength - 31),
                String.format("1:%d-%d", contigLength - 20, contigLength));
        new FlankingGenomeLocTestData("beyondStartBase1", parser, 1,
                "1:3", "1:2", "1:4");
        new FlankingGenomeLocTestData("beyondStartRange50", parser, 50,
                "1:101-200", "1:51-100", "1:201-250");
        new FlankingGenomeLocTestData("beyondEndBase1", parser, 1,
                "1:" + (contigLength - 3),
                "1:" + (contigLength - 4),
                "1:" + (contigLength - 2));
        new FlankingGenomeLocTestData("beyondEndRange50", parser, 50,
                String.format("1:%d-%d", contigLength - 200, contigLength - 101),
                String.format("1:%d-%d", contigLength - 250, contigLength - 201),
                String.format("1:%d-%d", contigLength - 100, contigLength - 51));
        // edge cases: unmapped location and a loc spanning the whole contig
        new FlankingGenomeLocTestData("unmapped", parser, 50,
                "unmapped", null, null);
        new FlankingGenomeLocTestData("fullContig", parser, 50,
                "1", null, null);
        return FlankingGenomeLocTestData.getTests(FlankingGenomeLocTestData.class);
    }
@Test(dataProvider = "flankingGenomeLocs")
public void testCreateGenomeLocAtStart(FlankingGenomeLocTestData data) {
GenomeLoc actual = data.parser.createGenomeLocAtStart(data.original, data.basePairs);
String description = String.format("%n name: %s%n original: %s%n actual: %s%n expected: %s%n",
data.toString(), data.original, actual, data.flankStart);
assertEquals(actual, data.flankStart, description);
}
@Test(dataProvider = "flankingGenomeLocs")
public void testCreateGenomeLocAtStop(FlankingGenomeLocTestData data) {
GenomeLoc actual = data.parser.createGenomeLocAtStop(data.original, data.basePairs);
String description = String.format("%n name: %s%n original: %s%n actual: %s%n expected: %s%n",
data.toString(), data.original, actual, data.flankStop);
assertEquals(actual, data.flankStop, description);
}
@DataProvider(name = "parseGenomeLoc")
public Object[][] makeParsingTest() {
    // Each row: query string, expected contig, expected 1-based position.
    // Comma grouping characters must be accepted anywhere in the number.
    return new Object[][]{
            {"1:10", "1", 10},
            {"1:100", "1", 100},
            {"1:1000", "1", 1000},
            {"1:1,000", "1", 1000},
            {"1:10000", "1", 10000},
            {"1:10,000", "1", 10000},
            {"1:100000", "1", 100000},
            {"1:100,000", "1", 100000},
            {"1:1000000", "1", 1000000},
            {"1:1,000,000", "1", 1000000},
            {"1:1000,000", "1", 1000000},
            {"1:1,000000", "1", 1000000},
    };
}
@Test(dataProvider = "parseGenomeLoc")
public void testParsingPositions(final String string, final String contig, final int start) {
    // Parse against a fresh single-contig dictionary; a bare position must yield
    // a one-base interval (start == stop).
    final SAMFileHeader samHeader = ArtificialReadUtils.createArtificialSamHeader(1, 1, 10000000);
    final GenomeLocParser parser = new GenomeLocParser(samHeader.getSequenceDictionary());
    final GenomeLoc loc = parser.parseGenomeLoc(string);
    Assert.assertEquals(loc.getContig(), contig);
    Assert.assertEquals(loc.getStart(), start);
    Assert.assertEquals(loc.getStop(), start);
}
@Test
public void testCreationFromSAMRecord() {
    // A mapped read's loc must span exactly its alignment on the same contig.
    final GATKRead mappedRead = ArtificialReadUtils.createArtificialRead(header, "foo", 0, 1, 5);
    final GenomeLoc loc = genomeLocParser.createGenomeLoc(mappedRead);
    Assert.assertEquals(loc.getContig(), mappedRead.getContig());
    Assert.assertEquals(loc.getContigIndex(), ReadUtils.getReferenceIndex(mappedRead, header));
    Assert.assertEquals(loc.getStart(), mappedRead.getStart());
    Assert.assertEquals(loc.getStop(), mappedRead.getEnd());
}
@Test
public void testCreationFromSAMRecordUnmapped() {
    // Marking a read unmapped must produce an unmapped GenomeLoc.
    final GATKRead unmappedRead = ArtificialReadUtils.createArtificialRead(header, "foo", 0, 1, 5);
    unmappedRead.setIsUnmapped();
    Assert.assertTrue(genomeLocParser.createGenomeLoc(unmappedRead).isUnmapped());
}
@Test
public void testCreationFromSAMRecordUnmappedButOnGenome() {
    // An unmapped read that still carries a position ("placed" unmapped read,
    // cigar "*") should map to a single-base loc at its start coordinate.
    final GATKRead placedRead = ArtificialReadUtils.createArtificialRead(header, "foo", 0, 1, 5);
    placedRead.setIsUnmapped();
    placedRead.setCigar("*");
    final GenomeLoc loc = genomeLocParser.createGenomeLoc(placedRead);
    Assert.assertEquals(loc.getContig(), placedRead.getContig());
    Assert.assertEquals(loc.getContigIndex(), ReadUtils.getReferenceIndex(placedRead, header));
    Assert.assertEquals(loc.getStart(), placedRead.getStart());
    Assert.assertEquals(loc.getStop(), placedRead.getStart());
}
@Test
public void testCreationFromFeature() {
    // A Feature's contig/start/end must carry over verbatim into the GenomeLoc.
    final Feature simpleFeature = new SimpleFeature("1", 1, 5);
    final GenomeLoc loc = genomeLocParser.createGenomeLoc(simpleFeature);
    Assert.assertEquals(loc.getContig(), simpleFeature.getContig());
    Assert.assertEquals(loc.getStart(), simpleFeature.getStart());
    Assert.assertEquals(loc.getStop(), simpleFeature.getEnd());
}
@Test
public void testCreationFromLocatable() {
    // A Locatable's contig/start/end must carry over verbatim into the GenomeLoc.
    final Locatable interval = new SimpleInterval("1", 1, 5);
    final GenomeLoc loc = genomeLocParser.createGenomeLoc(interval);
    Assert.assertEquals(loc.getContig(), interval.getContig());
    Assert.assertEquals(loc.getStart(), interval.getStart());
    Assert.assertEquals(loc.getStop(), interval.getEnd());
}
// A null Locatable must be rejected up front with IllegalArgumentException
// rather than failing later with an NPE.
@Test(expectedExceptions = IllegalArgumentException.class)
public void testCreationFromNullLocatable() {
    genomeLocParser.createGenomeLoc((Locatable)null);
}
@Test
public void testCreationFromVariantContext() {
    // Build a 5bp variant (ref allele AAAAA) and check its span carries over.
    final VariantContext vc =
            new VariantContextBuilder("x", "1", 1, 5, Arrays.asList(Allele.create("AAAAA", true))).make();
    final GenomeLoc loc = genomeLocParser.createGenomeLoc(vc);
    Assert.assertEquals(loc.getContig(), vc.getContig());
    Assert.assertEquals(loc.getStart(), vc.getStart());
    Assert.assertEquals(loc.getStop(), vc.getEnd());
}
@Test
public void testcreateGenomeLocOnContig() throws IOException {
    // Use try-with-resources so the indexed FASTA is closed even when an
    // assertion fails (the original leaked the open sequence file). Widening
    // the throws clause from FileNotFoundException to IOException is
    // backward-compatible for a reflectively-invoked test method.
    try (final CachingIndexedFastaSequenceFile seq =
                 new CachingIndexedFastaSequenceFile(IOUtils.getPath(exampleReference))) {
        final SAMSequenceDictionary dict = seq.getSequenceDictionary();
        final GenomeLocParser genomeLocParser = new GenomeLocParser(dict);
        // Every contig in the dictionary should produce a loc spanning [1, length].
        for ( final SAMSequenceRecord rec : dict.getSequences() ) {
            final GenomeLoc loc = genomeLocParser.createOverEntireContig(rec.getSequenceName());
            Assert.assertEquals(loc.getContig(), rec.getSequenceName());
            Assert.assertEquals(loc.getStart(), 1);
            Assert.assertEquals(loc.getStop(), rec.getSequenceLength());
        }
    }
}
@DataProvider(name = "GenomeLocOnContig")
public Object[][] makeGenomeLocOnContig() {
    // Sweep starts from just before the contig to just past its end, at a few
    // interval lengths, so both clipped and rejected cases are produced.
    final List<Object[]> cases = new LinkedList<>();
    final int contigLength = header.getSequence(0).getSequenceLength();
    final int[] lengths = {1, 10, 20};
    for (int start = -10; start < contigLength + 10; start++) {
        for (final int len : lengths) {
            cases.add(new Object[]{"1", start, start + len});
        }
    }
    return cases.toArray(new Object[][]{});
}
@Test(dataProvider = "GenomeLocOnContig")
public void testGenomeLocOnContig(final String contig, final int start, final int stop) {
    final int contigLength = header.getSequence(0).getSequenceLength();
    final GenomeLoc loc = genomeLocParser.createGenomeLocOnContig(contig, start, stop);
    // Intervals entirely off the contig yield null; otherwise the loc is
    // clipped to [1, contigLength].
    final boolean entirelyOffContig = stop < 1 || start > contigLength;
    if (entirelyOffContig) {
        Assert.assertNull(loc, "GenomeLoc should be null if the start/stops are not meaningful");
        return;
    }
    Assert.assertNotNull(loc);
    Assert.assertEquals(loc.getContig(), contig);
    Assert.assertEquals(loc.getStart(), Math.max(start, 1));
    Assert.assertEquals(loc.getStop(), Math.min(stop, contigLength));
}
@DataProvider(name = "GenomeLocPadding")
public Object[][] makeGenomeLocPadding() {
    // All (interval, pad) combinations on contig 1, with pads up to the full
    // contig length so clipping at both edges is exercised.
    final List<Object[]> cases = new LinkedList<>();
    final int contigLength = header.getSequence(0).getSequenceLength();
    for (int pad = 0; pad <= contigLength; pad++) {
        for (int start = 1; start < contigLength; start++) {
            for (int stop = start; stop < contigLength; stop++) {
                cases.add(new Object[]{genomeLocParser.createGenomeLoc("1", start, stop), pad});
            }
        }
    }
    return cases.toArray(new Object[][]{});
}
@Test(dataProvider = "GenomeLocPadding")
public void testGenomeLocPadding(final GenomeLoc input, final int pad) {
    // Padding extends both ends by `pad`, clipped to [1, contigLength].
    final int contigLength = header.getSequence(0).getSequenceLength();
    final GenomeLoc result = genomeLocParser.createPaddedGenomeLoc(input, pad);
    Assert.assertNotNull(result);
    Assert.assertEquals(result.getContig(), input.getContig());
    Assert.assertEquals(result.getStart(), Math.max(input.getStart() - pad, 1));
    Assert.assertEquals(result.getStop(), Math.min(input.getStop() + pad, contigLength));
}
@Test
public void testQueryAllHG38Intervals() {
    // Load only the sequence dictionary from a header-only hg38 VCF.
    final File testFile = new File (publicTestDir, "org/broadinstitute/hellbender/engine/Homo_sapiens_assembly38.headerOnly.vcf.gz");
    final SAMSequenceDictionary sd;
    try (VCFFileReader vcfReader = new VCFFileReader(testFile, false)) {
        sd = vcfReader.getFileHeader().getSequenceDictionary();
    }
    // Test that we can use any contig from hg38 as a query against a VCF with an hg38 sequence dictionary, in any
    // query format, without ambiguity. An enhanced for loop replaces the
    // redundant getSequences().stream().forEach(...) side-effect pipeline.
    final GenomeLocParser localGenomeLocParser = new GenomeLocParser(sd);
    for (final SAMSequenceRecord hg38Contig : sd.getSequences()) {
        // Bare contig name -> the whole contig.
        assertValidUniqueInterval(
                localGenomeLocParser,
                hg38Contig.getSequenceName(),
                new SimpleInterval(hg38Contig.getSequenceName(), 1, hg38Contig.getSequenceLength()));
        // "contig:pos" -> a single base.
        assertValidUniqueInterval(
                localGenomeLocParser,
                hg38Contig.getSequenceName() + ":1",
                new SimpleInterval(hg38Contig.getSequenceName(), 1, 1));
        // "contig:pos+" -> from pos to the end of the contig.
        assertValidUniqueInterval(
                localGenomeLocParser,
                hg38Contig.getSequenceName() + ":1+",
                new SimpleInterval(hg38Contig.getSequenceName(), 1, hg38Contig.getSequenceLength()));
        // "contig:start-stop" -> the explicit range.
        assertValidUniqueInterval(
                localGenomeLocParser,
                hg38Contig.getSequenceName() + ":1-1",
                new SimpleInterval(hg38Contig.getSequenceName(), 1, 1));
    }
}
/**
 * Parses {@code queryString} with the given parser and asserts that it resolves
 * to exactly {@code expectedInterval}.
 */
private void assertValidUniqueInterval(
        final GenomeLocParser localGenomeLocParser,
        final String queryString,
        final SimpleInterval expectedInterval) {
    final GenomeLoc parsed = localGenomeLocParser.parseGenomeLoc(queryString);
    Assert.assertEquals(new SimpleInterval(parsed), expectedInterval);
}
}
| |
/*******************************************************************************
* Copyright (c) 2000, 2005 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*******************************************************************************/
package org.eclipse.swt.examples.hoverhelp;
import java.io.*;
import java.text.*;
import java.util.*;
import org.eclipse.swt.*;
import org.eclipse.swt.events.*;
import org.eclipse.swt.graphics.*;
import org.eclipse.swt.layout.*;
import org.eclipse.swt.widgets.*;
/**
* This example demonstrates how to implement hover help feedback
* using the MouseTrackListener.
*/
public class HoverHelp {
    private static ResourceBundle resourceBundle = ResourceBundle.getBundle("examples_hoverhelp");

    // Indexes into the images array (one entry per imageLocations element).
    static final int
        hhiInformation = 0,
        hhiWarning = 1;
    static final String[] imageLocations = {
        "information.gif",
        "warning.gif"
    };
    Image[] images;

    /**
     * Runs main program.
     */
    public static void main (String [] args) {
        Display display = new Display();
        Shell shell = new HoverHelp().open(display);
        // Event loop
        while (shell != null && ! shell.isDisposed()) {
            if (! display.readAndDispatch()) display.sleep();
        }
        // Cleanup
        display.dispose();
    }

    /**
     * Opens the main program.
     *
     * @param display the display to create the window on
     * @return the open shell, or null if the images could not be loaded
     */
    public Shell open(Display display) {
        // Load the images
        Class<HoverHelp> clazz = HoverHelp.class;
        try {
            if (images == null) {
                images = new Image[imageLocations.length];
                for (int i = 0; i < imageLocations.length; ++i) {
                    InputStream stream = clazz.getResourceAsStream(imageLocations[i]);
                    // Close the stream in a finally block so it is released even
                    // if decoding the image fails (the original leaked it then).
                    try {
                        ImageData source = new ImageData(stream);
                        ImageData mask = source.getTransparencyMask();
                        images[i] = new Image(display, source, mask);
                    } finally {
                        try {
                            stream.close();
                        } catch (IOException e) {
                            e.printStackTrace();
                        }
                    }
                }
            }
        } catch (Exception ex) {
            System.err.println(getResourceString("error.CouldNotLoadResources",
                new Object[] { ex.getMessage() }));
            return null;
        }

        // Create the window
        Shell shell = new Shell();
        createPartControl(shell);
        shell.addDisposeListener(new DisposeListener() {
            public void widgetDisposed(DisposeEvent e) {
                /* Free resources */
                if (images != null) {
                    for (int i = 0; i < images.length; i++) {
                        final Image image = images[i];
                        if (image != null) image.dispose();
                    }
                    images = null;
                }
            }
        });
        shell.pack();
        shell.open();
        return shell;
    }

    /**
     * Gets a string from the resource bundle.
     * We don't want to crash because of a missing String.
     * Returns the key if not found.
     */
    public String getResourceString(String key) {
        try {
            return resourceBundle.getString(key);
        } catch (MissingResourceException e) {
            return key;
        } catch (NullPointerException e) {
            return "!" + key + "!";
        }
    }

    /**
     * Gets a string from the resource bundle and binds it
     * with the given arguments. If the key is not found,
     * return the key.
     */
    public String getResourceString(String key, Object[] args) {
        try {
            return MessageFormat.format(getResourceString(key), args);
        } catch (MissingResourceException e) {
            return key;
        } catch (NullPointerException e) {
            return "!" + key + "!";
        }
    }

    /**
     * Creates the example: a tool bar, a table, a tree and a button, each with
     * hover-help data attached and registered with a shared ToolTipHandler.
     */
    public void createPartControl(Composite frame) {
        final ToolTipHandler tooltip = new ToolTipHandler(frame.getShell());

        GridLayout layout = new GridLayout();
        layout.numColumns = 3;
        frame.setLayout(layout);

        // The key used to request context help differs per platform.
        String platform = SWT.getPlatform();
        String helpKey = "F1";
        if (platform.equals("gtk")) helpKey = "Ctrl+F1";
        if (platform.equals("carbon") || platform.equals("cocoa")) helpKey = "Help";

        ToolBar bar = new ToolBar (frame, SWT.BORDER);
        for (int i=0; i<5; i++) {
            ToolItem item = new ToolItem (bar, SWT.PUSH);
            // Integer.valueOf avoids the deprecated new Integer(int) constructor.
            item.setText (getResourceString("ToolItem.text", new Object[] { Integer.valueOf(i) }));
            item.setData ("TIP_TEXT", getResourceString("ToolItem.tooltip",
                new Object[] { item.getText(), helpKey }));
            item.setData ("TIP_HELPTEXTHANDLER", new ToolTipHelpTextHandler() {
                public String getHelpText(Widget widget) {
                    Item item = (Item) widget;
                    return getResourceString("ToolItem.help", new Object[] { item.getText() });
                }
            });
        }
        GridData gridData = new GridData();
        gridData.horizontalSpan = 3;
        bar.setLayoutData(gridData);
        tooltip.activateHoverHelp(bar);

        Table table = new Table (frame, SWT.BORDER);
        for (int i=0; i<4; i++) {
            TableItem item = new TableItem (table, SWT.PUSH);
            item.setText (getResourceString("Item", new Object[] { Integer.valueOf(i) }));
            item.setData ("TIP_IMAGE", images[hhiInformation]);
            item.setText (getResourceString("TableItem.text", new Object[] { Integer.valueOf(i) }));
            item.setData ("TIP_TEXT", getResourceString("TableItem.tooltip",
                new Object[] { item.getText(), helpKey }));
            item.setData ("TIP_HELPTEXTHANDLER", new ToolTipHelpTextHandler() {
                public String getHelpText(Widget widget) {
                    Item item = (Item) widget;
                    return getResourceString("TableItem.help", new Object[] { item.getText() });
                }
            });
        }
        table.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_FILL));
        tooltip.activateHoverHelp(table);

        Tree tree = new Tree (frame, SWT.BORDER);
        for (int i=0; i<4; i++) {
            TreeItem item = new TreeItem (tree, SWT.PUSH);
            item.setText (getResourceString("Item", new Object[] { Integer.valueOf(i) }));
            item.setData ("TIP_IMAGE", images[hhiWarning]);
            item.setText (getResourceString("TreeItem.text", new Object[] { Integer.valueOf(i) }));
            item.setData ("TIP_TEXT", getResourceString("TreeItem.tooltip",
                new Object[] { item.getText(), helpKey}));
            item.setData ("TIP_HELPTEXTHANDLER", new ToolTipHelpTextHandler() {
                public String getHelpText(Widget widget) {
                    Item item = (Item) widget;
                    return getResourceString("TreeItem.help", new Object[] { item.getText() });
                }
            });
        }
        tree.setLayoutData(new GridData(GridData.VERTICAL_ALIGN_FILL));
        tooltip.activateHoverHelp(tree);

        Button button = new Button (frame, SWT.PUSH);
        button.setText (getResourceString("Hello.text"));
        button.setData ("TIP_TEXT", getResourceString("Hello.tooltip"));
        tooltip.activateHoverHelp(button);
    }

    /**
     * Emulated tooltip handler
     * Notice that we could display anything in a tooltip besides text and images.
     * For instance, it might make sense to embed large tables of data or buttons linking
     * data under inspection to material elsewhere, or perform dynamic lookup for creating
     * tooltip text on the fly.
     */
    protected static class ToolTipHandler {
        private Shell parentShell;
        private Shell tipShell;
        private Label tipLabelImage, tipLabelText;
        private Widget tipWidget; // widget this tooltip is hovering over
        private Point tipPosition; // the position being hovered over

        /**
         * Creates a new tooltip handler
         *
         * @param parent the parent Shell
         */
        public ToolTipHandler(Shell parent) {
            final Display display = parent.getDisplay();
            this.parentShell = parent;

            tipShell = new Shell(parent, SWT.ON_TOP | SWT.TOOL);
            GridLayout gridLayout = new GridLayout();
            gridLayout.numColumns = 2;
            gridLayout.marginWidth = 2;
            gridLayout.marginHeight = 2;
            tipShell.setLayout(gridLayout);

            tipShell.setBackground(display.getSystemColor(SWT.COLOR_INFO_BACKGROUND));

            tipLabelImage = new Label(tipShell, SWT.NONE);
            tipLabelImage.setForeground(display.getSystemColor(SWT.COLOR_INFO_FOREGROUND));
            tipLabelImage.setBackground(display.getSystemColor(SWT.COLOR_INFO_BACKGROUND));
            tipLabelImage.setLayoutData(new GridData(GridData.FILL_HORIZONTAL |
                GridData.VERTICAL_ALIGN_CENTER));

            tipLabelText = new Label(tipShell, SWT.NONE);
            tipLabelText.setForeground(display.getSystemColor(SWT.COLOR_INFO_FOREGROUND));
            tipLabelText.setBackground(display.getSystemColor(SWT.COLOR_INFO_BACKGROUND));
            tipLabelText.setLayoutData(new GridData(GridData.FILL_HORIZONTAL |
                GridData.VERTICAL_ALIGN_CENTER));
        }

        /**
         * Enables customized hover help for a specified control
         *
         * @param control the control on which to enable hoverhelp
         */
        public void activateHoverHelp(final Control control) {
            /*
             * Get out of the way if we attempt to activate the control underneath the tooltip
             */
            control.addMouseListener(new MouseAdapter () {
                @Override
                public void mouseDown (MouseEvent e) {
                    if (tipShell.isVisible()) tipShell.setVisible(false);
                }
            });

            /*
             * Trap hover events to pop-up tooltip
             */
            control.addMouseTrackListener(new MouseTrackAdapter () {
                @Override
                public void mouseExit(MouseEvent e) {
                    if (tipShell.isVisible()) tipShell.setVisible(false);
                    tipWidget = null;
                }
                @Override
                public void mouseHover (MouseEvent event) {
                    Point pt = new Point (event.x, event.y);
                    Widget widget = event.widget;
                    // For composite widgets, resolve the item under the cursor.
                    if (widget instanceof ToolBar) {
                        ToolBar w = (ToolBar) widget;
                        widget = w.getItem (pt);
                    }
                    if (widget instanceof Table) {
                        Table w = (Table) widget;
                        widget = w.getItem (pt);
                    }
                    if (widget instanceof Tree) {
                        Tree w = (Tree) widget;
                        widget = w.getItem (pt);
                    }
                    if (widget == null) {
                        tipShell.setVisible(false);
                        tipWidget = null;
                        return;
                    }
                    if (widget == tipWidget) return;
                    tipWidget = widget;
                    tipPosition = control.toDisplay(pt);
                    String text = (String) widget.getData("TIP_TEXT");
                    Image image = (Image) widget.getData("TIP_IMAGE");
                    tipLabelText.setText(text != null ? text : "");
                    tipLabelImage.setImage(image); // accepts null
                    tipShell.pack();
                    setHoverLocation(tipShell, tipPosition);
                    tipShell.setVisible(true);
                }
            });

            /*
             * Trap F1 Help to pop up a custom help box
             */
            control.addHelpListener(new HelpListener () {
                public void helpRequested(HelpEvent event) {
                    if (tipWidget == null) return;
                    ToolTipHelpTextHandler handler = (ToolTipHelpTextHandler)
                        tipWidget.getData("TIP_HELPTEXTHANDLER");
                    if (handler == null) return;
                    String text = handler.getHelpText(tipWidget);
                    if (text == null) return;

                    // Only act while the tooltip is showing: replace it with a
                    // full help shell at the same position.
                    if (tipShell.isVisible()) {
                        tipShell.setVisible(false);
                        Shell helpShell = new Shell(parentShell, SWT.SHELL_TRIM);
                        helpShell.setLayout(new FillLayout());
                        Label label = new Label(helpShell, SWT.NONE);
                        label.setText(text);
                        helpShell.pack();
                        setHoverLocation(helpShell, tipPosition);
                        helpShell.open();
                    }
                }
            });
        }

        /**
         * Sets the location for a hovering shell, keeping it inside the
         * display bounds and slightly below the hovered position.
         *
         * @param shell the object that is to hover
         * @param position the position of a widget to hover over
         */
        private void setHoverLocation(Shell shell, Point position) {
            Rectangle displayBounds = shell.getDisplay().getBounds();
            Rectangle shellBounds = shell.getBounds();
            shellBounds.x = Math.max(Math.min(position.x, displayBounds.width - shellBounds.width), 0);
            shellBounds.y = Math.max(Math.min(position.y + 16, displayBounds.height - shellBounds.height), 0);
            shell.setBounds(shellBounds);
        }
    }

    /**
     * ToolTip help handler
     */
    protected interface ToolTipHelpTextHandler {
        /**
         * Get help text
         * @param widget the widget that is under help
         * @return a help text string
         */
        public String getHelpText(Widget widget);
    }
}
| |
/*
* Copyright 2015 Brent Douglas and other contributors
* as indicated by the @author tags. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.machinecode.chainlink.repository.infinispan;
import gnu.trove.set.hash.THashSet;
import io.machinecode.chainlink.core.repository.BaseMapRepository;
import io.machinecode.chainlink.spi.marshalling.Marshalling;
import io.machinecode.chainlink.spi.repository.ExtendedJobExecution;
import io.machinecode.chainlink.spi.repository.ExtendedJobInstance;
import io.machinecode.chainlink.spi.repository.ExtendedStepExecution;
import io.machinecode.chainlink.spi.repository.PartitionExecution;
import org.infinispan.AdvancedCache;
import org.infinispan.Cache;
import org.infinispan.configuration.cache.TransactionConfiguration;
import org.infinispan.distexec.DefaultExecutorService;
import org.infinispan.distexec.DistributedExecutorService;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.transaction.LockingMode;
import javax.batch.runtime.JobExecution;
import javax.batch.runtime.JobInstance;
import javax.transaction.TransactionManager;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.Future;
/**
* @author <a href="mailto:brent.n.douglas@gmail.com">Brent Douglas</a>
* @since 1.0
*/
public class InfinispanRepository extends BaseMapRepository {

    // Cache region names, one per map exposed by this repository.
    protected static final String IDS = InfinispanRepository.class.getCanonicalName() + ".ids";
    protected static final String JOB_INSTANCES = InfinispanRepository.class.getCanonicalName() + ".jobInstances";
    protected static final String JOB_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".jobExecutions";
    protected static final String STEP_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".stepExecutions";
    protected static final String PARTITION_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".partitionExecutions";
    protected static final String JOB_INSTANCE_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".jobInstanceExecutions";
    protected static final String JOB_EXECUTION_INSTANCES = InfinispanRepository.class.getCanonicalName() + ".jobExecutionInstances";
    protected static final String JOB_EXECUTION_STEP_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".jobExecutionStepExecutions";
    protected static final String LATEST_JOB_EXECUTION_FOR_INSTANCE = InfinispanRepository.class.getCanonicalName() + ".latestJobExecutionForInstance";
    protected static final String STEP_EXECUTION_PARTITION_EXECUTIONS = InfinispanRepository.class.getCanonicalName() + ".stepExecutionPartitionExecutions";
    protected static final String JOB_EXECUTION_HISTORY = InfinispanRepository.class.getCanonicalName() + ".jobExecutionHistory";

    protected final AdvancedCache<String, Long> ids;
    protected final AdvancedCache<Long, ExtendedJobInstance> jobInstances;
    protected final AdvancedCache<Long, ExtendedJobExecution> jobExecutions;
    protected final AdvancedCache<Long, ExtendedStepExecution> stepExecutions;
    protected final AdvancedCache<Long, PartitionExecution> partitionExecutions;
    protected final AdvancedCache<Long, List<Long>> jobInstanceExecutions;
    protected final AdvancedCache<Long, Long> jobExecutionInstances;
    protected final AdvancedCache<Long, Set<Long>> jobExecutionStepExecutions;
    protected final AdvancedCache<Long, Long> latestJobExecutionForInstance;
    protected final AdvancedCache<Long, List<Long>> stepExecutionPartitionExecutions;
    protected final AdvancedCache<Long, Set<Long>> jobExecutionHistory;

    // Distributed executors used to run the fetch* callables on every node
    // that owns a slice of the corresponding cache.
    protected final DistributedExecutorService jobInstanceExecutor;
    protected final DistributedExecutorService jobExecutionExecutor;
    protected final EmbeddedCacheManager cacheManager;

    /**
     * Creates the repository, starting (and waiting for) every cache region
     * it needs and validating each region's transaction configuration.
     *
     * @throws InterruptedException if interrupted while waiting for a cache to start
     */
    public InfinispanRepository(final Marshalling marshalling, final EmbeddedCacheManager cacheManager) throws InterruptedException {
        super(marshalling);
        this.cacheManager = cacheManager;
        this.ids = _cache(cacheManager, IDS);
        this.jobInstances = _cache(cacheManager, JOB_INSTANCES);
        this.jobExecutions = _cache(cacheManager, JOB_EXECUTIONS);
        this.stepExecutions = _cache(cacheManager, STEP_EXECUTIONS);
        this.partitionExecutions = _cache(cacheManager, PARTITION_EXECUTIONS);
        this.jobInstanceExecutions = _cache(cacheManager, JOB_INSTANCE_EXECUTIONS);
        this.jobExecutionInstances = _cache(cacheManager, JOB_EXECUTION_INSTANCES);
        this.jobExecutionStepExecutions = _cache(cacheManager, JOB_EXECUTION_STEP_EXECUTIONS);
        this.latestJobExecutionForInstance = _cache(cacheManager, LATEST_JOB_EXECUTION_FOR_INSTANCE);
        this.stepExecutionPartitionExecutions = _cache(cacheManager, STEP_EXECUTION_PARTITION_EXECUTIONS);
        this.jobExecutionHistory = _cache(cacheManager, JOB_EXECUTION_HISTORY);
        this.jobInstanceExecutor = new DefaultExecutorService(jobInstances);
        this.jobExecutionExecutor = new DefaultExecutorService(jobExecutions);
    }

    /**
     * Fails fast unless the cache is transactional with pessimistic locking,
     * which {@link #_id(String)} relies on for its lock-then-increment sequence.
     */
    private static void assertConfig(final Cache<?,?> cache) {
        final TransactionConfiguration tr = cache.getCacheConfiguration().transaction();
        if (tr.lockingMode() != LockingMode.PESSIMISTIC
                || !tr.transactionMode().isTransactional()) {
            throw new IllegalStateException("Cache " + cache.getName() + " must be transactional with pessimistic locking."); //TODO Message
        }
    }

    /**
     * Obtains the named cache, starts it if necessary, polls until it is
     * running, validates its configuration and returns its advanced view.
     */
    private static <K,V> AdvancedCache<K,V> _cache(final EmbeddedCacheManager cacheManager, final String region) throws InterruptedException {
        final Cache<K, V> cache = cacheManager.getCache(region, true);
        if (!cacheManager.isRunning(region)) {
            cacheManager.startCaches(region);
        }
        // Busy-wait for startup; startCaches does not block until running.
        while (!cacheManager.isRunning(region)) {
            Thread.sleep(100);
        }
        assertConfig(cache);
        return cache.getAdvancedCache();
    }

    @Override
    protected Map<String, Long> ids() {
        return this.ids;
    }

    @Override
    protected Map<Long, ExtendedJobInstance> jobInstances() {
        return this.jobInstances;
    }

    @Override
    protected Map<Long, ExtendedJobExecution> jobExecutions() {
        return this.jobExecutions;
    }

    @Override
    protected Map<Long, ExtendedStepExecution> stepExecutions() {
        return this.stepExecutions;
    }

    @Override
    protected Map<Long, PartitionExecution> partitionExecutions() {
        return this.partitionExecutions;
    }

    @Override
    protected Map<Long, List<Long>> jobInstanceExecutions() {
        return this.jobInstanceExecutions;
    }

    @Override
    protected Map<Long, Long> jobExecutionInstances() {
        return this.jobExecutionInstances;
    }

    @Override
    protected Map<Long, Set<Long>> jobExecutionStepExecutions() {
        return this.jobExecutionStepExecutions;
    }

    @Override
    protected Map<Long, Long> latestJobExecutionForInstance() {
        return this.latestJobExecutionForInstance;
    }

    @Override
    protected Map<Long, List<Long>> stepExecutionPartitionExecutions() {
        return this.stepExecutionPartitionExecutions;
    }

    @Override
    protected Map<Long, Set<Long>> jobExecutionHistory() {
        return this.jobExecutionHistory;
    }

    /**
     * Generates the next id for {@code key} by locking the counter entry in a
     * pessimistic transaction and incrementing it (starting at 1).
     */
    @Override
    protected long _id(final String key) throws Exception {
        final TransactionManager transactionManager = ids.getTransactionManager();
        transactionManager.begin();
        try {
            final long id;
            ids.lock(key);
            final Long that = ids.get(key);
            id = that == null ? 1 : that + 1;
            ids.put(key, id);
            transactionManager.commit();
            return id;
        } catch (final Exception e) {
            transactionManager.rollback();
            throw e;
        }
    }

    @Override
    protected Set<String> fetchJobNames() throws Exception {
        final Set<String> ret = new THashSet<>();
        final List<Future<Set<String>>> futures = jobInstanceExecutor.submitEverywhere(new JobNameCallable());
        for (final Future<Set<String>> future : futures) {
            final Set<String> value = future.get();
            if (value != null) {
                // Use the value already fetched; the original redundantly
                // called future.get() a second time here.
                ret.addAll(value);
            }
        }
        return ret;
    }

    @Override
    protected int fetchJobInstanceCount(final String jobName) throws Exception {
        int count = 0;
        final List<Future<Integer>> futures = jobInstanceExecutor.submitEverywhere(new JobInstanceCountCallable(jobName));
        for (final Future<Integer> future : futures) {
            final Integer value = future.get();
            if (value != null) {
                count += value;
            }
        }
        return count;
    }

    @Override
    protected List<JobInstance> fetchJobInstances(final String jobName) throws Exception {
        final List<JobInstance> ret = new ArrayList<>();
        final List<Future<List<JobInstance>>> futures = jobInstanceExecutor.submitEverywhere(new JobInstanceCallable(jobName));
        for (final Future<List<JobInstance>> future : futures) {
            final List<JobInstance> value = future.get();
            if (value != null) {
                ret.addAll(value);
            }
        }
        return ret;
    }

    @Override
    protected List<Long> fetchRunningJobExecutionIds(final String jobName) throws Exception {
        final List<Long> ret = new ArrayList<>();
        final List<Future<List<Long>>> futures = jobExecutionExecutor.submitEverywhere(new RunningJobExecutionIdCallable(jobName));
        for (final Future<List<Long>> future : futures) {
            final List<Long> value = future.get();
            if (value != null) {
                ret.addAll(value);
            }
        }
        return ret;
    }

    @Override
    protected List<JobExecution> fetchJobExecutionsForJobInstance(final long jobInstanceId) throws Exception {
        final List<JobExecution> ret = new ArrayList<>();
        final List<Future<List<JobExecution>>> futures = jobExecutionExecutor.submitEverywhere(new JobExecutionsForJobInstanceCallable(jobInstanceId));
        for (final Future<List<JobExecution>> future : futures) {
            final List<JobExecution> value = future.get();
            if (value != null) {
                ret.addAll(value);
            }
        }
        return ret;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.segment;
import static com.google.common.base.Charsets.UTF_8;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkElementIndex;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkPositionIndex;
import static com.google.common.base.Preconditions.checkPositionIndexes;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.Iterables.addAll;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Lists.newArrayListWithCapacity;
import static com.google.common.collect.Lists.newArrayListWithExpectedSize;
import static com.google.common.collect.Lists.partition;
import static com.google.common.collect.Maps.newHashMap;
import static com.google.common.io.ByteStreams.read;
import static java.lang.Long.numberOfLeadingZeros;
import static java.lang.Math.min;
import static java.util.Arrays.asList;
import static java.util.Collections.emptyMap;
import static java.util.Collections.nCopies;
import static org.apache.jackrabbit.oak.api.Type.BINARIES;
import static org.apache.jackrabbit.oak.api.Type.BINARY;
import static org.apache.jackrabbit.oak.api.Type.NAME;
import static org.apache.jackrabbit.oak.api.Type.NAMES;
import static org.apache.jackrabbit.oak.api.Type.STRING;
import static org.apache.jackrabbit.oak.segment.MapEntry.newModifiedMapEntry;
import static org.apache.jackrabbit.oak.segment.MapRecord.BUCKETS_PER_LEVEL;
import static org.apache.jackrabbit.oak.segment.RecordWriters.newNodeStateWriter;
import static org.apache.jackrabbit.oak.segment.SegmentNodeState.getStableId;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.SequenceInputStream;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.PropertyType;
import com.google.common.io.Closeables;
import org.apache.jackrabbit.oak.api.Blob;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.plugins.memory.ModifiedNodeState;
import org.apache.jackrabbit.oak.segment.WriteOperationHandler.WriteOperation;
import org.apache.jackrabbit.oak.segment.file.tar.GCGeneration;
import org.apache.jackrabbit.oak.spi.blob.BlobStore;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.DefaultNodeStateDiff;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Converts nodes, properties, values, etc. to records and persists them with
* the help of a {@link WriteOperationHandler}. All public methods of this class
* are thread safe if and only if the {@link WriteOperationHandler} passed to
* the constructor is thread safe.
*/
public class DefaultSegmentWriter implements SegmentWriter {
private static final Logger LOG = LoggerFactory.getLogger(DefaultSegmentWriter.class);
@Nonnull
private final WriterCacheManager cacheManager;
@Nonnull
private final SegmentStore store;
@Nonnull
private final SegmentReader reader;
@Nonnull
private final SegmentIdProvider idProvider;
@CheckForNull
private final BlobStore blobStore;
@Nonnull
private final WriteOperationHandler writeOperationHandler;
/**
* Create a new instance of a {@code SegmentWriter}. Note the thread safety
* properties pointed out in the class comment.
*
* @param store store to write to
* @param reader segment reader for the {@code store}
* @param idProvider segment id provider for the {@code store}
* @param blobStore the blog store or {@code null} for inlined
* blobs
* @param cacheManager cache manager instance for the
* de-duplication caches used by this writer
* @param writeOperationHandler handler for write operations.
*/
public DefaultSegmentWriter(
        @Nonnull SegmentStore store,
        @Nonnull SegmentReader reader,
        @Nonnull SegmentIdProvider idProvider,
        @Nullable BlobStore blobStore,
        @Nonnull WriterCacheManager cacheManager,
        @Nonnull WriteOperationHandler writeOperationHandler
) {
    this.store = checkNotNull(store);
    this.reader = checkNotNull(reader);
    this.idProvider = checkNotNull(idProvider);
    // blobStore may legitimately be null: binaries are then inlined into segments.
    this.blobStore = blobStore;
    this.cacheManager = checkNotNull(cacheManager);
    this.writeOperationHandler = checkNotNull(writeOperationHandler);
}
@Override
public void flush() throws IOException {
    // Persist any buffered segment data held by the handler to the store.
    writeOperationHandler.flush(store);
}
@Override
@Nonnull
public RecordId writeMap(@Nullable final MapRecord base,
        @Nonnull final Map<String, RecordId> changes
)
        throws IOException {
    // Public entry point: wrap the private implementation in a write
    // operation so thread safety is delegated to writeOperationHandler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeMap(base, changes);
        }
    });
}
@Override
@Nonnull
public RecordId writeList(@Nonnull final List<RecordId> list) throws IOException {
    // Delegates to the private writeList through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeList(list);
        }
    });
}
@Override
@Nonnull
public RecordId writeString(@Nonnull final String string) throws IOException {
    // Delegates to the private writeString through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeString(string);
        }
    });
}
@Override
@Nonnull
public RecordId writeBlob(@Nonnull final Blob blob) throws IOException {
    // Delegates to the private writeBlob through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeBlob(blob);
        }
    });
}
@Override
@Nonnull
public RecordId writeBlock(@Nonnull final byte[] bytes, final int offset, final int length)
        throws IOException {
    // Delegates to the private writeBlock through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeBlock(bytes, offset, length);
        }
    });
}
@Override
@Nonnull
public RecordId writeStream(@Nonnull final InputStream stream) throws IOException {
    // Delegates to the private writeStream through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeStream(stream);
        }
    });
}
@Override
@Nonnull
public RecordId writeProperty(@Nonnull final PropertyState state) throws IOException {
    // Delegates to the private writeProperty through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeProperty(state);
        }
    });
}
@Override
@Nonnull
public RecordId writeNode(
        @Nonnull final NodeState state,
        @Nullable final ByteBuffer stableIdBytes)
        throws IOException {
    // Delegates to the private writeNode through the operation handler.
    return writeOperationHandler.execute(new SegmentWriteOperation() {
        @Nonnull
        @Override
        public RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException {
            return with(writer).writeNode(state, stableIdBytes);
        }
    });
}
/**
* This {@code WriteOperation} implementation is used internally to provide
* context to a recursive chain of calls without having pass the context
* as a separate argument (a poor mans monad). As such it is entirely
* <em>not thread safe</em>.
*/
private abstract class SegmentWriteOperation implements WriteOperation {
private SegmentBufferWriter writer;
private Cache<String, RecordId> stringCache;
private Cache<Template, RecordId> templateCache;
private Cache<String, RecordId> nodeCache;
@Nonnull
@Override
public abstract RecordId execute(@Nonnull SegmentBufferWriter writer) throws IOException;
@Nonnull
SegmentWriteOperation with(@Nonnull SegmentBufferWriter writer) {
    // An operation may be bound to a buffer writer exactly once.
    checkState(this.writer == null);
    this.writer = writer;
    int generation = writer.getGCGeneration().getGeneration();
    // Bind the de-duplication caches belonging to the writer's GC generation.
    this.stringCache = cacheManager.getStringCache(generation);
    this.templateCache = cacheManager.getTemplateCache(generation);
    this.nodeCache = cacheManager.getNodeCache(generation);
    return this;
}
/**
 * Write a map record by merging {@code changes} (value {@code null} means
 * "delete this key") into the optional {@code base} map.
 */
private RecordId writeMap(@Nullable MapRecord base,
        @Nonnull Map<String, RecordId> changes
)
        throws IOException {
    if (base != null && base.isDiff()) {
        // Unpack a compact "diff" record: it stores one (key, value) change
        // plus a reference to the underlying base map. Fold the recorded
        // change into 'changes' unless it is overridden there.
        Segment segment = base.getSegment();
        RecordId key = segment.readRecordId(base.getRecordNumber(), 8);
        String name = reader.readString(key);
        if (!changes.containsKey(name)) {
            changes.put(name, segment.readRecordId(base.getRecordNumber(), 8, 1));
        }
        base = new MapRecord(reader, segment.readRecordId(base.getRecordNumber(), 8, 2));
    }
    if (base != null && changes.size() == 1) {
        // A single update of an existing entry can itself be stored as a
        // compact diff branch record instead of rewriting buckets.
        Map.Entry<String, RecordId> change =
                changes.entrySet().iterator().next();
        RecordId value = change.getValue();
        if (value != null) {
            MapEntry entry = base.getEntry(change.getKey());
            if (entry != null) {
                if (value.equals(entry.getValue())) {
                    // No-op change: the base map already holds this value.
                    return base.getRecordId();
                } else {
                    return RecordWriters.newMapBranchWriter(entry.getHash(), asList(entry.getKey(),
                            value, base.getRecordId())).write(writer, store);
                }
            }
        }
    }
    // General case: materialise the change set as MapEntry objects.
    List<MapEntry> entries = newArrayList();
    for (Map.Entry<String, RecordId> entry : changes.entrySet()) {
        String key = entry.getKey();
        RecordId keyId = null;
        if (base != null) {
            MapEntry e = base.getEntry(key);
            if (e != null) {
                keyId = e.getKey(); // reuse the already-written key record
            }
        }
        // Only write a new key record for live (non-deleted) entries.
        if (keyId == null && entry.getValue() != null) {
            keyId = writeString(key);
        }
        if (keyId != null) {
            entries.add(newModifiedMapEntry(reader, key, keyId, entry.getValue()));
        }
    }
    return writeMapBucket(base, entries, 0);
}
/** Write a leaf map record holding the given entries at the given level. */
private RecordId writeMapLeaf(int level, Collection<MapEntry> entries) throws IOException {
    checkNotNull(entries);
    int size = entries.size();
    checkElementIndex(size, MapRecord.MAX_SIZE);
    checkPositionIndex(level, MapRecord.MAX_NUMBER_OF_LEVELS);
    // Only the deepest level may legitimately hold an empty leaf.
    checkArgument(size != 0 || level == MapRecord.MAX_NUMBER_OF_LEVELS);
    return RecordWriters.newMapLeafWriter(level, entries).write(writer, store);
}
/**
 * Write a branch map record referencing the given (possibly sparse) bucket
 * array; {@code bitmap} records which bucket slots are occupied.
 */
private RecordId writeMapBranch(int level, int size, MapRecord... buckets) throws IOException {
    int bitmap = 0;
    List<RecordId> bucketIds = newArrayListWithCapacity(buckets.length);
    for (int i = 0; i < buckets.length; i++) {
        if (buckets[i] != null) {
            // The compound |= narrows long->int; lossless as long as i < 32
            // (assumes buckets.length <= BUCKETS_PER_LEVEL <= 32 — TODO confirm).
            bitmap |= 1L << i;
            bucketIds.add(buckets[i].getRecordId());
        }
    }
    return RecordWriters.newMapBranchWriter(level, size, bitmap, bucketIds).write(writer, store);
}
/**
 * Recursively write one map bucket at the given level, merging {@code entries}
 * into {@code base}. May return {@code null} for an empty non-root bucket.
 */
private RecordId writeMapBucket(MapRecord base, Collection<MapEntry> entries, int level)
        throws IOException {
    // when no changed entries, return the base map (if any) as-is
    if (entries == null || entries.isEmpty()) {
        if (base != null) {
            return base.getRecordId();
        } else if (level == 0) {
            // the root of an empty map is an empty leaf record
            return RecordWriters.newMapLeafWriter().write(writer, store);
        } else {
            return null;
        }
    }
    // when no base map was given, write a fresh new map
    if (base == null) {
        // use leaf records for small maps or the last map level
        if (entries.size() <= BUCKETS_PER_LEVEL
                || level == MapRecord.MAX_NUMBER_OF_LEVELS) {
            return writeMapLeaf(level, entries);
        }
        // write a large map by dividing the entries into buckets
        MapRecord[] buckets = new MapRecord[BUCKETS_PER_LEVEL];
        List<List<MapEntry>> changes = splitToBuckets(entries, level);
        for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
            buckets[i] = mapRecordOrNull(writeMapBucket(null, changes.get(i), level + 1));
        }
        // combine the buckets into one big map
        return writeMapBranch(level, entries.size(), buckets);
    }
    // if the base map is small, update in memory and write as a new map
    if (base.isLeaf()) {
        Map<String, MapEntry> map = newHashMap();
        for (MapEntry entry : base.getEntries()) {
            map.put(entry.getName(), entry);
        }
        for (MapEntry entry : entries) {
            if (entry.isDeleted()) {
                map.remove(entry.getName());
            } else {
                map.put(entry.getName(), entry);
            }
        }
        return writeMapBucket(null, map.values(), level);
    }
    // finally, if the base map is large, handle updates per bucket
    int newSize = 0;
    int newCount = 0;
    MapRecord[] buckets = base.getBuckets();
    List<List<MapEntry>> changes = splitToBuckets(entries, level);
    for (int i = 0; i < BUCKETS_PER_LEVEL; i++) {
        buckets[i] = mapRecordOrNull(writeMapBucket(buckets[i], changes.get(i), level + 1));
        if (buckets[i] != null) {
            newSize += buckets[i].size();
            newCount++;
        }
    }
    // OAK-654: what if the updated map is smaller?
    if (newSize > BUCKETS_PER_LEVEL) {
        return writeMapBranch(level, newSize, buckets);
    } else if (newCount <= 1) {
        // up to one bucket contains entries, so return that as the new map
        for (MapRecord bucket : buckets) {
            if (bucket != null) {
                return bucket.getRecordId();
            }
        }
        // no buckets remaining, return empty map
        return writeMapBucket(null, null, level);
    } else {
        // combine all remaining entries into a leaf record
        List<MapEntry> list = newArrayList();
        for (MapRecord bucket : buckets) {
            if (bucket != null) {
                addAll(list, bucket.getEntries());
            }
        }
        return writeMapLeaf(level, list);
    }
}
/** Wrap a record id as a {@link MapRecord}, passing {@code null} through. */
private MapRecord mapRecordOrNull(RecordId id) {
    if (id == null) {
        return null;
    }
    return new MapRecord(reader, id);
}
/**
 * Writes a list record containing the given list of record identifiers.
 *
 * @param list non-empty list of record identifiers
 * @return list record identifier (the root of the bucket tree)
 */
private RecordId writeList(@Nonnull List<RecordId> list) throws IOException {
    checkNotNull(list);
    checkArgument(!list.isEmpty());
    List<RecordId> thisLevel = list;
    // Repeatedly group up to LEVEL_SIZE ids into bucket records until a
    // single root id remains.
    while (thisLevel.size() > 1) {
        List<RecordId> nextLevel = newArrayList();
        for (List<RecordId> bucket :
                partition(thisLevel, ListRecord.LEVEL_SIZE)) {
            if (bucket.size() > 1) {
                nextLevel.add(writeListBucket(bucket));
            } else {
                // A singleton bucket is propagated as-is; no extra record.
                nextLevel.add(bucket.get(0));
            }
        }
        thisLevel = nextLevel;
    }
    return thisLevel.iterator().next();
}
/** Write a bucket record for an intermediate level of a list record. */
private RecordId writeListBucket(List<RecordId> bucket) throws IOException {
    checkArgument(bucket.size() > 1);
    return RecordWriters.newListBucketWriter(bucket).write(writer, store);
}
/**
 * Partition the entries into {@code BUCKETS_PER_LEVEL} buckets by the bits
 * of each entry's hash that belong to the given level. Unused bucket slots
 * stay {@code null}.
 */
private List<List<MapEntry>> splitToBuckets(Collection<MapEntry> entries, int level) {
    int shift = 32 - (level + 1) * MapRecord.BITS_PER_LEVEL;
    int mask = (1 << MapRecord.BITS_PER_LEVEL) - 1;
    // Pre-size with null placeholders; buckets are created lazily below.
    List<List<MapEntry>> buckets =
            newArrayList(nCopies(MapRecord.BUCKETS_PER_LEVEL, (List<MapEntry>) null));
    for (MapEntry entry : entries) {
        int slot = (entry.getHash() >> shift) & mask;
        List<MapEntry> target = buckets.get(slot);
        if (target == null) {
            target = newArrayList();
            buckets.set(slot, target);
        }
        target.add(entry);
    }
    return buckets;
}
/** Write a long value record backed by the given list of block records. */
private RecordId writeValueRecord(long length, RecordId blocks) throws IOException {
    // Encode the length relative to MEDIUM_LIMIT with the two top bits set,
    // which marks this as a "long" value record.
    long len = (length - Segment.MEDIUM_LIMIT) | (0x3L << 62);
    return RecordWriters.newValueWriter(blocks, len).write(writer, store);
}
/** Write a small/medium value record with the data inlined in the segment. */
private RecordId writeValueRecord(int length, byte... data) throws IOException {
    checkArgument(length < Segment.MEDIUM_LIMIT);
    return RecordWriters.newValueWriter(length, data).write(writer, store);
}
/**
 * Writes a string value record, using the string de-duplication cache for
 * short strings.
 *
 * @param string string to be written
 * @return value record identifier
 */
private RecordId writeString(@Nonnull String string) throws IOException {
    RecordId id = stringCache.get(string);
    if (id != null) {
        return id; // shortcut if the same string was recently stored
    }
    byte[] data = string.getBytes(UTF_8);
    if (data.length < Segment.MEDIUM_LIMIT) {
        // only cache short strings to avoid excessive memory use
        id = writeValueRecord(data.length, data);
        stringCache.put(string, id);
        return id;
    }
    // Large string: split the UTF-8 bytes into blocks spread over bulk
    // segments; the result is a long value record over a block list.
    int pos = 0;
    List<RecordId> blockIds = newArrayListWithExpectedSize(
            data.length / SegmentStream.BLOCK_SIZE + 1);
    // write as many full bulk segments as possible
    while (pos + Segment.MAX_SEGMENT_SIZE <= data.length) {
        SegmentId bulkId = idProvider.newBulkSegmentId();
        store.writeSegment(bulkId, data, pos, Segment.MAX_SEGMENT_SIZE);
        for (int i = 0; i < Segment.MAX_SEGMENT_SIZE; i += SegmentStream.BLOCK_SIZE) {
            blockIds.add(new RecordId(bulkId, i));
        }
        pos += Segment.MAX_SEGMENT_SIZE;
    }
    // inline the remaining data as block records
    while (pos < data.length) {
        int len = min(SegmentStream.BLOCK_SIZE, data.length - pos);
        blockIds.add(writeBlock(data, pos, len));
        pos += len;
    }
    return writeValueRecord(data.length, writeList(blockIds));
}
/** @return {@code true} iff {@code id} belongs to this segment store. */
private boolean sameStore(SegmentId id) {
    return id.sameStore(store);
}
/**
 * @param blob blob to check
 * @return {@code true} iff {@code blob} is a {@code SegmentBlob} and
 * originates from the same segment store.
 */
private boolean sameStore(Blob blob) {
    return (blob instanceof SegmentBlob)
            && sameStore(((Record) blob).getRecordId().getSegmentId());
}
/**
 * Write a blob record. Reuses the existing record when the blob already
 * lives in this store and generation; writes only the blob id for external
 * or blob-store-resolvable blobs; otherwise serialises the stream.
 */
private RecordId writeBlob(@Nonnull Blob blob) throws IOException {
    if (sameStore(blob)) {
        SegmentBlob segmentBlob = (SegmentBlob) blob;
        if (!isOldGeneration(segmentBlob.getRecordId())) {
            // Current generation: the existing record can be referenced.
            return segmentBlob.getRecordId();
        }
        if (segmentBlob.isExternal()) {
            // Old generation but external: only the id needs rewriting.
            return writeBlobId(segmentBlob.getBlobId());
        }
    }
    String reference = blob.getReference();
    if (reference != null && blobStore != null) {
        String blobId = blobStore.getBlobId(reference);
        if (blobId != null) {
            return writeBlobId(blobId);
        } else {
            LOG.debug("No blob found for reference {}, inlining...", reference);
        }
    }
    // Fall back to fully serialising the blob's content.
    return writeStream(blob.getNewStream());
}
/**
 * Write a reference to an external blob. Short blob IDs (below
 * {@link Segment#BLOB_ID_SMALL_LIMIT} UTF-8 bytes) are inlined into the
 * blob-id record; longer ones are stored as a string record that the
 * blob-id record points to.
 *
 * @param blobId Blob ID.
 * @return Record ID pointing to the written blob ID.
 * @see Segment#BLOB_ID_SMALL_LIMIT
 */
private RecordId writeBlobId(String blobId) throws IOException {
    byte[] data = blobId.getBytes(UTF_8);
    if (data.length < Segment.BLOB_ID_SMALL_LIMIT) {
        return RecordWriters.newBlobIdWriter(data).write(writer, store);
    }
    RecordId refId = writeString(blobId);
    return RecordWriters.newBlobIdWriter(refId).write(writer, store);
}
/** Write a block record for the given byte range. */
private RecordId writeBlock(@Nonnull byte[] bytes, int offset, int length)
        throws IOException {
    checkNotNull(bytes);
    checkPositionIndexes(offset, offset + length, bytes.length);
    return RecordWriters.newBlockWriter(bytes, offset, length).write(writer, store);
}
/**
 * Write a stream record. The stream is always closed on exit; when an
 * exception is propagating, secondary close failures are suppressed
 * (the {@code threw} flag drives {@code Closeables.close}).
 */
private RecordId writeStream(@Nonnull InputStream stream) throws IOException {
    boolean threw = true;
    try {
        RecordId id = SegmentStream.getRecordIdIfAvailable(stream, store);
        if (id == null) {
            // This is either not a segment stream or a one from another store:
            // fully serialise the stream.
            id = internalWriteStream(stream);
        } else if (isOldGeneration(id)) {
            // This is a segment stream from this store but from an old generation:
            // try to link to the blocks if there are any.
            SegmentStream segmentStream = (SegmentStream) stream;
            List<RecordId> blockIds = segmentStream.getBlockIds();
            if (blockIds == null) {
                return internalWriteStream(stream);
            } else {
                return writeValueRecord(segmentStream.getLength(), writeList(blockIds));
            }
        }
        threw = false;
        return id;
    } finally {
        Closeables.close(stream, threw);
    }
}
/**
 * Fully serialise a stream: inline short binaries, delegate large ones to
 * the blob store when configured, otherwise spread them over bulk segments.
 */
private RecordId internalWriteStream(@Nonnull InputStream stream) throws IOException {
    // Special case for short binaries (up to about 16kB):
    // store them directly as small- or medium-sized value records
    byte[] data = new byte[Segment.MEDIUM_LIMIT];
    int n = read(stream, data, 0, data.length);
    if (n < Segment.MEDIUM_LIMIT) {
        return writeValueRecord(n, data);
    }
    if (blobStore != null) {
        // Delegate to the blob store, re-prepending the bytes already
        // consumed from the stream.
        String blobId = blobStore.writeBlob(new SequenceInputStream(
                new ByteArrayInputStream(data, 0, n), stream));
        return writeBlobId(blobId);
    }
    data = Arrays.copyOf(data, Segment.MAX_SEGMENT_SIZE);
    n += read(stream, data, n, Segment.MAX_SEGMENT_SIZE - n);
    long length = n;
    List<RecordId> blockIds =
            newArrayListWithExpectedSize(2 * n / SegmentStream.BLOCK_SIZE);
    // Write the data to bulk segments and collect the list of block ids
    while (n != 0) {
        SegmentId bulkId = idProvider.newBulkSegmentId();
        LOG.debug("Writing bulk segment {} ({} bytes)", bulkId, n);
        store.writeSegment(bulkId, data, 0, n);
        for (int i = 0; i < n; i += SegmentStream.BLOCK_SIZE) {
            blockIds.add(new RecordId(bulkId, data.length - n + i));
        }
        n = read(stream, data, 0, data.length);
        length += n;
    }
    return writeValueRecord(length, writeList(blockIds));
}
/** Write a property record with no previously stored values to reuse. */
private RecordId writeProperty(@Nonnull PropertyState state) throws IOException {
    return writeProperty(state, emptyMap());
}
/**
 * Write a property record, reusing value records from {@code previousValues}
 * (string value -> record id) where possible.
 */
private RecordId writeProperty(@Nonnull PropertyState state,
        @Nonnull Map<String, RecordId> previousValues
)
        throws IOException {
    Type<?> type = state.getType();
    int count = state.count();
    List<RecordId> valueIds = newArrayList();
    for (int i = 0; i < count; i++) {
        if (type.tag() == PropertyType.BINARY) {
            try {
                valueIds.add(writeBlob(state.getValue(BINARY, i)));
            } catch (IOException e) {
                // NOTE(review): rewraps a checked IOException as unchecked even
                // though this method already declares 'throws IOException' —
                // looks like a leftover; confirm callers before changing the
                // thrown exception type.
                throw new IllegalStateException("Unexpected IOException", e);
            }
        } else {
            String value = state.getValue(STRING, i);
            RecordId valueId = previousValues.get(value);
            if (valueId == null) {
                valueId = writeString(value);
            }
            valueIds.add(valueId);
        }
    }
    if (!type.isArray()) {
        // Single-valued property: the value record itself is the result.
        return valueIds.iterator().next();
    } else if (count == 0) {
        return RecordWriters.newListWriter().write(writer, store);
    } else {
        return RecordWriters.newListWriter(count, writeList(valueIds)).write(writer, store);
    }
}
/**
 * Write a template record, using the template de-duplication cache.
 * The {@code head} int packs flags and counts: bit 31 = has primary type,
 * bit 30 = has mixins, bit 29 = zero child nodes, bit 28 = many child
 * nodes, bits 18.. = mixin count, low bits = property count.
 */
private RecordId writeTemplate(Template template) throws IOException {
    checkNotNull(template);
    RecordId id = templateCache.get(template);
    if (id != null) {
        return id; // shortcut if the same template was recently stored
    }
    Collection<RecordId> ids = newArrayList();
    int head = 0;
    RecordId primaryId = null;
    PropertyState primaryType = template.getPrimaryType();
    if (primaryType != null) {
        head |= 1 << 31;
        primaryId = writeString(primaryType.getValue(NAME));
        ids.add(primaryId);
    }
    List<RecordId> mixinIds = null;
    PropertyState mixinTypes = template.getMixinTypes();
    if (mixinTypes != null) {
        head |= 1 << 30;
        mixinIds = newArrayList();
        for (String mixin : mixinTypes.getValue(NAMES)) {
            mixinIds.add(writeString(mixin));
        }
        ids.addAll(mixinIds);
        checkState(mixinIds.size() < (1 << 10));
        head |= mixinIds.size() << 18;
    }
    RecordId childNameId = null;
    String childName = template.getChildName();
    // Identity comparison is intentional: these are sentinel instances.
    if (childName == Template.ZERO_CHILD_NODES) {
        head |= 1 << 29;
    } else if (childName == Template.MANY_CHILD_NODES) {
        head |= 1 << 28;
    } else {
        childNameId = writeString(childName);
        ids.add(childNameId);
    }
    PropertyTemplate[] properties = template.getPropertyTemplates();
    RecordId[] propertyNames = new RecordId[properties.length];
    byte[] propertyTypes = new byte[properties.length];
    for (int i = 0; i < properties.length; i++) {
        // Note: if the property names are stored in more than 255 separate
        // segments, this will not work.
        propertyNames[i] = writeString(properties[i].getName());
        Type<?> type = properties[i].getType();
        if (type.isArray()) {
            // Array types are encoded as the negated type tag.
            propertyTypes[i] = (byte) -type.tag();
        } else {
            propertyTypes[i] = (byte) type.tag();
        }
    }
    RecordId propNamesId = null;
    if (propertyNames.length > 0) {
        propNamesId = writeList(asList(propertyNames));
        ids.add(propNamesId);
    }
    checkState(propertyNames.length < (1 << 18));
    head |= propertyNames.length;
    RecordId tid = RecordWriters.newTemplateWriter(ids, propertyNames,
            propertyTypes, head, primaryId, mixinIds, childNameId,
            propNamesId).write(writer, store);
    templateCache.put(template, tid);
    return tid;
}
/**
 * Write a node record, first attempting de-duplication against records
 * already present in this store or in the node cache.
 */
private RecordId writeNode(@Nonnull NodeState state, @Nullable ByteBuffer stableIdBytes)
        throws IOException {
    RecordId compactedId = deduplicateNode(state);
    if (compactedId != null) {
        return compactedId;
    }
    // Carry over the stable id of an existing SegmentNodeState if none given.
    if (state instanceof SegmentNodeState && stableIdBytes == null) {
        stableIdBytes = ((SegmentNodeState) state).getStableIdBytes();
    }
    RecordId recordId = writeNodeUncached(state, stableIdBytes);
    if (stableIdBytes != null) {
        // This node state has been rewritten because it is from an older
        // generation (e.g. due to compaction). Put it into the cache for
        // deduplication of hard links to it (e.g. checkpoints).
        nodeCache.put(getStableId(stableIdBytes), recordId, cost(state));
    }
    return recordId;
}
/**
 * Cache-cost heuristic for a node: starts at {@code Byte.MIN_VALUE} for a
 * node without children and grows by one per doubling of the child count.
 */
private byte cost(NodeState node) {
    long childCount = node.getChildNodeCount(Long.MAX_VALUE);
    return (byte) (Byte.MIN_VALUE + 64 - numberOfLeadingZeros(childCount));
}
/**
 * Write a node record without consulting the node de-duplication cache for
 * the node itself (children and properties are still de-duplicated against
 * the compacted base state where possible).
 */
private RecordId writeNodeUncached(@Nonnull NodeState state, @Nullable ByteBuffer stableIdBytes)
        throws IOException {
    ModifiedNodeState after = null;
    if (state instanceof ModifiedNodeState) {
        after = (ModifiedNodeState) state;
    }
    RecordId beforeId = null;
    if (after != null) {
        // Pass null to indicate we don't want to update the node write statistics
        // when deduplicating the base state
        beforeId = deduplicateNode(after.getBaseState());
    }
    SegmentNodeState before = null;
    Template beforeTemplate = null;
    if (beforeId != null) {
        before = reader.readNode(beforeId);
        beforeTemplate = before.getTemplate();
    }
    List<RecordId> ids = newArrayList();
    Template template = new Template(reader, state);
    if (template.equals(beforeTemplate)) {
        // unchanged template: reuse the already-written template record
        ids.add(before.getTemplateId());
    } else {
        ids.add(writeTemplate(template));
    }
    String childName = template.getChildName();
    if (childName == Template.MANY_CHILD_NODES) {
        MapRecord base;
        Map<String, RecordId> childNodes;
        if (before != null
                && before.getChildNodeCount(2) > 1
                && after.getChildNodeCount(2) > 1) {
            // both sides have multiple children: diff against the base map
            base = before.getChildNodeMap();
            childNodes = new ChildNodeCollectorDiff().diff(before, after);
        } else {
            // otherwise write all children from scratch
            base = null;
            childNodes = newHashMap();
            for (ChildNodeEntry entry : state.getChildNodeEntries()) {
                childNodes.put(
                        entry.getName(),
                        writeNode(entry.getNodeState(), null));
            }
        }
        ids.add(writeMap(base, childNodes));
    } else if (childName != Template.ZERO_CHILD_NODES) {
        // exactly one named child
        ids.add(writeNode(state.getChildNode(template.getChildName()), null));
    }
    List<RecordId> pIds = newArrayList();
    for (PropertyTemplate pt : template.getPropertyTemplates()) {
        String name = pt.getName();
        PropertyState property = state.getProperty(name);
        assert property != null;
        if (before != null) {
            // If this property is already present in before (the base state)
            // and it hasn't been modified use that one. This will result
            // in an already compacted property to be reused given before
            // has been already compacted.
            PropertyState beforeProperty = before.getProperty(name);
            if (property.equals(beforeProperty)) {
                property = beforeProperty;
            }
        }
        if (sameStore(property)) {
            RecordId pid = ((Record) property).getRecordId();
            if (isOldGeneration(pid)) {
                pIds.add(writeProperty(property));
            } else {
                pIds.add(pid);
            }
        } else if (before == null || !sameStore(before)) {
            pIds.add(writeProperty(property));
        } else {
            // reuse previously stored property, if possible
            PropertyTemplate bt = beforeTemplate.getPropertyTemplate(name);
            if (bt == null) {
                pIds.add(writeProperty(property)); // new property
            } else {
                SegmentPropertyState bp = beforeTemplate.getProperty(before.getRecordId(), bt.getIndex());
                if (property.equals(bp)) {
                    pIds.add(bp.getRecordId()); // no changes
                } else if (bp.isArray() && bp.getType() != BINARIES) {
                    // reuse entries from the previous list
                    pIds.add(writeProperty(property, bp.getValueRecords()));
                } else {
                    pIds.add(writeProperty(property));
                }
            }
        }
    }
    if (!pIds.isEmpty()) {
        ids.add(writeList(pIds));
    }
    RecordId stableId = null;
    if (stableIdBytes != null) {
        // store the stable id as a block record referenced by the node
        ByteBuffer buffer = stableIdBytes.duplicate();
        byte[] bytes = new byte[buffer.remaining()];
        buffer.get(bytes);
        stableId = writeBlock(bytes, 0, bytes.length);
    }
    return newNodeStateWriter(stableId, ids).write(writer, store);
}
/**
 * Try to deduplicate the passed {@code node}. This succeeds if
 * the passed node state has already been persisted to this store and
 * either it has the same generation or it has been already compacted
 * and is still in the de-duplication cache for nodes.
 *
 * @param node The node states to de-duplicate.
 * @return the id of the de-duplicated node or {@code null} if none.
 */
@CheckForNull
private RecordId deduplicateNode(@Nonnull NodeState node) {
    if (!(node instanceof SegmentNodeState)) {
        // De-duplication only for persisted node states
        return null;
    }
    SegmentNodeState sns = (SegmentNodeState) node;
    if (!sameStore(sns)) {
        // De-duplication only within same store
        return null;
    }
    if (!isOldGeneration(sns.getRecordId())) {
        // This segment node state is already in this store, no need to
        // write it again
        return sns.getRecordId();
    }
    // This is a segment node state from an old generation. Check
    // whether an equivalent one of the current generation is in the
    // cache
    return nodeCache.get(sns.getStableId());
}
/**
 * @param node node to check
 * @return {@code true} iff {@code node} originates from the same
 * segment store.
 */
private boolean sameStore(SegmentNodeState node) {
    return sameStore(node.getRecordId().getSegmentId());
}
/**
 * @param property property to check
 * @return {@code true} iff {@code property} is a {@code
 * SegmentPropertyState} and originates from the same segment store.
 */
private boolean sameStore(PropertyState property) {
    return (property instanceof SegmentPropertyState)
            && sameStore(((Record) property).getRecordId().getSegmentId());
}
/**
 * @return {@code true} iff the segment holding {@code id} belongs to a GC
 * generation older than this writer's, and must therefore be rewritten.
 */
private boolean isOldGeneration(RecordId id) {
    try {
        GCGeneration thatGen = id.getSegmentId().getGcGeneration();
        GCGeneration thisGen = writer.getGCGeneration();
        if (thatGen.isCompacted()) {
            // If the segment containing the base state is compacted it is
            // only considered old if it is from a earlier full generation.
            // Otherwise it is from the same tail and it is safe to reference.
            return thatGen.getFullGeneration() < thisGen.getFullGeneration();
        } else {
            // If the segment containing the base state is from a regular writer
            // it is considered old as soon as it is from an earlier generation.
            return thatGen.compareWith(thisGen) < 0;
        }
    } catch (SegmentNotFoundException snfe) {
        // This SNFE means a defer compacted node state is too far
        // in the past. It has been gc'ed already and cannot be
        // compacted.
        // Consider increasing SegmentGCOptions.getRetainedGenerations()
        throw new SegmentNotFoundException(
                "Cannot copy record from a generation that has been gc'ed already", snfe);
    }
}
/**
 * NodeStateDiff that collects child-node changes into a name -> record id
 * map (a {@code null} id marks a deleted child, as expected by writeMap).
 * IOExceptions raised while writing child nodes are stashed and rethrown
 * from {@link #diff}.
 */
private class ChildNodeCollectorDiff extends DefaultNodeStateDiff {
    // Collected changes; null values are deletion markers.
    private final Map<String, RecordId> childNodes = newHashMap();
    // First IOException raised in a callback, if any.
    private IOException exception;

    public Map<String, RecordId> diff(SegmentNodeState before, ModifiedNodeState after) throws IOException {
        after.compareAgainstBaseState(before, this);
        if (exception != null) {
            // Rewrap so this call site's stack trace is preserved as well.
            throw new IOException(exception);
        } else {
            return childNodes;
        }
    }

    @Override
    public boolean childNodeAdded(String name, NodeState after) {
        try {
            childNodes.put(name, writeNode(after, null));
        } catch (IOException e) {
            exception = e;
            return false; // abort the diff early
        }
        return true;
    }

    @Override
    public boolean childNodeChanged(
            String name, NodeState before, NodeState after
    ) {
        try {
            childNodes.put(name, writeNode(after, null));
        } catch (IOException e) {
            exception = e;
            return false; // abort the diff early
        }
        return true;
    }

    @Override
    public boolean childNodeDeleted(String name, NodeState before) {
        childNodes.put(name, null);
        return true;
    }
}
}
}
| |
/**
* Copyright (c) 2015 Source Auditor Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.spdx.compare;
import java.util.Arrays;
import java.util.Comparator;
import org.apache.poi.ss.usermodel.Cell;
import org.apache.poi.ss.usermodel.CellStyle;
import org.apache.poi.ss.usermodel.Row;
import org.apache.poi.ss.usermodel.Sheet;
import org.apache.poi.ss.usermodel.Workbook;
import org.spdx.rdfparser.InvalidSPDXAnalysisException;
import org.spdx.rdfparser.model.ExternalDocumentRef;
import org.spdx.spdxspreadsheet.AbstractSheet;
import com.google.common.base.Objects;
/**
* Sheet that compares the external document references
* @author Gary O'Neall
*
*/
public class ExternalReferencesSheet extends AbstractSheet {
/**
 * Orders external document references by document namespace, breaking ties
 * by checksum value. {@code null} references sort before non-null ones.
 */
private static class ExternalDocRefComparator implements Comparator<ExternalDocumentRef> {
    /* (non-Javadoc)
     * @see java.util.Comparator#compare(java.lang.Object, java.lang.Object)
     */
    @Override
    public int compare(ExternalDocumentRef o1, ExternalDocumentRef o2) {
        if (o1 == null) {
            // Fix: the previous implementation returned -1 for
            // compare(null, null), violating the Comparator contract
            // (sgn(compare(x, y)) must equal -sgn(compare(y, x))).
            return o2 == null ? 0 : -1;
        }
        if (o2 == null) {
            return 1;
        }
        int retval = o1.getSpdxDocumentNamespace().compareTo(o2.getSpdxDocumentNamespace());
        if (retval != 0) {
            return retval;
        }
        try {
            return o1.getChecksum().getValue().compareTo(o2.getChecksum().getValue());
        } catch (InvalidSPDXAnalysisException e) {
            // Checksums unavailable: treat as equal on a namespace tie
            // (same result as the original, which returned retval == 0 here).
            return 0;
        }
    }
}
ExternalDocRefComparator externalDocRefComparator = new ExternalDocRefComparator();
static final int NAMESPACE_COL = 0;
static final String NAMESPACE_TEXT_TITLE = "External Document Namespace";
static final int NAMESPACE_COL_WIDTH = 80;
static final int CHECKSUM_COL = 1;
static final String CHECKSUM_TEXT_TITLE = "External Doc Checksum";
static final int CHECKSUM_COL_WIDTH = 55;
static final int FIRST_DOC_ID_COL = 2;
static final int DOC_ID_COL_WIDTH = 30;
/**
 * Create the comparison sheet handle.
 *
 * @param workbook workbook containing the sheet
 * @param sheetName name of the sheet within the workbook
 */
public ExternalReferencesSheet(Workbook workbook, String sheetName) {
    super(workbook, sheetName);
}
/* (non-Javadoc)
 * @see org.spdx.spdxspreadsheet.AbstractSheet#verify()
 */
@Override
public String verify() {
    // Comparison sheets are generated output; there is no user input to validate.
    return null; // nothing to verify
}
/**
 * Create (or re-create, removing any existing sheet of the same name) the
 * external references sheet with its header row and column styles. The
 * per-document column titles are filled in later by importCompareResults.
 *
 * @param wb workbook to create the sheet in
 * @param sheetName name of the sheet to create
 */
public static void create(Workbook wb, String sheetName) {
    int existingIndex = wb.getSheetIndex(sheetName);
    if (existingIndex >= 0) {
        wb.removeSheetAt(existingIndex);
    }
    Sheet sheet = wb.createSheet(sheetName);
    CellStyle headerStyle = AbstractSheet.createHeaderStyle(wb);
    CellStyle defaultStyle = AbstractSheet.createLeftWrapStyle(wb);
    Row headerRow = sheet.createRow(0);

    // Namespace column
    sheet.setColumnWidth(NAMESPACE_COL, NAMESPACE_COL_WIDTH * 256);
    sheet.setDefaultColumnStyle(NAMESPACE_COL, defaultStyle);
    Cell namespaceHeader = headerRow.createCell(NAMESPACE_COL);
    namespaceHeader.setCellStyle(headerStyle);
    namespaceHeader.setCellValue(NAMESPACE_TEXT_TITLE);

    // Checksum column
    sheet.setColumnWidth(CHECKSUM_COL, CHECKSUM_COL_WIDTH * 256);
    sheet.setDefaultColumnStyle(CHECKSUM_COL, defaultStyle);
    Cell checksumHeader = headerRow.createCell(CHECKSUM_COL);
    checksumHeader.setCellStyle(headerStyle);
    checksumHeader.setCellValue(CHECKSUM_TEXT_TITLE);

    // One column per compared document (titles set during import).
    for (int col = FIRST_DOC_ID_COL; col < MultiDocumentSpreadsheet.MAX_DOCUMENTS; col++) {
        sheet.setColumnWidth(col, DOC_ID_COL_WIDTH * 256);
        sheet.setDefaultColumnStyle(col, defaultStyle);
        headerRow.createCell(col).setCellStyle(headerStyle);
    }
}
/**
 * Fill the sheet from the comparison results: one row per distinct external
 * document reference merged across all documents, with each document's id
 * for that reference in its own column.
 *
 * @param comparer comparer holding the SPDX documents
 * @param docNames display names, one per compared document
 * @throws SpdxCompareException if docNames does not match the document count
 * @throws InvalidSPDXAnalysisException on errors reading the SPDX documents
 */
public void importCompareResults(SpdxComparer comparer, String[] docNames) throws SpdxCompareException, InvalidSPDXAnalysisException {
    if (comparer.getNumSpdxDocs() != docNames.length) {
        throw(new SpdxCompareException("Number of document names does not match the number of SPDX documents"));
    }
    this.clear();
    Row header = sheet.getRow(0);
    int[] refIndexes = new int[comparer.getNumSpdxDocs()];
    ExternalDocumentRef[][] externalRefs = new ExternalDocumentRef[comparer.getNumSpdxDocs()][];
    for (int i = 0; i < externalRefs.length; i++) {
        Cell headerCell = header.getCell(FIRST_DOC_ID_COL+i);
        headerCell.setCellValue(docNames[i]);
        // Sort each document's refs so they can be merged in a single pass.
        ExternalDocumentRef[] docExternalRefs = comparer.getSpdxDoc(i).getExternalDocumentRefs();
        Arrays.sort(docExternalRefs, externalDocRefComparator);
        externalRefs[i] = docExternalRefs;
        refIndexes[i] = 0;
    }
    // k-way merge over the sorted arrays: each iteration emits one row for
    // the next reference and advances every document holding a match.
    while (!allExternalRefsExhausted(externalRefs, refIndexes)) {
        Row currentRow = this.addRow();
        ExternalDocumentRef nextRef = getNextExternalRef(externalRefs, refIndexes);
        Cell namespaceCell = currentRow.createCell(NAMESPACE_COL);
        namespaceCell.setCellValue(nextRef.getSpdxDocumentNamespace());
        Cell checksumCell = currentRow.createCell(CHECKSUM_COL);
        checksumCell.setCellValue(CompareHelper.checksumToString(nextRef.getChecksum()));
        for (int i = 0; i < externalRefs.length; i++) {
            if (externalRefs[i].length > refIndexes[i]) {
                ExternalDocumentRef compareRef = externalRefs[i][refIndexes[i]];
                // Same namespace and (null-tolerant) same checksum: record
                // this document's id in its column and consume the ref.
                if (Objects.equal(nextRef.getSpdxDocumentNamespace(),
                        compareRef.getSpdxDocumentNamespace()) &&
                        nextRef.equivalentConsideringNull(nextRef.getChecksum(),
                                compareRef.getChecksum())) {
                    Cell docIdCell = currentRow.createCell(FIRST_DOC_ID_COL+i);
                    docIdCell.setCellValue(externalRefs[i][refIndexes[i]].getExternalDocumentId());
                    refIndexes[i]++;
                }
            }
        }
    }
}
/**
 * Finds the smallest (per {@code externalDocRefComparator}) external
 * document reference among the not-yet-consumed heads of the per-document
 * reference lists.
 *
 * @param externalRefs per-document sorted reference arrays
 * @param refIndexes per-document cursors into {@code externalRefs}
 * @return the minimum pending reference, or null if every list is exhausted
 */
private ExternalDocumentRef getNextExternalRef(
		ExternalDocumentRef[][] externalRefs, int[] refIndexes) {
	ExternalDocumentRef smallest = null;
	for (int docIdx = 0; docIdx < externalRefs.length; docIdx++) {
		if (refIndexes[docIdx] >= externalRefs[docIdx].length) {
			continue;	// this document's references are exhausted
		}
		ExternalDocumentRef head = externalRefs[docIdx][refIndexes[docIdx]];
		if (smallest == null || this.externalDocRefComparator.compare(smallest, head) > 0) {
			smallest = head;
		}
	}
	return smallest;
}
/**
 * @param externalRefs per-document sorted reference arrays
 * @param refIndexes per-document cursors
 * @return true when every document's cursor has reached the end of its list
 */
private boolean allExternalRefsExhausted(
		ExternalDocumentRef[][] externalRefs, int[] refIndexes) {
	boolean exhausted = true;
	for (int docIdx = 0; exhausted && docIdx < externalRefs.length; docIdx++) {
		exhausted = refIndexes[docIdx] >= externalRefs[docIdx].length;
	}
	return exhausted;
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInsight.completion;
import com.intellij.application.options.editor.XmlEditorOptions;
import com.intellij.codeInsight.TailType;
import com.intellij.codeInsight.editorActions.XmlTagNameSynchronizer;
import com.intellij.codeInsight.lookup.Lookup;
import com.intellij.codeInsight.lookup.LookupElement;
import com.intellij.codeInsight.lookup.LookupItem;
import com.intellij.codeInsight.template.Template;
import com.intellij.codeInsight.template.TemplateEditingAdapter;
import com.intellij.codeInsight.template.TemplateManager;
import com.intellij.codeInsight.template.impl.MacroCallNode;
import com.intellij.codeInsight.template.macro.CompleteMacro;
import com.intellij.codeInsight.template.macro.CompleteSmartMacro;
import com.intellij.codeInspection.InspectionProfile;
import com.intellij.codeInspection.htmlInspections.XmlEntitiesInspection;
import com.intellij.lang.ASTNode;
import com.intellij.openapi.command.WriteCommandAction;
import com.intellij.openapi.command.undo.UndoManager;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.Editor;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.ScrollType;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.profile.codeInspection.InspectionProjectProfileManager;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiFile;
import com.intellij.psi.html.HtmlTag;
import com.intellij.psi.impl.source.tree.injected.InjectedLanguageUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.xml.XmlTag;
import com.intellij.psi.xml.XmlTokenType;
import com.intellij.xml.*;
import com.intellij.xml.actions.GenerateXmlTagAction;
import com.intellij.xml.impl.schema.XmlElementDescriptorImpl;
import com.intellij.xml.util.HtmlUtil;
import com.intellij.xml.util.XmlUtil;
import consulo.ui.annotation.RequiredUIAccess;
import javax.annotation.Nullable;
import java.util.*;
/**
 * Insert handler invoked when an XML/HTML tag name is chosen from the code
 * completion lookup.  Completes the chosen tag: closes it, inserts required
 * attributes and required sub-tags through a live template, and positions
 * the caret appropriately for the completion character used.
 */
public class XmlTagInsertHandler implements InsertHandler<LookupElement>
{
	public static final XmlTagInsertHandler INSTANCE = new XmlTagInsertHandler();

	@Override
	@RequiredUIAccess
	public void handleInsert(InsertionContext context, LookupElement item)
	{
		Project project = context.getProject();
		Editor editor = context.getEditor();
		Document document = InjectedLanguageUtil.getTopLevelEditor(editor).getDocument();
		int startOffset = context.getStartOffset();
		Ref<PsiElement> currentElementRef = Ref.create();
		PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());
		// Need to insert " " to prevent creating tags like <tagThis is my text
		XmlTagNameSynchronizer.runWithoutCancellingSyncTagsEditing(document, () -> {
			final int offset = editor.getCaretModel().getOffset();
			editor.getDocument().insertString(offset, " ");
			PsiDocumentManager.getInstance(project).commitDocument(editor.getDocument());
			currentElementRef.set(context.getFile().findElementAt(startOffset));
			// Remove the temporary space again; the PSI element was captured above
			editor.getDocument().deleteString(offset, offset + 1);
		});
		final XmlTag tag = PsiTreeUtil.getContextOfType(currentElementRef.get(), XmlTag.class, true);
		if(tag == null)
		{
			return;
		}
		if(context.getCompletionChar() != Lookup.COMPLETE_STATEMENT_SELECT_CHAR)
		{
			context.setAddCompletionChar(false);
		}
		final XmlElementDescriptor descriptor = tag.getDescriptor();
		// Tag has neither '>' nor '/>' yet: it is incomplete and needs closing
		if(XmlUtil.getTokenOfType(tag, XmlTokenType.XML_TAG_END) == null && XmlUtil.getTokenOfType(tag, XmlTokenType.XML_EMPTY_ELEMENT_END) == null)
		{
			if(descriptor != null)
			{
				insertIncompleteTag(context.getCompletionChar(), editor, tag);
			}
		}
		else if(context.getCompletionChar() == Lookup.REPLACE_SELECT_CHAR)
		{
			// Replacing an existing tag name: rename the matching end tag too
			PsiDocumentManager.getInstance(project).commitAllDocuments();
			int caretOffset = editor.getCaretModel().getOffset();
			PsiElement otherTag = PsiTreeUtil.getParentOfType(context.getFile().findElementAt(caretOffset), XmlTag.class);
			PsiElement endTagStart = XmlUtil.getTokenOfType(otherTag, XmlTokenType.XML_END_TAG_START);
			if(endTagStart != null)
			{
				PsiElement sibling = endTagStart.getNextSibling();
				assert sibling != null;
				ASTNode node = sibling.getNode();
				assert node != null;
				if(node.getElementType() == XmlTokenType.XML_NAME)
				{
					int sOffset = sibling.getTextRange().getStartOffset();
					int eOffset = sibling.getTextRange().getEndOffset();
					editor.getDocument().deleteString(sOffset, eOffset);
					editor.getDocument().insertString(sOffset, ((XmlTag) otherTag).getName());
				}
			}
			editor.getCaretModel().moveToOffset(caretOffset + 1);
			editor.getScrollingModel().scrollToCaret(ScrollType.RELATIVE);
			editor.getSelectionModel().removeSelection();
		}
		// A live template started by insertIncompleteTag owns further typing;
		// don't process the tail in that case
		if(context.getCompletionChar() == ' ' && TemplateManager.getInstance(project).getActiveTemplate(editor) != null)
		{
			return;
		}
		final TailType tailType = LookupItem.handleCompletionChar(editor, item, context.getCompletionChar());
		tailType.processTail(editor, editor.getCaretModel().getOffset());
	}

	/**
	 * Completes a tag that has no closing '>' yet by building and starting a
	 * live template containing the tag tail, required attributes and required
	 * sub-tags.
	 *
	 * @param completionChar the character used to select the completion item
	 * @param editor the editor the completion happened in
	 * @param tag the (incomplete) tag to finish
	 */
	public static void insertIncompleteTag(char completionChar, final Editor editor, XmlTag tag)
	{
		XmlElementDescriptor descriptor = tag.getDescriptor();
		final Project project = editor.getProject();
		TemplateManager templateManager = TemplateManager.getInstance(project);
		Template template = templateManager.createTemplate("", "");
		template.setToIndent(true);
		// temp code
		PsiFile containingFile = tag.getContainingFile();
		boolean htmlCode = HtmlUtil.hasHtml(containingFile) || HtmlUtil.supportsXmlTypedHandlers(containingFile);
		template.setToReformat(!htmlCode);
		StringBuilder indirectRequiredAttrs = addRequiredAttributes(descriptor, tag, template, containingFile);
		// chooseAttributeName == true means the template ends inside an
		// attribute (name="") and the '>' is inserted later by the listener
		final boolean chooseAttributeName = addTail(completionChar, descriptor, htmlCode, tag, template, indirectRequiredAttrs);
		templateManager.startTemplate(editor, template, new TemplateEditingAdapter()
		{
			// Marks the attribute-value region ("=\"\"") so it can be replaced
			// with '>' if the template is cancelled or abandoned
			private RangeMarker myAttrValueMarker;
			@Override
			public void waitingForInput(Template template)
			{
				int offset = editor.getCaretModel().getOffset();
				myAttrValueMarker = editor.getDocument().createRangeMarker(offset + 1, offset + 4);
			}
			@Override
			public void templateFinished(final Template template, boolean brokenOff)
			{
				final int offset = editor.getCaretModel().getOffset();
				if(chooseAttributeName && offset > 0)
				{
					char c = editor.getDocument().getCharsSequence().charAt(offset - 1);
					if(c == '/' || (c == ' ' && brokenOff))
					{
						// No attribute was picked: drop the dangling ="" and close the tag
						new WriteCommandAction.Simple(project)
						{
							@Override
							protected void run() throws Throwable
							{
								editor.getDocument().replaceString(offset, offset + 3, ">");
							}
						}.execute();
					}
				}
			}
			@Override
			public void templateCancelled(final Template template)
			{
				if(myAttrValueMarker == null)
				{
					return;
				}
				final UndoManager manager = UndoManager.getInstance(project);
				if(manager.isUndoInProgress() || manager.isRedoInProgress())
				{
					// An undo/redo will restore the document itself; don't interfere
					return;
				}
				if(chooseAttributeName && myAttrValueMarker.isValid())
				{
					final int startOffset = myAttrValueMarker.getStartOffset();
					final int endOffset = myAttrValueMarker.getEndOffset();
					new WriteCommandAction.Simple(project)
					{
						@Override
						protected void run() throws Throwable
						{
							editor.getDocument().replaceString(startOffset, endOffset, ">");
						}
					}.execute();
				}
			}
		});
	}

	/**
	 * Adds template segments for the descriptor's required attributes that the
	 * tag does not already have.  Attributes expressed through "indirect
	 * syntax" (e.g. jsp:attribute sub-elements) are collected into the
	 * returned buffer instead of being added inline.
	 *
	 * @return the collected indirect-attribute markup, or null if none
	 */
	@Nullable
	private static StringBuilder addRequiredAttributes(XmlElementDescriptor descriptor, @Nullable XmlTag tag, Template template, PsiFile containingFile)
	{
		boolean htmlCode = HtmlUtil.hasHtml(containingFile) || HtmlUtil.supportsXmlTypedHandlers(containingFile);
		Set<String> notRequiredAttributes = Collections.emptySet();
		if(tag instanceof HtmlTag)
		{
			// The "required attributes" HTML inspection lets users list
			// attributes they do not want auto-inserted
			final InspectionProfile profile = InspectionProjectProfileManager.getInstance(tag.getProject()).getInspectionProfile();
			XmlEntitiesInspection inspection = (XmlEntitiesInspection) profile.getUnwrappedTool(XmlEntitiesInspection.REQUIRED_ATTRIBUTES_SHORT_NAME, tag);
			if(inspection != null)
			{
				StringTokenizer tokenizer = new StringTokenizer(inspection.getAdditionalEntries());
				notRequiredAttributes = new HashSet<String>();
				while(tokenizer.hasMoreElements())
				{
					notRequiredAttributes.add(tokenizer.nextToken());
				}
			}
		}
		XmlAttributeDescriptor[] attributes = descriptor.getAttributesDescriptors(tag);
		StringBuilder indirectRequiredAttrs = null;
		if(XmlEditorOptions.getInstance().isAutomaticallyInsertRequiredAttributes())
		{
			final XmlExtension extension = XmlExtension.getExtension(containingFile);
			for(XmlAttributeDescriptor attributeDecl : attributes)
			{
				String attributeName = attributeDecl.getName(tag);
				if(attributeDecl.isRequired() && (tag == null || tag.getAttributeValue(attributeName) == null))
				{
					if(!notRequiredAttributes.contains(attributeName))
					{
						if(!extension.isIndirectSyntax(attributeDecl))
						{
							// attr="<completion popup>"
							template.addTextSegment(" " + attributeName + "=\"");
							template.addVariable(new MacroCallNode(new CompleteMacro()), true);
							template.addTextSegment("\"");
						}
						else
						{
							if(indirectRequiredAttrs == null)
							{
								indirectRequiredAttrs = new StringBuilder();
							}
							indirectRequiredAttrs.append("\n<jsp:attribute name=\"").append(attributeName).append("\"></jsp:attribute>\n");
						}
					}
				}
				else if(attributeDecl.isRequired() && attributeDecl.isFixed() && attributeDecl.getDefaultValue() != null && !htmlCode)
				{
					// Fixed-value required attribute: insert its mandated value directly
					template.addTextSegment(" " + attributeName + "=\"" + attributeDecl.getDefaultValue() + "\"");
				}
			}
		}
		return indirectRequiredAttrs;
	}

	/**
	 * Appends the tag tail ('>', '/>', closing tag, attribute stub, ...) to
	 * the template, depending on the completion character.
	 *
	 * @return true if the template ends inside an attribute-name position so
	 * the caller's template listener must finish the tag later
	 */
	protected static boolean addTail(char completionChar, XmlElementDescriptor descriptor, boolean isHtmlCode, XmlTag tag, Template template, StringBuilder indirectRequiredAttrs)
	{
		if(completionChar == '>' || (completionChar == '/' && indirectRequiredAttrs != null))
		{
			template.addTextSegment(">");
			boolean toInsertCDataEnd = false;
			if(descriptor instanceof XmlElementDescriptorWithCDataContent)
			{
				final XmlElementDescriptorWithCDataContent cDataContainer = (XmlElementDescriptorWithCDataContent) descriptor;
				if(cDataContainer.requiresCdataBracesInContext(tag))
				{
					template.addTextSegment("<![CDATA[\n");
					toInsertCDataEnd = true;
				}
			}
			if(indirectRequiredAttrs != null)
			{
				template.addTextSegment(indirectRequiredAttrs.toString());
			}
			template.addEndVariable();
			if(toInsertCDataEnd)
			{
				template.addTextSegment("\n]]>");
			}
			if((!(tag instanceof HtmlTag) || !HtmlUtil.isSingleHtmlTag(tag.getName())) && tag.getAttributes().length == 0)
			{
				if(XmlEditorOptions.getInstance().isAutomaticallyInsertClosingTag())
				{
					final String name = descriptor.getName(tag);
					if(name != null)
					{
						template.addTextSegment("</");
						template.addTextSegment(name);
						template.addTextSegment(">");
					}
				}
			}
		}
		else if(completionChar == '/')
		{
			template.addTextSegment("/>");
		}
		else if(completionChar == ' ' && template.getSegmentsCount() == 0)
		{
			// Space after the tag name: offer attribute-name completion
			if(XmlEditorOptions.getInstance().isAutomaticallyStartAttribute() && (descriptor.getAttributesDescriptors(tag).length > 0 || isTagFromHtml(tag) && !HtmlUtil.isTagWithoutAttributes(tag.getName())))
			{
				completeAttribute(template);
				return true;
			}
		}
		else if(completionChar == Lookup.AUTO_INSERT_SELECT_CHAR || completionChar == Lookup.NORMAL_SELECT_CHAR || completionChar == Lookup.REPLACE_SELECT_CHAR)
		{
			if(XmlEditorOptions.getInstance().isAutomaticallyInsertClosingTag() && isHtmlCode && HtmlUtil.isSingleHtmlTag(tag.getName()))
			{
				// Void/single HTML tags need no separate closing tag
				template.addTextSegment(HtmlUtil.isHtmlTag(tag) ? ">" : "/>");
			}
			else
			{
				if(needAlLeastOneAttribute(tag) && XmlEditorOptions.getInstance().isAutomaticallyStartAttribute() && tag.getAttributes().length == 0 && template.getSegmentsCount() == 0)
				{
					completeAttribute(template);
					return true;
				}
				else
				{
					completeTagTail(template, descriptor, tag.getContainingFile(), tag, true);
				}
			}
		}
		return false;
	}

	// Appends an attribute stub: " <name-completion>=\"<caret>\""
	private static void completeAttribute(Template template)
	{
		template.addTextSegment(" ");
		template.addVariable(new MacroCallNode(new CompleteMacro()), true);
		template.addTextSegment("=\"");
		template.addEndVariable();
		template.addTextSegment("\"");
	}

	// NOTE(review): method name contains a typo ("AlLeast" should be "AtLeast");
	// kept as-is since renaming is out of scope for a documentation pass.
	private static boolean needAlLeastOneAttribute(XmlTag tag)
	{
		for(XmlTagRuleProvider ruleProvider : XmlTagRuleProvider.EP_NAME.getExtensions())
		{
			for(XmlTagRuleProvider.Rule rule : ruleProvider.getTagRule(tag))
			{
				if(rule.needAtLeastOneAttribute(tag))
				{
					return true;
				}
			}
		}
		return false;
	}

	/**
	 * Appends the descriptor's required sub-tags (recursively, via
	 * completeTagTail) to the template.
	 *
	 * @return true if at least one required sub-tag was added
	 */
	private static boolean addRequiredSubTags(Template template, XmlElementDescriptor descriptor, PsiFile file, XmlTag context)
	{
		if(!XmlEditorOptions.getInstance().isAutomaticallyInsertRequiredSubTags())
		{
			return false;
		}
		List<XmlElementDescriptor> requiredSubTags = GenerateXmlTagAction.getRequiredSubTags(descriptor);
		if(!requiredSubTags.isEmpty())
		{
			template.addTextSegment(">");
			template.setToReformat(true);
		}
		for(XmlElementDescriptor subTag : requiredSubTags)
		{
			if(subTag == null)
			{ // placeholder for smart completion
				template.addTextSegment("<");
				template.addVariable(new MacroCallNode(new CompleteSmartMacro()), true);
				continue;
			}
			String qname = subTag.getName();
			if(subTag instanceof XmlElementDescriptorImpl)
			{
				// Qualify the sub-tag with the namespace prefix known in this context
				String prefixByNamespace = context.getPrefixByNamespace(((XmlElementDescriptorImpl) subTag).getNamespace());
				if(StringUtil.isNotEmpty(prefixByNamespace))
				{
					qname = prefixByNamespace + ":" + subTag.getName();
				}
			}
			template.addTextSegment("<" + qname);
			addRequiredAttributes(subTag, null, template, file);
			completeTagTail(template, subTag, file, context, false);
		}
		if(!requiredSubTags.isEmpty())
		{
			addTagEnd(template, descriptor, context);
		}
		return !requiredSubTags.isEmpty();
	}

	/**
	 * Closes the tag in the template according to the descriptor's declared
	 * content type (empty, mixed, element-only, unknown).
	 */
	private static void completeTagTail(Template template, XmlElementDescriptor descriptor, PsiFile file, XmlTag context, boolean firstLevel)
	{
		// On the first level, a tag with attributes is left open so the user
		// can fill the attributes in before the tag is closed
		boolean completeIt = !firstLevel || descriptor.getAttributesDescriptors(null).length == 0;
		switch(descriptor.getContentType())
		{
			case XmlElementDescriptor.CONTENT_TYPE_UNKNOWN:
				return;
			case XmlElementDescriptor.CONTENT_TYPE_EMPTY:
				if(completeIt)
				{
					template.addTextSegment("/>");
				}
				break;
			case XmlElementDescriptor.CONTENT_TYPE_MIXED:
				if(completeIt)
				{
					template.addTextSegment(">");
					if(firstLevel)
					{
						template.addEndVariable();
					}
					else
					{
						template.addVariable(new MacroCallNode(new CompleteMacro()), true);
					}
					addTagEnd(template, descriptor, context);
				}
				break;
			default:
				// Element content: insert required sub-tags if configured,
				// otherwise just close the tag around the caret
				if(!addRequiredSubTags(template, descriptor, file, context))
				{
					if(completeIt)
					{
						template.addTextSegment(">");
						template.addEndVariable();
						addTagEnd(template, descriptor, context);
					}
				}
				break;
		}
	}

	// Appends the matching closing tag for the descriptor
	private static void addTagEnd(Template template, XmlElementDescriptor descriptor, XmlTag context)
	{
		template.addTextSegment("</" + descriptor.getName(context) + ">");
	}

	// True if the tag belongs to the (X)HTML namespace
	private static boolean isTagFromHtml(final XmlTag tag)
	{
		final String ns = tag.getNamespace();
		return XmlUtil.XHTML_URI.equals(ns) || XmlUtil.HTML_URI.equals(ns);
	}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.util;
import java.lang.annotation.Annotation;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import org.apache.nifi.logging.ComponentLog;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils;
public class ReflectionUtils {

    private final static Logger LOG = LoggerFactory.getLogger(ReflectionUtils.class);

    /**
     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
     * ignored.
     *
     * @param annotation annotation
     * @param instance instance
     * @param args args
     * @throws InvocationTargetException ex
     * @throws IllegalArgumentException ex
     * @throws IllegalAccessException ex
     */
    public static void invokeMethodsWithAnnotation(
            final Class<? extends Annotation> annotation, final Object instance, final Object... args) throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        invokeMethodsWithAnnotations(annotation, null, instance, args);
    }

    /**
     * Invokes all methods on the given instance that have been annotated with the given preferredAnnotation and if no such method exists will invoke all methods on the given instance that have been
     * annotated with the given alternateAnnotation, if any exists. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters, those parameters must be
     * specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be ignored.
     *
     * @param preferredAnnotation preferred
     * @param alternateAnnotation alternate
     * @param instance instance
     * @param args args
     * @throws InvocationTargetException ex
     * @throws IllegalArgumentException ex
     * @throws IllegalAccessException ex
     */
    @SuppressWarnings("unchecked")
    public static void invokeMethodsWithAnnotations(final Class<? extends Annotation> preferredAnnotation,
            final Class<? extends Annotation> alternateAnnotation, final Object instance, final Object... args)
            throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        Class<? extends Annotation>[] annotationArray = (Class<? extends Annotation>[]) (alternateAnnotation != null
                ? new Class<?>[] { preferredAnnotation, alternateAnnotation } : new Class<?>[] { preferredAnnotation });
        invokeMethodsWithAnnotations(false, null, instance, annotationArray, args);
    }

    /**
     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
     * ignored.
     *
     * @param annotation annotation
     * @param instance instance
     * @param args args
     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
     * invoked; if <code>false</code> is returned, an error will have been logged.
     */
    public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation, final Object instance, final Object... args) {
        return quietlyInvokeMethodsWithAnnotations(annotation, null, instance, null, args);
    }

    /**
     * Invokes all methods on the given instance that have been annotated with the given Annotation. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters,
     * those parameters must be specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be
     * ignored.
     *
     * @param annotation annotation
     * @param instance instance
     * @param logger logger
     * @param args args
     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
     * invoked; if <code>false</code> is returned, an error will have been logged.
     */
    public static boolean quietlyInvokeMethodsWithAnnotation(final Class<? extends Annotation> annotation,
            final Object instance, final ComponentLog logger, final Object... args) {
        return quietlyInvokeMethodsWithAnnotations(annotation, null, instance, logger, args);
    }

    /**
     * Invokes all methods on the given instance that have been annotated with the given preferredAnnotation and if no such method exists will invoke all methods on the given instance that have been
     * annotated with the given alternateAnnotation, if any exists. If the signature of the method that is defined in <code>instance</code> uses 1 or more parameters, those parameters must be
     * specified by the <code>args</code> parameter. However, if more arguments are supplied by the <code>args</code> parameter than needed, the extra arguments will be ignored.
     *
     * @param preferredAnnotation preferred
     * @param alternateAnnotation alternate
     * @param instance instance
     * @param args args
     * @return <code>true</code> if all appropriate methods were invoked and returned without throwing an Exception, <code>false</code> if one of the methods threw an Exception or could not be
     * invoked; if <code>false</code> is returned, an error will have been logged.
     */
    public static boolean quietlyInvokeMethodsWithAnnotations(final Class<? extends Annotation> preferredAnnotation,
            final Class<? extends Annotation> alternateAnnotation, final Object instance, final Object... args) {
        return quietlyInvokeMethodsWithAnnotations(preferredAnnotation, alternateAnnotation, instance, null, args);
    }

    // Convenience overload that starts reflection from the instance's concrete class.
    private static boolean invokeMethodsWithAnnotations(boolean quietly, ComponentLog logger, Object instance,
            Class<? extends Annotation>[] annotations, Object... args)
            throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        return invokeMethodsWithAnnotations(quietly, logger, instance, instance.getClass(), annotations, args);
    }

    /**
     * Core implementation: finds every public method of {@code clazz} carrying any of
     * the given annotations and invokes it on {@code instance} with a (possibly
     * truncated) copy of {@code args}.
     *
     * @param quietly if true, errors are logged instead of propagated
     * @return true only if every matching method was invoked successfully
     */
    private static boolean invokeMethodsWithAnnotations(boolean quietly, ComponentLog logger, Object instance,
            Class<?> clazz, Class<? extends Annotation>[] annotations, Object... args)
            throws IllegalAccessException, IllegalArgumentException, InvocationTargetException {
        boolean isSuccess = true;
        for (Method method : clazz.getMethods()) {
            if (isAnyAnnotationPresent(method, annotations)) {
                Object[] modifiedArgs = buildUpdatedArgumentsList(quietly, method, annotations, logger, args);
                if (modifiedArgs == null) {
                    // Incompatible arguments (already logged by buildUpdatedArgumentsList).
                    // The method could not be invoked, so report failure per this
                    // class' documented contract instead of silently returning true.
                    isSuccess = false;
                    continue;
                }
                try {
                    method.invoke(instance, modifiedArgs);
                } catch (IllegalAccessException | IllegalArgumentException | InvocationTargetException e) {
                    isSuccess = false;
                    if (quietly) {
                        logErrorMessage("Failed while invoking annotated method '" + method + "' with arguments '"
                                + Arrays.asList(modifiedArgs) + "'.", logger, e);
                    } else {
                        throw e;
                    }
                }
            }
        }
        return isSuccess;
    }

    // True if the method carries (directly or as a meta-annotation, per Spring's
    // AnnotationUtils) any of the given annotations.
    private static boolean isAnyAnnotationPresent(Method method, Class<? extends Annotation>[] annotations) {
        for (Class<? extends Annotation> annotation : annotations) {
            if (AnnotationUtils.findAnnotation(method, annotation) != null) {
                return true;
            }
        }
        return false;
    }

    /**
     * Builds the argument list actually passed to {@code method}: a prefix of
     * {@code args} truncated to the method's parameter count.  Each retained
     * argument is checked for assignability; a {@code null} argument is accepted
     * for any non-primitive parameter (the previous implementation called
     * {@code args[i].getClass()} unconditionally and threw a NullPointerException
     * for null arguments).
     *
     * @return the trimmed argument array, or null if the arguments are
     * incompatible and {@code quietly} is set (an error will have been logged)
     */
    private static Object[] buildUpdatedArgumentsList(boolean quietly, Method method, Class<?>[] annotations, ComponentLog processLogger, Object... args) {
        boolean parametersCompatible = true;
        int argsCount = 0;
        Class<?>[] paramTypes = method.getParameterTypes();
        for (int i = 0; parametersCompatible && i < paramTypes.length && i < args.length; i++) {
            final Class<?> argType = args[i] == null ? null : args[i].getClass();
            final boolean assignable = argType == null ? !paramTypes[i].isPrimitive() : paramTypes[i].isAssignableFrom(argType);
            if (assignable) {
                argsCount++;
            } else {
                logErrorMessage("Can not invoke method '" + method + "' with provided arguments since argument " + i + " of type '" + paramTypes[i]
                        + "' is not assignable from provided value of type '" + argType + "'.", processLogger, null);
                if (quietly) {
                    parametersCompatible = false;
                } else {
                    // Keep the incompatible argument so that Method.invoke(...) raises
                    // IllegalArgumentException for the caller to handle.
                    argsCount++;
                }
            }
        }
        Object[] updatedArguments = null;
        if (parametersCompatible) {
            updatedArguments = Arrays.copyOf(args, argsCount);
        }
        return updatedArguments;
    }

    // Routes error output to the supplied ComponentLog when present (so bulletins
    // are generated), otherwise falls back to this class' own SLF4J logger.
    private static void logErrorMessage(String message, ComponentLog processLogger, Exception e) {
        if (processLogger != null) {
            if (e != null) {
                processLogger.error(message, e);
            } else {
                processLogger.error(message);
            }
        } else {
            if (e != null) {
                LOG.error(message, e);
            } else {
                LOG.error(message);
            }
        }
    }

    /**
     * Invokes all methods on the given instance that have been annotated with
     * the given preferredAnnotation and if no such method exists will invoke
     * all methods on the given instance that have been annotated with the given
     * alternateAnnotation, if any exists. If the signature of the method that
     * is defined in <code>instance</code> uses 1 or more parameters, those
     * parameters must be specified by the <code>args</code> parameter. However,
     * if more arguments are supplied by the <code>args</code> parameter than
     * needed, the extra arguments will be ignored.
     *
     * @param preferredAnnotation preferred
     * @param alternateAnnotation alternate
     * @param instance instance
     * @param logger the ComponentLog to use for logging any errors. If null, will
     * use own logger, but that will not generate bulletins or easily
     * tie to the Processor's log messages.
     * @param args args
     * @return <code>true</code> if all appropriate methods were invoked and
     * returned without throwing an Exception, <code>false</code> if one
     * of the methods threw an Exception or could not be invoked; if
     * <code>false</code> is returned, an error will have been logged.
     */
    @SuppressWarnings("unchecked")
    public static boolean quietlyInvokeMethodsWithAnnotations(final Class<? extends Annotation> preferredAnnotation,
            final Class<? extends Annotation> alternateAnnotation, final Object instance, final ComponentLog logger,
            final Object... args) {
        Class<? extends Annotation>[] annotationArray = (Class<? extends Annotation>[]) (alternateAnnotation != null
                ? new Class<?>[] { preferredAnnotation, alternateAnnotation } : new Class<?>[] { preferredAnnotation });
        try {
            return invokeMethodsWithAnnotations(true, logger, instance, annotationArray, args);
        } catch (Exception e) {
            LOG.error("Failed while attempting to invoke methods with '" + Arrays.asList(annotationArray) + "' annotations", e);
            return false;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.plugins.version;
import java.util.ArrayList;
import java.util.Calendar;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import javax.jcr.RepositoryException;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.UUIDUtils;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.memory.PropertyBuilder;
import org.apache.jackrabbit.oak.plugins.nodetype.ReadOnlyNodeTypeManager;
import org.apache.jackrabbit.oak.plugins.nodetype.TypePredicate;
import org.apache.jackrabbit.oak.plugins.tree.factories.RootFactory;
import org.apache.jackrabbit.oak.plugins.tree.factories.TreeFactory;
import org.apache.jackrabbit.oak.spi.state.ChildNodeEntry;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
import org.apache.jackrabbit.oak.spi.state.NodeState;
import org.apache.jackrabbit.util.ISO8601;
import org.apache.jackrabbit.util.Text;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static com.google.common.base.Preconditions.checkState;
import static org.apache.jackrabbit.JcrConstants.JCR_BASEVERSION;
import static org.apache.jackrabbit.JcrConstants.JCR_CREATED;
import static org.apache.jackrabbit.JcrConstants.JCR_ISCHECKEDOUT;
import static org.apache.jackrabbit.JcrConstants.JCR_PREDECESSORS;
import static org.apache.jackrabbit.JcrConstants.JCR_PRIMARYTYPE;
import static org.apache.jackrabbit.JcrConstants.JCR_ROOTVERSION;
import static org.apache.jackrabbit.JcrConstants.JCR_SUCCESSORS;
import static org.apache.jackrabbit.JcrConstants.JCR_UUID;
import static org.apache.jackrabbit.JcrConstants.JCR_VERSIONABLEUUID;
import static org.apache.jackrabbit.JcrConstants.JCR_VERSIONHISTORY;
import static org.apache.jackrabbit.JcrConstants.JCR_VERSIONLABELS;
import static org.apache.jackrabbit.JcrConstants.NT_VERSION;
import static org.apache.jackrabbit.JcrConstants.NT_VERSIONHISTORY;
import static org.apache.jackrabbit.JcrConstants.NT_VERSIONLABELS;
import static org.apache.jackrabbit.oak.plugins.version.Utils.uuidFromNode;
import static org.apache.jackrabbit.oak.spi.version.VersionConstants.JCR_COPIED_FROM;
import static org.apache.jackrabbit.oak.spi.version.VersionConstants.REP_VERSIONSTORAGE;
import static org.apache.jackrabbit.oak.spi.version.VersionConstants.VERSION_STORE_PATH;
/**
* Extends the {@link ReadOnlyVersionManager} with methods to modify the
* version store.
*/
public class ReadWriteVersionManager extends ReadOnlyVersionManager {
private final NodeBuilder versionStorageNode;
private final NodeBuilder workspaceRoot;
private final TypePredicate isVersion;
private ReadOnlyNodeTypeManager ntMgr;
public ReadWriteVersionManager(NodeBuilder versionStorageNode,
NodeBuilder workspaceRoot) {
this.versionStorageNode = checkNotNull(versionStorageNode);
this.workspaceRoot = checkNotNull(workspaceRoot);
this.isVersion = new TypePredicate(workspaceRoot.getNodeState(), NT_VERSION);
}
@Nonnull
@Override
protected Tree getVersionStorage() {
return TreeFactory.createReadOnlyTree(versionStorageNode.getNodeState());
}
@Nonnull
@Override
protected Root getWorkspaceRoot() {
return RootFactory.createReadOnlyRoot(workspaceRoot.getNodeState());
}
@Nonnull
@Override
protected ReadOnlyNodeTypeManager getNodeTypeManager() {
if (ntMgr == null) {
ntMgr = ReadOnlyNodeTypeManager.getInstance(
getWorkspaceRoot(), NamePathMapper.DEFAULT);
}
return ntMgr;
}
/**
* Gets or creates the version history for the given
* {@code versionable} node.
*
* @param versionable the versionable node.
* @param infoMap The additional information as provided by {@link org.apache.jackrabbit.oak.spi.commit.CommitInfo#getInfo()}
* @return the version history node.
* @throws IllegalArgumentException if the given node does not have a
* {@code jcr:uuid} property.
*/
@Nonnull
public NodeBuilder getOrCreateVersionHistory(@Nonnull NodeBuilder versionable, @Nonnull Map<String, Object> infoMap)
throws CommitFailedException {
checkNotNull(versionable);
String vUUID = uuidFromNode(versionable);
String relPath = getVersionHistoryPath(vUUID);
NodeBuilder node = versionStorageNode;
for (Iterator<String> it = PathUtils.elements(relPath).iterator(); it.hasNext(); ) {
String name = it.next();
node = node.child(name);
if (!node.hasProperty(JCR_PRIMARYTYPE)) {
String nt;
if (it.hasNext()) {
nt = REP_VERSIONSTORAGE;
} else {
// last path element denotes nt:versionHistory node
nt = NT_VERSIONHISTORY;
}
node.setProperty(JCR_PRIMARYTYPE, nt, Type.NAME);
}
}
Object copiedFrom = infoMap.get(JCR_COPIED_FROM);
if (copiedFrom != null) {
node.setProperty(JCR_COPIED_FROM, copiedFrom.toString(), Type.WEAKREFERENCE);
}
// use jcr:rootVersion node to detect if we need to initialize the
// version history
if (!node.hasChildNode(JCR_ROOTVERSION)) {
// jcr:versionableUuid property
node.setProperty(JCR_VERSIONABLEUUID, vUUID, Type.STRING);
node.setProperty(JCR_UUID,
UUIDUtils.generateUUID(), Type.STRING);
// jcr:versionLabels child node
NodeBuilder vLabels = node.child(JCR_VERSIONLABELS);
vLabels.setProperty(JCR_PRIMARYTYPE, NT_VERSIONLABELS, Type.NAME);
// jcr:rootVersion child node
createVersion(node, versionable);
} else if (!versionable.hasProperty(JCR_VERSIONHISTORY)) {
// connect versionable node with existing history
connectHistory(node.getChildNode(JCR_ROOTVERSION),
uuidFromNode(node), versionable);
}
return node;
}
void removeVersion(String versionRelPath) throws CommitFailedException {
String historyRelPath = PathUtils.getAncestorPath(versionRelPath, 1);
String versionName = Text.getName(versionRelPath);
NodeBuilder vh = resolve(versionStorageNode, historyRelPath);
if (JCR_ROOTVERSION.equals(versionName)) {
String msg = "Removal of root version not allowed.";
throw new CommitFailedException(CommitFailedException.VERSION, VersionExceptionCode.ROOT_VERSION_REMOVAL.ordinal(), msg);
}
NodeBuilder versionNode = vh.getChildNode(versionName);
String versionId = versionNode.getProperty(JCR_UUID).getValue(Type.STRING);
// unregister from labels
for (String label : getVersionLabels(versionRelPath, versionId)) {
removeVersionLabel(historyRelPath, label);
}
// reconnected predecessors and successors of the version being removed
PropertyState successorIds = versionNode.getProperty(JCR_SUCCESSORS);
PropertyState predecessorIds = versionNode.getProperty(JCR_PREDECESSORS);
for (String succId : successorIds.getValue(Type.REFERENCES)) {
NodeBuilder successor = getVersionById(vh, succId);
PropertyBuilder<String> pb = PropertyBuilder.array(Type.REFERENCE);
pb.setName(JCR_PREDECESSORS).setValues(successor.getProperty(JCR_PREDECESSORS).getValue(Type.REFERENCES));
pb.removeValue(versionId);
pb.addValues(predecessorIds.getValue(Type.REFERENCES));
successor.setProperty(pb.getPropertyState());
}
for (String predId : predecessorIds.getValue(Type.REFERENCES)) {
NodeBuilder predecessor = getVersionById(vh, predId);
PropertyBuilder<String> pb = PropertyBuilder.array(Type.REFERENCE);
pb.setName(JCR_SUCCESSORS).setValues(predecessor.getProperty(JCR_SUCCESSORS).getValue(Type.REFERENCES));
pb.removeValue(versionId);
pb.addValues(successorIds.getValue(Type.REFERENCES));
predecessor.setProperty(pb.getPropertyState());
}
versionNode.remove();
}
public void checkout(NodeBuilder versionable) {
versionable.setProperty(JCR_ISCHECKEDOUT, true, Type.BOOLEAN);
PropertyState baseVersion = versionable.getProperty(JCR_BASEVERSION);
List<String> predecessors = Collections.singletonList(baseVersion.getValue(Type.REFERENCE));
versionable.setProperty(JCR_PREDECESSORS, predecessors, Type.REFERENCES);
}
public void checkin(@Nonnull NodeBuilder versionable)
throws CommitFailedException {
NodeBuilder history = getOrCreateVersionHistory(versionable,
Collections.<String, Object>emptyMap());
createVersion(history, versionable);
}
public void restore(@Nonnull NodeBuilder versionable,
@Nonnull String versionUUID,
@Nullable VersionSelector selector)
throws CommitFailedException {
String versionPath = getIdentifierManager().getPath(versionUUID);
NodeBuilder history = getOrCreateVersionHistory(versionable,
Collections.<String, Object>emptyMap());
NodeBuilder version = null;
if (versionPath != null) {
String versionName = PathUtils.getName(versionPath);
if (history.hasChildNode(versionName)) {
version = history.getChildNode(versionName);
}
}
if (version == null) {
throw new CommitFailedException(CommitFailedException.VERSION,
VersionExceptionCode.NO_SUCH_VERSION.ordinal(),
"The VersionHistory with UUID: " + uuidFromNode(versionable) +
" does not have a Version with UUID: " + versionUUID);
}
VersionableState versionableState = VersionableState.forRestore(
version, history, versionable, this, ntMgr);
versionableState.restore(selector);
}
/**
* Restores a version from the history identified by {@code historyIdentifier}
* using the given version {@code selector}.
*
* @param historyIdentifier identifier of the version history node.
* @param selector the version selector.
* @param versionable the versionable node where the version is restored to.
* @throws CommitFailedException if an error occurs while restoring.
*/
void restore(@Nonnull String historyIdentifier,
@Nonnull VersionSelector selector,
@Nonnull NodeBuilder versionable)
throws CommitFailedException, RepositoryException {
String historyPath = getIdentifierManager().getPath(historyIdentifier);
String historyRelPath = PathUtils.relativize(VERSION_STORE_PATH, historyPath);
NodeBuilder history = resolve(versionStorageNode, historyRelPath);
checkState(history.exists(), "Version history does not exist: " + historyPath);
NodeBuilder version = selector.select(history);
if (version == null) {
throw new CommitFailedException(CommitFailedException.VERSION,
VersionExceptionCode.NO_VERSION_TO_RESTORE.ordinal(),
"VersionSelector did not select any version from " +
"history: " + historyPath);
}
// make sure versionable nodes has a jcr:uuid
// (required to identify its version history)
String versionableUUUID = history.getProperty(
JCR_VERSIONABLEUUID).getValue(Type.STRING);
versionable.setProperty(JCR_UUID, versionableUUUID, Type.STRING);
restore(versionable, uuidFromNode(version), selector);
}
/**
* Removes a version label from the jcr:versionLabels node of the referenced
* version history.
*
* @param historyRelPath relative path from the jcr:versionStorage node to
* the version history node.
* @param label the version label.
* @throws CommitFailedException if there is no such version history or if
* there is no label with the given name.
*/
public void removeVersionLabel(@Nonnull String historyRelPath,
@Nonnull String label)
throws CommitFailedException {
NodeBuilder labels = getVersionLabelsFor(checkNotNull(historyRelPath));
if (!labels.hasProperty(checkNotNull(label))) {
throw new CommitFailedException(CommitFailedException.VERSION,
VersionExceptionCode.NO_SUCH_VERSION_LABEL.ordinal(),
"Version label " + label + " does not exist on this version history");
}
labels.removeProperty(label);
}
/**
* Removes the version history if it's empty.
*
* @param versionable the versionable node.
*/
void removeEmptyHistory(@Nonnull NodeState versionable) {
NodeBuilder history = getVersionHistory(versionable);
if (isEmptyHistory(history.getNodeState())) {
history.remove();
}
}
// TODO: more methods that modify versions
//------------------------------< internal >--------------------------------
/**
* Resolves the {@code relPath} based on the given {@code node}
* and returns the resulting node, possibly non-existing.
*
* @param node the resolved node.
* @param relPath a relative path.
* @return the resolved node.
*/
@Nonnull
private NodeBuilder resolve(NodeBuilder node, String relPath) {
checkArgument(!PathUtils.isAbsolute(relPath), "Not a relative path");
for (String name : PathUtils.elements(relPath)) {
node = node.getChildNode(name);
}
return node;
}
/**
* Creates a version in the given version history. If the given version
* history does not yet have a version, then a root version is created and
* the versionable node is in a checked out state. Otherwise a version is
* created and the versionable node is set to checked in.
*
* @param vHistory the version history node.
* @param versionable the versionable node.
* @return the created version (nt:version) node.
* @throws CommitFailedException if creating the version fails. E.g. because
* the versionable node contains a OPV item with ABORT.
*/
private NodeBuilder createVersion(@Nonnull NodeBuilder vHistory,
@Nonnull NodeBuilder versionable)
throws IllegalArgumentException, CommitFailedException {
List<String> predecessors;
NodeBuilder version;
boolean isRootVersion;
if (!vHistory.hasChildNode(JCR_ROOTVERSION)) {
// create root version
isRootVersion = true;
predecessors = Collections.emptyList();
version = vHistory.child(JCR_ROOTVERSION);
} else {
isRootVersion = false;
checkState(versionable.hasProperty(JCR_PREDECESSORS));
PropertyState state = versionable.getProperty(JCR_PREDECESSORS);
predecessors = ImmutableList.copyOf(state.getValue(Type.REFERENCES));
version = vHistory.child(calculateVersion(vHistory, versionable));
}
String versionUUID = UUIDUtils.generateUUID();
version.setProperty(JCR_UUID, versionUUID, Type.STRING);
version.setProperty(JCR_PRIMARYTYPE, NT_VERSION, Type.NAME);
version.setProperty(JCR_CREATED, ISO8601.format(Calendar.getInstance()), Type.DATE);
version.setProperty(JCR_PREDECESSORS, predecessors, Type.REFERENCES);
version.setProperty(JCR_SUCCESSORS, Collections.<String>emptyList(), Type.REFERENCES);
// update successors of versions identified by predecessors
for (String id : predecessors) {
String name = PathUtils.getName(getIdentifierManager().getPath(id));
NodeBuilder predecessor = vHistory.getChildNode(name);
PropertyState state = predecessor.getProperty(JCR_SUCCESSORS);
if (state == null) {
throw new IllegalStateException("Missing " + JCR_SUCCESSORS +
" property on " + predecessor);
}
Set<String> refs = Sets.newHashSet(state.getValue(Type.REFERENCES));
refs.add(versionUUID);
predecessor.setProperty(JCR_SUCCESSORS, refs, Type.REFERENCES);
}
// jcr:frozenNode of created version
VersionableState versionableState = VersionableState.fromVersion(
version, vHistory, versionable, this, getNodeTypeManager());
if (!isRootVersion) {
versionableState.create();
}
// set jcr:isCheckedOut, jcr:versionHistory, jcr:baseVersion and
// jcr:predecessors on versionable node
versionable.setProperty(JCR_ISCHECKEDOUT, isRootVersion, Type.BOOLEAN);
versionable.setProperty(JCR_VERSIONHISTORY,
uuidFromNode(vHistory), Type.REFERENCE);
versionable.setProperty(JCR_BASEVERSION, versionUUID, Type.REFERENCE);
if (isRootVersion) {
// set predecessors to base version if this is the root version
predecessors = Collections.singletonList(versionUUID);
} else {
// otherwise clear predecessors for check-in
predecessors = Collections.emptyList();
}
versionable.setProperty(JCR_PREDECESSORS, predecessors, Type.REFERENCES);
return version;
}
/**
* Connects a versionable node with the root version of an existing version
* history.
*
* @param rootVersion the root version of a version history.
* @param vHistoryUUID the uuid of the version history node.
* @param versionable the versionable node.
*/
private void connectHistory(@Nonnull NodeBuilder rootVersion,
@Nonnull String vHistoryUUID,
@Nonnull NodeBuilder versionable) {
String rootVersionUUID = uuidFromNode(rootVersion);
versionable.setProperty(JCR_ISCHECKEDOUT, true, Type.BOOLEAN);
versionable.setProperty(JCR_VERSIONHISTORY, vHistoryUUID, Type.REFERENCE);
versionable.setProperty(JCR_BASEVERSION, rootVersionUUID, Type.REFERENCE);
versionable.setProperty(JCR_PREDECESSORS,
Collections.singleton(rootVersionUUID), Type.REFERENCES);
}
/**
* <i>Copied from Apache Jackrabbit Core</i>
* <p>
* Calculates the name of the new version that will be created by a
* checkin call. The name is determined as follows:
* <ul>
* <li> first the predecessor version with the shortest name is searched.
* <li> if that predecessor version is the root version, the new version gets
* the name "{number of successors}+1" + ".0"
* <li> if that predecessor version has no successor, the last digit of it's
* version number is incremented.
* <li> if that predecessor version has successors but the incremented name
* does not exist, that name is used.
* <li> otherwise a ".0" is added to the name until a non conflicting name
* is found.
* </ul>
* <p>
* Example Graph:
* <pre>
* jcr:rootVersion
* | |
* 1.0 2.0
* |
* 1.1
* |
* 1.2 ---\ ------\
* | \ \
* 1.3 1.2.0 1.2.0.0
* | |
* 1.4 1.2.1 ----\
* | | \
* 1.5 1.2.2 1.2.1.0
* | | |
* 1.6 | 1.2.1.1
* |-----/
* 1.7
* </pre>
*
* @param history the version history
* @param versionable the node to checkin
* @return the new version name
* @throws IllegalStateException if mandatory version properties are missing.
*/
protected String calculateVersion(@Nonnull NodeBuilder history,
@Nonnull NodeBuilder versionable)
throws IllegalStateException {
// 1. search a predecessor, suitable for generating the new name
PropertyState predecessors = versionable.getProperty(JCR_PREDECESSORS);
if (predecessors == null || predecessors.count() == 0) {
String message;
if (predecessors == null) {
message = "Mandatory jcr:predecessors property missing on node " + uuidFromNode(versionable);
} else {
message = "Mandatory jcr:predecessors property is empty on node " + uuidFromNode(versionable);
}
throw new IllegalStateException(message);
}
String best = null;
for (String id : predecessors.getValue(Type.REFERENCES)) {
String name = PathUtils.getName(getIdentifierManager().getPath(id));
if (best == null || name.length() < best.length()) {
best = name;
}
}
if (best == null) {
String message = "Could not find 'best' predecessor node for " +
uuidFromNode(versionable);
throw new IllegalStateException(message);
}
// 2. generate version name (assume no namespaces in version names)
String versionName = best;
int pos = versionName.lastIndexOf('.');
if (pos > 0) {
String newVersionName = versionName.substring(0, pos + 1)
+ (Integer.parseInt(versionName.substring(pos + 1)) + 1);
while (history.hasChildNode(newVersionName)) {
versionName += ".0";
newVersionName = versionName;
}
return newVersionName;
} else {
// best is root version
checkState(history.hasChildNode(JCR_ROOTVERSION));
NodeBuilder v = history.getChildNode(JCR_ROOTVERSION);
return String.valueOf(v.getProperty(JCR_SUCCESSORS).count() + 1) + ".0";
}
}
/**
* Returns the jcr:versionLabels node of the version history referenced
* by the given path.
*
* @param historyRelPath relative path from the jcr:versionStorage node
* to the history node.
* @return the jcr:versionLabels node.
* @throws CommitFailedException if there is no version history at the
* given path.
*/
private NodeBuilder getVersionLabelsFor(String historyRelPath)
throws CommitFailedException {
NodeBuilder history = resolve(versionStorageNode, historyRelPath);
if (!history.exists()) {
throw new CommitFailedException(CommitFailedException.VERSION,
VersionExceptionCode.UNEXPECTED_REPOSITORY_EXCEPTION.ordinal(),
"Version history does not exist: " + PathUtils.concat(
VERSION_STORE_PATH, historyRelPath));
}
return history.child(JCR_VERSIONLABELS);
}
@Nonnull
private Iterable<String> getVersionLabels(@Nonnull String historyRelPath, @Nonnull String versionId) throws CommitFailedException {
List<String> labels = new ArrayList<String>();
NodeBuilder labelNode = getVersionLabelsFor(historyRelPath);
for (PropertyState ps : labelNode.getProperties()) {
if (Type.REFERENCE == ps.getType()) {
if (versionId.equals(ps.getValue(Type.REFERENCE))) {
labels.add(ps.getName());
}
}
}
return labels;
}
@CheckForNull
private NodeBuilder getVersionById(@Nonnull NodeBuilder vhBuilder, @Nonnull String versionId) {
for (String childName : vhBuilder.getChildNodeNames()) {
NodeBuilder nb = vhBuilder.getChildNode(childName);
PropertyState uuid = nb.getProperty(JCR_UUID);
if (uuid != null && versionId.equals(uuid.getValue(Type.STRING))) {
return nb;
}
}
return null;
}
/**
* Gets the version history for the given
* {@code versionable} node.
*
* @param versionable the versionable node.
* @return the version history node.
* @throws IllegalArgumentException if the given node does not have a
* {@code jcr:uuid} property.
*/
@Nonnull
private NodeBuilder getVersionHistory(@Nonnull NodeState versionable) {
checkNotNull(versionable);
String vUUID = uuidFromNode(versionable);
String relPath = getVersionHistoryPath(vUUID);
NodeBuilder node = versionStorageNode;
for (Iterator<String> it = PathUtils.elements(relPath).iterator(); it.hasNext(); ) {
String name = it.next();
node = node.getChildNode(name);
if (!node.exists()) {
throw new IllegalArgumentException("No version history for this node");
}
}
return node;
}
/**
* Checks whether the passed node history hasn't been modified since its
* creation. It means that: (1) there's just one version, called jcr:rootVersion
* and (2) there are no custom labels.
*
* @param versionHistory to test
* @return {@code true} if the version history hasn't been changed yet
*/
private boolean isEmptyHistory(NodeState versionHistory) {
for (ChildNodeEntry entry : versionHistory.getChildNodeEntries()) {
String name = entry.getName();
NodeState node = entry.getNodeState();
if (!JCR_ROOTVERSION.equals(name) && isVersion.apply(node)) {
return false; // a checked-in version
}
}
NodeState labels = versionHistory.getChildNode(JCR_VERSIONLABELS);
for (PropertyState prop : labels.getProperties()) {
if (prop.getType() == Type.REFERENCE) {
return false; // custom label
}
}
return true;
}
}
| |
/*
* Copyright 2014 The LolDevs team (https://github.com/loldevs)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.boreeas.riotapi.rtmp.serialization;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import lombok.extern.log4j.Log4j;
import net.boreeas.riotapi.rtmp.serialization.amf3.DynamicObject;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.*;
/**
 * Converts values produced by AMF (de)serialization into the Java types
 * expected by target fields.
 * <p>
 * Created on 7/20/2014.
 */
@Log4j
public class TypeConverter {
    // Shared parser used by the optional json fallback in typecast().
    private static Gson gson;
    static {
        GsonBuilder builder = new GsonBuilder();
        gson = builder.create();
    }
    /**
     * There may be type mismatches caused by the serialization process which we try to fix here.
     *
     * @param cls  The target field type
     * @param obj  The object which is to be assigned to the field
     * @param json Attempt to deserialize strings as json if no other conversion matches
     * @return An object assignable to fields with that type
     * @throws InstantiationException   if a concrete target collection type cannot be instantiated
     * @throws IllegalAccessException   if a target type's constructor is not accessible
     * @throws IllegalArgumentException if no known conversion exists
     */
    public static <T> Object typecast(Class<T> cls, Object obj, boolean json) throws InstantiationException, IllegalAccessException {
        if (cls.isInstance(obj)) {
            return cls.cast(obj);
        }
        if (obj == null) {
            if (cls.isPrimitive()) {
                // null cannot be assigned to a primitive field; fall back to zero/false
                log.warn("Converting null to primitive");
                if (cls == boolean.class) {
                    return false;
                }
                return 0;
            }
            return obj;
        }
        if (cls.isArray()) {
            Class<?> inner = cls.getComponentType();
            if (obj.getClass().isArray()) {
                return arrayToArray(inner, obj);
            } else if (obj instanceof List) {
                return listToArray(inner, (List) obj);
            } else if (obj instanceof Map) {
                return listToArray(inner, mapToList(ArrayList.class, (Map) obj));
            }
        }
        if (List.class.isAssignableFrom(cls)) {
            if (obj.getClass().isArray()) {
                // abstract/interface list types get an ArrayList, otherwise the requested type
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    return arrayToList(ArrayList.class, obj);
                } else {
                    return arrayToList((Class<? extends List>) cls, obj);
                }
            } else if (obj instanceof List) {
                List l;
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    l = new ArrayList<>();
                } else {
                    l = (List) cls.newInstance();
                }
                l.addAll((java.util.Collection) obj);
                return l;
            } else if (obj instanceof Map) {
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    return mapToList(ArrayList.class, (Map<?, ?>) obj);
                } else {
                    return mapToList((Class<? extends List>) cls, (Map<?, ?>) obj);
                }
            }
        }
        if (Map.class.isAssignableFrom(cls)) {
            if (obj.getClass().isArray()) {
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    return listToMap(HashMap.class, arrayToList(ArrayList.class, obj));
                } else {
                    return listToMap((Class<? extends Map>) cls, arrayToList(ArrayList.class, obj));
                }
            } else if (obj instanceof Map) {
                Map map;
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    map = new HashMap<>();
                } else {
                    map = (Map) cls.newInstance();
                }
                map.putAll((Map) obj);
                return map;
            } else if (obj instanceof List) {
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    return listToMap(HashMap.class, (List) obj);
                } else {
                    return listToMap((Class<? extends Map>) cls, (List) obj);
                }
            } else if (obj instanceof DynamicObject) {
                if (cls.isInterface() || (cls.getModifiers() & Modifier.ABSTRACT) > 0) {
                    return dynObjectToMap(HashMap.class, (DynamicObject) obj);
                } else {
                    return dynObjectToMap((Class<? extends Map>) cls, (DynamicObject) obj);
                }
            }
        }
        if (cls.isAssignableFrom(AmfObject.class)) {
            if (obj instanceof Map) {
                return mapToAmfObj((Map<?, ?>) obj);
            } else if (obj instanceof DynamicObject) {
                AmfObject dynObj = new AmfObject();
                for (Map.Entry<String, Object> field : ((DynamicObject) obj).getFields().entrySet()) {
                    dynObj.set(field.getKey(), field.getValue());
                }
                return dynObj;
            }
        }
        if (cls.isAssignableFrom(AnonymousAmfObject.class)) {
            if (obj instanceof Map) {
                return mapToAnonAmfObj((Map<?, ?>) obj);
            } else if (obj instanceof AmfObject) {
                AnonymousAmfObject dynObj = new AnonymousAmfObject();
                // NOTE(review): casts an AmfObject to DynamicObject — this
                // assumes AmfObject extends DynamicObject; confirm the class
                // hierarchy, otherwise this branch throws ClassCastException.
                for (Map.Entry<String, Object> field : ((DynamicObject) obj).getFields().entrySet()) {
                    dynObj.put(field.getKey(), field.getValue());
                }
                return dynObj;
            }
        }
        if (obj instanceof Number) {
            // numeric widening/narrowing between boxed types and primitives
            if (cls == Long.class || cls == long.class) {
                return ((Number) obj).longValue();
            } else if (cls == Integer.class || cls == int.class) {
                return ((Number) obj).intValue();
            } else if (cls == Short.class || cls == short.class) {
                return ((Number) obj).shortValue();
            } else if (cls == Byte.class || cls == byte.class) {
                return ((Number) obj).byteValue();
            } else if (cls == Float.class || cls == float.class) {
                return ((Number) obj).floatValue();
            } else if (cls == Double.class || cls == double.class) {
                return ((Number) obj).doubleValue();
            } else if (cls == Date.class) {
                // numbers assigned to Date fields are treated as epoch millis
                return new Date(((Number) obj).longValue());
            }
        }
        if (obj instanceof Boolean && cls == boolean.class) {
            return obj;
        }
        if (obj.getClass().isPrimitive()) {
            // NOTE(review): getClass() virtually never returns a primitive
            // class for a live object (values arrive boxed), so this branch
            // is likely dead — kept for compatibility.
            if (obj.getClass() == boolean.class || obj.getClass() == Boolean.class || cls == boolean.class || cls == Boolean.class) {
                throw new IllegalArgumentException("Unknown conversion " + obj.getClass() + " => " + cls);
            }
            if (cls.isPrimitive()) {
                return cls.cast(obj);
            }
            if (cls == Long.class) {
                return (long) obj;
            } else if (cls == Double.class) {
                return (double) obj;
            } else if (cls == Integer.class) {
                return (int) obj;
            } else if (cls == Byte.class) {
                return (byte) obj;
            } else if (cls == Float.class) {
                return (float) obj;
            } else if (cls == Short.class) {
                return (short) obj;
            } else if (cls == Character.class) {
                return (char) obj;
            }
        }
        if (cls.isEnum() && obj instanceof String) {
            try {
                // prefer the enum's own lookup method when it provides one
                Method method = cls.getMethod("getByName", String.class);
                T result = (T) method.invoke(null, obj);
                if (result != null) {
                    return result;
                }
            } catch (NoSuchMethodException e) {
                // fall back to matching the constant name
                for (T t : cls.getEnumConstants()) {
                    if (((Enum) t).name().equals(obj)) {
                        return t;
                    }
                }
            } catch (InvocationTargetException e) {
                throw new IllegalStateException(e);
            }
        }
        if (json && obj instanceof String) {
            try {
                return gson.fromJson((String) obj, cls);
            } catch (RuntimeException e) {
                // FIX: the previous code printed to stdout and then re-parsed
                // the exact same string, which could only fail again. Log the
                // failure (with cause) once and rethrow.
                log.warn("Possibly malformed json: " + obj, e);
                throw e;
            }
        }
        throw new IllegalArgumentException("Unknown conversion " + obj.getClass() + " => " + cls);
    }
    /**
     * Copies the entries of the given map into a new {@link AnonymousAmfObject}.
     *
     * @param obj the source map; keys are converted via {@code toString()}.
     * @return the populated object.
     */
    public static AnonymousAmfObject mapToAnonAmfObj(Map<?, ?> obj) throws IllegalAccessException, InstantiationException {
        AnonymousAmfObject instance = new AnonymousAmfObject();
        for (Map.Entry entry : obj.entrySet()) {
            instance.put(entry.getKey().toString(), entry.getValue());
        }
        return instance;
    }
    /**
     * Copies the entries of the given map into a new {@link AmfObject}.
     *
     * @param obj the source map; keys are converted via {@code toString()}.
     * @return the populated object.
     */
    public static AmfObject mapToAmfObj(Map<?, ?> obj) throws IllegalAccessException, InstantiationException {
        AmfObject instance = new AmfObject();
        for (Map.Entry entry : obj.entrySet()) {
            instance.set(entry.getKey().toString(), entry.getValue());
        }
        return instance;
    }
    /**
     * Copies the fields of a {@link DynamicObject} into a new map of the
     * requested type.
     *
     * @param mapCls concrete map type to instantiate (needs a no-arg constructor).
     * @param obj the source object.
     * @return the populated map.
     */
    public static Map dynObjectToMap(Class<? extends Map> mapCls, DynamicObject obj) throws IllegalAccessException, InstantiationException {
        Map map = mapCls.newInstance();
        for (Map.Entry<String, Object> field : obj.getFields().entrySet()) {
            map.put(field.getKey(), field.getValue());
        }
        return map;
    }
    /**
     * Copies the given array into a new array with the requested component
     * type, casting each element.
     *
     * @param componentType the target component type.
     * @param original the source array.
     * @return an array with the requested component type; the original array
     *         if its component type already matches.
     */
    public static Object arrayToArray(Class<?> componentType, Object original) {
        if (componentType.equals(original.getClass().getComponentType())) {
            // FIX: the original code had a stray ';' after this condition,
            // turning the intended fast path into a no-op empty statement.
            return original;
        }
        int len = Array.getLength(original);
        Object arr = Array.newInstance(componentType, len);
        for (int i = 0; i < len; i++) {
            Array.set(arr, i, componentType.cast(Array.get(original, i)));
        }
        return arr;
    }
    /**
     * Copies the elements of the given array into a new list of the requested
     * type.
     *
     * @param listCls concrete list type to instantiate (needs a no-arg constructor).
     * @param original the source array.
     * @return the populated list.
     */
    public static List arrayToList(Class<? extends List> listCls, Object original) throws IllegalAccessException, InstantiationException {
        List list = listCls.newInstance();
        int len = Array.getLength(original);
        for (int i = 0; i < len; i++) {
            list.add(Array.get(original, i));
        }
        return list;
    }
    /**
     * Copies the elements of the given list into a new array with the
     * requested component type, casting each element.
     *
     * @param componentType the target component type.
     * @param list the source list.
     * @return the populated array.
     */
    public static Object listToArray(Class<?> componentType, List list) {
        Object arr = Array.newInstance(componentType, list.size());
        for (int i = 0; i < list.size(); i++) {
            Array.set(arr, i, componentType.cast(list.get(i)));
        }
        return arr;
    }
    /**
     * Copies the given list into a new map of the requested type, keyed by
     * list index.
     *
     * @param mapClass concrete map type to instantiate (needs a no-arg constructor).
     * @param list the source list.
     * @return the populated map.
     */
    public static Map listToMap(Class<? extends Map> mapClass, List list) throws IllegalAccessException, InstantiationException {
        Map map = mapClass.newInstance();
        for (int i = 0; i < list.size(); i++) {
            map.put(i, list.get(i));
        }
        return map;
    }
    /**
     * Copies the integer-keyed entries of the given map into a new list of
     * the requested type, placing each value at its key's index. Non-integer
     * keys are silently skipped; gaps are padded with {@code null}.
     *
     * @param cls concrete list type to instantiate (needs a no-arg constructor).
     * @param map the source map.
     * @return the populated list.
     */
    public static List mapToList(Class<? extends List> cls, Map<?, ?> map) throws IllegalAccessException, InstantiationException {
        List list = cls.newInstance();
        for (Map.Entry entry : map.entrySet()) {
            Object key = entry.getKey();
            // Filter integer keys from map
            if (key instanceof Integer || key instanceof Short || key instanceof Byte) {
                int i = (int) key;
                // keys are unordered, fill up the list first if needed
                if (list.size() <= i) {
                    for (int n = list.size(); n <= i; n++) {
                        list.add(null);
                    }
                }
                list.set(i, entry.getValue());
            }
        }
        return list;
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.indexing;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.common.collect.MapBuilder;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.metrics.CounterMetric;
import org.elasticsearch.common.metrics.MeanMetric;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.index.engine.Engine;
import org.elasticsearch.index.indexing.slowlog.ShardSlowLogIndexingService;
import org.elasticsearch.index.settings.IndexSettings;
import org.elasticsearch.index.shard.AbstractIndexShardComponent;
import org.elasticsearch.index.shard.ShardId;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.TimeUnit;
/**
*/
public class ShardIndexingService extends AbstractIndexShardComponent {
private final ShardSlowLogIndexingService slowLog;
private final StatsHolder totalStats = new StatsHolder();
private final CopyOnWriteArrayList<IndexingOperationListener> listeners = new CopyOnWriteArrayList<>();
private volatile Map<String, StatsHolder> typesStats = ImmutableMap.of();
    /**
     * Creates the per-shard indexing service.
     *
     * @param shardId the shard this service reports stats for.
     * @param indexSettings index-level settings, injected via {@code @IndexSettings}.
     * @param slowLog sink that records operations exceeding slow-log thresholds.
     */
    @Inject
    public ShardIndexingService(ShardId shardId, @IndexSettings Settings indexSettings, ShardSlowLogIndexingService slowLog) {
        super(shardId, indexSettings);
        this.slowLog = slowLog;
    }
/**
* Returns the stats, including type specific stats. If the types are null/0 length, then nothing
* is returned for them. If they are set, then only types provided will be returned, or
* <tt>_all</tt> for all types.
*/
public IndexingStats stats(String... types) {
IndexingStats.Stats total = totalStats.stats();
Map<String, IndexingStats.Stats> typesSt = null;
if (types != null && types.length > 0) {
typesSt = new HashMap<>(typesStats.size());
if (types.length == 1 && types[0].equals("_all")) {
for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
typesSt.put(entry.getKey(), entry.getValue().stats());
}
} else {
for (Map.Entry<String, StatsHolder> entry : typesStats.entrySet()) {
if (Regex.simpleMatch(types, entry.getKey())) {
typesSt.put(entry.getKey(), entry.getValue().stats());
}
}
}
}
return new IndexingStats(total, typesSt);
}
public void addListener(IndexingOperationListener listener) {
listeners.add(listener);
}
public void removeListener(IndexingOperationListener listener) {
listeners.remove(listener);
}
public Engine.Create preCreate(Engine.Create create) {
totalStats.indexCurrent.inc();
typeStats(create.type()).indexCurrent.inc();
for (IndexingOperationListener listener : listeners) {
create = listener.preCreate(create);
}
return create;
}
public void postCreateUnderLock(Engine.Create create) {
for (IndexingOperationListener listener : listeners) {
try {
listener.postCreateUnderLock(create);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void throttlingActivated() {
totalStats.setThrottled(true);
}
public void throttlingDeactivated() {
totalStats.setThrottled(false);
}
public void postCreate(Engine.Create create) {
long took = create.endTime() - create.startTime();
totalStats.indexMetric.inc(took);
totalStats.indexCurrent.dec();
StatsHolder typeStats = typeStats(create.type());
typeStats.indexMetric.inc(took);
typeStats.indexCurrent.dec();
slowLog.postCreate(create, took);
for (IndexingOperationListener listener : listeners) {
try {
listener.postCreate(create);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void postCreate(Engine.Create create, Throwable ex) {
}
public Engine.Index preIndex(Engine.Index index) {
totalStats.indexCurrent.inc();
typeStats(index.type()).indexCurrent.inc();
for (IndexingOperationListener listener : listeners) {
index = listener.preIndex(index);
}
return index;
}
public void postIndexUnderLock(Engine.Index index) {
for (IndexingOperationListener listener : listeners) {
try {
listener.postIndexUnderLock(index);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void postIndex(Engine.Index index) {
long took = index.endTime() - index.startTime();
totalStats.indexMetric.inc(took);
totalStats.indexCurrent.dec();
StatsHolder typeStats = typeStats(index.type());
typeStats.indexMetric.inc(took);
typeStats.indexCurrent.dec();
slowLog.postIndex(index, took);
for (IndexingOperationListener listener : listeners) {
try {
listener.postIndex(index);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void postIndex(Engine.Index index, Throwable ex) {
totalStats.indexCurrent.dec();
typeStats(index.type()).indexCurrent.dec();
}
public Engine.Delete preDelete(Engine.Delete delete) {
totalStats.deleteCurrent.inc();
typeStats(delete.type()).deleteCurrent.inc();
for (IndexingOperationListener listener : listeners) {
delete = listener.preDelete(delete);
}
return delete;
}
public void postDeleteUnderLock(Engine.Delete delete) {
for (IndexingOperationListener listener : listeners) {
try {
listener.postDeleteUnderLock(delete);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void postDelete(Engine.Delete delete) {
long took = delete.endTime() - delete.startTime();
totalStats.deleteMetric.inc(took);
totalStats.deleteCurrent.dec();
StatsHolder typeStats = typeStats(delete.type());
typeStats.deleteMetric.inc(took);
typeStats.deleteCurrent.dec();
for (IndexingOperationListener listener : listeners) {
try {
listener.postDelete(delete);
} catch (Exception e) {
logger.warn("post listener [{}] failed", e, listener);
}
}
}
public void postDelete(Engine.Delete delete, Throwable ex) {
totalStats.deleteCurrent.dec();
typeStats(delete.type()).deleteCurrent.dec();
}
public void noopUpdate(String type) {
totalStats.noopUpdates.inc();
typeStats(type).noopUpdates.inc();
}
public void clear() {
totalStats.clear();
synchronized (this) {
if (!typesStats.isEmpty()) {
MapBuilder<String, StatsHolder> typesStatsBuilder = MapBuilder.newMapBuilder();
for (Map.Entry<String, StatsHolder> typeStats : typesStats.entrySet()) {
if (typeStats.getValue().totalCurrent() > 0) {
typeStats.getValue().clear();
typesStatsBuilder.put(typeStats.getKey(), typeStats.getValue());
}
}
typesStats = typesStatsBuilder.immutableMap();
}
}
}
private StatsHolder typeStats(String type) {
StatsHolder stats = typesStats.get(type);
if (stats == null) {
synchronized (this) {
stats = typesStats.get(type);
if (stats == null) {
stats = new StatsHolder();
typesStats = MapBuilder.newMapBuilder(typesStats).put(type, stats).immutableMap();
}
}
}
return stats;
}
static class StatsHolder {
public final MeanMetric indexMetric = new MeanMetric();
public final MeanMetric deleteMetric = new MeanMetric();
public final CounterMetric indexCurrent = new CounterMetric();
public final CounterMetric deleteCurrent = new CounterMetric();
public final CounterMetric noopUpdates = new CounterMetric();
public final CounterMetric throttleTimeMillisMetric = new CounterMetric();
volatile boolean isThrottled = false;
volatile long startOfThrottleNS;
public IndexingStats.Stats stats() {
long currentThrottleNS = 0;
if (isThrottled && startOfThrottleNS != 0) {
currentThrottleNS += System.nanoTime() - startOfThrottleNS;
if (currentThrottleNS < 0) {
// Paranoia (System.nanoTime() is supposed to be monotonic): time slip must have happened, have to ignore this value
currentThrottleNS = 0;
}
}
return new IndexingStats.Stats(
indexMetric.count(), TimeUnit.NANOSECONDS.toMillis(indexMetric.sum()), indexCurrent.count(),
deleteMetric.count(), TimeUnit.NANOSECONDS.toMillis(deleteMetric.sum()), deleteCurrent.count(),
noopUpdates.count(), isThrottled, TimeUnit.MILLISECONDS.toMillis(throttleTimeMillisMetric.count() + TimeValue.nsecToMSec(currentThrottleNS)));
}
void setThrottled(boolean isThrottled) {
if (!this.isThrottled && isThrottled) {
startOfThrottleNS = System.nanoTime();
} else if (this.isThrottled && !isThrottled) {
assert startOfThrottleNS > 0 : "Bad state of startOfThrottleNS";
long throttleTimeNS = System.nanoTime() - startOfThrottleNS;
if (throttleTimeNS >= 0) {
// Paranoia (System.nanoTime() is supposed to be monotonic): time slip may have occurred but never want to add a negative number
throttleTimeMillisMetric.inc(TimeValue.nsecToMSec(throttleTimeNS));
}
}
this.isThrottled = isThrottled;
}
public long totalCurrent() {
return indexCurrent.count() + deleteMetric.count();
}
public void clear() {
indexMetric.clear();
deleteMetric.clear();
}
}
}
| |
package com.gta.recommend;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
 * Plain implementation of the Apriori algorithm for mining frequent itemsets
 * and association rules from transactions represented as {@code Set<String>}.
 */
public class Apriori {
    /** Minimum support (fraction of transactions) for an itemset to count as frequent. */
    private final double minSupport;
    /** Minimum confidence for an association rule to be emitted. */
    private final double minConfidence;

    public Apriori(double minSupport, double minConfidence) {
        this.minSupport = minSupport;
        this.minConfidence = minConfidence;
    }

    /**
     * Builds the candidate 1-itemsets (C1): one singleton set per distinct
     * element across all transactions, in first-seen order.
     *
     * @param set the transactions
     * @return deduplicated list of singleton itemsets
     */
    public List<Set<String>> createInitSet(List<Set<String>> set) {
        // LinkedHashSet keeps first-seen order while deduplicating in O(1) per
        // element (the original list.contains() scan was O(n) per element).
        Set<Set<String>> candidates = new LinkedHashSet<Set<String>>();
        for (Set<String> item : set) {
            for (String element : item) {
                Set<String> e = new TreeSet<String>();
                e.add(element);
                candidates.add(e);
            }
        }
        return new ArrayList<Set<String>>(candidates);
    }

    /**
     * Builds singleton itemsets from a single set of elements.
     *
     * @param set the elements
     * @return deduplicated list of singleton itemsets
     */
    public List<Set<String>> createInitSet(Set<String> set) {
        Set<Set<String>> candidates = new LinkedHashSet<Set<String>>();
        for (String element : set) {
            Set<String> e = new TreeSet<String>();
            e.add(element);
            candidates.add(e);
        }
        return new ArrayList<Set<String>>(candidates);
    }

    /**
     * Counts how often each candidate itemset occurs in the transactions and
     * keeps the candidates whose support reaches {@code minSupport}.
     *
     * @param set the transactions
     * @param tmp the candidate itemsets (Ck)
     * @return the frequent itemsets (Lk) plus the support of every candidate
     */
    public DataPack ScanD(List<Set<String>> set, List<Set<String>> tmp) {
        Map<Set<String>, Integer> setMap = new HashMap<Set<String>, Integer>();
        for (Set<String> element : set) {
            for (Set<String> item : tmp) {
                if (isContain(element, item)) {
                    Integer seen = setMap.get(item);
                    setMap.put(item, seen == null ? 1 : seen + 1);
                }
            }
        }
        int numItems = set.size();
        List<Set<String>> list = new ArrayList<Set<String>>();
        Map<Set<String>, Double> supportData = new HashMap<Set<String>, Double>();
        for (Map.Entry<Set<String>, Integer> entry : setMap.entrySet()) {
            double support = (double) entry.getValue() / numItems;
            if (support >= minSupport) {
                // prepend, preserving the original result ordering
                list.add(0, entry.getKey());
            }
            supportData.put(entry.getKey(), support);
        }
        return new DataPack(list, supportData);
    }

    /**
     * Generates the candidate k-itemsets (Ck) from the frequent (k-1)-itemsets.
     *
     * @param list the frequent (k-1)-itemsets
     * @param k the target itemset size
     * @return the candidate k-itemsets
     */
    public List<Set<String>> aprioriGen(List<Set<String>> list, int k) {
        List<Set<String>> retList = new ArrayList<Set<String>>();
        int size = list.size();
        for (int i = 0; i < size; i++) {
            Set<String> set1 = list.get(i);
            for (int j = i + 1; j < size; j++) {
                Set<String> set2 = list.get(j);
                // classic Apriori join: merge only pairs sharing their first k-2 elements
                if (isEquals(getSubclass(set1, k - 2), getSubclass(set2, k - 2))) {
                    retList.add(getUnion(set1, set2));
                }
            }
        }
        return retList;
    }

    /** Debug helper: prints each itemset on its own line. */
    public void print(List<Set<String>> set) {
        for (Set<String> si : set) {
            for (String s : si) {
                System.out.print(s + " ");
            }
            System.out.println();
        }
    }

    /** Debug helper: prints the elements of one set on the current line. */
    public void print(Set<String> set) {
        for (String s : set) {
            System.out.print(s + " ");
        }
    }

    /**
     * Runs the full Apriori pass: starting from the frequent 1-itemsets it keeps
     * generating and filtering k-itemsets until no candidate survives.
     *
     * @param set the transactions
     * @return every frequent-itemset level plus the support of each candidate
     */
    public DataProc getApriori(List<Set<String>> set) {
        List<Set<String>> initSet = createInitSet(set);
        DataPack dp = ScanD(set, initSet);
        List<Set<String>> list = dp.getList();
        Map<Set<String>, Double> supportData = dp.getSetMap();
        List<List<Set<String>>> L = new ArrayList<List<Set<String>>>();
        L.add(list);
        int k = 2;
        // L.get(k-2) is the most recent frequent level; stop once it is empty
        while (L.get(k - 2).size() > 0) {
            List<Set<String>> CK = aprioriGen(L.get(k - 2), k);
            DataPack dataPack = ScanD(set, CK);
            supportData.putAll(dataPack.getSetMap());
            L.add(dataPack.getList());
            k += 1;
        }
        return new DataProc(L, supportData);
    }

    /**
     * Generates association rules from the frequent itemsets.
     *
     * @param L the frequent-itemset levels (index i holds the (i+1)-itemsets)
     * @param supportData support of every candidate itemset
     * @return the rules that satisfy {@code minConfidence}
     */
    public List<DataSet> generateRules(List<List<Set<String>>> L, Map<Set<String>, Double> supportData) {
        List<DataSet> bigRules = new ArrayList<DataSet>();
        int size = L.size();
        // start at level 1 (2-itemsets): rules need at least two elements
        for (int i = 1; i < size; i++) {
            for (Set<String> si : L.get(i)) {
                List<Set<String>> initSet = createInitSet(si);
                if (i > 1)
                    rulesFromConSeq(si, initSet, supportData, bigRules);
                else
                    calcConfidence(si, initSet, supportData, bigRules);
            }
        }
        return bigRules;
    }

    /**
     * Evaluates rules "freSet \ conSeq => conSeq" for each candidate consequent
     * and keeps those whose confidence reaches {@code minConfidence}.
     *
     * @param freSet one frequent itemset
     * @param initSet candidate consequents
     * @param supportData support of every candidate itemset
     * @param bigRules accumulator the accepted rules are appended to
     * @return the consequents that produced an accepted rule
     */
    public List<Set<String>> calcConfidence(Set<String> freSet, List<Set<String>> initSet,
            Map<Set<String>, Double> supportData, List<DataSet> bigRules) {
        List<Set<String>> list = new ArrayList<Set<String>>();
        for (Set<String> conSeq : initSet) {
            Set<String> conSet = getSubtract(freSet, conSeq);
            // confidence(A => B) = support(A u B) / support(A)
            double conf = supportData.get(freSet) / supportData.get(conSet);
            if (conf >= minConfidence) {
                DataSet ds = new DataSet(conSet, conSeq, conf);
                bigRules.add(ds);
                list.add(conSeq);
            }
        }
        return list;
    }

    /**
     * Recursively grows rule consequents for itemsets of size &gt; 2.
     *
     * @param freSet one frequent itemset
     * @param initSet current candidate consequents (all the same size)
     * @param supportData support of every candidate itemset
     * @param bigRules accumulator the accepted rules are appended to
     */
    public void rulesFromConSeq(Set<String> freSet, List<Set<String>> initSet,
            Map<Set<String>, Double> supportData, List<DataSet> bigRules) {
        int m = initSet.get(0).size();
        if (freSet.size() > m + 1) {
            // grow the consequents by one element, keep the confident ones, recurse
            List<Set<String>> list = aprioriGen(initSet, m + 1);
            list = calcConfidence(freSet, list, supportData, bigRules);
            if (list.size() > 1) {
                rulesFromConSeq(freSet, list, supportData, bigRules);
            }
        }
    }

    /**
     * Tests whether {@code src} contains every element of {@code dst}.
     *
     * @return true if src contains dst, false otherwise
     */
    public boolean isContain(Set<String> src, Set<String> dst) {
        return src.containsAll(dst);
    }

    /**
     * Returns the intersection of two sets (neither input is modified).
     */
    public Set<String> getConjunction(Set<String> src, Set<String> dst) {
        Set<String> set = new TreeSet<String>(src);
        set.retainAll(dst);
        return set;
    }

    /**
     * Returns the union of two sets (neither input is modified).
     */
    public Set<String> getUnion(Set<String> src, Set<String> dst) {
        Set<String> set = new TreeSet<String>(src);
        set.addAll(dst);
        return set;
    }

    /**
     * Returns the first {@code k} elements of the set, in its iteration order.
     *
     * @param src the source set
     * @param k how many elements to keep
     * @return a set with at most k elements
     */
    public Set<String> getSubclass(Set<String> src, int k) {
        Set<String> set = new TreeSet<String>();
        int index = 0;
        for (String s : src) {
            if (index >= k)
                break;
            set.add(s);
            index++;
        }
        return set;
    }

    /**
     * Returns the difference src \ dst (neither input is modified).
     */
    public Set<String> getSubtract(Set<String> src, Set<String> dst) {
        Set<String> set = new TreeSet<String>(src);
        set.removeAll(dst);
        return set;
    }

    /**
     * Tests whether two sets contain exactly the same elements.
     *
     * @return true if src equals dst, false otherwise
     */
    public boolean isEquals(Set<String> src, Set<String> dst) {
        // same size plus containment implies equality; also true for two empty sets
        return src.size() == dst.size() && src.containsAll(dst);
    }
}
| |
/*
* Copyright (C) 2013 47 Degrees, LLC
* http://47deg.com
* hello@47deg.com
*
* Copyright 2012 Roman Nurik
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.fortysevendeg.swipelistview;
import android.graphics.Rect;
import android.os.Build;
import android.os.Handler;
import android.support.v4.view.MotionEventCompat;
import android.util.Log;
import android.view.MotionEvent;
import android.view.VelocityTracker;
import android.view.View;
import android.view.ViewConfiguration;
import android.view.ViewGroup;
import android.widget.AbsListView;
import android.widget.ListView;
import com.nineoldandroids.animation.Animator;
import com.nineoldandroids.animation.AnimatorListenerAdapter;
import com.nineoldandroids.animation.ValueAnimator;
import com.nineoldandroids.view.ViewHelper;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.nineoldandroids.view.ViewHelper.setAlpha;
import static com.nineoldandroids.view.ViewHelper.setTranslationX;
import static com.nineoldandroids.view.ViewPropertyAnimator.animate;
/**
* Touch listener impl for the SwipeListView
*/
public class SwipeListViewTouchListener implements View.OnTouchListener {
private static final int DISPLACE_CHOICE = 80;
private int swipeMode = SwipeListView.SWIPE_MODE_BOTH;
private boolean swipeOpenOnLongPress = true;
private boolean swipeClosesAllItemsWhenListMoves = true;
private int swipeFrontView = 0;
private int swipeBackView = 0;
private Rect rect = new Rect();
// Cached ViewConfiguration and system-wide constant values
private int slop;
private int minFlingVelocity;
private int maxFlingVelocity;
private long configShortAnimationTime;
private long animationTime;
private float leftOffset = 0;
private float rightOffset = 0;
private int swipeDrawableChecked = 0;
private int swipeDrawableUnchecked = 0;
// Fixed properties
private SwipeListView swipeListView;
private int viewWidth = 1; // 1 and not 0 to prevent dividing by zero
private List<PendingDismissData> pendingDismisses = new ArrayList<PendingDismissData>();
private int dismissAnimationRefCount = 0;
private float downX;
private boolean swiping;
private boolean swipingRight;
private VelocityTracker velocityTracker;
private int downPosition;
private View parentView;
private View frontView;
private View backView;
private boolean paused;
private int swipeCurrentAction = SwipeListView.SWIPE_ACTION_NONE;
private int swipeActionLeft = SwipeListView.SWIPE_ACTION_REVEAL;
private int swipeActionRight = SwipeListView.SWIPE_ACTION_REVEAL;
private List<Boolean> opened = new ArrayList<Boolean>();
private List<Boolean> openedRight = new ArrayList<Boolean>();
private boolean listViewMoving;
private List<Boolean> checked = new ArrayList<Boolean>();
private int oldSwipeActionRight;
private int oldSwipeActionLeft;
/**
 * Creates the touch listener, caching the system touch-slop, fling-velocity
 * and default animation-time constants for later gesture handling.
 *
 * @param swipeListView SwipeListView
 * @param swipeFrontView front view Identifier
 * @param swipeBackView back view Identifier
 */
public SwipeListViewTouchListener(SwipeListView swipeListView, int swipeFrontView, int swipeBackView) {
    this.swipeListView = swipeListView;
    this.swipeFrontView = swipeFrontView;
    this.swipeBackView = swipeBackView;
    ViewConfiguration config = ViewConfiguration.get(swipeListView.getContext());
    slop = config.getScaledTouchSlop();
    minFlingVelocity = config.getScaledMinimumFlingVelocity();
    maxFlingVelocity = config.getScaledMaximumFlingVelocity();
    configShortAnimationTime = swipeListView.getContext().getResources().getInteger(android.R.integer.config_shortAnimTime);
    animationTime = configShortAnimationTime;
}
/**
 * Remembers the row (whole cell) currently being touched.
 *
 * @param parentView Parent view
 */
private void setParentView(View parentView) {
    this.parentView = parentView;
}
/**
 * Remembers the current item's front view and wires its click handlers.
 * A long press (when enabled) opens the row at {@code downPosition}.
 *
 * @param frontView Front view
 */
private void setFrontView(View frontView) {
    this.frontView = frontView;
    frontView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            swipeListView.onClickFrontView(downPosition);
        }
    });
    if (swipeOpenOnLongPress) {
        frontView.setOnLongClickListener(new View.OnLongClickListener() {
            @Override
            public boolean onLongClick(View v) {
                openAnimate(downPosition);
                // false: do not consume, so normal long-press behavior still runs
                return false;
            }
        });
    }
}
/**
 * Remembers the current item's back view and forwards its clicks to the list.
 *
 * @param backView back view of the touched cell
 */
private void setBackView(View backView) {
    this.backView = backView;
    backView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            swipeListView.onClickBackView(downPosition);
        }
    });
}
/**
 * @return true if the list is in motion (scroll state is touch-scroll)
 */
public boolean isListViewMoving() {
    return listViewMoving;
}
/**
 * Sets the animation duration used when the user drops a cell; values less
 * than or equal to zero restore the system short-animation default.
 *
 * @param animationTime milliseconds
 */
public void setAnimationTime(long animationTime) {
    this.animationTime = animationTime > 0 ? animationTime : configShortAnimationTime;
}
/**
 * Sets the right offset (pixels of the cell left visible after a swipe to the right).
 *
 * @param rightOffset Offset
 */
public void setRightOffset(float rightOffset) {
    this.rightOffset = rightOffset;
}
/**
 * Sets the left offset (pixels of the cell left visible after a swipe to the left).
 *
 * @param leftOffset Offset
 */
public void setLeftOffset(float leftOffset) {
    this.leftOffset = leftOffset;
}
/**
 * Sets whether all opened items are closed when the user scrolls the ListView.
 *
 * @param swipeClosesAllItemsWhenListMoves true to auto-close on scroll
 */
public void setSwipeClosesAllItemsWhenListMoves(boolean swipeClosesAllItemsWhenListMoves) {
    this.swipeClosesAllItemsWhenListMoves = swipeClosesAllItemsWhenListMoves;
}
/**
 * Sets whether a long press on a cell opens it.
 *
 * @param swipeOpenOnLongPress true to open on long press
 */
public void setSwipeOpenOnLongPress(boolean swipeOpenOnLongPress) {
    this.swipeOpenOnLongPress = swipeOpenOnLongPress;
}
/**
 * Sets the swipe mode (one of the SwipeListView.SWIPE_MODE_* constants).
 *
 * @param swipeMode swipe mode constant
 */
public void setSwipeMode(int swipeMode) {
    this.swipeMode = swipeMode;
}
/**
 * Checks whether swiping is enabled (mode is not SWIPE_MODE_NONE).
 *
 * @return true if swiping is enabled
 */
protected boolean isSwipeEnabled() {
    return swipeMode != SwipeListView.SWIPE_MODE_NONE;
}
/**
 * Returns the action performed on a swipe to the left.
 *
 * @return Action
 */
public int getSwipeActionLeft() {
    return swipeActionLeft;
}
/**
 * Sets the action performed on a swipe to the left.
 *
 * @param swipeActionLeft Action
 */
public void setSwipeActionLeft(int swipeActionLeft) {
    this.swipeActionLeft = swipeActionLeft;
}
/**
 * Returns the action performed on a swipe to the right.
 *
 * @return Action
 */
public int getSwipeActionRight() {
    return swipeActionRight;
}
/**
 * Sets the action performed on a swipe to the right.
 *
 * @param swipeActionRight Action
 */
public void setSwipeActionRight(int swipeActionRight) {
    this.swipeActionRight = swipeActionRight;
}
/**
 * Sets the background resource for checked cells (only SWIPE_ACTION_CHOICE).
 *
 * @param swipeDrawableChecked drawable resource id (0 disables)
 */
protected void setSwipeDrawableChecked(int swipeDrawableChecked) {
    this.swipeDrawableChecked = swipeDrawableChecked;
}
/**
 * Sets the background resource for unchecked cells (only SWIPE_ACTION_CHOICE).
 *
 * @param swipeDrawableUnchecked drawable resource id (0 disables)
 */
protected void setSwipeDrawableUnchecked(int swipeDrawableUnchecked) {
    this.swipeDrawableUnchecked = swipeDrawableUnchecked;
}
/**
 * Grows the per-position state lists (opened/openedRight/checked) so there is
 * one entry per adapter position. Called when the adapter data changes.
 * NOTE(review): the lists are never shrunk when the adapter shrinks, so stale
 * state can remain for removed positions — confirm whether that is intended.
 */
public void resetItems() {
    if (swipeListView.getAdapter() != null) {
        int count = swipeListView.getAdapter().getCount();
        // BUG FIX: was "i <= count", which always padded one extra, unused
        // slot past the last adapter position.
        for (int i = opened.size(); i < count; i++) {
            opened.add(false);
            openedRight.add(false);
            checked.add(false);
        }
    }
}
/**
 * Opens the item at the given list position with animation.
 * NOTE(review): getChildAt returns null for positions that are not currently
 * visible, which would NPE here — confirm callers only pass visible positions.
 *
 * @param position Position of list
 */
protected void openAnimate(int position) {
    openAnimate(swipeListView.getChildAt(position - swipeListView.getFirstVisiblePosition()).findViewById(swipeFrontView), position);
}
/**
 * Closes the item at the given list position with animation.
 * NOTE(review): getChildAt returns null for positions that are not currently
 * visible, which would NPE here — confirm callers only pass visible positions.
 *
 * @param position Position of list
 */
protected void closeAnimate(int position) {
    closeAnimate(swipeListView.getChildAt(position - swipeListView.getFirstVisiblePosition()).findViewById(swipeFrontView), position);
}
/**
 * Toggles the choice (multi-select) state of an item, entering/leaving choice
 * mode on the first selection / last deselection and keeping the ListView's
 * checked state and the cell background in sync.
 *
 * @param position position of list
 */
private void swapChoiceState(int position) {
    int lastCount = getCountSelected();
    boolean lastChecked = checked.get(position);
    checked.set(position, !lastChecked);
    int count = lastChecked ? lastCount - 1 : lastCount + 1;
    if (lastCount == 0 && count == 1) {
        // first selection: enter choice mode and force SWIPE_ACTION_CHOICE
        swipeListView.onChoiceStarted();
        closeOpenedItems();
        setActionsTo(SwipeListView.SWIPE_ACTION_CHOICE);
    }
    if (lastCount == 1 && count == 0) {
        // last deselection: leave choice mode and restore the previous actions
        swipeListView.onChoiceEnded();
        returnOldActions();
    }
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
        swipeListView.setItemChecked(position, !lastChecked);
    }
    swipeListView.onChoiceChanged(position, !lastChecked);
    reloadChoiceStateInView(frontView, position);
}
/**
 * Clears the choice state of every item, redrawing the visible ones, then
 * leaves choice mode and restores the previous swipe actions.
 */
protected void unselectedChoiceStates() {
    int start = swipeListView.getFirstVisiblePosition();
    int end = swipeListView.getLastVisiblePosition();
    for (int i = 0; i < checked.size(); i++) {
        if (checked.get(i) && i >= start && i <= end) {
            // only on-screen rows have a view to redraw
            reloadChoiceStateInView(swipeListView.getChildAt(i - start).findViewById(swipeFrontView), i);
        }
        checked.set(i, false);
    }
    swipeListView.onChoiceEnded();
    returnOldActions();
}
/**
 * Dismisses the row at the given position. Visible rows are animated out via
 * performDismiss; off-screen rows are queued as a pending dismissal with a
 * null view. Always increments the pending-animation ref count.
 *
 * @param position position to dismiss
 * @return the height of the dismissed view, or 0 if it is not visible
 */
protected int dismiss(int position) {
    int start = swipeListView.getFirstVisiblePosition();
    int end = swipeListView.getLastVisiblePosition();
    View view = swipeListView.getChildAt(position - start);
    ++dismissAnimationRefCount;
    if (position >= start && position <= end) {
        performDismiss(view, position, false);
        return view.getHeight();
    } else {
        pendingDismisses.add(new PendingDismissData(position, null));
        return 0;
    }
}
/**
 * Redraws a cell's background to reflect its checked/unchecked choice state.
 * A resource id of 0 means "leave the background untouched".
 *
 * @param frontView view to draw
 * @param position position in list
 */
protected void reloadChoiceStateInView(View frontView, int position) {
    int background = isChecked(position) ? swipeDrawableChecked : swipeDrawableUnchecked;
    if (background > 0) {
        frontView.setBackgroundResource(background);
    }
}
/**
 * Resets the front view's horizontal translation when the cell is recycled by
 * the ListView (only when items auto-close on scroll).
 *
 * @param frontView view to re-draw
 */
protected void reloadSwipeStateInView(View frontView) {
    if (this.swipeClosesAllItemsWhenListMoves) {
        // CONSISTENCY FIX: use the statically-imported nineoldandroids
        // setTranslationX proxy like the rest of this class, instead of the
        // native View#setTranslationX which only exists on API 11+.
        setTranslationX(frontView, 0f);
    }
}
/**
 * Returns whether the item at the given position is selected.
 * Positions beyond the tracked list are reported as unselected.
 *
 * @param position position in list
 * @return true if selected
 */
protected boolean isChecked(int position) {
    return position < checked.size() && checked.get(position);
}
/**
 * Counts the currently selected items.
 *
 * @return number of selected positions
 */
protected int getCountSelected() {
    int count = 0;
    for (Boolean itemChecked : checked) {
        if (itemChecked) {
            count++;
        }
    }
    Log.d("SwipeListView", "selected: " + count);
    return count;
}
/**
 * Collects the positions of the currently selected items, in ascending order.
 *
 * @return list of selected positions
 */
protected List<Integer> getPositionsSelected() {
    List<Integer> selected = new ArrayList<Integer>();
    int size = checked.size();
    for (int position = 0; position < size; position++) {
        if (checked.get(position)) {
            selected.add(position);
        }
    }
    return selected;
}
/**
 * Opens the item with a reveal animation, but only if it is not already open.
 *
 * @param view affected view
 * @param position Position of list
 */
private void openAnimate(View view, int position) {
    if (!opened.get(position)) {
        // swap=true toggles the closed state to open
        generateRevealAnimate(view, true, false, position);
    }
}
/**
 * Closes the item with a reveal animation, but only if it is currently open.
 *
 * @param view affected view
 * @param position Position of list
 */
private void closeAnimate(View view, int position) {
    if (opened.get(position)) {
        // swap=true toggles the open state back to closed
        generateRevealAnimate(view, true, false, position);
    }
}
/**
 * Dispatches to the animation matching the current swipe action.
 *
 * @param view affected view
 * @param swap If state should change. If "false" returns to the original position
 * @param swapRight If swap is true, this parameter tells if move is to the right or left
 * @param position Position of list
 */
private void generateAnimate(final View view, final boolean swap, final boolean swapRight, final int position) {
    Log.d("SwipeListView", "swap: " + swap + " - swapRight: " + swapRight + " - position: " + position);
    if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_REVEAL) {
        generateRevealAnimate(view, swap, swapRight, position);
    } else if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_DISMISS) {
        // dismiss animates the whole row, not just the front view
        generateDismissAnimate(parentView, swap, swapRight, position);
    } else if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
        generateChoiceAnimate(view, position);
    }
}
/**
 * Animates the cell back to its resting position (choice mode never leaves
 * the cell displaced), then resets scrolling and the per-touch state.
 *
 * @param view affected view
 * @param position list position
 */
private void generateChoiceAnimate(final View view, final int position) {
    animate(view)
            .translationX(0)
            .setDuration(animationTime)
            .setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    swipeListView.resetScrolling();
                    resetCell();
                }
            });
}
/**
 * Animates a dismiss: slides the row fully off-screen (fading it out) when the
 * swipe is confirmed, or back to its resting position otherwise, then performs
 * the actual removal in the end-listener.
 *
 * @param view affected view
 * @param swap If will change state. If is "false" returns to the original position
 * @param swapRight If swap is true, this parameter tells if move is to the right or left
 * @param position Position of list
 */
private void generateDismissAnimate(final View view, final boolean swap, final boolean swapRight, final int position) {
    int moveTo = 0;
    if (opened.get(position)) {
        if (!swap) {
            // cancelled on an open row: slide back to its open offset
            moveTo = openedRight.get(position) ? (int) (viewWidth - rightOffset) : (int) (-viewWidth + leftOffset);
        }
    } else {
        if (swap) {
            // confirmed on a closed row: slide fully off in the swipe direction
            moveTo = swapRight ? (int) (viewWidth - rightOffset) : (int) (-viewWidth + leftOffset);
        }
    }
    int alpha = 1;
    if (swap) {
        // a confirmed dismiss fades the row out and is counted as pending
        ++dismissAnimationRefCount;
        alpha = 0;
    }
    animate(view)
            .translationX(moveTo)
            .alpha(alpha)
            .setDuration(animationTime)
            .setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    if (swap) {
                        closeOpenedItems();
                        performDismiss(view, position, true);
                    }
                    resetCell();
                }
            });
}
/**
 * Animates a reveal: slides the front view to its open offset or back to its
 * resting position, and (when swapping) toggles the opened/openedRight state
 * and fires the onOpened/onClosed callbacks in the end-listener.
 *
 * @param view affected view
 * @param swap If will change state. If "false" returns to the original position
 * @param swapRight If swap is true, this parameter tells if movement is toward right or left
 * @param position list position
 */
private void generateRevealAnimate(final View view, final boolean swap, final boolean swapRight, final int position) {
    int moveTo = 0;
    if (opened.get(position)) {
        if (!swap) {
            // cancelled on an open row: return to the open offset it started from
            moveTo = openedRight.get(position) ? (int) (viewWidth - rightOffset) : (int) (-viewWidth + leftOffset);
        }
    } else {
        if (swap) {
            // confirmed on a closed row: slide to the open offset for this direction
            moveTo = swapRight ? (int) (viewWidth - rightOffset) : (int) (-viewWidth + leftOffset);
        }
    }
    animate(view)
            .translationX(moveTo)
            .setDuration(animationTime)
            .setListener(new AnimatorListenerAdapter() {
                @Override
                public void onAnimationEnd(Animator animation) {
                    swipeListView.resetScrolling();
                    if (swap) {
                        boolean aux = !opened.get(position);
                        opened.set(position, aux);
                        if (aux) {
                            swipeListView.onOpened(position, swapRight);
                            openedRight.set(position, swapRight);
                        } else {
                            swipeListView.onClosed(position, openedRight.get(position));
                        }
                    }
                    resetCell();
                }
            });
}
/**
 * Clears the per-touch state (front/back view references, down position) after
 * an animation finishes, restoring the front view's clickability to match its
 * opened state.
 */
private void resetCell() {
    if (downPosition != ListView.INVALID_POSITION) {
        // Hack to fix issue https://github.com/47deg/android-swipelistview/issues/76
        swipeListView.resetScrolling();
        if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
            backView.setVisibility(View.VISIBLE);
        }
        frontView.setClickable(opened.get(downPosition));
        frontView.setLongClickable(opened.get(downPosition));
        frontView = null;
        backView = null;
        downPosition = ListView.INVALID_POSITION;
    }
}
/**
 * Enables or disables touch handling (disabled while the list is scrolling).
 * Note the inversion: the internal flag is "paused".
 *
 * @param enabled true to handle touches
 */
public void setEnabled(boolean enabled) {
    paused = !enabled;
}
/**
 * Builds the scroll listener that pauses swipe handling while the list moves,
 * optionally closes opened items on scroll, and reports first/last-item
 * visibility transitions to the SwipeListView.
 *
 * @return OnScrollListener
 */
public AbsListView.OnScrollListener makeScrollListener() {
    return new AbsListView.OnScrollListener() {
        private boolean isFirstItem = false;
        private boolean isLastItem = false;
        @Override
        public void onScrollStateChanged(AbsListView absListView, int scrollState) {
            setEnabled(scrollState != AbsListView.OnScrollListener.SCROLL_STATE_TOUCH_SCROLL);
            if (swipeClosesAllItemsWhenListMoves && scrollState == SCROLL_STATE_TOUCH_SCROLL) {
                closeOpenedItems();
            }
            if (scrollState == SCROLL_STATE_TOUCH_SCROLL) {
                listViewMoving = true;
                setEnabled(false);
            }
            if (scrollState != AbsListView.OnScrollListener.SCROLL_STATE_FLING && scrollState != SCROLL_STATE_TOUCH_SCROLL) {
                listViewMoving = false;
                downPosition = ListView.INVALID_POSITION;
                swipeListView.resetScrolling();
                // re-enable swiping slightly after the scroll settles to avoid
                // accidental swipes from the tail of the scroll gesture
                new Handler().postDelayed(new Runnable() {
                    public void run() {
                        setEnabled(true);
                    }
                }, 500);
            }
        }
        @Override
        public void onScroll(AbsListView view, int firstVisibleItem, int visibleItemCount, int totalItemCount) {
            // fire onFirstListItem / onLastListItem only on the transition,
            // not on every scroll tick
            if (isFirstItem) {
                boolean onSecondItemList = firstVisibleItem == 1;
                if (onSecondItemList) {
                    isFirstItem = false;
                }
            } else {
                boolean onFirstItemList = firstVisibleItem == 0;
                if (onFirstItemList) {
                    isFirstItem = true;
                    swipeListView.onFirstListItem();
                }
            }
            if (isLastItem) {
                boolean onBeforeLastItemList = firstVisibleItem + visibleItemCount == totalItemCount - 1;
                if (onBeforeLastItemList) {
                    isLastItem = false;
                }
            } else {
                boolean onLastItemList = firstVisibleItem + visibleItemCount >= totalItemCount;
                if (onLastItemList) {
                    isLastItem = true;
                    swipeListView.onLastListItem();
                }
            }
        }
    };
}
/**
 * Closes every currently visible item that is open.
 * NOTE(review): assumes "opened" covers all visible positions (kept in sync by
 * resetItems); a stale list would throw IndexOutOfBoundsException here.
 */
void closeOpenedItems() {
    if (opened != null) {
        int start = swipeListView.getFirstVisiblePosition();
        int end = swipeListView.getLastVisiblePosition();
        for (int i = start; i <= end; i++) {
            if (opened.get(i)) {
                closeAnimate(swipeListView.getChildAt(i - start).findViewById(swipeFrontView), i);
            }
        }
    }
}
/**
 * Handles the full swipe-gesture life-cycle for list rows.
 *
 * ACTION_DOWN locates the row under the finger and starts velocity
 * tracking; ACTION_MOVE decides whether a swipe has begun and drags the
 * front view; ACTION_UP decides whether to complete the gesture
 * (reveal/dismiss/choice) or let it revert.
 *
 * @see View.OnTouchListener#onTouch(android.view.View, android.view.MotionEvent)
 */
@Override
public boolean onTouch(View view, MotionEvent motionEvent) {
    if (!isSwipeEnabled()) {
        return false;
    }
    // The list width is 0 before the first layout pass; cache it lazily.
    if (viewWidth < 2) {
        viewWidth = swipeListView.getWidth();
    }
    switch (MotionEventCompat.getActionMasked(motionEvent)) {
        case MotionEvent.ACTION_DOWN: {
            // Ignore new touches while a previous interaction is paused.
            if (paused && downPosition != ListView.INVALID_POSITION) {
                return false;
            }
            swipeCurrentAction = SwipeListView.SWIPE_ACTION_NONE;
            int childCount = swipeListView.getChildCount();
            int[] listViewCoords = new int[2];
            swipeListView.getLocationOnScreen(listViewCoords);
            // Convert raw screen coordinates into list-local coordinates.
            int x = (int) motionEvent.getRawX() - listViewCoords[0];
            int y = (int) motionEvent.getRawY() - listViewCoords[1];
            View child;
            // Find the visible child that was hit and latch the gesture onto it.
            for (int i = 0; i < childCount; i++) {
                child = swipeListView.getChildAt(i);
                child.getHitRect(rect);
                int childPosition = swipeListView.getPositionForView(child);
                // dont allow swiping if this is on the header or footer or IGNORE_ITEM_VIEW_TYPE or enabled is false on the adapter
                boolean allowSwipe = swipeListView.getAdapter().isEnabled(childPosition) && swipeListView.getAdapter().getItemViewType(childPosition) >= 0;
                if (allowSwipe && rect.contains(x, y)) {
                    setParentView(child);
                    setFrontView(child.findViewById(swipeFrontView));
                    downX = motionEvent.getRawX();
                    downPosition = childPosition;
                    // An opened row must not also react to clicks/long-clicks.
                    frontView.setClickable(!opened.get(downPosition));
                    frontView.setLongClickable(!opened.get(downPosition));
                    velocityTracker = VelocityTracker.obtain();
                    velocityTracker.addMovement(motionEvent);
                    if (swipeBackView > 0) {
                        setBackView(child.findViewById(swipeBackView));
                    }
                    break;
                }
            }
            view.onTouchEvent(motionEvent);
            return true;
        }
        case MotionEvent.ACTION_UP: {
            // Fix for issue https://github.com/47deg/android-swipelistview/issues/41#issuecomment-45064712
            view.onTouchEvent(motionEvent);
            // Nothing to finish unless a swipe is actually in progress.
            if (velocityTracker == null || !swiping || downPosition == ListView.INVALID_POSITION) {
                break;
            }
            float deltaX = motionEvent.getRawX() - downX;
            velocityTracker.addMovement(motionEvent);
            // Velocity is computed in pixels per second.
            velocityTracker.computeCurrentVelocity(1000);
            float velocityX = Math.abs(velocityTracker.getXVelocity());
            if (!opened.get(downPosition)) {
                // Discard fling velocity in a direction the swipe mode forbids.
                if (swipeMode == SwipeListView.SWIPE_MODE_LEFT && velocityTracker.getXVelocity() > 0) {
                    velocityX = 0;
                }
                if (swipeMode == SwipeListView.SWIPE_MODE_RIGHT && velocityTracker.getXVelocity() < 0) {
                    velocityX = 0;
                }
            }
            float velocityY = Math.abs(velocityTracker.getYVelocity());
            boolean swap = false;
            boolean swapRight = false;
            // A predominantly horizontal fling can complete the gesture...
            if (minFlingVelocity <= velocityX && velocityX <= maxFlingVelocity && velocityY * 2 < velocityX) {
                swapRight = velocityTracker.getXVelocity() > 0;
                Log.d("SwipeListView", "swapRight: " + swapRight + " - swipingRight: " + swipingRight);
                if (swapRight != swipingRight && swipeActionLeft != swipeActionRight) {
                    swap = false;
                } else if (opened.get(downPosition) && openedRight.get(downPosition) && swapRight) {
                    swap = false;
                } else if (opened.get(downPosition) && !openedRight.get(downPosition) && !swapRight) {
                    swap = false;
                } else {
                    swap = true;
                }
            } else if (Math.abs(deltaX) > viewWidth / 2) {
                // ...as can dragging past half the row width.
                swap = true;
                swapRight = deltaX > 0;
            }
            generateAnimate(frontView, swap, swapRight, downPosition);
            if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
                swapChoiceState(downPosition);
            }
            // Release the tracker and reset per-gesture state.
            velocityTracker.recycle();
            velocityTracker = null;
            downX = 0;
            // change clickable front view
            // if (swap) {
            // frontView.setClickable(opened.get(downPosition));
            // frontView.setLongClickable(opened.get(downPosition));
            // }
            swiping = false;
            break;
        }
        case MotionEvent.ACTION_MOVE: {
            if (velocityTracker == null || paused || downPosition == ListView.INVALID_POSITION) {
                break;
            }
            velocityTracker.addMovement(motionEvent);
            velocityTracker.computeCurrentVelocity(1000);
            float velocityX = Math.abs(velocityTracker.getXVelocity());
            float velocityY = Math.abs(velocityTracker.getYVelocity());
            float deltaX = motionEvent.getRawX() - downX;
            float deltaMode = Math.abs(deltaX);
            // The list may override the swipe mode per position.
            int swipeMode = this.swipeMode;
            int changeSwipeMode = swipeListView.changeSwipeMode(downPosition);
            if (changeSwipeMode >= 0) {
                swipeMode = changeSwipeMode;
            }
            // Zero out the delta when the drag direction is not allowed by the mode.
            if (swipeMode == SwipeListView.SWIPE_MODE_NONE) {
                deltaMode = 0;
            } else if (swipeMode != SwipeListView.SWIPE_MODE_BOTH) {
                if (opened.get(downPosition)) {
                    if (swipeMode == SwipeListView.SWIPE_MODE_LEFT && deltaX < 0) {
                        deltaMode = 0;
                    } else if (swipeMode == SwipeListView.SWIPE_MODE_RIGHT && deltaX > 0) {
                        deltaMode = 0;
                    }
                } else {
                    if (swipeMode == SwipeListView.SWIPE_MODE_LEFT && deltaX > 0) {
                        deltaMode = 0;
                    } else if (swipeMode == SwipeListView.SWIPE_MODE_RIGHT && deltaX < 0) {
                        deltaMode = 0;
                    }
                }
            }
            // A mostly-horizontal drag past the touch slop starts the swipe.
            if (deltaMode > slop && swipeCurrentAction == SwipeListView.SWIPE_ACTION_NONE && velocityY < velocityX) {
                swiping = true;
                swipingRight = (deltaX > 0);
                Log.d("SwipeListView", "deltaX: " + deltaX + " - swipingRight: " + swipingRight);
                if (opened.get(downPosition)) {
                    swipeListView.onStartClose(downPosition, swipingRight);
                    swipeCurrentAction = SwipeListView.SWIPE_ACTION_REVEAL;
                } else {
                    // Choose the action from the configured per-direction action.
                    if (swipingRight && swipeActionRight == SwipeListView.SWIPE_ACTION_DISMISS) {
                        swipeCurrentAction = SwipeListView.SWIPE_ACTION_DISMISS;
                    } else if (!swipingRight && swipeActionLeft == SwipeListView.SWIPE_ACTION_DISMISS) {
                        swipeCurrentAction = SwipeListView.SWIPE_ACTION_DISMISS;
                    } else if (swipingRight && swipeActionRight == SwipeListView.SWIPE_ACTION_CHOICE) {
                        swipeCurrentAction = SwipeListView.SWIPE_ACTION_CHOICE;
                    } else if (!swipingRight && swipeActionLeft == SwipeListView.SWIPE_ACTION_CHOICE) {
                        swipeCurrentAction = SwipeListView.SWIPE_ACTION_CHOICE;
                    } else {
                        swipeCurrentAction = SwipeListView.SWIPE_ACTION_REVEAL;
                    }
                    swipeListView.onStartOpen(downPosition, swipeCurrentAction, swipingRight);
                }
                swipeListView.requestDisallowInterceptTouchEvent(true);
                // Cancel the list's own touch handling so it stops tracking this pointer.
                MotionEvent cancelEvent = MotionEvent.obtain(motionEvent);
                cancelEvent.setAction(MotionEvent.ACTION_CANCEL |
                        (MotionEventCompat.getActionIndex(motionEvent) << MotionEventCompat.ACTION_POINTER_INDEX_SHIFT));
                swipeListView.onTouchEvent(cancelEvent);
                if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
                    backView.setVisibility(View.GONE);
                }
            }
            if (swiping && downPosition != ListView.INVALID_POSITION) {
                // For an already-open row, offset the delta by the open position.
                if (opened.get(downPosition)) {
                    deltaX += openedRight.get(downPosition) ? viewWidth - rightOffset : -viewWidth + leftOffset;
                }
                move(deltaX);
                return true;
            }
            break;
        }
    }
    return false;
}
/**
 * Temporarily forces both swipe directions to the given action,
 * remembering the previous pair so {@code returnOldActions()} can
 * restore it later.
 *
 * @param action action to apply to both the left and the right swipe
 */
private void setActionsTo(int action) {
    oldSwipeActionLeft = swipeActionLeft;
    oldSwipeActionRight = swipeActionRight;
    swipeActionLeft = action;
    swipeActionRight = action;
}
/**
 * Restores the swipe actions saved by the last call to
 * {@code setActionsTo(int)}.
 */
protected void returnOldActions() {
    swipeActionLeft = oldSwipeActionLeft;
    swipeActionRight = oldSwipeActionRight;
}
/**
 * Moves the view while a swipe is in progress.
 *
 * Translates the row's front view (or the whole row for a dismiss) and
 * flips the swipe direction/current action whenever the view crosses
 * its resting position.
 *
 * @param deltaX delta
 */
public void move(float deltaX) {
    swipeListView.onMove(downPosition, deltaX);
    float posX = ViewHelper.getX(frontView);
    // For an already-open row, measure position relative to its open offset.
    if (opened.get(downPosition)) {
        posX += openedRight.get(downPosition) ? -viewWidth + rightOffset : viewWidth - leftOffset;
    }
    // Crossed from left to right: switch to the right-swipe action.
    if (posX > 0 && !swipingRight) {
        Log.d("SwipeListView", "change to right");
        swipingRight = !swipingRight;
        swipeCurrentAction = swipeActionRight;
        // Choice mode hides the back view; other modes reveal it.
        if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
            backView.setVisibility(View.GONE);
        } else {
            backView.setVisibility(View.VISIBLE);
        }
    }
    // Crossed from right to left: switch to the left-swipe action.
    if (posX < 0 && swipingRight) {
        Log.d("SwipeListView", "change to left");
        swipingRight = !swipingRight;
        swipeCurrentAction = swipeActionLeft;
        if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
            backView.setVisibility(View.GONE);
        } else {
            backView.setVisibility(View.VISIBLE);
        }
    }
    if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_DISMISS) {
        // Dismiss drags the whole row and fades it out proportionally.
        setTranslationX(parentView, deltaX);
        setAlpha(parentView, Math.max(0f, Math.min(1f,
                1f - 2f * Math.abs(deltaX) / viewWidth)));
    } else if (swipeCurrentAction == SwipeListView.SWIPE_ACTION_CHOICE) {
        // Choice mode clamps the drag to DISPLACE_CHOICE pixels.
        if ((swipingRight && deltaX > 0 && posX < DISPLACE_CHOICE)
                || (!swipingRight && deltaX < 0 && posX > -DISPLACE_CHOICE)
                || (swipingRight && deltaX < DISPLACE_CHOICE)
                || (!swipingRight && deltaX > -DISPLACE_CHOICE)) {
            setTranslationX(frontView, deltaX);
        }
    } else {
        setTranslationX(frontView, deltaX);
    }
}
/**
 * Value holder for a row whose dismiss animation is pending.
 *
 * Instances sort in descending position order so that processing higher
 * positions first does not shift the lower ones.
 */
class PendingDismissData implements Comparable<PendingDismissData> {
    public int position;
    public View view;

    public PendingDismissData(int position, View view) {
        this.position = position;
        this.view = view;
    }

    @Override
    public int compareTo(PendingDismissData other) {
        // Sort by descending position. Integer.compare avoids the
        // integer-overflow pitfall of the subtraction idiom
        // (other.position - position) for extreme values.
        return Integer.compare(other.position, position);
    }
}
/**
 * Perform dismiss action.
 *
 * Animates the row's height down to 1px over {@code animationTime}.
 * When {@code doPendingDismiss} is true, the shared animation
 * ref-count is decremented at the end of the animation and, once it
 * reaches zero, all queued dismisses are flushed at once.
 *
 * @param dismissView View
 * @param dismissPosition Position of list
 * @param doPendingDismiss whether this animation participates in the
 *        ref-counted batch flush of pending dismisses
 */
protected void performDismiss(final View dismissView, final int dismissPosition, boolean doPendingDismiss) {
    final ViewGroup.LayoutParams lp = dismissView.getLayoutParams();
    final int originalHeight = dismissView.getHeight();
    // Shrink the height from its current value down to 1.
    ValueAnimator animator = ValueAnimator.ofInt(originalHeight, 1).setDuration(animationTime);
    if (doPendingDismiss) {
        animator.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationEnd(Animator animation) {
                // Flush the batch only when the last in-flight animation ends.
                --dismissAnimationRefCount;
                if (dismissAnimationRefCount == 0) {
                    removePendingDismisses(originalHeight);
                }
            }
        });
    }
    animator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
        @Override
        public void onAnimationUpdate(ValueAnimator valueAnimator) {
            // Apply the animated height on every frame.
            lp.height = (Integer) valueAnimator.getAnimatedValue();
            dismissView.setLayoutParams(lp);
        }
    });
    pendingDismisses.add(new PendingDismissData(dismissPosition, dismissView));
    animator.start();
}
/**
 * Discards all queued dismiss entries without touching their views.
 */
protected void resetPendingDismisses() {
    pendingDismisses.clear();
}
/**
 * Schedules {@code removePendingDismisses(int)} to run shortly after the
 * dismiss animation has finished (animationTime plus a small margin).
 *
 * @param originalHeight height to restore on the dismissed row views
 */
protected void handlerPendingDismisses(final int originalHeight) {
    Runnable flushPending = new Runnable() {
        @Override
        public void run() {
            removePendingDismisses(originalHeight);
        }
    };
    new Handler().postDelayed(flushPending, animationTime + 100);
}
/**
 * Flushes all pending dismisses: notifies the list of the dismissed
 * positions and resets each affected view so it can be recycled.
 *
 * @param originalHeight height to restore on each dismissed row view
 */
private void removePendingDismisses(int originalHeight) {
    // No active animations, process all pending dismisses.
    // PendingDismissData sorts by descending position, so higher
    // positions are reported first and removals do not shift the rest.
    Collections.sort(pendingDismisses);
    int count = pendingDismisses.size();
    int[] dismissPositions = new int[count];
    for (int index = 0; index < count; index++) {
        dismissPositions[index] = pendingDismisses.get(index).position;
    }
    swipeListView.onDismiss(dismissPositions);
    for (PendingDismissData pending : pendingDismisses) {
        if (pending.view != null) {
            // Undo the dismiss animation so the recycled view renders normally.
            setAlpha(pending.view, 1f);
            setTranslationX(pending.view, 0);
            ViewGroup.LayoutParams lp = pending.view.getLayoutParams();
            lp.height = originalHeight;
            pending.view.setLayoutParams(lp);
        }
    }
    resetPendingDismisses();
}
}
| |
/**
* JavaFX application which interacts with the Google Maps API to provide a
* mapping interface with which to test and develop graph algorithms and data
* structures
*
* @author UCSD MOOC development team
*
*/
package application;
import javafx.application.Application;
import javafx.geometry.Insets;
import javafx.geometry.Pos;
import javafx.scene.Scene;
import javafx.scene.control.Alert;
import javafx.scene.control.Alert.AlertType;
import javafx.scene.control.Button;
import javafx.scene.control.ComboBox;
import javafx.scene.control.Label;
import javafx.scene.control.RadioButton;
import javafx.scene.control.Tab;
import javafx.scene.control.TabPane;
import javafx.scene.control.TextField;
import javafx.scene.control.ToggleGroup;
import javafx.scene.image.Image;
import javafx.scene.image.ImageView;
import javafx.scene.layout.BorderPane;
import javafx.scene.layout.HBox;
import javafx.scene.layout.StackPane;
import javafx.scene.layout.VBox;
import javafx.scene.text.Font;
import javafx.scene.text.Text;
import javafx.scene.web.*;
import javafx.stage.Modality;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import application.controllers.FetchController;
import application.controllers.RouteController;
import application.services.GeneralService;
import application.services.RouteService;
import gmapsfx.GoogleMapView;
import gmapsfx.MapComponentInitializedListener;
import gmapsfx.javascript.object.GoogleMap;
import gmapsfx.javascript.object.LatLong;
import gmapsfx.javascript.object.MapOptions;
import gmapsfx.javascript.object.MapTypeIdEnum;
public class MapApp extends Application
        implements MapComponentInitializedListener {

    // Map widget and the map handle it produces once the embedded
    // JavaScript map reports that it is initialized (mapInitialized()).
    protected GoogleMapView mapComponent;
    protected GoogleMap map;
    // Root layout container and the primary window, kept for dialogs.
    protected BorderPane bp;
    protected Stage primaryStage;

    // CONSTANTS
    private static final double MARGIN_VAL = 10;
    private static final double FETCH_COMPONENT_WIDTH = 160.0;

    public static void main(String[] args) {
        launch(args);
    }

    /**
     * Application entry point: builds the whole UI, wires the route tab
     * controls, and defers controller creation until the map is ready.
     */
    @Override
    public void start(Stage primaryStage) throws Exception {
        this.primaryStage = primaryStage;
        // MAIN CONTAINER
        bp = new BorderPane();
        // set up map
        mapComponent = new GoogleMapView();
        mapComponent.addMapInitializedListener(this);
        // initialize tabs for data fetching and route controls
        Tab routeTab = new Tab("Routing");
        // create components for fetch tab
        Button fetchButton = new Button("Fetch Data");
        Button displayButton = new Button("Show Intersections");
        TextField tf = new TextField();
        ComboBox<DataSet> cb = new ComboBox<DataSet>();
        // set on mouse pressed, this fixes Windows 10 / Surface bug
        cb.setOnMousePressed(e -> {
            cb.requestFocus();
        });
        HBox fetchControls = getBottomBox(tf, fetchButton);
        VBox fetchBox = getFetchBox(displayButton, cb);
        // create components for route tab
        Button routeButton = new Button("Show Route");
        Button hideRouteButton = new Button("Hide Route");
        Button resetButton = new Button("Reset");
        Button visualizationButton = new Button("Start Visualization");
        Image sImage = new Image(MarkerManager.startURL);
        Image dImage = new Image(MarkerManager.destinationURL);
        CLabel<geography.GeographicPoint> startLabel = new CLabel<geography.GeographicPoint>("Empty.", new ImageView(sImage), null);
        CLabel<geography.GeographicPoint> endLabel = new CLabel<geography.GeographicPoint>("Empty.", new ImageView(dImage), null);
        //TODO -- hot fix
        startLabel.setMinWidth(180);
        endLabel.setMinWidth(180);
        // startLabel.setWrapText(true);
        // endLabel.setWrapText(true);
        Button startButton = new Button("Start");
        Button destinationButton = new Button("Dest");
        // Radio buttons for selecting search algorithm
        final ToggleGroup group = new ToggleGroup();
        List<RadioButton> searchOptions = setupToggle(group);
        // Select and marker managers for route choosing and marker display/visuals
        // should only be one instance (singleton)
        SelectManager manager = new SelectManager();
        MarkerManager markerManager = new MarkerManager();
        // Cross-wire the two managers so each can reach the other.
        markerManager.setSelectManager(manager);
        manager.setMarkerManager(markerManager);
        markerManager.setVisButton(visualizationButton);
        // create components for route tab
        CLabel<geography.GeographicPoint> pointLabel = new CLabel<geography.GeographicPoint>("No point Selected.", null);
        manager.setPointLabel(pointLabel);
        manager.setStartLabel(startLabel);
        manager.setDestinationLabel(endLabel);
        setupRouteTab(routeTab, fetchBox, startLabel, endLabel, pointLabel, routeButton, hideRouteButton,
                resetButton, visualizationButton, startButton, destinationButton, searchOptions);
        // add tabs to pane, give no option to close
        TabPane tp = new TabPane(routeTab);
        tp.setTabClosingPolicy(TabPane.TabClosingPolicy.UNAVAILABLE);
        // initialize Services and controllers after map is loaded
        mapComponent.addMapReadyListener(() -> {
            GeneralService gs = new GeneralService(mapComponent, manager, markerManager);
            RouteService rs = new RouteService(mapComponent, markerManager);
            //System.out.println("in map ready : " + this.getClass());
            // initialize controllers; held only by their event hooks,
            // so the constructor return values are intentionally unused
            new RouteController(rs, routeButton, hideRouteButton, resetButton, startButton, destinationButton, group, searchOptions, visualizationButton,
                    startLabel, endLabel, pointLabel, manager, markerManager);
            new FetchController(gs, rs, tf, fetchButton, cb, displayButton);
        });
        // add components to border pane
        bp.setRight(tp);
        bp.setBottom(fetchControls);
        bp.setCenter(mapComponent);
        Scene scene = new Scene(bp);
        scene.getStylesheets().add("html/routing.css");
        primaryStage.setScene(scene);
        primaryStage.show();
    }

    /**
     * Called by the map component once the JavaScript map engine has
     * loaded; creates the map with its initial options.
     */
    @Override
    public void mapInitialized() {
        // Initial center near the UCSD campus.
        LatLong center = new LatLong(32.8810, -117.2380);
        // set map options
        MapOptions options = new MapOptions();
        options.center(center)
                .mapMarker(false)
                .mapType(MapTypeIdEnum.ROADMAP)
                //maybe set false
                .mapTypeControl(true)
                .overviewMapControl(false)
                .panControl(true)
                .rotateControl(false)
                .scaleControl(false)
                .streetViewControl(false)
                .zoom(14)
                .zoomControl(true);
        // create map;
        map = mapComponent.createMap(options);
        setupJSAlerts(mapComponent.getWebView());
    }

    // SETTING UP THE VIEW

    /**
     * Builds the bottom bar holding the text field and fetch button.
     *
     * @param tf text field for the map-file name
     * @param fetchButton button that triggers data fetching
     * @return the populated bottom bar
     */
    private HBox getBottomBox(TextField tf, Button fetchButton) {
        HBox box = new HBox();
        tf.setPrefWidth(FETCH_COMPONENT_WIDTH);
        box.getChildren().add(tf);
        fetchButton.setPrefWidth(FETCH_COMPONENT_WIDTH);
        box.getChildren().add(fetchButton);
        return box;
    }

    /**
     * Setup layout and controls for Fetch tab.
     *
     * @param displayButton button to show intersections
     * @param cb combo box of loadable data sets
     * @return the populated fetch box (a VBox)
     */
    private VBox getFetchBox(Button displayButton, ComboBox<DataSet> cb) {
        // add button to tab, rethink design and add V/HBox for content
        VBox v = new VBox();
        // NOTE(review): h is created and v is added to it, but h itself is
        // never used or returned — looks like dead scaffolding; confirm.
        HBox h = new HBox();
        HBox intersectionControls = new HBox();
        // cb.setMinWidth(displayButton.getWidth());
        cb.setPrefWidth(FETCH_COMPONENT_WIDTH);
        intersectionControls.getChildren().add(cb);
        displayButton.setPrefWidth(FETCH_COMPONENT_WIDTH);
        intersectionControls.getChildren().add(displayButton);
        h.getChildren().add(v);
        v.getChildren().add(new Label("Choose map file : "));
        v.getChildren().add(intersectionControls);
        //v.setSpacing(MARGIN_VAL);
        return v;
    }

    /**
     * Setup layout of route tab and controls.
     *
     * Assembles the start/destination rows, show/hide buttons, search
     * algorithm radio buttons, visualization button and selected-marker
     * display into the routing tab.
     */
    private void setupRouteTab(Tab routeTab, VBox fetchBox, Label startLabel, Label endLabel, Label pointLabel,
            Button showButton, Button hideButton, Button resetButton, Button vButton, Button startButton,
            Button destButton, List<RadioButton> searchOptions) {
        //set up tab layout
        HBox h = new HBox();
        // v is inner container
        VBox v = new VBox();
        h.getChildren().add(v);
        VBox selectLeft = new VBox();
        selectLeft.getChildren().add(startLabel);
        // Row: start point label + "Start" button.
        HBox startBox = new HBox();
        startBox.getChildren().add(startLabel);
        startBox.getChildren().add(startButton);
        startBox.setSpacing(20);
        // Row: destination label + "Dest" button.
        HBox destinationBox = new HBox();
        destinationBox.getChildren().add(endLabel);
        destinationBox.getChildren().add(destButton);
        destinationBox.setSpacing(20);
        // Column: currently selected marker display.
        VBox markerBox = new VBox();
        Label markerLabel = new Label("Selected Marker : ");
        markerBox.getChildren().add(markerLabel);
        markerBox.getChildren().add(pointLabel);
        VBox.setMargin(markerLabel, new Insets(MARGIN_VAL, MARGIN_VAL, MARGIN_VAL, MARGIN_VAL));
        VBox.setMargin(pointLabel, new Insets(0, MARGIN_VAL, MARGIN_VAL, MARGIN_VAL));
        VBox.setMargin(fetchBox, new Insets(0, 0, MARGIN_VAL * 2, 0));
        HBox showHideBox = new HBox();
        showHideBox.getChildren().add(showButton);
        showHideBox.getChildren().add(hideButton);
        showHideBox.setSpacing(2 * MARGIN_VAL);
        v.getChildren().add(fetchBox);
        v.getChildren().add(new Label("Start Position : "));
        v.getChildren().add(startBox);
        v.getChildren().add(new Label("Goal : "));
        v.getChildren().add(destinationBox);
        v.getChildren().add(showHideBox);
        for (RadioButton rb : searchOptions) {
            v.getChildren().add(rb);
        }
        v.getChildren().add(vButton);
        VBox.setMargin(showHideBox, new Insets(MARGIN_VAL, MARGIN_VAL, MARGIN_VAL, MARGIN_VAL));
        VBox.setMargin(vButton, new Insets(MARGIN_VAL, MARGIN_VAL, MARGIN_VAL, MARGIN_VAL));
        // Disabled until a route search makes visualization meaningful.
        vButton.setDisable(true);
        v.getChildren().add(markerBox);
        //v.getChildren().add(resetButton);
        routeTab.setContent(h);
    }

    /**
     * Routes JavaScript alert() calls from the embedded web view into a
     * small modal JavaFX popup.
     */
    private void setupJSAlerts(WebView webView) {
        webView.getEngine().setOnAlert(e -> {
            Stage popup = new Stage();
            popup.initOwner(primaryStage);
            popup.initStyle(StageStyle.UTILITY);
            popup.initModality(Modality.WINDOW_MODAL);
            StackPane content = new StackPane();
            content.getChildren().setAll(
                    new Label(e.getData())
            );
            content.setPrefSize(200, 100);
            popup.setScene(new Scene(content));
            popup.showAndWait();
        });
    }

    /**
     * Creates the search-algorithm radio buttons and registers them in
     * the given toggle group.
     *
     * @param group toggle group that enforces single selection
     * @return the buttons in display order (BFS, Dijkstra, A*)
     */
    private LinkedList<RadioButton> setupToggle(ToggleGroup group) {
        // Use Dijkstra as default
        RadioButton rbD = new RadioButton("Dijkstra");
        rbD.setUserData("Dijkstra");
        rbD.setSelected(true);
        RadioButton rbA = new RadioButton("A*");
        rbA.setUserData("A*");
        RadioButton rbB = new RadioButton("BFS");
        rbB.setUserData("BFS");
        rbB.setToggleGroup(group);
        rbD.setToggleGroup(group);
        rbA.setToggleGroup(group);
        return new LinkedList<RadioButton>(Arrays.asList(rbB, rbD, rbA));
    }

    /*
     * METHODS FOR SHOWING DIALOGS/ALERTS
     */

    /**
     * Shows a modal loading window containing the given message text.
     *
     * @param loadStage stage to configure and show
     * @param text message displayed in the window
     */
    public void showLoadStage(Stage loadStage, String text) {
        loadStage.initModality(Modality.APPLICATION_MODAL);
        loadStage.initOwner(primaryStage);
        VBox loadVBox = new VBox(20);
        loadVBox.setAlignment(Pos.CENTER);
        Text tNode = new Text(text);
        tNode.setFont(new Font(16));
        // Empty HBoxes act as top/bottom spacers around the text.
        loadVBox.getChildren().add(new HBox());
        loadVBox.getChildren().add(tNode);
        loadVBox.getChildren().add(new HBox());
        Scene loadScene = new Scene(loadVBox, 300, 200);
        loadStage.setScene(loadScene);
        loadStage.show();
    }

    /**
     * Builds and immediately shows an information alert.
     */
    public static void showInfoAlert(String header, String content) {
        Alert alert = getInfoAlert(header, content);
        alert.showAndWait();
    }

    /**
     * Builds (but does not show) an information alert.
     *
     * @param header alert header text
     * @param content alert body text
     * @return the configured alert
     */
    public static Alert getInfoAlert(String header, String content) {
        Alert alert = new Alert(AlertType.INFORMATION);
        alert.setTitle("Information");
        alert.setHeaderText(header);
        alert.setContentText(content);
        return alert;
    }

    /**
     * Shows a blocking error alert (titled "File Name Error").
     */
    public static void showErrorAlert(String header, String content) {
        Alert alert = new Alert(AlertType.ERROR);
        alert.setTitle("File Name Error");
        alert.setHeaderText(header);
        alert.setContentText(content);
        alert.showAndWait();
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.njfsoft_utils.cutOuts;
import com.quickorder.R;
import java.util.HashMap;
import android.os.Environment;
import java.io.IOException;
import java.util.Collection;
import android.app.Activity;
import android.content.ContentValues;
import android.content.ContentUris;
import android.content.Context;
import android.content.Intent;
import android.content.res.Resources;
import android.content.SharedPreferences;
import android.database.Cursor;
import android.graphics.*;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Paint;
import android.graphics.Point;
import android.graphics.PointF;
import android.graphics.Rect;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.AnimationDrawable;
import android.graphics.drawable.BitmapDrawable;
import android.net.Uri;
import android.widget.ImageButton;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.media.MediaRecorder;
import android.media.MediaPlayer;
import android.os.AsyncTask;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.provider.MediaStore;
import android.util.Log;
import android.view.animation.AnimationUtils;
import android.view.animation.Animation;
import android.view.animation.*;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.Window;
import android.view.LayoutInflater;
import android.view.ViewGroup.LayoutParams;
import org.apache.http.HttpResponse;
import org.apache.http.NameValuePair;
import org.apache.http.client.HttpClient;
import org.apache.http.client.entity.UrlEncodedFormEntity;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.impl.client.DefaultHttpClient;
import org.apache.http.message.BasicNameValuePair;
import org.apache.http.protocol.BasicHttpContext;
import org.apache.http.protocol.HttpContext;
import org.apache.http.entity.mime.HttpMultipartMode;
import org.apache.http.entity.mime.MultipartEntity;
import org.apache.http.entity.mime.content.FileBody;
import org.apache.http.entity.mime.content.StringBody;
import org.json.JSONArray;
import org.json.JSONObject;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.InputStream;
import android.speech.tts.TextToSpeech;
import android.speech.tts.TextToSpeech.OnUtteranceCompletedListener;
import android.view.WindowManager;
import android.widget.ZoomControls;
import android.widget.Toast;
import android.widget.ImageView;
import android.widget.TextView;
import android.graphics.BitmapFactory;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;
import com.njfsoft_utils.cutOuts.JSI_CutOuts;
import com.njfsoft_utils.artpad.filters.ColorFilterGenerator;
import com.njfsoft_utils.anim.AnimationDrawableCallback;
import com.njfsoft_utils.anim.AnimEngine;
import com.njfsoft_utils.anim.AnimatedGifRecorder;
import com.njfsoft_utils.anim.AnimFrameSingleton;
import com.njfsoft_utils.anim.AnimMovBuilder;
import com.njfsoft_utils.anim.AnimMovSingleton;
import com.njfsoft_utils.anim.AnimMovSoundPool;
import com.njfsoft_utils.anim.AnimView;
import com.njfsoft_utils.anim.AnimBGView;
import com.njfsoft_utils.anim.AnimPrevView;
import com.njfsoft_utils.anim.MPFourRecorder;
import com.njfsoft_utils.anim.UtilsBitmap;
import com.njfsoft_utils.core.Base64;
import com.njfsoft_utils.core.OnTaskExecutionFinished;
import com.njfsoft_utils.camcapture.IDecoderActivity;
import com.njfsoft_utils.camcapture.DecoderActivityHandler;
import com.njfsoft_utils.camcapture.CameraManager;
import com.njfsoft_utils.camcapture.VerticalTextView;
import com.njfsoft_utils.shareutil.ShareDataResult;
import com.njfsoft_utils.webviewutil.UtilWebDialog;
import com.uraroji.garage.android.mp3recvoice.RecMicToMp3;
import java.io.ByteArrayOutputStream;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.Set;
/**
* Example Decoder Activity.
* IDecoderActivity,
* @author Justin Wetherell (phishman3579@gmail.com)
*/
public class CutOuts extends Activity implements IDecoderActivity, SurfaceHolder.Callback, TextToSpeech.OnInitListener {
// --- Helpers and utilities ---
UtilsBitmap utilsBitmap;
ColorFilterGenerator mednFilter;
public UtilWebDialog utilWDialog;
private static final String TAG = CutOuts.class.getSimpleName();

// --- Decoder / camera state ---
DecoderActivityHandler handler = null;
CutOutsView viewfinderView = null;
CameraManager cameraManager = null;
boolean hasSurface = false;
byte[] currImgByte = null;
String characterSet = null;

// --- Current media capture metadata ---
// currMovType is one of "mp4", "gif" or "jpeg" (set by the capture buttons).
String currGiphyGID;
String currMovType = "mp4";
String currMovFName;
String currMovUri;
String currMovPath;
int currentZoomLevel = 0, maxZoomLevel = 0;

// --- Recording / playback ---
private boolean isRecording = false;
MediaPlayer mPlayer;
private MediaRecorder mMediaRecorder;
SurfaceView surfaceView;
SurfaceHolder surfaceHolder;
TextView vtv;
private boolean mPreviewRunning = false;
private boolean mCaptureFrame = false;
AnimatedGifRecorder gagRecorder;
MPFourRecorder agRecorder;
Bitmap currFBmap;
RecMicToMp3 mRecMicToMp3;

// --- Animation views and engines ---
AnimView gifView;
AnimBGView gifBgView;
AnimPrevView gifPrevView;
AnimMovBuilder animMovBuilder;
AnimMovSoundPool animMovSoundPool;
RelativeLayout lnrLyGBGView;
RelativeLayout lnrLyGPView;
AnimEngine animEngine;
AnimationDrawable gifAnimation = null;
AnimationDrawable gifBgAnimation = null;
Paint ctClrPaint;
Animation animPropA;
Animation animPropB;
AnimationSet animSet;

// --- UI controls (bound in onCreate) ---
TextView stv;
Bitmap bmpMainCanvImg;
TextView tvAPVEdit;
TextView tvAPVPreview;
TextView tvAPVSave;
TextView tvAPVClear;
TextView btnCam;
TextView btnVid;
TextView btnCreate;
TextView btnGifVid;
TextView btnSettings;

// --- Movie recording bookkeeping ---
Timer tmrMovRec;
int iMovStartTstamp;
long lMovStartTstamp;
ArrayList<AnimFrameSingleton> arrAnimFSing;
int iPrepArrPFrame = 0;
int iMovFDelay = 400;
int iAnimFrmIdx;
int iBgAnimFrmIdx;
int iMovWidth = 0;
int iMovHeight = 0;

// --- Activity results, settings and misc state ---
private final static int INT_RES_EDIT_IMG = 5;
int currEditImgIndx;
Bundle extras;
Bundle retExtras;
TextToSpeech cuMTts;
JSONObject cumetaObject;
SharedPreferences configCUSettings;
Bundle currCUConfBundle;
private SharedPreferences.Editor configCUEditor;

// Frame sequence used by the built-in demo animation.
private static final int[] IMAGE_RESOURCES = { R.drawable.an_drag1,
        R.drawable.an_drag2, R.drawable.an_drag3, R.drawable.an_drag4,
        R.drawable.an_drag5, R.drawable.an_drag6, R.drawable.an_drag7,
        R.drawable.an_drag8, R.drawable.an_drag9, R.drawable.an_drag10,
        R.drawable.an_drag11, R.drawable.an_drag12, R.drawable.an_drag13 };
// Milliseconds per animation frame. NOTE(review): a stale comment here
// previously said 200ms; the actual value is 500.
private static final int ANIMATION_INTERVAL = 500;
/**
 * Lazily binds the animation view and (re)builds the tint paint.
 *
 * The AnimView lookup, scaling setup and frame loading happen only on
 * the first call; the color filter and paint are recreated every call.
 */
public void loadGifView() {
    boolean firstBind = (gifView == null);
    if (firstBind) {
        gifView = (AnimView) findViewById(R.id.gif_view);
        gifView.setAdjustViewBounds(true);
        gifView.setScaleType(ImageView.ScaleType.CENTER_INSIDE);
        animEngine = AnimEngine.getInstance(gifView);
        animEngine.addAllFrames(IMAGE_RESOURCES, ANIMATION_INTERVAL);
    }
    // animEngine.start();
    mednFilter = new com.njfsoft_utils.artpad.filters.ColorFilterGenerator();
    ctClrPaint = new Paint();
    // Tint values preserved exactly from the original implementation.
    ctClrPaint.setColorFilter(mednFilter.adjustColor(30, 15, 95, 95));
}
// Placeholder for property-animation setup; intentionally empty for now.
public void startAnimProps(){
}
@Override
public void onCreate(Bundle icicle) {
super.onCreate(icicle);
setContentView(R.layout.com_njfsoft_utils_cutouts);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN, WindowManager.LayoutParams.FLAG_FULLSCREEN);
animMovBuilder = new AnimMovBuilder(this,this);
Log.v(TAG, "onCreate()");
configCUSettings = this.getPreferences(MODE_WORLD_WRITEABLE);
configCUEditor = configCUSettings.edit();
currCUConfBundle = getCUConfBundle();
try {
File extBaseDir = Environment.getExternalStorageDirectory();
File file = new File(extBaseDir.getAbsoluteFile()+ File.separator + "quick-order");
if(!file.exists()){
file.mkdirs();
}
} catch (Exception e) {
e.printStackTrace();
}
extras = getIntent().getExtras();
animMovSoundPool = new AnimMovSoundPool(this);
// gifView = (AnimView) findViewById(R.id.gif_view);
loadGifView();
// gifAnimation = (AnimationDrawable) gifView.getBackground();
viewfinderView = (CutOutsView) findViewById(R.id.cutouts_view);
currMovType = "mp4";
currMovFName = "outa";
currMovUri = "noQvalue";
currMovPath = "noQvalue";
currEditImgIndx = 0;
cumetaObject = new JSONObject();
vtv = (TextView) findViewById(R.id.status_view);
/*
vtv.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
currMovType = "gif";
onRecClick(view);
// takeAPicture();
} catch (Exception e) {
}
}
});
*/
arrAnimFSing = new ArrayList<AnimFrameSingleton>();
btnCreate = (TextView) findViewById(R.id.btn_create);
btnVid = (TextView) findViewById(R.id.btn_vid);
btnGifVid = (TextView) findViewById(R.id.btn_gifvid);
btnCam = (TextView) findViewById(R.id.btn_cam);
btnSettings = (TextView) findViewById(R.id.btn_settings);
btnVid.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
currMovType = "mp4";
onRecClick(view);
}
});
btnGifVid.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
currMovType = "gif";
onRecClick(view);
}
});
btnCam.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
currMovType = "jpeg";
cameraManager.getCamera().autoFocus(new AutoFocusCallback() {
public void onAutoFocus(boolean success, Camera camera) {
if(success){
takeAPicture();
}
}
});
}
});
btnSettings.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
setPagePopUp("quickorder/media_settings.html","noQvalue");
}
});
tvAPVPreview = (TextView) findViewById(R.id.btnAPVPreview);
tvAPVPreview.setVisibility(View.INVISIBLE);
tvAPVPreview.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
AnimMovSingleton tmpAMS = animMovBuilder.getAnimMovSing();
if(tmpAMS != null) {
setToggleAPViewBtns(false);
boolean isGTPV = gifPrevView.setAnimMovSing(tmpAMS);
playFile();
}
} catch (Exception e) {
System.out.println("tvAPVPreview.error: " + e.toString());
}
}
});
tvAPVEdit = (TextView) findViewById(R.id.btnAPVEdit);
tvAPVEdit.setVisibility(View.INVISIBLE);
tvAPVEdit.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
if(currMovType == "jpeg") {
prepArtPad();
} else {
prepMovEditPop();
}
} catch (Exception e) {
System.out.println("tvAPVEdit.error: " + e.toString());
}
}
});
tvAPVSave = (TextView) findViewById(R.id.btnAPVSave);
tvAPVSave.setVisibility(View.INVISIBLE);
tvAPVSave.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
System.out.println("tvAPVSave.type: " + currMovType);
doMediaResult();
} catch (Exception e) {
System.out.println("tvAPVSave.error: " + e.toString());
}
}
});
tvAPVClear = (TextView) findViewById(R.id.btnAPVClear);
tvAPVClear.setVisibility(View.INVISIBLE);
tvAPVClear.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
gifBgView.clearBmap();
setToggleAPViewBtns(false);
AnimMovSingleton anms = AnimMovSingleton.getInstance();
anms.clearAMS();
arrAnimFSing = null;
arrAnimFSing = new ArrayList<AnimFrameSingleton>();
} catch (Exception e) {
System.out.println("tvAPVClear.error: " + e.toString());
}
}
});
stv = (TextView) findViewById(R.id.status_stop);
/*
stv.setOnClickListener(new View.OnClickListener() {
public void onClick(View view) {
try {
prepMovCrunch();
} catch (Exception e) {
System.out.println("stv.error: " + e.toString());
}
}
});
*/
gifBgView = new AnimBGView(this, this);
gifBgView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
lnrLyGBGView = (RelativeLayout) this.findViewById(R.id.gbgv_main);
lnrLyGBGView.addView(gifBgView);
gifPrevView = new AnimPrevView(this, this);
gifPrevView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
lnrLyGPView = (RelativeLayout) this.findViewById(R.id.gpv_main);
lnrLyGPView.addView(gifPrevView);
// gifPrevView.setBackgroundResource(R.drawable.com_elastic_pad_games_selfielander_lander_firing);
// addContentView(gifPrevView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
utilsBitmap = new UtilsBitmap(this,this);
agRecorder = new MPFourRecorder(this,this);
gagRecorder = new AnimatedGifRecorder(this,this);
// agRecorder = new AnimatedGifRecorder(this,this);
Window window = getWindow();
window.addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
bmpMainCanvImg = null;
handler = null;
hasSurface = false;
mPlayer = new MediaPlayer();
iMovStartTstamp = 0;
lMovStartTstamp = 11;
currGiphyGID = "123";
preparePagePopUps("quickorder/blank.html", "noQvalue");
if (extras != null) {
if (extras.containsKey("apmode")) {
currMovType = extras.getString("apmode");
System.out.println("extras.apmode: " + currMovType);
}
if (extras.containsKey("apmeta")) {
setCUAPmeta(extras.getString("apmeta"));
System.out.println("CutOuts:extras.containsKey APMETA: " + extras.getString("apmeta"));
animMovBuilder.setAPmeta(extras.getString("apmeta"));
}
if (extras.containsKey("apfile")) {
currMovFName = extras.getString("apfile");
}
}
cuMTts = new TextToSpeech(this, this); // TextToSpeech.OnInitListener
}
// Activity teardown. Camera/recorder cleanup happens in onPause(), not here.
@Override
protected void onDestroy() {
super.onDestroy();
Log.v(TAG, "onDestroy()");
}
// Re-acquires the camera and (re)builds the overlay views each time the
// activity returns to the foreground. Order matters: camera manager, then
// viewfinder, then the preview surface, then the animation views.
@Override
protected void onResume() {
try {
super.onResume();
Log.v(TAG, "onResume()");
loadGifView();
// CameraManager must be initialized here, not in onCreate().
if (cameraManager == null) cameraManager = new CameraManager(getApplication());
// cameraManager.setManualFramingRect(280, 280);
if (viewfinderView == null) {
viewfinderView = (CutOutsView) findViewById(R.id.cutouts_view);
viewfinderView.setCameraManager(cameraManager);
}
showScanner();
surfaceView = (SurfaceView) findViewById(R.id.preview_view);
// surfaceView.setZOrderOnTop(true); // necessary
surfaceHolder = surfaceView.getHolder();
// surfaceHolder.setFormat(PixelFormat.TRANSPARENT);
if (hasSurface) {
// The activity was paused but not stopped, so the surface still
// exists. Therefore
// surfaceCreated() won't be called, so init the camera here.
initCamera(surfaceHolder);
} else {
// Install the callback and wait for surfaceCreated() to init the
// camera.
surfaceHolder.addCallback(this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
} catch(Exception e) {
System.out.println("onResume.CutOuts.error: " + e);
e.printStackTrace();
}
// Lazily (re)create the background animation view if it was torn down.
if (gifBgView == null) {
gifBgView = new AnimBGView(this, this);
gifBgView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
lnrLyGBGView = (RelativeLayout) this.findViewById(R.id.gbgv_main);
lnrLyGBGView.addView(gifBgView);
}
// Same lazy re-creation for the preview view.
if (gifPrevView == null) {
gifPrevView = new AnimPrevView(this, this);
gifPrevView.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
// gifPrevView.setBackgroundResource(R.drawable.com_elastic_pad_games_selfielander_lander_firing);
lnrLyGPView = (RelativeLayout) this.findViewById(R.id.gpv_main);
lnrLyGPView.addView(gifPrevView);
}
// gifPrevView = new AnimPrevView(this, this);
// lnrLyGPView = (LinearLayout) this.findViewById(R.id.gpv_main);
// lnrLyGPView.addView(gifPrevView);
// addContentView(gifPrevView, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
}
// Releases the decode handler and the camera driver when the activity loses
// the foreground; also detaches the surface callback if the surface is gone.
// NOTE(review): assumes cameraManager is non-null here (it is created in
// onResume()) - confirm onPause() cannot run before the first onResume().
@Override
protected void onPause() {
super.onPause();
Log.v(TAG, "onPause()");
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
}
/*
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {
if (keyCode == KeyEvent.KEYCODE_FOCUS || keyCode == KeyEvent.KEYCODE_CAMERA) {
// Handle these events so they don't launch the Camera app
return true;
}
return super.onKeyDown(keyCode, event);
}
*/
// SurfaceHolder.Callback: the preview surface is ready; start the camera once.
@Override
public void surfaceCreated(SurfaceHolder holder) {
    if (holder == null) {
        Log.e(TAG, "*** WARNING *** surfaceCreated() gave us a null surface!");
    }
    if (hasSurface) {
        return; // already initialized for this surface
    }
    hasSurface = true;
    initCamera(holder);
}
// SurfaceHolder.Callback: surface gone; onResume() will re-register the callback.
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
hasSurface = false;
}
// SurfaceHolder.Callback: size/format changes are intentionally ignored.
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
// Exposes the viewfinder overlay to the decode pipeline.
@Override
public CutOutsView getViewfinder() {
return viewfinderView;
}
/*
*/
// Exposes the decoder activity handler (null when not previewing).
@Override
public Handler getHandler() {
return handler;
}
// Exposes the shared CameraManager created in onResume().
@Override
public CameraManager getCameraManager() {
return cameraManager;
}
// Decode callback from the scanner pipeline; this activity only logs it.
@Override
public void handleDecode(Bitmap barcode) {
System.out.println("Cutouts:handleDecode");
}
// Intentionally a no-op: result-point highlighting is not used here.
protected void drawResultPoints(Bitmap barcode) {
}
// Intentionally a no-op: kept to satisfy the scanner base-class contract.
protected static void drawLine(Canvas canvas, Paint paint) {
}
// Makes the viewfinder overlay visible.
protected void showScanner() {
viewfinderView.setVisibility(View.VISIBLE);
}
// Opens the camera driver on the given surface, starts the decode handler
// (which starts the preview), captures the preview size for later frame
// conversion, and switches the camera to auto-focus. Driver errors are
// logged and swallowed so the activity survives camera-service failures.
protected void initCamera(SurfaceHolder surfaceHolder) {
try {
cameraManager.openDriver(surfaceHolder);
// cameraManager.getCamera().setPreviewCallback(prevCallBack);
// Creating the handler starts the preview, which can also throw a
// RuntimeException.
if (handler == null) {
handler = new DecoderActivityHandler(this, characterSet, cameraManager);
}
// Record the preview dimensions; decodeYUV420SP() relies on these.
Camera.Parameters parameters = cameraManager.getCamera().getParameters();
iMovWidth = parameters.getPreviewSize().width;
iMovHeight = parameters.getPreviewSize().height;
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
//some more settings
cameraManager.getCamera().setParameters(parameters);
} catch (IOException ioe) {
Log.w(TAG, ioe);
} catch (RuntimeException e) {
// Barcode Scanner has seen crashes in the wild of this variety:
// java.?lang.?RuntimeException: Fail to connect to camera service
Log.w(TAG, "Unexpected error initializing camera", e);
}
}
// Returns the most recent masked still frame (set by prevCallBack); may be null.
public Bitmap getCurrCanvasImg() {
return currFBmap;
}
// Serializes the persisted CutOut settings to a JSON string for JS consumers.
// Each key defaults to "no"; returns "noQvalue" if serialization fails.
public String sendCUConfBundle() {
    String tretStr = "noQvalue";
    final String[] settingKeys = {
        "confCUuseDSpeak", "confCUTitleWmark", "confCUUseFilter"
    };
    try {
        JSONObject settingsJson = new JSONObject();
        for (String key : settingKeys) {
            settingsJson.put(key, configCUSettings.getString(key, "no"));
        }
        tretStr = settingsJson.toString();
    } catch(Exception e) {
        System.out.println("sendCUConfBundle: " + e.toString());
        e.printStackTrace();
        return tretStr;
        //Handle exception here
    }
    return tretStr;
}
// returns current CutOut user settings as an android Bundle
// Each setting defaults to "no". Also pushes the same settings (as JSON)
// to animMovBuilder so the movie builder stays in sync with preferences.
Bundle getCUConfBundle() {
    final String[] settingKeys = {
        "confCUuseDSpeak", "confCUTitleWmark", "confCUUseFilter"
    };
    Bundle theCUConfBundle = new Bundle();
    for (String key : settingKeys) {
        theCUConfBundle.putString(key, configCUSettings.getString(key, "no"));
    }
    // passes changes to animMovBuilder
    animMovBuilder.setAMBstgsObj(sendCUConfBundle());
    return theCUConfBundle;
}
// Persists one settings key/value pair and refreshes the cached Bundle.
// commit() is synchronous; the write completes before this method returns.
public void putCUConfValStr(String theKey, String theVal) {
configCUEditor = configCUSettings.edit();
configCUEditor.putString(theKey, theVal);
configCUEditor.commit();
currCUConfBundle = getCUConfBundle();
}
// Preview callback used by sendAnotherFrame(); frame processing is currently
// disabled (the prepPFrame call is commented out), so this is effectively a no-op.
public Camera.PreviewCallback aframeCallBack = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
try {
if(isRecording) {
// prepPFrame(data);
}
} catch (Exception e) {
System.out.println("onPictureTaken: " + e.toString());
}
}
};
// Requests a single preview frame via aframeCallBack.
// NOTE(review): the fcnt parameter is unused.
public void sendAnotherFrame(int fcnt) {
cameraManager.getCamera().setOneShotPreviewCallback(aframeCallBack);
}
// Captures one still frame by requesting a one-shot preview callback
// (prevCallBack converts it to a masked bitmap).
protected void takeAPicture() {
System.out.println("CutOuts:using takeAPicture: ");
cameraManager.getCamera().setOneShotPreviewCallback(prevCallBack);
}
// Still-photo capture path: converts the YUV preview frame to RGB, scales it
// to fit 520px, runs it through the movie builder's masking (getItemBmp),
// stores the result in currFBmap, shows it, and reveals the action buttons.
public Camera.PreviewCallback prevCallBack = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
try {
System.out.println("CutOuts:using prevCallBack : ");
int[] previewPixels = new int[iMovWidth * iMovHeight];
utilsBitmap.decodeYUV420SP(previewPixels, data, iMovWidth, iMovHeight);
Bitmap b = Bitmap.createBitmap(previewPixels, iMovWidth, iMovHeight, Bitmap.Config.RGB_565);
Bitmap adbmpAToMask = utilsBitmap.scaleBoundBitmap(b, 520);
currFBmap = null;
currFBmap = animMovBuilder.getItemBmp(adbmpAToMask);
gifBgView.setBmap(currFBmap);
setToggleAPViewBtns(true);
} catch (Exception e) {
System.out.println("CutOuts:prevCallBack: " + e.toString());
e.printStackTrace();
}
}
};
// Movie-recording frame grabber, re-armed periodically by callAsynchronousTask().
// Each invocation converts the YUV preview frame to a scaled RGB bitmap and
// appends it (bytes + bitmap + timestamp) to arrAnimFSing. A synthetic frame
// at timestamp 0 is inserted before the first real frame. Recording stops
// itself after 14 frames by simulating a record-button click.
public Camera.PreviewCallback cbMovFrame = new Camera.PreviewCallback() {
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
try {
System.out.println("CutOuts:using cbMovFrame : " + arrAnimFSing.size());
if(isRecording) {
int itmpIFI = 0;
// Timestamp relative to recording start (set in callAsynchronousTask()).
long l = System.currentTimeMillis();
int iNTime = (int)(l - lMovStartTstamp);
int[] previewPixels = new int[iMovWidth * iMovHeight];
utilsBitmap.decodeYUV420SP(previewPixels, data, iMovWidth, iMovHeight);
Bitmap b = Bitmap.createBitmap(previewPixels, iMovWidth, iMovHeight, Bitmap.Config.RGB_565);
Bitmap adbmpAToMask = utilsBitmap.scaleBoundBitmap(b, 320);
if(arrAnimFSing.size() == 0) { // add first fram at 0 timestamp
System.out.println("cbMovFrame: 0");
AnimFrameSingleton anfs = new AnimFrameSingleton();
anfs.setIpst(0);
anfs.setMBytes(data);
anfs.setIanmFrmTS(0);
anfs.setMBitmap(adbmpAToMask);
arrAnimFSing.add(anfs);
// gifAnimation.getAnimFrmIdx();
}
AnimFrameSingleton aanfs = new AnimFrameSingleton();
aanfs.setIpst(iNTime);
// if(currMovType.equals("gif")) { }
aanfs.setMBytes(data);
previewPixels = null;
// Show the latest frame as live feedback while recording.
gifBgView.setBmap(adbmpAToMask);
// aanfs.setMAnimResInt(animEngine.getAEngFrameRes());
aanfs.setIanmFrmTS(iAnimFrmIdx);
aanfs.setMBitmap(adbmpAToMask);
arrAnimFSing.add(aanfs);
if(iAnimFrmIdx == 2) {
}
if(iAnimFrmIdx == 7) {
// playFile();
// animMovSoundPool.playSound(1);
}
// Hard cap: stop recording after 14 captured frames.
if(arrAnimFSing.size() == 14) {
onRecClick(vtv);
}
vtv.setText("Frame : " + arrAnimFSing.size() + " of 14");
// itmpIFI = gifAnimation.getAnimFrmIdx();
System.out.println("CutOuts.cbMovFrame: " + iNTime + "iAnimFrmIdx: " + iAnimFrmIdx);
// prepArrPFrame(data);
}
} catch (Exception e) {
System.out.println("cbMovFrame: " + e.toString());
e.printStackTrace();
}
}
};
// Toggles recording. When recording: stops the animation engines, cancels the
// frame-grab timer, stops/releases the native MediaRecorder (audio), then
// kicks off the movie build. When idle: starts the async recorder prep.
public void onRecClick(View view) {
try {
System.out.println("onRecClick: isRecording " + isRecording);
if(isRecording) {
setCaptureButtonText("Capture");
isRecording = false;
animEngine.stop();
// gifAnimation.stop();
if(gifBgAnimation != null) {
gifBgAnimation.stop();
}
// Stop scheduling further cbMovFrame captures.
tmrMovRec.cancel();
tmrMovRec.purge();
if(mMediaRecorder != null) {
mMediaRecorder.stop(); // stop the recording
releaseMediaRecorder(); // release the MediaRecorder object
System.out.println("onRecClick.CronTime:MediaRecorder stop" + System.currentTimeMillis());
}
prepMovBuild();
// prepMovCrunch();
// releaseCamera();
} else {
isRecording = true;
// Toast.makeText(CutOuts.this, "Loading...", Toast.LENGTH_LONG).show();
// callAsynchronousTask();
callMediaPrepTask();
}
} catch (Exception e) {
System.out.println("onRecClick: " + e.toString());
e.printStackTrace();
}
}
// Would update the capture-button label; currently disabled.
private void setCaptureButtonText(String title) {
// vtv.setText(title);
}
// ----------------- unused native recording stuff
// Resets and releases the native MediaRecorder, if any, and drops the reference.
private void releaseMediaRecorder(){
    if (mMediaRecorder == null) {
        return;
    }
    // clear recorder configuration
    mMediaRecorder.reset();
    // release the recorder object
    mMediaRecorder.release();
    mMediaRecorder = null;
    // Lock camera for later use i.e taking it back from MediaRecorder.
    // MediaRecorder doesn't need it anymore and we will release it if the activity pauses.
    // cameraManager.getCamera().lock();
}
// Intentionally a no-op: the camera driver is owned/closed by cameraManager.
private void releaseCamera(){
// cameraManager.closeDriver();
}
// Configures an audio-only MediaRecorder (3GP container, AMR-NB encoding)
// writing to <external storage>/quick-order/outa.3gp.
// Returns true when prepare() succeeds, false on any failure.
private boolean prepareVideoRecorder(){
    try {
        File filePath = new File(Environment.getExternalStorageDirectory().getPath() + File.separator + "quick-order");
        filePath.mkdirs();
        // String filename = timeStamp + ".3gp";
        String filename = "outa.3gp";
        File ffile = new File(filePath, filename);
        mMediaRecorder = new MediaRecorder();
        mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
        mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
        mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
        mMediaRecorder.setOutputFile(ffile.toString());
    } catch (Exception e) {
        Log.d(TAG, "Exception preparing MediaRecorder: " + e.getMessage());
        e.printStackTrace();
    }
    // Bug fix: if construction/configuration above threw, mMediaRecorder may
    // still be null; calling prepare() on it would raise an uncaught
    // NullPointerException (only IllegalState/IO were handled below).
    if (mMediaRecorder == null) {
        return false;
    }
    try {
        mMediaRecorder.prepare();
    } catch (IllegalStateException e) {
        Log.d(TAG, "IllegalStateException preparing MediaRecorder: " + e.getMessage());
        releaseMediaRecorder();
        return false;
    } catch (IOException e) {
        Log.d(TAG, "IOException preparing MediaRecorder: " + e.getMessage());
        releaseMediaRecorder();
        return false;
    }
    return true;
}
/**
* Asynchronous task for preparing the {@link android.media.MediaRecorder} since it's a long blocking
* operation.
*/
// Prepares and starts the MediaRecorder off the UI thread, then (on the UI
// thread) starts the animation engines and the frame-grab timer.
class MediaPrepareTask extends AsyncTask<Void, Void, Boolean> {
@Override
protected Boolean doInBackground(Void... voids) {
// initialize video camera
if (prepareVideoRecorder()) {
// Camera is available and unlocked, MediaRecorder is prepared,
// now you can start recording
System.out.println("MediaPrepareTask.CronTime:Recorder start" + System.currentTimeMillis());
mMediaRecorder.start();
isRecording = true;
} else {
// prepare didn't work, release the camera
releaseMediaRecorder();
System.out.println("MediaPrepareTask.releaseMediaRecorder.error");
return false;
}
return true;
}
@Override
protected void onPostExecute(Boolean result) {
// Runs on the UI thread after doInBackground() completes.
// NOTE(review): the engines/timer below are started even when result is
// false (the failure branch is commented out) - confirm this is intended.
System.out.println("MediaPrepareTask.CronTime:Recorder onPostExecute" + System.currentTimeMillis());
if (!result) {
// MainActivity.this.finish();
}
// inform the user that recording has started
// setCaptureButtonText("Stop");
if(currMovType.equals("jpeg")) {
setCaptureButtonText("");
} else {
setCaptureButtonText("recording " + currMovType + " ");
}
animEngine.start();
// gifAnimation.start();
if(gifBgAnimation != null) {
gifBgAnimation.start();
}
// Begin periodic frame capture via cbMovFrame.
callAsynchronousTask();
isRecording = true;
// mRecMicToMp3.start();
// sendAnotherFrame(0);
}
}
// Shows or hides the preview/edit/save/clear action buttons together.
// Safe to call from any thread: the visibility change is posted to the UI thread.
public void setToggleAPViewBtns(final boolean fnlBooltoShow) {
    this.runOnUiThread(new Runnable() {
        public void run() {
            try {
                final int visibility = fnlBooltoShow ? View.VISIBLE : View.INVISIBLE;
                tvAPVPreview.setVisibility(visibility);
                tvAPVEdit.setVisibility(visibility);
                tvAPVSave.setVisibility(visibility);
                tvAPVClear.setVisibility(visibility);
            } catch(Exception e) {
                System.out.println("dev:ERROR:setToggleAPViewBtns: " + e);
            }
        }
    });
}
// Creates the shared web dialog (utilWDialog) backed by an asset page and
// wires its callback listener. cbType 60 routes to editMovImage(cbArgs).
public void preparePagePopUps(String pageUrl, String pageHtml) {
String fullUrl = "file:///android_asset/" + pageUrl;
String newHTML = "";
UtilWebDialog.UtilWDListener utilWDListener = new UtilWebDialog.UtilWDListener() {
public void epMDcom(int cbType, String cbArgs, UtilWebDialog epmd) {
final String fnlCbArgs;
epmd.doDismiss();
System.out.println("CutOuts.preparePagePopUps: " + cbType + " : " + cbArgs);
// stopPlayFile();
// handler.removeCallbacks(thrdTask);
// handler.postDelayed(thrdTask, 0);
switch (cbType) {
case 60:
// NOTE(review): no break here, so case 60 falls through into
// default (which only breaks) - harmless now, but fragile if
// more cases are added. Confirm intent.
editMovImage(Integer.parseInt(cbArgs));
default:
break;
}
}
};
utilWDialog = new UtilWebDialog(this, fullUrl, pageHtml, utilWDListener, new JSI_CutOuts(this), "app_cutouts");
utilWDialog.getWindow().setSoftInputMode(WindowManager.LayoutParams.SOFT_INPUT_STATE_ALWAYS_HIDDEN);
}
// Swaps the page shown in the shared web dialog; hops to the UI thread first.
public void setPagePopUp(final String pageUrl, final String pageHtml) {
    try {
        runOnUiThread(new Runnable() {
            public void run() {
                utilWDialog.setPopPage(pageUrl, pageHtml);
            }
        });
    } catch (Exception e) {
        System.out.println("dev:ERROR:setPagePopUp:" + e.toString());
        e.printStackTrace();
    }
}
// Best-effort stop of the shared MediaPlayer; errors are logged and swallowed.
public void stopPlayFile()
{
try {
mPlayer.stop();
} catch (Exception e) {
e.printStackTrace();
}
}//end stopPlayFile
// Plays back the recorded audio clip through the shared MediaPlayer.
// NOTE(review): path is hard-coded to the fixed file written by
// prepareVideoRecorder() (quick-order/outa.3gp) - confirm that is intended.
public void playFile()
{
try {
String path = new File(Environment.getExternalStorageDirectory() + "/quick-order/outa.3gp").getAbsolutePath();
mPlayer.reset();
mPlayer.setLooping(false);
mPlayer.setDataSource(path);
mPlayer.prepare();
mPlayer.start();
} catch (Exception e) {
e.printStackTrace();
}
}//end playFile
// Finalizes the output media file. For mp4, runs the async MP4 parser first;
// for other types resolves the content Uri immediately. Either way it stores
// currMovUri/currMovPath and finishes the activity via onEPResult().
public void timeMovParse() {
try {
String tmpFnae = currMovFName + "." + currMovType;
final File tmpTMPFile = new File(Environment.getExternalStorageDirectory().getPath() + File.separator + "quick-order" + File.separator + tmpFnae);
System.out.println("Cutouts:timeMovParse.tmpFnae : " + tmpFnae);
if(currMovType.equals("mp4")) {
new com.njfsoft_utils.anim.MPFourParser().procMPFourPars(tmpTMPFile.toString(), new com.njfsoft_utils.core.OnTaskExecutionFinished()
{
@Override
public void OnTaskFihishedEvent(String result)
{
System.out.println("Cutouts:timeMovParse: " + (String)result);
Uri daUri = getMovContentUri(getApplicationContext(), tmpTMPFile);
currMovUri = daUri.toString();
currMovPath = tmpTMPFile.toString();
onEPResult();
}
});
} else {
// Non-mp4 (gif/jpeg): no parsing step needed.
Uri daUri = getMovContentUri(getApplicationContext(), tmpTMPFile);
currMovUri = daUri.toString();
currMovPath = tmpTMPFile.toString();
onEPResult();
}
} catch (Exception e) {
System.out.println("Cutouts:timeMovParse:Error" + e);
e.printStackTrace();
}
}
// Preview-playback-finished callback: re-enable the action buttons.
public void animPlayEnded() {
setToggleAPViewBtns(true);
// setPagePopUp("quickorder/media_chooser.html","noQvalue");
}
// Drives the frame-by-frame movie build: procAnimBuild() processes one step
// and reports back; on anything but "isDone" this method recurses to process
// the next frame, updating the progress label between steps.
// NOTE(review): stv/vtv.setText run from the task callback - confirm the
// callback is delivered on the UI thread.
public void timeAnimMovBuild() {
System.out.println("Cutouts:timeAnimMovBuild: nada ");
animMovBuilder.procAnimBuild("nada", new com.njfsoft_utils.core.OnTaskExecutionFinished()
{
@Override
public void OnTaskFihishedEvent(String result)
{
try {
if(result.equals("isDone")) {
stv.setText("Done");
// dont make it yet, just show it:
// prepMovCrunch();
AnimMovSingleton tmpAMS = animMovBuilder.getAnimMovSing();
if(tmpAMS != null) {
setToggleAPViewBtns(true);
// setPagePopUp("quickorder/media_chooser.html","noQvalue");
// System.out.println("Cutouts:timeAnimMovBuild:getAnimMovSing tmpAMS" + tmpAMS.getMamsArrAFS().size());
// boolean isGTPV = gifPrevView.setAnimMovSing(tmpAMS);
// playFile();
}
} else {
// Not done yet: bump the counter, refresh progress, recurse.
iPrepArrPFrame++;
vtv.setText("encoding frame : " + animMovBuilder.allBmaps + " of " + animMovBuilder.getAnimMovSing().getMamsArrAFS().size());
timeAnimMovBuild();
}
System.out.println("Cutouts:timeAnimMovBuild Response From Asynchronous task: " + animMovBuilder.allBmaps + " :: " + (String)result);
} catch (Exception e) {
System.out.println("Cutouts:timeAnimMovBuild:getAnimMovSing Error" + e);
e.printStackTrace();
// TODO Auto-generated catch block
}
}
});
}
// Drives the final encode: the mp4 recorder (agRecorder) or gif recorder
// (gagRecorder) processes one frame per call and reports back; recurses
// until "isDone", then hands off to timeMovParse().
public void timeAnimMovFrame() {
OnTaskExecutionFinished tAMFTEF = new com.njfsoft_utils.core.OnTaskExecutionFinished()
{
@Override
public void OnTaskFihishedEvent(String result)
{
if(result.equals("isDone")) {
iPrepArrPFrame = 0;
try {
vtv.setText("encoding ");
timeMovParse();
} catch (Exception e) {
System.out.println("Cutouts:timeAnimMovFrame:getAnimMovSing Error" + e);
e.printStackTrace();
// TODO Auto-generated catch block
}
// timeMovParse();
} else {
// Not done yet: advance progress and encode the next frame.
iPrepArrPFrame++;
vtv.setText("encoding frame " + iPrepArrPFrame + " of 14");
timeAnimMovFrame();
}
System.out.println("Cutouts:timeAnimMovFrame Response From Asynchronous task: " + iPrepArrPFrame + " :: " + (String)result);
}
};
// Pick the recorder matching the current output type.
if(currMovType.equals("mp4")) {
agRecorder.procAnimFnlFrames("nada", tAMFTEF);
} else {
gagRecorder.procAnimFnlFrames("nada", tAMFTEF);
}
}
// Kicks off recording: schedules MediaPrepareTask once after 600 ms (posted
// back to this thread's handler), and triggers speech output after 1600 ms.
public void callMediaPrepTask() {
try {
final Handler whandler = new Handler();
Timer atmrMovRec = new Timer();
TimerTask adoAsynchronousTask = new TimerTask() {
@Override
public void run() {
whandler.post(new Runnable() {
public void run() {
try {
new MediaPrepareTask().execute(null, null, null);
} catch (Exception e) {
// TODO Auto-generated catch block
}
}
});
}
};
// One-shot: fires once after 600 ms (not repeating).
atmrMovRec.schedule(adoAsynchronousTask, 600); //execute in every 600 ms
new android.os.Handler().postDelayed(
new Runnable() {
public void run() {
getProdSpeach();
}
}, 1600);
} catch (Exception e) {
System.out.println("Cutouts:callMediaPrepTask:Error " + e);
e.printStackTrace();
// TODO Auto-generated catch block
}
}
// Starts the frame-capture loop: records the start timestamp, then every
// iMovFDelay ms re-arms a one-shot preview callback (cbMovFrame) on the
// camera via a Timer whose work is posted back to this thread's handler.
public void callAsynchronousTask() {
lMovStartTstamp = System.currentTimeMillis();
final Handler wwhandler = new Handler();
tmrMovRec = new Timer();
TimerTask doAsynchronousTask = new TimerTask() {
@Override
public void run() {
wwhandler.post(new Runnable() {
public void run() {
try {
cameraManager.getCamera().setOneShotPreviewCallback(cbMovFrame);
} catch (Exception e) {
// TODO Auto-generated catch block
}
}
});
}
};
tmrMovRec.schedule(doAsynchronousTask, 0, setIMovFDelay(500)); //execute every 500 ms (also stores the delay via setIMovFDelay)
}
// Packages the captured frames (arrAnimFSing) plus camera/timing metadata
// into the AnimMovSingleton and hands it to animMovBuilder; on success,
// starts the asynchronous build loop (timeAnimMovBuild).
public void prepMovBuild() {
try {
System.out.println("Cutouts:prepMovBuild: start");
Camera.Parameters parameters = cameraManager.getCamera().getParameters();
// AnimMovSingleton anms = new AnimMovSingleton();
AnimMovSingleton anms = AnimMovSingleton.getInstance();
anms.setMamsArrAFS(arrAnimFSing);
anms.setMamsAnimInt(IMAGE_RESOURCES);
// anms.setMamsAnimation(gifAnimation);
if(gifBgAnimation != null) {
anms.setMamsBgAnimation(gifBgAnimation);
}
anms.setMmovWidth(parameters.getPreviewSize().width);
anms.setMmovHeight(parameters.getPreviewSize().height);
// anms.setMmovFPS((int)getIMovFDelay()/10);
// Fixed output frame rate of 2 fps (see the rejected alternatives above).
anms.setMmovFPS(2);
// anms.setMmovFPS(21);
anms.setMmovFDelay(getIMovFDelay());
// anms.setMmovTSstart(lMovStartTstamp);
// anms.setMmovTSstop(System.currentTimeMillis());
boolean isFramed = animMovBuilder.setAnimMovSing(anms);
if(isFramed) {
System.out.println("Cutouts:prepMovBuild.isFramed: " + isFramed);
timeAnimMovBuild();
}
} catch(Exception e) {
System.out.println("Cutouts:prepMovBuild.error: " + e.toString());
}
}
// Feeds the built movie into the type-specific recorder (mp4 vs gif) and,
// once the recorder accepts the frames and prepares, starts the final
// encode loop. Only the gif path sets an explicit output filename.
public void prepMovCrunch() {
String tcurrMovFName = currMovFName + "." + currMovType;
if(currMovType.equals("mp4")) {
boolean isFramed = agRecorder.setAnimMovSing(animMovBuilder.getAnimMovSing());
if(isFramed) {
boolean isPrepped = agRecorder.prepare();
if(isPrepped) {
timeAnimMovFrame();
}
}
} else {
boolean isFramed = gagRecorder.setAnimMovSing(animMovBuilder.getAnimMovSing());
if(isFramed) {
gagRecorder.setFNameString(tcurrMovFName);
boolean isPrepped = gagRecorder.prepare();
if(isPrepped) {
timeAnimMovFrame();
}
}
}
}
// Returns the frame-capture interval in milliseconds.
public int getIMovFDelay() {
return iMovFDelay;
}
// Stores the frame-capture interval (ms) and echoes it back, so the call
// can be used inline (e.g. as a Timer period in callAsynchronousTask()).
public int setIMovFDelay(int tmpval) {
iMovFDelay = tmpval;
return tmpval;
}
// Reads the HTTP response entity into a String using the platform default
// charset. Returns "noQvalue" when Content-Length is negative/unknown
// (original behavior: such responses are not read at all).
public String convertResponseToString(HttpResponse response) throws IllegalStateException, IOException {
    String res = "noQvalue";
    InputStream inputStream = response.getEntity().getContent();
    int contentLength = (int) response.getEntity().getContentLength();
    if (contentLength < 0) {
    } else {
        // Bug fix: the previous version decoded each 512-byte chunk with
        // new String(data, 0, len), which can split multi-byte characters
        // across chunk boundaries. Accumulate raw bytes, decode once.
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] data = new byte[512];
        int len = 0;
        try {
            while (-1 != (len = inputStream.read(data))) {
                bos.write(data, 0, len);
            }
        } catch (IOException e) {
            e.printStackTrace();
        }
        try {
            inputStream.close();
        } catch (IOException e) {
            e.printStackTrace();
        }
        res = new String(bos.toByteArray());
    }
    return res;
}
// Reads the entire contents of the given file into a byte array.
// Returns null when the file cannot be read.
public static byte[] convertFileToByteArray(File f)
{
    byte[] byteArray = null;
    InputStream inputStream = null;
    try
    {
        inputStream = new FileInputStream(f);
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        byte[] b = new byte[1024*8];
        int bytesRead = 0;
        while ((bytesRead = inputStream.read(b)) != -1)
        {
            bos.write(b, 0, bytesRead);
        }
        byteArray = bos.toByteArray();
    }
    catch (IOException e)
    {
        e.printStackTrace();
    }
    finally
    {
        // Bug fix: the stream was never closed, leaking one file
        // descriptor per call.
        if (inputStream != null) {
            try { inputStream.close(); } catch (IOException ignored) { }
        }
    }
    return byteArray;
}
// Reads the Giphy upload response, extracts data.id from the JSON body,
// stores it in currGiphyGID, and finishes the activity via onEPResult().
// Skips responses whose Content-Length is negative/unknown.
// NOTE(review): decoding each 512-byte chunk separately can split
// multi-byte characters; safe only while the response is pure ASCII JSON.
public void doGiphyShare(HttpResponse response) throws IllegalStateException, IOException {
String res = "noQvalue";
StringBuffer buffer = new StringBuffer();
InputStream inputStream = response.getEntity().getContent();
int contentLength = (int) response.getEntity().getContentLength();
if (contentLength < 0) {
} else {
byte[] data = new byte[512];
int len = 0;
try {
while (-1 != (len = inputStream.read(data))) {
buffer.append(new String(data, 0, len));
}
} catch (IOException e) {
e.printStackTrace();
}
try {
inputStream.close();
} catch (IOException e) {
e.printStackTrace();
}
res = buffer.toString();
System.out.println("CutOuts:doGiphyShare:res: " + res);
try {
JSONObject jobj = new JSONObject(res);
JSONObject jdata = jobj.getJSONObject("data");
String param = jdata.getString("id");
currGiphyGID = param; // sending back the giphyID
onEPResult();
System.out.println("CutOuts:doGiphyShare:param: " + param);
// Intent browserIntent = new Intent(Intent.ACTION_VIEW, Uri.parse("http://www.giphy.com/gifs/" + param));
// startActivity(browserIntent);
} catch (Exception e) {
e.printStackTrace();
System.out.println("CutOuts:doGiphyShare:error: " + e);
}
}
}
// Uploads the recorded gif to the Giphy v1 upload endpoint as a multipart
// POST, then hands the response to doGiphyShare().
// NOTE(review): uses Giphy's public beta api_key over plain http - confirm
// this is acceptable for production.
public void postGiphyVid() {
HttpClient httpClient = new DefaultHttpClient();
HttpContext localContext = new BasicHttpContext();
HttpPost httpPost = new HttpPost("http://upload.giphy.com/v1/gifs");
String fnime = agRecorder.getFNameString();
String fNewM = Environment.getExternalStorageDirectory() + "/quick-order/" + fnime + ".gif";
String strOutput = "noQvalue";
ArrayList<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
nameValuePairs.add(new BasicNameValuePair("username", "njflabs"));
nameValuePairs.add(new BasicNameValuePair("api_key", "dc6zaTOxFJmzC"));
nameValuePairs.add(new BasicNameValuePair("file", fNewM));
// nameValuePairs.add(new BasicNameValuePair("is_hidden", false));
nameValuePairs.add(new BasicNameValuePair("tags", "beta,test"));
try {
MultipartEntity entity = new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
for(int index=0; index < nameValuePairs.size(); index++) {
if(nameValuePairs.get(index).getName().equalsIgnoreCase("file")) {
// The "file" pair carries a filesystem path; send it as a FileBody.
entity.addPart(nameValuePairs.get(index).getName(), new FileBody(new File (nameValuePairs.get(index).getValue())));
} else {
// Normal string data
entity.addPart(nameValuePairs.get(index).getName(), new StringBody(nameValuePairs.get(index).getValue()));
}
}
httpPost.setEntity(entity);
HttpResponse response = httpClient.execute(httpPost, localContext);
doGiphyShare(response);
} catch (IOException e) {
e.printStackTrace();
System.out.println("CutOuts:postVid:error: " + e);
}
}
// Uploads the recorded gif plus metadata to the njfsoft endpoint as a
// multipart POST and logs the response; if the server replies with a URL it
// could be loaded into a WebView (currently disabled).
public void postVid() {
    HttpClient httpClient = new DefaultHttpClient();
    HttpContext localContext = new BasicHttpContext();
    HttpPost httpPost = new HttpPost("http://a-njfsoft.rhcloud.com/index.html");
    String strCurrSID = Long.toString(System.currentTimeMillis());
    String fnime = agRecorder.getFNameString();
    // Bug fix: the directory was misspelled "quick-orerder"; every other
    // path in this class (recorder output, playback, uploads) uses
    // "quick-order", so the upload always pointed at a missing file.
    String fNewM = Environment.getExternalStorageDirectory() + "/quick-order/" + fnime + ".gif";
    String strOutput = "noQvalue";
    ArrayList<NameValuePair> nameValuePairs = new ArrayList<NameValuePair>();
    nameValuePairs.add(new BasicNameValuePair("do", "add"));
    nameValuePairs.add(new BasicNameValuePair("sttl", "My test vid title"));
    nameValuePairs.add(new BasicNameValuePair("sdesc", "my test vid desc"));
    nameValuePairs.add(new BasicNameValuePair("stime", strCurrSID));
    nameValuePairs.add(new BasicNameValuePair("snwork", "vp"));
    nameValuePairs.add(new BasicNameValuePair("epvideo", fNewM));
    try {
        MultipartEntity entity = new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
        for(int index=0; index < nameValuePairs.size(); index++) {
            if(nameValuePairs.get(index).getName().equalsIgnoreCase("epvideo")) {
                // The "epvideo" pair carries a filesystem path; send as FileBody.
                entity.addPart(nameValuePairs.get(index).getName(), new FileBody(new File (nameValuePairs.get(index).getValue())));
            } else {
                // Normal string data
                entity.addPart(nameValuePairs.get(index).getName(), new StringBody(nameValuePairs.get(index).getValue()));
            }
        }
        httpPost.setEntity(entity);
        HttpResponse response = httpClient.execute(httpPost, localContext);
        strOutput = convertResponseToString(response);
        System.out.println("CutOuts:postVid:response: " + strOutput);
        if (strOutput.startsWith("http")) {
            // mWebView.loadUrl(strOutput);
        }
    } catch (IOException e) {
        e.printStackTrace();
        System.out.println("CutOuts:postVid:error: " + e);
    }
}
// Generic multipart POST helper: every "epvid" pair is sent as a file part
// (its value is a filesystem path), all other pairs as plain string parts.
// The response is ignored; IO failures are only logged.
public void post(String url, ArrayList<NameValuePair> nameValuePairs) {
    HttpClient httpClient = new DefaultHttpClient();
    HttpContext localContext = new BasicHttpContext();
    HttpPost httpPost = new HttpPost(url);
    try {
        MultipartEntity entity = new MultipartEntity(HttpMultipartMode.BROWSER_COMPATIBLE);
        for (NameValuePair pair : nameValuePairs) {
            String partName = pair.getName();
            String partValue = pair.getValue();
            if (partName.equalsIgnoreCase("epvid")) {
                // File payload: the value is a path on disk.
                entity.addPart(partName, new FileBody(new File(partValue)));
            } else {
                // Normal string data
                entity.addPart(partName, new StringBody(partValue));
            }
        }
        httpPost.setEntity(entity);
        HttpResponse response = httpClient.execute(httpPost, localContext);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
// Publishes the still-photo result: stores share metadata in the
// ShareDataResult singleton, packs the movie identifiers into the result
// Intent, and finishes the activity.
protected void onPicResult() {
    Log.v(TAG, "onPicResult()");
    ShareDataResult.getInstance().setCallingApp("CutOuts");
    ShareDataResult.getInstance().setImgStr(currGiphyGID);
    // ShareDataResult.getInstance().setImgName(currMovFName.substring(0, currMovFName.lastIndexOf(".") - 1) + ".jpeg");
    // ShareDataResult.getInstance().setImgBytes(agRecorder.boaAGR.toByteArray());
    // Bug fix: user-facing title/desc were misspelled "Quick-Oder".
    ShareDataResult.getInstance().setTitle("Quick-Order Title");
    ShareDataResult.getInstance().setDesc("Quick-Order Desc");
    // ShareDataResult.getInstance().setMsg("Quick-Order User Message");
    Intent retintent = new Intent();
    retintent.putExtra("currMovType", currMovType);
    retintent.putExtra("currMovFName", currMovFName);
    retintent.putExtra("currMovUri", currMovUri);
    // NOTE(review): "currMovPath" deliberately left carrying currMovFName as
    // before - onEPResult() sends currMovPath here; confirm which is correct
    // for the jpeg flow before changing it.
    retintent.putExtra("currMovPath", currMovFName);
    setResult(RESULT_OK, retintent);
    finish();
}
// Finishes the activity after a movie result: tears down the decode handler,
// camera driver and surface callback (same sequence as onPause()), then
// returns the movie identifiers to the caller via the result Intent.
protected void onEPResult() {
try {
System.out.println("onEPResult");
if (handler != null) {
handler.quitSynchronously();
handler = null;
}
cameraManager.closeDriver();
if (!hasSurface) {
SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
SurfaceHolder surfaceHolder = surfaceView.getHolder();
surfaceHolder.removeCallback(this);
}
Intent retintent = new Intent();
retintent.putExtra("currMovType", currMovType);
retintent.putExtra("currMovFName", currMovFName);
retintent.putExtra("currMovUri", currMovUri);
retintent.putExtra("currMovPath", currMovPath);
setResult(RESULT_OK, retintent);
finish();
} catch (Exception e) {
System.out.println("onEPResult.error: ");
e.printStackTrace();
}
}
// Returns the identifier handed back to share consumers. The original
// base64-thumbnail implementation is kept below (commented out); the method
// currently just returns the Giphy ID stored by doGiphyShare().
public String getImgLoadStr() {
/*
try {
Bitmap result = utilsBitmap.getResizedBitmap(animMovBuilder.getAnimMovSing().getMamsArrAFS().get(2).getMBitmap(), 250, 200);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
result.compress(Bitmap.CompressFormat.JPEG, 50, baos);
baos.close();
byte[] bMapArray = baos.toByteArray();
String encodedImage = Base64.encodeBytes(bMapArray);
return encodedImage;
} catch(Exception e) {
System.out.println("CutOuts:getImgLoadStr: " + e.toString());
return "noQvalue";
}
*/
return currGiphyGID;
}
/**
 * Intercepts hardware keys: swallows FOCUS/CAMERA so the stock camera app is
 * not launched, and on BACK tears down the scanner and finishes with an empty
 * RESULT_OK intent (caller only needs to know the screen closed).
 */
public boolean onKeyDown(int keyCode, KeyEvent event) {
    if (keyCode == KeyEvent.KEYCODE_FOCUS || keyCode == KeyEvent.KEYCODE_CAMERA) {
        // Handle these events so they don't launch the Camera app
        return true;
    }
    if (keyCode == KeyEvent.KEYCODE_BACK) {
        // NOTE(review): this teardown sequence duplicates onEPResult();
        // consider sharing a helper. Behavior left unchanged here.
        if (handler != null) {
            handler.quitSynchronously();
            handler = null;
        }
        cameraManager.closeDriver();
        if (!hasSurface) {
            SurfaceView surfaceView = (SurfaceView) findViewById(R.id.preview_view);
            SurfaceHolder surfaceHolder = surfaceView.getHolder();
            surfaceHolder.removeCallback(this);
        }
        // No extras: unlike onEPResult(), BACK returns an empty result.
        Intent intent = new Intent();
        setResult(RESULT_OK, intent);
        finish();
    }
    return super.onKeyDown(keyCode, event);
}
/**
 * Resolves (or registers) a MediaStore content Uri for the given image file.
 *
 * @param context   context used to reach the ContentResolver
 * @param imageFile image file on external storage
 * @return the existing MediaStore content Uri, a newly inserted one if the
 *         file exists on disk but is not yet registered, or null if the file
 *         does not exist
 */
public static Uri getMovContentUri(Context context, File imageFile) {
    String filePath = imageFile.getAbsolutePath();
    Cursor cursor = context.getContentResolver().query(
        MediaStore.Images.Media.EXTERNAL_CONTENT_URI,
        new String[] { MediaStore.Images.Media._ID },
        MediaStore.Images.Media.DATA + "=? ",
        new String[] { filePath }, null);
    try {
        if (cursor != null && cursor.moveToFirst()) {
            int id = cursor.getInt(cursor.getColumnIndex(MediaStore.MediaColumns._ID));
            return Uri.withAppendedPath(MediaStore.Images.Media.EXTERNAL_CONTENT_URI, "" + id);
        }
    } finally {
        // BUG FIX: the cursor was previously only closed on the moveToFirst()
        // success path, leaking it whenever the query returned no rows.
        if (cursor != null) {
            cursor.close();
        }
    }
    if (imageFile.exists()) {
        // File exists but is unknown to the MediaStore: register it now.
        ContentValues values = new ContentValues();
        values.put(MediaStore.Images.Media.DATA, filePath);
        return context.getContentResolver().insert(
            MediaStore.Images.Media.EXTERNAL_CONTENT_URI, values);
    }
    return null;
}
/**
 * Writes captured JPEG bytes to the "quick-order" folder on external storage,
 * registers the file with the MediaStore, and finishes the activity via
 * onEPResult(). If the target file already exists, nothing is written
 * (createNewFile() returns false), matching the previous behavior.
 *
 * @param byte_arr   JPEG bytes to persist
 * @param theOutfile file name (without extension) to save under
 */
public void picFileSaved(byte[] byte_arr, String theOutfile) {
    File mediaStorageDir = new File(Environment.getExternalStorageDirectory().getPath()
        + File.separator + "quick-order");
    try {
        if (mediaStorageDir.exists()) {
            System.out.println("Cutouts.picFileSaved.exists: true");
        } else {
            mediaStorageDir.mkdirs();
        }
        File ffile = new File(mediaStorageDir, theOutfile + ".jpeg");
        boolean fileCreated = ffile.createNewFile();
        if (fileCreated) {
            // BUG FIX: the stream was not closed if write() threw;
            // try-with-resources guarantees closure on every path.
            try (FileOutputStream os = new FileOutputStream(ffile, true)) {
                os.write(byte_arr);
                os.flush();
            }
            System.out.println("Cutouts.picFileSaved.created: " + fileCreated);
            Uri daUri = getMovContentUri(getApplicationContext(), ffile);
            // Sanity check: an unparsable/null uri aborts into the catch below,
            // as the old `ContentUris.parseId(daUri)` call did.
            ContentUris.parseId(daUri);
            currMovType = "jpeg";
            currMovUri = daUri.toString();
            currMovPath = ffile.toString();
            System.out.println("Cutouts.picFileSaved: " + theOutfile + " : " + currMovFName
                + " : " + currMovUri + " : " + currMovPath);
            onEPResult();
        }
    } catch (Exception e) {
        System.out.println("Cutouts.picFileSaved error: " + theOutfile + " : " + currMovFName
            + " : " + currMovUri + " : " + currMovPath);
        e.printStackTrace();
    }
}
// Implements TextToSpeech.OnInitListener.
/**
 * TextToSpeech initialization callback. On SUCCESS installs an
 * utterance-completed listener that currently only logs utterances whose id
 * contains "repeat"; on failure only logs the error.
 */
public void onInit(int status) {
    // status can be either TextToSpeech.SUCCESS or TextToSpeech.ERROR.
    if (status == TextToSpeech.SUCCESS) {
        cuMTts.setOnUtteranceCompletedListener(new OnUtteranceCompletedListener() {
            @Override
            public void onUtteranceCompleted(String utteranceId) {
                if (utteranceId.contains("repeat")) {
                    try {
                        System.out.println("onUtteranceCompleted: " + utteranceId);
                        // getSpeechToText();
                    } catch (Exception e) {
                        System.out.println("onUtteranceError: " + utteranceId);
                        e.printStackTrace();
                    }
                } else {
                    // showDaToast("onUtteranceCompleted: " + utteranceId);
                }
            }
        });
        /*
        // Set preferred language to US english.
        // Note that a language may not be available, and the result will indicate this.
        int result = mTts.setLanguage(Locale.US);
        // Try this someday for some interesting results.
        // int result mTts.setLanguage(Locale.FRANCE);
        if (result == TextToSpeech.LANG_MISSING_DATA ||
            result == TextToSpeech.LANG_NOT_SUPPORTED) {
            // Language data is missing or the language is not supported.
            Log.e(LOG_TAG, "Language is not available.");
        } else {
            // Check the documentation for other possible result codes.
            // For example, the language may be available for the locale,
            // but not for the specified country and variant.
            // The TTS engine has been successfully initialized.
            // Allow the user to press the button for the app to speak again.
            // mAgainButton.setEnabled(true);
            // Greet the user.
            // sayHello();
        }
        */
    } else {
        // Initialization failed.
        System.out.println("dev:ERROR:onInit:TexttoSpeach failed");
    }
}
// KEY_PARAM_UTTERANCE_ID is used to prolong the silence period in speaking basically.
/**
 * Speaks {@code outstr} through the shared TextToSpeech engine, flushing any
 * queued utterances first (QUEUE_FLUSH).
 *
 * NOTE(review): the {@code utterID} parameter is currently ignored — the
 * utterance id is hard-coded to "naday". The completion listener in onInit()
 * only reacts to ids containing "repeat", so passing utterID through would
 * change behavior; confirm intent before wiring it up.
 */
private void doCUSpeechOut(String outstr, String utterID) {
    try {
        // Fixed raw-type usage: parameterize the map instead of raw HashMap.
        HashMap<String, String> map = new HashMap<>();
        map.put(TextToSpeech.Engine.KEY_PARAM_UTTERANCE_ID, "naday");
        cuMTts.speak(outstr, TextToSpeech.QUEUE_FLUSH, map); // Drop all pending entries in the playback queue.
    } catch (Exception e) {
        System.out.println("dev:ERROR:doSpeechOut: " + e);
    }
}
/** Convenience overload: speaks {@code outstr} with the default utterance id. */
private void doCUSpeechOut(String outstr) {
    this.doCUSpeechOut(outstr, "nadayet");
}
/**
 * Parses the app-meta JSON string into {@code cumetaObject}.
 * On a parse failure the field is left null and the error is logged.
 */
public void setCUAPmeta(String str) {
    System.out.println("CutOuts:setCUAPmeta:APMETA: " + str);
    // Clear first so a failed parse never leaves a stale object behind.
    cumetaObject = null;
    try {
        cumetaObject = new JSONObject(str);
    } catch (Exception e) {
        System.out.println("CutOuts:setAPmeta:ERROR: " + e);
        e.printStackTrace();
    }
}
/**
 * Speaks the product title and price for mp4 movies when the
 * "confCUuseDSpeak" config flag is "yes". Any JSON/config error is logged
 * and speech is skipped.
 */
public void getProdSpeach() {
    if (currMovType.equals("mp4") && currCUConfBundle.getString("confCUuseDSpeak").equals("yes")) {
        try {
            JSONObject jdata = cumetaObject.getJSONObject("qco");
            JSONObject ji = cumetaObject.getJSONObject("qitem");
            // NOTE(review): ctstrP is never used, but getString("c_title")
            // throwing on a missing key aborts the speech via the catch below —
            // removing this line would change that edge-case behavior; confirm
            // before cleaning up.
            String ctstrP = jdata.getString("c_title");
            String tstrP = ji.getString("i_title") + ". for only $" + ji.getString("i_price_b");
            doCUSpeechOut(tstrP);
        } catch (Exception e) {
            System.out.println("getProdSpeach:ERROR: " + e);
            e.printStackTrace();
        }
    }
}
/** Returns the number of frames currently held by the animation movie builder. */
public int getMovArrSize() {
    return animMovBuilder.getAnimMovSing().getMamsArrAFS().size();
}
/**
 * Encodes the thumbnail of frame {@code iMISnum} as a Base64 JPEG string
 * (quality 30). Returns "noQvalue" on any failure.
 */
public String getMovImgString(int iMISnum) {
    try {
        Bitmap thumb = animMovBuilder.getAnimMovSing().getMamsArrAFS().get(iMISnum).getMThumbBmp();
        // Bitmap result = utilsBitmap.getResizedBitmap(animMovBuilder.getAnimMovSing().getMamsArrAFS().get(iMISnum).getMBitmap(), 35, 35);
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        thumb.compress(Bitmap.CompressFormat.JPEG, 30, buffer);
        buffer.close();
        return Base64.encodeBytes(buffer.toByteArray());
    } catch (Exception e) {
        System.out.println("CutOuts:getMovImgString: " + e.toString());
        return "noQvalue";
    }
}
/**
 * Launches the ArtPad editor for the full-resolution bitmap of frame
 * {@code tmpIAI}; the edited result is delivered to onActivityResult with
 * request code INT_RES_EDIT_IMG. Errors are logged and swallowed.
 */
public void editMovImage(int tmpIAI) {
    try {
        currEditImgIndx = tmpIAI;
        Bitmap frame = animMovBuilder.getAnimMovSing().getMamsArrAFS().get(tmpIAI).getMBitmap();
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        frame.compress(Bitmap.CompressFormat.JPEG, 100, buffer);
        buffer.close();
        String encodedImage = Base64.encodeBytes(buffer.toByteArray());
        Intent editorIntent = new Intent(getApplicationContext(), com.njfsoft_utils.artpad.ArtPad.class);
        editorIntent.putExtra("apmode", "apmodeEdit");
        editorIntent.putExtra("encdBmp", encodedImage);
        startActivityForResult(editorIntent, INT_RES_EDIT_IMG);
    } catch (Exception e) {
        System.out.println("CutOuts.editMovImage.error: " + e.toString());
    }
}
/**
 * Sends the current full bitmap to the ArtPad editor (request code 2); the
 * edited image comes back through onActivityResult. Errors are logged and
 * swallowed.
 */
public void prepArtPad() {
    try {
        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
        currFBmap.compress(Bitmap.CompressFormat.JPEG, 100, buffer);
        buffer.close();
        String encodedImage = Base64.encodeBytes(buffer.toByteArray());
        Intent editorIntent = new Intent(getApplicationContext(), com.njfsoft_utils.artpad.ArtPad.class);
        editorIntent.putExtra("apmode", "apmodeEdit");
        editorIntent.putExtra("encdBmp", encodedImage);
        startActivityForResult(editorIntent, 2);
    } catch (Exception e) {
        System.out.println("CutOuts.prepArtPad.error: " + e.toString());
    }
}
/** Opens the media-edit HTML popup. Errors are logged and swallowed. */
public void prepMovEditPop() {
    try {
        setPagePopUp("quickorder/media_edit.html", "noQvalue");
    } catch (Exception e) {
        System.out.println("CutOuts.prepMovEditPop.error: " + e.toString());
    }
}
/**
 * Finalizes the current media: JPEG stills are compressed and written out via
 * picFileSaved(); anything else goes through prepMovCrunch(). Errors are
 * logged and swallowed.
 */
public void doMediaResult() {
    try {
        System.out.println("CutOuts.doMediaResult.type: " + currMovType);
        // BUG FIX: was `currMovType == "jpeg"`, a reference comparison that
        // only worked by accident of string interning (currMovType is assigned
        // the literal "jpeg" elsewhere). Use value equality; the constant-first
        // form is also null-safe.
        if ("jpeg".equals(currMovType)) {
            ByteArrayOutputStream baos = new ByteArrayOutputStream();
            // Bitmap ntBitmap = gifBgView.get();
            currFBmap.compress(Bitmap.CompressFormat.JPEG, 100, baos);
            baos.close();
            byte[] bMapArray = baos.toByteArray();
            picFileSaved(bMapArray, currMovFName);
        } else {
            prepMovCrunch();
        }
    } catch (Exception e) {
        System.out.println("CutOuts.doMediaResult.error: " + e.toString());
    }
}
// onActivityResult functions for mostly everything.
// need get back to this. mostly used for media-fetching and editing, and speech functions.
/**
 * Receives results from child activities. Request code 2 = ArtPad edit of the
 * current full bitmap; INT_RES_EDIT_IMG = ArtPad edit of a single movie frame
 * (updates the frame bitmap and its 35px thumbnail).
 *
 * NOTE(review): data.getExtras() is dereferenced without checking resultCode
 * or data for null — a canceled child activity may deliver data == null; the
 * catch blocks currently absorb the resulting NPE.
 */
@Override
protected void onActivityResult(int requestCode, int resultCode, final Intent data) {
    System.out.println("onActivityResult.resultCode: " + resultCode);
    switch (requestCode) {
        case (2):
            System.out.println("Activity.RESULT_OK: ");
            try {
                Bundle aextras = data.getExtras();
                System.out.println("ArtPadRequest: aextras not null: ");
                if (aextras.containsKey("encdBmp")) {
                    // Decode the edited image and make it the current bitmap.
                    byte[] decodedString = Base64.decode(aextras.getString("encdBmp"));
                    Bitmap bitmap = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length, null);
                    currFBmap = bitmap;
                    gifBgView.setBmap(bitmap);
                }
            } catch (Exception e) {
                System.out.println("onActivityResult: " + e.toString());
                e.printStackTrace();
            }
            break;
        case (INT_RES_EDIT_IMG):
            System.out.println("Activity.RESULT_OK: ");
            try {
                Bundle aextras = data.getExtras();
                System.out.println("ArtPadRequest: aextras not null: ");
                if (aextras.containsKey("encdBmp")) {
                    // Three separate decodes: one for the display bitmap, one
                    // stored as the frame, one scaled down for the thumbnail.
                    byte[] decodedString = Base64.decode(aextras.getString("encdBmp"));
                    Bitmap tmprbitmap = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length, null);
                    Bitmap bmpBmap = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length, null);
                    Bitmap tpBmap = BitmapFactory.decodeByteArray(decodedString, 0, decodedString.length, null);
                    currFBmap = tmprbitmap;
                    gifBgView.setBmap(tmprbitmap);
                    animMovBuilder.getAnimMovSing().getMamsArrAFS().get(currEditImgIndx).setMBitmap(bmpBmap);
                    animMovBuilder.getAnimMovSing().getMamsArrAFS().get(currEditImgIndx).setMThumbBmp(utilsBitmap.scaleBoundBitmap(tpBmap, 35));
                }
            } catch (Exception e) {
                System.out.println("onActivityResult: " + e.toString());
                e.printStackTrace();
            }
            break;
        default:
            System.out.println("Its default");
    }
}
}
| |
/*
* Copyright (c) 2003, 2004, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package sun.awt;
import java.awt.Color;
import java.io.UnsupportedEncodingException;
import java.util.HashMap;
import java.util.Map;
/**
 * Per-screen XSETTINGS.
 *
 * <p>Tracks the serial number of the most recently applied settings snapshot
 * and parses updates delivered via the XSETTINGS protocol.
 */
public class XSettings {

    /**
     * Serial number of the last snapshot applied; -1 until the first
     * successful update. Guarded by synchronizing on this object
     * (see {@code Update.update()}).
     */
    private long serial = -1;

    /**
     * Update these settings with <code>data</code> obtained from
     * XSETTINGS manager.
     *
     * @param data settings data obtained from
     *             <code>_XSETTINGS_SETTINGS</code> window property of the
     *             settings manager.
     * @return a <code>Map</code> of changed settings.
     */
    public Map update(byte[] data) {
        return (new Update(data)).update();
    }

    /**
     * Parser for one snapshot of the <code>_XSETTINGS_SETTINGS</code>
     * property. Layout: CARD8 byte-order mark, 3 pad bytes, CARD32 serial,
     * INT32 setting count, then the settings themselves (see
     * {@code updateOne} for the per-setting layout).
     */
    class Update {
        /* byte order mark */
        private static final int LITTLE_ENDIAN = 0;
        private static final int BIG_ENDIAN = 1;

        /* setting type */
        private static final int TYPE_INTEGER = 0;
        private static final int TYPE_STRING = 1;
        private static final int TYPE_COLOR = 2;

        private byte[] data;             // raw property bytes
        private int dlen;                // cached data.length
        private int idx;                 // current read offset into data
        private boolean isLittle;        // byte order of this snapshot
        private long serial = -1;        // serial of this snapshot (shadows outer field)
        private int nsettings = 0;       // number of settings in the snapshot
        private boolean isValid;         // header parsed successfully
        private HashMap updatedSettings; // name -> value for changed settings

        /**
         * Construct an Update object for the data read from
         * <code>_XSETTINGS_SETTINGS</code> property of the XSETTINGS
         * selection owner.
         *
         * @param data <code>_XSETTINGS_SETTINGS</code> contents.
         */
        Update(byte[] data) {
            this.data = data;
            dlen = data.length;
            // Header alone is 12 bytes; anything shorter is malformed and
            // leaves isValid == false so update() returns null.
            if (dlen < 12) {
                // XXX: debug trace?
                return;
            }
            // first byte gives endianness of the data
            // next 3 bytes are unused (pad to 32 bit)
            idx = 0;
            isLittle = (getCARD8() == LITTLE_ENDIAN);
            idx = 4;
            serial = getCARD32();
            // N_SETTINGS is actually CARD32 (i.e. unsigned), but
            // since java doesn't have an unsigned int type, and
            // N_SETTINGS cannot realistically exceed 2^31 (so we
            // gonna use int anyway), just read it as INT32.
            idx = 8;
            nsettings = getINT32();
            updatedSettings = new HashMap();
            isValid = true;
        }

        /** Throws if fewer than {@code n} bytes remain at the current offset. */
        private void needBytes(int n)
            throws IndexOutOfBoundsException
        {
            if (idx + n <= dlen) {
                return;
            }
            throw new IndexOutOfBoundsException("at " + idx
                                                + " need " + n
                                                + " length " + dlen);
        }

        /** Reads an unsigned 8-bit value, advancing the offset by 1. */
        private int getCARD8()
            throws IndexOutOfBoundsException
        {
            needBytes(1);
            int val = data[idx] & 0xff;
            ++idx;
            return val;
        }

        /** Reads an unsigned 16-bit value in snapshot byte order, advancing by 2. */
        private int getCARD16()
            throws IndexOutOfBoundsException
        {
            needBytes(2);
            int val;
            if (isLittle) {
                val = ((data[idx + 0] & 0xff)     )
                    | ((data[idx + 1] & 0xff) << 8);
            } else {
                val = ((data[idx + 0] & 0xff) << 8)
                    | ((data[idx + 1] & 0xff)     );
            }
            idx += 2;
            return val;
        }

        /** Reads a signed 32-bit value in snapshot byte order, advancing by 4. */
        private int getINT32()
            throws IndexOutOfBoundsException
        {
            needBytes(4);
            int val;
            if (isLittle) {
                val = ((data[idx + 0] & 0xff)      )
                    | ((data[idx + 1] & 0xff) <<  8)
                    | ((data[idx + 2] & 0xff) << 16)
                    | ((data[idx + 3] & 0xff) << 24);
            } else {
                val = ((data[idx + 0] & 0xff) << 24)
                    | ((data[idx + 1] & 0xff) << 16)
                    | ((data[idx + 2] & 0xff) <<  8)
                    | ((data[idx + 3] & 0xff) <<  0);
            }
            idx += 4;
            return val;
        }

        /** Reads an unsigned 32-bit value as a long, advancing by 4. */
        private long getCARD32()
            throws IndexOutOfBoundsException
        {
            return getINT32() & 0x00000000ffffffffL;
        }

        /**
         * Reads a UTF-8 string of {@code len} bytes and advances the offset
         * past the string rounded up to the next 32-bit boundary.
         */
        private String getString(int len)
            throws IndexOutOfBoundsException
        {
            needBytes(len);
            String str = null;
            try {
                str = new String(data, idx, len, "UTF-8");
            } catch (UnsupportedEncodingException e) {
                // XXX: cannot happen, "UTF-8" is always supported
            }
            idx = (idx + len + 3) & ~0x3; // skip string bytes + padding
            return str;
        }

        /**
         * Update settings.
         */
        public Map update() {
            if (!isValid) {
                return null;
            }
            synchronized (XSettings.this) {
                long currentSerial = XSettings.this.serial;
                // Reject stale or duplicate snapshots.
                if (this.serial <= currentSerial) {
                    return null;
                }
                for (int i = 0; i < nsettings && idx < dlen; ++i) {
                    updateOne(currentSerial);
                }
                XSettings.this.serial = this.serial;
            }
            return updatedSettings;
        }

        /**
         * Parses a particular x setting.
         *
         * @exception IndexOutOfBoundsException if there isn't enough
         *            data for a setting.
         */
        private void updateOne(long currentSerial)
            throws IndexOutOfBoundsException,
                   IllegalArgumentException
        {
            int type = getCARD8();
            ++idx; // pad to next CARD16
            // save position of the property name, skip to serial
            int nameLen = getCARD16();
            int nameIdx = idx;
            // check if we should bother
            idx = (idx + nameLen + 3) & ~0x3; // pad to 32 bit
            long lastChanged = getCARD32();
            // Avoid constructing garbage for properties that has not
            // changed, skip the data for this property.
            if (lastChanged <= currentSerial) { // skip
                if (type == TYPE_INTEGER) {
                    idx += 4;
                } else if (type == TYPE_STRING) {
                    int len = getINT32();
                    idx = (idx + len + 3) & ~0x3;
                } else if (type == TYPE_COLOR) {
                    idx += 8; // 4 CARD16
                } else {
                    throw new IllegalArgumentException("Unknown type: "
                                                       + type);
                }
                return;
            }
            // Setting changed: rewind to the name and parse the value.
            idx = nameIdx;
            String name = getString(nameLen);
            idx += 4; // skip serial, parsed above
            Object value = null;
            if (type == TYPE_INTEGER) {
                value = Integer.valueOf(getINT32());
            }
            else if (type == TYPE_STRING) {
                value = getString(getINT32());
            }
            else if (type == TYPE_COLOR) {
                int r = getCARD16();
                int g = getCARD16();
                int b = getCARD16();
                int a = getCARD16();
                // Channels are CARD16, scaled here to [0, 1] floats.
                value = new Color(r / 65535.0f,
                                  g / 65535.0f,
                                  b / 65535.0f,
                                  a / 65535.0f);
            }
            else {
                throw new IllegalArgumentException("Unknown type: " + type);
            }
            if (name == null) {
                // dtrace???
                return;
            }
            updatedSettings.put(name, value);
        }
    } // class XSettings.Update
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.druid.metadata;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Iterators;
import com.google.common.hash.Hashing;
import com.google.common.io.BaseEncoding;
import com.google.inject.Inject;
import org.apache.druid.indexing.overlord.DataSourceMetadata;
import org.apache.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
import org.apache.druid.indexing.overlord.SegmentPublishResult;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.common.IAE;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.java.util.common.lifecycle.LifecycleStart;
import org.apache.druid.java.util.common.logger.Logger;
import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.TimelineObjectHolder;
import org.apache.druid.timeline.VersionedIntervalTimeline;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.apache.druid.timeline.partition.PartitionChunk;
import org.apache.druid.timeline.partition.ShardSpec;
import org.apache.druid.timeline.partition.ShardSpecFactory;
import org.joda.time.Interval;
import org.skife.jdbi.v2.FoldController;
import org.skife.jdbi.v2.Folder3;
import org.skife.jdbi.v2.Handle;
import org.skife.jdbi.v2.Query;
import org.skife.jdbi.v2.ResultIterator;
import org.skife.jdbi.v2.StatementContext;
import org.skife.jdbi.v2.TransactionCallback;
import org.skife.jdbi.v2.TransactionStatus;
import org.skife.jdbi.v2.exceptions.CallbackFailedException;
import org.skife.jdbi.v2.tweak.HandleCallback;
import org.skife.jdbi.v2.tweak.ResultSetMapper;
import org.skife.jdbi.v2.util.ByteArrayMapper;
import org.skife.jdbi.v2.util.StringMapper;
import javax.annotation.Nullable;
import java.io.IOException;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.stream.Collectors;
import java.util.stream.StreamSupport;
/**
*/
public class IndexerSQLMetadataStorageCoordinator implements IndexerMetadataStorageCoordinator
{
private static final Logger log = new Logger(IndexerSQLMetadataStorageCoordinator.class);

// JSON (de)serializer for segment payloads stored in the metadata tables.
private final ObjectMapper jsonMapper;
// Names of the metadata storage tables (segments, pendingSegments, dataSource).
private final MetadataStorageTablesConfig dbTables;
// Connector providing handles/retryable transactions against the metadata DB.
private final SQLMetadataConnector connector;

@Inject
public IndexerSQLMetadataStorageCoordinator(
    ObjectMapper jsonMapper,
    MetadataStorageTablesConfig dbTables,
    SQLMetadataConnector connector
)
{
  this.jsonMapper = jsonMapper;
  this.dbTables = dbTables;
  this.connector = connector;
}
// Outcome of attempting to compare-and-swap the datasource metadata.
enum DataSourceMetadataUpdateResult
{
  SUCCESS,
  // Permanent failure: the enclosing transaction must abort (plain RuntimeException).
  FAILURE,
  // Transient failure: the enclosing transaction may be retried (RetryTransactionException).
  TRY_AGAIN
}
/**
 * Lifecycle hook: ensures the dataSource, pendingSegments and segments
 * metadata tables exist before the coordinator is used.
 */
@LifecycleStart
public void start()
{
  connector.createDataSourceTable();
  connector.createPendingSegmentsTable();
  connector.createSegmentTable();
}
/**
 * Returns the distinct used (non-overshadowed) segments of {@code dataSource}
 * that overlap any of the given intervals, in first-seen order.
 */
@Override
public List<DataSegment> getUsedSegmentsForIntervals(final String dataSource, final List<Interval> intervals)
{
  return connector.retryWithHandle(
      handle -> {
        final VersionedIntervalTimeline<String, DataSegment> timeline =
            getTimelineForIntervalsWithHandle(handle, dataSource, intervals);
        // Collect distinct segments preserving encounter order, matching the
        // semantics of Stream.distinct() on an ordered stream.
        final Set<DataSegment> seen = new HashSet<>();
        final List<DataSegment> result = new ArrayList<>();
        for (Interval interval : intervals) {
          for (TimelineObjectHolder<String, DataSegment> holder : timeline.lookup(interval)) {
            for (DataSegment segment : holder.getObject().payloads()) {
              if (seen.add(segment)) {
                result.add(segment);
              }
            }
          }
        }
        return result;
      }
  );
}
/**
 * Reads all pending-segment payloads of {@code dataSource} whose recorded
 * interval overlaps {@code interval}.
 *
 * @return identifiers of overlapping pending segments (possibly empty)
 *
 * @throws IOException if a stored payload cannot be deserialized
 */
private List<SegmentIdWithShardSpec> getPendingSegmentsForIntervalWithHandle(
    final Handle handle,
    final String dataSource,
    final Interval interval
) throws IOException
{
  final List<SegmentIdWithShardSpec> identifiers = new ArrayList<>();
  // BUG FIX: the iterator was previously closed only on the success path and
  // leaked if readValue() threw; try-with-resources closes it on every path
  // (matching getTimelineForIntervalsWithHandle).
  try (final ResultIterator<byte[]> dbSegments =
           handle.createQuery(
               StringUtils.format(
                   // "end" must be quoted: it is a reserved word on some databases.
                   "SELECT payload FROM %1$s WHERE dataSource = :dataSource AND start <= :end and %2$send%2$s >= :start",
                   dbTables.getPendingSegmentsTable(), connector.getQuoteString()
               )
           )
                 .bind("dataSource", dataSource)
                 .bind("start", interval.getStart().toString())
                 .bind("end", interval.getEnd().toString())
                 .map(ByteArrayMapper.FIRST)
                 .iterator()) {
    while (dbSegments.hasNext()) {
      final byte[] payload = dbSegments.next();
      final SegmentIdWithShardSpec identifier = jsonMapper.readValue(payload, SegmentIdWithShardSpec.class);
      // The SQL range check is coarse (string comparison); verify the parsed
      // interval actually overlaps before including it.
      if (interval.overlaps(identifier.getInterval())) {
        identifiers.add(identifier);
      }
    }
  }
  return identifiers;
}
/**
 * Builds a timeline of all used segments of {@code dataSource} overlapping
 * any of {@code intervals}.
 *
 * @throws IAE if intervals is null or empty
 */
private VersionedIntervalTimeline<String, DataSegment> getTimelineForIntervalsWithHandle(
    final Handle handle,
    final String dataSource,
    final List<Interval> intervals
)
{
  if (intervals == null || intervals.isEmpty()) {
    throw new IAE("null/empty intervals");
  }
  // Build one positional-parameter query with an OR'd overlap clause per interval.
  final StringBuilder sb = new StringBuilder();
  sb.append("SELECT payload FROM %s WHERE used = true AND dataSource = ? AND (");
  for (int i = 0; i < intervals.size(); i++) {
    sb.append(
        StringUtils.format("(start <= ? AND %1$send%1$s >= ?)", connector.getQuoteString())
    );
    if (i == intervals.size() - 1) {
      sb.append(")");
    } else {
      sb.append(" OR ");
    }
  }
  Query<Map<String, Object>> sql = handle.createQuery(
      StringUtils.format(
          sb.toString(),
          dbTables.getSegmentsTable()
      )
  ).bind(0, dataSource);
  // Positional binding: parameter 0 is dataSource; interval i binds its END at
  // index 2i+1 (compared against segment start) and its START at index 2i+2
  // (compared against segment end), matching the clause built above.
  for (int i = 0; i < intervals.size(); i++) {
    Interval interval = intervals.get(i);
    sql = sql
        .bind(2 * i + 1, interval.getEnd().toString())
        .bind(2 * i + 2, interval.getStart().toString());
  }
  try (final ResultIterator<byte[]> dbSegments = sql.map(ByteArrayMapper.FIRST).iterator()) {
    return VersionedIntervalTimeline.forSegments(
        Iterators.transform(
            dbSegments,
            payload -> {
              try {
                return jsonMapper.readValue(payload, DataSegment.class);
              }
              catch (IOException e) {
                // Payload corruption is unrecoverable here; surface it unchecked.
                throw new RuntimeException(e);
              }
            }
        )
    );
  }
}
/**
 * Attempts to insert a set of segments to the database. Returns the set of segments actually added (segments
 * with identifiers already in the database will not be added).
 *
 * @param segments set of segments to add
 *
 * @return set of segments actually added
 */
@Override
public Set<DataSegment> announceHistoricalSegments(final Set<DataSegment> segments) throws IOException
{
  final SegmentPublishResult publishResult = announceHistoricalSegments(segments, null, null);

  // With null start/end metadata there is no metadata transaction to fail,
  // so a non-success result indicates an internal inconsistency.
  if (!publishResult.isSuccess()) {
    throw new ISE("WTF?! announceHistoricalSegments failed with null metadata, should not happen.");
  }

  return publishResult.getSegments();
}
/**
 * Publishes {@code segments} and, when metadata is supplied, compare-and-swaps
 * the datasource metadata inside the same retryable transaction.
 *
 * @param segments      non-empty set of segments, all from the same dataSource
 * @param startMetadata expected current metadata, or null to skip the metadata swap
 * @param endMetadata   metadata to store on success; must be null exactly when
 *                      startMetadata is null
 *
 * @return ok() with the segments actually inserted, or fail() when the
 *         metadata swap definitely did not happen
 *
 * @throws IllegalArgumentException on an empty or mixed-datasource segment set,
 *                                  or a half-null metadata pair
 * @throws CallbackFailedException  when it is unknown whether the update happened
 */
@Override
public SegmentPublishResult announceHistoricalSegments(
    final Set<DataSegment> segments,
    @Nullable final DataSourceMetadata startMetadata,
    @Nullable final DataSourceMetadata endMetadata
) throws IOException
{
  if (segments.isEmpty()) {
    throw new IllegalArgumentException("segment set must not be empty");
  }
  final String dataSource = segments.iterator().next().getDataSource();
  for (DataSegment segment : segments) {
    if (!dataSource.equals(segment.getDataSource())) {
      throw new IllegalArgumentException("segments must all be from the same dataSource");
    }
  }
  if ((startMetadata == null && endMetadata != null) || (startMetadata != null && endMetadata == null)) {
    throw new IllegalArgumentException("start/end metadata pair must be either null or non-null");
  }
  // Find which segments are used (i.e. not overshadowed).
  final Set<DataSegment> usedSegments = new HashSet<>();
  List<TimelineObjectHolder<String, DataSegment>> segmentHolders =
      VersionedIntervalTimeline.forSegments(segments).lookupWithIncompletePartitions(Intervals.ETERNITY);
  for (TimelineObjectHolder<String, DataSegment> holder : segmentHolders) {
    for (PartitionChunk<DataSegment> chunk : holder.getObject()) {
      usedSegments.add(chunk.getObject());
    }
  }
  final AtomicBoolean definitelyNotUpdated = new AtomicBoolean(false);
  try {
    return connector.retryTransaction(
        new TransactionCallback<SegmentPublishResult>()
        {
          @Override
          public SegmentPublishResult inTransaction(
              final Handle handle,
              final TransactionStatus transactionStatus
          ) throws Exception
          {
            // Set definitelyNotUpdated back to false upon retrying.
            definitelyNotUpdated.set(false);
            final Set<DataSegment> inserted = new HashSet<>();
            if (startMetadata != null) {
              final DataSourceMetadataUpdateResult result = updateDataSourceMetadataWithHandle(
                  handle,
                  dataSource,
                  startMetadata,
                  endMetadata
              );
              if (result != DataSourceMetadataUpdateResult.SUCCESS) {
                // Metadata was definitely not updated.
                transactionStatus.setRollbackOnly();
                definitelyNotUpdated.set(true);
                if (result == DataSourceMetadataUpdateResult.FAILURE) {
                  throw new RuntimeException("Aborting transaction!");
                } else if (result == DataSourceMetadataUpdateResult.TRY_AGAIN) {
                  throw new RetryTransactionException("Aborting transaction!");
                }
              }
            }
            for (final DataSegment segment : segments) {
              if (announceHistoricalSegment(handle, segment, usedSegments.contains(segment))) {
                inserted.add(segment);
              }
            }
            return SegmentPublishResult.ok(ImmutableSet.copyOf(inserted));
          }
        },
        3,
        SQLMetadataConnector.DEFAULT_MAX_TRIES
    );
  }
  catch (CallbackFailedException e) {
    if (definitelyNotUpdated.get()) {
      return SegmentPublishResult.fail(e.getMessage());
    } else {
      // Must throw exception if we are not sure if we updated or not.
      throw e;
    }
  }
}
/**
 * Allocates (or returns an existing) pending segment id for the given sequence
 * and interval, retrying with a fresh handle on failure. Dispatches to the
 * interval-keyed variant when {@code skipSegmentLineageCheck} is true,
 * otherwise to the lineage-checking variant keyed on {@code previousSegmentId}.
 */
@Override
public SegmentIdWithShardSpec allocatePendingSegment(
    final String dataSource,
    final String sequenceName,
    @Nullable final String previousSegmentId,
    final Interval interval,
    final ShardSpecFactory shardSpecFactory,
    final String maxVersion,
    final boolean skipSegmentLineageCheck
)
{
  Preconditions.checkNotNull(dataSource, "dataSource");
  Preconditions.checkNotNull(sequenceName, "sequenceName");
  Preconditions.checkNotNull(interval, "interval");
  Preconditions.checkNotNull(maxVersion, "version");
  return connector.retryWithHandle(
      handle -> {
        if (skipSegmentLineageCheck) {
          return allocatePendingSegment(
              handle,
              dataSource,
              sequenceName,
              interval,
              shardSpecFactory,
              maxVersion
          );
        } else {
          return allocatePendingSegmentWithSegmentLineageCheck(
              handle,
              dataSource,
              sequenceName,
              previousSegmentId,
              interval,
              shardSpecFactory,
              maxVersion
          );
        }
      }
  );
}
/**
 * Allocates a pending segment keyed on (sequence_name, sequence_prev_id):
 * returns the existing row's id when one matches the requested interval,
 * null when an existing row's interval conflicts or no new segment can be
 * created, otherwise inserts and returns a fresh identifier.
 */
@Nullable
private SegmentIdWithShardSpec allocatePendingSegmentWithSegmentLineageCheck(
    final Handle handle,
    final String dataSource,
    final String sequenceName,
    @Nullable final String previousSegmentId,
    final Interval interval,
    final ShardSpecFactory shardSpecFactory,
    final String maxVersion
) throws IOException
{
  // An empty string stands in for "no previous segment" so the column is never null.
  final String previousSegmentIdNotNull = previousSegmentId == null ? "" : previousSegmentId;
  final CheckExistingSegmentIdResult result = checkAndGetExistingSegmentId(
      handle.createQuery(
          StringUtils.format(
              "SELECT payload FROM %s WHERE "
              + "dataSource = :dataSource AND "
              + "sequence_name = :sequence_name AND "
              + "sequence_prev_id = :sequence_prev_id",
              dbTables.getPendingSegmentsTable()
          )
      ),
      interval,
      sequenceName,
      previousSegmentIdNotNull,
      Pair.of("dataSource", dataSource),
      Pair.of("sequence_name", sequenceName),
      Pair.of("sequence_prev_id", previousSegmentIdNotNull)
  );
  if (result.found) {
    // The found existing segment identifier can be null if its interval doesn't match with the given interval
    return result.segmentIdentifier;
  }
  final SegmentIdWithShardSpec newIdentifier = createNewSegment(
      handle,
      dataSource,
      interval,
      shardSpecFactory,
      maxVersion
  );
  if (newIdentifier == null) {
    return null;
  }
  // SELECT -> INSERT can fail due to races; callers must be prepared to retry.
  // Avoiding ON DUPLICATE KEY since it's not portable.
  // Avoiding try/catch since it may cause inadvertent transaction-splitting.
  // UNIQUE key for the row, ensuring sequences do not fork in two directions.
  // Using a single column instead of (sequence_name, sequence_prev_id) as some MySQL storage engines
  // have difficulty with large unique keys (see https://github.com/apache/incubator-druid/issues/2319)
  final String sequenceNamePrevIdSha1 = BaseEncoding.base16().encode(
      Hashing.sha1()
             .newHasher()
             .putBytes(StringUtils.toUtf8(sequenceName))
             .putByte((byte) 0xff)
             .putBytes(StringUtils.toUtf8(previousSegmentIdNotNull))
             .hash()
             .asBytes()
  );
  insertToMetastore(
      handle,
      newIdentifier,
      dataSource,
      interval,
      previousSegmentIdNotNull,
      sequenceName,
      sequenceNamePrevIdSha1
  );
  return newIdentifier;
}
/**
 * Allocates a pending segment keyed on (sequence_name, interval) — the
 * lineage-free variant used when skipSegmentLineageCheck is set. Returns the
 * existing row's id when one matches the requested interval, null when an
 * existing row's interval conflicts or no new segment can be created,
 * otherwise inserts (with an empty previous-segment id) and returns a fresh
 * identifier.
 */
@Nullable
private SegmentIdWithShardSpec allocatePendingSegment(
    final Handle handle,
    final String dataSource,
    final String sequenceName,
    final Interval interval,
    final ShardSpecFactory shardSpecFactory,
    final String maxVersion
) throws IOException
{
  final CheckExistingSegmentIdResult result = checkAndGetExistingSegmentId(
      handle.createQuery(
          StringUtils.format(
              // "end" is quoted (%2$s is the connector quote string) because it
              // is a reserved word on some databases.
              "SELECT payload FROM %s WHERE "
              + "dataSource = :dataSource AND "
              + "sequence_name = :sequence_name AND "
              + "start = :start AND "
              + "%2$send%2$s = :end",
              dbTables.getPendingSegmentsTable(),
              connector.getQuoteString()
          )
      ),
      interval,
      sequenceName,
      null,
      Pair.of("dataSource", dataSource),
      Pair.of("sequence_name", sequenceName),
      Pair.of("start", interval.getStart().toString()),
      Pair.of("end", interval.getEnd().toString())
  );
  if (result.found) {
    // The found existing segment identifier can be null if its interval doesn't match with the given interval
    return result.segmentIdentifier;
  }
  final SegmentIdWithShardSpec newIdentifier = createNewSegment(
      handle,
      dataSource,
      interval,
      shardSpecFactory,
      maxVersion
  );
  if (newIdentifier == null) {
    return null;
  }
  // SELECT -> INSERT can fail due to races; callers must be prepared to retry.
  // Avoiding ON DUPLICATE KEY since it's not portable.
  // Avoiding try/catch since it may cause inadvertent transaction-splitting.
  // UNIQUE key for the row, ensuring we don't have more than one segment per sequence per interval.
  // Using a single column instead of (sequence_name, sequence_prev_id) as some MySQL storage engines
  // have difficulty with large unique keys (see https://github.com/apache/incubator-druid/issues/2319)
  final String sequenceNamePrevIdSha1 = BaseEncoding.base16().encode(
      Hashing.sha1()
             .newHasher()
             .putBytes(StringUtils.toUtf8(sequenceName))
             .putByte((byte) 0xff)
             .putLong(interval.getStartMillis())
             .putLong(interval.getEndMillis())
             .hash()
             .asBytes()
  );
  // always insert empty previous sequence id
  insertToMetastore(handle, newIdentifier, dataSource, interval, "", sequenceName, sequenceNamePrevIdSha1);
  log.info("Allocated pending segment [%s] for sequence[%s] in DB", newIdentifier, sequenceName);
  return newIdentifier;
}
/**
 * Runs the given pending-segments query (after binding {@code queryVars}) and reports whether a
 * matching pending segment already exists. A row may be found whose stored interval does not
 * exactly match the requested one; in that case the result is "found" with a null identifier.
 */
private CheckExistingSegmentIdResult checkAndGetExistingSegmentId(
    final Query<Map<String, Object>> query,
    final Interval interval,
    final String sequenceName,
    final @Nullable String previousSegmentId,
    final Pair<String, String>... queryVars
) throws IOException
{
  // Bind every supplied (name, value) pair onto the query before executing it.
  Query<Map<String, Object>> boundQuery = query;
  for (Pair<String, String> queryVar : queryVars) {
    boundQuery = boundQuery.bind(queryVar.lhs, queryVar.rhs);
  }

  final List<byte[]> existingBytes = boundQuery.map(ByteArrayMapper.FIRST).list();
  if (existingBytes.isEmpty()) {
    // No pending segment stored for this sequence at all.
    return new CheckExistingSegmentIdResult(false, null);
  }

  final SegmentIdWithShardSpec existingIdentifier = jsonMapper.readValue(
      Iterables.getOnlyElement(existingBytes),
      SegmentIdWithShardSpec.class
  );

  final boolean intervalMatches =
      existingIdentifier.getInterval().getStartMillis() == interval.getStartMillis()
      && existingIdentifier.getInterval().getEndMillis() == interval.getEndMillis();

  if (intervalMatches) {
    if (previousSegmentId == null) {
      log.info("Found existing pending segment [%s] for sequence[%s] in DB", existingIdentifier, sequenceName);
    } else {
      log.info(
          "Found existing pending segment [%s] for sequence[%s] (previous = [%s]) in DB",
          existingIdentifier,
          sequenceName,
          previousSegmentId
      );
    }
    return new CheckExistingSegmentIdResult(true, existingIdentifier);
  }

  // A row exists but its interval differs from the requested one; report "found" without an identifier.
  if (previousSegmentId == null) {
    log.warn(
        "Cannot use existing pending segment [%s] for sequence[%s] in DB, "
        + "does not match requested interval[%s]",
        existingIdentifier,
        sequenceName,
        interval
    );
  } else {
    log.warn(
        "Cannot use existing pending segment [%s] for sequence[%s] (previous = [%s]) in DB, "
        + "does not match requested interval[%s]",
        existingIdentifier,
        sequenceName,
        previousSegmentId,
        interval
    );
  }
  return new CheckExistingSegmentIdResult(true, null);
}
/**
 * Result of {@code checkAndGetExistingSegmentId}: {@code found} is true when a pending-segment
 * row exists for the query; {@code segmentIdentifier} is null when the stored row's interval
 * does not match the requested interval (and thus cannot be reused).
 */
private static class CheckExistingSegmentIdResult
{
  // Whether any pending-segment row was found at all.
  private final boolean found;
  // The reusable identifier, or null when the found row's interval did not match.
  @Nullable
  private final SegmentIdWithShardSpec segmentIdentifier;

  CheckExistingSegmentIdResult(boolean found, @Nullable SegmentIdWithShardSpec segmentIdentifier)
  {
    this.found = found;
    this.segmentIdentifier = segmentIdentifier;
  }
}
/**
 * Inserts one freshly allocated pending-segment row into the pending-segments table.
 * The payload column stores the JSON-serialized {@code newIdentifier}.
 */
private void insertToMetastore(
    Handle handle,
    SegmentIdWithShardSpec newIdentifier,
    String dataSource,
    Interval interval,
    String previousSegmentId,
    String sequenceName,
    String sequenceNamePrevIdSha1
) throws JsonProcessingException
{
  // %2$s wraps the column name "end" in the connector's quote string.
  final String insertSql = StringUtils.format(
      "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, sequence_name, sequence_prev_id, sequence_name_prev_id_sha1, payload) "
      + "VALUES (:id, :dataSource, :created_date, :start, :end, :sequence_name, :sequence_prev_id, :sequence_name_prev_id_sha1, :payload)",
      dbTables.getPendingSegmentsTable(),
      connector.getQuoteString()
  );
  handle.createStatement(insertSql)
        .bind("id", newIdentifier.toString())
        .bind("dataSource", dataSource)
        .bind("created_date", DateTimes.nowUtc().toString())
        .bind("start", interval.getStart().toString())
        .bind("end", interval.getEnd().toString())
        .bind("sequence_name", sequenceName)
        .bind("sequence_prev_id", previousSegmentId)
        .bind("sequence_name_prev_id_sha1", sequenceNamePrevIdSha1)
        .bind("payload", jsonMapper.writeValueAsBytes(newIdentifier))
        .execute();
}
/**
 * Computes the identifier for a brand-new pending segment in {@code interval}, or returns null
 * when allocation is impossible: the interval overlaps more than one existing timeline chunk,
 * an existing segment's shardSpec is incompatible with {@code shardSpecFactory}, or the current
 * max id conflicts in interval/version.
 *
 * @param handle           active metadata-store handle
 * @param dataSource       dataSource to allocate in
 * @param interval         interval the new segment must cover exactly
 * @param shardSpecFactory produces the shardSpec for the new segment
 * @param maxVersion       version used when no existing chunks/pendings determine one; also an
 *                         upper bound the pre-existing max version must not exceed
 */
@Nullable
private SegmentIdWithShardSpec createNewSegment(
    final Handle handle,
    final String dataSource,
    final Interval interval,
    final ShardSpecFactory shardSpecFactory,
    final String maxVersion
) throws IOException
{
  // Committed segments overlapping the requested interval, grouped into timeline chunks.
  final List<TimelineObjectHolder<String, DataSegment>> existingChunks = getTimelineForIntervalsWithHandle(
      handle,
      dataSource,
      ImmutableList.of(interval)
  ).lookup(interval);

  if (existingChunks.size() > 1) {
    // Not possible to expand more than one chunk with a single segment.
    log.warn(
        "Cannot allocate new segment for dataSource[%s], interval[%s]: already have [%,d] chunks.",
        dataSource,
        interval,
        existingChunks.size()
    );
    return null;
  } else {
    if (existingChunks
        .stream()
        .flatMap(holder -> StreamSupport.stream(holder.getObject().spliterator(), false))
        .anyMatch(chunk -> !chunk.getObject().getShardSpec().isCompatible(shardSpecFactory.getShardSpecClass()))) {
      // All existing segments should have a compatible shardSpec with shardSpecFactory.
      return null;
    }

    // max partitionId of the SAME shardSpec
    SegmentIdWithShardSpec maxId = null;
    if (!existingChunks.isEmpty()) {
      TimelineObjectHolder<String, DataSegment> existingHolder = Iterables.getOnlyElement(existingChunks);
      maxId = StreamSupport.stream(existingHolder.getObject().spliterator(), false)
                           // Here we check only the segments of the same shardSpec to find out the max partitionId.
                           // Note that OverwriteShardSpec has the higher range for partitionId than others.
                           // See PartitionIds.
                           .filter(chunk -> chunk.getObject().getShardSpec().getClass() == shardSpecFactory.getShardSpecClass())
                           .max(Comparator.comparing(chunk -> chunk.getObject().getShardSpec().getPartitionNum()))
                           .map(chunk -> SegmentIdWithShardSpec.fromDataSegment(chunk.getObject()))
                           .orElse(null);
    }

    // Pending segments also compete for partitionIds; fold the committed max (if any) into the pool.
    final List<SegmentIdWithShardSpec> pendings = getPendingSegmentsForIntervalWithHandle(
        handle,
        dataSource,
        interval
    );
    if (maxId != null) {
      pendings.add(maxId);
    }

    // Overall max by (version, then partitionNum) among ids of the requested shardSpec type.
    maxId = pendings.stream()
                    .filter(id -> id.getShardSpec().getClass() == shardSpecFactory.getShardSpecClass())
                    .max((id1, id2) -> {
                      final int versionCompare = id1.getVersion().compareTo(id2.getVersion());
                      if (versionCompare != 0) {
                        return versionCompare;
                      } else {
                        return Integer.compare(id1.getShardSpec().getPartitionNum(), id2.getShardSpec().getPartitionNum());
                      }
                    })
                    .orElse(null);

    // Find the major version of existing segments
    @Nullable final String versionOfExistingChunks;
    if (!existingChunks.isEmpty()) {
      versionOfExistingChunks = existingChunks.get(0).getVersion();
    } else if (!pendings.isEmpty()) {
      versionOfExistingChunks = pendings.get(0).getVersion();
    } else {
      versionOfExistingChunks = null;
    }

    if (maxId == null) {
      // First segment of this shardSpec type here: fresh shardSpec, reusing the existing version if any.
      final ShardSpec shardSpec = shardSpecFactory.create(jsonMapper, null);
      return new SegmentIdWithShardSpec(dataSource, interval, versionOfExistingChunks == null ? maxVersion : versionOfExistingChunks, shardSpec);
    } else if (!maxId.getInterval().equals(interval) || maxId.getVersion().compareTo(maxVersion) > 0) {
      // The max id either spans a different interval or is newer than the allowed maxVersion.
      log.warn(
          "Cannot allocate new segment for dataSource[%s], interval[%s], maxVersion[%s]: conflicting segment[%s].",
          dataSource,
          interval,
          maxVersion,
          maxId
      );
      return null;
    } else {
      // Next shardSpec after the current max, in the same interval and version.
      final ShardSpec newShardSpec = shardSpecFactory.create(jsonMapper, maxId.getShardSpec());
      return new SegmentIdWithShardSpec(
          dataSource,
          maxId.getInterval(),
          Preconditions.checkNotNull(versionOfExistingChunks, "versionOfExistingChunks"),
          newShardSpec
      );
    }
  }
}
/**
 * Deletes pending-segment rows for {@code dataSource} whose created_date falls within
 * {@code deleteInterval}. NOTE: the filter is on the row's creation time, not on the
 * segment's own interval.
 *
 * @return the number of rows deleted
 */
@Override
public int deletePendingSegments(String dataSource, Interval deleteInterval)
{
  return connector.getDBI().inTransaction(
      (handle, status) -> handle
          .createStatement(
              StringUtils.format(
                  "delete from %s where datasource = :dataSource and created_date >= :start and created_date < :end",
                  dbTables.getPendingSegmentsTable()
              )
          )
          .bind("dataSource", dataSource)
          .bind("start", deleteInterval.getStart().toString())
          .bind("end", deleteInterval.getEnd().toString())
          .execute()
  );
}
/**
 * Attempts to insert a single segment to the database. If the segment already exists, will do nothing; although,
 * this checking is imperfect and callers must be prepared to retry their entire transaction on exceptions.
 *
 * @param handle  active metadata-store handle
 * @param segment segment to publish
 * @param used    value stored in the row's "used" column
 *
 * @return true if the segment was added, false if it already existed
 */
private boolean announceHistoricalSegment(
    final Handle handle,
    final DataSegment segment,
    final boolean used
) throws IOException
{
  try {
    if (segmentExists(handle, segment)) {
      log.info("Found [%s] in DB, not updating DB", segment.getId());
      return false;
    }

    // SELECT -> INSERT can fail due to races; callers must be prepared to retry.
    // Avoiding ON DUPLICATE KEY since it's not portable.
    // Avoiding try/catch since it may cause inadvertent transaction-splitting.
    final int numRowsInserted = handle.createStatement(
        StringUtils.format(
            "INSERT INTO %1$s (id, dataSource, created_date, start, %2$send%2$s, partitioned, version, used, payload) "
            + "VALUES (:id, :dataSource, :created_date, :start, :end, :partitioned, :version, :used, :payload)",
            dbTables.getSegmentsTable(),
            connector.getQuoteString()
        )
    )
        .bind("id", segment.getId().toString())
        .bind("dataSource", segment.getDataSource())
        .bind("created_date", DateTimes.nowUtc().toString())
        .bind("start", segment.getInterval().getStart().toString())
        .bind("end", segment.getInterval().getEnd().toString())
        // Simplified from the redundant "(x instanceof NoneShardSpec) ? false : true" ternary.
        .bind("partitioned", !(segment.getShardSpec() instanceof NoneShardSpec))
        .bind("version", segment.getVersion())
        .bind("used", used)
        .bind("payload", jsonMapper.writeValueAsBytes(segment))
        .execute();

    if (numRowsInserted == 1) {
      log.info("Published segment [%s] to DB with used flag [%s], json[%s]", segment.getId(), used, jsonMapper.writeValueAsString(segment));
    } else if (numRowsInserted == 0) {
      throw new ISE("Failed to publish segment[%s] to DB with used flag[%s], json[%s]", segment.getId(), used, jsonMapper.writeValueAsString(segment));
    } else {
      throw new ISE("WTH? numRowsInserted[%s] is larger than 1 after inserting segment[%s] with used flag[%s], json[%s]", numRowsInserted, segment.getId(), used, jsonMapper.writeValueAsString(segment));
    }
  }
  catch (Exception e) {
    log.error(e, "Exception inserting segment [%s] with used flag [%s] into DB", segment.getId(), used);
    throw e;
  }
  return true;
}
/**
 * Returns true when a segments-table row with this segment's id already exists.
 */
private boolean segmentExists(final Handle handle, final DataSegment segment)
{
  final List<String> matchingIds = handle
      .createQuery(StringUtils.format("SELECT id FROM %s WHERE id = :identifier", dbTables.getSegmentsTable()))
      .bind("identifier", segment.getId().toString())
      .map(StringMapper.FIRST)
      .list();
  return !matchingIds.isEmpty();
}
/**
 * Read dataSource metadata. Returns null if there is no metadata.
 */
@Override
public DataSourceMetadata getDataSourceMetadata(final String dataSource)
{
  final byte[] payloadBytes = connector.lookup(
      dbTables.getDataSourceTable(),
      "dataSource",
      "commit_metadata_payload",
      dataSource
  );
  if (payloadBytes == null) {
    // No metadata row stored for this dataSource yet.
    return null;
  }
  try {
    return jsonMapper.readValue(payloadBytes, DataSourceMetadata.class);
  }
  catch (Exception e) {
    // Deserialization failure is unrecoverable here; surface it unchecked.
    throw new RuntimeException(e);
  }
}
/**
 * Read dataSource metadata as bytes, from a specific handle. Returns null if there is no metadata.
 * Performs the same lookup as {@link #getDataSourceMetadata} but reuses the caller's handle.
 */
private byte[] getDataSourceMetadataWithHandleAsBytes(final Handle handle, final String dataSource)
{
  return connector.lookupWithHandle(
      handle,
      dbTables.getDataSourceTable(),
      "dataSource",
      "commit_metadata_payload",
      dataSource
  );
}
/**
 * Compare-and-swap dataSource metadata in a transaction. This will only modify dataSource metadata if it equals
 * oldCommitMetadata when this function is called (based on T.equals). This method is idempotent in that if
 * the metadata already equals newCommitMetadata, it will return true.
 *
 * @param handle        database handle
 * @param dataSource    druid dataSource
 * @param startMetadata dataSource metadata pre-insert must match this startMetadata according to
 *                      {@link DataSourceMetadata#matches(DataSourceMetadata)}
 * @param endMetadata   dataSource metadata post-insert will have this endMetadata merged in with
 *                      {@link DataSourceMetadata#plus(DataSourceMetadata)}
 *
 * @return SUCCESS if dataSource metadata was updated from matching startMetadata to matching endMetadata, FAILURE or
 * TRY_AGAIN if it definitely was not updated. This guarantee is meant to help
 * {@link #announceHistoricalSegments(Set, DataSourceMetadata, DataSourceMetadata)}
 * achieve its own guarantee.
 *
 * @throws RuntimeException if state is unknown after this call
 */
protected DataSourceMetadataUpdateResult updateDataSourceMetadataWithHandle(
    final Handle handle,
    final String dataSource,
    final DataSourceMetadata startMetadata,
    final DataSourceMetadata endMetadata
) throws IOException
{
  Preconditions.checkNotNull(dataSource, "dataSource");
  Preconditions.checkNotNull(startMetadata, "startMetadata");
  Preconditions.checkNotNull(endMetadata, "endMetadata");

  // Read the current row (if any); derive both its SHA1 (used as the CAS token) and its object form.
  final byte[] oldCommitMetadataBytesFromDb = getDataSourceMetadataWithHandleAsBytes(handle, dataSource);
  final String oldCommitMetadataSha1FromDb;
  final DataSourceMetadata oldCommitMetadataFromDb;
  if (oldCommitMetadataBytesFromDb == null) {
    oldCommitMetadataSha1FromDb = null;
    oldCommitMetadataFromDb = null;
  } else {
    oldCommitMetadataSha1FromDb = BaseEncoding.base16().encode(
        Hashing.sha1().hashBytes(oldCommitMetadataBytesFromDb).asBytes()
    );
    oldCommitMetadataFromDb = jsonMapper.readValue(oldCommitMetadataBytesFromDb, DataSourceMetadata.class);
  }

  final boolean startMetadataMatchesExisting;
  if (oldCommitMetadataFromDb == null) {
    // No previous commit: the caller's start state must itself be a valid starting point.
    startMetadataMatchesExisting = startMetadata.isValidStart();
  } else {
    // Checking against the last committed metadata.
    // Converting the last one into start metadata for checking since only the same type of metadata can be matched.
    // Even though kafka/kinesis indexing services use different sequenceNumber types for representing
    // start and end sequenceNumbers, the below conversion is fine because the new start sequenceNumbers are supposed
    // to be same with end sequenceNumbers of the last commit.
    startMetadataMatchesExisting = startMetadata.asStartMetadata().matches(oldCommitMetadataFromDb.asStartMetadata());
  }

  if (!startMetadataMatchesExisting) {
    // Not in the desired start state.
    log.error(
        "Not updating metadata, existing state[%s] in metadata store doesn't match to the new start state[%s].",
        oldCommitMetadataFromDb,
        startMetadata
    );
    return DataSourceMetadataUpdateResult.FAILURE;
  }

  // Only endOffsets should be stored in metadata store
  final DataSourceMetadata newCommitMetadata = oldCommitMetadataFromDb == null
                                               ? endMetadata
                                               : oldCommitMetadataFromDb.plus(endMetadata);
  final byte[] newCommitMetadataBytes = jsonMapper.writeValueAsBytes(newCommitMetadata);
  final String newCommitMetadataSha1 = BaseEncoding.base16().encode(
      Hashing.sha1().hashBytes(newCommitMetadataBytes).asBytes()
  );

  final DataSourceMetadataUpdateResult retVal;
  if (oldCommitMetadataBytesFromDb == null) {
    // SELECT -> INSERT can fail due to races; callers must be prepared to retry.
    final int numRows = handle.createStatement(
        StringUtils.format(
            "INSERT INTO %s (dataSource, created_date, commit_metadata_payload, commit_metadata_sha1) "
            + "VALUES (:dataSource, :created_date, :commit_metadata_payload, :commit_metadata_sha1)",
            dbTables.getDataSourceTable()
        )
    )
                              .bind("dataSource", dataSource)
                              .bind("created_date", DateTimes.nowUtc().toString())
                              .bind("commit_metadata_payload", newCommitMetadataBytes)
                              .bind("commit_metadata_sha1", newCommitMetadataSha1)
                              .execute();
    retVal = numRows == 1 ? DataSourceMetadataUpdateResult.SUCCESS : DataSourceMetadataUpdateResult.TRY_AGAIN;
  } else {
    // Expecting a particular old metadata; use the SHA1 in a compare-and-swap UPDATE
    final int numRows = handle.createStatement(
        StringUtils.format(
            "UPDATE %s SET "
            + "commit_metadata_payload = :new_commit_metadata_payload, "
            + "commit_metadata_sha1 = :new_commit_metadata_sha1 "
            + "WHERE dataSource = :dataSource AND commit_metadata_sha1 = :old_commit_metadata_sha1",
            dbTables.getDataSourceTable()
        )
    )
                              .bind("dataSource", dataSource)
                              .bind("old_commit_metadata_sha1", oldCommitMetadataSha1FromDb)
                              .bind("new_commit_metadata_payload", newCommitMetadataBytes)
                              .bind("new_commit_metadata_sha1", newCommitMetadataSha1)
                              .execute();
    retVal = numRows == 1 ? DataSourceMetadataUpdateResult.SUCCESS : DataSourceMetadataUpdateResult.TRY_AGAIN;
  }

  if (retVal == DataSourceMetadataUpdateResult.SUCCESS) {
    log.info("Updated metadata from[%s] to[%s].", oldCommitMetadataFromDb, newCommitMetadata);
  } else {
    log.info("Not updating metadata, compare-and-swap failure.");
  }

  return retVal;
}
/**
 * Deletes the metadata row for {@code dataSource}.
 *
 * @return true when at least one row was deleted
 */
@Override
public boolean deleteDataSourceMetadata(final String dataSource)
{
  // Lambda form of HandleCallback, matching the style used elsewhere in this class.
  return connector.retryWithHandle(
      handle -> {
        final int rows = handle
            .createStatement(
                StringUtils.format("DELETE from %s WHERE dataSource = :dataSource", dbTables.getDataSourceTable())
            )
            .bind("dataSource", dataSource)
            .execute();
        return rows > 0;
      }
  );
}
/**
 * Unconditionally overwrites the stored metadata for {@code dataSource} (no compare-and-swap).
 *
 * @return true when exactly one row was updated
 */
@Override
public boolean resetDataSourceMetadata(final String dataSource, final DataSourceMetadata dataSourceMetadata)
    throws IOException
{
  // Serialize the replacement metadata once, before entering the retry loop.
  final byte[] newCommitMetadataBytes = jsonMapper.writeValueAsBytes(dataSourceMetadata);
  final String newCommitMetadataSha1 = BaseEncoding.base16().encode(
      Hashing.sha1().hashBytes(newCommitMetadataBytes).asBytes()
  );
  return connector.retryWithHandle(
      handle -> {
        final int numRows = handle.createStatement(
            StringUtils.format(
                "UPDATE %s SET "
                + "commit_metadata_payload = :new_commit_metadata_payload, "
                + "commit_metadata_sha1 = :new_commit_metadata_sha1 "
                + "WHERE dataSource = :dataSource",
                dbTables.getDataSourceTable()
            )
        )
                                  .bind("dataSource", dataSource)
                                  .bind("new_commit_metadata_payload", newCommitMetadataBytes)
                                  .bind("new_commit_metadata_sha1", newCommitMetadataSha1)
                                  .execute();
        return numRows == 1;
      }
  );
}
/**
 * Rewrites the payload column of every given segment, all within a single transaction.
 */
@Override
public void updateSegmentMetadata(final Set<DataSegment> segments)
{
  // Lambda form of TransactionCallback, matching the style used elsewhere in this class.
  connector.getDBI().inTransaction(
      (handle, transactionStatus) -> {
        for (final DataSegment segment : segments) {
          updatePayload(handle, segment);
        }
        return null;
      }
  );
}
/**
 * Deletes every given segment's row, all within a single transaction.
 */
@Override
public void deleteSegments(final Set<DataSegment> segments)
{
  // Lambda form of TransactionCallback, matching the style used elsewhere in this class.
  connector.getDBI().inTransaction(
      (handle, transactionStatus) -> {
        for (final DataSegment segment : segments) {
          deleteSegment(handle, segment);
        }
        return null;
      }
  );
}
/**
 * Deletes one segments-table row, keyed by segment id.
 */
private void deleteSegment(final Handle handle, final DataSegment segment)
{
  final String deleteSql = StringUtils.format("DELETE from %s WHERE id = :id", dbTables.getSegmentsTable());
  handle.createStatement(deleteSql)
        .bind("id", segment.getId().toString())
        .execute();
}
/**
 * Rewrites the payload column for one segment row with the segment's current JSON serialization.
 */
private void updatePayload(final Handle handle, final DataSegment segment) throws IOException
{
  final String updateSql =
      StringUtils.format("UPDATE %s SET payload = :payload WHERE id = :id", dbTables.getSegmentsTable());
  try {
    handle.createStatement(updateSql)
          .bind("id", segment.getId().toString())
          .bind("payload", jsonMapper.writeValueAsBytes(segment))
          .execute();
  }
  catch (IOException e) {
    log.error(e, "Exception inserting into DB");
    throw e;
  }
}
/**
 * Streams the payloads of unused segments whose start lies within {@code interval} and whose end
 * does not exceed it, deserializing each into a DataSegment.
 */
@Override
public List<DataSegment> getUnusedSegmentsForInterval(final String dataSource, final Interval interval)
{
  final List<DataSegment> matchingSegments = connector.inReadOnlyTransaction(
      (handle, status) -> {
        // 2 range conditions are used on different columns, but not all SQL databases properly optimize it.
        // Some databases can only use an index on one of the columns. An additional condition provides
        // explicit knowledge that 'start' cannot be greater than 'end'.
        return handle
            .createQuery(
                StringUtils.format(
                    "SELECT payload FROM %1$s WHERE dataSource = :dataSource and start >= :start "
                    + "and start <= :end and %2$send%2$s <= :end and used = false",
                    dbTables.getSegmentsTable(), connector.getQuoteString()
                )
            )
            .setFetchSize(connector.getStreamingFetchSize())
            .bind("dataSource", dataSource)
            .bind("start", interval.getStart().toString())
            .bind("end", interval.getEnd().toString())
            .map(ByteArrayMapper.FIRST)
            .fold(
                new ArrayList<DataSegment>(),
                (accumulator, payload, foldController, statementContext) -> {
                  try {
                    accumulator.add(jsonMapper.readValue(payload, DataSegment.class));
                    return accumulator;
                  }
                  catch (Exception e) {
                    throw new RuntimeException(e);
                  }
                }
            );
      }
  );
  log.info("Found %,d segments for %s for interval %s.", matchingSegments.size(), dataSource, interval);
  return matchingSegments;
}
/**
 * Returns (segment, created_date) pairs for used segments fully inside {@code interval}.
 */
@Override
public List<Pair<DataSegment, String>> getUsedSegmentAndCreatedDateForInterval(String dataSource, Interval interval)
{
  final String selectSql = StringUtils.format(
      "SELECT created_date, payload FROM %1$s WHERE dataSource = :dataSource " +
      "AND start >= :start AND %2$send%2$s <= :end AND used = true",
      dbTables.getSegmentsTable(), connector.getQuoteString()
  );
  return connector.retryWithHandle(
      handle -> handle.createQuery(selectSql)
                      .bind("dataSource", dataSource)
                      .bind("start", interval.getStart().toString())
                      .bind("end", interval.getEnd().toString())
                      .map((index, r, ctx) -> {
                        try {
                          // Pair each deserialized segment with its created_date string.
                          return new Pair<>(
                              jsonMapper.readValue(r.getBytes("payload"), DataSegment.class),
                              r.getString("created_date")
                          );
                        }
                        catch (IOException e) {
                          throw new RuntimeException(e);
                        }
                      })
                      .list()
  );
}
/**
 * Inserts a brand-new metadata row for {@code dataSource}.
 *
 * @return true when exactly one row was inserted
 */
@Override
public boolean insertDataSourceMetadata(String dataSource, DataSourceMetadata metadata)
{
  return 1 == connector.getDBI().inTransaction(
      (handle, status) -> {
        // Serialize once so the stored SHA1 is guaranteed to be the digest of the stored payload.
        // (The original serialized the metadata twice — wasted work, and the sha1 could diverge
        // from the payload if the two serializations ever differed.)
        final byte[] payloadBytes = jsonMapper.writeValueAsBytes(metadata);
        return handle
            .createStatement(
                StringUtils.format(
                    "INSERT INTO %s (dataSource, created_date, commit_metadata_payload, commit_metadata_sha1) VALUES" +
                    " (:dataSource, :created_date, :commit_metadata_payload, :commit_metadata_sha1)",
                    dbTables.getDataSourceTable()
                )
            )
            .bind("dataSource", dataSource)
            .bind("created_date", DateTimes.nowUtc().toString())
            .bind("commit_metadata_payload", payloadBytes)
            .bind("commit_metadata_sha1", BaseEncoding.base16().encode(
                Hashing.sha1().hashBytes(payloadBytes).asBytes()))
            .execute();
      }
  );
}
}
| |
package edu.packt.neuralnet.learn;
import java.util.ArrayList;
import edu.packt.neuralnet.HiddenLayer;
import edu.packt.neuralnet.NeuralNet;
import edu.packt.neuralnet.Neuron;
/**
 * Classic backpropagation trainer: repeatedly runs a forward pass to compute layer outputs and
 * per-output errors, then a backward pass that computes sensibilities and updates the
 * output-to-hidden and hidden-to-input weights, until the mean squared error drops below the
 * network's target error or the maximum number of epochs is reached.
 */
public class Backpropagation extends Training {

    /**
     * Trains the network over its whole training set, one forward+backward pass per row per epoch.
     *
     * @param n the network to train (mutated in place)
     * @return the trained network
     */
    public NeuralNet train(NeuralNet n) {
        int epoch = 0;
        // Seed the MSE above any sensible target so the loop body runs at least once.
        setMse(1.0);
        while (getMse() > n.getTargetError()) {
            if (epoch >= n.getMaxEpochs()) break;
            int rows = n.getTrainSet().length;
            double sumErrors = 0.0;
            for (int rows_i = 0; rows_i < rows; rows_i++) {
                n = forward(n, rows_i);
                n = backpropagation(n, rows_i);
                sumErrors = sumErrors + n.getErrorMean();
            }
            // Epoch MSE = mean of the per-row error means.
            setMse(sumErrors / rows);
            System.out.println(getMse());
            epoch++;
        }
        System.out.println("Number of epochs: " + epoch);
        return n;
    }

    /**
     * Forward pass for training row {@code row}: computes hidden-layer activations, output-layer
     * activations and per-output errors, and stores the row's mean squared error on the network.
     */
    protected NeuralNet forward(NeuralNet n, int row) {
        // Assign the getter result directly; the fresh ArrayList the original allocated here
        // was immediately discarded by reassignment.
        ArrayList<HiddenLayer> listOfHiddenLayer = n.getListOfHiddenLayer();
        double estimatedOutput = 0.0;
        double realOutput = 0.0;
        double sumError = 0.0;
        if (listOfHiddenLayer.size() > 0) {
            int hiddenLayer_i = 0;
            for (HiddenLayer hiddenLayer : listOfHiddenLayer) {
                int numberOfNeuronsInLayer = hiddenLayer.getNumberOfNeuronsInLayer();
                for (Neuron neuron : hiddenLayer.getListOfNeurons()) {
                    double netValueOut = 0.0;
                    if (neuron.getListOfWeightIn().size() > 0) { //exclude bias
                        double netValue = 0.0;
                        for (int layer_j = 0; layer_j < numberOfNeuronsInLayer - 1; layer_j++) { //exclude bias
                            double hiddenWeightIn = neuron.getListOfWeightIn().get(layer_j);
                            netValue = netValue + hiddenWeightIn * n.getTrainSet()[row][layer_j];
                        }
                        //output hidden layer (1)
                        netValueOut = super.activationFnc(n.getActivationFnc(), netValue);
                        neuron.setOutputValue(netValueOut);
                    } else {
                        // bias neuron: constant output of 1.0
                        neuron.setOutputValue(1.0);
                    }
                }
                //output hidden layer (2)
                for (int outLayer_i = 0; outLayer_i < n.getOutputLayer().getNumberOfNeuronsInLayer(); outLayer_i++) {
                    double netValue = 0.0;
                    double netValueOut = 0.0;
                    for (Neuron neuron : hiddenLayer.getListOfNeurons()) {
                        double hiddenWeightOut = neuron.getListOfWeightOut().get(outLayer_i);
                        netValue = netValue + hiddenWeightOut * neuron.getOutputValue();
                    }
                    netValueOut = activationFnc(n.getActivationFncOutputLayer(), netValue);
                    n.getOutputLayer().getListOfNeurons().get(outLayer_i).setOutputValue(netValueOut);
                    //error
                    estimatedOutput = netValueOut;
                    realOutput = n.getRealMatrixOutputSet()[row][outLayer_i];
                    double error = realOutput - estimatedOutput;
                    n.getOutputLayer().getListOfNeurons().get(outLayer_i).setError(error);
                    sumError = sumError + Math.pow(error, 2.0);
                }
                //error mean
                double errorMean = sumError / n.getOutputLayer().getNumberOfNeuronsInLayer();
                n.setErrorMean(errorMean);
                n.getListOfHiddenLayer().get(hiddenLayer_i).setListOfNeurons(hiddenLayer.getListOfNeurons());
                hiddenLayer_i++;
            }
        }
        return n;
    }

    /**
     * Backward pass for training row {@code row}: computes output- and hidden-layer sensibilities
     * (activation derivative times propagated error) and applies learning-rate-scaled weight
     * updates to both weight layers.
     */
    protected NeuralNet backpropagation(NeuralNet n, int row) {
        // Direct getter assignments; the originals allocated throwaway ArrayLists first.
        ArrayList<Neuron> outputLayer = n.getOutputLayer().getListOfNeurons();
        ArrayList<Neuron> hiddenLayer = n.getListOfHiddenLayer().get(0).getListOfNeurons();
        double error = 0.0;
        double netValue = 0.0;
        double sensibility = 0.0;
        //sensibility output layer
        for (Neuron neuron : outputLayer) {
            error = neuron.getError();
            netValue = neuron.getOutputValue();
            sensibility = derivativeActivationFnc(n.getActivationFncOutputLayer(), netValue) * error;
            neuron.setSensibility(sensibility);
        }
        n.getOutputLayer().setListOfNeurons(outputLayer);
        //sensibility hidden layer
        for (Neuron neuron : hiddenLayer) {
            sensibility = 0.0;
            if (neuron.getListOfWeightIn().size() > 0) { //exclude bias
                ArrayList<Double> listOfWeightsOut = neuron.getListOfWeightOut();
                double tempSensibility = 0.0;
                int weight_i = 0;
                for (Double weight : listOfWeightsOut) {
                    tempSensibility = tempSensibility + (weight * outputLayer.get(weight_i).getSensibility());
                    weight_i++;
                }
                sensibility = derivativeActivationFnc(n.getActivationFnc(), neuron.getOutputValue()) * tempSensibility;
                neuron.setSensibility(sensibility);
            }
        }
        //fix weights (teach) [output layer to hidden layer]
        for (int outLayer_i = 0; outLayer_i < n.getOutputLayer().getNumberOfNeuronsInLayer(); outLayer_i++) {
            for (Neuron neuron : hiddenLayer) {
                double newWeight = neuron.getListOfWeightOut().get(outLayer_i) +
                        (n.getLearningRate() *
                         outputLayer.get(outLayer_i).getSensibility() *
                         neuron.getOutputValue());
                neuron.getListOfWeightOut().set(outLayer_i, newWeight);
            }
        }
        //fix weights (teach) [hidden layer to input layer]
        for (Neuron neuron : hiddenLayer) {
            ArrayList<Double> hiddenLayerInputWeights = neuron.getListOfWeightIn();
            if (hiddenLayerInputWeights.size() > 0) { //exclude bias
                int hidden_i = 0;
                double newWeight = 0.0;
                for (int i = 0; i < n.getInputLayer().getNumberOfNeuronsInLayer(); i++) {
                    newWeight = hiddenLayerInputWeights.get(hidden_i) +
                            (n.getLearningRate() *
                             neuron.getSensibility() *
                             n.getTrainSet()[row][i]);
                    neuron.getListOfWeightIn().set(hidden_i, newWeight);
                    hidden_i++;
                }
            }
        }
        n.getListOfHiddenLayer().get(0).setListOfNeurons(hiddenLayer);
        return n;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.cql3;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import com.google.common.collect.ImmutableList;
import org.junit.BeforeClass;
import org.junit.Test;
import org.apache.cassandra.cql3.statements.BatchStatement;
import org.apache.cassandra.cql3.statements.ModificationStatement;
import org.apache.cassandra.db.ConsistencyLevel;
import org.apache.cassandra.service.ClientState;
import org.apache.cassandra.service.QueryState;
import org.apache.cassandra.transport.ProtocolVersion;
import org.apache.cassandra.transport.messages.ResultMessage;
import static java.lang.String.format;
import static org.junit.Assert.assertEquals;
public class CustomNowInSecondsTest extends CQLTester
{
/**
 * One-time setup for the whole class: prepares the server and requires networking, since these
 * tests execute statements through QueryProcessor against a running node.
 */
@BeforeClass
public static void setUpClass()
{
    prepareServer();
    requireNetwork();
}
/**
 * Runs the SELECT nowInSeconds scenario through both the unprepared (QueryMessage) and
 * prepared (ExecuteMessage) execution paths.
 */
@Test
public void testSelectQuery()
{
    testSelectQuery(false);
    testSelectQuery(true);
}
/**
 * Inserts a row with a one-day TTL, then verifies that overriding nowInSeconds past the TTL
 * makes the row invisible to SELECT while the default nowInSeconds still sees it.
 */
private void testSelectQuery(boolean prepared)
{
    int oneDay = 86400;

    String keyspace = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }");
    String table = createTable(keyspace, "CREATE TABLE %s (id int primary key, val int)");

    // insert a row with TTL = 1 day.
    executeModify(format("INSERT INTO %s.%s (id, val) VALUES (0, 0) USING TTL %d", keyspace, table, oneDay), Integer.MIN_VALUE, prepared);

    int nowSeconds = (int) (System.currentTimeMillis() / 1000);

    // Without a nowInSeconds override the freshly inserted row must be visible.
    assertEquals(1, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), Integer.MIN_VALUE, prepared).size());

    // With nowInSeconds pushed past the TTL, the row must have expired.
    assertEquals(0, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), nowSeconds + oneDay + 1, prepared).size());
}
/**
 * Runs the INSERT nowInSeconds scenario through both the unprepared (QueryMessage) and
 * prepared (ExecuteMessage) execution paths.
 */
@Test
public void testModifyQuery()
{
    testModifyQuery(false);
    testModifyQuery(true);
}
/**
 * Writes a TTL-1 row with nowInSeconds shifted one day ahead, giving it an effective expiry of
 * now + day + 1, then verifies visibility just after real "now" and expiry one day later.
 */
private void testModifyQuery(boolean prepared)
{
    int nowSeconds = (int) (System.currentTimeMillis() / 1000);
    int oneDay = 86400;

    String keyspace = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }");
    String table = createTable(keyspace, "CREATE TABLE %s (id int primary key, val int)");

    // Insert with nowInSeconds = now + 1 day and TTL = 1.
    executeModify(format("INSERT INTO %s.%s (id, val) VALUES (0, 0) USING TTL %d", keyspace, table, 1), nowSeconds + oneDay, prepared);

    // Despite TTL 1 having passed in real time, the row must still be visible.
    assertEquals(1, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), nowSeconds + 1, prepared).size());

    // One day later the row must be gone.
    assertEquals(0, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), nowSeconds + oneDay + 1, prepared).size());
}
/**
 * Runs the BATCH nowInSeconds scenario through both the unprepared (QueryMessage) and
 * prepared (ExecuteMessage) execution paths.
 */
@Test
public void testBatchQuery()
{
    testBatchQuery(false);
    testBatchQuery(true);
}
/**
 * Runs a batch of two TTL-1 inserts with nowInSeconds shifted one day ahead, giving both rows an
 * effective expiry of now + day + 1, then verifies visibility and expiry.
 */
private void testBatchQuery(boolean prepared)
{
    int nowSeconds = (int) (System.currentTimeMillis() / 1000);
    int oneDay = 86400;

    String keyspace = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }");
    String table = createTable(keyspace, "CREATE TABLE %s (id int primary key, val int)");

    // Batch two inserts with nowInSeconds = now + 1 day and TTL = 1 each.
    String batch = format("BEGIN BATCH " +
                          "INSERT INTO %s.%s (id, val) VALUES (0, 0) USING TTL %d; " +
                          "INSERT INTO %s.%s (id, val) VALUES (1, 1) USING TTL %d; " +
                          "APPLY BATCH;",
                          keyspace, table, 1,
                          keyspace, table, 1);
    executeModify(batch, nowSeconds + oneDay, prepared);

    // Both rows must remain visible just after real "now".
    assertEquals(2, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), nowSeconds + 1, prepared).size());

    // One day later both rows must be gone.
    assertEquals(0, executeSelect(format("SELECT * FROM %s.%s", keyspace, table), nowSeconds + oneDay + 1, prepared).size());
}
/**
 * Exercises the BatchMessage path directly: hand-builds an unlogged BatchStatement from parsed
 * ModificationStatements and processes it with a nowInSeconds override one day ahead.
 */
@Test
public void testBatchMessage()
{
    // test BatchMessage path
    int now = (int) (System.currentTimeMillis() / 1000);
    int day = 86400;

    String ks = createKeyspace("CREATE KEYSPACE %s WITH replication={ 'class' : 'SimpleStrategy', 'replication_factor' : 1 }");
    String tbl = createTable(ks, "CREATE TABLE %s (id int primary key, val int)");

    List<String> queries = ImmutableList.of(
        format("INSERT INTO %s.%s (id, val) VALUES (0, 0) USING TTL %d;", ks, tbl, 1),
        format("INSERT INTO %s.%s (id, val) VALUES (1, 1) USING TTL %d;", ks, tbl, 1)
    );

    ClientState cs = ClientState.forInternalCalls();
    QueryState qs = new QueryState(cs);

    // Parse each INSERT into a ModificationStatement and assemble the unlogged batch by hand.
    List<ModificationStatement> statements = new ArrayList<>(queries.size());
    for (String query : queries)
        statements.add((ModificationStatement) QueryProcessor.parseStatement(query, cs));

    BatchStatement batch =
        new BatchStatement(BatchStatement.Type.UNLOGGED, VariableSpecifications.empty(), statements, Attributes.none());

    // execute an BATCH message with now set to [now + 1 day], with ttl = 1, making its effective ttl = 1 day + 1.
    QueryProcessor.instance.processBatch(batch, qs, batchQueryOptions(now + day), Collections.emptyMap(), System.nanoTime());

    // verify that despite TTL having passed at now + 1 the rows are still there.
    assertEquals(2, executeSelect(format("SELECT * FROM %s.%s", ks, tbl), now + 1, false).size());

    // jump in time by one day, make sure the row expired.
    assertEquals(0, executeSelect(format("SELECT * FROM %s.%s", ks, tbl), now + day + 1, false).size());
}
// Runs a SELECT with an overridden nowInSeconds and unwraps the resulting row set.
private static ResultSet executeSelect(String query, int nowInSeconds, boolean prepared)
{
    ResultMessage.Rows rows = (ResultMessage.Rows) execute(query, nowInSeconds, prepared);
    return rows.result;
}
// Runs a mutating statement (INSERT/BATCH) with an overridden nowInSeconds;
// the result message is intentionally discarded.
private static void executeModify(String query, int nowInSeconds, boolean prepared)
{
    execute(query, nowInSeconds, prepared);
}
/**
 * Executes a single CQL statement with an overridden nowInSeconds.
 * prepared = false exercises the QueryMessage path, prepared = true the ExecuteMessage path.
 */
private static ResultMessage execute(String query, int nowInSeconds, boolean prepared)
{
    ClientState clientState = ClientState.forInternalCalls();
    QueryState queryState = new QueryState(clientState);

    if (!prepared)
    {
        CQLStatement parsed = QueryProcessor.instance.parse(query, queryState, queryOptions(nowInSeconds));
        return QueryProcessor.instance.process(parsed, queryState, queryOptions(nowInSeconds), Collections.emptyMap(), System.nanoTime());
    }

    CQLStatement parsed = QueryProcessor.parseStatement(query, clientState);
    return QueryProcessor.instance.processPrepared(parsed, queryState, queryOptions(nowInSeconds), Collections.emptyMap(), System.nanoTime());
}
// Builds single-statement QueryOptions whose last argument overrides the server's
// notion of "now" (in seconds) so tests can control TTL/expiration arithmetic.
// NOTE(review): the other positional arguments mirror QueryOptions.create's
// signature and are left at neutral values here — confirm meanings against that
// method's declaration before changing any of them.
private static QueryOptions queryOptions(int nowInSeconds)
{
    return QueryOptions.create(ConsistencyLevel.ONE,
    Collections.emptyList(),
    false,
    Integer.MAX_VALUE,
    null,
    null,
    ProtocolVersion.CURRENT,
    null,
    Long.MIN_VALUE,
    nowInSeconds); // the only argument these tests care about
}
// Wraps queryOptions(nowInSeconds) for batch execution without per-statement bind variables.
private static BatchQueryOptions batchQueryOptions(int nowInSeconds)
{
    return BatchQueryOptions.withoutPerStatementVariables(queryOptions(nowInSeconds));
}
}
| |
package org.votesmart.classes;
import org.votesmart.api.*;
import org.votesmart.data.Bill;
import org.votesmart.data.BillAction;
import org.votesmart.data.BillActionVotes;
import org.votesmart.data.BillVetoes;
import org.votesmart.data.Bills;
import org.votesmart.data.BillsByOfficial;
import org.votesmart.data.VotesCategories;
/**
* <pre>
* Votes Class
*
* * - Required
* * - Multiple rows
*
* Votes.getCategories
* This method dumps categories that contain released bills according to year and state.
* Input: year*, stateId (default: 'NA')
* Output: {@link VotesCategories}
*
* Votes.getBill
* This method dumps general information on a bill.
* Input: billId*
* Output: {@link Bill}
*
* Votes.getBillAction
* This gets detailed action information on a certain stage of the bill.
* Input: actionId*
* Output: {@link BillAction}
*
* Votes.getBillActionVotes
* Method provides votes listed by candidate on a certain bill action.
* Input: actionId*
* Output: {@link BillActionVotes}
*
* Votes.getBillActionVoteByOfficial
* Returns a single vote according to official and action.
* Input: actionId*, candidateId*
* Output: {@link BillActionVotes}
*
* Votes.getByBillNumber
* Returns a list of bills that are like the billNumber input.
* Input: billNumber*
* Output: {@link Bills}
*
* Votes.getBillsByCategoryYearState
* Returns a list of bills that fit the category, year, and state input.
* Input: categoryId*, year*, stateId
* Output: {@link Bills}
*
* Votes.getBillsByYearState
* Returns a list of bills that fit the year and state input.
* Input: year*, stateId
* Output: {@link Bills}
*
* Votes.getBillsByOfficialYearOffice
* Returns a list of bills that fit the candidate and year.
* Input: candidateId*, year*, officeId (Default: NULL(all))
* Output: {@link Bills}
*
* Votes.getBillsByOfficialCategoryOffice
* Returns a list of bills that fit the candidate and category.
* Input: candidateId*, categoryId*, officeId (Default: NULL(all))
* Output: {@link Bills}
*
* Votes.getByOfficial
* This method dumps all the bills an official has voted on based on the candidateId, officeId, categoryId, and year
* Input: candidateId*, officeId, categoryId, year
* Output: {@link BillsByOfficial}
*
* Votes.getBillsBySponsorYear
* Returns a list of bills that fit the sponsor's candidateId and year.
* Input: candidateId*, year*
* Output: {@link BillsByOfficial}
*
* Votes.getBillsBySponsorCategory
* Returns a list of bills that fit the sponsor's candidateId and category.
* Input: candidateId*, categoryId*
* Output: {@link BillsByOfficial}
*
* Votes.getBillsByStateRecent
* Returns a list of recent bills according to the state. Max returned is 100 or however much less you want.
* Input: amount (default: 100, max: 100), state (default: 'NA')
* Output: {@link Bills}
*
* Votes.getVetoes
* Returns a list of vetoes according to candidate.
* Input: candidateId
* Output: {@link BillVetoes}
*
* ============== EXAMPLE USAGE ===============
*
* // Votes class
* VotesClass votesClass = new VotesClass();
*
* // total votesCategories
* VotesCategories votesCategories = votesClass.getCategories("2013");
*
* // for State
* votesCategories = votesClass.getCategories("2013", state.stateId);
* CategoryMin votesCategory = votesCategories.category.get(0);
*
* // for category, for 2013, for State
* Bills bills = votesClass.getBillsByCategoryYearState(votesCategory.categoryId, "2013", state.stateId);
*
 * // by Official and Category (the two-argument overload; officeId defaults to all)
* bills = votesClass.getBillsByOfficialCategoryOffice(leader.candidateId, votesCategory.categoryId);
* </pre>
*
*/
public class VotesClass extends ClassesBase {

    /**
     * Constructor for testing: wires this wrapper to a caller-supplied API instance.
     *
     * @param api the VoteSmart API endpoint to query against
     */
    public VotesClass(VoteSmartAPI api) {
        super(api);
    }

    /**
     * Default constructor; API setup is delegated to {@link ClassesBase}.
     */
    public VotesClass() throws VoteSmartException {
        super();
    }

    /**
     * Dumps categories that contain released bills for the given year.
     * The server applies its default stateId ('NA') when none is supplied.
     *
     * @param year
     * @return {@link VotesCategories}
     */
    public VotesCategories getCategories(String year) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("year", year);
        return api.query("Votes.getCategories", args, VotesCategories.class);
    }

    /**
     * Dumps categories that contain released bills for the given year and state.
     *
     * @param year
     * @param stateId
     * @return {@link VotesCategories}
     */
    public VotesCategories getCategories(String year, String stateId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("year", year, "stateId", stateId);
        return api.query("Votes.getCategories", args, VotesCategories.class);
    }

    /**
     * Dumps general information on a single bill.
     *
     * @param billId
     * @return {@link Bill}
     */
    public Bill getBill(String billId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("billId", billId);
        return api.query("Votes.getBill", args, Bill.class);
    }

    /**
     * Gets detailed action information on a certain stage of a bill.
     *
     * @param actionId
     * @return {@link BillAction}
     */
    public BillAction getBillAction(String actionId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("actionId", actionId);
        return api.query("Votes.getBillAction", args, BillAction.class);
    }

    /**
     * Provides the votes, listed by candidate, on a certain bill action.
     *
     * @param actionId
     * @return {@link BillActionVotes}
     */
    public BillActionVotes getBillActionVotes(String actionId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("actionId", actionId);
        return api.query("Votes.getBillActionVotes", args, BillActionVotes.class);
    }

    /**
     * Returns a single vote for the given official on the given action.
     *
     * @param actionId
     * @param candidateId
     * @return {@link BillActionVotes}
     */
    public BillActionVotes getBillActionVoteByOfficial(String actionId, String candidateId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("actionId", actionId, "candidateId", candidateId);
        return api.query("Votes.getBillActionVoteByOfficial", args, BillActionVotes.class);
    }

    /**
     * Returns bills whose number is like the supplied billNumber.
     *
     * @param billNumber
     * @return {@link Bills}
     */
    public Bills getByBillNumber(String billNumber) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("billNumber", billNumber);
        return api.query("Votes.getByBillNumber", args, Bills.class);
    }

    /**
     * Returns bills matching the given category and year (server-default state).
     *
     * @param categoryId
     * @param year
     * @return {@link Bills}
     */
    public Bills getBillsByCategoryYearState(String categoryId, String year) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("categoryId", categoryId, "year", year);
        return api.query("Votes.getBillsByCategoryYearState", args, Bills.class);
    }

    /**
     * Returns bills matching the given category, year, and state.
     *
     * @param categoryId
     * @param year
     * @param stateId
     * @return {@link Bills}
     */
    public Bills getBillsByCategoryYearState(String categoryId, String year, String stateId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("categoryId", categoryId, "year", year, "stateId", stateId);
        return api.query("Votes.getBillsByCategoryYearState", args, Bills.class);
    }

    /**
     * Returns bills matching the given candidate and year (all offices).
     *
     * @param candidateId
     * @param year
     * @return {@link Bills}
     */
    public Bills getBillsByOfficialYearOffice(String candidateId, String year) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "year", year);
        return api.query("Votes.getBillsByOfficialYearOffice", args, Bills.class);
    }

    /**
     * Returns bills matching the given candidate, year, and office.
     *
     * @param candidateId
     * @param year
     * @param officeId
     * @return {@link Bills}
     */
    public Bills getBillsByOfficialYearOffice(String candidateId, String year, String officeId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "year", year, "officeId", officeId);
        return api.query("Votes.getBillsByOfficialYearOffice", args, Bills.class);
    }

    /**
     * Returns bills matching the given candidate and category (all offices).
     *
     * @param candidateId
     * @param categoryId
     * @return {@link Bills}
     */
    public Bills getBillsByOfficialCategoryOffice(String candidateId, String categoryId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "categoryId", categoryId);
        return api.query("Votes.getBillsByOfficialCategoryOffice", args, Bills.class);
    }

    /**
     * Returns bills matching the given candidate, category, and office.
     *
     * @param candidateId
     * @param categoryId
     * @param officeId
     * @return {@link Bills}
     */
    public Bills getBillsByOfficialCategoryOffice(String candidateId, String categoryId, String officeId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "categoryId", categoryId, "officeId", officeId);
        return api.query("Votes.getBillsByOfficialCategoryOffice", args, Bills.class);
    }

    /**
     * Dumps the bills an official has voted on, filtered by office.
     *
     * @param candidateId
     * @param officeId
     * @return {@link BillsByOfficial}
     */
    public BillsByOfficial getByOfficial(String candidateId, String officeId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "officeId", officeId);
        return api.query("Votes.getByOfficial", args, BillsByOfficial.class);
    }

    /**
     * Dumps the bills an official has voted on, filtered by office and category.
     *
     * @param candidateId
     * @param officeId
     * @param categoryId
     * @return {@link BillsByOfficial}
     */
    public BillsByOfficial getByOfficial(String candidateId, String officeId, String categoryId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "officeId", officeId, "categoryId", categoryId);
        return api.query("Votes.getByOfficial", args, BillsByOfficial.class);
    }

    /**
     * Dumps the bills an official has voted on, filtered by office, category, and year.
     *
     * @param candidateId
     * @param officeId
     * @param categoryId
     * @param year
     * @return {@link BillsByOfficial}
     */
    public BillsByOfficial getByOfficial(String candidateId, String officeId, String categoryId, String year) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "officeId", officeId, "categoryId", categoryId, "year", year);
        return api.query("Votes.getByOfficial", args, BillsByOfficial.class);
    }

    /**
     * Returns bills sponsored by the given candidate in the given year.
     *
     * @param candidateId
     * @param year
     * @return {@link Bills}
     */
    public Bills getBillsBySponsorYear(String candidateId, String year) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "year", year);
        return api.query("Votes.getBillsBySponsorYear", args, Bills.class);
    }

    /**
     * Returns bills sponsored by the given candidate in the given category.
     *
     * @param candidateId
     * @param categoryId
     * @return {@link Bills}
     */
    public Bills getBillsBySponsorCategory(String candidateId, String categoryId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId, "categoryId", categoryId);
        return api.query("Votes.getBillsBySponsorCategory", args, Bills.class);
    }

    /**
     * Returns the most recent bills for a state. The server caps the result at 100.
     *
     * Note: currently any caching covers only the most recent call, not all calls.
     *
     * @param amount  number of bills to return (default: 100, max: 100)
     * @param state   state to query (default: 'NA')
     * @return {@link Bills}
     */
    public Bills getBillsByStateRecent(String amount, String state) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("amount", amount, "state", state);
        return api.query("Votes.getBillsByStateRecent", args, Bills.class);
    }

    /**
     * Returns the vetoes associated with a candidate.
     *
     * @param candidateId
     * @return {@link BillVetoes}
     */
    public BillVetoes getVetoes(String candidateId) throws VoteSmartException, VoteSmartErrorException {
        ArgMap args = new ArgMap("candidateId", candidateId);
        return api.query("Votes.getVetoes", args, BillVetoes.class);
    }
}
| |
/*
* Copyright 2010-2012 Luca Garulli (l.garulli--at--orientechnologies.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.orientechnologies.orient.core.config;
import java.io.PrintStream;
import java.lang.management.OperatingSystemMXBean;
import java.util.Map;
import java.util.Map.Entry;
import java.util.logging.ConsoleHandler;
import java.util.logging.FileHandler;
import java.util.logging.Level;
import com.orientechnologies.common.io.OFileUtils;
import com.orientechnologies.common.log.OLogManager;
import com.orientechnologies.orient.core.OConstants;
import com.orientechnologies.orient.core.Orient;
import com.orientechnologies.orient.core.cache.ODefaultCache;
import com.orientechnologies.orient.core.storage.fs.OMMapManagerOld;
/**
* Keeps all configuration settings. At startup assigns the configuration values by reading system properties.
*
* @author Luca Garulli (l.garulli--at--orientechnologies.com)
*
*/
public enum OGlobalConfiguration {
// ENVIRONMENT
ENVIRONMENT_DUMP_CFG_AT_STARTUP("environment.dumpCfgAtStartup", "Dumps the configuration at application startup", Boolean.class,
Boolean.FALSE),
ENVIRONMENT_CONCURRENT("environment.concurrent",
"Specifies if running in multi-thread environment. Setting this to false turns off the internal lock management",
Boolean.class, Boolean.TRUE),
// MEMORY
@Deprecated
MEMORY_OPTIMIZE_THRESHOLD("memory.optimizeThreshold", "Threshold for heap memory at which optimization of memory usage starts. ",
Float.class, 0.70),
// STORAGE
STORAGE_KEEP_OPEN(
"storage.keepOpen",
"Tells to the engine to not close the storage when a database is closed. Storages will be closed when the process shuts down",
Boolean.class, Boolean.FALSE),
STORAGE_RECORD_LOCK_TIMEOUT("storage.record.lockTimeout", "Maximum timeout in milliseconds to lock a shared record",
Integer.class, 5000),
// CACHE
CACHE_LEVEL1_ENABLED("cache.level1.enabled", "Use the level-1 cache", Boolean.class, true),
CACHE_LEVEL1_SIZE("cache.level1.size", "Size of the cache that keeps the record in memory", Integer.class, -1),
CACHE_LEVEL2_ENABLED("cache.level2.enabled", "Use the level-2 cache", Boolean.class, true),
CACHE_LEVEL2_SIZE("cache.level2.size", "Size of the cache that keeps the record in memory", Integer.class, -1),
CACHE_LEVEL2_IMPL("cache.level2.impl", "Actual implementation of secondary cache", String.class, ODefaultCache.class
.getCanonicalName()),
CACHE_LEVEL2_STRATEGY("cache.level2.strategy",
"Strategy to use when a database requests a record: 0 = pop the record, 1 = copy the record", Integer.class, 0,
new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
// UPDATE ALL THE OPENED STORAGES SETTING THE NEW STRATEGY
// for (OStorage s : com.orientechnologies.orient.core.Orient.instance().getStorages()) {
// s.getCache().setStrategy((Integer) iNewValue);
// }
}
}),
// DATABASE
OBJECT_SAVE_ONLY_DIRTY("object.saveOnlyDirty", "Object Database only saves objects bound to dirty records", Boolean.class, false),
DB_MVCC("db.mvcc", "Enables or disables MVCC (Multi-Version Concurrency Control) even outside transactions", Boolean.class, true),
DB_MVCC_THROWFAST(
"db.mvcc.throwfast",
"Use fast-thrown exceptions for MVCC OConcurrentModificationExceptions. No context information will be available, use where these exceptions are handled and the detail is not neccessary",
Boolean.class, false),
DB_VALIDATION("db.validation", "Enables or disables validation of records", Boolean.class, true),
// SETTINGS OF NON-TRANSACTIONAL MODE
NON_TX_RECORD_UPDATE_SYNCH("nonTX.recordUpdate.synch",
"Executes a synch against the file-system at every record operation. This slows down records updates "
+ "but guarantee reliability on unreliable drives", Boolean.class, Boolean.FALSE),
NON_TX_CLUSTERS_SYNC_IMMEDIATELY("nonTX.clusters.sync.immediately",
"List of clusters to sync immediately after update separated by commas. Can be useful for manual index",
String.class, "manindex"),
// TRANSACTIONS
TX_USE_LOG("tx.useLog", "Transactions use log file to store temporary data to be rolled back in case of crash", Boolean.class,
true),
TX_LOG_TYPE("tx.log.fileType", "File type to handle transaction logs: mmap or classic", String.class, "classic"),
TX_LOG_SYNCH(
"tx.log.synch",
"Executes a synch against the file-system at every log entry. This slows down transactions but guarantee transaction reliability on unreliable drives",
Boolean.class, Boolean.FALSE),
TX_COMMIT_SYNCH("tx.commit.synch", "Synchronizes the storage after transaction commit", Boolean.class, false),
// GRAPH
@Deprecated
BLUEPRINTS_TX_MODE("blueprints.graph.txMode",
"Transaction mode used in TinkerPop Blueprints implementation. 0 = Automatic (default), 1 = Manual", Integer.class, 0),
// TREEMAP
MVRBTREE_TIMEOUT("mvrbtree.timeout", "Maximum timeout to get lock against the OMVRB-Tree", Integer.class, 5000),
INDEX_AUTO_LAZY_UPDATES(
"index.auto.lazyUpdates",
"Configure the TreeMaps for automatic indexes as buffered or not. -1 means buffered until tx.commit() or db.close() are called",
Integer.class, 10000),
INDEX_MANUAL_LAZY_UPDATES(
"index.manual.lazyUpdates",
"Configure the TreeMaps for manual indexes as buffered or not. -1 means buffered until tx.commit() or db.close() are called",
Integer.class, 1),
MVRBTREE_NODE_PAGE_SIZE("mvrbtree.nodePageSize",
"Page size of each node. 256 means that 256 entries can be stored inside each node", Integer.class, 256),
MVRBTREE_LOAD_FACTOR("mvrbtree.loadFactor", "HashMap load factor", Float.class, 0.7f),
MVRBTREE_OPTIMIZE_THRESHOLD(
"mvrbtree.optimizeThreshold",
"Auto optimize the TreeMap every X tree rotations. This forces the optimization of the tree after many changes to recompute entry points. -1 means never",
Integer.class, 100000),
MVRBTREE_ENTRYPOINTS("mvrbtree.entryPoints", "Number of entry points to start searching entries", Integer.class, 64),
MVRBTREE_OPTIMIZE_ENTRYPOINTS_FACTOR("mvrbtree.optimizeEntryPointsFactor",
"Multiplicand factor to apply to entry-points list (parameter mvrbtree.entrypoints) to determine optimization is needed",
Float.class, 1.0f),
MVRBTREE_ENTRY_KEYS_IN_MEMORY("mvrbtree.entryKeysInMemory", "Keep unserialized keys in memory", Boolean.class, Boolean.FALSE),
MVRBTREE_ENTRY_VALUES_IN_MEMORY("mvrbtree.entryValuesInMemory", "Keep unserialized values in memory", Boolean.class,
Boolean.FALSE),
// TREEMAP OF RIDS
MVRBTREE_RID_BINARY_THRESHOLD(
"mvrbtree.ridBinaryThreshold",
"Valid for set of rids. It's the threshold as number of entries to use the binary streaming instead of classic string streaming. -1 means never use binary streaming",
Integer.class, 8),
MVRBTREE_RID_NODE_PAGE_SIZE("mvrbtree.ridNodePageSize",
"Page size of each treeset node. 16 means that 16 entries can be stored inside each node", Integer.class, 16),
MVRBTREE_RID_NODE_SAVE_MEMORY("mvrbtree.ridNodeSaveMemory",
"Save memory usage by avoid keeping RIDs in memory but creating them at every access", Boolean.class, Boolean.FALSE),
// COLLECTIONS
LAZYSET_WORK_ON_STREAM("lazyset.workOnStream", "Upon add avoid unmarshalling set", Boolean.class, true),
// FILE
FILE_LOCK("file.lock", "Locks files when used. Default is false", boolean.class, false),
FILE_DEFRAG_STRATEGY("file.defrag.strategy", "Strategy to recycle free space: 0 = synchronous defrag, 1 = asynchronous defrag, ",
Integer.class, 0),
FILE_DEFRAG_HOLE_MAX_DISTANCE(
"file.defrag.holeMaxDistance",
"Max distance in bytes between holes to cause their defrag. Set it to -1 to use dynamic size. Beware that if the db is huge moving blocks to defrag could be expensive",
Integer.class, 32768),
FILE_MMAP_USE_OLD_MANAGER("file.mmap.useOldManager",
"Manager that will be used to handle mmap files. true = USE OLD MANAGER, false = USE NEW MANAGER", boolean.class, false),
FILE_MMAP_LOCK_MEMORY("file.mmap.lockMemory",
"When using new map manager this parameter specify prevent memory swap or not. true = LOCK MEMORY, false = NOT LOCK MEMORY",
boolean.class, true),
FILE_MMAP_STRATEGY(
"file.mmap.strategy",
"Strategy to use with memory mapped files. 0 = USE MMAP ALWAYS, 1 = USE MMAP ON WRITES OR ON READ JUST WHEN THE BLOCK POOL IS FREE, 2 = USE MMAP ON WRITES OR ON READ JUST WHEN THE BLOCK IS ALREADY AVAILABLE, 3 = USE MMAP ONLY IF BLOCK IS ALREADY AVAILABLE, 4 = NEVER USE MMAP",
Integer.class, 0),
FILE_MMAP_BLOCK_SIZE("file.mmap.blockSize", "Size of the memory mapped block, default is 1Mb", Integer.class, 1048576,
new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OMMapManagerOld.setBlockSize(((Number) iNewValue).intValue());
}
}),
FILE_MMAP_BUFFER_SIZE("file.mmap.bufferSize", "Size of the buffer for direct access to the file through the channel",
Integer.class, 8192),
FILE_MMAP_MAX_MEMORY(
"file.mmap.maxMemory",
"Max memory allocatable by memory mapping manager. Note that on 32bit operating systems, the limit is 2Gb but will vary between operating systems",
Long.class, 134217728, new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OMMapManagerOld.setMaxMemory(OFileUtils.getSizeAsNumber(iNewValue));
}
}),
FILE_MMAP_OVERLAP_STRATEGY(
"file.mmap.overlapStrategy",
"Strategy to use when a request overlaps in-memory buffers: 0 = Use the channel access, 1 = force the in-memory buffer and use the channel access, 2 = always create an overlapped in-memory buffer (default)",
Integer.class, 2, new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OMMapManagerOld.setOverlapStrategy((Integer) iNewValue);
}
}),
FILE_MMAP_FORCE_DELAY("file.mmap.forceDelay",
"Delay time in ms to wait for another forced flush of the memory-mapped block to disk", Integer.class, 10),
FILE_MMAP_FORCE_RETRY("file.mmap.forceRetry", "Number of times the memory-mapped block will try to flush to disk", Integer.class,
50),
JNA_DISABLE_USE_SYSTEM_LIBRARY("jna.disable.system.library",
"This property disable to using JNA installed in your system. And use JNA bundled with database.", boolean.class, true),
USE_LHPEPS_CLUSTER("file.cluster.useLHPEPS", "Indicates whether cluster file should be saved as simple persistent"
+ " list or as hash map. Persistent list is used by default.", Boolean.class, Boolean.FALSE),
// NETWORK
NETWORK_SOCKET_BUFFER_SIZE("network.socketBufferSize", "TCP/IP Socket buffer size", Integer.class, 32768),
NETWORK_LOCK_TIMEOUT("network.lockTimeout", "Timeout in ms to acquire a lock against a channel", Integer.class, 15000),
NETWORK_SOCKET_TIMEOUT("network.socketTimeout", "TCP/IP Socket timeout in ms", Integer.class, 10000),
NETWORK_SOCKET_RETRY("network.retry", "Number of times the client retries its connection to the server on failure",
Integer.class, 5),
NETWORK_SOCKET_RETRY_DELAY("network.retryDelay", "Number of ms the client waits before reconnecting to the server on failure",
Integer.class, 500),
NETWORK_BINARY_DNS_LOADBALANCING_ENABLED("network.binary.loadBalancing.enabled",
"Asks for DNS TXT record to determine if load balancing is supported", Boolean.class, Boolean.FALSE),
NETWORK_BINARY_DNS_LOADBALANCING_TIMEOUT("network.binary.loadBalancing.timeout",
"Maximum time (in ms) to wait for the answer from DNS about the TXT record for load balancing", Integer.class, 2000),
NETWORK_BINARY_MAX_CONTENT_LENGTH("network.binary.maxLength", "TCP/IP max content length in bytes of BINARY requests",
Integer.class, 32736),
NETWORK_BINARY_READ_RESPONSE_MAX_TIMES("network.binary.readResponse.maxTimes",
"Maximum times to wait until response will be read. Otherwise response will be dropped from chanel", Integer.class, 20),
NETWORK_BINARY_DEBUG("network.binary.debug", "Debug mode: print all data incoming on the binary channel", Boolean.class, false),
NETWORK_HTTP_MAX_CONTENT_LENGTH("network.http.maxLength", "TCP/IP max content length in bytes for HTTP requests", Integer.class,
1000000),
NETWORK_HTTP_CONTENT_CHARSET("network.http.charset", "Http response charset", String.class, "utf-8"),
NETWORK_HTTP_SESSION_EXPIRE_TIMEOUT("network.http.sessionExpireTimeout",
"Timeout after which an http session is considered tp have expired (seconds)", Integer.class, 300),
// PROFILER
PROFILER_ENABLED("profiler.enabled", "Enable the recording of statistics and counters", Boolean.class, false,
new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
if ((Boolean) iNewValue)
Orient.instance().getProfiler().startRecording();
else
Orient.instance().getProfiler().stopRecording();
}
}),
PROFILER_CONFIG("profiler.config", "Configures the profiler as <seconds-for-snapshot>,<archive-snapshot-size>,<summary-size>",
String.class, null, new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
Orient.instance().getProfiler().configure(iNewValue.toString());
}
}),
PROFILER_AUTODUMP_INTERVAL("profiler.autoDump.interval",
"Dumps the profiler values at regular intervals. Time is expressed in seconds", Integer.class, 0,
new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
Orient.instance().getProfiler().setAutoDump((Integer) iNewValue);
}
}),
@Deprecated
PROFILER_AUTODUMP_RESET("profiler.autoDump.reset", "Resets the profiler at every auto dump", Boolean.class, true),
// LOG
LOG_CONSOLE_LEVEL("log.console.level", "Console logging level", String.class, "info", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, ConsoleHandler.class);
}
}),
LOG_FILE_LEVEL("log.file.level", "File logging level", String.class, "fine", new OConfigurationChangeCallback() {
public void change(final Object iCurrentValue, final Object iNewValue) {
OLogManager.instance().setLevel((String) iNewValue, FileHandler.class);
}
}),
// CLIENT
CLIENT_CHANNEL_MIN_POOL("client.channel.minPool", "Minimum pool size", Integer.class, 1),
CLIENT_CHANNEL_MAX_POOL("client.channel.maxPool", "Maximum channel pool size", Integer.class, 5),
CLIENT_CONNECT_POOL_WAIT_TIMEOUT("client.connectionPool.waitTimeout",
"Maximum time which client should wait connection from the pool", Integer.class, 5000),
CLIENT_DB_RELEASE_WAIT_TIMEOUT("client.channel.dbReleaseWaitTimeout",
"Delay in ms. after which data modification command will be resent if DB was frozen", Integer.class, 10000),
// SERVER
SERVER_CHANNEL_CLEAN_DELAY("server.channel.cleanDelay", "Time in ms of delay to check pending closed connections", Integer.class,
5000),
SERVER_CACHE_FILE_STATIC("server.cache.staticFile", "Cache static resources loading", Boolean.class, false),
SERVER_LOG_DUMP_CLIENT_EXCEPTION_LEVEL(
    "server.log.dumpClientExceptionLevel",
    "Logs client exceptions. Use any level supported by Java java.util.logging.Level class: OFF, FINE, CONFIG, INFO, WARNING, SEVERE",
    Level.class, Level.FINE),
// NOTE(review): declared with type Level.class but the default is Boolean.TRUE — this looks
// like it should be Boolean.class (setValue() would then parse overrides as booleans); confirm.
// NOTE(review): "to sent to" in the description is a typo in a runtime string — left untouched here.
SERVER_LOG_DUMP_CLIENT_EXCEPTION_FULLSTACKTRACE("server.log.dumpClientExceptionFullStackTrace",
    "Dumps the full stack trace of the exception to sent to the client", Level.class, Boolean.TRUE);

private final String key;          // dotted property key, e.g. "cache.level1.size"
private final Object defValue;     // compiled-in default, used when no value has been set
private final Class<?> type;       // declared value type; drives parsing in setValue()
private Object value = null;       // explicitly-set value; null means "use defValue"
private String description;        // human-readable text shown by dumpConfiguration()
private OConfigurationChangeCallback changeCallback = null;  // optional change listener

// AT STARTUP AUTO-CONFIG: read system-property overrides, then apply
// platform-dependent tuning, as soon as this enum class is loaded.
static {
  readConfiguration();
  autoConfig();
}
/**
 * Builds a configuration entry whose value changes are propagated through the
 * given callback (invoked with the old and new value on every setValue()).
 */
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue,
    final OConfigurationChangeCallback iChangeAction) {
  this(iKey, iDescription, iType, iDefValue);
  changeCallback = iChangeAction;
}
/**
 * Builds a configuration entry with no change callback.
 */
OGlobalConfiguration(final String iKey, final String iDescription, final Class<?> iType, final Object iDefValue) {
  key = iKey;
  description = iDescription;
  defValue = iDefValue;
  type = iType;
}
/**
 * Stores a new value for this setting, coercing it to the declared type where a
 * parser exists (Boolean, Integer, Float, String), then notifies the change
 * callback (if any) with the old and new value.
 *
 * @param iValue the raw value (typically a String from a system property); null leaves the current value untouched
 */
public void setValue(final Object iValue) {
  final Object previous = value;

  if (iValue != null) {
    final String text = iValue.toString();
    if (type == String.class)
      value = text;
    else if (type == Boolean.class)
      value = Boolean.parseBoolean(text);
    else if (type == Integer.class)
      value = Integer.parseInt(text);
    else if (type == Float.class)
      value = Float.parseFloat(text);
    else
      value = iValue; // no dedicated parser for this type: store the object as-is
  }

  if (changeCallback != null)
    changeCallback.change(previous, value);
}
/**
 * @return the explicitly-set value when present, otherwise the compiled-in default
 */
public Object getValue() {
  if (value != null)
    return value;
  return defValue;
}
/**
 * @return the current value (or default) as a boolean, parsing its string form
 *         when it is not already a Boolean
 */
public boolean getValueAsBoolean() {
  final Object current = value != null ? value : defValue;
  if (current instanceof Boolean)
    return (Boolean) current;
  return Boolean.parseBoolean(current.toString());
}
/**
 * @return the string form of the current value (or default)
 */
public String getValueAsString() {
  return (value != null ? value : defValue).toString();
}
/**
 * @return the current value (or default) as an int; non-numeric values are parsed
 *         through OFileUtils.getSizeAsNumber (which accepts size suffixes)
 */
public int getValueAsInteger() {
  final Object current = value != null ? value : defValue;
  if (current instanceof Number)
    return ((Number) current).intValue();
  return (int) OFileUtils.getSizeAsNumber(current.toString());
}
public long getValueAsLong() {
final Object v = value != null ? value : defValue;
return v instanceof Number ? ((Number) v).longValue() : OFileUtils.getSizeAsNumber(v.toString());
}
/**
 * Returns the value as a float.
 * <p>
 * Fix: accept any {@link Number} (Integer, Long, Double, ...) directly instead of
 * only Float, matching {@link #getValueAsInteger()} and {@link #getValueAsLong()}.
 * Previously a non-Float numeric value was routed through the size-string parser,
 * which is lossy and can throw on plain decimal strings like "1.5".
 */
public float getValueAsFloat() {
    final Object v = value != null ? value : defValue;
    return v instanceof Number ? ((Number) v).floatValue() : OFileUtils.getSizeAsNumber(v.toString());
}
/** Returns the configuration key. */
public String getKey() {
return key;
}
/** Returns the declared value type. */
public Class<?> getType() {
return type;
}
/** Returns the human readable description. */
public String getDescription() {
return description;
}
/**
 * Prints every configuration entry, grouped by key section (the prefix before
 * the first dot), to the given stream.
 *
 * @param out destination stream, e.g. System.out
 */
public static void dumpConfiguration(final PrintStream out) {
    out.print("OrientDB ");
    out.print(OConstants.getVersion());
    out.println(" configuration dump:");

    String lastSection = "";
    for (OGlobalConfiguration v : values()) {
        // Fix: guard against keys without a dot, which previously triggered a
        // StringIndexOutOfBoundsException from substring(0, -1).
        final int dot = v.key.indexOf('.');
        final String section = dot > -1 ? v.key.substring(0, dot) : v.key;
        if (!lastSection.equals(section)) {
            out.print("- ");
            out.println(section.toUpperCase());
            lastSection = section;
        }
        out.print(" + ");
        out.print(v.key);
        out.print(" = ");
        out.println(v.getValue());
    }
}
/**
 * Finds the OGlobalConfiguration instance registered under the given key,
 * comparing keys case-insensitively.
 *
 * @param iKey
 *          Key to look up. It's case insensitive.
 * @return the matching OGlobalConfiguration instance, otherwise null
 */
public static OGlobalConfiguration findByKey(final String iKey) {
    for (final OGlobalConfiguration candidate : values()) {
        if (candidate.getKey().equalsIgnoreCase(iKey)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Change configuration values in one shot by passing a Map of values. Unknown
 * keys are skipped silently; lookup is case insensitive.
 * <p>
 * Fix: the previous implementation used {@code valueOf(key)}, which never
 * returns null — it throws IllegalArgumentException on an unknown key — so the
 * null check was dead code and a single unrecognized key aborted the whole
 * batch. {@link #findByKey(String)} returns null for unknown keys instead.
 */
public static void setConfiguration(final Map<String, Object> iConfig) {
    for (final Entry<String, Object> config : iConfig.entrySet()) {
        final OGlobalConfiguration cfg = findByKey(config.getKey());
        if (cfg != null)
            cfg.setValue(config.getValue());
    }
}
/**
 * Assigns configuration values from system properties: each entry whose key is
 * set as a system property takes that property's value.
 */
private static void readConfiguration() {
    for (final OGlobalConfiguration config : values()) {
        final String override = System.getProperty(config.key);
        if (override != null) {
            config.setValue(override);
        }
    }
}
// Applies machine-dependent tuning at class load time.
private static void autoConfig() {
// Heuristic: "64" in os.arch means a 64-bit JVM. NOTE(review): this also matches any
// arch string containing "64" — assumed acceptable here.
if (System.getProperty("os.arch").indexOf("64") > -1) {
// 64 BIT
// Only auto-tune when the mmap limit still has its default (128MB = 134217728).
if (FILE_MMAP_MAX_MEMORY.getValueAsInteger() == 134217728) {
final OperatingSystemMXBean bean = java.lang.management.ManagementFactory.getOperatingSystemMXBean();
try {
// Access the Sun-specific MXBean reflectively so the engine still loads on JVMs
// that do not ship com.sun.management.
final Class<?> cls = Class.forName("com.sun.management.OperatingSystemMXBean");
if (cls.isAssignableFrom(bean.getClass())) {
final Long maxOsMemory = (Long) cls.getMethod("getTotalPhysicalMemorySize", new Class[] {}).invoke(bean);
final long maxProcessMemory = Runtime.getRuntime().maxMemory();
// Use half of the physical memory not reserved for the JVM heap.
long mmapBestMemory = (maxOsMemory.longValue() - maxProcessMemory) / 2;
FILE_MMAP_MAX_MEMORY.setValue(mmapBestMemory);
}
} catch (Exception e) {
// SUN JMX CLASS NOT AVAILABLE: CAN'T AUTO TUNE THE ENGINE
}
}
} else {
// 32 BIT, USE THE DEFAULT CONFIGURATION
}
}
}
| |
/*
* Copyright 2018 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.emoji.text;
import static androidx.annotation.RestrictTo.Scope.LIBRARY_GROUP;
import android.os.Build;
import android.text.Editable;
import android.text.Selection;
import android.text.Spannable;
import android.text.SpannableString;
import android.text.Spanned;
import android.text.TextPaint;
import android.text.method.KeyListener;
import android.text.method.MetaKeyKeyListener;
import android.view.KeyEvent;
import android.view.inputmethod.InputConnection;
import androidx.annotation.AnyThread;
import androidx.annotation.IntDef;
import androidx.annotation.IntRange;
import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.annotation.RequiresApi;
import androidx.annotation.RestrictTo;
import androidx.core.graphics.PaintCompat;
import androidx.core.util.Preconditions;
import androidx.emoji.widget.SpannableBuilder;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.util.Arrays;
import java.util.List;
/**
* Processes the CharSequence and adds the emojis.
*
* @hide
*/
@AnyThread
@RestrictTo(LIBRARY_GROUP)
@RequiresApi(19)
final class EmojiProcessor {
/**
 * State transition commands.
 */
@IntDef({ACTION_ADVANCE_BOTH, ACTION_ADVANCE_END, ACTION_FLUSH})
@Retention(RetentionPolicy.SOURCE)
private @interface Action {
}
/**
 * Advance the end pointer in CharSequence and reset the start to be the end.
 */
private static final int ACTION_ADVANCE_BOTH = 1;
/**
 * Advance end pointer in CharSequence.
 */
private static final int ACTION_ADVANCE_END = 2;
/**
 * Add a new emoji with the metadata in {@link ProcessorSm#getFlushMetadata()}. Advance end
 * pointer in CharSequence and reset the start to be the end.
 */
private static final int ACTION_FLUSH = 3;
/**
 * Factory used to create EmojiSpans.
 */
private final EmojiCompat.SpanFactory mSpanFactory;
/**
 * Emoji metadata repository.
 */
private final MetadataRepo mMetadataRepo;
/**
 * Utility class that checks if the system can render a given glyph.
 * Not final: replaceable through {@link #setGlyphChecker(GlyphChecker)} for tests.
 */
private GlyphChecker mGlyphChecker = new GlyphChecker();
/**
 * @see EmojiCompat.Config#setUseEmojiAsDefaultStyle(boolean)
 */
private final boolean mUseEmojiAsDefaultStyle;
/**
 * @see EmojiCompat.Config#setUseEmojiAsDefaultStyle(boolean, List)
 */
private final int[] mEmojiAsDefaultStyleExceptions;
/**
 * @param metadataRepo emoji metadata repository, cannot be {@code null}
 * @param spanFactory factory used to create the EmojiSpans, cannot be {@code null}
 * @param useEmojiAsDefaultStyle whether text-style default emojis get emoji presentation
 * @param emojiAsDefaultStyleExceptions codepoints excluded from useEmojiAsDefaultStyle;
 *        looked up with Arrays.binarySearch, so expected to be sorted (may be {@code null})
 */
EmojiProcessor(@NonNull final MetadataRepo metadataRepo,
@NonNull final EmojiCompat.SpanFactory spanFactory,
final boolean useEmojiAsDefaultStyle,
@Nullable final int[] emojiAsDefaultStyleExceptions) {
mSpanFactory = spanFactory;
mMetadataRepo = metadataRepo;
mUseEmojiAsDefaultStyle = useEmojiAsDefaultStyle;
mEmojiAsDefaultStyleExceptions = emojiAsDefaultStyleExceptions;
}
/**
 * Returns the {@link EmojiMetadata} when the given CharSequence is exactly one
 * emoji, otherwise {@code null}. Every codepoint must stay on a single trie
 * path (ACTION_ADVANCE_END), and the final state must be flushable.
 */
EmojiMetadata getEmojiMetadata(@NonNull final CharSequence charSequence) {
    final ProcessorSm stateMachine = new ProcessorSm(mMetadataRepo.getRootNode(),
            mUseEmojiAsDefaultStyle, mEmojiAsDefaultStyleExceptions);
    final int length = charSequence.length();

    for (int offset = 0; offset < length; ) {
        final int codePoint = Character.codePointAt(charSequence, offset);
        if (stateMachine.check(codePoint) != ACTION_ADVANCE_END) {
            // The sequence fell off the trie: not a single emoji.
            return null;
        }
        offset += Character.charCount(codePoint);
    }

    return stateMachine.isInFlushableState() ? stateMachine.getCurrentMetadata() : null;
}
/**
* Checks a given CharSequence for emojis, and adds EmojiSpans if any emojis are found.
* <p>
* <ul>
* <li>If no emojis are found, {@code charSequence} given as the input is returned without
* any changes. i.e. charSequence is a String, and no emojis are found, the same String is
* returned.</li>
* <li>If the given input is not a Spannable (such as String), and at least one emoji is found
* a new {@link android.text.Spannable} instance is returned. </li>
* <li>If the given input is a Spannable, the same instance is returned. </li>
* </ul>
*
* @param charSequence CharSequence to add the EmojiSpans, cannot be {@code null}
* @param start start index in the charSequence to look for emojis, should be greater than or
* equal to {@code 0}, also less than {@code charSequence.length()}
* @param end end index in the charSequence to look for emojis, should be greater than or
* equal to {@code start} parameter, also less than {@code charSequence.length()}
* @param maxEmojiCount maximum number of emojis in the {@code charSequence}, should be greater
* than or equal to {@code 0}
* @param replaceAll whether to replace all emoji with {@link EmojiSpan}s
*/
CharSequence process(@NonNull final CharSequence charSequence, @IntRange(from = 0) int start,
@IntRange(from = 0) int end, @IntRange(from = 0) int maxEmojiCount,
final boolean replaceAll) {
final boolean isSpannableBuilder = charSequence instanceof SpannableBuilder;
if (isSpannableBuilder) {
// batch the edits so SpannableBuilder watchers are notified only once, at endBatchEdit
((SpannableBuilder) charSequence).beginBatchEdit();
}
try {
Spannable spannable = null;
// if it is a spannable already, use the same instance to add/remove EmojiSpans.
// otherwise wait until the first EmojiSpan found in order to change the result
// into a Spannable.
if (isSpannableBuilder || charSequence instanceof Spannable) {
spannable = (Spannable) charSequence;
} else if (charSequence instanceof Spanned) {
// check if there are any EmojiSpans as cheap as possible
// start-1, end+1 will return emoji span that starts/ends at start/end indices
final int nextSpanTransition = ((Spanned) charSequence).nextSpanTransition(
start - 1, end + 1, EmojiSpan.class);
if (nextSpanTransition <= end) {
spannable = new SpannableString(charSequence);
}
}
if (spannable != null) {
final EmojiSpan[] spans = spannable.getSpans(start, end, EmojiSpan.class);
if (spans != null && spans.length > 0) {
// remove existing spans, and realign the start, end according to spans
// if start or end is in the middle of an emoji they should be aligned
final int length = spans.length;
for (int index = 0; index < length; index++) {
final EmojiSpan span = spans[index];
final int spanStart = spannable.getSpanStart(span);
final int spanEnd = spannable.getSpanEnd(span);
// Remove span only when its spanStart is NOT equal to current end.
// During add operation an emoji at index 0 is added with 0-1 as start and
// end indices. Therefore if there are emoji spans at [0-1] and [1-2]
// and end is 1, the span between 0-1 should be deleted, not 1-2.
if (spanStart != end) {
spannable.removeSpan(span);
}
start = Math.min(spanStart, start);
end = Math.max(spanEnd, end);
}
}
}
// nothing to process: empty range, or range entirely beyond the end of the text
if (start == end || start >= charSequence.length()) {
return charSequence;
}
// calculate max number of emojis that can be added. since getSpans call is a relatively
// expensive operation, do it only when maxEmojiCount is not unlimited.
if (maxEmojiCount != EmojiCompat.EMOJI_COUNT_UNLIMITED && spannable != null) {
maxEmojiCount -= spannable.getSpans(0, spannable.length(), EmojiSpan.class).length;
}
// add new ones
int addedCount = 0;
final ProcessorSm sm = new ProcessorSm(mMetadataRepo.getRootNode(),
mUseEmojiAsDefaultStyle, mEmojiAsDefaultStyleExceptions);
int currentOffset = start;
int codePoint = Character.codePointAt(charSequence, currentOffset);
// [start, currentOffset) is the emoji candidate currently being walked; the state
// machine decides when it is complete (ACTION_FLUSH) or abandoned (ACTION_ADVANCE_BOTH)
while (currentOffset < end && addedCount < maxEmojiCount) {
final int action = sm.check(codePoint);
switch (action) {
case ACTION_ADVANCE_BOTH:
start += Character.charCount(Character.codePointAt(charSequence, start));
currentOffset = start;
if (currentOffset < end) {
codePoint = Character.codePointAt(charSequence, currentOffset);
}
break;
case ACTION_ADVANCE_END:
currentOffset += Character.charCount(codePoint);
if (currentOffset < end) {
codePoint = Character.codePointAt(charSequence, currentOffset);
}
break;
case ACTION_FLUSH:
if (replaceAll || !hasGlyph(charSequence, start, currentOffset,
sm.getFlushMetadata())) {
if (spannable == null) {
spannable = new SpannableString(charSequence);
}
addEmoji(spannable, sm.getFlushMetadata(), start, currentOffset);
addedCount++;
}
start = currentOffset;
break;
}
}
// After the last codepoint is consumed the state machine might be in a state where it
// identified an emoji before. i.e. abc[women-emoji] when the last codepoint is consumed
// state machine is waiting to see if there is an emoji sequence (i.e. ZWJ).
// Need to check if it is in such a state.
if (sm.isInFlushableState() && addedCount < maxEmojiCount) {
if (replaceAll || !hasGlyph(charSequence, start, currentOffset,
sm.getCurrentMetadata())) {
if (spannable == null) {
spannable = new SpannableString(charSequence);
}
addEmoji(spannable, sm.getCurrentMetadata(), start, currentOffset);
addedCount++;
}
}
// if no emoji was added, the input instance is returned untouched
return spannable == null ? charSequence : spannable;
} finally {
if (isSpannableBuilder) {
((SpannableBuilder) charSequence).endBatchEdit();
}
}
}
/**
* Handles onKeyDown commands from a {@link KeyListener} and if {@code keyCode} is one of
* {@link KeyEvent#KEYCODE_DEL} or {@link KeyEvent#KEYCODE_FORWARD_DEL} it tries to delete an
* {@link EmojiSpan} from an {@link Editable}. Returns {@code true} if an {@link EmojiSpan} is
* deleted with the characters it covers.
* <p/>
* If there is a selection where selection start is not equal to selection end, does not
* delete.
*
* @param editable Editable instance passed to {@link KeyListener#onKeyDown(android.view.View,
* Editable, int, KeyEvent)}
* @param keyCode keyCode passed to {@link KeyListener#onKeyDown(android.view.View, Editable,
* int, KeyEvent)}
* @param event KeyEvent passed to {@link KeyListener#onKeyDown(android.view.View, Editable,
* int, KeyEvent)}
*
* @return {@code true} if an {@link EmojiSpan} is deleted
*/
static boolean handleOnKeyDown(@NonNull final Editable editable, final int keyCode,
        final KeyEvent event) {
    final boolean handled;
    // Only the two delete keys are handled; everything else is passed through.
    if (keyCode == KeyEvent.KEYCODE_DEL) {
        handled = delete(editable, event, false /*forwardDelete*/);
    } else if (keyCode == KeyEvent.KEYCODE_FORWARD_DEL) {
        handled = delete(editable, event, true /*forwardDelete*/);
    } else {
        handled = false;
    }

    if (handled) {
        // Keep the meta key state consistent after consuming the key event.
        MetaKeyKeyListener.adjustMetaAfterKeypress(editable);
    }
    return handled;
}
/**
 * Deletes the EmojiSpan (with all characters it covers) touched by the cursor,
 * if any. No-op when a modifier key is down or the selection is not a plain
 * cursor.
 */
private static boolean delete(final Editable content, final KeyEvent event,
        final boolean forwardDelete) {
    if (hasModifiers(event)) {
        return false;
    }

    final int start = Selection.getSelectionStart(content);
    final int end = Selection.getSelectionEnd(content);
    if (hasInvalidSelection(start, end)) {
        return false;
    }

    final EmojiSpan[] spans = content.getSpans(start, end, EmojiSpan.class);
    if (spans != null) {
        for (final EmojiSpan span : spans) {
            final int spanStart = content.getSpanStart(span);
            final int spanEnd = content.getSpanEnd(span);
            // The span is affected when the delete direction points into it or
            // the cursor sits strictly inside it.
            final boolean affected = (forwardDelete && spanStart == start)
                    || (!forwardDelete && spanEnd == start)
                    || (start > spanStart && start < spanEnd);
            if (affected) {
                content.delete(spanStart, spanEnd);
                return true;
            }
        }
    }
    return false;
}
/**
* Handles deleteSurroundingText commands from {@link InputConnection} and tries to delete an
* {@link EmojiSpan} from an {@link Editable}. Returns {@code true} if an {@link EmojiSpan} is
* deleted.
* <p/>
* If there is a selection where selection start is not equal to selection end, does not
* delete.
*
* @param inputConnection InputConnection instance
* @param editable TextView.Editable instance
* @param beforeLength the number of characters before the cursor to be deleted
* @param afterLength the number of characters after the cursor to be deleted
* @param inCodePoints {@code true} if length parameters are in codepoints
*
* @return {@code true} if an {@link EmojiSpan} is deleted
*/
static boolean handleDeleteSurroundingText(@NonNull final InputConnection inputConnection,
@NonNull final Editable editable, @IntRange(from = 0) final int beforeLength,
@IntRange(from = 0) final int afterLength, final boolean inCodePoints) {
//noinspection ConstantConditions
if (editable == null || inputConnection == null) {
return false;
}
if (beforeLength < 0 || afterLength < 0) {
return false;
}
final int selectionStart = Selection.getSelectionStart(editable);
final int selectionEnd = Selection.getSelectionEnd(editable);
// only act on a plain cursor; never delete across a ranged selection
if (hasInvalidSelection(selectionStart, selectionEnd)) {
return false;
}
int start;
int end;
if (inCodePoints) {
// go backwards in terms of codepoints
start = CodepointIndexFinder.findIndexBackward(editable, selectionStart,
Math.max(beforeLength, 0));
end = CodepointIndexFinder.findIndexForward(editable, selectionEnd,
Math.max(afterLength, 0));
if (start == CodepointIndexFinder.INVALID_INDEX
|| end == CodepointIndexFinder.INVALID_INDEX) {
return false;
}
} else {
start = Math.max(selectionStart - beforeLength, 0);
end = Math.min(selectionEnd + afterLength, editable.length());
}
final EmojiSpan[] spans = editable.getSpans(start, end, EmojiSpan.class);
if (spans != null && spans.length > 0) {
final int length = spans.length;
// widen [start, end] so that no emoji span is deleted only partially
for (int index = 0; index < length; index++) {
final EmojiSpan span = spans[index];
int spanStart = editable.getSpanStart(span);
int spanEnd = editable.getSpanEnd(span);
start = Math.min(spanStart, start);
end = Math.max(spanEnd, end);
}
start = Math.max(start, 0);
end = Math.min(end, editable.length());
inputConnection.beginBatchEdit();
editable.delete(start, end);
inputConnection.endBatchEdit();
return true;
}
return false;
}
/**
 * A usable selection is a plain cursor: start == end and both valid (!= -1).
 * Anything else — no selection, or a ranged selection — is invalid for delete handling.
 */
private static boolean hasInvalidSelection(final int start, final int end) {
return start == -1 || end == -1 || start != end;
}
/**
 * Returns {@code true} when the key event carries any modifier key state.
 */
private static boolean hasModifiers(KeyEvent event) {
return !KeyEvent.metaStateHasNoModifiers(event.getMetaState());
}
/**
 * Creates an EmojiSpan for the given metadata and attaches it over [start, end).
 */
private void addEmoji(@NonNull final Spannable spannable, final EmojiMetadata metadata,
final int start, final int end) {
final EmojiSpan span = mSpanFactory.createSpan(metadata);
spannable.setSpan(span, start, end, Spanned.SPAN_EXCLUSIVE_EXCLUSIVE);
}
/**
* Checks whether the current OS can render a given emoji. Used by the system to decide if an
* emoji span should be added. If the system cannot render it, an emoji span will be added.
* Used only for the case where replaceAll is set to {@code false}.
*
* @param charSequence the CharSequence that the emoji is in
* @param start start index of the emoji in the CharSequence
* @param end end index of the emoji in the CharSequence
* @param metadata EmojiMetadata instance for the emoji
*
* @return {@code true} if the OS can render emoji, {@code false} otherwise
*/
private boolean hasGlyph(final CharSequence charSequence, int start, final int end,
final EmojiMetadata metadata) {
// For pre M devices, heuristic in PaintCompat can result in false positives. we are
// adding another heuristic using the sdkAdded field. if the emoji was added to OS
// at a later version we assume that the system probably cannot render it.
if (Build.VERSION.SDK_INT < 23 && metadata.getSdkAdded() > Build.VERSION.SDK_INT) {
return false;
}
// if the existence is not calculated yet
if (metadata.getHasGlyph() == EmojiMetadata.HAS_GLYPH_UNKNOWN) {
// cache the result on the metadata so the paint check runs at most once per emoji
final boolean hasGlyph = mGlyphChecker.hasGlyph(charSequence, start, end);
metadata.setHasGlyph(hasGlyph);
}
return metadata.getHasGlyph() == EmojiMetadata.HAS_GLYPH_EXISTS;
}
/**
* Set the GlyphChecker instance used by EmojiProcessor. Used for testing.
*/
void setGlyphChecker(@NonNull final GlyphChecker glyphChecker) {
// fail fast on null so hasGlyph() never needs a null check
Preconditions.checkNotNull(glyphChecker);
mGlyphChecker = glyphChecker;
}
/**
* State machine for walking over the metadata trie.
*/
static final class ProcessorSm {
// No codepoint consumed yet, or the previous codepoint left the trie.
private static final int STATE_DEFAULT = 1;
// At least one codepoint matched; mCurrentNode points inside the trie.
private static final int STATE_WALKING = 2;
private int mState = STATE_DEFAULT;
/**
 * Root of the trie
 */
private final MetadataRepo.Node mRootNode;
/**
 * Pointer to the node after last codepoint.
 */
private MetadataRepo.Node mCurrentNode;
/**
 * The node where ACTION_FLUSH is called. Required since after flush action is
 * returned mCurrentNode is reset to be the root.
 */
private MetadataRepo.Node mFlushNode;
/**
 * The code point that was checked.
 */
private int mLastCodepoint;
/**
 * Level for mCurrentNode. Root is 0.
 */
private int mCurrentDepth;
/**
 * @see EmojiCompat.Config#setUseEmojiAsDefaultStyle(boolean)
 */
private final boolean mUseEmojiAsDefaultStyle;
/**
 * @see EmojiCompat.Config#setUseEmojiAsDefaultStyle(boolean, List)
 */
private final int[] mEmojiAsDefaultStyleExceptions;
/**
 * @param rootNode root of the metadata trie; walking always restarts here
 * @param useEmojiAsDefaultStyle see EmojiCompat.Config#setUseEmojiAsDefaultStyle(boolean)
 * @param emojiAsDefaultStyleExceptions codepoints exempted from useEmojiAsDefaultStyle
 */
ProcessorSm(MetadataRepo.Node rootNode, boolean useEmojiAsDefaultStyle,
int[] emojiAsDefaultStyleExceptions) {
mRootNode = rootNode;
mCurrentNode = rootNode;
mUseEmojiAsDefaultStyle = useEmojiAsDefaultStyle;
mEmojiAsDefaultStyleExceptions = emojiAsDefaultStyleExceptions;
}
/**
 * Feeds one codepoint into the state machine and returns the {@link Action}
 * the caller should take with its start/end pointers.
 */
@Action
int check(final int codePoint) {
final int action;
// attempt to advance one level deeper into the trie with this codepoint
MetadataRepo.Node node = mCurrentNode.get(codePoint);
switch (mState) {
case STATE_WALKING:
if (node != null) {
// still on a valid emoji path: keep walking
mCurrentNode = node;
mCurrentDepth += 1;
action = ACTION_ADVANCE_END;
} else {
if (isTextStyle(codePoint)) {
// text-style variation selector cancels the candidate emoji
action = reset();
} else if (isEmojiStyle(codePoint)) {
// emoji-style variation selector is consumed without moving in the trie
action = ACTION_ADVANCE_END;
} else if (mCurrentNode.getData() != null) {
// the walked path ended on a complete emoji: flush it
if (mCurrentDepth == 1) {
// single-codepoint emoji: flush only when it should render emoji-style
if (shouldUseEmojiPresentationStyleForSingleCodepoint()) {
mFlushNode = mCurrentNode;
action = ACTION_FLUSH;
reset();
} else {
action = reset();
}
} else {
mFlushNode = mCurrentNode;
action = ACTION_FLUSH;
reset();
}
} else {
action = reset();
}
}
break;
case STATE_DEFAULT:
default:
if (node == null) {
action = reset();
} else {
// first codepoint of a potential emoji sequence
mState = STATE_WALKING;
mCurrentNode = node;
mCurrentDepth = 1;
action = ACTION_ADVANCE_END;
}
break;
}
mLastCodepoint = codePoint;
return action;
}
/**
 * Returns the state machine to its initial state, pointing back at the trie root.
 */
@Action
private int reset() {
mState = STATE_DEFAULT;
mCurrentNode = mRootNode;
mCurrentDepth = 0;
return ACTION_ADVANCE_BOTH;
}
/**
 * @return the metadata node when ACTION_FLUSH is returned
 */
EmojiMetadata getFlushMetadata() {
return mFlushNode.getData();
}
/**
 * @return current pointer to the metadata node in the trie
 */
EmojiMetadata getCurrentMetadata() {
return mCurrentNode.getData();
}
/**
 * Need for the case where input is consumed, but action_flush was not called. For example
 * when the char sequence has single codepoint character which is a default emoji. State
 * machine will wait for the next.
 *
 * @return whether the current state requires an emoji to be added
 */
boolean isInFlushableState() {
// flushable: currently walking on a node carrying an emoji, and either the emoji spans
// multiple codepoints or the single codepoint should use emoji presentation
return mState == STATE_WALKING && mCurrentNode.getData() != null
&& (mCurrentDepth > 1 || shouldUseEmojiPresentationStyleForSingleCodepoint());
}
/**
 * Decides whether a single-codepoint emoji candidate should be rendered with
 * emoji (color) presentation rather than text presentation.
 */
private boolean shouldUseEmojiPresentationStyleForSingleCodepoint() {
if (mCurrentNode.getData().isDefaultEmoji()) {
// The codepoint is emoji style by default.
return true;
}
if (isEmojiStyle(mLastCodepoint)) {
// The codepoint was followed by the emoji style variation selector.
return true;
}
if (mUseEmojiAsDefaultStyle) {
// Emoji presentation style for text style default emojis is enabled. We have
// to check that the current codepoint is not an exception.
if (mEmojiAsDefaultStyleExceptions == null) {
return true;
}
final int codepoint = mCurrentNode.getData().getCodepointAt(0);
final int index = Arrays.binarySearch(mEmojiAsDefaultStyleExceptions, codepoint);
if (index < 0) {
// Index is negative, so the codepoint was not found in the array of exceptions.
return true;
}
}
return false;
}
/**
 * @param codePoint CodePoint to check
 *
 * @return {@code true} if the codepoint is a emoji style standardized variation selector
 */
private static boolean isEmojiStyle(int codePoint) {
// U+FE0F VARIATION SELECTOR-16 (request emoji presentation)
return codePoint == 0xFE0F;
}
/**
 * @param codePoint CodePoint to check
 *
 * @return {@code true} if the codepoint is a text style standardized variation selector
 */
private static boolean isTextStyle(int codePoint) {
// U+FE0E VARIATION SELECTOR-15 (request text presentation)
return codePoint == 0xFE0E;
}
}
/**
* Copy of BaseInputConnection findIndexBackward and findIndexForward functions.
*/
private static final class CodepointIndexFinder {
private static final int INVALID_INDEX = -1;
/**
* Find start index of the character in {@code cs} that is {@code numCodePoints} behind
* starting from {@code from}.
*
* @param cs CharSequence to work on
* @param from the index to start going backwards
* @param numCodePoints the number of codepoints
*
* @return start index of the character
*/
private static int findIndexBackward(final CharSequence cs, final int from,
final int numCodePoints) {
int currentIndex = from;
// true while a low surrogate was seen and its matching high surrogate is still required
boolean waitingHighSurrogate = false;
final int length = cs.length();
if (currentIndex < 0 || length < currentIndex) {
return INVALID_INDEX; // The starting point is out of range.
}
if (numCodePoints < 0) {
return INVALID_INDEX; // Basically this should not happen.
}
int remainingCodePoints = numCodePoints;
while (true) {
if (remainingCodePoints == 0) {
return currentIndex; // Reached to the requested length in code points.
}
--currentIndex;
if (currentIndex < 0) {
if (waitingHighSurrogate) {
return INVALID_INDEX; // An invalid surrogate pair is found.
}
return 0; // Reached to the beginning of the text w/o any invalid surrogate
// pair.
}
final char c = cs.charAt(currentIndex);
if (waitingHighSurrogate) {
if (!Character.isHighSurrogate(c)) {
return INVALID_INDEX; // An invalid surrogate pair is found.
}
waitingHighSurrogate = false;
--remainingCodePoints;
continue;
}
if (!Character.isSurrogate(c)) {
// plain BMP character: counts as one full codepoint
--remainingCodePoints;
continue;
}
if (Character.isHighSurrogate(c)) {
return INVALID_INDEX; // A invalid surrogate pair is found.
}
waitingHighSurrogate = true;
}
}
/**
* Find start index of the character in {@code cs} that is {@code numCodePoints} ahead
* starting from {@code from}.
*
* @param cs CharSequence to work on
* @param from the index to start going forward
* @param numCodePoints the number of codepoints
*
* @return start index of the character
*/
private static int findIndexForward(final CharSequence cs, final int from,
final int numCodePoints) {
int currentIndex = from;
// true while a high surrogate was seen and its matching low surrogate is still required
boolean waitingLowSurrogate = false;
final int length = cs.length();
if (currentIndex < 0 || length < currentIndex) {
return INVALID_INDEX; // The starting point is out of range.
}
if (numCodePoints < 0) {
return INVALID_INDEX; // Basically this should not happen.
}
int remainingCodePoints = numCodePoints;
while (true) {
if (remainingCodePoints == 0) {
return currentIndex; // Reached to the requested length in code points.
}
if (currentIndex >= length) {
if (waitingLowSurrogate) {
return INVALID_INDEX; // An invalid surrogate pair is found.
}
return length; // Reached to the end of the text w/o any invalid surrogate
// pair.
}
final char c = cs.charAt(currentIndex);
if (waitingLowSurrogate) {
if (!Character.isLowSurrogate(c)) {
return INVALID_INDEX; // An invalid surrogate pair is found.
}
--remainingCodePoints;
waitingLowSurrogate = false;
++currentIndex;
continue;
}
if (!Character.isSurrogate(c)) {
// plain BMP character: counts as one full codepoint
--remainingCodePoints;
++currentIndex;
continue;
}
if (Character.isLowSurrogate(c)) {
return INVALID_INDEX; // A invalid surrogate pair is found.
}
waitingLowSurrogate = true;
++currentIndex;
}
}
}
/**
* Utility class that checks if the system can render a given glyph.
*
* @hide
*/
@AnyThread
@RestrictTo(LIBRARY_GROUP)
public static class GlyphChecker {
    /**
     * Default text size for {@link #mTextPaint}.
     */
    private static final int PAINT_TEXT_SIZE = 10;
    /**
     * Used to create strings required by
     * {@link PaintCompat#hasGlyph(android.graphics.Paint, String)}.
     */
    private static final ThreadLocal<StringBuilder> sStringBuilder = new ThreadLocal<>();
    /**
     * TextPaint used during {@link PaintCompat#hasGlyph(android.graphics.Paint, String)} check.
     */
    private final TextPaint mTextPaint;

    GlyphChecker() {
        mTextPaint = new TextPaint();
        mTextPaint.setTextSize(PAINT_TEXT_SIZE);
    }

    /**
     * Returns whether the system can render an emoji.
     *
     * @param charSequence the CharSequence that the emoji is in
     * @param start start index of the emoji in the CharSequence
     * @param end end index of the emoji in the CharSequence
     *
     * @return {@code true} if the OS can render emoji, {@code false} otherwise
     */
    public boolean hasGlyph(final CharSequence charSequence, int start, final int end) {
        final StringBuilder builder = getStringBuilder();
        builder.setLength(0);
        // Copy the emoji's chars in one bulk call instead of a per-char loop.
        builder.append(charSequence, start, end);
        return PaintCompat.hasGlyph(mTextPaint, builder.toString());
    }

    /**
     * Returns the per-thread scratch StringBuilder, creating it on first use.
     */
    private static StringBuilder getStringBuilder() {
        StringBuilder builder = sStringBuilder.get();
        if (builder == null) {
            // avoid a redundant second ThreadLocal.get() after set()
            builder = new StringBuilder();
            sStringBuilder.set(builder);
        }
        return builder;
    }
}
}
| |
/*
* @(#)EightSVXDecoder.java 1.0 1999-10-19
*
* Copyright (c) 1999 Werner Randelshofer, Goldau, Switzerland.
* All rights reserved.
*
* You may not use, copy or modify this file, except in compliance with the
* license agreement you entered into with Werner Randelshofer.
* For details see accompanying license terms.
*/
package org.monte.media.eightsvx;
import org.monte.media.AbortException;
import org.monte.media.ParseException;
import org.monte.media.iff.*;
import java.util.Vector;
import java.io.InputStream;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.applet.AudioClip;
/**
* Creates a collection of EightSVXAudioClip objects by
* reading an IFF 8SVX file.
*
* <p><b>8SVX Type Definitions</b>
* <pre>
* #define ID_8SVX MakeID('8', 'S', 'V', 'X')
* #define ID_VHDR MakeID('V', 'H', 'D', 'R')
*
* typedef LONG Fixed; // A Fixed-point value, 16 bits to the left of
* // the point and 16 to the right. A Fixed is a number
* // of 2^16ths, i.e., 65536ths.
* #define Unity 0x10000L // Unity = Fixed 1.0 = maximum volume
*
* // sCompression: Choice of compression algorithm applied to the samples.
* #define sCmpNone 0 // not compressed
* #define sCmpFibDelta 1 // Fibonacci-delta encoding.
* // Can be more kinds in the future.
*
* typedef struct {
* ULONG oneShotHiSamples, // # samples in the high octave 1-shot part
* repeatHiSamples, // # samples in the high octave repeat part
* samplesPerHiCycle; // # samples/cycle in high octave, else 0
* UWORD samplesPerSec; // data sampling rate
* UBYTE ctOctave, // # octaves of waveform
* sCompression; // data compression technique used
* Fixed volume; // playback volume form 0 to Unity (full
* // volume). Map this value into the output
* // hardware's dynamic range.
* } Voice8Header;
*
* #define ID_NAME MakeID('N', 'A', 'M', 'E')
* // NAME chunk contains a CHAR[], the voice's name.
*
* #define ID_Copyright MakeID('(', 'c', ')', ' ')
* // "(c) " chunk contains a CHAR[], the FORM's copyright notice.
*
* #define ID_AUTH MakeID('A', 'U', 'T', 'H')
* // AUTH chunk contains a CHAR[], the author's name.
*
* #define ID_ANNO MakeID('A', 'N', 'N', 'O')
* // ANNO chunk contains a CHAR[], author's text annotations.
*
* #define ID_ATAK MakeID('A', 'T', 'A', 'K')
* #define ID_RLSE MakeID('R', 'L', 'S', 'E')
*
* typedef struct {
* UWORD duration; // segment duration in milliseconds, > 0
* Fixed dest; // destination volume factor
* } EGPoint;
*
* // ATAK and RLSE chunks contain an EGPoint[], piecewise-linear envelope.
* // The envelope defines a function of time returning Fixed values. It's
* // used to scale the nominal volume specified in the Voice8Header.
*
* #define RIGHT 4L
* #define LEFT 2L
* #define STEREO 6L
*
* #define ID_CHAN MakeID('C', 'H', 'A', 'N')
* typedef sampletype LONG;
*
* #define ID_PAN MakeID('P', 'A', 'N', ' ')
* typedef sposition Fixed; // 0 <= sposition <= Unity
* // Unity refers to the maximum possible volume.
*
*
* #define ID_BODY MakeID('B', 'O', 'D', 'Y')
* typedef character BYTE; // 8 bit signed number, -128 thru 127.
* // BODY chunk contains a BYTE[], array of audio data samples
* </pre>
*
* <p><b>8SVX Regular Expression</b>
* <pre>
* 8SVX ::= "FORM" #{ "8SVX" VHDR [NAME] [Copyright] [AUTH] ANNO* [ATAK] [RLSE] [CHAN] [PAN] BODY }
*
* VHDR ::= "VHDR" #{ Voice8Header }
* NAME ::= "NAME" #{ CHAR* } [0]
* Copyright ::= "(c) " #{ CHAR* } [0]
* AUTH ::= "AUTH" #{ CHAR* } [0]
* ANNO ::= "ANNO" #{ CHAR* } [0]
*
* ATAK ::= "ATAK" #{ EGPoint* }
* RLSE ::= "RLSE" #{ EGPoint* }
* CHAN ::= "CHAN" #{ sampletype }
* PAN ::= "PAN " #{ sposition }
* BODY ::= "BODY" #{ BYTE* } [0]
* </pre>
* The token "#" represents a ckSize LONG count of the following {braced} data bytes.
 * E.g., a VHDR's "#" should equal sizeof(Voice8Header). Literal items are shown in
 * "quotes", [square bracket items] are optional, and "*" means 0 or more replications.
 * A sometimes-needed pad byte is shown as "[0]".
*
* @author Werner Randelshofer, Hausmatt 10, CH-6405 Goldau, Switzerland
* @version 1.0 1999-10-19
*/
public class EightSVXDecoder
implements IFFVisitor {
/* Constants */
// Chunk/type identifiers from the IFF 8SVX specification, precomputed as int IDs.
public final static int EIGHT_SVX_ID = IFFParser.stringToID("8SVX");
public final static int VHDR_ID = IFFParser.stringToID("VHDR");
public final static int NAME_ID = IFFParser.stringToID("NAME");
public final static int COPYRIGHT_ID = IFFParser.stringToID("(c) ");
public final static int ANNO_ID = IFFParser.stringToID("ANNO");
public final static int AUTH_ID = IFFParser.stringToID("AUTH");
//public final static int ATAK_ID = IFFParser.stringToID("ATAK");
//public final static int RLSE_ID = IFFParser.stringToID("RLSE");
public final static int CHAN_ID = IFFParser.stringToID("CHAN");
//public final static int PAN_ID = IFFParser.stringToID("PAN ");
public final static int BODY_ID = IFFParser.stringToID("BODY");
/* Instance variables */
// Decoded audio clips collected during parsing.
private Vector samples_ = new Vector();
// True while the parser is inside a FORM group of type 8SVX.
private boolean within8SVXGroup_ = false;
/* Constructors */
/**
* Creates a new Audio Source from the specified InputStream.
*
* Pre condition
* InputStream must contain IFF 8SVX data.
* Post condition
* -
* Obligation
* -
*
* @param in The input stream.
*/
public EightSVXDecoder(InputStream in)
throws IOException {
try {
IFFParser iff = new IFFParser();
registerChunks(iff);
iff.parse(in,this);
}
catch (ParseException e) {
throw new IOException(e.toString());
}
catch (AbortException e) {
throw new IOException(e.toString());
}
finally {
in.close();
}
}
public EightSVXDecoder() {
}
/* Accessors */
public Vector getSamples() {
return samples_;
}
/* Actions */
public void registerChunks(IFFParser iff) {
iff.declareGroupChunk(EIGHT_SVX_ID,IFFParser.ID_FORM);
iff.declarePropertyChunk(EIGHT_SVX_ID,VHDR_ID);
iff.declarePropertyChunk(EIGHT_SVX_ID,NAME_ID);
iff.declarePropertyChunk(EIGHT_SVX_ID,COPYRIGHT_ID);
iff.declareCollectionChunk(EIGHT_SVX_ID,ANNO_ID);
iff.declarePropertyChunk(EIGHT_SVX_ID,AUTH_ID);
iff.declarePropertyChunk(EIGHT_SVX_ID,CHAN_ID);
iff.declareDataChunk(EIGHT_SVX_ID,BODY_ID);
}
/**
* Visits the start of an IFF GroupChunkExpression.
*
* Altough this method is declared as public it may only
* be called from an IFFParser that has been invoked
* by this class.
*
* Pre condition
* Vector <clips> must not be null.
* This method espects only FORM groups of type 8SVX.
* Post condition
* -
* Obligation
* -
*
* @param group Group Chunk to be visited.
* @exception ParseException
* When an error has been encountered.
*/
public void enterGroup(IFFChunk group) {
if (group.getType() == EIGHT_SVX_ID) { within8SVXGroup_ = true;}
}
public void leaveGroup(IFFChunk group) {
if (group.getType() == EIGHT_SVX_ID) { within8SVXGroup_ = false;}
}
public void visitChunk(IFFChunk group, IFFChunk chunk)
throws ParseException {
if (within8SVXGroup_) {
if (chunk.getID() == BODY_ID ) // && group.getID() == EIGHT_SVX_ID)
{
if (group.getPropertyChunk(VHDR_ID) == null) {
throw new ParseException("Sorry: Without 8SVX.VHDR-Chunk no sound possible");
}
EightSVXAudioClip newSample = new EightSVXAudioClip();
decodeVHDR(newSample,group.getPropertyChunk(VHDR_ID));
decodeCHAN(newSample,group.getPropertyChunk(CHAN_ID));
decodeNAME(newSample,group.getPropertyChunk(NAME_ID));
decodeCOPYRIGHT(newSample,group.getPropertyChunk(COPYRIGHT_ID));
decodeAUTH(newSample,group.getPropertyChunk(COPYRIGHT_ID));
decodeANNO(newSample,group.getCollectionChunks(ANNO_ID));
decodeBODY(newSample,chunk);
addAudioClip(newSample);
}
}
}
public void addAudioClip(AudioClip clip) {
samples_.addElement(clip);
}
/**
* The Voice 8 Header (VHDR) property chunk holds the playback parameters for the
* sampled waveform.
* <pre>
* typedef LONG Fixed; // A Fixed-point value, 16 bits to the left of
* // the point and 16 to the right. A Fixed is a number
* // of 2^16ths, i.e., 65536ths.
* #define Unity 0x10000L // Unity = Fixed 1.0 = maximum volume
*
* // sCompression: Choice of compression algorithm applied to the samples.
* #define sCmpNone 0 // not compressed
* #define sCmpFibDelta 1 // Fibonacci-delta encoding.
* // Can be more kinds in the future.
*
* typedef struct {
* ULONG oneShotHiSamples, // # samples in the high octave 1-shot part
* repeatHiSamples, // # samples in the high octave repeat part
* samplesPerHiCycle; // # samples/cycle in high octave, else 0
* UWORD samplesPerSec; // data sampling rate
* UBYTE ctOctave, // # octaves of waveform
* sCompression; // data compression technique used
* Fixed volume; // playback volume form 0 to Unity (full
* // volume). Map this value into the output
* // hardware's dynamic range.
* } Voice8Header;
* </pre>
*/
protected void decodeVHDR(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
try {
if (chunk != null) {
MC68000InputStream in = new MC68000InputStream(new ByteArrayInputStream(chunk.getData()));
sample.setOneShotHiSamples(in.readULONG());
sample.setRepeatHiSamples(in.readULONG());
sample.setSamplesPerHiCycle(in.readULONG());
sample.setSampleRate(in.readUWORD());
sample.setCtOctave(in.readUBYTE());
sample.setSCompression(in.readUBYTE());
sample.setVolume(in.readLONG());
}
}
catch (IOException e) {
throw new ParseException("Error parsing 8SVX VHDR:" +e.getMessage());
}
}
protected void decodeCHAN(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
if (chunk != null) {
sample.setSampleType(chunk.getData()[3]);
}
}
protected void decodeNAME(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
if (chunk != null) {
sample.setName(new String(chunk.getData()));
}
}
protected void decodeCOPYRIGHT(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
if (chunk != null) {
sample.setCopyright(new String(chunk.getData()));
}
}
protected void decodeAUTH(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
if (chunk != null) {
sample.setAuthor(new String(chunk.getData()));
}
}
protected void decodeANNO(EightSVXAudioClip sample,IFFChunk[] chunks)
throws ParseException {
if (chunks != null) {
for (int i=0; i < chunks.length; i++) {
IFFChunk chunk = chunks[i];
sample.setRemark(sample.getRemark() + new String(chunk.getData()));
}
}
}
protected void decodeBODY(EightSVXAudioClip sample,IFFChunk chunk)
throws ParseException {
if (chunk != null) {
byte[] data = chunk.getData();
sample.set8SVXBody(data);
}
}
}
| |
/*
* Copyright Camunda Services GmbH and/or licensed to Camunda Services GmbH
* under one or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information regarding copyright
* ownership. Camunda licenses this file to you under the Apache License,
* Version 2.0; you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.camunda.bpm.engine.test.api.identity;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import org.camunda.bpm.engine.ProcessEngineException;
import org.camunda.bpm.engine.identity.Group;
import org.camunda.bpm.engine.identity.GroupQuery;
import org.camunda.bpm.engine.impl.test.PluggableProcessEngineTestCase;
/**
* @author Joram Barrez
*/
/**
 * Tests for {@link GroupQuery}: filtering by id, name, type, membership and
 * tenant, plus sorting and invalid-usage checks.
 *
 * @author Joram Barrez
 */
public class GroupQueryTest extends PluggableProcessEngineTestCase {

  @Override
  protected void setUp() throws Exception {
    super.setUp();
    // Three "user" groups and one "security" group.
    createGroup("muppets", "Muppet show characters_", "user");
    createGroup("frogs", "Famous frogs", "user");
    createGroup("mammals", "Famous mammals from eighties", "user");
    createGroup("admin", "Administrators", "security");
    identityService.saveUser(identityService.newUser("kermit"));
    identityService.saveUser(identityService.newUser("fozzie"));
    identityService.saveUser(identityService.newUser("mispiggy"));
    identityService.saveTenant(identityService.newTenant("tenant"));
    identityService.createMembership("kermit", "muppets");
    identityService.createMembership("fozzie", "muppets");
    identityService.createMembership("mispiggy", "muppets");
    identityService.createMembership("kermit", "frogs");
    identityService.createMembership("fozzie", "mammals");
    identityService.createMembership("mispiggy", "mammals");
    identityService.createMembership("kermit", "admin");
    identityService.createTenantGroupMembership("tenant", "frogs");
  }

  /** Creates and persists a group with the given id, name and type. */
  private Group createGroup(String id, String name, String type) {
    Group group = identityService.newGroup(id);
    group.setName(name);
    group.setType(type);
    identityService.saveGroup(group);
    return group;
  }

  @Override
  protected void tearDown() throws Exception {
    identityService.deleteUser("kermit");
    identityService.deleteUser("fozzie");
    identityService.deleteUser("mispiggy");
    identityService.deleteGroup("muppets");
    identityService.deleteGroup("mammals");
    identityService.deleteGroup("frogs");
    identityService.deleteGroup("admin");
    identityService.deleteTenant("tenant");
    super.tearDown();
  }

  public void testQueryById() {
    GroupQuery query = identityService.createGroupQuery().groupId("muppets");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidId() {
    GroupQuery query = identityService.createGroupQuery().groupId("invalid");
    verifyQueryResults(query, 0);
    // a null id must be rejected
    try {
      identityService.createGroupQuery().groupId(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByIdIn() {
    // empty list
    assertTrue(identityService.createGroupQuery().groupIdIn("a", "b").list().isEmpty());

    // collect all ids
    List<Group> list = identityService.createGroupQuery().list();
    String[] ids = new String[list.size()];
    for (int i = 0; i < ids.length; i++) {
      ids[i] = list.get(i).getId();
    }

    List<Group> idInList = identityService.createGroupQuery().groupIdIn(ids).list();
    assertEquals(list.size(), idInList.size());

    // set-based membership check instead of a quadratic nested loop
    Set<String> expectedIds = new HashSet<String>();
    for (Group group : list) {
      expectedIds.add(group.getId());
    }
    for (Group group : idInList) {
      if (!expectedIds.contains(group.getId())) {
        fail("Expected to find group " + group);
      }
    }
  }

  public void testQueryByName() {
    GroupQuery query = identityService.createGroupQuery().groupName("Muppet show characters_");
    verifyQueryResults(query, 1);
    query = identityService.createGroupQuery().groupName("Famous frogs");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidName() {
    GroupQuery query = identityService.createGroupQuery().groupName("invalid");
    verifyQueryResults(query, 0);
    try {
      identityService.createGroupQuery().groupName(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByNameLike() {
    GroupQuery query = identityService.createGroupQuery().groupNameLike("%Famous%");
    verifyQueryResults(query, 2);
    query = identityService.createGroupQuery().groupNameLike("Famous%");
    verifyQueryResults(query, 2);
    query = identityService.createGroupQuery().groupNameLike("%show%");
    verifyQueryResults(query, 1);
    // the escaped underscore must match literally, not as a wildcard
    query = identityService.createGroupQuery().groupNameLike("%ters\\_");
    verifyQueryResults(query, 1);
  }

  public void testQueryByInvalidNameLike() {
    GroupQuery query = identityService.createGroupQuery().groupNameLike("%invalid%");
    verifyQueryResults(query, 0);
    try {
      identityService.createGroupQuery().groupNameLike(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByType() {
    GroupQuery query = identityService.createGroupQuery().groupType("user");
    verifyQueryResults(query, 3);
    // "admin" is a group id, not a type
    query = identityService.createGroupQuery().groupType("admin");
    verifyQueryResults(query, 0);
  }

  public void testQueryByInvalidType() {
    GroupQuery query = identityService.createGroupQuery().groupType("invalid");
    verifyQueryResults(query, 0);
    try {
      identityService.createGroupQuery().groupType(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByMember() {
    GroupQuery query = identityService.createGroupQuery().groupMember("fozzie");
    verifyQueryResults(query, 2);
    query = identityService.createGroupQuery().groupMember("kermit");
    verifyQueryResults(query, 3);
    query = query.orderByGroupId().asc();
    List<Group> groups = query.list();
    assertEquals(3, groups.size());
    assertEquals("admin", groups.get(0).getId());
    assertEquals("frogs", groups.get(1).getId());
    assertEquals("muppets", groups.get(2).getId());
    // additional filters can be stacked onto an existing query
    query = query.groupType("user");
    groups = query.list();
    assertEquals(2, groups.size());
    assertEquals("frogs", groups.get(0).getId());
    assertEquals("muppets", groups.get(1).getId());
  }

  public void testQueryByInvalidMember() {
    GroupQuery query = identityService.createGroupQuery().groupMember("invalid");
    verifyQueryResults(query, 0);
    try {
      identityService.createGroupQuery().groupMember(null).list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  public void testQueryByMemberOfTenant() {
    GroupQuery query = identityService.createGroupQuery().memberOfTenant("nonExisting");
    verifyQueryResults(query, 0);
    query = identityService.createGroupQuery().memberOfTenant("tenant");
    verifyQueryResults(query, 1);
    Group group = query.singleResult();
    assertEquals("frogs", group.getId());
  }

  public void testQuerySorting() {
    // asc
    assertEquals(4, identityService.createGroupQuery().orderByGroupId().asc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupName().asc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupType().asc().count());
    // desc
    assertEquals(4, identityService.createGroupQuery().orderByGroupId().desc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupName().desc().count());
    assertEquals(4, identityService.createGroupQuery().orderByGroupType().desc().count());
    // Multiple sortings
    GroupQuery query = identityService.createGroupQuery().orderByGroupType().asc().orderByGroupName().desc();
    List<Group> groups = query.list();
    assertEquals(4, query.count());
    assertEquals("security", groups.get(0).getType());
    assertEquals("user", groups.get(1).getType());
    assertEquals("user", groups.get(2).getType());
    assertEquals("user", groups.get(3).getType());
    assertEquals("admin", groups.get(0).getId());
    assertEquals("muppets", groups.get(1).getId());
    assertEquals("mammals", groups.get(2).getId());
    assertEquals("frogs", groups.get(3).getId());
  }

  public void testQueryInvalidSortingUsage() {
    // calling list() without asc()/desc() after an order-by must fail
    try {
      identityService.createGroupQuery().orderByGroupId().list();
      fail();
    } catch (ProcessEngineException e) {}
    try {
      identityService.createGroupQuery().orderByGroupId().orderByGroupName().list();
      fail();
    } catch (ProcessEngineException e) {}
  }

  /**
   * Asserts that list(), count() and singleResult() of the query are all
   * consistent with the expected result count.
   */
  private void verifyQueryResults(GroupQuery query, int countExpected) {
    assertEquals(countExpected, query.list().size());
    assertEquals(countExpected, query.count());
    if (countExpected == 1) {
      assertNotNull(query.singleResult());
    } else if (countExpected > 1) {
      verifySingleResultFails(query);
    } else if (countExpected == 0) {
      assertNull(query.singleResult());
    }
  }

  /** Asserts that singleResult() throws when the query matches several groups. */
  private void verifySingleResultFails(GroupQuery query) {
    try {
      query.singleResult();
      fail();
    } catch (ProcessEngineException e) {}
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.orc;
import com.facebook.presto.orc.memory.AbstractAggregatedMemoryContext;
import com.facebook.presto.orc.memory.AggregatedMemoryContext;
import com.facebook.presto.orc.metadata.ColumnEncoding;
import com.facebook.presto.orc.metadata.MetadataReader;
import com.facebook.presto.orc.metadata.OrcType;
import com.facebook.presto.orc.metadata.OrcType.OrcTypeKind;
import com.facebook.presto.orc.metadata.PostScript.HiveWriterVersion;
import com.facebook.presto.orc.metadata.StripeInformation;
import com.facebook.presto.orc.metadata.statistics.ColumnStatistics;
import com.facebook.presto.orc.metadata.statistics.StripeStatistics;
import com.facebook.presto.orc.reader.StreamReader;
import com.facebook.presto.orc.reader.StreamReaders;
import com.facebook.presto.orc.stream.InputStreamSources;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.type.Type;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import io.airlift.slice.Slice;
import io.airlift.slice.Slices;
import io.airlift.units.DataSize;
import org.joda.time.DateTimeZone;
import java.io.Closeable;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.stream.Collectors;
import static com.facebook.presto.orc.OrcDataSourceUtils.mergeAdjacentDiskRanges;
import static com.facebook.presto.orc.OrcReader.MAX_BATCH_SIZE;
import static com.facebook.presto.orc.OrcRecordReader.LinearProbeRangeFinder.createTinyStripesRangeFinder;
import static com.google.common.base.Preconditions.checkArgument;
import static java.lang.Math.max;
import static java.lang.Math.min;
import static java.lang.Math.toIntExact;
import static java.util.Comparator.comparingLong;
import static java.util.Objects.requireNonNull;
/**
 * Streaming reader over the rows of a single ORC file split. Stripes are
 * selected by the predicate and by split boundaries at construction time;
 * rows are then consumed batch-by-batch via {@link #nextBatch()} and
 * {@link #readBlock(Type, int)}.
 */
public class OrcRecordReader
        implements Closeable
{
    private final OrcDataSource orcDataSource;

    // one reader per top-level column; null entries are columns not included
    private final StreamReader[] streamReaders;
    // per-column largest observed bytes-per-cell, used to shrink maxBatchSize
    private final long[] maxBytesPerCell;
    private long maxCombinedBytesPerRow;
    private final long totalRowCount;
    private final long splitLength;
    private final Set<Integer> presentColumns;
    private final long maxBlockBytes;
    private long currentPosition;
    private long currentStripePosition;
    private int currentBatchSize;
    private int maxBatchSize = MAX_BATCH_SIZE;

    private final List<StripeInformation> stripes;
    private final StripeReader stripeReader;
    private int currentStripe = -1;
    private AggregatedMemoryContext currentStripeSystemMemoryContext;

    private final long fileRowCount;
    // file-level row offset of each selected stripe, parallel to "stripes"
    private final List<Long> stripeFilePositions;
    private long filePosition;

    private Iterator<RowGroup> rowGroups = ImmutableList.<RowGroup>of().iterator();
    private long currentGroupRowCount;
    private long nextRowInGroup;

    private final Map<String, Slice> userMetadata;

    private final AbstractAggregatedMemoryContext systemMemoryUsage;

    public OrcRecordReader(
            Map<Integer, Type> includedColumns,
            OrcPredicate predicate,
            long numberOfRows,
            List<StripeInformation> fileStripes,
            List<ColumnStatistics> fileStats,
            List<StripeStatistics> stripeStats,
            OrcDataSource orcDataSource,
            long splitOffset,
            long splitLength,
            List<OrcType> types,
            Optional<OrcDecompressor> decompressor,
            int rowsInRowGroup,
            DateTimeZone hiveStorageTimeZone,
            HiveWriterVersion hiveWriterVersion,
            MetadataReader metadataReader,
            DataSize maxMergeDistance,
            DataSize maxReadSize,
            DataSize maxBlockSize,
            Map<String, Slice> userMetadata,
            AbstractAggregatedMemoryContext systemMemoryUsage)
            throws IOException
    {
        requireNonNull(includedColumns, "includedColumns is null");
        requireNonNull(predicate, "predicate is null");
        requireNonNull(fileStripes, "fileStripes is null");
        requireNonNull(stripeStats, "stripeStats is null");
        requireNonNull(orcDataSource, "orcDataSource is null");
        requireNonNull(types, "types is null");
        requireNonNull(decompressor, "decompressor is null");
        requireNonNull(hiveStorageTimeZone, "hiveStorageTimeZone is null");
        requireNonNull(userMetadata, "userMetadata is null");

        // reduce the included columns to the set that is also present
        ImmutableSet.Builder<Integer> presentColumns = ImmutableSet.builder();
        ImmutableMap.Builder<Integer, Type> presentColumnsAndTypes = ImmutableMap.builder();
        OrcType root = types.get(0);
        for (Map.Entry<Integer, Type> entry : includedColumns.entrySet()) {
            // an old file can have less columns since columns can be added
            // after the file was written
            if (entry.getKey() < root.getFieldCount()) {
                presentColumns.add(entry.getKey());
                presentColumnsAndTypes.put(entry.getKey(), entry.getValue());
            }
        }
        this.presentColumns = presentColumns.build();

        this.maxBlockBytes = requireNonNull(maxBlockSize, "maxBlockSize is null").toBytes();

        // it is possible that old versions of orc use 0 to mean there are no row groups
        checkArgument(rowsInRowGroup > 0, "rowsInRowGroup must be greater than zero");

        // sort stripes by file position
        List<StripeInfo> stripeInfos = new ArrayList<>();
        for (int i = 0; i < fileStripes.size(); i++) {
            Optional<StripeStatistics> stats = Optional.empty();
            // ignore all stripe stats if too few or too many
            if (stripeStats.size() == fileStripes.size()) {
                stats = Optional.of(stripeStats.get(i));
            }
            stripeInfos.add(new StripeInfo(fileStripes.get(i), stats));
        }
        Collections.sort(stripeInfos, comparingLong(info -> info.getStripe().getOffset()));

        long totalRowCount = 0;
        long fileRowCount = 0;
        ImmutableList.Builder<StripeInformation> stripes = ImmutableList.builder();
        ImmutableList.Builder<Long> stripeFilePositions = ImmutableList.builder();
        if (predicate.matches(numberOfRows, getStatisticsByColumnOrdinal(root, fileStats))) {
            // select stripes that start within the specified split
            for (StripeInfo info : stripeInfos) {
                StripeInformation stripe = info.getStripe();
                if (splitContainsStripe(splitOffset, splitLength, stripe) && isStripeIncluded(root, stripe, info.getStats(), predicate)) {
                    stripes.add(stripe);
                    stripeFilePositions.add(fileRowCount);
                    totalRowCount += stripe.getNumberOfRows();
                }
                fileRowCount += stripe.getNumberOfRows();
            }
        }
        this.totalRowCount = totalRowCount;
        this.stripes = stripes.build();
        this.stripeFilePositions = stripeFilePositions.build();

        orcDataSource = wrapWithCacheIfTinyStripes(orcDataSource, this.stripes, maxMergeDistance, maxReadSize);
        this.orcDataSource = orcDataSource;
        this.splitLength = splitLength;

        // fileRowCount counts every stripe, including excluded ones
        this.fileRowCount = stripeInfos.stream()
                .map(StripeInfo::getStripe)
                .mapToLong(StripeInformation::getNumberOfRows)
                .sum();

        // defensive copy: the caller's slices may be backed by shared buffers
        this.userMetadata = ImmutableMap.copyOf(Maps.transformValues(userMetadata, Slices::copyOf));

        this.systemMemoryUsage = requireNonNull(systemMemoryUsage, "systemMemoryUsage is null").newAggregatedMemoryContext();
        this.currentStripeSystemMemoryContext = systemMemoryUsage.newAggregatedMemoryContext();

        stripeReader = new StripeReader(
                orcDataSource,
                decompressor,
                types,
                this.presentColumns,
                rowsInRowGroup,
                predicate,
                hiveWriterVersion,
                metadataReader);

        streamReaders = createStreamReaders(orcDataSource, types, hiveStorageTimeZone, presentColumnsAndTypes.build());
        maxBytesPerCell = new long[streamReaders.length];
    }

    // true when the stripe's start offset lies within [splitOffset, splitOffset + splitLength)
    private static boolean splitContainsStripe(long splitOffset, long splitLength, StripeInformation stripe)
    {
        long splitEndOffset = splitOffset + splitLength;
        return splitOffset <= stripe.getOffset() && stripe.getOffset() < splitEndOffset;
    }

    private static boolean isStripeIncluded(
            OrcType rootStructType,
            StripeInformation stripe,
            Optional<StripeStatistics> stripeStats,
            OrcPredicate predicate)
    {
        // if there are no stats, include the column
        if (!stripeStats.isPresent()) {
            return true;
        }
        return predicate.matches(stripe.getNumberOfRows(), getStatisticsByColumnOrdinal(rootStructType, stripeStats.get().getColumnStatistics()));
    }

    // wraps the data source in a caching layer only when every stripe fits
    // within maxReadSize; already-cached sources are returned unchanged
    @VisibleForTesting
    static OrcDataSource wrapWithCacheIfTinyStripes(OrcDataSource dataSource, List<StripeInformation> stripes, DataSize maxMergeDistance, DataSize maxReadSize)
    {
        if (dataSource instanceof CachingOrcDataSource) {
            return dataSource;
        }
        for (StripeInformation stripe : stripes) {
            if (stripe.getTotalLength() > maxReadSize.toBytes()) {
                return dataSource;
            }
        }
        return new CachingOrcDataSource(dataSource, createTinyStripesRangeFinder(stripes, maxMergeDistance, maxReadSize));
    }

    /**
     * Return the row position relative to the start of the file.
     */
    public long getFilePosition()
    {
        return filePosition;
    }

    /**
     * Returns the total number of rows in the file. This count includes rows
     * for stripes that were completely excluded due to stripe statistics.
     */
    public long getFileRowCount()
    {
        return fileRowCount;
    }

    /**
     * Return the row position within the stripes being read by this reader.
     * This position will include rows that were never read due to row groups
     * that are excluded due to row group statistics. Thus, it will advance
     * faster than the number of rows actually read.
     */
    public long getReaderPosition()
    {
        return currentPosition;
    }

    /**
     * Returns the total number of rows that can possibly be read by this reader.
     * This count may be fewer than the number of rows in the file if some
     * stripes were excluded due to stripe statistics, but may be more than
     * the number of rows read if some row groups are excluded due to statistics.
     */
    public long getReaderRowCount()
    {
        return totalRowCount;
    }

    /** Fraction of this reader's rows already passed, in [0, 1]. */
    public float getProgress()
    {
        return ((float) currentPosition) / totalRowCount;
    }

    public long getSplitLength()
    {
        return splitLength;
    }

    /**
     * Returns the sum of the largest cells in size from each column
     */
    public long getMaxCombinedBytesPerRow()
    {
        return maxCombinedBytesPerRow;
    }

    @Override
    public void close()
            throws IOException
    {
        orcDataSource.close();
    }

    public boolean isColumnPresent(int hiveColumnIndex)
    {
        return presentColumns.contains(hiveColumnIndex);
    }

    /**
     * Advances to the next batch of rows and prepares the stream readers.
     *
     * @return the number of rows in the new batch, or -1 when there are no
     *         more rows to read
     */
    public int nextBatch()
            throws IOException
    {
        // update position for current row group (advancing resets them)
        filePosition += currentBatchSize;
        currentPosition += currentBatchSize;

        // if the current row group is exhausted, attempt to advance to the next one
        if (nextRowInGroup >= currentGroupRowCount) {
            if (!advanceToNextRowGroup()) {
                // end of data: pin positions to their final values
                filePosition = fileRowCount;
                currentPosition = totalRowCount;
                return -1;
            }
        }

        currentBatchSize = toIntExact(min(maxBatchSize, currentGroupRowCount - nextRowInGroup));

        for (StreamReader column : streamReaders) {
            if (column != null) {
                column.prepareNextRead(currentBatchSize);
            }
        }
        nextRowInGroup += currentBatchSize;
        return currentBatchSize;
    }

    /**
     * Reads the current batch of the given column as a Block, tracking the
     * largest observed bytes-per-cell to adaptively shrink the batch size
     * so that a full row stays within maxBlockBytes.
     */
    public Block readBlock(Type type, int columnIndex)
            throws IOException
    {
        Block block = streamReaders[columnIndex].readBlock(type);
        if (block.getPositionCount() > 0) {
            long bytesPerCell = block.getSizeInBytes() / block.getPositionCount();
            if (maxBytesPerCell[columnIndex] < bytesPerCell) {
                // replace this column's contribution to the per-row estimate
                maxCombinedBytesPerRow = maxCombinedBytesPerRow - maxBytesPerCell[columnIndex] + bytesPerCell;
                maxBytesPerCell[columnIndex] = bytesPerCell;
                // never grows, and never drops below one row per batch
                maxBatchSize = toIntExact(min(maxBatchSize, max(1, maxBlockBytes / maxCombinedBytesPerRow)));
            }
        }
        return block;
    }

    public StreamReader getStreamReader(int index)
    {
        checkArgument(index < streamReaders.length, "index does not exist");
        return streamReaders[index];
    }

    public Map<String, Slice> getUserMetadata()
    {
        // copy the slices so callers cannot see shared mutable buffers
        return ImmutableMap.copyOf(Maps.transformValues(userMetadata, Slices::copyOf));
    }

    // advances to the next non-empty row group, crossing stripe boundaries
    // as needed; returns false when no row groups remain
    private boolean advanceToNextRowGroup()
            throws IOException
    {
        nextRowInGroup = 0;

        while (!rowGroups.hasNext() && currentStripe < stripes.size()) {
            advanceToNextStripe();
        }

        if (!rowGroups.hasNext()) {
            currentGroupRowCount = 0;
            return false;
        }

        RowGroup currentRowGroup = rowGroups.next();
        currentGroupRowCount = currentRowGroup.getRowCount();
        currentPosition = currentStripePosition + currentRowGroup.getRowOffset();
        filePosition = stripeFilePositions.get(currentStripe) + currentRowGroup.getRowOffset();

        // give reader data streams from row group
        InputStreamSources rowGroupStreamSources = currentRowGroup.getStreamSources();
        for (StreamReader column : streamReaders) {
            if (column != null) {
                column.startRowGroup(rowGroupStreamSources);
            }
        }

        return true;
    }

    // reads the next stripe (if any) and resets per-stripe state; a null
    // stripe from the reader (all row groups pruned) leaves rowGroups empty
    private void advanceToNextStripe()
            throws IOException
    {
        currentStripeSystemMemoryContext.close();
        currentStripeSystemMemoryContext = systemMemoryUsage.newAggregatedMemoryContext();
        rowGroups = ImmutableList.<RowGroup>of().iterator();

        currentStripe++;
        if (currentStripe >= stripes.size()) {
            return;
        }

        if (currentStripe > 0) {
            currentStripePosition += stripes.get(currentStripe - 1).getNumberOfRows();
        }

        StripeInformation stripeInformation = stripes.get(currentStripe);
        Stripe stripe = stripeReader.readStripe(stripeInformation, currentStripeSystemMemoryContext);
        if (stripe != null) {
            // Give readers access to dictionary streams
            InputStreamSources dictionaryStreamSources = stripe.getDictionaryStreamSources();
            List<ColumnEncoding> columnEncodings = stripe.getColumnEncodings();
            for (StreamReader column : streamReaders) {
                if (column != null) {
                    column.startStripe(dictionaryStreamSources, columnEncodings);
                }
            }

            rowGroups = stripe.getRowGroups().iterator();
        }
    }

    // builds one StreamReader per top-level field; entries for columns not
    // in includedColumns stay null
    private static StreamReader[] createStreamReaders(
            OrcDataSource orcDataSource,
            List<OrcType> types,
            DateTimeZone hiveStorageTimeZone,
            Map<Integer, Type> includedColumns)
    {
        List<StreamDescriptor> streamDescriptors = createStreamDescriptor("", "", 0, types, orcDataSource).getNestedStreams();

        OrcType rowType = types.get(0);
        StreamReader[] streamReaders = new StreamReader[rowType.getFieldCount()];
        for (int columnId = 0; columnId < rowType.getFieldCount(); columnId++) {
            if (includedColumns.containsKey(columnId)) {
                StreamDescriptor streamDescriptor = streamDescriptors.get(columnId);
                streamReaders[columnId] = StreamReaders.createStreamReader(streamDescriptor, hiveStorageTimeZone);
            }
        }
        return streamReaders;
    }

    // recursively builds the descriptor tree for a type, naming nested
    // streams with dotted paths (e.g. "parent.field", list "item", map "key"/"value")
    private static StreamDescriptor createStreamDescriptor(String parentStreamName, String fieldName, int typeId, List<OrcType> types, OrcDataSource dataSource)
    {
        OrcType type = types.get(typeId);

        if (!fieldName.isEmpty()) {
            parentStreamName += "." + fieldName;
        }

        ImmutableList.Builder<StreamDescriptor> nestedStreams = ImmutableList.builder();
        if (type.getOrcTypeKind() == OrcTypeKind.STRUCT) {
            for (int i = 0; i < type.getFieldCount(); ++i) {
                nestedStreams.add(createStreamDescriptor(parentStreamName, type.getFieldName(i), type.getFieldTypeIndex(i), types, dataSource));
            }
        }
        else if (type.getOrcTypeKind() == OrcTypeKind.LIST) {
            nestedStreams.add(createStreamDescriptor(parentStreamName, "item", type.getFieldTypeIndex(0), types, dataSource));
        }
        else if (type.getOrcTypeKind() == OrcTypeKind.MAP) {
            nestedStreams.add(createStreamDescriptor(parentStreamName, "key", type.getFieldTypeIndex(0), types, dataSource));
            nestedStreams.add(createStreamDescriptor(parentStreamName, "value", type.getFieldTypeIndex(1), types, dataSource));
        }
        return new StreamDescriptor(parentStreamName, typeId, fieldName, type.getOrcTypeKind(), dataSource, nestedStreams.build());
    }

    // maps field ordinals of the root struct to their column statistics;
    // ordinals without statistics are simply omitted
    private static Map<Integer, ColumnStatistics> getStatisticsByColumnOrdinal(OrcType rootStructType, List<ColumnStatistics> fileStats)
    {
        requireNonNull(rootStructType, "rootStructType is null");
        checkArgument(rootStructType.getOrcTypeKind() == OrcTypeKind.STRUCT);
        requireNonNull(fileStats, "fileStats is null");

        ImmutableMap.Builder<Integer, ColumnStatistics> statistics = ImmutableMap.builder();
        for (int ordinal = 0; ordinal < rootStructType.getFieldCount(); ordinal++) {
            if (fileStats.size() > ordinal) {
                ColumnStatistics element = fileStats.get(rootStructType.getFieldTypeIndex(ordinal));
                if (element != null) {
                    statistics.put(ordinal, element);
                }
            }
        }
        return statistics.build();
    }

    // pairs a stripe with its (optional) statistics for sorting/selection
    private static class StripeInfo
    {
        private final StripeInformation stripe;
        private final Optional<StripeStatistics> stats;

        public StripeInfo(StripeInformation stripe, Optional<StripeStatistics> stats)
        {
            this.stripe = requireNonNull(stripe, "stripe is null");
            this.stats = requireNonNull(stats, "metadata is null");
        }

        public StripeInformation getStripe()
        {
            return stripe;
        }

        public Optional<StripeStatistics> getStats()
        {
            return stats;
        }
    }

    @VisibleForTesting
    static class LinearProbeRangeFinder
            implements CachingOrcDataSource.RegionFinder
    {
        private final List<DiskRange> diskRanges;
        // index of the last matched range; probing resumes from here
        private int index;

        public LinearProbeRangeFinder(List<DiskRange> diskRanges)
        {
            this.diskRanges = diskRanges;
        }

        @Override
        public DiskRange getRangeFor(long desiredOffset)
        {
            // Assumption: range are always read in order
            // Assumption: bytes that are not part of any range are never read
            for (; index < diskRanges.size(); index++) {
                DiskRange range = diskRanges.get(index);
                if (range.getEnd() > desiredOffset) {
                    checkArgument(range.getOffset() <= desiredOffset);
                    return range;
                }
            }
            throw new IllegalArgumentException("Invalid desiredOffset " + desiredOffset);
        }

        // builds a finder over stripe byte ranges merged within maxMergeDistance
        public static LinearProbeRangeFinder createTinyStripesRangeFinder(List<StripeInformation> stripes, DataSize maxMergeDistance, DataSize maxReadSize)
        {
            if (stripes.size() == 0) {
                return new LinearProbeRangeFinder(ImmutableList.of());
            }

            List<DiskRange> scratchDiskRanges = stripes.stream()
                    .map(stripe -> new DiskRange(stripe.getOffset(), toIntExact(stripe.getTotalLength())))
                    .collect(Collectors.toList());
            List<DiskRange> diskRanges = mergeAdjacentDiskRanges(scratchDiskRanges, maxMergeDistance, maxReadSize);

            return new LinearProbeRangeFinder(diskRanges);
        }
    }
}
| |
/**
*/
package org.tud.inf.st.mbt.actions.provider;
import java.util.ArrayList;
import java.util.Collection;
import org.eclipse.emf.common.notify.Adapter;
import org.eclipse.emf.common.notify.Notification;
import org.eclipse.emf.common.notify.Notifier;
import org.eclipse.emf.edit.provider.ChangeNotifier;
import org.eclipse.emf.edit.provider.ComposeableAdapterFactory;
import org.eclipse.emf.edit.provider.ComposedAdapterFactory;
import org.eclipse.emf.edit.provider.IChangeNotifier;
import org.eclipse.emf.edit.provider.IDisposable;
import org.eclipse.emf.edit.provider.IEditingDomainItemProvider;
import org.eclipse.emf.edit.provider.IItemLabelProvider;
import org.eclipse.emf.edit.provider.IItemPropertySource;
import org.eclipse.emf.edit.provider.INotifyChangedListener;
import org.eclipse.emf.edit.provider.IStructuredItemContentProvider;
import org.eclipse.emf.edit.provider.ITreeItemContentProvider;
import org.tud.inf.st.mbt.actions.util.ActionsAdapterFactory;
/**
* This is the factory that is used to provide the interfaces needed to support Viewers.
* The adapters generated by this factory convert EMF adapter notifications into calls to {@link #fireNotifyChanged fireNotifyChanged}.
* The adapters also support Eclipse property sheets.
* Note that most of the adapters are shared among multiple instances.
* <!-- begin-user-doc -->
* <!-- end-user-doc -->
* @generated
*/
public class ActionsItemProviderAdapterFactory extends ActionsAdapterFactory implements ComposeableAdapterFactory, IChangeNotifier, IDisposable {
	// NOTE: EMF-generated factory (@generated). Each item provider below is created lazily on
	// first request and then shared by every model object of that type; hand edits will be
	// overwritten unless the @generated tags are changed to "@generated NOT".
	/**
	 * This keeps track of the root adapter factory that delegates to this adapter factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ComposedAdapterFactory parentAdapterFactory;
	/**
	 * This is used to implement {@link org.eclipse.emf.edit.provider.IChangeNotifier}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected IChangeNotifier changeNotifier = new ChangeNotifier();
	/**
	 * This keeps track of all the supported types checked by {@link #isFactoryForType isFactoryForType}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected Collection<Object> supportedTypes = new ArrayList<Object>();
	/**
	 * This constructs an instance.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ActionsItemProviderAdapterFactory() {
		supportedTypes.add(IEditingDomainItemProvider.class);
		supportedTypes.add(IStructuredItemContentProvider.class);
		supportedTypes.add(ITreeItemContentProvider.class);
		supportedTypes.add(IItemLabelProvider.class);
		supportedTypes.add(IItemPropertySource.class);
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.PreGenerationSequence} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PreGenerationSequenceItemProvider preGenerationSequenceItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.PreGenerationSequence}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createPreGenerationSequenceAdapter() {
		if (preGenerationSequenceItemProvider == null) {
			preGenerationSequenceItemProvider = new PreGenerationSequenceItemProvider(this);
		}
		return preGenerationSequenceItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.PostGenerationSequence} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected PostGenerationSequenceItemProvider postGenerationSequenceItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.PostGenerationSequence}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createPostGenerationSequenceAdapter() {
		if (postGenerationSequenceItemProvider == null) {
			postGenerationSequenceItemProvider = new PostGenerationSequenceItemProvider(this);
		}
		return postGenerationSequenceItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.ThrowAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ThrowActionItemProvider throwActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.ThrowAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createThrowActionAdapter() {
		if (throwActionItemProvider == null) {
			throwActionItemProvider = new ThrowActionItemProvider(this);
		}
		return throwActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.TermAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected TermActionItemProvider termActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.TermAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createTermActionAdapter() {
		if (termActionItemProvider == null) {
			termActionItemProvider = new TermActionItemProvider(this);
		}
		return termActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.SetDataAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected SetDataActionItemProvider setDataActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.SetDataAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createSetDataActionAdapter() {
		if (setDataActionItemProvider == null) {
			setDataActionItemProvider = new SetDataActionItemProvider(this);
		}
		return setDataActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.RemoveBagAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected RemoveBagActionItemProvider removeBagActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.RemoveBagAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createRemoveBagActionAdapter() {
		if (removeBagActionItemProvider == null) {
			removeBagActionItemProvider = new RemoveBagActionItemProvider(this);
		}
		return removeBagActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.ActivateFeatureAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ActivateFeatureActionItemProvider activateFeatureActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.ActivateFeatureAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createActivateFeatureActionAdapter() {
		if (activateFeatureActionItemProvider == null) {
			activateFeatureActionItemProvider = new ActivateFeatureActionItemProvider(this);
		}
		return activateFeatureActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.DeactivateFeatureAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected DeactivateFeatureActionItemProvider deactivateFeatureActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.DeactivateFeatureAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createDeactivateFeatureActionAdapter() {
		if (deactivateFeatureActionItemProvider == null) {
			deactivateFeatureActionItemProvider = new DeactivateFeatureActionItemProvider(this);
		}
		return deactivateFeatureActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.GetDataAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GetDataActionItemProvider getDataActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.GetDataAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGetDataActionAdapter() {
		if (getDataActionItemProvider == null) {
			getDataActionItemProvider = new GetDataActionItemProvider(this);
		}
		return getDataActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.SetPropertyAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected SetPropertyActionItemProvider setPropertyActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.SetPropertyAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createSetPropertyActionAdapter() {
		if (setPropertyActionItemProvider == null) {
			setPropertyActionItemProvider = new SetPropertyActionItemProvider(this);
		}
		return setPropertyActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.GetPropertyAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GetPropertyActionItemProvider getPropertyActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.GetPropertyAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGetPropertyActionAdapter() {
		if (getPropertyActionItemProvider == null) {
			getPropertyActionItemProvider = new GetPropertyActionItemProvider(this);
		}
		return getPropertyActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.GetRealTimeAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GetRealTimeActionItemProvider getRealTimeActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.GetRealTimeAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGetRealTimeActionAdapter() {
		if (getRealTimeActionItemProvider == null) {
			getRealTimeActionItemProvider = new GetRealTimeActionItemProvider(this);
		}
		return getRealTimeActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.GetFeatureStateAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected GetFeatureStateActionItemProvider getFeatureStateActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.GetFeatureStateAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createGetFeatureStateActionAdapter() {
		if (getFeatureStateActionItemProvider == null) {
			getFeatureStateActionItemProvider = new GetFeatureStateActionItemProvider(this);
		}
		return getFeatureStateActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.FailAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected FailActionItemProvider failActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.FailAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createFailActionAdapter() {
		if (failActionItemProvider == null) {
			failActionItemProvider = new FailActionItemProvider(this);
		}
		return failActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.StandAloneAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected StandAloneActionItemProvider standAloneActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.StandAloneAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createStandAloneActionAdapter() {
		if (standAloneActionItemProvider == null) {
			standAloneActionItemProvider = new StandAloneActionItemProvider(this);
		}
		return standAloneActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.TimedConditionAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected TimedConditionActionItemProvider timedConditionActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.TimedConditionAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createTimedConditionActionAdapter() {
		if (timedConditionActionItemProvider == null) {
			timedConditionActionItemProvider = new TimedConditionActionItemProvider(this);
		}
		return timedConditionActionItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.ActionReference} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected ActionReferenceItemProvider actionReferenceItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.ActionReference}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createActionReferenceAdapter() {
		if (actionReferenceItemProvider == null) {
			actionReferenceItemProvider = new ActionReferenceItemProvider(this);
		}
		return actionReferenceItemProvider;
	}
	/**
	 * This keeps track of the one adapter used for all {@link org.tud.inf.st.mbt.actions.TimeAction} instances.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected TimeActionItemProvider timeActionItemProvider;
	/**
	 * This creates an adapter for a {@link org.tud.inf.st.mbt.actions.TimeAction}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter createTimeActionAdapter() {
		if (timeActionItemProvider == null) {
			timeActionItemProvider = new TimeActionItemProvider(this);
		}
		return timeActionItemProvider;
	}
	/**
	 * This returns the root adapter factory that contains this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public ComposeableAdapterFactory getRootAdapterFactory() {
		return parentAdapterFactory == null ? this : parentAdapterFactory.getRootAdapterFactory();
	}
	/**
	 * This sets the composed adapter factory that contains this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void setParentAdapterFactory(ComposedAdapterFactory parentAdapterFactory) {
		this.parentAdapterFactory = parentAdapterFactory;
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean isFactoryForType(Object type) {
		return supportedTypes.contains(type) || super.isFactoryForType(type);
	}
	/**
	 * This implementation substitutes the factory itself as the key for the adapter.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Adapter adapt(Notifier notifier, Object type) {
		// Use the factory itself as the adapter key so one shared adapter serves
		// every supported view type for a given notifier.
		return super.adapt(notifier, this);
	}
	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object adapt(Object object, Object type) {
		if (isFactoryForType(type)) {
			Object adapter = super.adapt(object, type);
			// Only hand back the adapter if it actually implements the requested interface.
			if (!(type instanceof Class<?>) || (((Class<?>)type).isInstance(adapter))) {
				return adapter;
			}
		}
		return null;
	}
	/**
	 * This adds a listener.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void addListener(INotifyChangedListener notifyChangedListener) {
		changeNotifier.addListener(notifyChangedListener);
	}
	/**
	 * This removes a listener.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void removeListener(INotifyChangedListener notifyChangedListener) {
		changeNotifier.removeListener(notifyChangedListener);
	}
	/**
	 * This delegates to {@link #changeNotifier} and to {@link #parentAdapterFactory}.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void fireNotifyChanged(Notification notification) {
		changeNotifier.fireNotifyChanged(notification);
		if (parentAdapterFactory != null) {
			parentAdapterFactory.fireNotifyChanged(notification);
		}
	}
	/**
	 * This disposes all of the item providers created by this factory.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	public void dispose() {
		// A null provider was never requested and therefore needs no disposal.
		if (actionReferenceItemProvider != null) actionReferenceItemProvider.dispose();
		if (standAloneActionItemProvider != null) standAloneActionItemProvider.dispose();
		if (timedConditionActionItemProvider != null) timedConditionActionItemProvider.dispose();
		if (preGenerationSequenceItemProvider != null) preGenerationSequenceItemProvider.dispose();
		if (postGenerationSequenceItemProvider != null) postGenerationSequenceItemProvider.dispose();
		if (throwActionItemProvider != null) throwActionItemProvider.dispose();
		if (termActionItemProvider != null) termActionItemProvider.dispose();
		if (setDataActionItemProvider != null) setDataActionItemProvider.dispose();
		if (getDataActionItemProvider != null) getDataActionItemProvider.dispose();
		if (removeBagActionItemProvider != null) removeBagActionItemProvider.dispose();
		if (activateFeatureActionItemProvider != null) activateFeatureActionItemProvider.dispose();
		if (deactivateFeatureActionItemProvider != null) deactivateFeatureActionItemProvider.dispose();
		if (setPropertyActionItemProvider != null) setPropertyActionItemProvider.dispose();
		if (failActionItemProvider != null) failActionItemProvider.dispose();
		if (timeActionItemProvider != null) timeActionItemProvider.dispose();
		if (getPropertyActionItemProvider != null) getPropertyActionItemProvider.dispose();
		if (getRealTimeActionItemProvider != null) getRealTimeActionItemProvider.dispose();
		if (getFeatureStateActionItemProvider != null) getFeatureStateActionItemProvider.dispose();
	}
}
| |
/*_##########################################################################
_##
_## Copyright (C) 2013-2014 Pcap4J.org
_##
_##########################################################################
*/
package org.pcap4j.packet;
import static org.pcap4j.util.ByteArrays.*;
import org.pcap4j.packet.IcmpV6CommonPacket.IpV6NeighborDiscoveryOption;
import org.pcap4j.packet.namednumber.IpV6NeighborDiscoveryOptionType;
import org.pcap4j.util.ByteArrays;
/**
* @author Kaito Yamada
* @since pcap4j 0.9.15
*/
public final class IpV6NeighborDiscoveryMtuOption
implements IpV6NeighborDiscoveryOption {
  /*
   *  0                   1                   2                   3
   *  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |     Type      |    Length     |           Reserved            |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   * |                              MTU                              |
   * +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
   *   Type=5
   */
  /** Serialization version. */
  private static final long serialVersionUID = 4145831782727036195L;
  // Field offsets/sizes (bytes) within the 8-byte option, matching the diagram above.
  private static final int TYPE_OFFSET
    = 0;
  private static final int TYPE_SIZE
    = BYTE_SIZE_IN_BYTES;
  private static final int LENGTH_OFFSET
    = TYPE_OFFSET + TYPE_SIZE;
  private static final int LENGTH_SIZE
    = BYTE_SIZE_IN_BYTES;
  private static final int RESERVED_OFFSET
    = LENGTH_OFFSET + LENGTH_SIZE;
  private static final int RESERVED_SIZE
    = SHORT_SIZE_IN_BYTES;
  private static final int MTU_OFFSET
    = RESERVED_OFFSET + RESERVED_SIZE;
  private static final int MTU_SIZE
    = INT_SIZE_IN_BYTES;
  // Total option size: 1 + 1 + 2 + 4 = 8 bytes.
  private static final int IPV6_NEIGHBOR_DISCOVERY_MTU_OPTION_SIZE
    = MTU_OFFSET + MTU_SIZE;
  private final IpV6NeighborDiscoveryOptionType type
    = IpV6NeighborDiscoveryOptionType.MTU;
  // Raw length field; the wire value counts units of 8 octets (validated in the constructor).
  private final byte length;
  private final short reserved;
  // Stored signed; use getMtuAsLong() for the unsigned 32-bit value.
  private final int mtu;
  /**
   * A static factory method.
   * This method validates the arguments by {@link ByteArrays#validateBounds(byte[], int, int)},
   * which may throw exceptions undocumented here.
   *
   * @param rawData rawData
   * @param offset offset
   * @param length length
   * @return a new IpV6NeighborDiscoveryMtuOption object.
   * @throws IllegalRawDataException if parsing the raw data fails.
   */
  public static IpV6NeighborDiscoveryMtuOption newInstance(
    byte[] rawData, int offset, int length
  ) throws IllegalRawDataException {
    ByteArrays.validateBounds(rawData, offset, length);
    return new IpV6NeighborDiscoveryMtuOption(rawData, offset, length);
  }
  /**
   * Parsing constructor: validates total size, type field, and length field
   * (which must equal the option size in 8-octet units) before reading the
   * reserved and MTU fields.
   */
  private IpV6NeighborDiscoveryMtuOption(
    byte[] rawData, int offset, int length
  ) throws IllegalRawDataException {
    // Need all 8 bytes of the option.
    if (length < IPV6_NEIGHBOR_DISCOVERY_MTU_OPTION_SIZE) {
      StringBuilder sb = new StringBuilder(50);
      sb.append("The raw data length must be more than 7. rawData: ")
        .append(ByteArrays.toHexString(rawData, " "))
        .append(", offset: ")
        .append(offset)
        .append(", length: ")
        .append(length);
      throw new IllegalRawDataException(sb.toString());
    }
    // The type byte on the wire must be the MTU option type.
    if (rawData[TYPE_OFFSET + offset] != getType().value()) {
      StringBuilder sb = new StringBuilder(100);
      sb.append("The type must be: ")
        .append(getType().valueAsString())
        .append(" rawData: ")
        .append(ByteArrays.toHexString(rawData, " "))
        .append(", offset: ")
        .append(offset)
        .append(", length: ")
        .append(length);
      throw new IllegalRawDataException(sb.toString());
    }
    this.length = rawData[LENGTH_OFFSET + offset];
    int lengthFieldAsInt = getLengthAsInt();
    // The length field is in 8-octet units; for this fixed-size option it must be exactly 1.
    if (lengthFieldAsInt * 8 != IPV6_NEIGHBOR_DISCOVERY_MTU_OPTION_SIZE) {
      StringBuilder sb = new StringBuilder(50);
      sb.append("Illegal value in the length field: ")
        .append(lengthFieldAsInt);
      throw new IllegalRawDataException(sb.toString());
    }
    this.reserved = ByteArrays.getShort(rawData, RESERVED_OFFSET + offset);
    this.mtu = ByteArrays.getInt(rawData, MTU_OFFSET + offset);
  }
  /**
   * Builder constructor: copies the builder's fields and, if requested,
   * computes the length field from the option size instead of taking it verbatim.
   */
  private IpV6NeighborDiscoveryMtuOption(Builder builder) {
    if (builder == null) {
      StringBuilder sb = new StringBuilder();
      sb.append("builder: ").append(builder);
      throw new NullPointerException(sb.toString());
    }
    this.reserved = builder.reserved;
    this.mtu = builder.mtu;
    if (builder.correctLengthAtBuild) {
      // Derive the wire length field (8-octet units) from the actual option size.
      this.length = (byte)(length() / 8);
    }
    else {
      this.length = builder.length;
    }
  }
  @Override
  public IpV6NeighborDiscoveryOptionType getType() {
    return type;
  }
  /**
   *
   * @return length (raw signed byte as read from the wire)
   */
  public byte getLength() { return length; }
  /**
   *
   * @return length interpreted as an unsigned byte (0-255)
   */
  public int getLengthAsInt() { return 0xFF & length; }
  /**
   *
   * @return reserved
   */
  public short getReserved() { return reserved; }
  /**
   *
   * @return mtu (raw signed int as read from the wire)
   */
  public int getMtu() { return mtu; }
  /**
   *
   * @return mtu interpreted as an unsigned 32-bit value
   */
  public long getMtuAsLong() { return mtu & 0xFFFFFFFFL; }
  @Override
  public int length() { return IPV6_NEIGHBOR_DISCOVERY_MTU_OPTION_SIZE; }
  /**
   * Serializes this option back to its 8-byte wire representation.
   */
  @Override
  public byte[] getRawData() {
    byte[] rawData = new byte[length()];
    rawData[TYPE_OFFSET] = getType().value();
    rawData[LENGTH_OFFSET] = length;
    System.arraycopy(
      ByteArrays.toByteArray(reserved), 0,
      rawData, RESERVED_OFFSET, RESERVED_SIZE
    );
    System.arraycopy(
      ByteArrays.toByteArray(mtu), 0,
      rawData, MTU_OFFSET, MTU_SIZE
    );
    return rawData;
  }
  /**
   *
   * @return a new Builder object populated with this object's fields.
   */
  public Builder getBuilder() {
    return new Builder(this);
  }
  @Override
  public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("[Type: ")
      .append(getType());
    sb.append("] [Length: ")
      .append(getLengthAsInt())
      .append(" (").append(getLengthAsInt() * 8);
    sb.append(" bytes)] [Reserved: ")
      .append(reserved);
    sb.append("] [MTU: ")
      .append(getMtuAsLong());
    sb.append("]");
    return sb.toString();
  }
  @Override
  public boolean equals(Object obj) {
    if (obj == this) { return true; }
    // getClass().isInstance is an exact-class check here because this class is final.
    if (!this.getClass().isInstance(obj)) { return false; }
    IpV6NeighborDiscoveryMtuOption other = (IpV6NeighborDiscoveryMtuOption)obj;
    return
         mtu == other.mtu
      && length == other.length
      && reserved == other.reserved;
  }
  @Override
  public int hashCode() {
    int result = 17;
    result = 31 * result + length;
    result = 31 * result + reserved;
    result = 31 * result + mtu;
    return result;
  }
  /**
   * @author Kaito Yamada
   * @since pcap4j 0.9.15
   */
  public static final class Builder
  implements LengthBuilder<IpV6NeighborDiscoveryMtuOption> {
    // Mirrors of the option's mutable-at-build fields.
    private byte length;
    private short reserved;
    private int mtu;
    private boolean correctLengthAtBuild;
    /**
     *
     */
    public Builder() {}
    private Builder(IpV6NeighborDiscoveryMtuOption option) {
      this.length = option.length;
      this.reserved = option.reserved;
      this.mtu = option.mtu;
    }
    /**
     *
     * @param length length
     * @return this Builder object for method chaining.
     */
    public Builder length(byte length) {
      this.length = length;
      return this;
    }
    /**
     *
     * @param reserved reserved
     * @return this Builder object for method chaining.
     */
    public Builder reserved(short reserved) {
      this.reserved = reserved;
      return this;
    }
    /**
     *
     * @param mtu mtu
     * @return this Builder object for method chaining.
     */
    public Builder mtu(int mtu) {
      this.mtu = mtu;
      return this;
    }
    @Override
    public Builder correctLengthAtBuild(boolean correctLengthAtBuild) {
      this.correctLengthAtBuild = correctLengthAtBuild;
      return this;
    }
    @Override
    public IpV6NeighborDiscoveryMtuOption build() {
      return new IpV6NeighborDiscoveryMtuOption(this);
    }
  }
}
| |
/*
* Copyright 2021 LINE Corporation
*
* LINE Corporation licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.linecorp.armeria.server.grpc;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.common.collect.ImmutableMap.toImmutableMap;
import static com.google.common.collect.ImmutableSet.toImmutableSet;
import static java.util.Objects.requireNonNull;
import java.io.IOException;
import java.util.AbstractMap.SimpleImmutableEntry;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import java.util.StringJoiner;
import java.util.concurrent.CompletableFuture;
import java.util.stream.Collectors;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.google.api.AnnotationsProto;
import com.google.api.HttpRule;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.MoreObjects;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.protobuf.Any;
import com.google.protobuf.BoolValue;
import com.google.protobuf.BytesValue;
import com.google.protobuf.DescriptorProtos.MethodOptions;
import com.google.protobuf.Descriptors;
import com.google.protobuf.Descriptors.Descriptor;
import com.google.protobuf.Descriptors.FieldDescriptor;
import com.google.protobuf.Descriptors.FieldDescriptor.JavaType;
import com.google.protobuf.Descriptors.FileDescriptor;
import com.google.protobuf.Descriptors.MethodDescriptor;
import com.google.protobuf.Descriptors.ServiceDescriptor;
import com.google.protobuf.DoubleValue;
import com.google.protobuf.Duration;
import com.google.protobuf.FloatValue;
import com.google.protobuf.Int32Value;
import com.google.protobuf.Int64Value;
import com.google.protobuf.ListValue;
import com.google.protobuf.StringValue;
import com.google.protobuf.Struct;
import com.google.protobuf.Timestamp;
import com.google.protobuf.UInt32Value;
import com.google.protobuf.UInt64Value;
import com.google.protobuf.Value;
import com.linecorp.armeria.common.AggregatedHttpRequest;
import com.linecorp.armeria.common.HttpData;
import com.linecorp.armeria.common.HttpMethod;
import com.linecorp.armeria.common.HttpRequest;
import com.linecorp.armeria.common.HttpResponse;
import com.linecorp.armeria.common.HttpStatus;
import com.linecorp.armeria.common.MediaType;
import com.linecorp.armeria.common.QueryParams;
import com.linecorp.armeria.common.RequestHeaders;
import com.linecorp.armeria.common.RequestHeadersBuilder;
import com.linecorp.armeria.common.annotation.Nullable;
import com.linecorp.armeria.common.grpc.GrpcSerializationFormats;
import com.linecorp.armeria.common.grpc.protocol.GrpcHeaderNames;
import com.linecorp.armeria.common.logging.RequestLogProperty;
import com.linecorp.armeria.common.util.SafeCloseable;
import com.linecorp.armeria.internal.common.JacksonUtil;
import com.linecorp.armeria.internal.server.RouteUtil;
import com.linecorp.armeria.internal.server.grpc.HttpEndpointSpecification;
import com.linecorp.armeria.internal.server.grpc.HttpEndpointSpecification.Parameter;
import com.linecorp.armeria.internal.server.grpc.HttpEndpointSupport;
import com.linecorp.armeria.server.HttpStatusException;
import com.linecorp.armeria.server.Route;
import com.linecorp.armeria.server.RouteBuilder;
import com.linecorp.armeria.server.ServiceRequestContext;
import com.linecorp.armeria.server.grpc.HttpJsonTranscodingPathParser.PathSegment;
import com.linecorp.armeria.server.grpc.HttpJsonTranscodingPathParser.PathSegment.PathMappingType;
import com.linecorp.armeria.server.grpc.HttpJsonTranscodingPathParser.Stringifier;
import com.linecorp.armeria.server.grpc.HttpJsonTranscodingPathParser.VariablePathSegment;
import com.linecorp.armeria.server.grpc.HttpJsonTranscodingService.PathVariable.ValueDefinition.Type;
import io.grpc.MethodDescriptor.MethodType;
import io.grpc.ServerMethodDefinition;
import io.grpc.ServerServiceDefinition;
import io.grpc.protobuf.ProtoMethodDescriptorSupplier;
import io.grpc.protobuf.ProtoServiceDescriptorSupplier;
/**
* Converts HTTP/JSON request to gRPC request and delegates it to the {@link FramedGrpcService}.
*/
final class HttpJsonTranscodingService extends AbstractUnframedGrpcService
implements HttpEndpointSupport {
private static final Logger logger = LoggerFactory.getLogger(HttpJsonTranscodingService.class);
/**
* Creates a new {@link GrpcService} instance from the given {@code delegate}. If it is possible
* to support HTTP/JSON to gRPC transcoding, a new {@link HttpJsonTranscodingService} instance
* would be returned. Otherwise, the {@code delegate} would be returned.
*/
    static GrpcService of(GrpcService delegate, UnframedGrpcErrorHandler unframedGrpcErrorHandler) {
        requireNonNull(delegate, "delegate");
        requireNonNull(unframedGrpcErrorHandler, "unframedGrpcErrorHandler");
        final ImmutableMap.Builder<Route, TranscodingSpec> builder = ImmutableMap.builder();
        final List<ServerServiceDefinition> serviceDefinitions = delegate.services();
        for (ServerServiceDefinition serviceDefinition : serviceDefinitions) {
            // Services without a protobuf schema descriptor cannot be transcoded; skip them.
            final Descriptors.ServiceDescriptor serviceDesc = serviceDescriptor(serviceDefinition);
            if (serviceDesc == null) {
                continue;
            }
            for (ServerMethodDefinition<?, ?> methodDefinition : serviceDefinition.getMethods()) {
                final Descriptors.MethodDescriptor methodDesc = methodDescriptor(methodDefinition);
                if (methodDesc == null) {
                    continue;
                }
                // Only methods annotated with the google.api.http option get an HTTP/JSON binding.
                final MethodOptions methodOptions = methodDesc.getOptions();
                if (!methodOptions.hasExtension(AnnotationsProto.http)) {
                    continue;
                }
                final HttpRule httpRule = methodOptions.getExtension(AnnotationsProto.http);
                checkArgument(methodDefinition.getMethodDescriptor().getType() == MethodType.UNARY,
                              "Only unary methods can be configured with an HTTP/JSON endpoint: " +
                              "method=%s, httpRule=%s",
                              methodDefinition.getMethodDescriptor().getFullMethodName(), httpRule);
                // A rule whose path cannot be mapped to an Armeria Route is silently skipped.
                @Nullable
                final Entry<Route, List<PathVariable>> routeAndVariables = toRouteAndPathVariables(httpRule);
                if (routeAndVariables == null) {
                    continue;
                }
                final Route route = routeAndVariables.getKey();
                final List<PathVariable> pathVariables = routeAndVariables.getValue();
                final Map<String, Field> fields =
                        buildFields(methodDesc.getInputType(), ImmutableList.of(), ImmutableSet.of());
                // 'order' numbers the bindings of this one method: 0 for the primary rule,
                // then 1..n for its additional_bindings; it restarts for every method.
                int order = 0;
                builder.put(route, new TranscodingSpec(order++, httpRule, methodDefinition,
                                                       serviceDesc, methodDesc, fields, pathVariables));
                for (HttpRule additionalHttpRule : httpRule.getAdditionalBindingsList()) {
                    @Nullable
                    final Entry<Route, List<PathVariable>> additionalRouteAndVariables
                            = toRouteAndPathVariables(additionalHttpRule);
                    if (additionalRouteAndVariables != null) {
                        builder.put(additionalRouteAndVariables.getKey(),
                                    new TranscodingSpec(order++, additionalHttpRule, methodDefinition,
                                                        serviceDesc, methodDesc, fields,
                                                        additionalRouteAndVariables.getValue()));
                    }
                }
            }
        }
        // NOTE(review): ImmutableMap.Builder#build throws IllegalArgumentException when two
        // rules produce the same Route — presumably duplicate bindings are intended to be a
        // configuration error; confirm this is the desired failure mode.
        final Map<Route, TranscodingSpec> routeAndSpecs = builder.build();
        if (routeAndSpecs.isEmpty()) {
            // We don't need to create a new HttpJsonTranscodingService instance in this case.
            return delegate;
        }
        return new HttpJsonTranscodingService(delegate, routeAndSpecs, unframedGrpcErrorHandler);
    }
/**
 * Returns the protobuf {@link ServiceDescriptor} attached to the given {@code serviceDefinition},
 * or {@code null} if the service has no protobuf schema descriptor.
 */
@Nullable
private static ServiceDescriptor serviceDescriptor(ServerServiceDefinition serviceDefinition) {
    final Object schema = serviceDefinition.getServiceDescriptor().getSchemaDescriptor();
    return schema instanceof ProtoServiceDescriptorSupplier ?
           ((ProtoServiceDescriptorSupplier) schema).getServiceDescriptor() : null;
}
/**
 * Returns the protobuf {@link MethodDescriptor} attached to the given {@code methodDefinition},
 * or {@code null} if the method has no protobuf schema descriptor.
 */
@Nullable
private static MethodDescriptor methodDescriptor(ServerMethodDefinition<?, ?> methodDefinition) {
    final Object schema = methodDefinition.getMethodDescriptor().getSchemaDescriptor();
    return schema instanceof ProtoMethodDescriptorSupplier ?
           ((ProtoMethodDescriptorSupplier) schema).getMethodDescriptor() : null;
}
/**
 * Converts the given {@link HttpRule} into an Armeria {@link Route} and the list of
 * {@link PathVariable}s extracted from its path template. Returns {@code null} for
 * unsupported patterns (e.g. {@code custom}).
 */
@VisibleForTesting
@Nullable
static Entry<Route, List<PathVariable>> toRouteAndPathVariables(HttpRule httpRule) {
    final RouteBuilder builder = Route.builder();
    final String path;
    // One HTTP method per rule; the pattern case also carries the path template.
    switch (httpRule.getPatternCase()) {
        case GET:
            builder.methods(HttpMethod.GET);
            path = httpRule.getGet();
            break;
        case PUT:
            builder.methods(HttpMethod.PUT);
            path = httpRule.getPut();
            break;
        case POST:
            builder.methods(HttpMethod.POST);
            path = httpRule.getPost();
            break;
        case DELETE:
            builder.methods(HttpMethod.DELETE);
            path = httpRule.getDelete();
            break;
        case PATCH:
            builder.methods(HttpMethod.PATCH);
            path = httpRule.getPatch();
            break;
        case CUSTOM:
        default:
            logger.warn("Ignoring unsupported route pattern: pattern={}, httpRule={}",
                        httpRule.getPatternCase(), httpRule);
            return null;
    }
    // Check whether the path is Armeria-native.
    if (path.startsWith(RouteUtil.EXACT) ||
        path.startsWith(RouteUtil.PREFIX) ||
        path.startsWith(RouteUtil.GLOB) ||
        path.startsWith(RouteUtil.REGEX)) {
        final Route route = builder.path(path).build();
        // Armeria-native paths carry no gRPC-style variable definitions, so each route
        // parameter simply references itself by name.
        final List<PathVariable> vars =
                route.paramNames().stream()
                     .map(name -> new PathVariable(null, name,
                                                   ImmutableList.of(
                                                           new PathVariable.ValueDefinition(Type.REFERENCE,
                                                                                            name))))
                     .collect(toImmutableList());
        return new SimpleImmutableEntry<>(route, vars);
    }
    // Otherwise, parse the gRPC HTTP path template (e.g. "/v1/{name=shelves/*}").
    final List<PathSegment> segments = HttpJsonTranscodingPathParser.parse(path);
    // Prefer a parameterized route; fall back to a glob route if any segment requires it.
    final PathMappingType pathMappingType =
            segments.stream().allMatch(segment -> segment.support(PathMappingType.PARAMETERIZED)) ?
            PathMappingType.PARAMETERIZED : PathMappingType.GLOB;
    if (pathMappingType == PathMappingType.PARAMETERIZED) {
        builder.path(Stringifier.asParameterizedPath(segments, true));
    } else {
        builder.glob(Stringifier.asGlobPath(segments, true));
    }
    return new SimpleImmutableEntry<>(builder.build(), PathVariable.from(segments, pathMappingType));
}
/**
 * Recursively flattens the fields of the given message {@link Descriptor} into a map keyed by
 * the dot-separated field path (e.g. {@code "parent.child.name"}). {@code visitedTypes} holds
 * the message types currently being expanded so that recursive type definitions are detected
 * and collapsed into a single {@code JavaType.MESSAGE} entry.
 */
private static Map<String, Field> buildFields(Descriptor desc,
                                              List<String> parentNames,
                                              Set<Descriptor> visitedTypes) {
    final StringJoiner namePrefixJoiner = new StringJoiner(".");
    parentNames.forEach(namePrefixJoiner::add);
    final String namePrefix = namePrefixJoiner.length() == 0 ? "" : namePrefixJoiner.toString() + '.';
    final ImmutableMap.Builder<String, Field> builder = ImmutableMap.builder();
    desc.getFields().forEach(field -> {
        final JavaType type = field.getJavaType();
        switch (type) {
            case INT:
            case LONG:
            case FLOAT:
            case DOUBLE:
            case BOOLEAN:
            case STRING:
            case BYTE_STRING:
            case ENUM:
                // Use field name which is specified in proto file.
                builder.put(namePrefix + field.getName(),
                            new Field(field, parentNames, field.getJavaType()));
                break;
            case MESSAGE:
                // Well-known wrapper types (Timestamp, Int32Value, ...) map onto a scalar type.
                @Nullable
                final JavaType wellKnownFieldType = getJavaTypeForWellKnownTypes(field);
                if (wellKnownFieldType != null) {
                    builder.put(namePrefix + field.getName(),
                                new Field(field, parentNames, wellKnownFieldType));
                    break;
                }
                if (visitedTypes.contains(field.getMessageType())) {
                    // Found recursion. No more analysis for this type.
                    // Raise an exception in order to mark the root parameter as JavaType.MESSAGE.
                    throw new RecursiveTypeException(field.getMessageType());
                }
                // Resolve the field's message descriptor: first among the nested types, ...
                @Nullable
                Descriptor typeDesc =
                        desc.getNestedTypes().stream()
                            .filter(d -> d.getFullName().equals(field.getMessageType().getFullName()))
                            .findFirst().orElse(null);
                if (typeDesc == null) {
                    // From the proto file.
                    typeDesc = findTypeDescriptor(desc.getFile(), field);
                }
                if (typeDesc == null) {
                    // According to the Language guide, the public import functionality is not available
                    // in Java. We will try to find dependencies only with "import" keyword.
                    // https://developers.google.com/protocol-buffers/docs/proto3#importing_definitions
                    typeDesc = desc.getFile().getDependencies().stream()
                                   .map(fd -> findTypeDescriptor(fd, field))
                                   .filter(Objects::nonNull).findFirst().orElse(null);
                }
                checkState(typeDesc != null,
                           "Descriptor for the type '%s' does not exist.",
                           field.getMessageType().getFullName());
                try {
                    // Recurse, extending the name path and marking this message type as visited.
                    builder.putAll(buildFields(typeDesc,
                                               ImmutableList.<String>builder()
                                                            .addAll(parentNames)
                                                            .add(field.getName())
                                                            .build(),
                                               ImmutableSet.<Descriptor>builder()
                                                           .addAll(visitedTypes)
                                                           .add(field.getMessageType())
                                                           .build()));
                } catch (RecursiveTypeException e) {
                    if (e.recursiveTypeDescriptor() != field.getMessageType()) {
                        // Re-throw the exception if it is not caused by my field.
                        throw e;
                    }
                    // Collapse the recursive field into a single MESSAGE-typed entry.
                    builder.put(namePrefix + field.getName(),
                                new Field(field, parentNames, JavaType.MESSAGE));
                }
                break;
        }
    });
    return builder.build();
}
/**
 * Returns the {@link JavaType} that a field of a protobuf well-known type should be treated
 * as when mapping HTTP parameters, or {@code null} if the field's message type is not one of
 * the recognized well-known types.
 */
@Nullable
private static JavaType getJavaTypeForWellKnownTypes(FieldDescriptor fd) {
    // MapField can be sent only via HTTP body.
    if (fd.isMapField()) {
        return JavaType.MESSAGE;
    }

    final Descriptor messageType = fd.getMessageType();
    final String fullName = messageType.getFullName();

    // Timestamp and Duration are rendered as JSON strings.
    final boolean timeLike =
            Timestamp.getDescriptor().getFullName().equals(fullName) ||
            Duration.getDescriptor().getFullName().equals(fullName);
    if (timeLike) {
        return JavaType.STRING;
    }

    // The scalar wrapper types unwrap to the Java type of their single 'value' field.
    final boolean wrapper =
            DoubleValue.getDescriptor().getFullName().equals(fullName) ||
            FloatValue.getDescriptor().getFullName().equals(fullName) ||
            Int64Value.getDescriptor().getFullName().equals(fullName) ||
            UInt64Value.getDescriptor().getFullName().equals(fullName) ||
            Int32Value.getDescriptor().getFullName().equals(fullName) ||
            UInt32Value.getDescriptor().getFullName().equals(fullName) ||
            BoolValue.getDescriptor().getFullName().equals(fullName) ||
            StringValue.getDescriptor().getFullName().equals(fullName) ||
            BytesValue.getDescriptor().getFullName().equals(fullName);
    if (wrapper) {
        // "value" field. Wrappers must have one field.
        assert messageType.getFields().size() == 1 : "Wrappers must have one 'value' field.";
        return messageType.getFields().get(0).getJavaType();
    }

    // The messages of the following types can be sent only via HTTP body.
    final boolean bodyOnly =
            Struct.getDescriptor().getFullName().equals(fullName) ||
            ListValue.getDescriptor().getFullName().equals(fullName) ||
            Value.getDescriptor().getFullName().equals(fullName) ||
            // google.protobuf.Any message has the following two fields:
            //   string type_url = 1;
            //   bytes value = 2;
            // which look acceptable as HTTP GET parameters, but the client must send the message like below:
            // {
            //   "@type": "type.googleapis.com/google.protobuf.Duration",
            //   "value": "1.212s"
            // }
            // There's no specifications about rewriting parameter names, so we will handle
            // google.protobuf.Any message only when it is sent via HTTP body.
            Any.getDescriptor().getFullName().equals(fullName);
    if (bodyOnly) {
        return JavaType.MESSAGE;
    }
    return null;
}
/**
 * Looks up the descriptor of the given field's message type in the specified {@code file},
 * or returns {@code null} when the file belongs to a different protobuf package.
 */
@Nullable
private static Descriptor findTypeDescriptor(FileDescriptor file, FieldDescriptor field) {
    final Descriptor target = field.getMessageType();
    if (file.getPackage().equals(target.getFile().getPackage())) {
        return file.findMessageTypeByName(target.getName());
    }
    return null;
}
// Shared, thread-safe JSON mapper used to build the transcoded gRPC JSON request body.
private static final ObjectMapper mapper = JacksonUtil.newDefaultObjectMapper();

// Transcoding metadata keyed by the route that triggers it.
private final Map<Route, TranscodingSpec> routeAndSpecs;
// Union of the delegate's routes and the transcoding routes.
private final Set<Route> routes;

private HttpJsonTranscodingService(GrpcService delegate,
                                   Map<Route, TranscodingSpec> routeAndSpecs,
                                   UnframedGrpcErrorHandler unframedGrpcErrorHandler) {
    super(delegate, unframedGrpcErrorHandler);
    this.routeAndSpecs = routeAndSpecs;
    routes = ImmutableSet.<Route>builder()
                         .addAll(delegate.routes())
                         .addAll(routeAndSpecs.keySet())
                         .build();
}
/**
 * Returns the {@link HttpEndpointSpecification} of the transcoding endpoint bound to the given
 * {@link Route}, or {@code null} if the route is not handled by this service.
 */
@Nullable
@Override
public HttpEndpointSpecification httpEndpointSpecification(Route route) {
    requireNonNull(route, "route");
    final TranscodingSpec spec = routeAndSpecs.get(route);
    if (spec == null) {
        // Not a transcoding route (e.g. one of the delegate's own routes).
        return null;
    }
    final Set<String> paramNames = spec.pathVariables.stream().map(PathVariable::name)
                                                     .collect(toImmutableSet());
    // Expose each flattened request field as a parameter with its unwrapped type.
    final Map<String, Parameter> parameterTypes =
            spec.fields.entrySet().stream().collect(
                    toImmutableMap(Entry::getKey,
                                   fieldEntry -> new Parameter(fieldEntry.getValue().type(),
                                                               fieldEntry.getValue().isRepeated())));
    return new HttpEndpointSpecification(spec.order,
                                         route,
                                         paramNames,
                                         spec.serviceDescriptor,
                                         spec.methodDescriptor,
                                         parameterTypes,
                                         spec.httpRule);
}
/**
 * Returns the {@link Route}s which are supported by this service and the {@code delegate}.
 */
@Override
public Set<Route> routes() {
    // Precomputed in the constructor; immutable.
    return routes;
}
/**
 * Serves the request: routes matching a transcoding endpoint are converted to gRPC,
 * everything else is delegated untouched.
 */
@Override
public HttpResponse serve(ServiceRequestContext ctx, HttpRequest req) throws Exception {
    final TranscodingSpec spec = routeAndSpecs.get(ctx.config().route());
    return spec == null ? unwrap().serve(ctx, req) : serve0(ctx, req, spec);
}
/**
 * Handles an HTTP/JSON request destined for a transcoding endpoint: aggregates the request,
 * converts it to a gRPC JSON payload and frames it for the delegate gRPC service.
 */
private HttpResponse serve0(ServiceRequestContext ctx, HttpRequest req,
                            TranscodingSpec spec) throws Exception {
    final RequestHeaders clientHeaders = req.headers();
    final RequestHeadersBuilder grpcHeaders = clientHeaders.toBuilder();
    if (grpcHeaders.get(GrpcHeaderNames.GRPC_ENCODING) != null) {
        return HttpResponse.of(HttpStatus.UNSUPPORTED_MEDIA_TYPE,
                               MediaType.PLAIN_TEXT_UTF_8,
                               "gRPC encoding is not supported for non-framed requests.");
    }
    // gRPC requests are always POST with a JSON serialization format downstream.
    grpcHeaders.method(HttpMethod.POST)
               .contentType(GrpcSerializationFormats.JSON.mediaType());
    // All clients support no encoding, and we don't support gRPC encoding for non-framed requests, so just
    // clear the header if it's present.
    grpcHeaders.remove(GrpcHeaderNames.GRPC_ACCEPT_ENCODING);
    ctx.logBuilder().defer(RequestLogProperty.REQUEST_CONTENT,
                           RequestLogProperty.RESPONSE_CONTENT);
    final CompletableFuture<HttpResponse> responseFuture = new CompletableFuture<>();
    // Aggregate the body on the context's event loop, then frame and delegate.
    req.aggregate(ctx.eventLoop()).handle((clientRequest, t) -> {
        try (SafeCloseable ignore = ctx.push()) {
            if (t != null) {
                responseFuture.completeExceptionally(t);
            } else {
                try {
                    ctx.setAttr(FramedGrpcService.RESOLVED_GRPC_METHOD, spec.method);
                    frameAndServe(unwrap(), ctx, grpcHeaders.build(),
                                  convertToJson(ctx, clientRequest, spec),
                                  responseFuture);
                } catch (IllegalArgumentException iae) {
                    // Bad parameter values (e.g. non-numeric text for an int field) -> 400.
                    responseFuture.completeExceptionally(
                            HttpStatusException.of(HttpStatus.BAD_REQUEST, iae));
                } catch (Exception e) {
                    responseFuture.completeExceptionally(e);
                }
            }
        }
        return null;
    });
    return HttpResponse.from(responseFuture);
}
/**
 * Converts the HTTP request to gRPC JSON with the {@link TranscodingSpec}.
 * Path variables and query parameters are merged into the JSON body according to the
 * {@code body} mapping of the {@link HttpRule}.
 */
private static HttpData convertToJson(ServiceRequestContext ctx,
                                      AggregatedHttpRequest request,
                                      TranscodingSpec spec) throws IOException {
    try {
        switch (request.method()) {
            case GET:
                // GET has no body; all fields come from path variables and query parameters.
                return setParametersAndWriteJson(mapper.createObjectNode(), ctx, spec);
            case PUT:
            case POST:
            case PATCH:
            case DELETE:
                final String bodyMapping = spec.httpRule.getBody();
                // Put the body into the json if 'body: "*"' is specified.
                if ("*".equals(bodyMapping)) {
                    @Nullable
                    final JsonNode body = getBodyContent(request);
                    final ObjectNode root;
                    if (body instanceof ObjectNode) {
                        root = (ObjectNode) body;
                    } else {
                        // Missing or non-object body; start from an empty object.
                        root = mapper.createObjectNode();
                    }
                    return setParametersAndWriteJson(root, ctx, spec);
                }
                // Put the body into the json under "name" field if 'body: "name"' is specified.
                final ObjectNode root = mapper.createObjectNode();
                if (!Strings.isNullOrEmpty(bodyMapping)) {
                    ObjectNode current = root;
                    // Create intermediate objects for a dotted mapping like 'body: "a.b.c"'.
                    final String[] nameParts = bodyMapping.split("\\.");
                    for (int i = 0; i < nameParts.length - 1; i++) {
                        current = current.putObject(nameParts[i]);
                    }
                    @Nullable
                    final JsonNode body = getBodyContent(request);
                    if (body != null) {
                        current.set(nameParts[nameParts.length - 1], body);
                    } else {
                        current.putNull(nameParts[nameParts.length - 1]);
                    }
                }
                return setParametersAndWriteJson(root, ctx, spec);
            default:
                throw HttpStatusException.of(HttpStatus.METHOD_NOT_ALLOWED);
        }
    } finally {
        // Always release the aggregated request body.
        request.content().close();
    }
}
/**
 * Parses the request body as JSON. Returns {@code null} when the content type is not JSON
 * or the body is not a valid JSON document.
 */
@Nullable
private static JsonNode getBodyContent(AggregatedHttpRequest request) {
    final MediaType contentType = request.contentType();
    if (contentType != null && contentType.isJson()) {
        try {
            return mapper.readTree(request.contentUtf8());
        } catch (JsonProcessingException ignored) {
            // Malformed JSON; treated the same as an absent body.
        }
    }
    return null;
}
/**
 * Resolves the value of each {@link PathVariable} from the request context and returns a
 * variable-name to value map. A variable composed of several segments joins them with '/'.
 */
@VisibleForTesting
static Map<String, String> populatePathVariables(ServiceRequestContext ctx,
                                                 List<PathVariable> pathVariables) {
    return pathVariables.stream().map(var -> {
        final String value =
                var.values().stream()
                   .map(def -> {
                       if (def.type == Type.REFERENCE) {
                           // Pull the value out of the matched route parameter.
                           return ctx.pathParam(def.value);
                       } else {
                           // Literal segment embedded in the variable definition.
                           return def.value;
                       }
                   }).collect(Collectors.joining("/"));
        return new SimpleImmutableEntry<>(var.name(), value);
    }).collect(toImmutableMap(Entry::getKey, Entry::getValue));
}
/**
 * Merges resolved path variables and query parameters into {@code root} and serializes the
 * result as the JSON payload of the gRPC request.
 */
private static HttpData setParametersAndWriteJson(ObjectNode root,
                                                  ServiceRequestContext ctx,
                                                  TranscodingSpec spec) throws JsonProcessingException {
    // Generate path variable name/value map.
    final Map<String, String> resolvedPathVars = populatePathVariables(ctx, spec.pathVariables);
    setParametersToNode(root, resolvedPathVars.entrySet(), spec);
    if (ctx.query() != null) {
        // Query parameters may also map onto request message fields.
        setParametersToNode(root, QueryParams.fromQueryString(ctx.query()), spec);
    }
    return HttpData.wrap(mapper.writeValueAsBytes(root));
}
/**
 * Writes each name/value parameter into {@code root} at the location described by the matching
 * {@link Field} (creating intermediate objects for nested fields). Unknown parameter names are
 * ignored; MESSAGE-typed fields cannot be set from a parameter and are rejected.
 *
 * @throws IllegalArgumentException if a parameter targets a MESSAGE field or the existing body
 *         structure conflicts with the field path (non-object / non-array node encountered)
 */
private static void setParametersToNode(ObjectNode root,
                                        Iterable<Entry<String, String>> parameters,
                                        TranscodingSpec spec) {
    for (Map.Entry<String, String> entry : parameters) {
        final Field field = spec.fields.get(entry.getKey());
        if (field == null) {
            // Ignore unknown parameters.
            continue;
        }
        if (field.javaType == JavaType.MESSAGE) {
            throw new IllegalArgumentException(
                    "Unsupported message type: " + field.descriptor.getFullName());
        }
        // Walk (or build) the chain of parent objects for a nested field path.
        ObjectNode currentNode = root;
        for (String parentName : field.parentNames) {
            final JsonNode node = currentNode.get(parentName);
            if (node != null) {
                // It should be an ObjectNode but it may not if a user sent a wrong JSON document
                // in the HTTP body with HTTP POST, PUT, PATCH or DELETE methods.
                checkArgument(node.isObject(), "Invalid request body (must be a JSON object)");
                currentNode = (ObjectNode) node;
            } else {
                currentNode = currentNode.putObject(parentName);
            }
        }
        // If the field has a 'repeated' label, we should treat it as a JSON array node.
        if (field.isRepeated()) {
            final ArrayNode arrayNode;
            final JsonNode node = currentNode.get(field.name());
            if (node != null) {
                // It should be an ArrayNode but it may not if a user sent a wrong JSON document
                // in the HTTP body with HTTP POST, PUT, PATCH or DELETE methods.
                checkArgument(node.isArray(), "Invalid request body (must be a JSON array)");
                arrayNode = (ArrayNode) node;
            } else {
                arrayNode = currentNode.putArray(field.name());
            }
            // If a request has multiple values for a query parameter like 'param=foo&param=bar&param=baz',
            // the following JSON would be generated.
            // { "param": ["foo", "bar", "baz"] }
            setValueToArrayNode(arrayNode, field, entry.getValue());
        } else {
            setValueToObjectNode(currentNode, field, entry.getValue());
        }
    }
}
/**
 * Appends the string {@code value} to {@code node}, parsed according to the field's type.
 *
 * @throws NumberFormatException if the value cannot be parsed as the numeric target type
 */
private static void setValueToArrayNode(ArrayNode node, Field field, String value) {
    switch (field.type()) {
        case INT:
            node.add(Integer.parseInt(value));
            break;
        case LONG:
            node.add(Long.parseLong(value));
            break;
        case FLOAT:
            node.add(Float.parseFloat(value));
            break;
        case DOUBLE:
            node.add(Double.parseDouble(value));
            break;
        case BOOLEAN:
            node.add(Boolean.parseBoolean(value));
            break;
        case STRING:
        case BYTE_STRING:
        case ENUM:
            node.add(value);
            break;
        default:
            // MESSAGE fields are rejected by setParametersToNode before reaching here;
            // fail loudly instead of silently dropping the value if that invariant breaks.
            throw new IllegalStateException("Unexpected field type: " + field.type());
    }
}
/**
 * Sets the string {@code value} on {@code node} under the field's JSON name, parsed according
 * to the field's type.
 *
 * @throws NumberFormatException if the value cannot be parsed as the numeric target type
 */
private static void setValueToObjectNode(ObjectNode node, Field field, String value) {
    switch (field.type()) {
        case INT:
            node.put(field.name(), Integer.parseInt(value));
            break;
        case LONG:
            node.put(field.name(), Long.parseLong(value));
            break;
        case FLOAT:
            node.put(field.name(), Float.parseFloat(value));
            break;
        case DOUBLE:
            node.put(field.name(), Double.parseDouble(value));
            break;
        case BOOLEAN:
            node.put(field.name(), Boolean.parseBoolean(value));
            break;
        case STRING:
        case BYTE_STRING:
        case ENUM:
            node.put(field.name(), value);
            break;
        default:
            // MESSAGE fields are rejected by setParametersToNode before reaching here;
            // fail loudly instead of silently dropping the value if that invariant breaks.
            throw new IllegalStateException("Unexpected field type: " + field.type());
    }
}
/**
 * Details of HTTP/JSON to gRPC transcoding.
 */
static final class TranscodingSpec {
    // Ordering of this binding within its method: 0 for the primary binding,
    // then 1, 2, ... for additional bindings.
    private final int order;
    // The 'google.api.http' rule this spec was built from.
    private final HttpRule httpRule;
    // The gRPC method to which transcoded requests are dispatched.
    private final ServerMethodDefinition<?, ?> method;
    private final Descriptors.ServiceDescriptor serviceDescriptor;
    private final Descriptors.MethodDescriptor methodDescriptor;
    // Flattened request message fields keyed by dot-separated field path.
    private final Map<String, Field> fields;
    // Variables extracted from the rule's path template.
    private final List<PathVariable> pathVariables;

    private TranscodingSpec(int order,
                            HttpRule httpRule,
                            ServerMethodDefinition<?, ?> method,
                            ServiceDescriptor serviceDescriptor,
                            MethodDescriptor methodDescriptor,
                            Map<String, Field> fields,
                            List<PathVariable> pathVariables) {
        this.order = order;
        this.httpRule = httpRule;
        this.method = method;
        this.serviceDescriptor = serviceDescriptor;
        this.methodDescriptor = methodDescriptor;
        this.fields = fields;
        this.pathVariables = pathVariables;
    }
}
/**
 * gRPC field definition.
 */
static final class Field {
    private final FieldDescriptor descriptor;
    // Names of the enclosing message fields, outermost first.
    private final List<String> parentNames;
    // The effective Java type; well-known wrapper types are unwrapped to their scalar type.
    private final JavaType javaType;

    private Field(FieldDescriptor descriptor, List<String> parentNames, JavaType javaType) {
        this.descriptor = descriptor;
        this.parentNames = parentNames;
        this.javaType = javaType;
    }

    JavaType type() {
        return javaType;
    }

    // The JSON (camelCase) name used when writing the transcoded request body.
    String name() {
        return descriptor.getJsonName();
    }

    boolean isRepeated() {
        return descriptor.isRepeated();
    }
}
/**
 * A path variable defined in the path of {@code google.api.http} option.
 */
static final class PathVariable {
    /**
     * Collects {@link PathVariable}s from the parsed {@link PathSegment}s.
     */
    static List<PathVariable> from(List<PathSegment> segments,
                                   PathSegment.PathMappingType type) {
        return segments.stream()
                       .filter(VariablePathSegment.class::isInstance)
                       .flatMap(segment -> resolvePathVariables(null, (VariablePathSegment) segment, type)
                               .stream())
                       .collect(toImmutableList());
    }

    /**
     * Recursively resolves the given variable segment (and any variables nested inside it)
     * into {@link PathVariable}s, each carrying the value definitions needed to reconstruct
     * its value from the matched route.
     */
    private static List<PathVariable> resolvePathVariables(@Nullable String parent,
                                                           VariablePathSegment var,
                                                           PathSegment.PathMappingType type) {
        final ImmutableList.Builder<PathVariable> pathVariables = ImmutableList.builder();
        final ImmutableList.Builder<ValueDefinition> valueDefinitions = ImmutableList.builder();
        var.valueSegments().forEach(segment -> {
            if (segment instanceof VariablePathSegment) {
                final List<PathVariable> children =
                        resolvePathVariables(var.fieldPath(), (VariablePathSegment) segment, type);
                // Flatten value definitions which include the way how to get the value of the variable.
                // Example:
                //   - original path: "/v1/hello/{name=foo/{age=*}/{country=*}}"
                //   - parsed path: "/v1/hello/foo/:age/:country"
                //   - variables:
                //     - name: foo, :age, :country
                //     - age: :age
                //     - country: :country
                children.stream()
                        .filter(child -> var.fieldPath().equals(child.parent()))
                        .forEach(child -> valueDefinitions.addAll(child.values()));
                pathVariables.addAll(children);
            } else {
                @Nullable
                final String v = segment.pathVariable(type);
                if (v != null) {
                    // Matched route parameter; resolved from the request path at runtime.
                    valueDefinitions.add(new ValueDefinition(Type.REFERENCE, v));
                } else {
                    // Fixed text inside the variable; used verbatim.
                    valueDefinitions.add(new ValueDefinition(Type.LITERAL, segment.segmentString(type)));
                }
            }
        });
        return pathVariables.add(new PathVariable(parent, var.fieldPath(), valueDefinitions.build()))
                            .build();
    }

    // Field path of the enclosing variable, or null for a top-level variable.
    @Nullable
    private final String parent;
    private final String name;
    // Ordered definitions which, joined with '/', produce this variable's value.
    private final List<ValueDefinition> values;

    PathVariable(@Nullable String parent,
                 String name,
                 List<ValueDefinition> values) {
        this.parent = parent;
        this.name = requireNonNull(name, "name");
        this.values = requireNonNull(values, "values");
    }

    @Nullable
    String parent() {
        return parent;
    }

    String name() {
        return name;
    }

    List<ValueDefinition> values() {
        return values;
    }

    @Override
    public String toString() {
        return MoreObjects.toStringHelper(this)
                          .add("parent", parent)
                          .add("name", name)
                          .add("values", values)
                          .toString();
    }

    /** One element of a variable's value: either fixed text or a route-parameter reference. */
    static final class ValueDefinition {
        private final Type type;
        private final String value;

        ValueDefinition(Type type, String value) {
            this.type = requireNonNull(type, "type");
            this.value = requireNonNull(value, "value");
        }

        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this)
                              .add("type", type)
                              .add("value", value)
                              .toString();
        }

        enum Type {
            /**
             * Uses the {@code value} as a literal.
             */
            LITERAL,
            /**
             * Needs to get the value of {@link ServiceRequestContext#pathParam(String)} with
             * the {@code value}.
             */
            REFERENCE
        }
    }
}
/**
 * Notifies that a recursively nesting type exists.
 */
private static class RecursiveTypeException extends IllegalArgumentException {
    private static final long serialVersionUID = -6764357154559606786L;

    // The message type that was encountered again while it was still being expanded.
    private final Descriptor recursiveTypeDescriptor;

    RecursiveTypeException(Descriptor recursiveTypeDescriptor) {
        this.recursiveTypeDescriptor = recursiveTypeDescriptor;
    }

    Descriptor recursiveTypeDescriptor() {
        return recursiveTypeDescriptor;
    }

    // Used purely for control flow inside buildFields; the stack trace is never consumed,
    // so skip the expensive fill-in.
    @Override
    public Throwable fillInStackTrace() {
        return this;
    }
}
}
| |
package org.apache.lucene.spatial.prefix;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.spatial4j.core.context.SpatialContext;
import com.spatial4j.core.shape.Point;
import com.spatial4j.core.shape.Rectangle;
import com.spatial4j.core.shape.Shape;
import com.spatial4j.core.shape.SpatialRelation;
import org.apache.lucene.index.IndexReaderContext;
import org.apache.lucene.spatial.prefix.tree.Cell;
import org.apache.lucene.spatial.prefix.tree.CellIterator;
import org.apache.lucene.spatial.prefix.tree.SpatialPrefixTree;
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Bits;
/**
* Computes spatial facets in two dimensions as a grid of numbers. The data is often visualized as a so-called
* "heatmap", hence the name.
*
* @lucene.experimental
*/
public class HeatmapFacetCounter {
//TODO where should this code live? It could go to PrefixTreeFacetCounter, or maybe here in its own class is fine.

/** Maximum number of supported rows (or columns).
 * Chosen so that {@code rows * columns} can never exceed the maximum array length. */
public static final int MAX_ROWS_OR_COLUMNS = (int) Math.sqrt(ArrayUtil.MAX_ARRAY_LENGTH);
/** Response structure */
public static class Heatmap {
    public final int columns;
    public final int rows;
    public final int[] counts;//in order of 1st column (all rows) then 2nd column (all rows) etc.
    public final Rectangle region;

    public Heatmap(int columns, int rows, Rectangle region) {
        this.columns = columns;
        this.rows = rows;
        this.counts = new int[columns * rows];
        this.region = region;
    }

    /** Returns the count at column {@code x}, row {@code y} (column-major storage). */
    public int getCount(int x, int y) {
        return counts[x * rows + y];
    }

    @Override
    public String toString() {
        return "Heatmap{" + columns + "x" + rows + " " + region + '}';
    }
}
/**
 * Calculates spatial 2D facets (aggregated counts) in a grid, sometimes called a heatmap.
 * Facet computation is implemented by navigating the underlying indexed terms efficiently. If you don't know exactly
 * what facetLevel to go to for a given input box but you have some sense of how many cells there should be relative
 * to the size of the shape, then consider using the logic that {@link org.apache.lucene.spatial.prefix.PrefixTreeStrategy}
 * uses when approximating what level to go to when indexing a shape given a distErrPct.
 *
 * @param context the IndexReader's context
 * @param topAcceptDocs a Bits to limit counted docs. If null, live docs are counted.
 * @param inputShape the shape to gather grid squares for; typically a {@link Rectangle}.
 *                   The <em>actual</em> heatmap area will usually be larger since the cells on the edge that overlap
 *                   are returned. We always return a rectangle of integers even if the inputShape isn't a rectangle
 *                   -- the non-intersecting cells will all be 0.
 *                   If null is given, the entire world is assumed.
 * @param facetLevel the target depth (detail) of cells.
 * @param maxCells the maximum number of cells to return. If the cells exceed this count, an
 *                 {@link IllegalArgumentException} is thrown.
 */
public static Heatmap calcFacets(PrefixTreeStrategy strategy, IndexReaderContext context, Bits topAcceptDocs,
                                 Shape inputShape, final int facetLevel, int maxCells) throws IOException {
    if (maxCells > (MAX_ROWS_OR_COLUMNS * MAX_ROWS_OR_COLUMNS)) {
        // FIX: the message previously printed MAX_ROWS_OR_COLUMNS although the check is against its square.
        throw new IllegalArgumentException(
            "maxCells (" + maxCells + ") should be <= " + (MAX_ROWS_OR_COLUMNS * MAX_ROWS_OR_COLUMNS));
    }
    if (inputShape == null) {
        inputShape = strategy.getSpatialContext().getWorldBounds();
    }
    final Rectangle inputRect = inputShape.getBoundingBox();
    //First get the rect of the cell at the bottom-left at depth facetLevel
    final SpatialPrefixTree grid = strategy.getGrid();
    final SpatialContext ctx = grid.getSpatialContext();
    final Point cornerPt = ctx.makePoint(inputRect.getMinX(), inputRect.getMinY());
    final CellIterator cellIterator = grid.getTreeCellIterator(cornerPt, facetLevel);
    Cell cornerCell = null;
    while (cellIterator.hasNext()) {
        cornerCell = cellIterator.next();
    }
    assert cornerCell != null && cornerCell.getLevel() == facetLevel : "Cell not at target level: " + cornerCell;
    final Rectangle cornerRect = (Rectangle) cornerCell.getShape();
    assert cornerRect.hasArea();
    //Now calculate the number of columns and rows necessary to cover the inputRect
    double heatMinX = cornerRect.getMinX();//note: we might change this below...
    final double cellWidth = cornerRect.getWidth();
    final Rectangle worldRect = ctx.getWorldBounds();
    final int columns = calcRowsOrCols(cellWidth, heatMinX, inputRect.getWidth(), inputRect.getMinX(), worldRect.getWidth());
    final double heatMinY = cornerRect.getMinY();
    final double cellHeight = cornerRect.getHeight();
    final int rows = calcRowsOrCols(cellHeight, heatMinY, inputRect.getHeight(), inputRect.getMinY(), worldRect.getHeight());
    assert rows > 0 && columns > 0;
    if (columns > MAX_ROWS_OR_COLUMNS || rows > MAX_ROWS_OR_COLUMNS || columns * rows > maxCells) {
        throw new IllegalArgumentException(
            "Too many cells (" + columns + " x " + rows + ") for level " + facetLevel + " shape " + inputRect);
    }

    //Create resulting heatmap bounding rectangle & Heatmap object.
    final double halfCellWidth = cellWidth / 2.0;
    // if X world-wraps, use world bounds' range
    if (columns * cellWidth + halfCellWidth > worldRect.getWidth()) {
        heatMinX = worldRect.getMinX();
    }
    double heatMaxX = heatMinX + columns * cellWidth;
    if (Math.abs(heatMaxX - worldRect.getMaxX()) < halfCellWidth) {//numeric conditioning issue
        heatMaxX = worldRect.getMaxX();
    } else if (heatMaxX > worldRect.getMaxX()) {//wraps dateline (won't happen if !geo)
        heatMaxX = heatMaxX - worldRect.getMaxX() + worldRect.getMinX();
    }
    final double halfCellHeight = cellHeight / 2.0;
    double heatMaxY = heatMinY + rows * cellHeight;
    if (Math.abs(heatMaxY - worldRect.getMaxY()) < halfCellHeight) {//numeric conditioning issue
        heatMaxY = worldRect.getMaxY();
    }

    final Heatmap heatmap = new Heatmap(columns, rows, ctx.makeRectangle(heatMinX, heatMaxX, heatMinY, heatMaxY));

    //All ancestor cell counts (of facetLevel) will be captured during facet visiting and applied later. If the data is
    // just points then there won't be any ancestors.
    //Facet count of ancestors covering all of the heatmap:
    final int[] allCellsAncestorCount = new int[1]; // single-element array so it can be accumulated in the inner class
    //All other ancestors:
    final Map<Rectangle,Integer> ancestors = new HashMap<>();

    //Now lets count some facets!
    PrefixTreeFacetCounter.compute(strategy, context, topAcceptDocs, inputShape, facetLevel,
        new PrefixTreeFacetCounter.FacetVisitor() {
            @Override
            public void visit(Cell cell, int count) {
                final double heatMinX = heatmap.region.getMinX();
                final Rectangle rect = (Rectangle) cell.getShape();
                if (cell.getLevel() == facetLevel) {//heatmap level; count it directly
                    //convert to col & row
                    int column;
                    if (rect.getMinX() >= heatMinX) {
                        column = (int) Math.round((rect.getMinX() - heatMinX) / cellWidth);
                    } else { // due to dateline wrap
                        column = (int) Math.round((rect.getMinX() + 360 - heatMinX) / cellWidth);
                    }
                    int row = (int) Math.round((rect.getMinY() - heatMinY) / cellHeight);
                    //note: unfortunately, it's possible for us to visit adjacent cells to the heatmap (if the SpatialPrefixTree
                    // allows adjacent cells to overlap on the seam), so we need to skip them
                    if (column < 0 || column >= heatmap.columns || row < 0 || row >= heatmap.rows) {
                        return;
                    }
                    // increment
                    heatmap.counts[column * heatmap.rows + row] += count;
                } else if (rect.relate(heatmap.region) == SpatialRelation.CONTAINS) {//containing ancestor
                    allCellsAncestorCount[0] += count;
                } else { // ancestor
                    // note: not particularly efficient (possible put twice, and Integer wrapper); oh well
                    Integer existingCount = ancestors.put(rect, count);
                    if (existingCount != null) {
                        ancestors.put(rect, count + existingCount);
                    }
                }
            }
        });

    //Update the heatmap counts with ancestor counts

    // Apply allCellsAncestorCount
    if (allCellsAncestorCount[0] > 0) {
        for (int i = 0; i < heatmap.counts.length; i++) {
            heatmap.counts[i] += allCellsAncestorCount[0];
        }
    }

    // Apply ancestors
    //  note: This approach isn't optimized for a ton of ancestor cells. We'll potentially increment the same cells
    //    multiple times in separate passes if any ancestors overlap. IF this poses a problem, we could optimize it
    //    with additional complication by keeping track of intervals in a sorted tree structure (possible TreeMap/Set)
    //    and iterate them cleverly such that we just make one pass at this stage.

    int[] pair = new int[2];//output of intersectInterval
    for (Map.Entry<Rectangle, Integer> entry : ancestors.entrySet()) {
        Rectangle rect = entry.getKey();
        final int count = entry.getValue();
        //note: we approach this in a way that eliminates int overflow/underflow (think huge cell, tiny heatmap)
        intersectInterval(heatMinY, heatMaxY, cellHeight, rows, rect.getMinY(), rect.getMaxY(), pair);
        final int startRow = pair[0];
        final int endRow = pair[1];

        if (!heatmap.region.getCrossesDateLine()) {
            intersectInterval(heatMinX, heatMaxX, cellWidth, columns, rect.getMinX(), rect.getMaxX(), pair);
            final int startCol = pair[0];
            final int endCol = pair[1];
            incrementRange(heatmap, startCol, endCol, startRow, endRow, count);
        } else {
            //left half of dateline:
            if (rect.getMaxX() >= heatMinX) {
                final int leftColumns = (int) Math.round((180 - heatMinX) / cellWidth) + 1;
                intersectInterval(heatMinX, 180, cellWidth, leftColumns, rect.getMinX(), rect.getMaxX(), pair);
                final int startCol = pair[0];
                final int endCol = pair[1];
                incrementRange(heatmap, startCol, endCol, startRow, endRow, count);
            }
            //right half of dateline
            // BUG FIX: was 'rect.getMinY() <= heatMaxX', which compared a latitude (minY) against a
            // longitude bound (heatMaxX) and could skip/incorrectly apply ancestor counts on the
            // right side of the dateline. The X coordinate must be tested here.
            if (rect.getMinX() <= heatMaxX) {
                final int rightColumns = (int) Math.round(heatMaxX / cellWidth) + 1;
                intersectInterval(0, heatMaxX, cellWidth, rightColumns, rect.getMinX(), rect.getMaxX(), pair);
                final int startCol = pair[0];
                final int endCol = pair[1];
                incrementRange(heatmap, startCol, endCol, startRow, endRow, count);
            }
        }
    }
    return heatmap;
}
private static void intersectInterval(double heatMin, double heatMax, double heatCellLen, int heatLen,
double cellMin, double cellMax,
int[] out) {
//precondition: we know there's an intersection
if (heatMin >= cellMin) {
out[0] = 0;
} else {
out[0] = (int) Math.round((cellMin - heatMin) / heatCellLen);
}
if (heatMax <= cellMax) {
out[1] = heatLen - 1;
} else {
out[1] = (int) Math.round((cellMax - heatMin) / heatCellLen) - 1;
}
}
private static void incrementRange(Heatmap heatmap, int startColumn, int endColumn, int startRow, int endRow,
int count) {
//startColumn & startRow are not necessarily within the heatmap range; likewise numRows/columns may overlap.
if (startColumn < 0) {
endColumn += startColumn;
startColumn = 0;
}
endColumn = Math.min(heatmap.columns-1, endColumn);
if (startRow < 0) {
endRow += startRow;
startRow = 0;
}
endRow = Math.min(heatmap.rows-1, endRow);
if (startRow > endRow) {
return;//short-circuit
}
for (int c = startColumn; c <= endColumn; c++) {
int cBase = c * heatmap.rows;
for (int r = startRow; r <= endRow; r++) {
heatmap.counts[cBase + r] += count;
}
}
}
/** Computes the number of intervals (rows or columns) to cover a range given the sizes. */
private static int calcRowsOrCols(double cellRange, double cellMin, double requestRange, double requestMin,
double worldRange) {
assert requestMin >= cellMin;
//Idealistically this wouldn't be so complicated but we concern ourselves with overflow and edge cases
double range = (requestRange + (requestMin - cellMin));
if (range == 0) {
return 1;
}
final double intervals = Math.ceil(range / cellRange);
if (intervals > Integer.MAX_VALUE) {
return Integer.MAX_VALUE;//should result in an error soon (exceed thresholds)
}
// ensures we don't have more intervals than world bounds (possibly due to rounding/edge issue)
final long intervalsMax = Math.round(worldRange / cellRange);
if (intervalsMax > Integer.MAX_VALUE) {
//just return intervals
return (int) intervals;
}
return Math.min((int)intervalsMax, (int)intervals);
}
  /** Utility class holding only static methods; not instantiable. */
  private HeatmapFacetCounter() {
  }
}
| |
// Copyright (C) 2009 The Android Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.gerrit.pgm.util;
import static com.google.inject.Scopes.SINGLETON;
import static com.google.inject.Stage.PRODUCTION;
import com.google.gerrit.lifecycle.LifecycleModule;
import com.google.gerrit.server.config.GerritServerConfigModule;
import com.google.gerrit.server.config.SitePath;
import com.google.gerrit.server.schema.DataSourceProvider;
import com.google.gerrit.server.schema.DatabaseModule;
import com.google.gerrit.server.schema.SchemaModule;
import com.google.gwtorm.client.OrmException;
import com.google.inject.AbstractModule;
import com.google.inject.CreationException;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.name.Names;
import com.google.inject.spi.Message;
import org.kohsuke.args4j.Option;
import java.io.File;
import java.io.FileFilter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import javax.sql.DataSource;
public abstract class SiteProgram extends AbstractProgram {
  // Defaults to the current working directory when --site-path is not given.
  @Option(name = "--site-path", aliases = {"-d"}, usage = "Local directory containing site data")
  private File sitePath = new File(".");
  /** @return the site path specified on the command line. */
  protected File getSitePath() {
    File path = sitePath.getAbsoluteFile();
    // Normalize a trailing "." (e.g. "-d .") so the directory itself is returned.
    if (".".equals(path.getName())) {
      path = path.getParentFile();
    }
    return path;
  }
  /** Ensures we are running inside of a valid site, otherwise throws a Die. */
  protected void mustHaveValidSite() throws Die {
    // A site is considered valid when its etc/gerrit.config file exists.
    if (!new File(new File(getSitePath(), "etc"), "gerrit.config").exists()) {
      throw die("not a Gerrit site: '" + getSitePath() + "'\n"
          + "Perhaps you need to run init first?");
    }
  }
  /** Load extra JARs from {@code lib/} subdirectory of {@link #getSitePath()} */
  protected void loadSiteLib() {
    final File libdir = new File(getSitePath(), "lib");
    // Pick up only plain *.jar / *.zip files; libdir may not exist (list == null).
    final File[] list = libdir.listFiles(new FileFilter() {
      @Override
      public boolean accept(File path) {
        if (!path.isFile()) {
          return false;
        }
        return path.getName().endsWith(".jar") //
            || path.getName().endsWith(".zip");
      }
    });
    if (list != null && 0 < list.length) {
      // Sort by file name so the classpath order is deterministic across runs.
      Arrays.sort(list, new Comparator<File>() {
        @Override
        public int compare(File a, File b) {
          return a.getName().compareTo(b.getName());
        }
      });
      addToClassLoader(list);
    }
  }
  // Appends the given files to the running URLClassLoader by reflectively invoking
  // its protected addURL method. NOTE(review): relies on setAccessible on a JDK
  // class; assumes the application class loader is a URLClassLoader — confirm on
  // newer JDKs where this may no longer hold.
  private void addToClassLoader(final File[] additionalLocations) {
    final ClassLoader cl = getClass().getClassLoader();
    if (!(cl instanceof URLClassLoader)) {
      throw noAddURL("Not loaded by URLClassLoader", null);
    }
    final URLClassLoader ucl = (URLClassLoader) cl;
    // Remember URLs already on the classpath so duplicates are not re-added.
    final Set<URL> have = new HashSet<URL>();
    have.addAll(Arrays.asList(ucl.getURLs()));
    final Method m;
    try {
      m = URLClassLoader.class.getDeclaredMethod("addURL", URL.class);
      m.setAccessible(true);
    } catch (SecurityException e) {
      throw noAddURL("Method addURL not available", e);
    } catch (NoSuchMethodException e) {
      throw noAddURL("Method addURL not available", e);
    }
    for (final File path : additionalLocations) {
      try {
        final URL url = path.toURI().toURL();
        if (have.add(url)) {
          m.invoke(cl, url);
        }
      } catch (MalformedURLException e) {
        throw noAddURL("addURL " + path + " failed", e);
      } catch (IllegalArgumentException e) {
        throw noAddURL("addURL " + path + " failed", e);
      } catch (IllegalAccessException e) {
        throw noAddURL("addURL " + path + " failed", e);
      } catch (InvocationTargetException e) {
        // unwrap to surface the real failure thrown by addURL itself
        throw noAddURL("addURL " + path + " failed", e.getCause());
      }
    }
  }
  // Wraps a classpath-extension failure into an unchecked exception; why may be null.
  private static UnsupportedOperationException noAddURL(String m, Throwable why) {
    final String prefix = "Cannot extend classpath: ";
    return new UnsupportedOperationException(prefix + m, why);
  }
  /** @return provides database connectivity and site path. */
  protected Injector createDbInjector(final DataSourceProvider.Context context) {
    loadSiteLib();
    final File sitePath = getSitePath();
    final List<Module> modules = new ArrayList<Module>();
    // Expose the site path so downstream modules can locate site files.
    modules.add(new AbstractModule() {
      @Override
      protected void configure() {
        bind(File.class).annotatedWith(SitePath.class).toInstance(sitePath);
      }
    });
    // Bind the ReviewDb data source as a lifecycle-managed singleton.
    modules.add(new LifecycleModule() {
      @Override
      protected void configure() {
        bind(DataSourceProvider.Context.class).toInstance(context);
        bind(Key.get(DataSource.class, Names.named("ReviewDb"))).toProvider(
            DataSourceProvider.class).in(SINGLETON);
        listener().to(DataSourceProvider.class);
      }
    });
    modules.add(new GerritServerConfigModule());
    modules.add(new DatabaseModule());
    modules.add(new SchemaModule());
    try {
      return Guice.createInjector(PRODUCTION, modules);
    } catch (CreationException ce) {
      // Translate common Guice startup failures into friendlier Die errors.
      final Message first = ce.getErrorMessages().iterator().next();
      Throwable why = first.getCause();
      if (why instanceof SQLException) {
        throw die("Cannot connect to SQL database", why);
      }
      if (why instanceof OrmException && why.getCause() != null
          && "Unable to determine driver URL".equals(why.getMessage())) {
        why = why.getCause();
        if (isCannotCreatePoolException(why)) {
          throw die("Cannot connect to SQL database", why.getCause());
        }
        throw die("Cannot connect to SQL database", why);
      }
      // Otherwise compose a readable message from the whole cause chain.
      final StringBuilder buf = new StringBuilder();
      if (why != null) {
        buf.append(why.getMessage());
        why = why.getCause();
      } else {
        buf.append(first.getMessage());
      }
      while (why != null) {
        buf.append("\n caused by ");
        buf.append(why.toString());
        why = why.getCause();
      }
      throw die(buf.toString(), new RuntimeException("DbInjector failed", ce));
    }
  }
  // Detects commons-dbcp's "Cannot create PoolableConnectionFactory" failure,
  // i.e. the pool could not open an initial database connection.
  @SuppressWarnings("deprecation")
  private static boolean isCannotCreatePoolException(Throwable why) {
    return why instanceof org.apache.commons.dbcp.SQLNestedException
        && why.getCause() != null
        && why.getMessage().startsWith(
            "Cannot create PoolableConnectionFactory");
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bulk;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.DocWriteRequest;
import org.elasticsearch.action.delete.DeleteRequest;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.common.Nullable;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeUnit;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.threadpool.Scheduler;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.Closeable;
import java.util.Objects;
import java.util.concurrent.ScheduledThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.function.BiConsumer;
import java.util.function.Supplier;
/**
* A bulk processor is a thread safe bulk processing class, allowing to easily set when to "flush" a new bulk request
* (either based on number of actions, based on the size, or time), and to easily control the number of concurrent bulk
* requests allowed to be executed in parallel.
* <p>
* In order to create a new bulk processor, use the {@link Builder}.
*/
public class BulkProcessor implements Closeable {
    /**
     * A listener for the execution.
     */
    public interface Listener {
        /**
         * Callback before the bulk is executed.
         */
        void beforeBulk(long executionId, BulkRequest request);
        /**
         * Callback after a successful execution of bulk request.
         */
        void afterBulk(long executionId, BulkRequest request, BulkResponse response);
        /**
         * Callback after a failed execution of bulk request.
         * <p>
         * Note that in case an instance of <code>InterruptedException</code> is passed, which means that request processing has been
         * cancelled externally, the thread's interruption status has been restored prior to calling this method.
         */
        void afterBulk(long executionId, BulkRequest request, Throwable failure);
    }
    /**
     * A builder used to create an instance of a bulk processor.
     */
    public static class Builder {
        // Executes each assembled bulk request (e.g. Client#bulk).
        private final BiConsumer<BulkRequest, ActionListener<BulkResponse>> consumer;
        private final Listener listener;
        private final Scheduler scheduler;
        // Run when the processor closes (e.g. to shut down a dedicated scheduler).
        private final Runnable onClose;
        private int concurrentRequests = 1;
        private int bulkActions = 1000;
        private ByteSizeValue bulkSize = new ByteSizeValue(5, ByteSizeUnit.MB);
        private TimeValue flushInterval = null;
        private BackoffPolicy backoffPolicy = BackoffPolicy.exponentialBackoff();
        // Defaults applied to every generated BulkRequest; null means "not set".
        private String globalIndex;
        private String globalType;
        private String globalRouting;
        private String globalPipeline;
        private Builder(BiConsumer<BulkRequest, ActionListener<BulkResponse>> consumer, Listener listener,
                        Scheduler scheduler, Runnable onClose) {
            this.consumer = consumer;
            this.listener = listener;
            this.scheduler = scheduler;
            this.onClose = onClose;
        }
        /**
         * Sets the number of concurrent requests allowed to be executed. A value of 0 means that only a single
         * request will be allowed to be executed. A value of 1 means 1 concurrent request is allowed to be executed
         * while accumulating new bulk requests. Defaults to {@code 1}.
         */
        public Builder setConcurrentRequests(int concurrentRequests) {
            this.concurrentRequests = concurrentRequests;
            return this;
        }
        /**
         * Sets when to flush a new bulk request based on the number of actions currently added. Defaults to
         * {@code 1000}. Can be set to {@code -1} to disable it.
         */
        public Builder setBulkActions(int bulkActions) {
            this.bulkActions = bulkActions;
            return this;
        }
        /**
         * Sets when to flush a new bulk request based on the size of actions currently added. Defaults to
         * {@code 5mb}. Can be set to {@code -1} to disable it.
         */
        public Builder setBulkSize(ByteSizeValue bulkSize) {
            this.bulkSize = bulkSize;
            return this;
        }
        /**
         * Sets a flush interval flushing *any* bulk actions pending if the interval passes. Defaults to not set.
         * <p>
         * Note, both {@link #setBulkActions(int)} and {@link #setBulkSize(org.elasticsearch.common.unit.ByteSizeValue)}
         * can be set to {@code -1} with the flush interval set allowing for complete async processing of bulk actions.
         */
        public Builder setFlushInterval(TimeValue flushInterval) {
            this.flushInterval = flushInterval;
            return this;
        }
        /** Sets the default index for requests that do not specify one. */
        public Builder setGlobalIndex(String globalIndex) {
            this.globalIndex = globalIndex;
            return this;
        }
        /** Sets the default type for requests that do not specify one. */
        public Builder setGlobalType(String globalType) {
            this.globalType = globalType;
            return this;
        }
        /** Sets the default routing for requests that do not specify one. */
        public Builder setGlobalRouting(String globalRouting) {
            this.globalRouting = globalRouting;
            return this;
        }
        /** Sets the default ingest pipeline for requests that do not specify one. */
        public Builder setGlobalPipeline(String globalPipeline) {
            this.globalPipeline = globalPipeline;
            return this;
        }
        /**
         * Sets a custom backoff policy. The backoff policy defines how the bulk processor should handle retries of bulk requests internally
         * in case they have failed due to resource constraints (i.e. a thread pool was full).
         *
         * The default is to back off exponentially.
         *
         * @see org.elasticsearch.action.bulk.BackoffPolicy#exponentialBackoff()
         */
        public Builder setBackoffPolicy(BackoffPolicy backoffPolicy) {
            if (backoffPolicy == null) {
                throw new NullPointerException("'backoffPolicy' must not be null. To disable backoff, pass BackoffPolicy.noBackoff()");
            }
            this.backoffPolicy = backoffPolicy;
            return this;
        }
        /**
         * Builds a new bulk processor.
         */
        public BulkProcessor build() {
            return new BulkProcessor(consumer, backoffPolicy, listener, concurrentRequests, bulkActions,
                bulkSize, flushInterval, scheduler, onClose, createBulkRequestWithGlobalDefaults());
        }
        // Supplies fresh BulkRequests pre-populated with the configured global defaults.
        private Supplier<BulkRequest> createBulkRequestWithGlobalDefaults() {
            return () -> new BulkRequest(globalIndex, globalType)
                .pipeline(globalPipeline)
                .routing(globalRouting);
        }
    }
    /**
     * Returns a builder backed by the given client; the client's thread pool is used to
     * schedule periodic flushes, so nothing extra is torn down on close.
     */
    public static Builder builder(Client client, Listener listener) {
        Objects.requireNonNull(client, "client");
        Objects.requireNonNull(listener, "listener");
        return new Builder(client::bulk, listener, client.threadPool(), () -> {});
    }
    /**
     * Returns a builder around an arbitrary bulk consumer. A dedicated scheduler executor is
     * created here and terminated (waiting up to 10 seconds) when the processor is closed.
     */
    public static Builder builder(BiConsumer<BulkRequest, ActionListener<BulkResponse>> consumer, Listener listener) {
        Objects.requireNonNull(consumer, "consumer");
        Objects.requireNonNull(listener, "listener");
        final ScheduledThreadPoolExecutor scheduledThreadPoolExecutor = Scheduler.initScheduler(Settings.EMPTY);
        return new Builder(consumer, listener,
                buildScheduler(scheduledThreadPoolExecutor),
                () -> Scheduler.terminate(scheduledThreadPoolExecutor, 10, TimeUnit.SECONDS));
    }
    // Adapts a ScheduledThreadPoolExecutor to the Scheduler interface used for flush scheduling.
    private static Scheduler buildScheduler(ScheduledThreadPoolExecutor scheduledThreadPoolExecutor) {
        return (command, delay, executor) ->
            Scheduler.wrapAsScheduledCancellable(scheduledThreadPoolExecutor.schedule(command, delay.millis(), TimeUnit.MILLISECONDS));
    }
    // Flush threshold by number of actions; -1 disables the check.
    private final int bulkActions;
    // Flush threshold by estimated request size in bytes; -1 disables the check.
    private final long bulkSize;
    // Handle on the periodic flush task; an inert cancellable when no interval is configured.
    private final Scheduler.Cancellable cancellableFlushTask;
    // Monotonically increasing id handed to Listener callbacks for each executed bulk.
    private final AtomicLong executionIdGen = new AtomicLong();
    // Accumulates actions until flushed; mutated only under synchronization on this instance.
    private BulkRequest bulkRequest;
    // Creates the replacement request (with global defaults) after each flush.
    private final Supplier<BulkRequest> bulkRequestSupplier;
    // Enforces the concurrent-request limit, applies backoff retries and notifies the listener.
    private final BulkRequestHandler bulkRequestHandler;
    private final Scheduler scheduler;
    private final Runnable onClose;
    private volatile boolean closed = false;
    BulkProcessor(BiConsumer<BulkRequest, ActionListener<BulkResponse>> consumer, BackoffPolicy backoffPolicy, Listener listener,
                  int concurrentRequests, int bulkActions, ByteSizeValue bulkSize, @Nullable TimeValue flushInterval,
                  Scheduler scheduler, Runnable onClose, Supplier<BulkRequest> bulkRequestSupplier) {
        this.bulkActions = bulkActions;
        this.bulkSize = bulkSize.getBytes();
        this.scheduler = scheduler;
        this.bulkRequest = bulkRequestSupplier.get();
        this.bulkRequestSupplier = bulkRequestSupplier;
        this.bulkRequestHandler = new BulkRequestHandler(consumer, backoffPolicy, listener, scheduler, concurrentRequests);
        // Start period flushing task after everything is setup
        this.cancellableFlushTask = startFlushTask(flushInterval, scheduler);
        this.onClose = onClose;
    }
    /**
     * Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are flushed.
     */
    @Override
    public void close() {
        try {
            awaitClose(0, TimeUnit.NANOSECONDS);
        } catch (InterruptedException exc) {
            // restore the interrupt status rather than propagate from close()
            Thread.currentThread().interrupt();
        }
    }
    /**
     * Closes the processor. If flushing by time is enabled, then it's shutdown. Any remaining bulk actions are flushed.
     * <p>
     * If concurrent requests are not enabled, returns {@code true} immediately.
     * If concurrent requests are enabled, waits for up to the specified timeout for all bulk requests to complete then returns {@code true}
     * If the specified waiting time elapses before all bulk requests complete, {@code false} is returned.
     *
     * @param timeout The maximum time to wait for the bulk requests to complete
     * @param unit The time unit of the {@code timeout} argument
     * @return {@code true} if all bulk requests completed and {@code false} if the waiting time elapsed before all the bulk requests
     * completed
     * @throws InterruptedException If the current thread is interrupted
     */
    public synchronized boolean awaitClose(long timeout, TimeUnit unit) throws InterruptedException {
        if (closed) {
            return true;
        }
        closed = true;
        this.cancellableFlushTask.cancel();
        // Flush whatever is still buffered before waiting on in-flight requests.
        if (bulkRequest.numberOfActions() > 0) {
            execute();
        }
        try {
            return this.bulkRequestHandler.awaitClose(timeout, unit);
        } finally {
            onClose.run();
        }
    }
    /**
     * Adds an {@link IndexRequest} to the list of actions to execute. Follows the same behavior of {@link IndexRequest}
     * (for example, if no id is provided, one will be generated, or usage of the create flag).
     */
    public BulkProcessor add(IndexRequest request) {
        return add((DocWriteRequest) request);
    }
    /**
     * Adds an {@link DeleteRequest} to the list of actions to execute.
     */
    public BulkProcessor add(DeleteRequest request) {
        return add((DocWriteRequest) request);
    }
    /**
     * Adds either a delete or an index request.
     */
    public BulkProcessor add(DocWriteRequest request) {
        return add(request, null);
    }
    /**
     * Adds either a delete or an index request, with an optional payload handed back to the listener.
     */
    public BulkProcessor add(DocWriteRequest request, @Nullable Object payload) {
        internalAdd(request, payload);
        return this;
    }
    // Whether the processor can still accept new requests.
    boolean isOpen() {
        return closed == false;
    }
    // Rejects use after the processor has been closed.
    protected void ensureOpen() {
        if (closed) {
            throw new IllegalStateException("bulk process already closed");
        }
    }
    // Synchronized so the add and any resulting threshold flush happen atomically.
    private synchronized void internalAdd(DocWriteRequest request, @Nullable Object payload) {
        ensureOpen();
        bulkRequest.add(request, payload);
        executeIfNeeded();
    }
    /**
     * Adds the data from the bytes to be processed by the bulk processor
     */
    public BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                             XContentType xContentType) throws Exception {
        return add(data, defaultIndex, defaultType, null, null, xContentType);
    }
    /**
     * Adds the data from the bytes to be processed by the bulk processor
     */
    public synchronized BulkProcessor add(BytesReference data, @Nullable String defaultIndex, @Nullable String defaultType,
                                          @Nullable String defaultPipeline, @Nullable Object payload,
                                          XContentType xContentType) throws Exception {
        bulkRequest.add(data, defaultIndex, defaultType, null, null, null, defaultPipeline, payload, true, xContentType);
        executeIfNeeded();
        return this;
    }
    // Schedules the periodic Flush task, or returns an inert, already-cancelled
    // handle when no flush interval is configured.
    private Scheduler.Cancellable startFlushTask(TimeValue flushInterval, Scheduler scheduler) {
        if (flushInterval == null) {
            return new Scheduler.Cancellable() {
                @Override
                public boolean cancel() {
                    return false;
                }
                @Override
                public boolean isCancelled() {
                    return true;
                }
            };
        }
        final Runnable flushRunnable = scheduler.preserveContext(new Flush());
        return scheduler.scheduleWithFixedDelay(flushRunnable, flushInterval, ThreadPool.Names.GENERIC);
    }
    // Flushes the buffered request once an action-count or byte-size threshold is exceeded.
    private void executeIfNeeded() {
        ensureOpen();
        if (!isOverTheLimit()) {
            return;
        }
        execute();
    }
    // (currently) needs to be executed under a lock
    private void execute() {
        // Swap in a fresh request before handing the full one off to the handler.
        final BulkRequest bulkRequest = this.bulkRequest;
        final long executionId = executionIdGen.incrementAndGet();
        this.bulkRequest = bulkRequestSupplier.get();
        this.bulkRequestHandler.execute(bulkRequest, executionId);
    }
    // True when either flush threshold has been reached; a threshold of -1 is disabled.
    private boolean isOverTheLimit() {
        if (bulkActions != -1 && bulkRequest.numberOfActions() >= bulkActions) {
            return true;
        }
        if (bulkSize != -1 && bulkRequest.estimatedSizeInBytes() >= bulkSize) {
            return true;
        }
        return false;
    }
    /**
     * Flush pending delete or index requests.
     */
    public synchronized void flush() {
        ensureOpen();
        if (bulkRequest.numberOfActions() > 0) {
            execute();
        }
    }
    // Periodic task that flushes any buffered actions; skips work when closed or empty.
    class Flush implements Runnable {
        @Override
        public void run() {
            synchronized (BulkProcessor.this) {
                if (closed) {
                    return;
                }
                if (bulkRequest.numberOfActions() == 0) {
                    return;
                }
                execute();
            }
        }
    }
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
// Code generated by Microsoft (R) AutoRest Code Generator.
package com.azure.resourcemanager.desktopvirtualization.models;
import com.azure.core.annotation.Fluent;
import com.azure.core.management.ProxyResource;
import com.azure.core.util.logging.ClientLogger;
import com.azure.resourcemanager.desktopvirtualization.fluent.models.HostPoolPatchProperties;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.Map;
/** HostPool properties that can be patched. */
@Fluent
public final class HostPoolPatch extends ProxyResource {
@JsonIgnore private final ClientLogger logger = new ClientLogger(HostPoolPatch.class);
/*
* tags to be updated
*/
@JsonProperty(value = "tags")
@JsonInclude(value = JsonInclude.Include.NON_NULL, content = JsonInclude.Include.ALWAYS)
private Map<String, String> tags;
/*
* HostPool properties that can be patched.
*/
@JsonProperty(value = "properties")
private HostPoolPatchProperties innerProperties;
/**
* Get the tags property: tags to be updated.
*
* @return the tags value.
*/
public Map<String, String> tags() {
return this.tags;
}
/**
* Set the tags property: tags to be updated.
*
* @param tags the tags value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withTags(Map<String, String> tags) {
this.tags = tags;
return this;
}
/**
* Get the innerProperties property: HostPool properties that can be patched.
*
* @return the innerProperties value.
*/
private HostPoolPatchProperties innerProperties() {
return this.innerProperties;
}
/**
* Get the friendlyName property: Friendly name of HostPool.
*
* @return the friendlyName value.
*/
public String friendlyName() {
return this.innerProperties() == null ? null : this.innerProperties().friendlyName();
}
/**
* Set the friendlyName property: Friendly name of HostPool.
*
* @param friendlyName the friendlyName value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withFriendlyName(String friendlyName) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withFriendlyName(friendlyName);
return this;
}
/**
* Get the description property: Description of HostPool.
*
* @return the description value.
*/
public String description() {
return this.innerProperties() == null ? null : this.innerProperties().description();
}
/**
* Set the description property: Description of HostPool.
*
* @param description the description value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withDescription(String description) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withDescription(description);
return this;
}
/**
* Get the customRdpProperty property: Custom rdp property of HostPool.
*
* @return the customRdpProperty value.
*/
public String customRdpProperty() {
return this.innerProperties() == null ? null : this.innerProperties().customRdpProperty();
}
/**
* Set the customRdpProperty property: Custom rdp property of HostPool.
*
* @param customRdpProperty the customRdpProperty value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withCustomRdpProperty(String customRdpProperty) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withCustomRdpProperty(customRdpProperty);
return this;
}
/**
* Get the maxSessionLimit property: The max session limit of HostPool.
*
* @return the maxSessionLimit value.
*/
public Integer maxSessionLimit() {
return this.innerProperties() == null ? null : this.innerProperties().maxSessionLimit();
}
/**
* Set the maxSessionLimit property: The max session limit of HostPool.
*
* @param maxSessionLimit the maxSessionLimit value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withMaxSessionLimit(Integer maxSessionLimit) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withMaxSessionLimit(maxSessionLimit);
return this;
}
/**
* Get the personalDesktopAssignmentType property: PersonalDesktopAssignment type for HostPool.
*
* @return the personalDesktopAssignmentType value.
*/
public PersonalDesktopAssignmentType personalDesktopAssignmentType() {
return this.innerProperties() == null ? null : this.innerProperties().personalDesktopAssignmentType();
}
/**
* Set the personalDesktopAssignmentType property: PersonalDesktopAssignment type for HostPool.
*
* @param personalDesktopAssignmentType the personalDesktopAssignmentType value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withPersonalDesktopAssignmentType(
PersonalDesktopAssignmentType personalDesktopAssignmentType) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withPersonalDesktopAssignmentType(personalDesktopAssignmentType);
return this;
}
/**
* Get the loadBalancerType property: The type of the load balancer.
*
* @return the loadBalancerType value.
*/
public LoadBalancerType loadBalancerType() {
return this.innerProperties() == null ? null : this.innerProperties().loadBalancerType();
}
/**
* Set the loadBalancerType property: The type of the load balancer.
*
* @param loadBalancerType the loadBalancerType value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withLoadBalancerType(LoadBalancerType loadBalancerType) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withLoadBalancerType(loadBalancerType);
return this;
}
/**
* Get the ring property: The ring number of HostPool.
*
* @return the ring value.
*/
public Integer ring() {
return this.innerProperties() == null ? null : this.innerProperties().ring();
}
/**
* Set the ring property: The ring number of HostPool.
*
* @param ring the ring value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withRing(Integer ring) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withRing(ring);
return this;
}
/**
* Get the validationEnvironment property: Is validation environment.
*
* @return the validationEnvironment value.
*/
public Boolean validationEnvironment() {
return this.innerProperties() == null ? null : this.innerProperties().validationEnvironment();
}
/**
* Set the validationEnvironment property: Is validation environment.
*
* @param validationEnvironment the validationEnvironment value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withValidationEnvironment(Boolean validationEnvironment) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withValidationEnvironment(validationEnvironment);
return this;
}
/**
* Get the registrationInfo property: The registration info of HostPool.
*
* @return the registrationInfo value.
*/
public RegistrationInfoPatch registrationInfo() {
return this.innerProperties() == null ? null : this.innerProperties().registrationInfo();
}
/**
* Set the registrationInfo property: The registration info of HostPool.
*
* @param registrationInfo the registrationInfo value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withRegistrationInfo(RegistrationInfoPatch registrationInfo) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withRegistrationInfo(registrationInfo);
return this;
}
/**
* Get the vmTemplate property: VM template for sessionhosts configuration within hostpool.
*
* @return the vmTemplate value.
*/
public String vmTemplate() {
return this.innerProperties() == null ? null : this.innerProperties().vmTemplate();
}
/**
* Set the vmTemplate property: VM template for sessionhosts configuration within hostpool.
*
* @param vmTemplate the vmTemplate value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withVmTemplate(String vmTemplate) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withVmTemplate(vmTemplate);
return this;
}
/**
* Get the ssoadfsAuthority property: URL to customer ADFS server for signing WVD SSO certificates.
*
* @return the ssoadfsAuthority value.
*/
public String ssoadfsAuthority() {
return this.innerProperties() == null ? null : this.innerProperties().ssoadfsAuthority();
}
/**
* Set the ssoadfsAuthority property: URL to customer ADFS server for signing WVD SSO certificates.
*
* @param ssoadfsAuthority the ssoadfsAuthority value to set.
* @return the HostPoolPatch object itself.
*/
public HostPoolPatch withSsoadfsAuthority(String ssoadfsAuthority) {
if (this.innerProperties() == null) {
this.innerProperties = new HostPoolPatchProperties();
}
this.innerProperties().withSsoadfsAuthority(ssoadfsAuthority);
return this;
}
/**
 * Get the ssoClientId property: ClientId for the registered Relying Party used to issue WVD SSO certificates.
 *
 * @return the ssoClientId value.
 */
public String ssoClientId() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.ssoClientId();
}
/**
 * Set the ssoClientId property: ClientId for the registered Relying Party used to issue WVD SSO certificates.
 *
 * @param ssoClientId the ssoClientId value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withSsoClientId(String ssoClientId) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withSsoClientId(ssoClientId);
    return this;
}
/**
 * Get the ssoClientSecretKeyVaultPath property: Path to Azure KeyVault storing the secret used for communication to
 * ADFS.
 *
 * @return the ssoClientSecretKeyVaultPath value.
 */
public String ssoClientSecretKeyVaultPath() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.ssoClientSecretKeyVaultPath();
}
/**
 * Set the ssoClientSecretKeyVaultPath property: Path to Azure KeyVault storing the secret used for communication to
 * ADFS.
 *
 * @param ssoClientSecretKeyVaultPath the ssoClientSecretKeyVaultPath value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withSsoClientSecretKeyVaultPath(String ssoClientSecretKeyVaultPath) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withSsoClientSecretKeyVaultPath(ssoClientSecretKeyVaultPath);
    return this;
}
/**
 * Get the ssoSecretType property: The type of single sign on Secret Type.
 *
 * @return the ssoSecretType value.
 */
public SsoSecretType ssoSecretType() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.ssoSecretType();
}
/**
 * Set the ssoSecretType property: The type of single sign on Secret Type.
 *
 * @param ssoSecretType the ssoSecretType value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withSsoSecretType(SsoSecretType ssoSecretType) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withSsoSecretType(ssoSecretType);
    return this;
}
/**
 * Get the preferredAppGroupType property: The type of preferred application group type, default to Desktop
 * Application Group.
 *
 * @return the preferredAppGroupType value.
 */
public PreferredAppGroupType preferredAppGroupType() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.preferredAppGroupType();
}
/**
 * Set the preferredAppGroupType property: The type of preferred application group type, default to Desktop
 * Application Group.
 *
 * @param preferredAppGroupType the preferredAppGroupType value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withPreferredAppGroupType(PreferredAppGroupType preferredAppGroupType) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withPreferredAppGroupType(preferredAppGroupType);
    return this;
}
/**
 * Get the startVMOnConnect property: The flag to turn on/off StartVMOnConnect feature.
 *
 * @return the startVMOnConnect value.
 */
public Boolean startVMOnConnect() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.startVMOnConnect();
}
/**
 * Set the startVMOnConnect property: The flag to turn on/off StartVMOnConnect feature.
 *
 * @param startVMOnConnect the startVMOnConnect value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withStartVMOnConnect(Boolean startVMOnConnect) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withStartVMOnConnect(startVMOnConnect);
    return this;
}
/**
 * Get the publicNetworkAccess property: Enabled to allow this resource to be access from the public network.
 *
 * @return the publicNetworkAccess value.
 */
public PublicNetworkAccess publicNetworkAccess() {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        return null;
    }
    return properties.publicNetworkAccess();
}
/**
 * Set the publicNetworkAccess property: Enabled to allow this resource to be access from the public network.
 *
 * @param publicNetworkAccess the publicNetworkAccess value to set.
 * @return the HostPoolPatch object itself.
 */
public HostPoolPatch withPublicNetworkAccess(PublicNetworkAccess publicNetworkAccess) {
    HostPoolPatchProperties properties = this.innerProperties();
    if (properties == null) {
        // Lazily create the backing properties bag on first write.
        properties = new HostPoolPatchProperties();
        this.innerProperties = properties;
    }
    properties.withPublicNetworkAccess(publicNetworkAccess);
    return this;
}
/**
 * Validates the instance.
 *
 * @throws IllegalArgumentException thrown if the instance is not valid.
 */
public void validate() {
    // Delegate validation to the properties bag when one exists; an empty
    // patch (no properties) is trivially valid.
    HostPoolPatchProperties properties = innerProperties();
    if (properties != null) {
        properties.validate();
    }
}
}
| |
/*
* Autopsy Forensic Browser
*
* Copyright 2011-2014 Basis Technology Corp.
* Contact: carrier <at> sleuthkit <dot> org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sleuthkit.autopsy.datamodel;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Observable;
import java.util.Observer;
import java.util.Set;
import java.util.logging.Level;
import org.openide.nodes.ChildFactory;
import org.openide.nodes.Children;
import org.openide.nodes.Node;
import org.openide.nodes.Sheet;
import org.openide.util.NbBundle;
import org.openide.util.lookup.Lookups;
import org.sleuthkit.autopsy.casemodule.Case;
import org.sleuthkit.autopsy.coreutils.Logger;
import org.sleuthkit.autopsy.ingest.IngestManager;
import org.sleuthkit.autopsy.ingest.ModuleDataEvent;
import org.sleuthkit.datamodel.AbstractFile;
import org.sleuthkit.datamodel.BlackboardArtifact;
import org.sleuthkit.datamodel.BlackboardAttribute;
import org.sleuthkit.datamodel.SleuthkitCase;
import org.sleuthkit.datamodel.SleuthkitCase.CaseDbQuery;
import org.sleuthkit.datamodel.TskCoreException;
import org.sleuthkit.datamodel.TskException;
/**
 * Keyword hits node support. Backs the directory-tree subtree that organizes
 * keyword-search results as: list (or "Single Literal/Regex") -> keyword ->
 * matching blackboard artifacts.
 */
public class KeywordHits implements AutopsyVisitableItem {

    // Case database handle; set to null by ListFactory's listener when the case closes.
    private SleuthkitCase skCase;
    private static final Logger logger = Logger.getLogger(KeywordHits.class.getName());
    private static final String KEYWORD_HITS = NbBundle.getMessage(KeywordHits.class, "KeywordHits.kwHits.text");
    public static final String NAME = BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getLabel();
    public static final String SIMPLE_LITERAL_SEARCH = NbBundle
            .getMessage(KeywordHits.class, "KeywordHits.simpleLiteralSearch.text");
    public static final String SIMPLE_REGEX_SEARCH = NbBundle
            .getMessage(KeywordHits.class, "KeywordHits.singleRegexSearch.text");
    // Cached query results shared by all of the node factories below; they
    // observe it and refresh when it changes.
    private final KeywordResults keywordResults;

    /**
     * @param skCase case database to query for keyword-hit artifacts
     */
    public KeywordHits(SleuthkitCase skCase) {
        this.skCase = skCase;
        keywordResults = new KeywordResults();
    }
/**
 * Cache of keyword-hit artifacts organized as list/type -> keyword ->
 * artifact IDs. Observers (the child factories and nodes) are notified
 * whenever the cache is rebuilt from the case database.
 */
private final class KeywordResults extends Observable {

    // Map from listName/Type to Map of keyword to set of artifact Ids
    private final Map<String, Map<String, Set<Long>>> topLevelMap;

    KeywordResults() {
        topLevelMap = new LinkedHashMap<>();
        update();
    }

    /**
     * @return the top-level names: keyword lists plus the "Single ..." buckets
     */
    List<String> getListNames() {
        List<String> names = new ArrayList<>(topLevelMap.keySet());
        // this causes the "Single ..." terms to be in the middle of the results,
        // which is wierd. Make a custom comparator or do something else to maek them on top
        //Collections.sort(names);
        return names;
    }

    /**
     * @return sorted keywords that had hits under the given top-level name
     */
    List<String> getKeywords(String listName) {
        List<String> keywords = new ArrayList<>(topLevelMap.get(listName).keySet());
        Collections.sort(keywords);
        return keywords;
    }

    /**
     * @return IDs of artifacts that hit on the given list/keyword pair
     */
    Set<Long> getArtifactIds(String listName, String keyword) {
        return topLevelMap.get(listName).get(keyword);
    }

    /**
     * Rebuild topLevelMap from per-artifact attribute values and notify
     * observers.
     *
     * @param artifactIds map of artifact ID -> (attribute type ID -> value)
     */
    void populateMaps(Map<Long, Map<Long, String>> artifactIds) {
        topLevelMap.clear();
        // map of list name to keword to artifact IDs
        Map<String, Map<String, Set<Long>>> listsMap = new LinkedHashMap<>();
        // Map from from literal keyword to artifact IDs
        Map<String, Set<Long>> literalMap = new LinkedHashMap<>();
        // Map from regex keyword artifact IDs
        Map<String, Set<Long>> regexMap = new LinkedHashMap<>();
        // top-level nodes
        topLevelMap.put(SIMPLE_LITERAL_SEARCH, literalMap);
        topLevelMap.put(SIMPLE_REGEX_SEARCH, regexMap);
        for (Map.Entry<Long, Map<Long, String>> art : artifactIds.entrySet()) {
            long id = art.getKey();
            Map<Long, String> attributes = art.getValue();
            // I think we can use attributes.remove(...) here?
            String listName = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID()));
            String word = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID()));
            String reg = attributes.get(Long.valueOf(BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID()));
            // part of a list
            if (listName != null) {
                if (listsMap.containsKey(listName) == false) {
                    listsMap.put(listName, new LinkedHashMap<String, Set<Long>>());
                }
                Map<String, Set<Long>> listMap = listsMap.get(listName);
                if (listMap.containsKey(word) == false) {
                    listMap.put(word, new HashSet<Long>());
                }
                listMap.get(word).add(id);
            } // regular expression, single term
            else if (reg != null) {
                if (regexMap.containsKey(reg) == false) {
                    regexMap.put(reg, new HashSet<Long>());
                }
                regexMap.get(reg).add(id);
            } // literal, single term
            else {
                if (literalMap.containsKey(word) == false) {
                    literalMap.put(word, new HashSet<Long>());
                }
                literalMap.get(word).add(id);
            }
        }
        // BUGFIX: copy the named-list maps into the top-level map once, after
        // the loop. The original called putAll() inside the loop, re-copying
        // every list map on every artifact iteration (same end state, wasted work).
        topLevelMap.putAll(listsMap);
        setChanged();
        notifyObservers();
    }

    /**
     * Re-query the case database for all keyword-hit attributes (set name,
     * keyword, regexp) and rebuild the cached maps. No-op when no case is open.
     */
    @SuppressWarnings("deprecation")
    public void update() {
        Map<Long, Map<Long, String>> artifactIds = new LinkedHashMap<>();
        if (skCase == null) {
            return;
        }
        int setId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_SET_NAME.getTypeID();
        int wordId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD.getTypeID();
        int regexId = BlackboardAttribute.ATTRIBUTE_TYPE.TSK_KEYWORD_REGEXP.getTypeID();
        int artId = BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT.getTypeID();
        String query = "SELECT blackboard_attributes.value_text,blackboard_attributes.artifact_id," //NON-NLS
                + "blackboard_attributes.attribute_type_id FROM blackboard_attributes,blackboard_artifacts WHERE " //NON-NLS
                + "(blackboard_attributes.artifact_id=blackboard_artifacts.artifact_id AND " //NON-NLS
                + "blackboard_artifacts.artifact_type_id=" + artId //NON-NLS
                + ") AND (attribute_type_id=" + setId + " OR " //NON-NLS
                + "attribute_type_id=" + wordId + " OR " //NON-NLS
                + "attribute_type_id=" + regexId + ")"; //NON-NLS
        try (CaseDbQuery dbQuery = skCase.executeQuery(query)) {
            ResultSet resultSet = dbQuery.getResultSet();
            while (resultSet.next()) {
                String value = resultSet.getString("value_text"); //NON-NLS
                long artifactId = resultSet.getLong("artifact_id"); //NON-NLS
                long typeId = resultSet.getLong("attribute_type_id"); //NON-NLS
                if (!artifactIds.containsKey(artifactId)) {
                    artifactIds.put(artifactId, new LinkedHashMap<Long, String>());
                }
                if (!value.equals("")) {
                    artifactIds.get(artifactId).put(typeId, value);
                }
            }
        } catch (TskCoreException | SQLException ex) {
            logger.log(Level.WARNING, "SQL Exception occurred: ", ex); //NON-NLS
        }
        populateMaps(artifactIds);
    }
}
/**
 * Visitor-pattern entry point; dispatches to the visitor's KeywordHits handler.
 *
 * @param <T> the visitor's result type
 * @param v   the visitor to accept
 * @return the value produced by the visitor for this item
 */
@Override
public <T> T accept(AutopsyItemVisitor<T> v) {
    return v.visit(this);
}
// Created by CreateAutopsyNodeVisitor
/**
 * Root node of the keyword-hits subtree; its children are the keyword lists.
 */
public class RootNode extends DisplayableItemNode {

    public RootNode() {
        super(Children.create(new ListFactory(), true), Lookups.singleton(KEYWORD_HITS));
        super.setName(NAME);
        super.setDisplayName(KEYWORD_HITS);
        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
    }

    @Override
    public boolean isLeafTypeNode() {
        // Lists and keywords hang below this node.
        return false;
    }

    @Override
    public <T> T accept(DisplayableItemNodeVisitor<T> v) {
        return v.visit(this);
    }

    @Override
    protected Sheet createSheet() {
        Sheet sheet = super.createSheet();
        Sheet.Set propertiesSet = sheet.get(Sheet.PROPERTIES);
        if (propertiesSet == null) {
            propertiesSet = Sheet.createPropertiesSet();
            sheet.put(propertiesSet);
        }
        propertiesSet.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.name.name"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.name.displayName"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.name.desc"),
                getName()));
        return sheet;
    }
}
/**
 * Child factory for the top-level keyword-list nodes. Registers ingest and
 * case listeners while children are shown (addNotify) and unregisters them on
 * teardown (removeNotify), keeping keywordResults in sync with new artifacts.
 */
private class ListFactory extends ChildFactory.Detachable<String> implements Observer {

    // Refreshes the cached results on new keyword-hit data or ingest
    // completion, and drops the case handle when the case closes.
    private final PropertyChangeListener pcl = new PropertyChangeListener() {
        @Override
        public void propertyChange(PropertyChangeEvent evt) {
            String eventType = evt.getPropertyName();
            if (eventType.equals(IngestManager.IngestModuleEvent.DATA_ADDED.toString())) {
                // only re-query for keyword-hit artifacts, not other artifact types
                if (((ModuleDataEvent) evt.getOldValue()).getArtifactType() == BlackboardArtifact.ARTIFACT_TYPE.TSK_KEYWORD_HIT) {
                    keywordResults.update();
                }
            } else if (eventType.equals(IngestManager.IngestJobEvent.COMPLETED.toString())
                    || eventType.equals(IngestManager.IngestJobEvent.CANCELLED.toString())) {
                keywordResults.update();
            } else if (eventType.equals(Case.Events.CURRENT_CASE.toString())) {
                // case was closed. Remove listeners so that we don't get called with a stale case handle
                if (evt.getNewValue() == null) {
                    removeNotify();
                    skCase = null;
                }
            }
        }
    };

    @Override
    protected void addNotify() {
        IngestManager.getInstance().addIngestJobEventListener(pcl);
        IngestManager.getInstance().addIngestModuleEventListener(pcl);
        Case.addPropertyChangeListener(pcl);
        keywordResults.update();
        keywordResults.addObserver(this);
    }

    @Override
    protected void removeNotify() {
        IngestManager.getInstance().removeIngestJobEventListener(pcl);
        IngestManager.getInstance().removeIngestModuleEventListener(pcl);
        Case.removePropertyChangeListener(pcl);
        keywordResults.deleteObserver(this);
    }

    // One key per top-level list name; a key becomes a ListNode child.
    @Override
    protected boolean createKeys(List<String> list) {
        list.addAll(keywordResults.getListNames());
        return true;
    }

    @Override
    protected Node createNodeForKey(String key) {
        return new ListNode(key);
    }

    // Observer callback from keywordResults: recompute the child keys.
    @Override
    public void update(Observable o, Object arg) {
        refresh(true);
    }
}
/**
 * Node for one keyword list (or one of the "Single ..." buckets). Its display
 * name carries the total hit count; it observes keywordResults to stay current.
 */
public class ListNode extends DisplayableItemNode implements Observer {

    // Immutable after construction (made final; the original left it mutable).
    private final String listName;

    public ListNode(String listName) {
        super(Children.create(new TermFactory(listName), true), Lookups.singleton(listName));
        super.setName(listName);
        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
        this.listName = listName;
        updateDisplayName();
        keywordResults.addObserver(this);
    }

    /**
     * Recomputes "listName (N)" where N is the total artifact count across all
     * keywords in this list.
     */
    private void updateDisplayName() {
        int totalDescendants = 0;
        for (String word : keywordResults.getKeywords(listName)) {
            Set<Long> ids = keywordResults.getArtifactIds(listName, word);
            totalDescendants += ids.size();
        }
        super.setDisplayName(listName + " (" + totalDescendants + ")");
    }

    @Override
    protected Sheet createSheet() {
        Sheet s = super.createSheet();
        Sheet.Set ss = s.get(Sheet.PROPERTIES);
        if (ss == null) {
            ss = Sheet.createPropertiesSet();
            s.put(ss);
        }
        ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.name"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.displayName"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.desc"),
                listName));
        ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.numChildren.name"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.numChildren.displayName"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.numChildren.desc"),
                keywordResults.getKeywords(listName).size()));
        return s;
    }

    @Override
    public boolean isLeafTypeNode() {
        return false;
    }

    @Override
    public <T> T accept(DisplayableItemNodeVisitor<T> v) {
        return v.visit(this);
    }

    // Observer callback: hit counts may have changed.
    @Override
    public void update(Observable o, Object arg) {
        updateDisplayName();
    }
}
/**
 * Child factory for the keyword nodes under one list; one TermNode per keyword.
 */
private class TermFactory extends ChildFactory.Detachable<String> implements Observer {

    // Immutable after construction (made final; the original left it mutable).
    private final String setName;

    private TermFactory(String setName) {
        super();
        this.setName = setName;
    }

    @Override
    protected void addNotify() {
        keywordResults.addObserver(this);
    }

    @Override
    protected void removeNotify() {
        keywordResults.deleteObserver(this);
    }

    // One key per keyword that had a hit in this list.
    @Override
    protected boolean createKeys(List<String> list) {
        list.addAll(keywordResults.getKeywords(setName));
        return true;
    }

    @Override
    protected Node createNodeForKey(String key) {
        return new TermNode(setName, key);
    }

    // Observer callback from keywordResults: recompute the child keys.
    @Override
    public void update(Observable o, Object arg) {
        refresh(true);
    }
}
/**
 * Node for one keyword within a list; its display name carries the hit count
 * and its children are the individual artifact hits.
 */
public class TermNode extends DisplayableItemNode implements Observer {

    // Immutable after construction (made final; the original left them mutable).
    private final String setName;
    private final String keyword;

    public TermNode(String setName, String keyword) {
        super(Children.create(new HitsFactory(setName, keyword), true), Lookups.singleton(keyword));
        super.setName(keyword);
        this.setName = setName;
        this.keyword = keyword;
        this.setIconBaseWithExtension("org/sleuthkit/autopsy/images/keyword_hits.png"); //NON-NLS
        updateDisplayName();
        keywordResults.addObserver(this);
    }

    // Shows "keyword (N)" where N is the number of matching artifacts.
    private void updateDisplayName() {
        super.setDisplayName(keyword + " (" + keywordResults.getArtifactIds(setName, keyword).size() + ")");
    }

    // Observer callback: hit counts may have changed.
    @Override
    public void update(Observable o, Object arg) {
        updateDisplayName();
    }

    @Override
    public boolean isLeafTypeNode() {
        return true;
    }

    @Override
    public <T> T accept(DisplayableItemNodeVisitor<T> v) {
        return v.visit(this);
    }

    @Override
    protected Sheet createSheet() {
        Sheet s = super.createSheet();
        Sheet.Set ss = s.get(Sheet.PROPERTIES);
        if (ss == null) {
            ss = Sheet.createPropertiesSet();
            s.put(ss);
        }
        ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.name"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.displayName"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.listName.desc"),
                getDisplayName()));
        ss.put(new NodeProperty<>(NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.filesWithHits.name"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.filesWithHits.displayName"),
                NbBundle.getMessage(this.getClass(), "KeywordHits.createSheet.filesWithHits.desc"),
                keywordResults.getArtifactIds(setName, keyword).size()));
        return s;
    }
}
public class HitsFactory extends ChildFactory.Detachable<Long> implements Observer {
private String keyword;
private String setName;
public HitsFactory(String setName, String keyword) {
super();
this.setName = setName;
this.keyword = keyword;
}
@Override
protected void addNotify() {
keywordResults.addObserver(this);
}
@Override
protected void removeNotify() {
keywordResults.deleteObserver(this);
}
@Override
protected boolean createKeys(List<Long> list) {
list.addAll(keywordResults.getArtifactIds(setName, keyword));
return true;
}
@Override
protected Node createNodeForKey(Long artifactId) {
if (skCase == null) {
return null;
}
try {
BlackboardArtifact art = skCase.getBlackboardArtifact(artifactId);
BlackboardArtifactNode n = new BlackboardArtifactNode(art);
AbstractFile file;
try {
file = skCase.getAbstractFileById(art.getObjectID());
} catch (TskCoreException ex) {
logger.log(Level.SEVERE, "TskCoreException while constructing BlackboardArtifact Node from KeywordHitsKeywordChildren"); //NON-NLS
return n;
}
// It is possible to get a keyword hit on artifacts generated
// for the underlying image in which case MAC times are not
// available/applicable/useful.
if (file == null) {
return n;
}
n.addNodeProperty(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "KeywordHits.createNodeForKey.modTime.name"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.modTime.displayName"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.modTime.desc"),
ContentUtils.getStringTime(file.getMtime(), file)));
n.addNodeProperty(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "KeywordHits.createNodeForKey.accessTime.name"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.accessTime.displayName"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.accessTime.desc"),
ContentUtils.getStringTime(file.getAtime(), file)));
n.addNodeProperty(new NodeProperty<>(
NbBundle.getMessage(this.getClass(), "KeywordHits.createNodeForKey.chgTime.name"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.chgTime.displayName"),
NbBundle.getMessage(this.getClass(),
"KeywordHits.createNodeForKey.chgTime.desc"),
ContentUtils.getStringTime(file.getCtime(), file)));
return n;
} catch (TskException ex) {
logger.log(Level.WARNING, "TSK Exception occurred", ex); //NON-NLS
}
return null;
}
@Override
public void update(Observable o, Object arg) {
refresh(true);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.ui;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.util.List;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.http.HttpServletRequest;
/**
 * This class is used to maintain a mapping between the session indexes of the SSO Identity Provider
 * end and the relying party end. When a user is authenticated and logged-in using SSO, an entry is
 * added to the validSessionMap where Idp-session-index --> RP-Session-id.
 *
 * When the user logs out from any SSO relying party, a SAML2 LogoutRequest is sent to all the
 * relying party service providers who have established sessions with the Identity Provider at that
 * moment. When a relying party receives a logout request, it should validate the request and
 * extract the IdP session index from the request. Then it should identify the sessionId of the
 * corresponding user which represents the session established at the relying party end. Then it
 * removes that session from the validSessionMap and includes it in the invalidSessionsMap. So when
 * the user tries to do some activity thereafter, they will be logged out from the system.
 *
 * This class maintains two maps to track valid sessions and invalid sessions. It is
 * implemented as a singleton because there should be only one SSOSessionManager per instance.
 */
public class CarbonSSOSessionManager {

    private static final Log log = LogFactory.getLog(CarbonSSOSessionManager.class);

    /**
     * CarbonSSOSessionManager instance which is used as the singleton instance
     */
    private static final CarbonSSOSessionManager instance = new CarbonSSOSessionManager();

    /**
     * Map of valid sessions. IdpSessionIndex is used as the key while the
     * RPSessionId is used as the value.
     */
    private final ConcurrentHashMap<String, String> validSessionMap = new ConcurrentHashMap<String, String>();

    /**
     * Map of invalidated sessions. RPSessionId is used as the key while
     * IdpSessionIndex is used as the value.
     */
    private final ConcurrentHashMap<String, String> invalidSessionsMap = new ConcurrentHashMap<String, String>();

    /**
     * Private Constructor since we are implementing a Singleton here
     */
    private CarbonSSOSessionManager() {
    }

    /**
     * Get the CarbonSSOSessionManager instance.
     *
     * @return CarbonSSOSessionManager instance
     */
    public static CarbonSSOSessionManager getInstance() {
        return instance;
    }

    /**
     * Add a new session mapping : IdpSessionIndex --> localSessionId
     *
     * @param idPSessionIndex session index sent along in the SAML Response
     * @param localSessionId  id of the current session established locally.
     */
    public void addSessionMapping(String idPSessionIndex, String localSessionId) {
        validSessionMap.put(idPSessionIndex, localSessionId);
    }

    /**
     * Make a session invalid after receiving the single logout request from the identity provider.
     *
     * @param idPSessionIndex session index established at the identity provider's end
     */
    public void makeSessionInvalid(String idPSessionIndex) {
        // BUGFIX: the original containsKey/get/put/remove sequence was a
        // check-then-act race on the ConcurrentHashMap; remove() is atomic and
        // returns the mapped value, so only one caller can win for a given index.
        String localSessionId = validSessionMap.remove(idPSessionIndex);
        if (localSessionId != null) {
            // record the invalidated session so subsequent requests are rejected
            invalidSessionsMap.put(localSessionId, idPSessionIndex);
        }
    }

    /**
     * Check whether a particular session is valid.
     *
     * @param localSessionId session id established locally
     * @return true, if the session is valid, false otherwise
     */
    public boolean isSessionValid(String localSessionId) {
        return !invalidSessionsMap.containsKey(localSessionId);
    }

    /**
     * Remove invalid session from the invalid session map. This needs to be done before completing
     * the sign out.
     *
     * @param localSessionId SessionId established locally
     */
    public void removeInvalidSession(String localSessionId) {
        // remove() tolerates absent keys, so no containsKey pre-check is needed
        invalidSessionsMap.remove(localSessionId);
    }

    /**
     * This method checks whether the request is for a SSO authentication related page or servlet.
     * If it is so, the session invalidation should be skipped.
     *
     * @param request         Request, HTTPServletRequest
     * @param uiAuthenticator authenticator supplying the URL skip list; may be null
     * @return true, if session invalidation should be skipped.
     */
    public boolean skipSSOSessionInvalidation(HttpServletRequest request,
                                              CarbonUIAuthenticator uiAuthenticator) {
        String requestedURI = request.getRequestURI();
        if (uiAuthenticator != null) {
            List<String> skippingUrls = uiAuthenticator.getSessionValidationSkippingUrls();
            return skip(requestedURI, skippingUrls);
        } else {
            return false;
        }
    }

    /**
     * Skips authentication for given URI's.
     *
     * @param request The request to access a page.
     * @return <code>true</code> if request doesn't need to authenticate, else <code>false</code>.
     */
    public boolean skipAuthentication(HttpServletRequest request) {
        String requestedURI = request.getRequestURI();
        CarbonUIAuthenticator uiAuthenticator = CarbonUILoginUtil.getAuthenticator(request);
        if (uiAuthenticator != null) {
            List<String> skippingUrls = uiAuthenticator.getAuthenticationSkippingUrls();
            return skip(requestedURI, skippingUrls);
        } else {
            return false;
        }
    }

    /**
     * Resolve the URI to serve for this request: redirects invalidated sessions
     * to the logout action page unless the URI is on the skip list, in which
     * case the stale invalid-session entry is cleaned up instead.
     *
     * @param request         the incoming request
     * @param uiAuthenticator authenticator supplying the skip list; may be null
     * @return the original request URI, or the logout action page for invalidated sessions
     */
    public String getRequestedUrl(HttpServletRequest request, CarbonUIAuthenticator uiAuthenticator) {
        String requestedURI = request.getRequestURI();
        boolean skipSessionValidation = skipSSOSessionInvalidation(request, uiAuthenticator);
        boolean isSessionValid = isSessionValid(request.getSession().getId());
        if (!skipSessionValidation && !isSessionValid) {
            requestedURI = "/carbon/admin/logout_action.jsp";
            if (log.isDebugEnabled()) {
                log.debug("Request URI changed to " + requestedURI);
            }
        }
        if (skipSessionValidation && !isSessionValid) {
            removeInvalidSession(request.getSession().getId());
        }
        return requestedURI;
    }

    /**
     * @param requestedURI the URI being accessed
     * @param skippingUrls URL fragments that exempt the request
     * @return true when the URI contains any of the skip fragments
     */
    private boolean skip(String requestedURI, List<String> skippingUrls) {
        for (String skippingUrl : skippingUrls) {
            if (requestedURI.contains(skippingUrl)) {
                return true;
            }
        }
        return false;
    }
}
| |
package org.jhipster.brooklyn.service;
import org.jhipster.brooklyn.domain.Authority;
import org.jhipster.brooklyn.domain.User;
import org.jhipster.brooklyn.repository.AuthorityRepository;
import org.jhipster.brooklyn.config.Constants;
import org.jhipster.brooklyn.repository.UserRepository;
import org.jhipster.brooklyn.security.AuthoritiesConstants;
import org.jhipster.brooklyn.security.SecurityUtils;
import org.jhipster.brooklyn.service.util.RandomUtil;
import org.jhipster.brooklyn.service.dto.UserDTO;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.time.ZonedDateTime;
import java.util.*;
/**
 * Service class for managing users.
 */
@Service
@Transactional
public class UserService {

    private final Logger log = LoggerFactory.getLogger(UserService.class);

    // Collaborators supplied via constructor injection (no field injection).
    private final UserRepository userRepository;
    private final PasswordEncoder passwordEncoder;
    private final SocialService socialService;
    private final AuthorityRepository authorityRepository;

    public UserService(UserRepository userRepository, PasswordEncoder passwordEncoder, SocialService socialService, AuthorityRepository authorityRepository) {
        this.userRepository = userRepository;
        this.passwordEncoder = passwordEncoder;
        this.socialService = socialService;
        this.authorityRepository = authorityRepository;
    }
/**
 * Activates the account matching the given registration activation key and
 * clears the key so it cannot be reused.
 *
 * @param key the activation key emailed to the user at registration
 * @return the activated user, or empty if no account has that key
 */
public Optional<User> activateRegistration(String key) {
    log.debug("Activating user for activation key {}", key);
    Optional<User> maybeUser = userRepository.findOneByActivationKey(key);
    maybeUser.ifPresent(user -> {
        // activate given user for the registration key.
        user.setActivated(true);
        user.setActivationKey(null);
        log.debug("Activated user: {}", user);
    });
    return maybeUser;
}
/**
 * Finishes a password reset: if the reset key exists and was issued within the
 * last 24 hours, stores the newly encoded password and clears the key/date.
 *
 * @param newPassword the plaintext replacement password (encoded before storage)
 * @param key         the reset key previously sent to the user
 * @return the updated user, or empty if the key is unknown or expired
 */
public Optional<User> completePasswordReset(String newPassword, String key) {
    log.debug("Reset user password for reset key {}", key);
    return userRepository.findOneByResetKey(key)
        .filter(user -> user.getResetDate().isAfter(ZonedDateTime.now().minusHours(24)))
        .map(user -> {
            user.setPassword(passwordEncoder.encode(newPassword));
            user.setResetKey(null);
            user.setResetDate(null);
            return user;
        });
}
/**
 * Starts a password reset for an activated account with the given email,
 * stamping a fresh reset key and reset date.
 *
 * @param mail the account email address
 * @return the user carrying the new reset key, or empty if no activated
 *         account matches the email
 */
public Optional<User> requestPasswordReset(String mail) {
    return userRepository.findOneByEmail(mail)
        .filter(User::getActivated)
        .map(activeUser -> {
            activeUser.setResetKey(RandomUtil.generateResetKey());
            activeUser.setResetDate(ZonedDateTime.now());
            return activeUser;
        });
}
/**
 * Registers a brand-new, not-yet-activated user account with the default USER
 * authority and a fresh activation key.
 *
 * @return the persisted user
 */
public User createUser(String login, String password, String firstName, String lastName, String email,
    String imageUrl, String langKey) {
    User newUser = new User();
    newUser.setLogin(login);
    newUser.setFirstName(firstName);
    newUser.setLastName(lastName);
    newUser.setEmail(email);
    newUser.setImageUrl(imageUrl);
    newUser.setLangKey(langKey);
    // new user gets initially a generated password
    newUser.setPassword(passwordEncoder.encode(password));
    // new user is not active until the activation key is redeemed
    newUser.setActivated(false);
    newUser.setActivationKey(RandomUtil.generateActivationKey());
    Set<Authority> grantedAuthorities = new HashSet<>();
    grantedAuthorities.add(authorityRepository.findOne(AuthoritiesConstants.USER));
    newUser.setAuthorities(grantedAuthorities);
    userRepository.save(newUser);
    log.debug("Created Information for User: {}", newUser);
    return newUser;
}
/**
 * Creates an already-activated user from an administrative DTO; the account
 * gets a random password and a pending reset key so the user can set their own.
 *
 * @param userDTO the user data to persist
 * @return the persisted user
 */
public User createUser(UserDTO userDTO) {
    User user = new User();
    user.setLogin(userDTO.getLogin());
    user.setFirstName(userDTO.getFirstName());
    user.setLastName(userDTO.getLastName());
    user.setEmail(userDTO.getEmail());
    user.setImageUrl(userDTO.getImageUrl());
    // fall back to English when the DTO carries no language preference
    user.setLangKey(userDTO.getLangKey() == null ? "en" : userDTO.getLangKey());
    if (userDTO.getAuthorities() != null) {
        Set<Authority> authorities = new HashSet<>();
        for (String authorityName : userDTO.getAuthorities()) {
            authorities.add(authorityRepository.findOne(authorityName));
        }
        user.setAuthorities(authorities);
    }
    user.setPassword(passwordEncoder.encode(RandomUtil.generatePassword()));
    user.setResetKey(RandomUtil.generateResetKey());
    user.setResetDate(ZonedDateTime.now());
    user.setActivated(true);
    userRepository.save(user);
    log.debug("Created Information for User: {}", user);
    return user;
}
/**
 * Update basic information (first name, last name, email, language) for the current user.
 *
 * @param firstName first name of user
 * @param lastName  last name of user
 * @param email     email id of user
 * @param langKey   language key
 * @param imageUrl  image URL of user
 */
public void updateUser(String firstName, String lastName, String email, String langKey, String imageUrl) {
    userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(currentUser -> {
        currentUser.setFirstName(firstName);
        currentUser.setLastName(lastName);
        currentUser.setEmail(email);
        currentUser.setLangKey(langKey);
        currentUser.setImageUrl(imageUrl);
        log.debug("Changed Information for User: {}", currentUser);
    });
}
/**
 * Update all information for a specific user, and return the modified user.
 *
 * @param userDTO user to update
 * @return the updated user, or an empty Optional when no user exists for the DTO's id
 */
public Optional<UserDTO> updateUser(UserDTO userDTO) {
    // findOne(id) returns null for an unknown id; Optional.of would then throw
    // a NullPointerException, so use ofNullable and surface "not found" as an
    // empty Optional instead.
    return Optional.ofNullable(userRepository
        .findOne(userDTO.getId()))
        .map(user -> {
            user.setLogin(userDTO.getLogin());
            user.setFirstName(userDTO.getFirstName());
            user.setLastName(userDTO.getLastName());
            user.setEmail(userDTO.getEmail());
            user.setImageUrl(userDTO.getImageUrl());
            user.setActivated(userDTO.isActivated());
            user.setLangKey(userDTO.getLangKey());
            // Clear and refill the existing collection in place — presumably so
            // the persistence provider tracks the change on the managed entity.
            Set<Authority> managedAuthorities = user.getAuthorities();
            managedAuthorities.clear();
            userDTO.getAuthorities().stream()
                .map(authorityRepository::findOne)
                .forEach(managedAuthorities::add);
            log.debug("Changed Information for User: {}", user);
            return user;
        })
        .map(UserDTO::new);
}
/**
 * Deletes the user with the given login, removing the user's social
 * connections first. A no-op when no such user exists.
 *
 * @param login login of the user to delete
 */
public void deleteUser(String login) {
    userRepository.findOneByLogin(login).ifPresent(existingUser -> {
        socialService.deleteUserSocialConnection(existingUser.getLogin());
        userRepository.delete(existingUser);
        log.debug("Deleted User: {}", existingUser);
    });
}
/**
 * Changes the password of the currently authenticated user.
 *
 * @param password the new plain-text password; stored encrypted
 */
public void changePassword(String password) {
    userRepository.findOneByLogin(SecurityUtils.getCurrentUserLogin()).ifPresent(currentUser -> {
        currentUser.setPassword(passwordEncoder.encode(password));
        log.debug("Changed password for User: {}", currentUser);
    });
}
/**
 * Returns a page of all users except the anonymous user, mapped to DTOs.
 *
 * @param pageable paging information
 * @return page of user DTOs
 */
@Transactional(readOnly = true)
public Page<UserDTO> getAllManagedUsers(Pageable pageable) {
    Page<User> users = userRepository.findAllByLoginNot(pageable, Constants.ANONYMOUS_USER);
    return users.map(UserDTO::new);
}
/**
 * Returns the user with the given login, with authorities eagerly fetched.
 *
 * @param login login to look up
 * @return the user, or an empty Optional when no user has this login
 */
@Transactional(readOnly = true)
public Optional<User> getUserWithAuthoritiesByLogin(String login) {
    return userRepository.findOneWithAuthoritiesByLogin(login);
}
/**
 * Returns the user with the given id, with authorities eagerly fetched.
 *
 * @param id id to look up
 * @return the user — presumably null when the id is unknown; verify against
 *         the repository contract
 */
@Transactional(readOnly = true)
public User getUserWithAuthorities(Long id) {
    return userRepository.findOneWithAuthoritiesById(id);
}
/**
 * Returns the currently authenticated user with authorities eagerly fetched,
 * or null when no matching user exists.
 */
@Transactional(readOnly = true)
public User getUserWithAuthorities() {
    return userRepository.findOneWithAuthoritiesByLogin(SecurityUtils.getCurrentUserLogin()).orElse(null);
}
/**
 * Not activated users should be automatically deleted after 3 days.
 * <p>
 * This is scheduled to get fired everyday, at 01:00 (am).
 * </p>
 */
@Scheduled(cron = "0 0 1 * * ?")
public void removeNotActivatedUsers() {
    ZonedDateTime cutoff = ZonedDateTime.now().minusDays(3);
    List<User> staleUsers = userRepository.findAllByActivatedIsFalseAndCreatedDateBefore(cutoff);
    staleUsers.forEach(user -> {
        log.debug("Deleting not activated user {}", user.getLogin());
        userRepository.delete(user);
    });
}
}
| |
/*
* Copyright 2014 Uwe Trottmann
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.battlelancer.seriesguide.ui.dialogs;
import android.app.Activity;
import android.content.ContentValues;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.FragmentTransaction;
import android.text.InputFilter;
import android.text.Spanned;
import android.view.LayoutInflater;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import com.battlelancer.seriesguide.R;
import com.battlelancer.seriesguide.interfaces.OnListsChangedListener;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.ListItems;
import com.battlelancer.seriesguide.provider.SeriesGuideContract.Lists;
import com.battlelancer.seriesguide.util.Utils;
/**
* Dialog to rename or remove a list.
*/
public class ListManageDialogFragment extends DialogFragment {
public static ListManageDialogFragment newInstance(String listId) {
ListManageDialogFragment f = new ListManageDialogFragment();
Bundle args = new Bundle();
args.putString("listid", listId);
f.setArguments(args);
return f;
}
private EditText mTitle;
private OnListsChangedListener mListener;
private Button mButtonNegative;
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// hide title, use custom theme
setStyle(STYLE_NO_TITLE, 0);
}
@Override
public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
final View layout = inflater.inflate(R.layout.dialog_list_manage, container, false);
// title
mTitle = (EditText) layout.findViewById(R.id.editTextListManageListTitle);
mTitle.setFilters(new InputFilter[] {
new CharAndDigitInputFilter()
});
// buttons
mButtonNegative = (Button) layout.findViewById(R.id.buttonNegative);
mButtonNegative.setText(R.string.list_remove);
mButtonNegative.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// remove list and items
String listId = getArguments().getString("listid");
getActivity().getContentResolver().delete(Lists.buildListUri(listId), null,
null);
getActivity().getContentResolver().delete(ListItems.CONTENT_URI,
Lists.LIST_ID + "=?", new String[] {
listId
});
// remove tab from view pager
mListener.onListsChanged();
dismiss();
}
});
Button buttonPositive = (Button) layout.findViewById(R.id.buttonPositive);
buttonPositive.setText(android.R.string.ok);
buttonPositive.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
// prevent whitespaces/empty names
if (mTitle.getText().toString().trim().length() == 0) {
return;
}
// update title
String listId = getArguments().getString("listid");
ContentValues values = new ContentValues();
values.put(Lists.NAME, mTitle.getText().toString());
getActivity().getContentResolver().update(Lists.buildListUri(listId), values, null,
null);
// refresh view pager
mListener.onListsChanged();
dismiss();
}
});
return layout;
}
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
try {
mListener = (OnListsChangedListener) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.toString()
+ " must implement OnListsChangedListener");
}
}
@Override
public void onStart() {
super.onStart();
Utils.trackView(getActivity(), "List Manage Dialog");
}
@Override
public void onActivityCreated(Bundle arg0) {
super.onActivityCreated(arg0);
// pre-populate list title
String listId = getArguments().getString("listid");
final Cursor list = getActivity().getContentResolver()
.query(Lists.buildListUri(listId), new String[] {
Lists.NAME
}, null, null, null);
list.moveToFirst();
mTitle.setText(list.getString(0));
list.close();
// do not allow removing last list, disable remove button
Cursor lists = getActivity().getContentResolver().query(Lists.CONTENT_URI,
new String[] {
Lists._ID
}, null, null, null);
if (lists.getCount() == 1) {
mButtonNegative.setEnabled(false);
}
lists.close();
}
/**
* Display a dialog which allows to edit the title of this list or remove
* it.
*/
public static void showListManageDialog(String listId, FragmentManager fm) {
// DialogFragment.show() will take care of adding the fragment
// in a transaction. We also want to remove any currently showing
// dialog, so make our own transaction and take care of that here.
FragmentTransaction ft = fm.beginTransaction();
Fragment prev = fm.findFragmentByTag("listmanagedialog");
if (prev != null) {
ft.remove(prev);
}
ft.addToBackStack(null);
// Create and show the dialog.
DialogFragment newFragment = ListManageDialogFragment.newInstance(listId);
newFragment.show(ft, "listmanagedialog");
}
/**
* Restricts text input to characters and digits preventing any special
* characters.
*/
public static class CharAndDigitInputFilter implements InputFilter {
@Override
public CharSequence filter(CharSequence source, int start, int end, Spanned dest,
int dstart, int dend) {
for (int i = start; i < end; i++) {
if (!(Character.isLetterOrDigit(source.charAt(i))
|| Character.isWhitespace(source.charAt(i)))) {
return "";
}
}
return null;
}
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless.node;
import org.elasticsearch.painless.AnalyzerCaster;
import org.elasticsearch.painless.DefBootstrap;
import org.elasticsearch.painless.Definition;
import org.elasticsearch.painless.Definition.def;
import org.elasticsearch.painless.Globals;
import org.elasticsearch.painless.Locals;
import org.elasticsearch.painless.Location;
import org.elasticsearch.painless.MethodWriter;
import org.elasticsearch.painless.Operation;
import org.objectweb.asm.Label;
import org.objectweb.asm.Type;
import java.util.Objects;
import java.util.Set;
import static org.elasticsearch.painless.WriterConstants.EQUALS;
import static org.elasticsearch.painless.WriterConstants.OBJECTS_TYPE;
/**
* Represents a comparison expression.
*/
public final class EComp extends AExpression {

    /** Comparison operation this node represents (==, ===, !=, !==, >=, >, <=, <). */
    private final Operation operation;
    private AExpression left;
    private AExpression right;

    /** Type both operands are promoted to; set during analyze, used by write. */
    private Class<?> promotedType;

    public EComp(Location location, Operation operation, AExpression left, AExpression right) {
        super(location);

        this.operation = Objects.requireNonNull(operation);
        this.left = Objects.requireNonNull(left);
        this.right = Objects.requireNonNull(right);
    }

    @Override
    void extractVariables(Set<String> variables) {
        left.extractVariables(variables);
        right.extractVariables(variables);
    }

    /** Dispatches to the analyzer matching this node's operation. */
    @Override
    void analyze(Locals locals) {
        if (operation == Operation.EQ) {
            analyzeEq(locals);
        } else if (operation == Operation.EQR) {
            analyzeEqR(locals);
        } else if (operation == Operation.NE) {
            analyzeNE(locals);
        } else if (operation == Operation.NER) {
            analyzeNER(locals);
        } else if (operation == Operation.GTE) {
            analyzeGTE(locals);
        } else if (operation == Operation.GT) {
            analyzeGT(locals);
        } else if (operation == Operation.LTE) {
            analyzeLTE(locals);
        } else if (operation == Operation.LT) {
            analyzeLT(locals);
        } else {
            throw createError(new IllegalStateException("Illegal tree structure."));
        }
    }

    /** Analyzes value equality [==]; constant-folds when both operands are constants. */
    private void analyzeEq(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply equals [==] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        // def operands keep their original types so the runtime can dispatch.
        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.isNull && right.isNull) {
            throw createError(new IllegalArgumentException("Extraneous comparison of null constants."));
        }

        if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
            if (promotedType == boolean.class) {
                constant = (boolean)left.constant == (boolean)right.constant;
            } else if (promotedType == int.class) {
                constant = (int)left.constant == (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant == (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant == (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant == (double)right.constant;
            } else if (!left.isNull) {
                constant = left.constant.equals(right.constant);
            } else if (!right.isNull) {
                // left is the null constant here, so by the equals contract this
                // is always false (non-null value vs. null).
                constant = right.constant.equals(null);
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Analyzes reference equality [===]; constant-folds when possible. */
    private void analyzeEqR(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply reference equals [===] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        left.expected = promotedType;
        right.expected = promotedType;

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.isNull && right.isNull) {
            throw createError(new IllegalArgumentException("Extraneous comparison of null constants."));
        }

        if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
            if (promotedType == boolean.class) {
                constant = (boolean)left.constant == (boolean)right.constant;
            } else if (promotedType == int.class) {
                constant = (int)left.constant == (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant == (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant == (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant == (double)right.constant;
            } else {
                // reference identity on the folded constants
                constant = left.constant == right.constant;
            }
        }

        actual = boolean.class;
    }

    /** Analyzes value inequality [!=]; constant-folds when both operands are constants. */
    private void analyzeNE(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply not equals [!=] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.isNull && right.isNull) {
            throw createError(new IllegalArgumentException("Extraneous comparison of null constants."));
        }

        if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
            if (promotedType == boolean.class) {
                constant = (boolean)left.constant != (boolean)right.constant;
            } else if (promotedType == int.class) {
                constant = (int)left.constant != (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant != (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant != (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant != (double)right.constant;
            } else if (!left.isNull) {
                constant = !left.constant.equals(right.constant);
            } else if (!right.isNull) {
                // left is the null constant here, so this is always true
                // (non-null value != null).
                constant = !right.constant.equals(null);
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Analyzes reference inequality [!==]; constant-folds when possible. */
    private void analyzeNER(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteEquality(left.actual, right.actual);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        left.expected = promotedType;
        right.expected = promotedType;

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.isNull && right.isNull) {
            throw createError(new IllegalArgumentException("Extraneous comparison of null constants."));
        }

        if ((left.constant != null || left.isNull) && (right.constant != null || right.isNull)) {
            if (promotedType == boolean.class) {
                constant = (boolean)left.constant != (boolean)right.constant;
            } else if (promotedType == int.class) {
                constant = (int)left.constant != (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant != (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant != (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant != (double)right.constant;
            } else {
                constant = left.constant != right.constant;
            }
        }

        actual = boolean.class;
    }

    /** Analyzes [>=]; numeric promotion only, constant-folds when possible. */
    private void analyzeGTE(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.constant != null && right.constant != null) {
            if (promotedType == int.class) {
                constant = (int)left.constant >= (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant >= (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant >= (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant >= (double)right.constant;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Analyzes [>]; numeric promotion only, constant-folds when possible. */
    private void analyzeGT(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply greater than [>] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.constant != null && right.constant != null) {
            if (promotedType == int.class) {
                constant = (int)left.constant > (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant > (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant > (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant > (double)right.constant;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Analyzes [<=]; numeric promotion only, constant-folds when possible. */
    private void analyzeLTE(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);

        if (promotedType == null) {
            throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.constant != null && right.constant != null) {
            if (promotedType == int.class) {
                constant = (int)left.constant <= (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant <= (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant <= (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant <= (double)right.constant;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Analyzes [<]; numeric promotion only, constant-folds when possible. */
    private void analyzeLT(Locals variables) {
        left.analyze(variables);
        right.analyze(variables);

        promotedType = AnalyzerCaster.promoteNumeric(left.actual, right.actual, true);

        if (promotedType == null) {
            // Fixed: the message previously said [>=] (copy-paste from analyzeGTE)
            // even though this analyzer handles less than [<].
            throw createError(new ClassCastException("Cannot apply less than [<] to types " +
                "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "]."));
        }

        if (promotedType == def.class) {
            left.expected = left.actual;
            right.expected = right.actual;
        } else {
            left.expected = promotedType;
            right.expected = promotedType;
        }

        left = left.cast(variables);
        right = right.cast(variables);

        if (left.constant != null && right.constant != null) {
            if (promotedType == int.class) {
                constant = (int)left.constant < (int)right.constant;
            } else if (promotedType == long.class) {
                constant = (long)left.constant < (long)right.constant;
            } else if (promotedType == float.class) {
                constant = (float)left.constant < (float)right.constant;
            } else if (promotedType == double.class) {
                constant = (double)left.constant < (double)right.constant;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        actual = boolean.class;
    }

    /** Emits bytecode for the comparison; result is a boolean on the stack. */
    @Override
    void write(MethodWriter writer, Globals globals) {
        writer.writeDebugInfo(location);

        left.write(writer, globals);

        // A null right operand is handled via ifNull/ifNonNull and is not pushed.
        if (!right.isNull) {
            right.write(writer, globals);
        }

        Label jump = new Label();
        Label end = new Label();

        boolean eq = (operation == Operation.EQ || operation == Operation.EQR);
        boolean ne = (operation == Operation.NE || operation == Operation.NER);
        boolean lt = operation == Operation.LT;
        boolean lte = operation == Operation.LTE;
        boolean gt = operation == Operation.GT;
        boolean gte = operation == Operation.GTE;

        // When the comparison result comes directly from an invoked method
        // (def call or Objects.equals) no branch-and-push sequence is needed.
        boolean writejump = true;

        Type type = MethodWriter.getType(promotedType);

        if (promotedType == void.class || promotedType == byte.class || promotedType == short.class || promotedType == char.class) {
            throw createError(new IllegalStateException("Illegal tree structure."));
        } else if (promotedType == boolean.class) {
            if (eq) writer.ifCmp(type, MethodWriter.EQ, jump);
            else if (ne) writer.ifCmp(type, MethodWriter.NE, jump);
            else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        } else if (promotedType == int.class || promotedType == long.class || promotedType == float.class || promotedType == double.class) {
            if (eq) writer.ifCmp(type, MethodWriter.EQ, jump);
            else if (ne) writer.ifCmp(type, MethodWriter.NE, jump);
            else if (lt) writer.ifCmp(type, MethodWriter.LT, jump);
            else if (lte) writer.ifCmp(type, MethodWriter.LE, jump);
            else if (gt) writer.ifCmp(type, MethodWriter.GT, jump);
            else if (gte) writer.ifCmp(type, MethodWriter.GE, jump);
            else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        } else if (promotedType == def.class) {
            Type booleanType = Type.getType(boolean.class);
            Type descriptor = Type.getMethodType(booleanType, MethodWriter.getType(left.actual), MethodWriter.getType(right.actual));

            if (eq) {
                if (right.isNull) {
                    writer.ifNull(jump);
                } else if (!left.isNull && operation == Operation.EQ) {
                    // value equality on def types goes through the runtime
                    writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL);
                    writejump = false;
                } else {
                    writer.ifCmp(type, MethodWriter.EQ, jump);
                }
            } else if (ne) {
                if (right.isNull) {
                    writer.ifNonNull(jump);
                } else if (!left.isNull && operation == Operation.NE) {
                    writer.invokeDefCall("eq", descriptor, DefBootstrap.BINARY_OPERATOR, DefBootstrap.OPERATOR_ALLOWS_NULL);
                    writer.ifZCmp(MethodWriter.EQ, jump);
                } else {
                    writer.ifCmp(type, MethodWriter.NE, jump);
                }
            } else if (lt) {
                writer.invokeDefCall("lt", descriptor, DefBootstrap.BINARY_OPERATOR, 0);
                writejump = false;
            } else if (lte) {
                writer.invokeDefCall("lte", descriptor, DefBootstrap.BINARY_OPERATOR, 0);
                writejump = false;
            } else if (gt) {
                writer.invokeDefCall("gt", descriptor, DefBootstrap.BINARY_OPERATOR, 0);
                writejump = false;
            } else if (gte) {
                writer.invokeDefCall("gte", descriptor, DefBootstrap.BINARY_OPERATOR, 0);
                writejump = false;
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        } else {
            if (eq) {
                if (right.isNull) {
                    writer.ifNull(jump);
                } else if (operation == Operation.EQ) {
                    // value equality on objects uses Objects.equals
                    writer.invokeStatic(OBJECTS_TYPE, EQUALS);
                    writejump = false;
                } else {
                    writer.ifCmp(type, MethodWriter.EQ, jump);
                }
            } else if (ne) {
                if (right.isNull) {
                    writer.ifNonNull(jump);
                } else if (operation == Operation.NE) {
                    writer.invokeStatic(OBJECTS_TYPE, EQUALS);
                    writer.ifZCmp(MethodWriter.EQ, jump);
                } else {
                    writer.ifCmp(type, MethodWriter.NE, jump);
                }
            } else {
                throw createError(new IllegalStateException("Illegal tree structure."));
            }
        }

        // Branchy comparisons materialize the boolean: false, jump target pushes true.
        if (writejump) {
            writer.push(false);
            writer.goTo(end);
            writer.mark(jump);
            writer.push(true);
            writer.mark(end);
        }
    }

    @Override
    public String toString() {
        return singleLineToString(left, operation.symbol, right);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.utils;
import java.lang.reflect.Array;
import java.util.NoSuchElementException;
/**
* A priority linked list implementation
* <p>
* It implements this by maintaining an individual LinkedBlockingDeque for each priority level.
*/
public class PriorityLinkedListImpl<T> implements PriorityLinkedList<T> {

   // One linked list per priority level; the array index is the priority.
   protected LinkedListImpl<T>[] levels;

   // Total element count across all levels.
   private int size;

   // Incremented whenever a different priority becomes current; iterators use
   // it to detect that they must restart from the highest priority.
   private int lastReset;

   // Highest priority level that may still hold elements (-1 when unknown/empty).
   private int highestPriority = -1;

   // Priority used by the most recent add; tracks priority changes for lastReset.
   private int lastPriority = -1;

   public PriorityLinkedListImpl(final int priorities) {
      // Unchecked generic array creation; every slot is filled with a
      // LinkedListImpl<T> immediately below, so the cast is safe in practice.
      levels = (LinkedListImpl<T>[]) Array.newInstance(LinkedListImpl.class, priorities);

      for (int i = 0; i < priorities; i++) {
         levels[i] = new LinkedListImpl<>();
      }
   }

   // Records that an element was added at the given priority: bumps lastReset
   // when the priority changed, and raises highestPriority if needed.
   private void checkHighest(final int priority) {
      if (lastPriority != priority || priority > highestPriority) {
         lastPriority = priority;
         // wrap around instead of overflowing
         if (lastReset == Integer.MAX_VALUE) {
            lastReset = 0;
         } else {
            lastReset++;
         }
      }

      if (priority > highestPriority) {
         highestPriority = priority;
      }
   }

   @Override
   public void addHead(final T t, final int priority) {
      checkHighest(priority);

      levels[priority].addHead(t);

      size++;
   }

   @Override
   public void addTail(final T t, final int priority) {
      checkHighest(priority);

      levels[priority].addTail(t);

      size++;
   }

   // Removes and returns the first element of the highest non-empty priority
   // level, or null when the structure is empty.
   @Override
   public T poll() {
      T t = null;

      // We are just using a simple prioritization algorithm:
      // Highest priority refs always get returned first.
      // This could cause starvation of lower priority refs.

      // TODO - A better prioritization algorithm

      for (int i = highestPriority; i >= 0; i--) {
         LinkedListImpl<T> ll = levels[i];

         if (ll.size() != 0) {
            t = ll.poll();

            if (t != null) {
               size--;

               // the level we polled from is now empty; drop the cached max
               // so the next poll starts one level lower
               if (ll.size() == 0) {
                  if (highestPriority == i) {
                     highestPriority--;
                  }
               }
            }

            break;
         }
      }

      return t;
   }

   @Override
   public void clear() {
      for (LinkedListImpl<T> list : levels) {
         list.clear();
      }

      size = 0;
   }

   @Override
   public int size() {
      return size;
   }

   @Override
   public boolean isEmpty() {
      return size == 0;
   }

   @Override
   public LinkedListIterator<T> iterator() {
      return new PriorityLinkedListIterator();
   }

   // Iterates from the highest priority level down to level 0, lazily creating
   // and caching one sub-iterator per level.
   private class PriorityLinkedListIterator implements LinkedListIterator<T> {

      // Current level being iterated (counts down).
      private int index;

      // Lazily-created iterator per level, reused across hasNext() calls.
      private final LinkedListIterator<T>[] cachedIters = new LinkedListIterator[levels.length];

      // Sub-iterator that produced the last element; target of next()/remove()/repeat().
      private LinkedListIterator<T> lastIter;

      // Snapshot of lastReset; a mismatch means new adds happened and the
      // iterator must restart from highestPriority (see checkReset()).
      private int resetCount = lastReset;

      volatile boolean closed = false;

      PriorityLinkedListIterator() {
         index = levels.length - 1;
      }

      // Safety net: release cached sub-iterators if the caller forgot to close().
      @Override
      protected void finalize() {
         close();
      }

      @Override
      public void repeat() {
         if (lastIter == null) {
            throw new NoSuchElementException();
         }

         lastIter.repeat();
      }

      @Override
      public void close() {
         if (!closed) {
            closed = true;

            lastIter = null;

            for (LinkedListIterator<T> iter : cachedIters) {
               if (iter != null) {
                  iter.close();
               }
            }
         }
      }

      // Restart from the highest priority when elements were added since the
      // last check, so newly-added higher-priority elements are not missed.
      private void checkReset() {
         if (lastReset != resetCount) {
            index = highestPriority;

            resetCount = lastReset;
         }
      }

      @Override
      public boolean hasNext() {
         checkReset();

         while (index >= 0) {
            lastIter = cachedIters[index];

            if (lastIter == null) {
               lastIter = cachedIters[index] = levels[index].iterator();
            }

            boolean b = lastIter.hasNext();

            if (b) {
               return true;
            }

            index--;

            // exhausted all levels: rewind for a possible later pass and stop
            if (index < 0) {
               index = levels.length - 1;

               break;
            }
         }
         return false;
      }

      @Override
      public T next() {
         if (lastIter == null) {
            throw new NoSuchElementException();
         }

         return lastIter.next();
      }

      @Override
      public void remove() {
         if (lastIter == null) {
            throw new NoSuchElementException();
         }

         lastIter.remove();

         // This next statement would be the equivalent of:
         // if (index == highestPriority && levels[index].size() == 0)
         // However we have to keep checking all the previous levels
         // otherwise we would cache a max that will not exist
         // what would make us eventually having hasNext() returning false
         // as a bug
         // Part of the fix for HORNETQ-705
         // NOTE(review): the loop condition reads levels[index], not levels[i],
         // so it never inspects the lower levels it walks over and ends with
         // highestPriority == 0 whenever levels[index] is empty — confirm
         // against upstream whether levels[i] was intended.
         for (int i = index; i >= 0 && levels[index].size() == 0; i--) {
            highestPriority = i;
         }

         size--;
      }
   }
}
| |
/*
* Copyright (C) 2012 Square, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.squareup.okhttp;
import com.squareup.okhttp.internal.Util;
import com.squareup.okhttp.internal.http.HttpAuthenticator;
import com.squareup.okhttp.internal.http.HttpURLConnectionImpl;
import com.squareup.okhttp.internal.http.HttpsURLConnectionImpl;
import com.squareup.okhttp.internal.http.OkResponseCacheAdapter;
import com.squareup.okhttp.internal.tls.OkHostnameVerifier;
import java.net.CookieHandler;
import java.net.HttpURLConnection;
import java.net.Proxy;
import java.net.ProxySelector;
import java.net.ResponseCache;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLStreamHandler;
import java.net.URLStreamHandlerFactory;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.TimeUnit;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
/**
* Configures and creates HTTP connections.
*/
/**
 * Configures and creates HTTP connections.
 *
 * <p>Fields left unset fall back to the system-wide defaults at the moment a
 * connection is opened; see {@link #copyWithDefaults}.
 */
public final class OkHttpClient implements URLStreamHandlerFactory {
  private static final List<String> DEFAULT_TRANSPORTS = Util.immutableList(Arrays.asList("spdy/3", "http/1.1"));

  private final RouteDatabase routeDatabase;
  private final Dispatcher dispatcher;
  private Proxy proxy;
  private List<String> transports;
  private ProxySelector proxySelector;
  private CookieHandler cookieHandler;
  private ResponseCache responseCache;
  private SSLSocketFactory sslSocketFactory;
  private HostnameVerifier hostnameVerifier;
  private OkAuthenticator authenticator;
  private ConnectionPool connectionPool;
  private boolean followProtocolRedirects = true;
  private int connectTimeout;
  private int readTimeout;

  public OkHttpClient() {
    routeDatabase = new RouteDatabase();
    dispatcher = new Dispatcher();
  }

  /** Shallow copy constructor: the copy shares the route database and dispatcher. */
  private OkHttpClient(OkHttpClient copyFrom) {
    routeDatabase = copyFrom.routeDatabase;
    dispatcher = copyFrom.dispatcher;
  }

  /**
   * Validates a timeout and converts it to an {@code int} millisecond count.
   * Shared by {@link #setConnectTimeout} and {@link #setReadTimeout}, which
   * previously duplicated this validation verbatim.
   *
   * @throws IllegalArgumentException if the timeout is negative, the unit is
   *     null, or the value overflows an int when expressed in milliseconds
   */
  private static int checkTimeout(long timeout, TimeUnit unit) {
    if (timeout < 0) {
      throw new IllegalArgumentException("timeout < 0");
    }
    if (unit == null) {
      throw new IllegalArgumentException("unit == null");
    }
    long millis = unit.toMillis(timeout);
    if (millis > Integer.MAX_VALUE) {
      throw new IllegalArgumentException("Timeout too large.");
    }
    return (int) millis;
  }

  /**
   * Sets the default connect timeout for new connections. A value of 0 means no timeout.
   *
   * @see URLConnection#setConnectTimeout(int)
   */
  public void setConnectTimeout(long timeout, TimeUnit unit) {
    connectTimeout = checkTimeout(timeout, unit);
  }

  /** Default connect timeout (in milliseconds). */
  public int getConnectTimeout() {
    return connectTimeout;
  }

  /**
   * Sets the default read timeout for new connections. A value of 0 means no timeout.
   *
   * @see URLConnection#setReadTimeout(int)
   */
  public void setReadTimeout(long timeout, TimeUnit unit) {
    readTimeout = checkTimeout(timeout, unit);
  }

  /** Default read timeout (in milliseconds). */
  public int getReadTimeout() {
    return readTimeout;
  }

  public Proxy getProxy() {
    return proxy;
  }

  /**
   * Sets the HTTP proxy that will be used by connections created by this
   * client. This takes precedence over {@link #setProxySelector}, which is
   * only honored when this proxy is null (which it is by default). To disable
   * proxy use completely, call {@code setProxy(Proxy.NO_PROXY)}.
   */
  public OkHttpClient setProxy(Proxy proxy) {
    this.proxy = proxy;
    return this;
  }

  public ProxySelector getProxySelector() {
    return proxySelector;
  }

  /**
   * Sets the proxy selection policy to be used if no {@link #setProxy proxy}
   * is specified explicitly. The proxy selector may return multiple proxies;
   * in that case they will be tried in sequence until a successful connection
   * is established.
   *
   * <p>If unset, the {@link ProxySelector#getDefault() system-wide default}
   * proxy selector will be used.
   */
  public OkHttpClient setProxySelector(ProxySelector proxySelector) {
    this.proxySelector = proxySelector;
    return this;
  }

  public CookieHandler getCookieHandler() {
    return cookieHandler;
  }

  /**
   * Sets the cookie handler to be used to read outgoing cookies and write
   * incoming cookies.
   *
   * <p>If unset, the {@link CookieHandler#getDefault() system-wide default}
   * cookie handler will be used.
   */
  public OkHttpClient setCookieHandler(CookieHandler cookieHandler) {
    this.cookieHandler = cookieHandler;
    return this;
  }

  public ResponseCache getResponseCache() {
    return responseCache;
  }

  /**
   * Sets the response cache to be used to read and write cached responses.
   *
   * <p>If unset, the {@link ResponseCache#getDefault() system-wide default}
   * response cache will be used.
   */
  public OkHttpClient setResponseCache(ResponseCache responseCache) {
    this.responseCache = responseCache;
    return this;
  }

  /**
   * Returns the response cache as an {@link OkResponseCache}, adapting a plain
   * {@link ResponseCache} when necessary, or null if no cache is set.
   */
  public OkResponseCache getOkResponseCache() {
    if (responseCache instanceof HttpResponseCache) {
      return ((HttpResponseCache) responseCache).okResponseCache;
    } else if (responseCache != null) {
      return new OkResponseCacheAdapter(responseCache);
    } else {
      return null;
    }
  }

  public SSLSocketFactory getSslSocketFactory() {
    return sslSocketFactory;
  }

  /**
   * Sets the socket factory used to secure HTTPS connections.
   *
   * <p>If unset, the {@link HttpsURLConnection#getDefaultSSLSocketFactory()
   * system-wide default} SSL socket factory will be used.
   */
  public OkHttpClient setSslSocketFactory(SSLSocketFactory sslSocketFactory) {
    this.sslSocketFactory = sslSocketFactory;
    return this;
  }

  public HostnameVerifier getHostnameVerifier() {
    return hostnameVerifier;
  }

  /**
   * Sets the verifier used to confirm that response certificates apply to
   * requested hostnames for HTTPS connections.
   *
   * <p>If unset, the {@link HttpsURLConnection#getDefaultHostnameVerifier()
   * system-wide default} hostname verifier will be used.
   */
  public OkHttpClient setHostnameVerifier(HostnameVerifier hostnameVerifier) {
    this.hostnameVerifier = hostnameVerifier;
    return this;
  }

  public OkAuthenticator getAuthenticator() {
    return authenticator;
  }

  /**
   * Sets the authenticator used to respond to challenges from the remote web
   * server or proxy server.
   *
   * <p>If unset, the {@link java.net.Authenticator#setDefault system-wide default}
   * authenticator will be used.
   */
  public OkHttpClient setAuthenticator(OkAuthenticator authenticator) {
    this.authenticator = authenticator;
    return this;
  }

  public ConnectionPool getConnectionPool() {
    return connectionPool;
  }

  /**
   * Sets the connection pool used to recycle HTTP and HTTPS connections.
   *
   * <p>If unset, the {@link ConnectionPool#getDefault() system-wide
   * default} connection pool will be used.
   */
  public OkHttpClient setConnectionPool(ConnectionPool connectionPool) {
    this.connectionPool = connectionPool;
    return this;
  }

  public boolean getFollowProtocolRedirects() {
    return followProtocolRedirects;
  }

  /**
   * Configure this client to follow redirects from HTTPS to HTTP and from HTTP
   * to HTTPS.
   *
   * <p>If unset, protocol redirects will be followed. This is different than
   * the built-in {@code HttpURLConnection}'s default.
   */
  public OkHttpClient setFollowProtocolRedirects(boolean followProtocolRedirects) {
    this.followProtocolRedirects = followProtocolRedirects;
    return this;
  }

  public RouteDatabase getRoutesDatabase() {
    return routeDatabase;
  }

  public List<String> getTransports() {
    return transports;
  }

  /**
   * Configure the transports used by this client to communicate with remote
   * servers. By default this client will prefer the most efficient transport
   * available, falling back to more ubiquitous transports. Applications should
   * only call this method to avoid specific compatibility problems, such as web
   * servers that behave incorrectly when SPDY is enabled.
   *
   * <p>The following transports are currently supported:
   * <ul>
   *   <li><a href="http://www.w3.org/Protocols/rfc2616/rfc2616.html">http/1.1</a>
   *   <li><a href="http://www.chromium.org/spdy/spdy-protocol/spdy-protocol-draft3">spdy/3</a>
   * </ul>
   *
   * <p><strong>This is an evolving set.</strong> Future releases may drop
   * support for transitional transports (like spdy/3), in favor of their
   * successors (spdy/4 or http/2.0). The http/1.1 transport will never be
   * dropped.
   *
   * <p>If multiple protocols are specified, <a
   * href="https://technotes.googlecode.com/git/nextprotoneg.html">NPN</a> will
   * be used to negotiate a transport. Future releases may use another mechanism
   * (such as <a href="http://tools.ietf.org/html/draft-friedl-tls-applayerprotoneg-02">ALPN</a>)
   * to negotiate a transport.
   *
   * @param transports the transports to use, in order of preference. The list
   *     must contain "http/1.1". It must not contain null.
   */
  public OkHttpClient setTransports(List<String> transports) {
    // Snapshot the caller's list so later external mutation can't bypass the checks below.
    transports = Util.immutableList(transports);
    if (!transports.contains("http/1.1")) {
      throw new IllegalArgumentException("transports doesn't contain http/1.1: " + transports);
    }
    if (transports.contains(null)) {
      throw new IllegalArgumentException("transports must not contain null");
    }
    if (transports.contains("")) {
      throw new IllegalArgumentException("transports contains an empty string");
    }
    this.transports = transports;
    return this;
  }

  /**
   * Schedules {@code request} to be executed.
   */
  /* OkHttp 2.0: public */ void enqueue(Request request, Response.Receiver responseReceiver) {
    // Create the HttpURLConnection immediately so the enqueued job gets the current settings of
    // this client. Otherwise changes to this client (socket factory, redirect policy, etc.) may
    // incorrectly be reflected in the request when it is dispatched later.
    dispatcher.enqueue(open(request.url()), request, responseReceiver);
  }

  /**
   * Cancels all scheduled tasks tagged with {@code tag}. Requests that are already
   * in flight might not be canceled.
   */
  public void cancel(Object tag) {
    dispatcher.cancel(tag);
  }

  /** Opens a connection to {@code url} using this client's configured proxy. */
  public HttpURLConnection open(URL url) {
    return open(url, proxy);
  }

  /**
   * Opens a connection to {@code url} via {@code proxy}. The connection captures
   * a defaults-resolved copy of this client so later configuration changes do
   * not affect it.
   */
  HttpURLConnection open(URL url, Proxy proxy) {
    String protocol = url.getProtocol();
    OkHttpClient copy = copyWithDefaults();
    copy.proxy = proxy;

    if (protocol.equals("http")) {
      return new HttpURLConnectionImpl(url, copy);
    }
    if (protocol.equals("https")) {
      return new HttpsURLConnectionImpl(url, copy);
    }
    throw new IllegalArgumentException("Unexpected protocol: " + protocol);
  }

  /**
   * Returns a shallow copy of this OkHttpClient that uses the system-wide default for
   * each field that hasn't been explicitly configured.
   */
  private OkHttpClient copyWithDefaults() {
    OkHttpClient result = new OkHttpClient(this);
    result.proxy = proxy;
    result.proxySelector = proxySelector != null ? proxySelector : ProxySelector.getDefault();
    result.cookieHandler = cookieHandler != null ? cookieHandler : CookieHandler.getDefault();
    result.responseCache = responseCache != null ? responseCache : ResponseCache.getDefault();
    result.sslSocketFactory = sslSocketFactory != null ? sslSocketFactory : HttpsURLConnection.getDefaultSSLSocketFactory();
    result.hostnameVerifier = hostnameVerifier != null ? hostnameVerifier : OkHostnameVerifier.INSTANCE;
    result.authenticator = authenticator != null ? authenticator : HttpAuthenticator.SYSTEM_DEFAULT;
    result.connectionPool = connectionPool != null ? connectionPool : ConnectionPool.getDefault();
    result.followProtocolRedirects = followProtocolRedirects;
    result.transports = transports != null ? transports : DEFAULT_TRANSPORTS;
    result.connectTimeout = connectTimeout;
    result.readTimeout = readTimeout;
    return result;
  }

  /**
   * Creates a URLStreamHandler as a {@link URL#setURLStreamHandlerFactory}.
   *
   * <p>This code configures OkHttp to handle all HTTP and HTTPS connections
   * created with {@link URL#openConnection()}: <pre>   {@code
   *
   *   OkHttpClient okHttpClient = new OkHttpClient();
   *   URL.setURLStreamHandlerFactory(okHttpClient);
   * }</pre>
   */
  public URLStreamHandler createURLStreamHandler(final String protocol) {
    // Only claim the protocols this client can actually serve; returning null
    // lets the JDK fall back to its built-in handlers for everything else.
    if (!protocol.equals("http") && !protocol.equals("https")) {
      return null;
    }

    return new URLStreamHandler() {
      @Override protected URLConnection openConnection(URL url) {
        return open(url);
      }

      @Override protected URLConnection openConnection(URL url, Proxy proxy) {
        return open(url, proxy);
      }

      @Override protected int getDefaultPort() {
        if (protocol.equals("http")) {
          return 80;
        }
        if (protocol.equals("https")) {
          return 443;
        }
        throw new AssertionError();
      }
    };
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.test.rest.yaml;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Stream;
import com.carrotsearch.randomizedtesting.RandomizedTest;
import org.apache.http.HttpHost;
import org.elasticsearch.Version;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.ResponseException;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.test.rest.ESRestTestCase;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi;
import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSection;
import org.elasticsearch.test.rest.yaml.section.ClientYamlTestSuite;
import org.elasticsearch.test.rest.yaml.section.DoSection;
import org.elasticsearch.test.rest.yaml.section.ExecutableSection;
import org.junit.AfterClass;
import org.junit.Before;
/**
* Runs a suite of yaml tests shared with all the official Elasticsearch clients against against an elasticsearch cluster.
*/
public abstract class ESClientYamlSuiteTestCase extends ESRestTestCase {
    /**
     * Property that allows to control which REST tests get run. Supports comma separated list of tests
     * or directories that contain tests e.g. -Dtests.rest.suite=index,get,create/10_with_id
     */
    public static final String REST_TESTS_SUITE = "tests.rest.suite";
    /**
     * Property that allows to blacklist some of the REST tests based on a comma separated list of globs
     * e.g. -Dtests.rest.blacklist=get/10_basic/*
     */
    public static final String REST_TESTS_BLACKLIST = "tests.rest.blacklist";
    /**
     * Property that allows to control whether spec validation is enabled or not (default true).
     */
    private static final String REST_TESTS_VALIDATE_SPEC = "tests.rest.validate_spec";

    private static final String TESTS_PATH = "/rest-api-spec/test";
    private static final String SPEC_PATH = "/rest-api-spec/api";

    /**
     * This separator pattern matches ',' unless it is preceded by a '\'.
     * This allows us to support ',' within paths when it is escaped with a slash.
     *
     * For example, the path string "/a/b/c\,d/e/f,/foo/bar,/baz" is separated to "/a/b/c\,d/e/f", "/foo/bar" and "/baz".
     *
     * For reference, this regular expression feature is known as zero-width negative look-behind.
     */
    private static final String PATHS_SEPARATOR = "(?<!\\\\),";

    // Static state shared by all test methods in a suite run: lazily initialized on
    // the first call to initAndResetContext() and torn down in clearStatic().
    private static List<BlacklistedPathPatternMatcher> blacklistPathMatchers;
    private static ClientYamlTestExecutionContext restTestExecutionContext;
    private static ClientYamlTestExecutionContext adminExecutionContext;

    private final ClientYamlTestCandidate testCandidate;

    protected ESClientYamlSuiteTestCase(ClientYamlTestCandidate testCandidate) {
        this.testCandidate = testCandidate;
    }

    /**
     * Lazily initializes the shared yaml client/execution contexts on the first run,
     * then clears both contexts so each test starts with an empty stash.
     */
    @Before
    public void initAndResetContext() throws Exception {
        if (restTestExecutionContext == null) {
            assert adminExecutionContext == null;
            assert blacklistPathMatchers == null;
            ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load(SPEC_PATH);
            validateSpec(restSpec);
            List<HttpHost> hosts = getClusterHosts();
            RestClient restClient = client();
            Tuple<Version, Map<HttpHost, Version>> versionMapTuple = readVersionsFromInfo(restClient, hosts.size());
            Version esVersion;
            try {
                Tuple<Version, Version> versionVersionTuple = readVersionsFromCatNodes(restClient);
                esVersion = versionVersionTuple.v1();
                Version masterVersion = versionVersionTuple.v2();
                logger.info("initializing yaml client, minimum es version: [{}] master version: [{}] hosts: {}",
                        esVersion, masterVersion, hosts);
            } catch (ResponseException ex) {
                // _cat/nodes may be forbidden (e.g. by a security plugin); fall back to the
                // versions already gathered from the root endpoint.
                if (ex.getResponse().getStatusLine().getStatusCode() == 403) {
                    logger.warn("Fallback to simple info '/' request, _cat/nodes is not authorized");
                    esVersion = versionMapTuple.v1();
                    logger.info("initializing yaml client, minimum es version: [{}] hosts: {}", esVersion, hosts);
                } else {
                    throw ex;
                }
            }
            ClientYamlTestClient clientYamlTestClient =
                new ClientYamlTestClient(restSpec, restClient, hosts, esVersion, versionMapTuple.v2());
            restTestExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, randomizeContentType());
            adminExecutionContext = new ClientYamlTestExecutionContext(clientYamlTestClient, false);
            String[] blacklist = resolvePathsProperty(REST_TESTS_BLACKLIST, null);
            blacklistPathMatchers = new ArrayList<>();
            for (String entry : blacklist) {
                blacklistPathMatchers.add(new BlacklistedPathPatternMatcher(entry));
            }
        }
        assert restTestExecutionContext != null;
        assert adminExecutionContext != null;
        assert blacklistPathMatchers != null;

        // admin context must be available for @After always, regardless of whether the test was blacklisted
        adminExecutionContext.clear();

        restTestExecutionContext.clear();
    }

    @Override
    protected void afterIfFailed(List<Throwable> errors) {
        // Dump the stash on failure. Instead of dumping it in true json we escape `\n`s so stack traces are easier to read
        logger.info("Stash dump on failure [{}]",
                XContentHelper.toString(restTestExecutionContext.stash()).replace("\\n", "\n").replace("\\r", "\r").replace("\\t", "\t"));
        super.afterIfFailed(errors);
    }

    /**
     * Builds the parameterized-test arguments: one {@link ClientYamlTestCandidate}
     * per test section found under the configured suite paths.
     */
    public static Iterable<Object[]> createParameters() throws Exception {
        String[] paths = resolvePathsProperty(REST_TESTS_SUITE, ""); // default to all tests under the test root
        List<Object[]> tests = new ArrayList<>();
        Map<String, Set<Path>> yamlSuites = loadYamlSuites(paths);
        // yaml suites are grouped by directory (effectively by api)
        for (String api : yamlSuites.keySet()) {
            List<Path> yamlFiles = new ArrayList<>(yamlSuites.get(api));
            for (Path yamlFile : yamlFiles) {
                ClientYamlTestSuite restTestSuite = ClientYamlTestSuite.parse(api, yamlFile);
                for (ClientYamlTestSection testSection : restTestSuite.getTestSections()) {
                    tests.add(new Object[]{ new ClientYamlTestCandidate(restTestSuite, testSection) });
                }
            }
        }
        //sort the candidates so they will always be in the same order before being shuffled, for repeatability
        Collections.sort(tests,
                (o1, o2) -> ((ClientYamlTestCandidate)o1[0]).getTestPath().compareTo(((ClientYamlTestCandidate)o2[0]).getTestPath()));
        return tests;
    }

    /** Find all yaml suites that match the given list of paths from the root test path. */
    // pkg private for tests
    static Map<String, Set<Path>> loadYamlSuites(String... paths) throws Exception {
        Map<String, Set<Path>> files = new HashMap<>();
        Path root = PathUtils.get(ESClientYamlSuiteTestCase.class.getResource(TESTS_PATH).toURI());
        for (String strPath : paths) {
            Path path = root.resolve(strPath);
            if (Files.isDirectory(path)) {
                // Files.walk returns a lazily-populated stream backed by open directory
                // handles; it must be closed or file descriptors leak.
                try (Stream<Path> walk = Files.walk(path)) {
                    walk.forEach(file -> {
                        if (file.toString().endsWith(".yaml")) {
                            addYamlSuite(root, file, files);
                        }
                    });
                }
            } else {
                path = root.resolve(strPath + ".yaml");
                assert Files.exists(path);
                addYamlSuite(root, path, files);
            }
        }
        return files;
    }

    /** Add a single suite file to the set of suites. */
    private static void addYamlSuite(Path root, Path file, Map<String, Set<Path>> files) {
        String groupName = root.relativize(file.getParent()).toString();
        Set<Path> filesSet = files.get(groupName);
        if (filesSet == null) {
            filesSet = new HashSet<>();
            files.put(groupName, filesSet);
        }
        filesSet.add(file);
    }

    /**
     * Reads a comma-separated system property, honoring the escaped-comma
     * convention of {@link #PATHS_SEPARATOR}. Returns the default (or an empty
     * array when the default is null) if the property is unset or empty.
     */
    private static String[] resolvePathsProperty(String propertyName, String defaultValue) {
        String property = System.getProperty(propertyName);
        if (!Strings.hasLength(property)) {
            return defaultValue == null ? Strings.EMPTY_ARRAY : new String[]{defaultValue};
        } else {
            return property.split(PATHS_SEPARATOR);
        }
    }

    protected ClientYamlTestExecutionContext getAdminExecutionContext() {
        return adminExecutionContext;
    }

    /**
     * Sanity-checks the REST spec: every API that supports GET with a body must
     * also support POST, since not all clients can send a body with GET.
     * Disabled by setting {@code tests.rest.validate_spec=false}.
     */
    private static void validateSpec(ClientYamlSuiteRestSpec restSpec) {
        boolean validateSpec = RandomizedTest.systemPropertyAsBoolean(REST_TESTS_VALIDATE_SPEC, true);
        if (validateSpec) {
            StringBuilder errorMessage = new StringBuilder();
            for (ClientYamlSuiteRestApi restApi : restSpec.getApis()) {
                if (restApi.getMethods().contains("GET") && restApi.isBodySupported()) {
                    if (!restApi.getMethods().contains("POST")) {
                        errorMessage.append("\n- ").append(restApi.getName()).append(" supports GET with a body but doesn't support POST");
                    }
                }
            }
            if (errorMessage.length() > 0) {
                throw new IllegalArgumentException(errorMessage.toString());
            }
        }
    }

    @AfterClass
    public static void clearStatic() {
        blacklistPathMatchers = null;
        restTestExecutionContext = null;
        adminExecutionContext = null;
    }

    /**
     * Returns (minimum node version, master node version) by parsing
     * {@code _cat/nodes}. May throw {@link ResponseException} when the API is
     * not authorized; callers are expected to fall back to {@link #readVersionsFromInfo}.
     */
    private static Tuple<Version, Version> readVersionsFromCatNodes(RestClient restClient) throws IOException {
        // we simply go to the _cat/nodes API and parse all versions in the cluster
        Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master"));
        ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response, Version.CURRENT);
        String nodesCatResponse = restTestResponse.getBodyAsString();
        String[] split = nodesCatResponse.split("\n");
        Version version = null;
        Version masterVersion = null;
        for (String perNode : split) {
            final String[] versionAndMaster = perNode.split("\\s+");
            assert versionAndMaster.length == 2 : "invalid line: " + perNode + " length: " + versionAndMaster.length;
            final Version currentVersion = Version.fromString(versionAndMaster[0]);
            final boolean master = versionAndMaster[1].trim().equals("*");
            if (master) {
                assert masterVersion == null;
                masterVersion = currentVersion;
            }
            // keep the minimum version seen across the cluster
            if (version == null) {
                version = currentVersion;
            } else if (version.onOrAfter(currentVersion)) {
                version = currentVersion;
            }
        }
        return new Tuple<>(version, masterVersion);
    }

    /**
     * Returns (minimum node version, host-to-version map) by hitting the root
     * endpoint once per host, relying on the client's round-robin to reach every node.
     */
    private static Tuple<Version, Map<HttpHost, Version>> readVersionsFromInfo(RestClient restClient, int numHosts) throws IOException {
        Version version = null;
        Map<HttpHost, Version> hostVersionMap = new HashMap<>();
        for (int i = 0; i < numHosts; i++) {
            //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster
            Response response = restClient.performRequest("GET", "/");
            ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response, Version.CURRENT);
            Object latestVersion = restTestResponse.evaluate("version.number");
            if (latestVersion == null) {
                throw new RuntimeException("elasticsearch version not found in the response");
            }
            final Version currentVersion = Version.fromString(latestVersion.toString());
            // keep the minimum version seen across the cluster
            if (version == null) {
                version = currentVersion;
            } else if (version.onOrAfter(currentVersion)) {
                version = currentVersion;
            }
            hostVersionMap.put(response.getHost(), currentVersion);
        }
        return new Tuple<>(version, Collections.unmodifiableMap(hostVersionMap));
    }

    /**
     * Runs the yaml test candidate: honors blacklist and skip sections, executes
     * setup sections, then the test sections, and always runs teardown sections.
     */
    public void test() throws IOException {
        //skip test if it matches one of the blacklist globs
        for (BlacklistedPathPatternMatcher blacklistedPathMatcher : blacklistPathMatchers) {
            String testPath = testCandidate.getSuitePath() + "/" + testCandidate.getTestSection().getName();
            assumeFalse("[" + testCandidate.getTestPath() + "] skipped, reason: blacklisted", blacklistedPathMatcher
                    .isSuffixMatch(testPath));
        }

        //skip test if the whole suite (yaml file) is disabled
        assumeFalse(testCandidate.getSetupSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()),
                testCandidate.getSetupSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if the whole suite (yaml file) is disabled
        assumeFalse(testCandidate.getTeardownSection().getSkipSection().getSkipMessage(testCandidate.getSuitePath()),
                testCandidate.getTeardownSection().getSkipSection().skip(restTestExecutionContext.esVersion()));
        //skip test if test section is disabled
        assumeFalse(testCandidate.getTestSection().getSkipSection().getSkipMessage(testCandidate.getTestPath()),
                testCandidate.getTestSection().getSkipSection().skip(restTestExecutionContext.esVersion()));

        //let's check that there is something to run, otherwise there might be a problem with the test section
        if (testCandidate.getTestSection().getExecutableSections().size() == 0) {
            throw new IllegalArgumentException("No executable sections loaded for [" + testCandidate.getTestPath() + "]");
        }

        if (!testCandidate.getSetupSection().isEmpty()) {
            logger.debug("start setup test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getSetupSection().getDoSections()) {
                executeSection(doSection);
            }
            logger.debug("end setup test [{}]", testCandidate.getTestPath());
        }

        restTestExecutionContext.clear();

        try {
            for (ExecutableSection executableSection : testCandidate.getTestSection().getExecutableSections()) {
                executeSection(executableSection);
            }
        } finally {
            // teardown must run even when a test section fails
            logger.debug("start teardown test [{}]", testCandidate.getTestPath());
            for (DoSection doSection : testCandidate.getTeardownSection().getDoSections()) {
                executeSection(doSection);
            }
            logger.debug("end teardown test [{}]", testCandidate.getTestPath());
        }
    }

    /**
     * Execute an {@link ExecutableSection}, careful to log its place of origin on failure.
     */
    private void executeSection(ExecutableSection executableSection) {
        try {
            executableSection.execute(restTestExecutionContext);
        } catch (Exception e) {
            throw new RuntimeException(errorMessage(executableSection, e), e);
        } catch (AssertionError e) {
            throw new AssertionError(errorMessage(executableSection, e), e);
        }
    }

    private String errorMessage(ExecutableSection executableSection, Throwable t) {
        return "Failure at [" + testCandidate.getSuitePath() + ":" + executableSection.getLocation().lineNumber + "]: " + t.getMessage();
    }

    protected boolean randomizeContentType() {
        return true;
    }
}
| |
/*
* Copyright 2017 The Kythe Authors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.devtools.kythe.analyzers.java;
import com.google.devtools.kythe.platform.java.helpers.SignatureGenerator;
import com.google.devtools.kythe.proto.MarkedSource;
import com.google.devtools.kythe.proto.Storage.VName;
import com.google.devtools.kythe.util.KytheURI;
import com.sun.tools.javac.code.Symbol;
import com.sun.tools.javac.code.Symbol.ClassSymbol;
import com.sun.tools.javac.code.Type;
import com.sun.tools.javac.code.TypeTag;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import javax.annotation.Nullable;
import javax.lang.model.element.ElementKind;
/** {@link MarkedSource} utility class. */
public final class MarkedSources {
  private MarkedSources() {} // Static utility class; prevents instantiation.
/** Returns a {@link MarkedSource} instance for a {@link Symbol}. */
static MarkedSource construct(
SignatureGenerator signatureGenerator,
Symbol sym,
@Nullable MarkedSource.Builder msBuilder,
@Nullable Iterable<MarkedSource> postChildren,
Function<Symbol, Optional<VName>> symNames) {
MarkedSource markedType = markType(signatureGenerator, sym, symNames);
return construct(signatureGenerator, sym, msBuilder, postChildren, markedType);
}
private static MarkedSource construct(
SignatureGenerator signatureGenerator,
Symbol sym,
@Nullable MarkedSource.Builder msBuilder,
@Nullable Iterable<MarkedSource> postChildren,
@Nullable MarkedSource markedType) {
MarkedSource.Builder markedSource = msBuilder == null ? MarkedSource.newBuilder() : msBuilder;
if (markedType != null) {
markedSource.addChild(markedType);
}
String identToken = buildContext(markedSource.addChildBuilder(), sym, signatureGenerator);
switch (sym.getKind()) {
case TYPE_PARAMETER:
markedSource
.addChildBuilder()
.setKind(MarkedSource.Kind.IDENTIFIER)
.setPreText("<" + sym.getSimpleName() + ">");
break;
case CONSTRUCTOR:
case METHOD:
ClassSymbol enclClass = sym.enclClass();
String methodName;
if (sym.getKind() == ElementKind.CONSTRUCTOR && enclClass != null) {
methodName = enclClass.getSimpleName().toString();
} else {
methodName = sym.getSimpleName().toString();
}
markedSource.addChildBuilder().setKind(MarkedSource.Kind.IDENTIFIER).setPreText(methodName);
markedSource
.addChildBuilder()
.setKind(MarkedSource.Kind.PARAMETER_LOOKUP_BY_PARAM)
.setPreText("(")
.setPostChildText(", ")
.setPostText(")");
break;
default:
markedSource.addChildBuilder().setKind(MarkedSource.Kind.IDENTIFIER).setPreText(identToken);
break;
}
if (postChildren != null) {
postChildren.forEach(markedSource::addChild);
}
return markedSource.build();
}
/**
* Sets the provided {@link MarkedSource.Builder} to a CONTEXT node, populating it with the
* fully-qualified parent scope for sym. Returns the identifier corresponding to sym.
*/
private static String buildContext(
MarkedSource.Builder context, Symbol sym, SignatureGenerator signatureGenerator) {
context.setKind(MarkedSource.Kind.CONTEXT).setPostChildText(".").setAddFinalListToken(true);
String identToken = getIdentToken(sym, signatureGenerator);
Symbol parent = getQualifiedNameParent(sym);
List<MarkedSource> parents = new ArrayList<>();
while (parent != null) {
String parentName = getIdentToken(parent, signatureGenerator);
if (!parentName.isEmpty()) {
parents.add(
MarkedSource.newBuilder()
.setKind(MarkedSource.Kind.IDENTIFIER)
.setPreText(parentName)
.build());
}
parent = getQualifiedNameParent(parent);
}
for (int i = 0; i < parents.size(); ++i) {
context.addChild(parents.get(parents.size() - i - 1));
}
return identToken;
}
  /**
   * Returns a {@link MarkedSource} instance for sym's type (or its return type, if sym is a
   * method). If there is no appropriate type for sym, returns {@code null}. Generates links with
   * {@code signatureGenerator}.
   */
  @Nullable
  private static MarkedSource markType(
      SignatureGenerator signatureGenerator,
      Symbol sym,
      Function<Symbol, Optional<VName>> symNames) {
    // TODO(zarko): Mark up any annotations.
    Type type = sym.type;
    if (type == null || sym == type.tsym) {
      // Nothing to mark: the symbol is untyped, or it is the type's own symbol
      // (e.g. a class declaration), so repeating the type would add nothing.
      return null;
    }
    if (type.getReturnType() != null) {
      // For methods, show the return type rather than the full method type.
      type = type.getReturnType();
    }
    String postTypeIdText = "";
    if (type.hasTag(TypeTag.ARRAY) && ((Type.ArrayType) type).elemtype != null) {
      // Arrays render as "<elemtype>[]": mark up the element type and carry
      // the brackets along as post-text.
      postTypeIdText = "[]";
      type = ((Type.ArrayType) type).elemtype;
    }
    MarkedSource.Builder builder =
        MarkedSource.newBuilder().setKind(MarkedSource.Kind.TYPE).setPostText(" ");
    if (type.hasTag(TypeTag.CLASS)) {
      MarkedSource.Builder classIdentParent = builder;
      if (!postTypeIdText.isEmpty()) {
        // Wrap the class identifier so the "[]" suffix attaches to it alone.
        classIdentParent = builder.addChildBuilder().setPostText(postTypeIdText);
      }
      addClassIdentifier(type, classIdentParent, signatureGenerator, symNames);
    } else {
      // Non-class types (primitives, type variables, etc.) are emitted verbatim.
      builder
          .addChildBuilder()
          .setKind(MarkedSource.Kind.IDENTIFIER)
          .setPreText(type.toString())
          .setPostText(postTypeIdText);
    }
    return builder.build();
  }
  /**
   * Appends the marked-up identifier for a class {@code type} to {@code parent}:
   * its enclosing-scope CONTEXT, its simple-name IDENTIFIER, a link to its Kythe
   * semantic node, and (recursively) any type arguments.
   */
  private static void addClassIdentifier(
      Type type,
      MarkedSource.Builder parent,
      SignatureGenerator signatureGenerator,
      Function<Symbol, Optional<VName>> symNames) {
    // Add the class CONTEXT (i.e. package) and class IDENTIFIER (i.e. simple name).
    // The qualifiedName BOX is used to restrict the Link added below.
    MarkedSource.Builder qualifiedName = parent.addChildBuilder();
    String identToken =
        buildContext(qualifiedName.addChildBuilder(), type.tsym, signatureGenerator);
    qualifiedName.addChildBuilder().setKind(MarkedSource.Kind.IDENTIFIER).setPreText(identToken);
    // Add a link to the Kythe semantic node for the class.
    symNames
        .apply(type.tsym)
        .map(KytheURI::asString)
        .ifPresent(ticket -> qualifiedName.addLinkBuilder().addDefinition(ticket));
    // Possibly add a PARAMETER node for the class type arguments.
    if (!type.getTypeArguments().isEmpty()) {
      MarkedSource.Builder typeArgs =
          parent
              .addChildBuilder()
              .setKind(MarkedSource.Kind.PARAMETER)
              .setPreText("<")
              .setPostChildText(", ")
              .setPostText(">");
      for (Type arg : type.getTypeArguments()) {
        switch (arg.getTag()) {
          case CLASS:
            // Class-typed arguments recurse so nested generics are marked up too.
            addClassIdentifier(arg, typeArgs.addChildBuilder(), signatureGenerator, symNames);
            break;
          case WILDCARD:
            Type.WildcardType wild = (Type.WildcardType) arg;
            if (wild.isUnbound()) {
              // Unbounded "?": just the wildcard token itself.
              typeArgs.addChildBuilder().setPreText(wild.kind.toString());
            } else {
              // Bounded wildcard: the wildcard token followed by its bound type.
              MarkedSource.Builder boundedWild = typeArgs.addChildBuilder();
              boundedWild.addChildBuilder().setPreText(wild.kind.toString());
              addClassIdentifier(wild.type, boundedWild, signatureGenerator, symNames);
            }
            break;
          default:
            // Other type arguments (e.g. type variables) are emitted verbatim.
            typeArgs.addChildBuilder().setPreText(arg.toString());
        }
      }
    }
  }
/**
* The only place the integer index for nested classes/anonymous classes is stored is in the
* flatname of the symbol. (This index is determined at compile time using linear search; see
* 'localClassName' in Check.java). The simple name can't be relied on; for nested classes it
* drops the name of the parent class (so 'pkg.OuterClass$Inner' yields only 'Inner') and for
* anonymous classes it's blank. For multiply-nested classes, we'll see tokens like
* 'OuterClass$Inner$1$1'.
*/
private static String getIdentToken(Symbol sym, SignatureGenerator signatureGenerator) {
// If the symbol represents the generated `Array` class, replace it with the actual
// array type, if we have it.
if (SignatureGenerator.isArrayHelperClass(sym) && signatureGenerator != null) {
return signatureGenerator.getArrayTypeName();
}
String flatName = sym.flatName().toString();
int lastDot = flatName.lastIndexOf('.');
// A$1 is a valid variable/method name, so make sure we only look at $ in class names.
int lastCash = (sym instanceof ClassSymbol) ? flatName.lastIndexOf('$') : -1;
int lastTok = Math.max(lastDot, lastCash);
String identToken = lastTok < 0 ? flatName : flatName.substring(lastTok + 1);
if (!identToken.isEmpty() && Character.isDigit(identToken.charAt(0))) {
if (sym.name.isEmpty()) {
identToken = "(anon " + identToken + ")";
} else {
identToken = sym.name.toString();
}
}
return identToken;
}
/**
* Returns the Symbol for sym's parent in qualified names, assuming that we'll be using
* getIdentToken() to print nodes.
*
* <p>We're going through this extra effort to try and give people unsurprising qualified names.
* To do that we have to deal with javac's mangling (in {@link #getIdentToken} above), since for
* anonymous classes javac only stores mangled symbols. The code as written will emit only dotted
* fully-qualified names, even for inner or anonymous classes, and considers concrete type,
* package, or method names to be appropriate dot points. (If we weren't careful here we might,
* for example, observe nodes in a qualified name corresponding to variables that are initialized
* to anonymous classes.) This reflects the nesting structure from the Java side, not the JVM
* side.
*/
@Nullable
private static Symbol getQualifiedNameParent(Symbol sym) {
sym = sym.owner;
while (sym != null) {
switch (sym.kind) {
case TYP:
if (!sym.type.hasTag(TypeTag.TYPEVAR)) {
return sym;
}
break;
case PCK:
case MTH:
return sym;
// TODO(T227): resolve non-exhaustive switch statements w/o defaults
default:
break;
}
sym = sym.owner;
}
return null;
}
}
| |
/*
* Copyright 2014 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.optaplanner.benchmark.impl.statistic.subsingle.pickedmovetypebestscore;
import java.io.File;
import java.text.NumberFormat;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import org.jfree.chart.JFreeChart;
import org.jfree.chart.axis.NumberAxis;
import org.jfree.chart.plot.PlotOrientation;
import org.jfree.chart.plot.XYPlot;
import org.jfree.chart.renderer.xy.XYItemRenderer;
import org.jfree.chart.renderer.xy.YIntervalRenderer;
import org.jfree.data.xy.XYIntervalSeries;
import org.jfree.data.xy.XYIntervalSeriesCollection;
import org.optaplanner.benchmark.config.statistic.SingleStatisticType;
import org.optaplanner.benchmark.impl.report.BenchmarkReport;
import org.optaplanner.benchmark.impl.result.SubSingleBenchmarkResult;
import org.optaplanner.benchmark.impl.statistic.PureSubSingleStatistic;
import org.optaplanner.benchmark.impl.statistic.common.MillisecondsSpentNumberFormat;
import org.optaplanner.core.api.score.Score;
import org.optaplanner.core.api.solver.Solver;
import org.optaplanner.core.impl.localsearch.scope.LocalSearchPhaseScope;
import org.optaplanner.core.impl.localsearch.scope.LocalSearchStepScope;
import org.optaplanner.core.impl.phase.event.PhaseLifecycleListenerAdapter;
import org.optaplanner.core.impl.phase.scope.AbstractPhaseScope;
import org.optaplanner.core.impl.phase.scope.AbstractStepScope;
import org.optaplanner.core.impl.score.ScoreUtils;
import org.optaplanner.core.impl.score.definition.ScoreDefinition;
import org.optaplanner.core.impl.solver.DefaultSolver;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamOmitField;
@XStreamAlias("pickedMoveTypeBestScoreDiffSubSingleStatistic")
public class PickedMoveTypeBestScoreDiffSubSingleStatistic<Solution_>
        extends PureSubSingleStatistic<Solution_, PickedMoveTypeBestScoreDiffStatisticPoint> {
    // Transient: rebuilt by the constructor, never serialized by XStream.
    @XStreamOmitField
    private PickedMoveTypeBestScoreDiffSubSingleStatisticListener listener;
    // One chart file per charted score level; populated by writeGraphFiles().
    @XStreamOmitField
    protected List<File> graphFileList = null;
    public PickedMoveTypeBestScoreDiffSubSingleStatistic(SubSingleBenchmarkResult subSingleBenchmarkResult) {
        super(subSingleBenchmarkResult, SingleStatisticType.PICKED_MOVE_TYPE_BEST_SCORE_DIFF);
        listener = new PickedMoveTypeBestScoreDiffSubSingleStatisticListener();
    }
    /**
     * @return never null once {@link #writeGraphFiles(BenchmarkReport)} has been called
     */
    @Override
    public List<File> getGraphFileList() {
        return graphFileList;
    }
    // ************************************************************************
    // Lifecycle methods
    // ************************************************************************
    /** Registers the listener so best-score-diff points are collected while solving. */
    @Override
    public void open(Solver<Solution_> solver) {
        ((DefaultSolver<Solution_>) solver).addPhaseLifecycleListener(listener);
    }
    /** Unregisters the listener once solving is done. */
    @Override
    public void close(Solver<Solution_> solver) {
        ((DefaultSolver<Solution_>) solver).removePhaseLifecycleListener(listener);
    }
    /**
     * For every local search step that improves the best score, records the score diff
     * attributed to the type of the move that was picked in that step.
     */
    private class PickedMoveTypeBestScoreDiffSubSingleStatisticListener extends PhaseLifecycleListenerAdapter<Solution_> {
        // Best score before the current step; only tracked during local search phases.
        private Score oldBestScore = null;
        @Override
        public void phaseStarted(AbstractPhaseScope<Solution_> phaseScope) {
            if (phaseScope instanceof LocalSearchPhaseScope) {
                oldBestScore = phaseScope.getBestScore();
            }
        }
        @Override
        public void phaseEnded(AbstractPhaseScope<Solution_> phaseScope) {
            if (phaseScope instanceof LocalSearchPhaseScope) {
                oldBestScore = null;
            }
        }
        @Override
        public void stepEnded(AbstractStepScope<Solution_> stepScope) {
            // This statistic only applies to local search; steps of other phase types are ignored.
            if (stepScope instanceof LocalSearchStepScope) {
                localSearchStepEnded((LocalSearchStepScope<Solution_>) stepScope);
            }
        }
        private void localSearchStepEnded(LocalSearchStepScope<Solution_> stepScope) {
            if (stepScope.getBestScoreImproved()) {
                long timeMillisSpent = stepScope.getPhaseScope().calculateSolverTimeMillisSpentUpToNow();
                String moveType = stepScope.getStep().getSimpleMoveTypeDescription();
                Score newBestScore = stepScope.getScore();
                // Attribute the improvement (new best - old best) to this step's move type.
                Score bestScoreDiff = newBestScore.subtract(oldBestScore);
                oldBestScore = newBestScore;
                pointList.add(new PickedMoveTypeBestScoreDiffStatisticPoint(
                        timeMillisSpent, moveType, bestScoreDiff));
            }
        }
    }
    // ************************************************************************
    // CSV methods
    // ************************************************************************
    @Override
    protected String getCsvHeader() {
        return PickedMoveTypeBestScoreDiffStatisticPoint.buildCsvLine("timeMillisSpent", "moveType", "bestScoreDiff");
    }
    @Override
    protected PickedMoveTypeBestScoreDiffStatisticPoint createPointFromCsvLine(ScoreDefinition scoreDefinition,
            List<String> csvLine) {
        return new PickedMoveTypeBestScoreDiffStatisticPoint(Long.parseLong(csvLine.get(0)),
                csvLine.get(1), scoreDefinition.parseScore(csvLine.get(2)));
    }
    // ************************************************************************
    // Write methods
    // ************************************************************************
    /**
     * Writes one chart per charted score level: x = solver time spent, y = best score diff,
     * with one series per move type.
     */
    @Override
    public void writeGraphFiles(BenchmarkReport benchmarkReport) {
        // List index = score level; each map groups that level's series by move type.
        List<Map<String, XYIntervalSeries>> moveTypeToSeriesMapList = new ArrayList<>(BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE);
        for (PickedMoveTypeBestScoreDiffStatisticPoint point : getPointList()) {
            long timeMillisSpent = point.getTimeMillisSpent();
            String moveType = point.getMoveType();
            double[] levelValues = ScoreUtils.extractLevelDoubles(point.getBestScoreDiff());
            for (int i = 0; i < levelValues.length && i < BenchmarkReport.CHARTED_SCORE_LEVEL_SIZE; i++) {
                if (i >= moveTypeToSeriesMapList.size()) {
                    moveTypeToSeriesMapList.add(new LinkedHashMap<>());
                }
                Map<String, XYIntervalSeries> moveTypeToSeriesMap = moveTypeToSeriesMapList.get(i);
                XYIntervalSeries series = moveTypeToSeriesMap.computeIfAbsent(moveType,
                        k -> new XYIntervalSeries(moveType));
                double yValue = levelValues[i];
                // In an XYInterval the yLow must be lower than yHigh
                series.add(timeMillisSpent, timeMillisSpent, timeMillisSpent,
                        yValue, (yValue > 0.0) ? 0.0 : yValue, (yValue > 0.0) ? yValue : 0.0);
            }
        }
        graphFileList = new ArrayList<>(moveTypeToSeriesMapList.size());
        for (int scoreLevelIndex = 0; scoreLevelIndex < moveTypeToSeriesMapList.size(); scoreLevelIndex++) {
            XYPlot plot = createPlot(benchmarkReport, scoreLevelIndex);
            XYItemRenderer renderer = new YIntervalRenderer();
            plot.setRenderer(renderer);
            XYIntervalSeriesCollection seriesCollection = new XYIntervalSeriesCollection();
            for (XYIntervalSeries series : moveTypeToSeriesMapList.get(scoreLevelIndex).values()) {
                seriesCollection.addSeries(series);
            }
            plot.setDataset(seriesCollection);
            String scoreLevelLabel = subSingleBenchmarkResult.getSingleBenchmarkResult().getProblemBenchmarkResult()
                    .findScoreLevelLabel(scoreLevelIndex);
            JFreeChart chart = new JFreeChart(subSingleBenchmarkResult.getName()
                    + " picked move type best " + scoreLevelLabel + " diff statistic",
                    JFreeChart.DEFAULT_TITLE_FONT, plot, true);
            graphFileList.add(writeChartToImageFile(chart,
                    "PickedMoveTypeBestScoreDiffStatisticLevel" + scoreLevelIndex));
        }
    }
    /** Builds an empty plot with a milliseconds x-axis and a score-level-labeled y-axis. */
    private XYPlot createPlot(BenchmarkReport benchmarkReport, int scoreLevelIndex) {
        Locale locale = benchmarkReport.getLocale();
        NumberAxis xAxis = new NumberAxis("Time spent");
        xAxis.setNumberFormatOverride(new MillisecondsSpentNumberFormat(locale));
        String scoreLevelLabel = subSingleBenchmarkResult.getSingleBenchmarkResult().getProblemBenchmarkResult()
                .findScoreLevelLabel(scoreLevelIndex);
        NumberAxis yAxis = new NumberAxis("Best " + scoreLevelLabel + " diff");
        yAxis.setNumberFormatOverride(NumberFormat.getInstance(locale));
        yAxis.setAutoRangeIncludesZero(true);
        XYPlot plot = new XYPlot(null, xAxis, yAxis, null);
        plot.setOrientation(PlotOrientation.VERTICAL);
        return plot;
    }
}
| |
package org.apache.lucene.analysis;
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import org.apache.lucene.analysis.tokenattributes.OffsetAttribute;
import org.apache.lucene.analysis.tokenattributes.FlagsAttribute;
import org.apache.lucene.analysis.tokenattributes.PayloadAttribute;
import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute;
import org.apache.lucene.analysis.tokenattributes.TermAttribute;
import org.apache.lucene.analysis.tokenattributes.TypeAttribute;
import org.apache.lucene.index.Payload;
import org.apache.lucene.index.TermPositions; // for javadoc
import org.apache.lucene.util.ArrayUtil;
import org.apache.lucene.util.Attribute;
import org.apache.lucene.util.AttributeImpl;
/**
A Token is an occurrence of a term from the text of a field. It consists of
a term's text, the start and end offset of the term in the text of the field,
and a type string.
<p>
The start and end offsets permit applications to re-associate a token with
its source text, e.g., to display highlighted query terms in a document
browser, or to show matching text fragments in a <abbr title="KeyWord In Context">KWIC</abbr>
display, etc.
<p>
The type is a string, assigned by a lexical analyzer
(a.k.a. tokenizer), naming the lexical or syntactic class that the token
belongs to. For example an end of sentence marker token might be implemented
with type "eos". The default token type is "word".
<p>
A Token can optionally have metadata (a.k.a. Payload) in the form of a variable
length byte array. Use {@link TermPositions#getPayloadLength()} and
{@link TermPositions#getPayload(byte[], int)} to retrieve the payloads from the index.
<br><br>
<p><b>NOTE:</b> As of 2.9, Token implements all {@link Attribute} interfaces
that are part of core Lucene and can be found in the {@code tokenattributes} subpackage.
Even though it is not necessary to use Token anymore, with the new TokenStream API it can
be used as convenience class that implements all {@link Attribute}s, which is especially useful
to easily switch from the old to the new TokenStream API.
<br><br>
<p><b>NOTE:</b> As of 2.3, Token stores the term text
internally as a malleable char[] termBuffer instead of
String termText. The indexing code and core tokenizers
have been changed to re-use a single Token instance, changing
its buffer and other fields in-place as the Token is
processed. This provides substantially better indexing
performance as it saves the GC cost of new'ing a Token and
String for every term. The APIs that accept String
termText are still available but a warning about the
associated performance cost has been added (below). The
{@link #termText()} method has been deprecated.</p>
<p>Tokenizers and TokenFilters should try to re-use a Token
instance when possible for best performance, by
implementing the {@link TokenStream#incrementToken()} API.
Failing that, to create a new Token you should first use
one of the constructors that starts with null text. To load
the token from a char[] use {@link #setTermBuffer(char[], int, int)}.
To load from a String use {@link #setTermBuffer(String)} or {@link #setTermBuffer(String, int, int)}.
Alternatively you can get the Token's termBuffer by calling either {@link #termBuffer()},
if you know that your text is shorter than the capacity of the termBuffer
or {@link #resizeTermBuffer(int)}, if there is any possibility
that you may need to grow the buffer. Fill in the characters of your term into this
buffer, with {@link String#getChars(int, int, char[], int)} if loading from a string,
or with {@link System#arraycopy(Object, int, Object, int, int)}, and finally call {@link #setTermLength(int)} to
set the length of the term text. See <a target="_top"
href="https://issues.apache.org/jira/browse/LUCENE-969">LUCENE-969</a>
for details.</p>
<p>Typical Token reuse patterns:
<ul>
<li> Copying text from a string (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
<pre>
return reusableToken.reinit(string, startOffset, endOffset[, type]);
</pre>
</li>
<li> Copying some text from a string (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
<pre>
return reusableToken.reinit(string, 0, string.length(), startOffset, endOffset[, type]);
</pre>
  </li>
<li> Copying text from char[] buffer (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
<pre>
return reusableToken.reinit(buffer, 0, buffer.length, startOffset, endOffset[, type]);
</pre>
</li>
<li> Copying some text from a char[] buffer (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
<pre>
return reusableToken.reinit(buffer, start, end - start, startOffset, endOffset[, type]);
</pre>
</li>
  <li> Copying from one Token to another (type is reset to {@link #DEFAULT_TYPE} if not specified):<br/>
<pre>
return reusableToken.reinit(source.termBuffer(), 0, source.termLength(), source.startOffset(), source.endOffset()[, source.type()]);
</pre>
</li>
</ul>
A few things to note:
<ul>
<li>clear() initializes all of the fields to default values. This was changed in contrast to Lucene 2.4, but should affect no one.</li>
<li>Because <code>TokenStreams</code> can be chained, one cannot assume that the <code>Token's</code> current type is correct.</li>
<li>The startOffset and endOffset represent the start and offset in the source text, so be careful in adjusting them.</li>
<li>When caching a reusable token, clone it. When injecting a cached token into a stream that can be reset, clone it again.</li>
</ul>
</p>
@see org.apache.lucene.index.Payload
*/
public class Token extends AttributeImpl
implements Cloneable, TermAttribute, TypeAttribute, PositionIncrementAttribute,
FlagsAttribute, OffsetAttribute, PayloadAttribute {
public static final String DEFAULT_TYPE = "word";
private static int MIN_BUFFER_SIZE = 10;
/** @deprecated We will remove this when we remove the
* deprecated APIs */
private String termText;
/**
* Characters for the term text.
* @deprecated This will be made private. Instead, use:
* {@link #termBuffer()},
* {@link #setTermBuffer(char[], int, int)},
* {@link #setTermBuffer(String)}, or
* {@link #setTermBuffer(String, int, int)}
*/
char[] termBuffer;
/**
* Length of term text in the buffer.
* @deprecated This will be made private. Instead, use:
* {@link #termLength()}, or @{link setTermLength(int)}.
*/
int termLength;
/**
* Start in source text.
* @deprecated This will be made private. Instead, use:
* {@link #startOffset()}, or @{link setStartOffset(int)}.
*/
int startOffset;
/**
* End in source text.
* @deprecated This will be made private. Instead, use:
* {@link #endOffset()}, or @{link setEndOffset(int)}.
*/
int endOffset;
/**
* The lexical type of the token.
* @deprecated This will be made private. Instead, use:
* {@link #type()}, or @{link setType(String)}.
*/
String type = DEFAULT_TYPE;
private int flags;
/**
* @deprecated This will be made private. Instead, use:
* {@link #getPayload()}, or @{link setPayload(Payload)}.
*/
Payload payload;
/**
* @deprecated This will be made private. Instead, use:
* {@link #getPositionIncrement()}, or @{link setPositionIncrement(String)}.
*/
int positionIncrement = 1;
  /** Constructs a Token with null text. */
  public Token() {
  }
/** Constructs a Token with null text and start & end
* offsets.
* @param start start offset in the source text
* @param end end offset in the source text */
public Token(int start, int end) {
startOffset = start;
endOffset = end;
}
/** Constructs a Token with null text and start & end
* offsets plus the Token type.
* @param start start offset in the source text
* @param end end offset in the source text
* @param typ the lexical type of this Token */
public Token(int start, int end, String typ) {
startOffset = start;
endOffset = end;
type = typ;
}
/**
* Constructs a Token with null text and start & end
* offsets plus flags. NOTE: flags is EXPERIMENTAL.
* @param start start offset in the source text
* @param end end offset in the source text
* @param flags The bits to set for this token
*/
public Token(int start, int end, int flags) {
startOffset = start;
endOffset = end;
this.flags = flags;
}
/** Constructs a Token with the given term text, and start
* & end offsets. The type defaults to "word."
* <b>NOTE:</b> for better indexing speed you should
* instead use the char[] termBuffer methods to set the
* term text.
* @param text term text
* @param start start offset
* @param end end offset
*/
public Token(String text, int start, int end) {
termText = text;
startOffset = start;
endOffset = end;
}
/** Constructs a Token with the given text, start and end
* offsets, & type. <b>NOTE:</b> for better indexing
* speed you should instead use the char[] termBuffer
* methods to set the term text.
* @param text term text
* @param start start offset
* @param end end offset
* @param typ token type
*/
public Token(String text, int start, int end, String typ) {
termText = text;
startOffset = start;
endOffset = end;
type = typ;
}
/**
* Constructs a Token with the given text, start and end
* offsets, & type. <b>NOTE:</b> for better indexing
* speed you should instead use the char[] termBuffer
* methods to set the term text.
* @param text
* @param start
* @param end
* @param flags token type bits
*/
public Token(String text, int start, int end, int flags) {
termText = text;
startOffset = start;
endOffset = end;
this.flags = flags;
}
/**
* Constructs a Token with the given term buffer (offset
* & length), start and end
* offsets
* @param startTermBuffer
* @param termBufferOffset
* @param termBufferLength
* @param start
* @param end
*/
public Token(char[] startTermBuffer, int termBufferOffset, int termBufferLength, int start, int end) {
setTermBuffer(startTermBuffer, termBufferOffset, termBufferLength);
startOffset = start;
endOffset = end;
}
/** Set the position increment. This determines the position of this token
* relative to the previous Token in a {@link TokenStream}, used in phrase
* searching.
*
* <p>The default value is one.
*
* <p>Some common uses for this are:<ul>
*
* <li>Set it to zero to put multiple terms in the same position. This is
* useful if, e.g., a word has multiple stems. Searches for phrases
* including either stem will match. In this case, all but the first stem's
* increment should be set to zero: the increment of the first instance
* should be one. Repeating a token with an increment of zero can also be
* used to boost the scores of matches on that token.
*
* <li>Set it to values greater than one to inhibit exact phrase matches.
* If, for example, one does not want phrases to match across removed stop
* words, then one could build a stop word filter that removes stop words and
* also sets the increment to the number of stop words removed before each
* non-stop word. Then exact phrase queries will only match when the terms
* occur with no intervening stop words.
*
* </ul>
* @param positionIncrement the distance from the prior term
* @see org.apache.lucene.index.TermPositions
*/
public void setPositionIncrement(int positionIncrement) {
if (positionIncrement < 0)
throw new IllegalArgumentException
("Increment must be zero or greater: " + positionIncrement);
this.positionIncrement = positionIncrement;
}
/** Returns the position increment of this Token.
* @see #setPositionIncrement
*/
public int getPositionIncrement() {
return positionIncrement;
}
/** Sets the Token's term text. <b>NOTE:</b> for better
* indexing speed you should instead use the char[]
* termBuffer methods to set the term text.
* @deprecated use {@link #setTermBuffer(char[], int, int)} or
* {@link #setTermBuffer(String)} or
* {@link #setTermBuffer(String, int, int)}.
*/
public void setTermText(String text) {
termText = text;
termBuffer = null;
}
/** Returns the Token's term text.
*
* @deprecated This method now has a performance penalty
* because the text is stored internally in a char[]. If
* possible, use {@link #termBuffer()} and {@link
* #termLength()} directly instead. If you really need a
* String, use {@link #term()}</b>
*/
public final String termText() {
if (termText == null && termBuffer != null)
termText = new String(termBuffer, 0, termLength);
return termText;
}
/** Returns the Token's term text.
*
* This method has a performance penalty
* because the text is stored internally in a char[]. If
* possible, use {@link #termBuffer()} and {@link
* #termLength()} directly instead. If you really need a
* String, use this method, which is nothing more than
* a convenience call to <b>new String(token.termBuffer(), 0, token.termLength())</b>
*/
public final String term() {
if (termText != null)
return termText;
initTermBuffer();
return new String(termBuffer, 0, termLength);
}
/** Copies the contents of buffer, starting at offset for
* length characters, into the termBuffer array.
* @param buffer the buffer to copy
* @param offset the index in the buffer of the first character to copy
* @param length the number of characters to copy
*/
public final void setTermBuffer(char[] buffer, int offset, int length) {
termText = null;
growTermBuffer(length);
System.arraycopy(buffer, offset, termBuffer, 0, length);
termLength = length;
}
/** Copies the contents of buffer into the termBuffer array.
* @param buffer the buffer to copy
*/
public final void setTermBuffer(String buffer) {
termText = null;
final int length = buffer.length();
growTermBuffer(length);
buffer.getChars(0, length, termBuffer, 0);
termLength = length;
}
/** Copies the contents of buffer, starting at offset and continuing
* for length characters, into the termBuffer array.
* @param buffer the buffer to copy
* @param offset the index in the buffer of the first character to copy
* @param length the number of characters to copy
*/
public final void setTermBuffer(String buffer, int offset, int length) {
assert offset <= buffer.length();
assert offset + length <= buffer.length();
termText = null;
growTermBuffer(length);
buffer.getChars(offset, offset + length, termBuffer, 0);
termLength = length;
}
/** Returns the internal termBuffer character array which
* you can then directly alter. If the array is too
* small for your token, use {@link
* #resizeTermBuffer(int)} to increase it. After
* altering the buffer be sure to call {@link
* #setTermLength} to record the number of valid
* characters that were placed into the termBuffer. */
public final char[] termBuffer() {
initTermBuffer();
return termBuffer;
}
/** Grows the termBuffer to at least size newSize, preserving the
* existing content. Note: If the next operation is to change
* the contents of the term buffer use
* {@link #setTermBuffer(char[], int, int)},
* {@link #setTermBuffer(String)}, or
* {@link #setTermBuffer(String, int, int)}
* to optimally combine the resize with the setting of the termBuffer.
* @param newSize minimum size of the new termBuffer
* @return newly created termBuffer with length >= newSize
*/
public char[] resizeTermBuffer(int newSize) {
if (termBuffer == null) {
// The buffer is always at least MIN_BUFFER_SIZE
newSize = newSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : newSize;
//Preserve termText
if (termText != null) {
final int ttLen = termText.length();
newSize = newSize < ttLen ? ttLen : newSize;
termBuffer = new char[ArrayUtil.getNextSize(newSize)];
termText.getChars(0, termText.length(), termBuffer, 0);
termText = null;
} else { // no term Text, the first allocation
termBuffer = new char[ArrayUtil.getNextSize(newSize)];
}
} else {
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation and preserve content
final char[] newCharBuffer = new char[ArrayUtil.getNextSize(newSize)];
System.arraycopy(termBuffer, 0, newCharBuffer, 0, termBuffer.length);
termBuffer = newCharBuffer;
}
}
return termBuffer;
}
/** Allocates a buffer char[] of at least newSize, without preserving the existing content.
* its always used in places that set the content
* @param newSize minimum size of the buffer
*/
private void growTermBuffer(int newSize) {
if (termBuffer == null) {
// The buffer is always at least MIN_BUFFER_SIZE
termBuffer = new char[ArrayUtil.getNextSize(newSize < MIN_BUFFER_SIZE ? MIN_BUFFER_SIZE : newSize)];
} else {
if(termBuffer.length < newSize){
// Not big enough; create a new array with slight
// over allocation:
termBuffer = new char[ArrayUtil.getNextSize(newSize)];
}
}
}
// TODO: once we remove the deprecated termText() method
// and switch entirely to char[] termBuffer we don't need
// to use this method anymore, only for late init of the buffer
private void initTermBuffer() {
if (termBuffer == null) {
if (termText == null) {
termBuffer = new char[ArrayUtil.getNextSize(MIN_BUFFER_SIZE)];
termLength = 0;
} else {
int length = termText.length();
if (length < MIN_BUFFER_SIZE) length = MIN_BUFFER_SIZE;
termBuffer = new char[ArrayUtil.getNextSize(length)];
termLength = termText.length();
termText.getChars(0, termText.length(), termBuffer, 0);
termText = null;
}
} else {
termText = null;
}
}
/** Return number of valid characters (length of the term)
* in the termBuffer array. */
public final int termLength() {
initTermBuffer();
return termLength;
}
/** Set number of valid characters (length of the term) in
* the termBuffer array. Use this to truncate the termBuffer
* or to synchronize with external manipulation of the termBuffer.
* Note: to grow the size of the array,
* use {@link #resizeTermBuffer(int)} first.
* @param length the truncated length
*/
public final void setTermLength(int length) {
initTermBuffer();
if (length > termBuffer.length)
throw new IllegalArgumentException("length " + length + " exceeds the size of the termBuffer (" + termBuffer.length + ")");
termLength = length;
}
/** Returns this Token's starting offset, the position of the first character
corresponding to this token in the source text.
Note that the difference between endOffset() and startOffset() may not be
equal to termText.length(), as the term text may have been altered by a
stemmer or some other filter. */
public final int startOffset() {
return startOffset;
}
  /** Sets the position of this token's first character in the source text.
      @see #startOffset() */
  public void setStartOffset(int offset) {
    this.startOffset = offset;
  }
/** Returns this Token's ending offset, one greater than the position of the
last character corresponding to this token in the source text. The length
of the token in the source text is (endOffset - startOffset). */
public final int endOffset() {
return endOffset;
}
  /** Sets the position one past this token's last character in the source text.
      @see #endOffset() */
  public void setEndOffset(int offset) {
    this.endOffset = offset;
  }
  /** Sets the starting and ending offset in one call.
      @see #startOffset()
      @see #endOffset() */
  public void setOffset(int startOffset, int endOffset) {
    this.startOffset = startOffset;
    this.endOffset = endOffset;
  }
/** Returns this Token's lexical type. Defaults to "word". */
public final String type() {
return type;
}
  /** Sets the lexical type (e.g. "word", or "eos" for an end-of-sentence marker).
      @see #type() */
  public final void setType(String type) {
    this.type = type;
  }
/**
* EXPERIMENTAL: While we think this is here to stay, we may want to change it to be a long.
* <p/>
*
* Get the bitset for any bits that have been set. This is completely distinct from {@link #type()}, although they do share similar purposes.
* The flags can be used to encode information about the token for use by other {@link org.apache.lucene.analysis.TokenFilter}s.
*
*
* @return The bits
*/
public int getFlags() {
return flags;
}
  /**
   * Sets the experimental flag bits wholesale.
   * @see #getFlags()
   */
  public void setFlags(int flags) {
    this.flags = flags;
  }
  /**
   * Returns this Token's payload, or null if none has been set.
   */
  public Payload getPayload() {
    return this.payload;
  }
  /**
   * Sets this Token's payload (per-position metadata); may be null.
   */
  public void setPayload(Payload payload) {
    this.payload = payload;
  }
/**
 * Returns a textual representation of this token of the form
 * {@code (term,startOffset,endOffset[,type=...][,posIncr=...])}. The type is
 * printed only when it differs from the default "word", and the position
 * increment only when it differs from the default 1.
 */
public String toString() {
  // StringBuilder instead of StringBuffer: the buffer is method-local, so the
  // per-call synchronization of StringBuffer buys nothing here.
  StringBuilder sb = new StringBuilder();
  sb.append('(');
  initTermBuffer();
  if (termBuffer == null)
    sb.append("null");
  else
    sb.append(termBuffer, 0, termLength);
  sb.append(',').append(startOffset).append(',').append(endOffset);
  if (!type.equals("word"))
    sb.append(",type=").append(type);
  if (positionIncrement != 1)
    sb.append(",posIncr=").append(positionIncrement);
  sb.append(')');
  return sb.toString();
}
/** Resets the term text, payload, flags, positionIncrement,
 * startOffset, endOffset and token type to default.
 */
public void clear() {
  payload = null;
  // Leave termBuffer to allow re-use
  termLength = 0;
  termText = null;
  positionIncrement = 1;
  flags = 0;
  startOffset = endOffset = 0;
  type = DEFAULT_TYPE;
}
/**
 * Returns a deep copy of this token: the term buffer and the payload (when
 * present) are cloned so that mutating the copy cannot affect this instance.
 */
public Object clone() {
  Token t = (Token)super.clone();
  // Do a deep clone: super.clone() only copies the references.
  if (termBuffer != null) {
    t.termBuffer = (char[]) termBuffer.clone();
  }
  if (payload != null) {
    t.payload = (Payload) payload.clone();
  }
  return t;
}
/** Makes a clone, but replaces the term buffer &amp;
 * start/end offset in the process. This is more
 * efficient than doing a full clone (and then calling
 * setTermBuffer) because it saves a wasted copy of the old
 * termBuffer. The payload, when present, is still deep-cloned. */
public Token clone(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
  final Token t = new Token(newTermBuffer, newTermOffset, newTermLength, newStartOffset, newEndOffset);
  t.positionIncrement = positionIncrement;
  t.flags = flags;
  t.type = type;
  if (payload != null)
    t.payload = (Payload) payload.clone();
  return t;
}
/**
 * Two tokens are equal when every attribute matches: term characters, offsets,
 * flags, position increment, type and payload. Forces both term buffers to be
 * initialized before the character-by-character comparison.
 */
public boolean equals(Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof Token)) {
    return false;
  }
  Token other = (Token) obj;
  initTermBuffer();
  other.initTermBuffer();
  // Cheap scalar comparisons first; bail out on the first mismatch.
  if (termLength != other.termLength
      || startOffset != other.startOffset
      || endOffset != other.endOffset
      || flags != other.flags
      || positionIncrement != other.positionIncrement
      || !subEqual(type, other.type)
      || !subEqual(payload, other.payload)) {
    return false;
  }
  // All scalars agree; finally compare the term text itself.
  for (int i = 0; i < termLength; i++) {
    if (termBuffer[i] != other.termBuffer[i]) {
      return false;
    }
  }
  return true;
}
/** Null-safe equality helper used by {@link #equals(Object)}: two nulls are
 *  equal, a null never equals a non-null. */
private boolean subEqual(Object o1, Object o2) {
  return (o1 == null) ? (o2 == null) : o1.equals(o2);
}
/**
 * Hash code consistent with {@link #equals(Object)}: folds in every field that
 * participates in equality using the conventional 31-multiplier scheme.
 */
public int hashCode() {
  initTermBuffer();
  int code = termLength;
  code = code * 31 + startOffset;
  code = code * 31 + endOffset;
  code = code * 31 + flags;
  code = code * 31 + positionIncrement;
  code = code * 31 + type.hashCode();
  // Payload is optional; only folded in when present.
  code = (payload == null ? code : code * 31 + payload.hashCode());
  code = code * 31 + ArrayUtil.hashCode(termBuffer, 0, termLength);
  return code;
}
// Like clear() but doesn't clear termBuffer/text; used by the reinit()
// overloads, which immediately overwrite the term afterwards.
private void clearNoTermBuffer() {
  payload = null;
  positionIncrement = 1;
  flags = 0;
  startOffset = endOffset = 0;
  type = DEFAULT_TYPE;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(char[], int, int)},
 * {@link #setStartOffset},
 * {@link #setEndOffset},
 * {@link #setType}
 * @return this Token instance */
public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType) {
  // clearNoTermBuffer() already resets payload, positionIncrement, flags,
  // offsets and type, so the explicit payload/positionIncrement assignments
  // that used to follow it were redundant (and absent from every sibling
  // overload); they have been removed.
  clearNoTermBuffer();
  setTermBuffer(newTermBuffer, newTermOffset, newTermLength);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = newType;
  return this;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(char[], int, int)},
 * {@link #setStartOffset},
 * {@link #setEndOffset}
 * {@link #setType} on Token.DEFAULT_TYPE
 * @return this Token instance */
public Token reinit(char[] newTermBuffer, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
  clearNoTermBuffer();
  setTermBuffer(newTermBuffer, newTermOffset, newTermLength);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = DEFAULT_TYPE;
  return this;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(String)},
 * {@link #setStartOffset},
 * {@link #setEndOffset}
 * {@link #setType}
 * @return this Token instance */
public Token reinit(String newTerm, int newStartOffset, int newEndOffset, String newType) {
  clearNoTermBuffer();
  setTermBuffer(newTerm);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = newType;
  return this;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(String, int, int)},
 * {@link #setStartOffset},
 * {@link #setEndOffset}
 * {@link #setType}
 * @return this Token instance */
public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset, String newType) {
  clearNoTermBuffer();
  setTermBuffer(newTerm, newTermOffset, newTermLength);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = newType;
  return this;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(String)},
 * {@link #setStartOffset},
 * {@link #setEndOffset}
 * {@link #setType} on Token.DEFAULT_TYPE
 * @return this Token instance */
public Token reinit(String newTerm, int newStartOffset, int newEndOffset) {
  clearNoTermBuffer();
  setTermBuffer(newTerm);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = DEFAULT_TYPE;
  return this;
}
/** Shorthand for calling {@link #clear},
 * {@link #setTermBuffer(String, int, int)},
 * {@link #setStartOffset},
 * {@link #setEndOffset}
 * {@link #setType} on Token.DEFAULT_TYPE
 * @return this Token instance */
public Token reinit(String newTerm, int newTermOffset, int newTermLength, int newStartOffset, int newEndOffset) {
  clearNoTermBuffer();
  setTermBuffer(newTerm, newTermOffset, newTermLength);
  startOffset = newStartOffset;
  endOffset = newEndOffset;
  type = DEFAULT_TYPE;
  return this;
}
/**
 * Copy the prototype token's fields into this one. Note: the payload is shared
 * by reference (not cloned); the term buffer content is copied.
 * @param prototype token to copy from
 */
public void reinit(Token prototype) {
  prototype.initTermBuffer();
  setTermBuffer(prototype.termBuffer, 0, prototype.termLength);
  positionIncrement = prototype.positionIncrement;
  flags = prototype.flags;
  startOffset = prototype.startOffset;
  endOffset = prototype.endOffset;
  type = prototype.type;
  payload = prototype.payload;
}
/**
 * Copy the prototype token's fields into this one, with a different term.
 * Note: the payload is shared by reference (not cloned).
 * @param prototype token whose non-term fields are copied
 * @param newTerm replacement term text
 */
public void reinit(Token prototype, String newTerm) {
  setTermBuffer(newTerm);
  positionIncrement = prototype.positionIncrement;
  flags = prototype.flags;
  startOffset = prototype.startOffset;
  endOffset = prototype.endOffset;
  type = prototype.type;
  payload = prototype.payload;
}
/**
 * Copy the prototype token's fields into this one, with a different term.
 * Note: the payload is shared by reference (not cloned).
 * @param prototype token whose non-term fields are copied
 * @param newTermBuffer buffer holding the replacement term
 * @param offset start of the term within {@code newTermBuffer}
 * @param length number of characters of the term
 */
public void reinit(Token prototype, char[] newTermBuffer, int offset, int length) {
  setTermBuffer(newTermBuffer, offset, length);
  positionIncrement = prototype.positionIncrement;
  flags = prototype.flags;
  startOffset = prototype.startOffset;
  endOffset = prototype.endOffset;
  type = prototype.type;
  payload = prototype.payload;
}
/**
 * Copies this token's state into {@code target}. Dispatches on the concrete
 * attribute type: a Token target is reinitialized in place (payload cloned so
 * the two tokens never share one), a TokenWrapper receives a full clone, and
 * any other target is populated one attribute interface at a time.
 */
public void copyTo(AttributeImpl target) {
  if (target instanceof Token) {
    final Token to = (Token) target;
    to.reinit(this);
    // reinit shares the payload, so clone it:
    if (payload !=null) {
      to.payload = (Payload) payload.clone();
    }
    // remove the following optimization in 3.0 when old TokenStream API removed:
  } else if (target instanceof TokenWrapper) {
    ((TokenWrapper) target).delegate = (Token) this.clone();
  } else {
    // Generic attribute target: push each attribute through its interface.
    initTermBuffer();
    ((TermAttribute) target).setTermBuffer(termBuffer, 0, termLength);
    ((OffsetAttribute) target).setOffset(startOffset, endOffset);
    ((PositionIncrementAttribute) target).setPositionIncrement(positionIncrement);
    ((PayloadAttribute) target).setPayload((payload == null) ? null : (Payload) payload.clone());
    ((FlagsAttribute) target).setFlags(flags);
    ((TypeAttribute) target).setType(type);
  }
}
}
| |
// Copyright 2015 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import static com.google.common.base.Preconditions.checkNotNull;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSortedSet;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.analysis.FilesToRunProvider;
import com.google.devtools.build.lib.analysis.RuleConfiguredTarget.Mode;
import com.google.devtools.build.lib.analysis.RuleContext;
import com.google.devtools.build.lib.analysis.actions.FileWriteAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction;
import com.google.devtools.build.lib.analysis.actions.SpawnAction.Builder;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.packages.AttributeMap;
import com.google.devtools.build.lib.packages.BuildType;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaOptimizationMode;
import com.google.devtools.build.lib.syntax.Type;
import javax.annotation.Nullable;
/**
 * Common code for proguarding Java binaries.
 */
public abstract class ProguardHelper {
  /**
   * Attribute for attaching proguard specs explicitly to a rule, if such an attribute is desired.
   */
  public static final String PROGUARD_SPECS = "proguard_specs";

  /**
   * A class collecting Proguard output artifacts.
   */
  @Immutable
  public static final class ProguardOutput {
    // The Proguard-processed jar; the only artifact that is never null.
    private final Artifact outputJar;
    // Obfuscation mapping (-printmapping), present only when requested.
    @Nullable private final Artifact mapping;
    // Proto-format variant of the mapping (-protomapping).
    @Nullable private final Artifact protoMapping;
    // Seeds report (-printseeds).
    @Nullable private final Artifact seeds;
    // Usage (dead code) report (-printusage).
    @Nullable private final Artifact usage;
    // Mapping for obfuscated constant strings, when requested.
    @Nullable private final Artifact constantStringObfuscatedMapping;
    // Merged configuration dump (-printconfiguration).
    private final Artifact config;

    /** Bundles the artifacts a Proguard invocation produces; only {@code outputJar} is required. */
    public ProguardOutput(Artifact outputJar,
        @Nullable Artifact mapping,
        @Nullable Artifact protoMapping,
        @Nullable Artifact seeds,
        @Nullable Artifact usage,
        @Nullable Artifact constantStringObfuscatedMapping,
        Artifact config) {
      this.outputJar = checkNotNull(outputJar);
      this.mapping = mapping;
      this.protoMapping = protoMapping;
      this.seeds = seeds;
      this.usage = usage;
      this.constantStringObfuscatedMapping = constantStringObfuscatedMapping;
      this.config = config;
    }

    /** Returns the Proguard-processed jar. */
    public Artifact getOutputJar() {
      return outputJar;
    }

    /** Returns the obfuscation mapping, or null if none was requested. */
    @Nullable
    public Artifact getMapping() {
      return mapping;
    }

    /** Returns the proto-format mapping, or null if none was requested. */
    @Nullable
    public Artifact getProtoMapping() {
      return protoMapping;
    }

    /** Returns the obfuscated constant string mapping, or null if none was requested. */
    @Nullable
    public Artifact getConstantStringObfuscatedMapping() {
      return constantStringObfuscatedMapping;
    }

    /** Returns the seeds report, or null if none was requested. */
    @Nullable
    public Artifact getSeeds() {
      return seeds;
    }

    /** Returns the usage report, or null if none was requested. */
    @Nullable
    public Artifact getUsage() {
      return usage;
    }

    /** Returns the configuration dump artifact. */
    public Artifact getConfig() {
      return config;
    }

    /** Adds the output artifacts to the given set builder. */
    public void addAllToSet(NestedSetBuilder<Artifact> filesBuilder) {
      addAllToSet(filesBuilder, null);
    }

    /** Adds the output artifacts to the given set builder. If the proguard map was updated
     * then add the updated map instead of the original proguard output map. */
    public void addAllToSet(NestedSetBuilder<Artifact> filesBuilder, Artifact finalProguardMap) {
      filesBuilder.add(outputJar);
      if (protoMapping != null) {
        filesBuilder.add(protoMapping);
      }
      if (constantStringObfuscatedMapping != null) {
        filesBuilder.add(constantStringObfuscatedMapping);
      }
      if (seeds != null) {
        filesBuilder.add(seeds);
      }
      if (usage != null) {
        filesBuilder.add(usage);
      }
      if (config != null) {
        filesBuilder.add(config);
      }
      // The updated (final) map, when present, supersedes the original mapping.
      if (finalProguardMap != null) {
        filesBuilder.add(finalProguardMap);
      } else if (mapping != null) {
        filesBuilder.add(mapping);
      }
    }
  }

  protected ProguardHelper() {}

  /**
   * Creates an action to run Proguard to <i>output</i> the given {@code deployJar} artifact
   * if --java_optimization_mode calls for it from an assumed input artifact
   * {@link JavaSemantics#JAVA_BINARY_MERGED_JAR}. Returns the artifacts that Proguard will
   * generate or {@code null} if Proguard isn't used.
   *
   * <p>If this method returns artifacts then {@link DeployArchiveBuilder} needs to write the
   * assumed input artifact (instead of the conventional deploy.jar, which now Proguard writes).
   * Do not use this method for binary rules that themselves declare {@link #PROGUARD_SPECS}
   * attributes, which as of 1/2016 includes {@code android_binary} and {@code android_test}.
   */
  @Nullable
  public ProguardOutput applyProguardIfRequested(
      RuleContext ruleContext,
      Artifact deployJar,
      ImmutableList<Artifact> bootclasspath,
      String mainClassName,
      JavaSemantics semantics) throws InterruptedException {
    JavaOptimizationMode optMode = getJavaOptimizationMode(ruleContext);
    if (optMode == JavaOptimizationMode.NOOP || optMode == JavaOptimizationMode.LEGACY) {
      // For simplicity do nothing in LEGACY mode
      return null;
    }
    Preconditions.checkArgument(!bootclasspath.isEmpty(), "Bootclasspath should not be empty");
    FilesToRunProvider proguard = findProguard(ruleContext);
    if (proguard == null) {
      ruleContext.ruleError("--proguard_top required for --java_optimization_mode=" + optMode);
      return null;
    }
    ImmutableList<Artifact> proguardSpecs =
        collectProguardSpecs(ruleContext, bootclasspath, mainClassName);
    Artifact singleJar =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_MERGED_JAR);
    // TODO(bazel-team): Verify that proguard spec files don't contain -printmapping directions
    // which this -printmapping command line flag will override.
    Artifact proguardOutputMap = null;
    if (genProguardMapping(ruleContext.attributes()) || optMode.alwaysGenerateOutputMapping()) {
      proguardOutputMap =
          ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_PROGUARD_MAP);
    }
    return createProguardAction(
        ruleContext,
        proguard,
        singleJar,
        proguardSpecs,
        /* proguardSeeds */ (Artifact) null,
        /* proguardUsage */ (Artifact) null,
        /* proguardMapping */ (Artifact) null,
        bootclasspath,
        deployJar,
        semantics,
        /* optimizationPasses */ 3,
        proguardOutputMap);
  }

  /** Combines rule-specific specs with the transitive specs from this rule's dependencies. */
  private ImmutableList<Artifact> collectProguardSpecs(
      RuleContext ruleContext, ImmutableList<Artifact> bootclasspath, String mainClassName) {
    return ProguardHelper.collectTransitiveProguardSpecs(
        ruleContext, collectProguardSpecsForRule(ruleContext, bootclasspath, mainClassName));
  }

  /**
   * Returns the Proguard binary to invoke when using {@link #applyProguardIfRequested}. Returning
   * {@code null} from this method will generate an error in that method.
   *
   * @return Proguard binary or {@code null} if none is available
   */
  @Nullable
  protected abstract FilesToRunProvider findProguard(RuleContext ruleContext);

  /**
   * Returns rule-specific proguard specs not captured by {@link #PROGUARD_SPECS} attributes when
   * using {@link #applyProguardIfRequested}. Typically these are generated artifacts such as specs
   * generated for android resources. This method is only called if Proguard will definitely be
   * used, so it's ok to generate files here.
   */
  protected abstract ImmutableList<Artifact> collectProguardSpecsForRule(
      RuleContext ruleContext, ImmutableList<Artifact> bootclasspath, String mainClassName);

  /**
   * Retrieves the full set of proguard specs that should be applied to this binary, including the
   * specs passed in, if Proguard should run on the given rule. {@link #createProguardAction}
   * relies on this method returning an empty list if the given rule doesn't declare specs in
   * --java_optimization_mode=legacy.
   *
   * <p>If Proguard shouldn't be applied, or the legacy link mode is used and there are no
   * proguard_specs on this rule, an empty list will be returned, regardless of any given specs or
   * specs from dependencies.
   * {@link com.google.devtools.build.lib.rules.android.AndroidBinary#createAndroidBinary} relies on
   * that behavior.
   */
  public static ImmutableList<Artifact> collectTransitiveProguardSpecs(RuleContext ruleContext,
      Iterable<Artifact> specsToInclude) {
    JavaOptimizationMode optMode = getJavaOptimizationMode(ruleContext);
    if (optMode == JavaOptimizationMode.NOOP) {
      return ImmutableList.of();
    }
    ImmutableList<Artifact> proguardSpecs =
        ruleContext.attributes().has(PROGUARD_SPECS, BuildType.LABEL_LIST)
            ? ruleContext.getPrerequisiteArtifacts(PROGUARD_SPECS, Mode.TARGET).list()
            : ImmutableList.<Artifact>of();
    if (optMode == JavaOptimizationMode.LEGACY && proguardSpecs.isEmpty()) {
      return ImmutableList.of();
    }
    // TODO(bazel-team): In modes except LEGACY verify that proguard specs don't include -dont...
    // flags since those flags would override the desired optMode
    // Sorted set gives a deterministic, duplicate-free ordering of the spec files.
    ImmutableSortedSet.Builder<Artifact> builder =
        ImmutableSortedSet.orderedBy(Artifact.EXEC_PATH_COMPARATOR)
            .addAll(proguardSpecs)
            .addAll(specsToInclude)
            .addAll(ruleContext
                .getPrerequisiteArtifacts(":extra_proguard_specs", Mode.TARGET)
                .list());
    for (ProguardSpecProvider dep :
        ruleContext.getPrerequisites("deps", Mode.TARGET, ProguardSpecProvider.class)) {
      builder.addAll(dep.getTransitiveProguardSpecs());
    }
    // Generate and include implicit Proguard spec for requested mode.
    if (!optMode.getImplicitProguardDirectives().isEmpty()) {
      Artifact implicitDirectives =
          getProguardConfigArtifact(ruleContext, optMode.name().toLowerCase());
      ruleContext.registerAction(
          FileWriteAction.create(
              ruleContext,
              implicitDirectives,
              optMode.getImplicitProguardDirectives(),
              /*makeExecutable=*/ false));
      builder.add(implicitDirectives);
    }
    return builder.build().asList();
  }

  /**
   * Creates a proguard spec that tells proguard to keep the binary's entry point, i.e., the
   * {@code main()} method to be invoked.
   */
  protected static Artifact generateSpecForJavaBinary(
      RuleContext ruleContext, String mainClassName) {
    Artifact result = ProguardHelper.getProguardConfigArtifact(ruleContext, "jvm");
    ruleContext.registerAction(
        FileWriteAction.create(
            ruleContext,
            result,
            String.format(
                "-keep class %s {%n  public static void main(java.lang.String[]);%n}",
                mainClassName),
            /*makeExecutable=*/ false));
    return result;
  }

  /**
   * @return true if proguard_generate_mapping is specified.
   */
  public static final boolean genProguardMapping(AttributeMap rule) {
    return rule.has("proguard_generate_mapping", Type.BOOLEAN)
        && rule.get("proguard_generate_mapping", Type.BOOLEAN);
  }

  /** @return true if proguard_generate_obfuscated_constant_string_mapping is specified. */
  public static final boolean genObfuscatedConstantStringMap(AttributeMap rule) {
    return rule.has("proguard_generate_obfuscated_constant_string_mapping", Type.BOOLEAN)
        && rule.get("proguard_generate_obfuscated_constant_string_mapping", Type.BOOLEAN);
  }

  /**
   * Assembles the {@link ProguardOutput} for a Proguard run: decides which optional mapping
   * artifacts (proto map, constant-string map) are produced based on rule attributes and the
   * optimization mode, and always declares the configuration dump output.
   */
  public static ProguardOutput getProguardOutputs(
      Artifact outputJar,
      @Nullable Artifact proguardSeeds,
      @Nullable Artifact proguardUsage,
      RuleContext ruleContext,
      JavaSemantics semantics,
      @Nullable Artifact proguardOutputMap)
      throws InterruptedException {
    JavaOptimizationMode optMode = getJavaOptimizationMode(ruleContext);
    boolean mappingRequested = genProguardMapping(ruleContext.attributes());
    Artifact proguardOutputProtoMap = null;
    Artifact proguardConstantStringMap = null;
    if (mappingRequested || optMode.alwaysGenerateOutputMapping()) {
      // TODO(bazel-team): if rex is enabled, the proguard map will change and then will no
      // longer correspond to the proto map
      proguardOutputProtoMap = semantics.getProtoMapping(ruleContext);
    }
    if (genObfuscatedConstantStringMap(ruleContext.attributes())) {
      proguardConstantStringMap = semantics.getObfuscatedConstantStringMap(ruleContext);
    }
    Artifact proguardConfigOutput =
        ruleContext.getImplicitOutputArtifact(JavaSemantics.JAVA_BINARY_PROGUARD_CONFIG);
    return new ProguardOutput(
        outputJar,
        proguardOutputMap,
        proguardOutputProtoMap,
        proguardSeeds,
        proguardUsage,
        proguardConstantStringMap,
        proguardConfigOutput);
  }

  /**
   * Creates an action to run Proguard over the given {@code programJar} with various other given
   * inputs to produce {@code proguardOutputJar}. If requested explicitly, or implicitly with
   * --java_optimization_mode, the action also produces a mapping file (which shows what methods and
   * classes in the output Jar correspond to which methods and classes in the input). The returned
   * {@link ProguardOutput} indicates whether a mapping is being produced.
   *
   * <p>See the Proguard manual for the meaning of the various artifacts in play.
   *
   * @param proguard Proguard executable to use
   * @param proguardSpecs Proguard specification files to pass to Proguard
   * @param proguardSeeds optional artifact to receive the -printseeds report
   * @param proguardUsage optional artifact to receive the -printusage report
   * @param proguardMapping optional mapping file for Proguard to apply
   * @param libraryJars any other Jar files that the {@code programJar} will run against
   * @param optimizationPasses if not null specifies to break proguard up into multiple passes with
   *        the given number of optimization passes.
   * @param proguardOutputMap mapping generated by Proguard if requested. could be null.
   */
  public static ProguardOutput createProguardAction(RuleContext ruleContext,
      FilesToRunProvider proguard,
      Artifact programJar,
      ImmutableList<Artifact> proguardSpecs,
      @Nullable Artifact proguardSeeds,
      @Nullable Artifact proguardUsage,
      @Nullable Artifact proguardMapping,
      Iterable<Artifact> libraryJars,
      Artifact proguardOutputJar,
      JavaSemantics semantics,
      @Nullable Integer optimizationPasses,
      @Nullable Artifact proguardOutputMap) throws InterruptedException {
    JavaOptimizationMode optMode = getJavaOptimizationMode(ruleContext);
    Preconditions.checkArgument(optMode != JavaOptimizationMode.NOOP);
    Preconditions.checkArgument(optMode != JavaOptimizationMode.LEGACY || !proguardSpecs.isEmpty());
    ProguardOutput output =
        getProguardOutputs(proguardOutputJar, proguardSeeds, proguardUsage, ruleContext, semantics,
            proguardOutputMap);
    if (optimizationPasses == null) {
      // Run proguard as a single step.
      Builder builder = makeBuilder(
          proguard,
          programJar,
          proguardSpecs,
          proguardMapping,
          libraryJars,
          output.getOutputJar(),
          output.getMapping(),
          output.getProtoMapping(),
          output.getSeeds(),
          output.getUsage(),
          output.getConstantStringObfuscatedMapping(),
          output.getConfig())
          .setProgressMessage("Trimming binary with Proguard")
          .addOutput(proguardOutputJar);
      ruleContext.registerAction(builder.build(ruleContext));
    } else {
      // Optimization passes have been specified, so run proguard in multiple phases:
      // INITIAL -> N x OPTIMIZATION -> FINAL, threading the intermediate jar through
      // -laststageoutput/-nextstageoutput.
      Artifact lastStageOutput = getProguardTempArtifact(
          ruleContext, optMode.name().toLowerCase(), "proguard_preoptimization.jar");
      ruleContext.registerAction(
          makeBuilder(
              proguard,
              programJar,
              proguardSpecs,
              proguardMapping,
              libraryJars,
              output.getOutputJar(),
              /* proguardOutputMap */ null,
              /* proguardOutputProtoMap */ null,
              output.getSeeds(), // ProGuard only prints seeds during INITIAL and NORMAL runtypes.
              /* proguardUsage */ null,
              /* constantStringObfuscatedMapping */ null,
              /* proguardConfigOutput */ null)
              .setProgressMessage("Trimming binary with Proguard: Verification/Shrinking Pass")
              .addArgument("-runtype INITIAL")
              .addArgument("-nextstageoutput")
              .addOutputArgument(lastStageOutput)
              .build(ruleContext));
      for (int i = 0; i < optimizationPasses; i++) {
        Artifact optimizationOutput = getProguardTempArtifact(
            ruleContext, optMode.name().toLowerCase(), "proguard_optimization_" + (i + 1) + ".jar");
        ruleContext.registerAction(
            makeBuilder(
                proguard,
                programJar,
                proguardSpecs,
                proguardMapping,
                libraryJars,
                output.getOutputJar(),
                /* proguardOutputMap */ null,
                /* proguardOutputProtoMap */ null,
                /* proguardSeeds */ null,
                /* proguardUsage */ null,
                /* constantStringObfuscatedMapping */ null,
                /* proguardConfigOutput */ null)
                .setProgressMessage("Trimming binary with Proguard: Optimization Pass " + (i + 1))
                .addArgument("-runtype OPTIMIZATION")
                .addArgument("-laststageoutput")
                .addInputArgument(lastStageOutput)
                .addArgument("-nextstageoutput")
                .addOutputArgument(optimizationOutput)
                .build(ruleContext));
        lastStageOutput = optimizationOutput;
      }
      Builder builder = makeBuilder(
          proguard,
          programJar,
          proguardSpecs,
          proguardMapping,
          libraryJars,
          output.getOutputJar(),
          output.getMapping(),
          output.getProtoMapping(),
          /* proguardSeeds */ null, // runtype FINAL does not produce seeds.
          output.getUsage(),
          output.getConstantStringObfuscatedMapping(),
          output.getConfig())
          .setProgressMessage("Trimming binary with Proguard: Obfuscation and Final Output Pass")
          .addArgument("-runtype FINAL")
          .addArgument("-laststageoutput")
          .addInputArgument(lastStageOutput)
          .addOutput(proguardOutputJar);
      ruleContext.registerAction(builder.build(ruleContext));
    }
    return output;
  }

  /**
   * Builds the common part of a Proguard {@link SpawnAction}: input jars, spec files, and the
   * optional -print*/-mapping outputs. Each nullable parameter, when non-null, contributes the
   * corresponding Proguard flag and registers the artifact as an action output.
   */
  private static Builder makeBuilder(
      FilesToRunProvider proguard,
      Artifact programJar,
      ImmutableList<Artifact> proguardSpecs,
      @Nullable Artifact proguardMapping,
      Iterable<Artifact> libraryJars,
      Artifact proguardOutputJar,
      @Nullable Artifact proguardOutputMap,
      @Nullable Artifact proguardOutputProtoMap,
      @Nullable Artifact proguardSeeds,
      @Nullable Artifact proguardUsage,
      @Nullable Artifact constantStringObfuscatedMapping,
      @Nullable Artifact proguardConfigOutput) {
    Builder builder = new SpawnAction.Builder()
        .addInputs(libraryJars)
        .addInputs(proguardSpecs)
        .setExecutable(proguard)
        .setMnemonic("Proguard")
        // This is handled by the build system; there is no need for proguard to check if things
        // are up to date.
        .addArgument("-forceprocessing")
        .addArgument("-injars")
        .addInputArgument(programJar)
        .addArgument("-outjars")
        // Don't register the output jar as an output of the action, because multiple proguard
        // actions will be created for optimization runs which will overwrite the jar, and only
        // the final proguard action will declare the output jar as an output.
        .addArgument(proguardOutputJar.getExecPathString());
    for (Artifact libraryJar : libraryJars) {
      builder
          .addArgument("-libraryjars")
          .addArgument(libraryJar.getExecPathString());
    }
    if (proguardMapping != null) {
      builder
          .addArgument("-applymapping")
          .addInputArgument(proguardMapping);
    }
    for (Artifact proguardSpec : proguardSpecs) {
      builder.addArgument("@" + proguardSpec.getExecPathString());
    }
    if (proguardOutputMap != null) {
      builder
          .addArgument("-printmapping")
          .addOutputArgument(proguardOutputMap);
    }
    if (proguardOutputProtoMap != null) {
      builder
          .addArgument("-protomapping")
          .addOutputArgument(proguardOutputProtoMap);
    }
    if (constantStringObfuscatedMapping != null) {
      builder
          .addArgument("-obfuscatedconstantstringoutputfile")
          .addOutputArgument(constantStringObfuscatedMapping);
    }
    if (proguardSeeds != null) {
      builder
          .addArgument("-printseeds")
          .addOutputArgument(proguardSeeds);
    }
    if (proguardUsage != null) {
      builder
          .addArgument("-printusage")
          .addOutputArgument(proguardUsage);
    }
    if (proguardConfigOutput != null) {
      builder
          .addArgument("-printconfiguration")
          .addOutputArgument(proguardConfigOutput);
    }
    return builder;
  }

  /**
   * Returns an intermediate artifact used to run Proguard.
   */
  public static Artifact getProguardTempArtifact(
      RuleContext ruleContext, String prefix, String name) {
    // TODO(bazel-team): Remove the redundant inclusion of the rule name, as getUniqueDirectory
    // includes the rulename as well.
    return Preconditions.checkNotNull(ruleContext.getUniqueDirectoryArtifact(
        "proguard",
        Joiner.on("_").join(prefix, ruleContext.getLabel().getName(), name),
        ruleContext.getBinOrGenfilesDirectory()));
  }

  /** Returns the artifact for a generated Proguard configuration file with the given prefix. */
  public static Artifact getProguardConfigArtifact(RuleContext ruleContext, String prefix) {
    return getProguardTempArtifact(ruleContext, prefix, "proguard.cfg");
  }

  /**
   * Returns {@link JavaConfiguration#getJavaOptimizationMode()}.
   */
  public static JavaOptimizationMode getJavaOptimizationMode(RuleContext ruleContext) {
    return ruleContext.getConfiguration().getFragment(JavaConfiguration.class)
        .getJavaOptimizationMode();
  }
}
| |
package moltin.example_moltin.activities;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Typeface;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.Button;
import android.widget.EditText;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.TextView;
import org.json.JSONArray;
import org.json.JSONObject;
import java.util.ArrayList;
import java.util.Iterator;
import moltin.android_sdk.Moltin;
import moltin.android_sdk.utilities.Constants;
import moltin.example_moltin.R;
import moltin.example_moltin.data.ShippingItem;
public class ShippingMethodActivity extends Activity {
private Moltin moltin;
String email="";
String s_first_name="";
String s_last_name="";
String s_address_1="";
String s_address_2="";
String s_country="";
String s_postcode="";
String b_first_name="";
String b_last_name="";
String b_address_1="";
String b_address_2="";
String b_country="";
String b_postcode="";
String shipping="";
String json;
private ArrayList<ShippingItem> shippingArray;
private int lastShippingIndex=0;
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_shipping_method);
    moltin = new Moltin(this);
    shippingArray = new ArrayList<ShippingItem>();
    // Pull all checkout data handed over by the previous screen. Reading the
    // extras bundle once avoids repeating getIntent().getExtras() per key.
    final Bundle extras = getIntent().getExtras();
    json = extras.getString("JSON");
    email = extras.getString("EMAIL");
    s_first_name = extras.getString("S_FIRST_NAME");
    s_last_name = extras.getString("S_LAST_NAME");
    s_address_1 = extras.getString("S_ADDRESS_1");
    s_address_2 = extras.getString("S_ADDRESS_2");
    s_country = extras.getString("S_COUNTRY");
    s_postcode = extras.getString("S_POSTCODE");
    b_first_name = extras.getString("B_FIRST_NAME");
    b_last_name = extras.getString("B_LAST_NAME");
    b_address_1 = extras.getString("B_ADDRESS_1");
    b_address_2 = extras.getString("B_ADDRESS_2");
    b_country = extras.getString("B_COUNTRY");
    b_postcode = extras.getString("B_POSTCODE");
    checkoutOrder();
    changeFonts((RelativeLayout) findViewById(R.id.layMain));
}
/**
 * Single click dispatcher wired from the layout: places the order, goes back,
 * or re-selects a shipping method depending on which view was tapped.
 */
public void onClickHandler(View view) {
    try
    {
        final int id = view.getId();
        if (id == R.id.btnPlaceOrder)
        {
            // Remember the slug of the currently selected shipping option and
            // hand everything over to the payment screen.
            shipping = shippingArray.get(lastShippingIndex).getItemSlug();
            Intent intent = new Intent(this, PaymentActivity.class);
            intent.putExtra("SHIPPING", shipping);
            intent.putExtra("EMAIL", email);
            intent.putExtra("B_FIRST_NAME", b_first_name);
            intent.putExtra("B_LAST_NAME", b_last_name);
            intent.putExtra("B_ADDRESS_1", b_address_1);
            intent.putExtra("B_ADDRESS_2", b_address_2);
            intent.putExtra("B_COUNTRY", b_country);
            intent.putExtra("B_POSTCODE", b_postcode);
            intent.putExtra("S_FIRST_NAME", s_first_name);
            intent.putExtra("S_LAST_NAME", s_last_name);
            intent.putExtra("S_ADDRESS_1", s_address_1);
            intent.putExtra("S_ADDRESS_2", s_address_2);
            intent.putExtra("S_COUNTRY", s_country);
            intent.putExtra("S_POSTCODE", s_postcode);
            intent.putExtra("JSON_CART", json);
            startActivity(intent);
        }
        else if (id == R.id.btnBack)
        {
            finish();
        }
        else if (id == R.id.btnShipmentInactive)
        {
            refreshShippings((int) view.getTag());
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
/**
 * Switches the selected shipping option to index {@code x}: the newly chosen
 * row is re-rendered with the active layout (and the total price label is
 * refreshed), while the previously selected row is re-rendered with the
 * inactive layout. The two branches previously duplicated the same 15-line
 * inflate/populate sequence; that is now factored into
 * {@link #replaceShippingRow}.
 *
 * @param x index (== view tag) of the shipping option the user tapped
 */
private void refreshShippings(int x)
{
    LinearLayout layShippings = (LinearLayout) findViewById(R.id.layShippings);
    for (int i = 0; i < layShippings.getChildCount(); i++)
    {
        View view = layShippings.getChildAt(i);
        int tag = (int) view.getTag();
        if (tag == x)
        {
            // Newly selected row: swap in the active layout and update the total.
            replaceShippingRow(layShippings, x, R.layout.shipping_item_active);
            ((TextView) findViewById(R.id.txtTotalPrice)).setText(shippingArray.get(x).getItemTotalPrice());
        }
        else if (tag == lastShippingIndex)
        {
            // Previously selected row: swap back to the inactive layout.
            replaceShippingRow(layShippings, lastShippingIndex, R.layout.shipping_item_inactive);
        }
    }
    lastShippingIndex = x;
}

/**
 * Removes the child at {@code index} and replaces it with a freshly inflated
 * shipping row (layout {@code layoutRes}) populated from {@code shippingArray}.
 */
private void replaceShippingRow(LinearLayout container, int index, int layoutRes)
{
    container.removeViewAt(index);
    LayoutInflater factory = LayoutInflater.from(getApplicationContext());
    View myView = factory.inflate(layoutRes, null);
    ShippingItem item = shippingArray.get(index);
    TextView txtTitle = (TextView) myView.findViewById(R.id.txtShippingTitle);
    TextView txtPrice = (TextView) myView.findViewById(R.id.txtShippingPrice);
    // NOTE(review): both active and inactive rows use font_regular here, while
    // setShippings() renders the active row with font_bold — confirm which is
    // intended; behavior preserved as-is.
    txtTitle.setText(item.getItemTitle());
    txtTitle.setTypeface(Typeface.createFromAsset(getResources().getAssets(), getString(R.string.font_regular)));
    txtPrice.setText(item.getItemPrice());
    txtPrice.setTypeface(Typeface.createFromAsset(getResources().getAssets(), getString(R.string.font_regular)));
    myView.setTag(index);
    myView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            refreshShippings((int) view.getTag());
        }
    });
    container.addView(myView, index);
}
/**
 * Appends one shipping option row to the given container. The first option
 * (i == 0) is rendered pre-selected with the "active" layout and bold font,
 * and the order total label is initialised from that option; all others use
 * the "inactive" layout with the regular font. Each row is tagged with its
 * index and wired to refreshShippings() on click.
 *
 * Improvement over the original: the Typeface is created once per row (it was
 * loaded from assets twice, once per TextView), and the identical title/price
 * binding code is no longer duplicated across the two branches.
 *
 * @param layShippings container the row is appended to
 * @param i            index of this option in shippingArray
 * @param shipTitle    display title of the shipping method
 * @param shipPrice    formatted price of the shipping method
 */
private void setShippings(LinearLayout layShippings, int i, String shipTitle, String shipPrice)
{
    LayoutInflater factory = LayoutInflater.from(getApplicationContext());
    View myView;
    Typeface font;
    if (i == 0)
    {
        // First option starts out selected; seed the total from it.
        myView = factory.inflate(R.layout.shipping_item_active, null);
        ((TextView) findViewById(R.id.txtTotalPrice)).setText(shippingArray.get(0).getItemTotalPrice());
        font = Typeface.createFromAsset(getResources().getAssets(), getString(R.string.font_bold));
    }
    else
    {
        myView = factory.inflate(R.layout.shipping_item_inactive, null);
        font = Typeface.createFromAsset(getResources().getAssets(), getString(R.string.font_regular));
    }
    TextView txtTitle = (TextView) myView.findViewById(R.id.txtShippingTitle);
    TextView txtPrice = (TextView) myView.findViewById(R.id.txtShippingPrice);
    txtTitle.setText(shipTitle);
    txtTitle.setTypeface(font);
    txtPrice.setText(shipPrice);
    txtPrice.setTypeface(font);
    myView.setTag(i);
    myView.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View view) {
            refreshShippings((int) view.getTag());
        }
    });
    layShippings.addView(myView);
}
/**
 * Starts the checkout call against the moltin cart API. Shows the loading
 * overlay while the request is in flight, then, on success, populates the
 * shipping-method list (shippingArray + one row per method via setShippings)
 * from the JSON response. Runs asynchronously; the callback fires on the
 * handler's thread.
 */
private void checkoutOrder() {
    try {
        ((LinearLayout)findViewById(R.id.layLoading)).setVisibility(View.VISIBLE);
        moltin.cart.checkout(new Handler.Callback() {
            @Override
            public boolean handleMessage(Message msg) {
                // Hide the spinner regardless of outcome.
                ((LinearLayout)findViewById(R.id.layLoading)).setVisibility(View.GONE);
                JSONObject json=(JSONObject)msg.obj;
                if (msg.what == Constants.RESULT_OK) {
                    try {
                        // result.shipping.methods: one entry per available shipping method.
                        JSONArray jsonArrayMethods = json.getJSONObject("result").getJSONObject("shipping").getJSONArray("methods");
                        LinearLayout layShippings=(LinearLayout)findViewById(R.id.layShippings);
                        for(int i=0;i<jsonArrayMethods.length();i++)
                        {
                            String shipTitle=jsonArrayMethods.getJSONObject(i).getString("title");
                            // Formatted tax-inclusive price for display.
                            String shipPrice=jsonArrayMethods.getJSONObject(i).getJSONObject("price").getJSONObject("data").getJSONObject("formatted").getString("with_tax");
                            shippingArray.add(new ShippingItem(jsonArrayMethods.getJSONObject(i).getString("slug"),shipTitle,shipPrice,jsonArrayMethods.getJSONObject(i).getJSONObject("totals").getJSONObject("post_discount").getJSONObject("formatted").getString("with_tax")));
                            setShippings(layShippings, i, shipTitle, shipPrice);
                        }
                        JSONObject jsonGateways = json.getJSONObject("result").getJSONObject("gateways");
                        {
                            // NOTE(review): this loop reads each gateway key but never uses it —
                            // appears to be an unfinished placeholder for payment-gateway handling.
                            Iterator i = jsonGateways.keys();
                            while (i.hasNext()) {
                                String key = (String) i.next();
                            }
                        }
                    } catch (Exception e) {
                        e.printStackTrace();
                    }
                    return true;
                } else {
                    return false;
                }
            }
        });
    } catch (Exception e) {
        e.printStackTrace();
    }
}
/**
 * Recursively applies the app's regular asset font to every Button, TextView
 * and EditText under {@code root}.
 *
 * FIX: EditText is checked before TextView. In the original order the
 * EditText branch was unreachable because EditText (like Button) is a
 * TextView subclass and was captured by the earlier instanceof TextView
 * check. All branches apply the same font today, so this is a latent rather
 * than visible bug, but the order is now correct if the fonts ever diverge.
 * The Typeface is also created once instead of per-view (it was re-loaded
 * from assets for every child).
 *
 * @param root view hierarchy to restyle
 */
protected void changeFonts(ViewGroup root) {
    try
    {
        Typeface font = Typeface.createFromAsset(getResources().getAssets(), getString(R.string.font_regular));
        for (int i = 0; i < root.getChildCount(); i++) {
            View v = root.getChildAt(i);
            if (v instanceof EditText) {
                // Must precede the TextView check: EditText extends TextView.
                ((EditText) v).setTypeface(font);
            } else if (v instanceof Button) {
                ((Button) v).setTypeface(font);
            } else if (v instanceof TextView) {
                ((TextView) v).setTypeface(font);
            } else if (v instanceof ViewGroup) {
                changeFonts((ViewGroup) v);
            }
        }
    }
    catch (Exception e)
    {
        e.printStackTrace();
    }
}
}
| |
/*
* Copyright 2002-2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.castafiore.iot.emulator;
import java.util.Map;
import org.castafiore.iot.client.Device;
import org.castafiore.iot.client.FunctionHandler;
import org.castafiore.iot.client.OnReady;
import org.castafiore.iot.definitions.DeviceDefinition;
import org.castafiore.iot.definitions.EventDefinition;
import org.castafiore.iot.driver.DefinitionRegistryClient;
import org.castafiore.iot.driver.JavaWebsocketLayer;
import org.castafiore.ui.Container;
import org.castafiore.ui.UIException;
import org.castafiore.ui.engine.JQuery;
import org.castafiore.ui.events.Event;
import org.castafiore.ui.ex.EXContainer;
import org.castafiore.ui.ex.form.EXInput;
import org.castafiore.ui.ex.form.button.EXButton;
import org.castafiore.ui.ex.form.button.EXButtonSet;
import org.castafiore.ui.ex.layout.EXMigLayout;
import org.castafiore.ui.ex.toolbar.EXToolBar;
import com.fasterxml.jackson.databind.ObjectMapper;
/**
 * UI component that emulates an IoT device: the user enters a device store
 * URL, server URL and device/definition/group/version ids, connects a
 * {@link Device} over a websocket, fires its defined events via generated
 * buttons, and sees incoming invocations logged in a list.
 */
public class Emulator extends EXMigLayout implements Event {
    private Device device;
    // Form inputs for the connection parameters.
    private EXInput deviceId = new EXInput("deviceId");
    private EXInput definitionId = new EXInput("definitionId");
    private EXInput groupId = new EXInput("groupId");
    private EXInput versionId = new EXInput("versionId");
    private EXInput server = new EXInput("server");
    private EXInput deviceStore = new EXInput("deviceStore");
    private EXButton connect = new EXButton("connect", "Connect");
    private EXButton disconnect = new EXButton("disconnect", "Disconnect");
    private EXToolBar buttons = new EXToolBar("toolbar");
    // List that logs invocations arriving from the IoT server.
    private Container invokedList = new EXContainer("invokedList", "ul")
            .addClass("list-group").setStyle("margin", "12px");
    Container inputs = new EXContainer("inputs", "div");
    EXMigLayout form = new EXMigLayout("form", "12;12");
    private Container title = new EXContainer("head", "div").addClass("panel-heading").setText("New Device");
    private static ObjectMapper mapper = new ObjectMapper();

    public Emulator() {
        super("emulator", "12;12;12");
        addClass("panel panel-default");
        buttons.setStyle("margin", "12px");
        form.addChild(inputs.setStyle("padding", "12px"), "0:0");
        addField(inputs, "Device Store:", deviceStore);
        addField(inputs, "WikkIOT server", server);
        addField(inputs, "Device Id:", deviceId);
        // FIX: the three rows below originally all bound deviceId (copy-paste
        // bug), leaving definitionId/groupId/versionId detached from the form,
        // so connect() always read empty values for them. Also fixes the
        // "Devinition" label typo.
        addField(inputs, "Definition Id:", definitionId);
        addField(inputs, "Group Id:", groupId);
        addField(inputs, "Version Id:", versionId);
        //EXMigLayout buttons = new EXMigLayout("buttons", "6:6");
        buttons.addClass("container");
        buttons.addChild(connect);
        buttons.addChild(disconnect);
        connect.addEvent(this, CLICK);
        form.addChild(buttons, "0:1");
        disconnect.setDisplay(false);
        addChild(form, "0:0");
        //addChild(eventList, "0:1");
        addChild(invokedList, "0:1");
        String text = "<span style=\"cursor: pointer\" title=\"Logs Arbitrary invocation from iot server\" class=\"badge\">?</span>Invocation logs";
        invokedList.addChild(new EXContainer("head", "li").addClass("list-group-item active").setText(text));
        addChildAt(title, 0);
    }

    /** Wraps an input in a margin div with a placeholder label and adds it to parent. */
    private void addField(Container parent, String label, EXInput input) {
        Container group = new EXContainer("g", "div");
        group.setStyle("margin", "12px 0px");
        group.addChild(input.setStyleClass("form-control").setAttribute("placeholder", label));
        parent.addChild(group);
    }

    /**
     * Fetches the device definition identified by the form fields from the
     * definition registry and connects a device built from it.
     */
    public void connect() {
        try {
            String store = deviceStore.getValue();
            String defid = definitionId.getValue();
            String grpId = groupId.getValue();
            String vid = versionId.getValue();
            DeviceDefinition definition = new DefinitionRegistryClient(store).getDefinition(defid, grpId, vid);
            //String json = ResourceUtil.readUrl(store + "/castafiore/resource?spec=iot:get#" + defid + "/" + grpId + "/" + vid);
            //DeviceDefinition definition = mapper.readValue(json.getBytes(), DeviceDefinition.class);
            setDefinition(definition);
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /** Disconnects the currently connected device. */
    public void disconnect() {
        try {
            device.disconnect();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Builds the emulated {@link Device} for the given definition, wires the
     * ready/invocation handlers and per-event buttons, and connects it to the
     * server entered in the form.
     */
    public void setDefinition(final DeviceDefinition definition) {
        // FIX: was deviceId.getId(), which returns the widget's component id
        // ("deviceId"), not the user-entered device id; every other field in
        // connect() is read via getValue().
        String did = deviceId.getValue();
        device = new Device(did, definition);
        device.setWebsocketLayer(new JavaWebsocketLayer(device));
        device.onReady(new OnReady() {
            @Override
            public void ready() {
                // Swap Connect for Disconnect, hide the form, show the event buttons.
                disconnect.setStyle("display", "inline").addClass("btn-danger").setStyle("float", "left");
                connect.setStyle("display", "none");
                buttons.getDescendentByName("events").setDisplay(true);
                inputs.setDisplay(false);
                title.setText("<img src=" + definition.getIcon() + " width=16px> " + definition.getLabel());
                System.out.println("connected");
            }
        });
        device.addFunctionHandler(new FunctionHandler() {
            @Override
            public void execute(String name, Map<String, String> input) {
                // Log every server-initiated invocation at the top of the list.
                Container li = new EXContainer(name, "li")
                        .addClass("list-group-item list-group-item-info");
                String text = "<span class=\"badge\">" + input.toString()
                        + "</span> Invoked :" + name;
                li.setText(text);
                invokedList.addChildAt(li, 1);
            }
        });
        // One button per event declared in the definition; hidden until ready().
        EXButtonSet set = new EXButtonSet("events");
        set.setDisplay(false);
        buttons.addItem(set);
        for (EventDefinition defn : definition.getEvents()) {
            EXButton btn = new EXButton(defn.getName(), defn.getName());
            btn.addEvent(this, CLICK);
            set.addItem(btn);
        }
        device.connect(this.server.getValue());
    }

    @Override
    public void ClientAction(JQuery container) {
        container.server(this);
    }

    /** Dispatches button clicks: connect/disconnect, or propagate a device event. */
    @Override
    public boolean ServerAction(Container container, Map<String, String> request)
            throws UIException {
        if (container.getName().equals("connect")) {
            connect();
        } else if (container.getName().equals("disconnect")) {
            disconnect();
        } else {
            device.propagateEvent(container.getName());
        }
        return true;
    }

    @Override
    public void Success(JQuery container, Map<String, String> request)
            throws UIException {
        // TODO Auto-generated method stub
    }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.backends.gwt;
import com.badlogic.gdx.Application;
import com.badlogic.gdx.ApplicationListener;
import com.badlogic.gdx.ApplicationLogger;
import com.badlogic.gdx.Audio;
import com.badlogic.gdx.Files;
import com.badlogic.gdx.Gdx;
import com.badlogic.gdx.Graphics;
import com.badlogic.gdx.Input;
import com.badlogic.gdx.LifecycleListener;
import com.badlogic.gdx.Net;
import com.badlogic.gdx.Preferences;
import com.badlogic.gdx.backends.gwt.preloader.Preloader;
import com.badlogic.gdx.backends.gwt.preloader.Preloader.PreloaderCallback;
import com.badlogic.gdx.backends.gwt.preloader.Preloader.PreloaderState;
import com.badlogic.gdx.backends.gwt.soundmanager2.SoundManager;
import com.badlogic.gdx.utils.Array;
import com.badlogic.gdx.utils.Clipboard;
import com.badlogic.gdx.utils.ObjectMap;
import com.badlogic.gdx.utils.TimeUtils;
import com.google.gwt.animation.client.AnimationScheduler;
import com.google.gwt.animation.client.AnimationScheduler.AnimationCallback;
import com.google.gwt.core.client.EntryPoint;
import com.google.gwt.core.client.GWT;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.dom.client.CanvasElement;
import com.google.gwt.dom.client.Document;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.Image;
import com.google.gwt.user.client.ui.InlineHTML;
import com.google.gwt.user.client.ui.Label;
import com.google.gwt.user.client.ui.Panel;
import com.google.gwt.user.client.ui.RootPanel;
import com.google.gwt.user.client.ui.SimplePanel;
import com.google.gwt.user.client.ui.TextArea;
import com.google.gwt.user.client.ui.VerticalPanel;
import com.google.gwt.user.client.ui.Widget;
/** Implementation of an {@link Application} based on GWT. Clients have to override {@link #getConfig()} and
* {@link #createApplicationListener()}. Clients can override the default loading screen via
* {@link #getPreloaderCallback()} and implement any loading screen drawing via GWT widgets.
* @author mzechner */
public abstract class GwtApplication implements EntryPoint, Application {
	private ApplicationListener listener;
	GwtApplicationConfiguration config;
	GwtGraphics graphics;
	private GwtInput input;
	private GwtNet net;
	// Panel hosting the canvas; either config.rootPanel or one created in onModuleLoad().
	private Panel root = null;
	protected TextArea log = null;
	private int logLevel = LOG_ERROR;
	private ApplicationLogger applicationLogger;
	// Runnables posted via postRunnable(); drained once per frame in mainLoop().
	private Array<Runnable> runnables = new Array<Runnable>();
	// Scratch copy so runnables can post new runnables while the current batch runs.
	private Array<Runnable> runnablesHelper = new Array<Runnable>();
	private Array<LifecycleListener> lifecycleListeners = new Array<LifecycleListener>();
	// Last known canvas size, used to detect resizes in mainLoop().
	private int lastWidth;
	private int lastHeight;
	Preloader preloader;
	private static AgentInfo agentInfo;
	private ObjectMap<String, Preferences> prefs = new ObjectMap<String, Preferences>();
	private Clipboard clipboard;
	LoadingListener loadingListener;
	/** @return the configuration for the {@link GwtApplication}. */
	public abstract GwtApplicationConfiguration getConfig ();
	public String getPreloaderBaseURL()
	{
		return GWT.getHostPageBaseURL() + "assets/";
	}
	@Override
	public ApplicationListener getApplicationListener() {
		return listener;
	}
	public abstract ApplicationListener createApplicationListener();
	/**
	 * GWT entry point: resolves the root panel (config.rootPanel, the
	 * "embed-<module>" element, or the page root), then initializes audio
	 * (unless disabled) before kicking off asset preloading.
	 */
	@Override
	public void onModuleLoad () {
		GwtApplication.agentInfo = computeAgentInfo();
		this.listener = createApplicationListener();
		this.config = getConfig();
		setApplicationLogger(new GwtApplicationLogger(this.config.log));
		if (config.rootPanel != null) {
			this.root = config.rootPanel;
		} else {
			Element element = Document.get().getElementById("embed-" + GWT.getModuleName());
			if (element == null) {
				// No embed element: attach a centered panel directly to the page root.
				VerticalPanel panel = new VerticalPanel();
				panel.setWidth("" + config.width + "px");
				panel.setHeight("" + config.height + "px");
				panel.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_CENTER);
				panel.setVerticalAlignment(HasVerticalAlignment.ALIGN_MIDDLE);
				RootPanel.get().add(panel);
				RootPanel.get().setWidth("" + config.width + "px");
				RootPanel.get().setHeight("" + config.height + "px");
				this.root = panel;
			} else {
				// Embed element found: nest the panel inside it.
				VerticalPanel panel = new VerticalPanel();
				panel.setWidth("" + config.width + "px");
				panel.setHeight("" + config.height + "px");
				panel.setHorizontalAlignment(HasHorizontalAlignment.ALIGN_CENTER);
				panel.setVerticalAlignment(HasVerticalAlignment.ALIGN_MIDDLE);
				element.appendChild(panel.getElement());
				root = panel;
			}
		}
		if (config.disableAudio) {
			preloadAssets();
		} else {
			// initialize SoundManager2
			SoundManager.init(GWT.getModuleBaseURL(), 9, config.preferFlash, new SoundManager.SoundManagerCallback() {
				@Override
				public void onready () {
					preloadAssets();
				}
				@Override
				public void ontimeout (String status, String errorType) {
					error("SoundManager", status + " " + errorType);
				}
			});
		}
	}
	/**
	 * Preloads the assets listed in assets.txt, forwarding progress to the
	 * preloader callback; once finished, clears the loading UI and starts the
	 * render loop via setupLoop().
	 */
	void preloadAssets () {
		final PreloaderCallback callback = getPreloaderCallback();
		preloader = createPreloader();
		preloader.preload("assets.txt", new PreloaderCallback() {
			@Override
			public void error (String file) {
				callback.error(file);
			}
			@Override
			public void update (PreloaderState state) {
				callback.update(state);
				if (state.hasEnded()) {
					getRootPanel().clear();
					if(loadingListener != null)
						loadingListener.beforeSetup();
					setupLoop();
					addEventListeners();
					if(loadingListener != null)
						loadingListener.afterSetup();
				}
			}
		});
	}
	/**
	 * Override this method to return a custom widget informing the that their browser lacks support of WebGL.
	 *
	 * @return Widget to display when WebGL is not supported.
	 */
	public Widget getNoWebGLSupportWidget() {
		return new Label("Sorry, your browser doesn't seem to support WebGL");
	}
	/**
	 * Creates graphics/audio/input/net/files modules, installs them on the Gdx
	 * statics, calls listener.create()/resize(), and schedules mainLoop() via
	 * the browser's animation-frame scheduler.
	 */
	void setupLoop () {
		// setup modules
		try {
			graphics = new GwtGraphics(root, config);
		} catch (Throwable e) {
			// WebGL context creation failed: show the fallback widget and abort setup.
			root.clear();
			root.add(getNoWebGLSupportWidget());
			return;
		}
		lastWidth = graphics.getWidth();
		lastHeight = graphics.getHeight();
		Gdx.app = this;
		if(config.disableAudio) {
			Gdx.audio = null;
		} else {
			Gdx.audio = new GwtAudio();
		}
		Gdx.graphics = graphics;
		Gdx.gl20 = graphics.getGL20();
		Gdx.gl = Gdx.gl20;
		Gdx.files = new GwtFiles(preloader);
		this.input = new GwtInput(graphics.canvas);
		Gdx.input = this.input;
		this.net = new GwtNet(config);
		Gdx.net = this.net;
		this.clipboard = new GwtClipboard();
		updateLogLabelSize();
		// tell listener about app creation
		try {
			listener.create();
			listener.resize(graphics.getWidth(), graphics.getHeight());
		} catch (Throwable t) {
			error("GwtApplication", "exception: " + t.getMessage(), t);
			t.printStackTrace();
			throw new RuntimeException(t);
		}
		AnimationScheduler.get().requestAnimationFrame(new AnimationCallback() {
			@Override
			public void execute (double timestamp) {
				try {
					mainLoop();
				} catch (Throwable t) {
					error("GwtApplication", "exception: " + t.getMessage(), t);
					throw new RuntimeException(t);
				}
				// Re-schedule ourselves for the next browser frame.
				AnimationScheduler.get().requestAnimationFrame(this, graphics.canvas);
			}
		}, graphics.canvas);
	}
	/**
	 * One frame: propagate resizes, run posted runnables, render the listener,
	 * and reset per-frame input state.
	 */
	void mainLoop() {
		graphics.update();
		if (Gdx.graphics.getWidth() != lastWidth || Gdx.graphics.getHeight() != lastHeight) {
			GwtApplication.this.listener.resize(Gdx.graphics.getWidth(), Gdx.graphics.getHeight());
			lastWidth = graphics.getWidth();
			lastHeight = graphics.getHeight();
			Gdx.gl.glViewport(0, 0, lastWidth, lastHeight);
		}
		// Drain into a helper array so runnables may safely post new runnables.
		runnablesHelper.addAll(runnables);
		runnables.clear();
		for (int i = 0; i < runnablesHelper.size; i++) {
			runnablesHelper.get(i).run();
		}
		runnablesHelper.clear();
		graphics.frameId++;
		listener.render();
		input.reset();
	}
	public Panel getRootPanel () {
		return root;
	}
	long loadStart = TimeUtils.nanoTime();
	public Preloader createPreloader() {
		return new Preloader(getPreloaderBaseURL());
	}
	/**
	 * Builds the default loading screen: logo image plus a percentage meter
	 * whose width tracks preload progress. Override for a custom screen.
	 */
	public PreloaderCallback getPreloaderCallback () {
		final Panel preloaderPanel = new VerticalPanel();
		preloaderPanel.setStyleName("gdx-preloader");
		final Image logo = new Image(GWT.getModuleBaseURL() + "logo.png");
		logo.setStyleName("logo");
		preloaderPanel.add(logo);
		final Panel meterPanel = new SimplePanel();
		meterPanel.setStyleName("gdx-meter");
		meterPanel.addStyleName("red");
		final InlineHTML meter = new InlineHTML();
		final Style meterStyle = meter.getElement().getStyle();
		meterStyle.setWidth(0, Unit.PCT);
		meterPanel.add(meter);
		preloaderPanel.add(meterPanel);
		getRootPanel().add(preloaderPanel);
		return new PreloaderCallback() {
			@Override
			public void error (String file) {
				System.out.println("error: " + file);
			}
			@Override
			public void update (PreloaderState state) {
				meterStyle.setWidth(100f * state.getProgress(), Unit.PCT);
			}
		};
	}
	@Override
	public Graphics getGraphics () {
		return graphics;
	}
	@Override
	public Audio getAudio () {
		return Gdx.audio;
	}
	@Override
	public Input getInput () {
		return Gdx.input;
	}
	@Override
	public Files getFiles () {
		return Gdx.files;
	}
	@Override
	public Net getNet() {
		return Gdx.net;
	}
	// Keeps the optional on-page log TextArea as wide as the canvas.
	private void updateLogLabelSize () {
		if (log != null) {
			if (graphics != null) {
				log.setSize(graphics.getWidth() + "px", "200px");
			} else {
				log.setSize("400px", "200px"); // Should not happen at this point, use dummy value
			}
		}
	}
	@Override
	public void log (String tag, String message) {
		if (logLevel >= LOG_INFO) getApplicationLogger().log(tag, message);
	}
	@Override
	public void log (String tag, String message, Throwable exception) {
		if (logLevel >= LOG_INFO) getApplicationLogger().log(tag, message, exception);
	}
	@Override
	public void error (String tag, String message) {
		if (logLevel >= LOG_ERROR) getApplicationLogger().error(tag, message);
	}
	@Override
	public void error (String tag, String message, Throwable exception) {
		if (logLevel >= LOG_ERROR) getApplicationLogger().error(tag, message, exception);
	}
	@Override
	public void debug (String tag, String message) {
		if (logLevel >= LOG_DEBUG) getApplicationLogger().debug(tag, message);
	}
	@Override
	public void debug (String tag, String message, Throwable exception) {
		if (logLevel >= LOG_DEBUG) getApplicationLogger().debug(tag, message, exception);
	}
	@Override
	public void setLogLevel (int logLevel) {
		this.logLevel = logLevel;
	}
	@Override
	public int getLogLevel() {
		return logLevel;
	}
	@Override
	public void setApplicationLogger (ApplicationLogger applicationLogger) {
		this.applicationLogger = applicationLogger;
	}
	@Override
	public ApplicationLogger getApplicationLogger () {
		return applicationLogger;
	}
	@Override
	public ApplicationType getType () {
		return ApplicationType.WebGL;
	}
	@Override
	public int getVersion () {
		return 0;
	}
	// Heap sizes are not observable from the browser; report 0.
	@Override
	public long getJavaHeap () {
		return 0;
	}
	@Override
	public long getNativeHeap () {
		return 0;
	}
	// Lazily creates and caches one GwtPreferences instance per name.
	@Override
	public Preferences getPreferences (String name) {
		Preferences pref = prefs.get(name);
		if (pref == null) {
			pref = new GwtPreferences(name);
			prefs.put(name, pref);
		}
		return pref;
	}
	@Override
	public Clipboard getClipboard () {
		return clipboard;
	}
	// Queues a runnable to run on the next frame of mainLoop().
	@Override
	public void postRunnable (Runnable runnable) {
		runnables.add(runnable);
	}
	// Browser apps cannot terminate themselves; intentionally a no-op.
	@Override
	public void exit () {
	}
	/** Contains precomputed information on the user-agent. Useful for dealing with browser and OS behavioral differences. Kindly
	 * borrowed from PlayN */
	public static AgentInfo agentInfo () {
		return agentInfo;
	}
	/** kindly borrowed from PlayN **/
	private static native AgentInfo computeAgentInfo () /*-{
		var userAgent = navigator.userAgent.toLowerCase();
		return {
			// browser type flags
			isFirefox : userAgent.indexOf("firefox") != -1,
			isChrome : userAgent.indexOf("chrome") != -1,
			isSafari : userAgent.indexOf("safari") != -1,
			isOpera : userAgent.indexOf("opera") != -1,
			isIE : userAgent.indexOf("msie") != -1 || userAgent.indexOf("trident") != -1,
			// OS type flags
			isMacOS : userAgent.indexOf("mac") != -1,
			isLinux : userAgent.indexOf("linux") != -1,
			isWindows : userAgent.indexOf("win") != -1
		};
	}-*/;
	/** Returned by {@link #agentInfo}. Kindly borrowed from PlayN. */
	public static class AgentInfo extends JavaScriptObject {
		public final native boolean isFirefox () /*-{
			return this.isFirefox;
		}-*/;
		public final native boolean isChrome () /*-{
			return this.isChrome;
		}-*/;
		public final native boolean isSafari () /*-{
			return this.isSafari;
		}-*/;
		public final native boolean isOpera () /*-{
			return this.isOpera;
		}-*/;
		public final native boolean isIE () /*-{
			return this.isIE;
		}-*/;
		public final native boolean isMacOS () /*-{
			return this.isMacOS;
		}-*/;
		public final native boolean isLinux () /*-{
			return this.isLinux;
		}-*/;
		public final native boolean isWindows () /*-{
			return this.isWindows;
		}-*/;
		protected AgentInfo () {
		}
	}
	public String getBaseUrl () {
		return preloader.baseUrl;
	}
	public Preloader getPreloader () {
		return preloader;
	}
	public CanvasElement getCanvasElement(){
		return graphics.canvas;
	}
	public LoadingListener getLoadingListener () {
		return loadingListener;
	}
	public void setLoadingListener (LoadingListener loadingListener) {
		this.loadingListener = loadingListener;
	}
	@Override
	public void addLifecycleListener (LifecycleListener listener) {
		synchronized(lifecycleListeners) {
			lifecycleListeners.add(listener);
		}
	}
	@Override
	public void removeLifecycleListener (LifecycleListener listener) {
		synchronized(lifecycleListeners) {
			lifecycleListeners.removeValue(listener, true);
		}
	}
	native static public void consoleLog(String message) /*-{
		console.log( "GWT: " + message );
	}-*/;
	// Hooks the browser's (possibly vendor-prefixed) page-visibility event so
	// tab switches translate into pause/resume calls.
	private native void addEventListeners() /*-{
		var self = this;
		var eventName = null;
		if ("hidden" in $doc) {
			eventName = "visibilitychange"
		} else if ("webkitHidden" in $doc) {
			eventName = "webkitvisibilitychange"
		} else if ("mozHidden" in $doc) {
			eventName = "mozvisibilitychange"
		} else if ("msHidden" in $doc) {
			eventName = "msvisibilitychange"
		}
		if (eventName !== null) {
			$doc.addEventListener(eventName, function(e) {
				self.@com.badlogic.gdx.backends.gwt.GwtApplication::onVisibilityChange(Z)($doc['hidden'] !== true);
			});
		}
	}-*/;
	// Called from JSNI when page visibility changes; forwards resume/pause to
	// lifecycle listeners and the application listener.
	private void onVisibilityChange (boolean visible) {
		if (visible) {
			for (LifecycleListener listener : lifecycleListeners) {
				listener.resume();
			}
			listener.resume();
		} else {
			for (LifecycleListener listener : lifecycleListeners) {
				listener.pause();
			}
			listener.pause();
		}
	}
	/**
	 * LoadingListener interface main purpose is to do some things before or after {@link GwtApplication#setupLoop()}
	 */
	public interface LoadingListener{
		/**
		 * Method called before the setup
		 */
		public void beforeSetup();
		/**
		 * Method called after the setup
		 */
		public void afterSetup();
	}
}
| |
/*
* Copyright 2015 Lei CHEN (raistlic@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.raistlic.common.taskqueue;
import org.raistlic.common.precondition.InvalidContextException;
import org.raistlic.common.precondition.InvalidParameterException;
import org.raistlic.common.precondition.Precondition;
import org.raistlic.common.predicate.Predicates;
import org.raistlic.common.util.ExceptionHandler;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
/**
* @author Lei Chen (2015-11-24)
*/
/**
 * Default {@link TaskQueue} implementation backed by a single-threaded
 * executor that drains a {@link LinkedBlockingQueue} of runnables. Thread-safe
 * for scheduling; start/stop transitions are guarded by an {@link AtomicBoolean}.
 *
 * Fixes over the original:
 * - {@code stop(long, TimeUnit)} now actually throws the declared
 *   {@link TimeoutException} when the executor fails to terminate in time
 *   (the boolean result of {@code awaitTermination} was previously ignored).
 * - {@code schedule(Task)} now enforces the running-state precondition, for
 *   consistency with {@code schedule(Runnable)} and both scheduleAndWait
 *   overloads (it previously accepted tasks into a possibly-null queue).
 *
 * @author Lei Chen (2015-11-24)
 */
final class DefaultTaskQueue implements TaskQueue, TaskQueue.Controller {

  private final ExecutorService executorService;
  private final QueueRunnable queueRunnable;
  private final ExceptionHandler exceptionHandler;
  private final AtomicBoolean running;
  // Matches any thread that is NOT the queue's worker thread; used to forbid
  // blocking waits from inside the queue itself (would deadlock).
  private final Predicate<? super Thread> isNotTaskQueuePredicate;
  // Worker thread while running, null otherwise; written only by QueueRunnable.
  private volatile Thread taskQueueThread;
  private volatile LinkedBlockingQueue<Runnable> queue;

  DefaultTaskQueue(ThreadFactory threadFactory,
                   ExceptionHandler exceptionHandler) {
    Precondition.param(threadFactory).isNotNull();
    Precondition.param(exceptionHandler).isNotNull();
    this.executorService = Executors.newSingleThreadExecutor(threadFactory);
    this.exceptionHandler = exceptionHandler;
    this.queueRunnable = this.new QueueRunnable();
    this.running = new AtomicBoolean(false);
    this.isNotTaskQueuePredicate = Predicates.not(this.new TaskQueueThreadPredicate());
  }

  @Override
  public TaskQueue get() {
    return this;
  }

  /**
   * Starts the queue's worker if not already running.
   *
   * @return {@code true} if this call started the queue, {@code false} if it
   *         was already running.
   */
  @Override
  public boolean start() {
    if (running.getAndSet(true)) {
      return false;
    } else {
      queue = new LinkedBlockingQueue<Runnable>();
      executorService.submit(queueRunnable);
      return true;
    }
  }

  /**
   * Stops the queue, interrupting the current task, and waits up to the given
   * timeout for the worker to terminate.
   *
   * @return {@code true} if this call stopped the queue, {@code false} if it
   *         was not running.
   * @throws TimeoutException if the worker did not terminate within the timeout.
   */
  @Override
  public boolean stop(long timeout, TimeUnit timeUnit) throws InterruptedException, TimeoutException {
    if (running.getAndSet(false)) {
      executorService.shutdownNow();
      // Wake the worker in case it is blocked on an empty queue.
      queue.offer(EmptyRunnable.INSTANCE);
      // FIX: awaitTermination's result was ignored, so the declared
      // TimeoutException was never thrown; honor the contract.
      if (!executorService.awaitTermination(timeout, timeUnit)) {
        throw new TimeoutException(
            "Task queue failed to terminate within " + timeout + " " + timeUnit);
      }
      return true;
    } else {
      return false;
    }
  }

  /**
   * Stops the queue without waiting; optionally interrupts the task currently
   * executing.
   */
  @Override
  public void stop(boolean interruptCurrentTask) {
    if (!running.getAndSet(false)) {
      return;
    }
    if (interruptCurrentTask) {
      executorService.shutdownNow();
    } else {
      executorService.shutdown();
    }
    queue.offer(EmptyRunnable.INSTANCE);
  }

  /** Enqueues a fire-and-forget task; its exceptions go to the exception handler. */
  @Override
  public void schedule(Runnable task) throws InvalidParameterException, InvalidContextException {
    Precondition.param(task).isNotNull();
    Precondition.context(running.get()).isTrue();
    queue.offer(this.new ExceptionFreeTaskWrapper(task));
  }

  /**
   * Enqueues a result-bearing task and returns a promise for its outcome.
   */
  @Override
  public <R> Promise<R> schedule(Task<R> task) throws InvalidParameterException, InvalidContextException {
    Precondition.param(task).isNotNull();
    // FIX: enforce the running precondition like every other schedule path.
    Precondition.context(running.get()).isTrue();
    DefaultPromise<R> defaultPromise = new DefaultPromise<R>(task, exceptionHandler);
    queue.offer(defaultPromise);
    return defaultPromise;
  }

  /**
   * Schedules the task and blocks until it completes. Must not be called from
   * the queue's own worker thread (would deadlock).
   */
  @Override
  public <R> R scheduleAndWait(Task<R> task)
      throws InvalidParameterException,
      InvalidContextException,
      InvalidContextException,
      TaskExecutionException,
      InterruptedException {
    Precondition.param(task).isNotNull();
    Precondition.currentThread().matches(
        isNotTaskQueuePredicate,
        "The method cannot be invoked with in the task queue execution thread."
    );
    Precondition.context(running.get()).isTrue();
    Promise<R> promise = schedule(task);
    try {
      return promise.get();
    } catch (ExecutionException ex) {
      throw new TaskExecutionException(ex);
    }
  }

  /**
   * Schedules the task and blocks until it completes or the timeout elapses.
   * Must not be called from the queue's own worker thread.
   */
  @Override
  public <R> R scheduleAndWait(Task<R> task, long timeout, TimeUnit timeUnit)
      throws InvalidParameterException,
      InvalidContextException,
      InvalidContextException,
      TaskExecutionException,
      InterruptedException,
      TimeoutException {
    Precondition.param(timeout).greaterThanOrEqualTo(0L);
    Precondition.param(timeUnit).isNotNull();
    Precondition.param(task).isNotNull();
    Precondition.currentThread().matches(
        isNotTaskQueuePredicate,
        "The method cannot be invoked with in the task queue execution thread."
    );
    Precondition.context(running.get()).isTrue();
    Promise<R> promise = schedule(task);
    try {
      return promise.get(timeout, timeUnit);
    } catch (ExecutionException ex) {
      throw new TaskExecutionException(ex);
    }
  }

  @Override
  public boolean isTaskExecutionThread() throws InvalidContextException {
    return Thread.currentThread() == taskQueueThread;
  }

  @Override
  public boolean isRunning() {
    return running.get();
  }

  /** Worker loop: takes runnables from the queue until the running flag drops. */
  private final class QueueRunnable implements Runnable {

    @Override
    public void run() {
      taskQueueThread = Thread.currentThread();
      while (running.get()) {
        Runnable runnable;
        try {
          runnable = queue.take();
        } catch (InterruptedException ex) {
          // shutdownNow() interrupts the worker; exit the loop.
          break;
        }
        // Re-check: stop() may have flipped the flag while we were blocked.
        if (running.get()) {
          runnable.run();
        }
      }
      running.set(false);
      taskQueueThread = null;
    }
  }

  /** Routes a task's exceptions to the handler so the worker loop never dies. */
  private final class ExceptionFreeTaskWrapper implements Runnable {

    private final Runnable runnable;

    private ExceptionFreeTaskWrapper(Runnable runnable) {
      this.runnable = runnable;
    }

    @Override
    public void run() {
      try {
        runnable.run();
      } catch (Exception ex) {
        exceptionHandler.exceptionOccur(Thread.currentThread(), ex);
      }
    }
  }

  /** Tests whether a thread is the queue's worker thread. */
  private final class TaskQueueThreadPredicate implements Predicate<Thread> {

    @Override
    public boolean test(Thread thread) {
      return thread == taskQueueThread;
    }
  }

  /** No-op runnable used only to unblock the worker's queue.take() on stop. */
  private enum EmptyRunnable implements Runnable {
    INSTANCE;

    @Override
    public void run() {
      // do nothing
    }
  }
}
| |
package im.bci;
import java.io.*;
import java.util.*;
import java.awt.*;
import java.awt.image.*;
/**
* Class GifDecoder - Decodes a GIF file into one or more frames.
* <br><pre>
* Example:
* GifDecoder d = new GifDecoder();
* d.read("sample.gif");
* int n = d.getFrameCount();
* for (int i = 0; i < n; i++) {
* BufferedImage frame = d.getFrame(i); // frame i
* int t = d.getDelay(i); // display duration of frame in milliseconds
* // do something with frame
* }
* </pre>
* No copyright asserted on the source code of this class. May be used for
* any purpose, however, refer to the Unisys LZW patent for any additional
* restrictions. Please forward any corrections to questions at fmsware.com.
*
* @author Kevin Weiner, FM Software; LZW decoder adapted from John Cristy's ImageMagick.
* @version 1.03 November 2003
*
*/
public class GifDecoder {
/**
* File read status: No errors.
*/
public static final int STATUS_OK = 0;
/**
* File read status: Error decoding file (may be partially decoded)
*/
public static final int STATUS_FORMAT_ERROR = 1;
/**
* File read status: Unable to open source.
*/
public static final int STATUS_OPEN_ERROR = 2;
protected BufferedInputStream in;
protected int status;
protected int width; // full image width
protected int height; // full image height
protected boolean gctFlag; // global color table used
protected int gctSize; // size of global color table
protected int loopCount = 1; // iterations; 0 = repeat forever
protected int[] gct; // global color table
protected int[] lct; // local color table
protected int[] act; // active color table
protected int bgIndex; // background color index
protected int bgColor; // background color
protected int lastBgColor; // previous bg color
protected int pixelAspect; // pixel aspect ratio
protected boolean lctFlag; // local color table flag
protected boolean interlace; // interlace flag
protected int lctSize; // local color table size
protected int ix, iy, iw, ih; // current image rectangle
protected Rectangle lastRect; // last image rect
protected BufferedImage image; // current frame
protected BufferedImage lastImage; // previous frame
protected byte[] block = new byte[256]; // current data block
protected int blockSize = 0; // block size
// last graphic control extension info
protected int dispose = 0;
// 0=no action; 1=leave in place; 2=restore to bg; 3=restore to prev
protected int lastDispose = 0;
protected boolean transparency = false; // use transparent color
protected int delay = 0; // delay in milliseconds
protected int transIndex; // transparent color index
protected static final int MaxStackSize = 4096;
// max decoder pixel stack size
// LZW decoder working arrays
protected short[] prefix;
protected byte[] suffix;
protected byte[] pixelStack;
protected byte[] pixels;
protected ArrayList<GifFrame> frames; // frames read from current file
protected int frameCount;
    /**
     * A single decoded frame: the fully composited image plus its
     * display duration.
     */
    static class GifFrame {
        public GifFrame(BufferedImage im, int del) {
            image = im;
            delay = del;
        }
        public BufferedImage image; // composited frame pixels
        public int delay; // display duration in milliseconds
    }
/**
* Gets display duration for specified frame.
*
* @param n int index of frame
* @return delay in milliseconds
*/
public int getDelay(int n) {
//
delay = -1;
if ((n >= 0) && (n < frameCount)) {
delay = frames.get(n).delay;
}
return delay;
}
    /**
     * Gets the number of frames read from file.
     * @return frame count
     */
    public int getFrameCount() {
        return frameCount;
    }
    /**
     * Gets the first (or only) image read.
     *
     * @return BufferedImage containing first frame, or null if none.
     */
    public BufferedImage getImage() {
        return getFrame(0);
    }
    /**
     * Gets the "Netscape" iteration count, if any.
     * A count of 0 means repeat indefinitely.
     *
     * @return iteration count if one was specified, else 1.
     */
    public int getLoopCount() {
        return loopCount;
    }
    /**
     * Creates new frame image from current data (and previous
     * frames as specified by their disposition codes).
     */
    protected void setPixels() {
        // expose destination image's pixels as int array
        int[] dest =
            ((DataBufferInt) image.getRaster().getDataBuffer()).getData();
        // fill in starting image contents based on last image's dispose code
        if (lastDispose > 0) {
            if (lastDispose == 3) {
                // use image before last
                int n = frameCount - 2;
                if (n > 0) {
                    lastImage = getFrame(n - 1);
                } else {
                    lastImage = null;
                }
            }
            if (lastImage != null) {
                int[] prev =
                    ((DataBufferInt) lastImage.getRaster().getDataBuffer()).getData();
                System.arraycopy(prev, 0, dest, 0, width * height);
                // copy pixels
                if (lastDispose == 2) {
                    // fill last image rect area with background color
                    Graphics2D g = image.createGraphics();
                    Color c = null;
                    if (transparency) {
                        c = new Color(0, 0, 0, 0); // assume background is transparent
                    } else {
                        c = new Color(lastBgColor); // use given background color
                    }
                    g.setColor(c);
                    g.setComposite(AlphaComposite.Src); // replace area
                    g.fill(lastRect);
                    g.dispose();
                }
            }
        }
        // copy each source line to the appropriate place in the destination
        int pass = 1;
        int inc = 8;
        int iline = 0;
        for (int i = 0; i < ih; i++) {
            int line = i;
            if (interlace) {
                // GIF interlacing: four passes starting at rows 0,4,2,1
                // with row strides 8,8,4,2 respectively.
                if (iline >= ih) {
                    pass++;
                    switch (pass) {
                        case 2 :
                            iline = 4;
                            break;
                        case 3 :
                            iline = 2;
                            inc = 4;
                            break;
                        case 4 :
                            iline = 1;
                            inc = 2;
                    }
                }
                line = iline;
                iline += inc;
            }
            line += iy;
            if (line < height) {
                int k = line * width;
                int dx = k + ix; // start of line in dest
                int dlim = dx + iw; // end of dest line
                if ((k + width) < dlim) {
                    dlim = k + width; // past dest edge
                }
                int sx = i * iw; // start of line in source
                while (dx < dlim) {
                    // map color and insert in destination; index 0 in "act"
                    // is the transparent slot, so 0 leaves the pixel as-is
                    int index = pixels[sx++] & 0xff;
                    int c = act[index];
                    if (c != 0) {
                        dest[dx] = c;
                    }
                    dx++;
                }
            }
        }
    }
/**
* Gets the image contents of frame n.
*
* @return BufferedImage representation of frame, or null if n is invalid.
*/
public BufferedImage getFrame(int n) {
BufferedImage im = null;
if ((n >= 0) && (n < frameCount)) {
im = frames.get(n).image;
}
return im;
}
    /**
     * Gets image size.
     *
     * @return GIF image dimensions (logical screen width and height)
     */
    public Dimension getFrameSize() {
        return new Dimension(width, height);
    }
/**
* Reads GIF image from stream
*
* @param BufferedInputStream containing GIF file.
* @return read status code (0 = no errors)
*/
public int read(BufferedInputStream is) {
init();
if (is != null) {
in = is;
readHeader();
if (!err()) {
readContents();
if (frameCount < 0) {
status = STATUS_FORMAT_ERROR;
}
}
} else {
status = STATUS_OPEN_ERROR;
}
try {
is.close();
} catch (IOException e) {
}
return status;
}
/**
* Reads GIF image from stream
*
* @param InputStream containing GIF file.
* @return read status code (0 = no errors)
*/
public int read(InputStream is) {
init();
if (is != null) {
if (!(is instanceof BufferedInputStream))
is = new BufferedInputStream(is);
in = (BufferedInputStream) is;
readHeader();
if (!err()) {
readContents();
if (frameCount < 0) {
status = STATUS_FORMAT_ERROR;
}
}
} else {
status = STATUS_OPEN_ERROR;
}
try {
is.close();
} catch (IOException e) {
}
return status;
}
    /**
     * Decodes LZW image data into pixel array.
     * Adapted from John Cristy's ImageMagick.
     */
    protected void decodeImageData() {
        int NullCode = -1;
        int npix = iw * ih;
        int available,
            clear,
            code_mask,
            code_size,
            end_of_information,
            in_code,
            old_code,
            bits,
            code,
            count,
            i,
            datum,
            data_size,
            first,
            top,
            bi,
            pi;
        if ((pixels == null) || (pixels.length < npix)) {
            pixels = new byte[npix]; // allocate new pixel array
        }
        if (prefix == null) prefix = new short[MaxStackSize];
        if (suffix == null) suffix = new byte[MaxStackSize];
        if (pixelStack == null) pixelStack = new byte[MaxStackSize + 1];
        // Initialize GIF data stream decoder.
        data_size = read();
        clear = 1 << data_size;
        end_of_information = clear + 1;
        available = clear + 2;
        old_code = NullCode;
        code_size = data_size + 1;
        code_mask = (1 << code_size) - 1;
        // Seed the string table with the single-byte root codes.
        for (code = 0; code < clear; code++) {
            prefix[code] = 0;
            suffix[code] = (byte) code;
        }
        // Decode GIF pixel stream.
        datum = bits = count = first = top = pi = bi = 0;
        for (i = 0; i < npix;) {
            if (top == 0) {
                if (bits < code_size) {
                    // Load bytes until there are enough bits for a code.
                    if (count == 0) {
                        // Read a new data block.
                        count = readBlock();
                        if (count <= 0)
                            break;
                        bi = 0;
                    }
                    datum += (block[bi] & 0xff) << bits;
                    bits += 8;
                    bi++;
                    count--;
                    continue;
                }
                // Get the next code.
                code = datum & code_mask;
                datum >>= code_size;
                bits -= code_size;
                // Interpret the code
                if ((code > available) || (code == end_of_information))
                    break;
                if (code == clear) {
                    // Reset decoder.
                    code_size = data_size + 1;
                    code_mask = (1 << code_size) - 1;
                    available = clear + 2;
                    old_code = NullCode;
                    continue;
                }
                if (old_code == NullCode) {
                    // First code after a clear: emit it directly.
                    pixelStack[top++] = suffix[code];
                    old_code = code;
                    first = code;
                    continue;
                }
                in_code = code;
                if (code == available) {
                    // KwKwK special case: emit previous string's first pixel.
                    pixelStack[top++] = (byte) first;
                    code = old_code;
                }
                // Walk the prefix chain, pushing suffixes onto the stack.
                while (code > clear) {
                    pixelStack[top++] = suffix[code];
                    code = prefix[code];
                }
                first = suffix[code] & 0xff;
                // Add a new string to the string table,
                if (available >= MaxStackSize)
                    break;
                pixelStack[top++] = (byte) first;
                prefix[available] = (short) old_code;
                suffix[available] = (byte) first;
                available++;
                // Widen the code size once the table fills the current range.
                if (((available & code_mask) == 0)
                    && (available < MaxStackSize)) {
                    code_size++;
                    code_mask += available;
                }
                old_code = in_code;
            }
            // Pop a pixel off the pixel stack.
            top--;
            pixels[pi++] = pixelStack[top];
            i++;
        }
        for (i = pi; i < npix; i++) {
            pixels[i] = 0; // clear missing pixels
        }
    }
    /**
     * Returns true if an error was encountered during reading/decoding.
     */
    protected boolean err() {
        return status != STATUS_OK;
    }
    /**
     * Initializes or re-initializes reader state for a new file.
     */
    protected void init() {
        status = STATUS_OK;
        frameCount = 0;
        frames = new ArrayList<GifFrame>();
        gct = null;
        lct = null;
    }
    /**
     * Reads a single byte from the input stream.
     * An IOException sets STATUS_FORMAT_ERROR and 0 is returned.
     * NOTE(review): end-of-stream (-1) is passed through to callers —
     * presumably tolerated because readBlock guards with "blockSize > 0";
     * confirm other call sites.
     */
    protected int read() {
        int curByte = 0;
        try {
            curByte = in.read();
        } catch (IOException e) {
            status = STATUS_FORMAT_ERROR;
        }
        return curByte;
    }
    /**
     * Reads next variable length block from input into "block".
     *
     * @return number of bytes actually stored in "block"
     */
    protected int readBlock() {
        blockSize = read(); // first byte is the declared block length
        int n = 0;
        if (blockSize > 0) {
            try {
                int count = 0;
                // Loop: a single read may return fewer bytes than requested.
                while (n < blockSize) {
                    count = in.read(block, n, blockSize - n);
                    if (count == -1)
                        break;
                    n += count;
                }
            } catch (IOException e) {
                // fall through: the short-read check below flags the error
            }
            if (n < blockSize) {
                status = STATUS_FORMAT_ERROR;
            }
        }
        return n;
    }
    /**
     * Reads color table as 256 RGB integer values.
     *
     * @param ncolors int number of colors to read
     * @return int array containing 256 colors (packed ARGB with full alpha),
     *         or null on a short read (status is set to STATUS_FORMAT_ERROR)
     */
    protected int[] readColorTable(int ncolors) {
        int nbytes = 3 * ncolors;
        int[] tab = null;
        byte[] c = new byte[nbytes];
        int n = 0;
        try {
            n = in.read(c);
        } catch (IOException e) {
            // fall through: the short-read check below flags the error
        }
        if (n < nbytes) {
            status = STATUS_FORMAT_ERROR;
        } else {
            tab = new int[256]; // max size to avoid bounds checks
            int i = 0;
            int j = 0;
            // Convert packed RGB triplets to opaque ARGB ints.
            while (i < ncolors) {
                int r = c[j++] & 0xff;
                int g = c[j++] & 0xff;
                int b = c[j++] & 0xff;
                tab[i++] = 0xff000000 | (r << 16) | (g << 8) | b;
            }
        }
        return tab;
    }
/**
* Main file parser. Reads GIF content blocks.
*/
protected void readContents() {
// read GIF file content blocks
boolean done = false;
while (!(done || err())) {
int code = read();
switch (code) {
case 0x2C : // image separator
readImage();
break;
case 0x21 : // extension
code = read();
switch (code) {
case 0xf9 : // graphics control extension
readGraphicControlExt();
break;
case 0xff : // application extension
readBlock();
String app = "";
for (int i = 0; i < 11; i++) {
app += (char) block[i];
}
if (app.equals("NETSCAPE2.0")) {
readNetscapeExt();
}
else
skip(); // don't care
break;
default : // uninteresting extension
skip();
}
break;
case 0x3b : // terminator
done = true;
break;
case 0x00 : // bad byte, but keep going and see what happens
break;
default :
status = STATUS_FORMAT_ERROR;
}
}
}
    /**
     * Reads Graphics Control Extension values: disposal method,
     * transparency flag, frame delay and transparent color index.
     */
    protected void readGraphicControlExt() {
        read(); // block size
        int packed = read(); // packed fields
        dispose = (packed & 0x1c) >> 2; // disposal method
        if (dispose == 0) {
            dispose = 1; // elect to keep old image if discretionary
        }
        transparency = (packed & 1) != 0;
        delay = readShort() * 10; // delay in milliseconds
        transIndex = read(); // transparent color index
        read(); // block terminator
    }
/**
* Reads GIF file header information.
*/
protected void readHeader() {
String id = "";
for (int i = 0; i < 6; i++) {
id += (char) read();
}
if (!id.startsWith("GIF")) {
status = STATUS_FORMAT_ERROR;
return;
}
readLSD();
if (gctFlag && !err()) {
gct = readColorTable(gctSize);
bgColor = gct[bgIndex];
}
}
/**
* Reads next frame image
*/
protected void readImage() {
ix = readShort(); // (sub)image position & size
iy = readShort();
iw = readShort();
ih = readShort();
int packed = read();
lctFlag = (packed & 0x80) != 0; // 1 - local color table flag
interlace = (packed & 0x40) != 0; // 2 - interlace flag
// 3 - sort flag
// 4-5 - reserved
lctSize = 2 << (packed & 7); // 6-8 - local color table size
if (lctFlag) {
lct = readColorTable(lctSize); // read table
act = lct; // make local table active
} else {
act = gct; // make global table active
if (bgIndex == transIndex)
bgColor = 0;
}
int save = 0;
if (transparency) {
save = act[transIndex];
act[transIndex] = 0; // set transparent color if specified
}
if (act == null) {
status = STATUS_FORMAT_ERROR; // no color table defined
}
if (err()) return;
decodeImageData(); // decode pixel data
skip();
if (err()) return;
frameCount++;
// create new image to receive frame data
image =
new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB_PRE);
setPixels(); // transfer pixel data to image
frames.add(new GifFrame(image, delay)); // add image to frame list
if (transparency) {
act[transIndex] = save;
}
resetFrame();
}
    /**
     * Reads Logical Screen Descriptor: canvas size, global color table
     * flag/size, background color index and pixel aspect ratio.
     */
    protected void readLSD() {
        // logical screen size
        width = readShort();
        height = readShort();
        // packed fields
        int packed = read();
        gctFlag = (packed & 0x80) != 0; // 1 : global color table flag
        // 2-4 : color resolution
        // 5 : gct sort flag
        gctSize = 2 << (packed & 7); // 6-8 : gct size
        bgIndex = read(); // background color index
        pixelAspect = read(); // pixel aspect ratio
    }
/**
* Reads Netscape extenstion to obtain iteration count
*/
protected void readNetscapeExt() {
do {
readBlock();
if (block[0] == 1) {
// loop count sub-block
int b1 = block[1] & 0xff;
int b2 = block[2] & 0xff;
loopCount = (b2 << 8) | b1;
}
} while ((blockSize > 0) && !err());
}
    /**
     * Reads next 16-bit value, LSB first.
     */
    protected int readShort() {
        // read 16-bit value, LSB first
        return read() | (read() << 8);
    }
    /**
     * Resets frame state for reading next image: remembers the dispose
     * code, rectangle, pixels and background of the frame just read.
     */
    protected void resetFrame() {
        lastDispose = dispose;
        lastRect = new Rectangle(ix, iy, iw, ih);
        lastImage = image;
        lastBgColor = bgColor;
        lct = null; // a local color table applies to one frame only
    }
    /**
     * Skips variable length blocks up to and including
     * next zero length block.
     */
    protected void skip() {
        do {
            readBlock();
        } while ((blockSize > 0) && !err());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_LIFELINE_INTERVAL_SECONDS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY;
import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY;
import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
import static org.apache.hadoop.test.MetricsAsserts.getLongCounter;
import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Mockito.any;
import static org.mockito.Mockito.anyBoolean;
import static org.mockito.Mockito.anyInt;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.atLeastOnce;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocolPB.DatanodeLifelineProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.protocolPB.DatanodeProtocolClientSideTranslatorPB;
import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.HeartbeatResponse;
import org.apache.hadoop.hdfs.server.protocol.SlowPeerReports;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Supplier;
/**
* Test suite covering lifeline protocol handling in the DataNode.
*/
public class TestDataNodeLifeline {
private static final Logger LOG = LoggerFactory.getLogger(
TestDataNodeLifeline.class);
static {
GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
}
@Rule
public Timeout timeout = new Timeout(60000);
private MiniDFSCluster cluster;
private HdfsConfiguration conf;
private DatanodeLifelineProtocolClientSideTranslatorPB lifelineNamenode;
private DataNodeMetrics metrics;
private DatanodeProtocolClientSideTranslatorPB namenode;
private FSNamesystem namesystem;
private DataNode dn;
private BPServiceActor bpsa;
  /**
   * Starts a single-DataNode mini cluster with the lifeline RPC server
   * enabled and aggressive heartbeat/staleness timings, then wraps both
   * the heartbeat and lifeline NameNode proxies in Mockito spies so the
   * tests can intercept RPC traffic.
   */
  @Before
  public void setup() throws Exception {
    // Configure cluster with lifeline RPC server enabled, and down-tune
    // heartbeat timings to try to force quick dead/stale DataNodes.
    conf = new HdfsConfiguration();
    conf.setInt(DFS_DATANODE_LIFELINE_INTERVAL_SECONDS_KEY, 2);
    conf.setInt(DFS_HEARTBEAT_INTERVAL_KEY, 1);
    conf.setInt(DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1);
    conf.set(DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY, "0.0.0.0:0");
    conf.setInt(DFS_NAMENODE_STALE_DATANODE_INTERVAL_KEY, 6 * 1000);
    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(1).build();
    namesystem = cluster.getNameNode().getNamesystem();
    // Set up spies on RPC proxies so that we can inject failures.
    dn = cluster.getDataNodes().get(0);
    metrics = dn.getMetrics();
    assertNotNull(metrics);
    List<BPOfferService> allBpos = dn.getAllBpOs();
    assertNotNull(allBpos);
    assertEquals(1, allBpos.size());
    BPOfferService bpos = allBpos.get(0);
    List<BPServiceActor> allBpsa = bpos.getBPServiceActors();
    assertNotNull(allBpsa);
    assertEquals(1, allBpsa.size());
    bpsa = allBpsa.get(0);
    assertNotNull(bpsa);
    // Lifeline RPC proxy gets created on separate thread, so poll until found.
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        if (bpsa.getLifelineNameNodeProxy() != null) {
          lifelineNamenode = spy(bpsa.getLifelineNameNodeProxy());
          bpsa.setLifelineNameNode(lifelineNamenode);
        }
        return lifelineNamenode != null;
      }
    }, 100, 10000);
    assertNotNull(bpsa.getNameNodeProxy());
    namenode = spy(bpsa.getNameNodeProxy());
    bpsa.setNameNode(namenode);
  }
  /**
   * Tears down the cluster and verifies no lifeline-related threads leak.
   */
  @After
  public void shutdown() {
    if (cluster != null) {
      cluster.shutdown();
      GenericTestUtils.assertNoThreadsMatching(".*lifeline.*");
    }
  }
  /**
   * Blocks heartbeats behind a latch and verifies that lifeline RPCs keep
   * the DataNode registered as live — never stale or dead — until the
   * expected number of lifelines has been sent.
   */
  @Test
  public void testSendLifelineIfHeartbeatBlocked() throws Exception {
    // Run the test for the duration of sending 10 lifeline RPC messages.
    int numLifelines = 10;
    CountDownLatch lifelinesSent = new CountDownLatch(numLifelines);
    // Intercept heartbeat to inject an artificial delay, until all expected
    // lifeline RPC messages have been sent.
    doAnswer(new LatchAwaitingAnswer<HeartbeatResponse>(lifelinesSent))
        .when(namenode).sendHeartbeat(
            any(DatanodeRegistration.class),
            any(StorageReport[].class),
            anyLong(),
            anyLong(),
            anyInt(),
            anyInt(),
            anyInt(),
            any(),
            anyBoolean(),
            any(SlowPeerReports.class),
            any(SlowDiskReports.class));
    // Intercept lifeline to trigger latch count-down on each call.
    doAnswer(new LatchCountingAnswer<Void>(lifelinesSent))
        .when(lifelineNamenode).sendLifeline(
            any(DatanodeRegistration.class),
            any(StorageReport[].class),
            anyLong(),
            anyLong(),
            anyInt(),
            anyInt(),
            anyInt(),
            any());
    // While waiting on the latch for the expected number of lifeline messages,
    // poll DataNode tracking information. Thanks to the lifeline, we expect
    // that the DataNode always stays alive, and never goes stale or dead.
    while (!lifelinesSent.await(1, SECONDS)) {
      assertEquals("Expect DataNode to be kept alive by lifeline.", 1,
          namesystem.getNumLiveDataNodes());
      assertEquals("Expect DataNode not marked dead due to lifeline.", 0,
          namesystem.getNumDeadDataNodes());
      assertEquals("Expect DataNode not marked stale due to lifeline.", 0,
          namesystem.getNumStaleDataNodes());
      // add a new volume on the next heartbeat
      cluster.getDataNodes().get(0).reconfigurePropertyImpl(
          DFS_DATANODE_DATA_DIR_KEY,
          cluster.getDataDirectory().concat("/data-new"));
    }
    // Verify that we did in fact call the lifeline RPC.
    verify(lifelineNamenode, atLeastOnce()).sendLifeline(
        any(DatanodeRegistration.class),
        any(StorageReport[].class),
        anyLong(),
        anyLong(),
        anyInt(),
        anyInt(),
        anyInt(),
        any());
    // Also verify lifeline call through metrics. We expect at least
    // numLifelines, guaranteed by waiting on the latch. There is a small
    // possibility of extra lifeline calls depending on timing, so we allow
    // slack in the assertion.
    assertTrue("Expect metrics to count at least " + numLifelines + " calls.",
        getLongCounter("LifelinesNumOps", getMetrics(metrics.name())) >=
            numLifelines);
  }
  /**
   * Verifies that while heartbeats flow normally, no lifeline RPC is ever
   * sent, and the DataNode stays live the whole time.
   */
  @Test
  public void testNoLifelineSentIfHeartbeatsOnTime() throws Exception {
    // Run the test for the duration of sending 10 heartbeat RPC messages.
    int numHeartbeats = 10;
    CountDownLatch heartbeatsSent = new CountDownLatch(numHeartbeats);
    // Intercept heartbeat to trigger latch count-down on each call.
    doAnswer(new LatchCountingAnswer<HeartbeatResponse>(heartbeatsSent))
        .when(namenode).sendHeartbeat(
            any(DatanodeRegistration.class),
            any(StorageReport[].class),
            anyLong(),
            anyLong(),
            anyInt(),
            anyInt(),
            anyInt(),
            any(),
            anyBoolean(),
            any(SlowPeerReports.class),
            any(SlowDiskReports.class));
    // While waiting on the latch for the expected number of heartbeat messages,
    // poll DataNode tracking information. We expect that the DataNode always
    // stays alive, and never goes stale or dead.
    while (!heartbeatsSent.await(1, SECONDS)) {
      assertEquals("Expect DataNode to be kept alive by lifeline.", 1,
          namesystem.getNumLiveDataNodes());
      assertEquals("Expect DataNode not marked dead due to lifeline.", 0,
          namesystem.getNumDeadDataNodes());
      assertEquals("Expect DataNode not marked stale due to lifeline.", 0,
          namesystem.getNumStaleDataNodes());
    }
    // Verify that we did not call the lifeline RPC.
    verify(lifelineNamenode, never()).sendLifeline(
        any(DatanodeRegistration.class),
        any(StorageReport[].class),
        anyLong(),
        anyLong(),
        anyInt(),
        anyInt(),
        anyInt(),
        any());
    // Also verify no lifeline calls through metrics.
    assertEquals("Expect metrics to count no lifeline calls.", 0,
        getLongCounter("LifelinesNumOps", getMetrics(metrics.name())));
  }
  /**
   * Verifies that a lifeline from a node the NameNode considers dead is
   * ignored (capacity stays 0) and that the node recovers only after
   * re-registration via a real heartbeat.
   */
  @Test
  public void testLifelineForDeadNode() throws Exception {
    long initialCapacity = cluster.getNamesystem(0).getCapacityTotal();
    assertTrue(initialCapacity > 0);
    dn.setHeartbeatsDisabledForTests(true);
    cluster.setDataNodesDead();
    assertEquals("Capacity should be 0 after all DNs dead", 0, cluster
        .getNamesystem(0).getCapacityTotal());
    bpsa.sendLifelineForTests();
    assertEquals("Lifeline should be ignored for dead node", 0, cluster
        .getNamesystem(0).getCapacityTotal());
    // Wait for re-registration and heartbeat
    dn.setHeartbeatsDisabledForTests(false);
    final DatanodeDescriptor dnDesc = cluster.getNamesystem(0).getBlockManager()
        .getDatanodeManager().getDatanodes().iterator().next();
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        return dnDesc.isAlive() && dnDesc.isHeartbeatedSinceRegistration();
      }
    }, 100, 5000);
    assertEquals("Capacity should include only live capacity", initialCapacity,
        cluster.getNamesystem(0).getCapacityTotal());
  }
  /**
   * Waits on a {@link CountDownLatch} before calling through to the method.
   * Used to stall heartbeat RPCs until the lifeline latch releases.
   */
  private final class LatchAwaitingAnswer<T> implements Answer<T> {
    private final CountDownLatch latch; // released elsewhere to unblock calls
    public LatchAwaitingAnswer(CountDownLatch latch) {
      this.latch = latch;
    }
    @Override
    @SuppressWarnings("unchecked")
    public T answer(InvocationOnMock invocation)
        throws Throwable {
      LOG.info("Awaiting, remaining latch count is {}.", latch.getCount());
      latch.await();
      return (T)invocation.callRealMethod();
    }
  }
  /**
   * Counts on a {@link CountDownLatch} after each call through to the method.
   * Used to track how many lifeline/heartbeat RPCs have completed.
   */
  private final class LatchCountingAnswer<T> implements Answer<T> {
    private final CountDownLatch latch; // decremented after each real call
    public LatchCountingAnswer(CountDownLatch latch) {
      this.latch = latch;
    }
    @Override
    @SuppressWarnings("unchecked")
    public T answer(InvocationOnMock invocation)
        throws Throwable {
      T result = (T)invocation.callRealMethod();
      latch.countDown();
      LOG.info("Countdown, remaining latch count is {}.", latch.getCount());
      return result;
    }
  }
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.codeInsight.inspections;
import com.intellij.codeInsight.daemon.impl.HighlightInfo;
import com.intellij.codeInsight.intention.IntentionAction;
import com.intellij.codeInspection.actions.CleanupInspectionIntention;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.util.Pair;
import com.intellij.pom.java.LanguageLevel;
import com.intellij.psi.PsiElement;
import com.intellij.psi.PsiMethodCallExpression;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.refactoring.typeMigration.inspections.GuavaInspection;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.testFramework.PlatformTestUtil;
import com.intellij.testFramework.builders.JavaModuleFixtureBuilder;
import com.intellij.testFramework.fixtures.JavaCodeInsightFixtureTestCase;
import java.util.Arrays;
import java.util.List;
/**
* @author Dmitry Batkovich
*/
public class GuavaInspectionTest extends JavaCodeInsightFixtureTestCase {
private GuavaInspection myInspection;
  // Registers a fresh GuavaInspection instance for every test.
  @Override
  public void setUp() throws Exception {
    super.setUp();
    myInspection = new GuavaInspection();
    myFixture.enableInspections(myInspection);
  }
  // Test data lives under the community typeMigration test resources.
  @Override
  protected String getTestDataPath() {
    return PlatformTestUtil.getCommunityPath() + "/java/typeMigration/testData/inspections/guava";
  }
  // Fixture module runs on JDK 1.8 with stubbed Guava classes on the classpath.
  @Override
  protected void tuneFixture(JavaModuleFixtureBuilder moduleBuilder) {
    moduleBuilder.setLanguageLevel(LanguageLevel.JDK_1_8);
    moduleBuilder.addLibraryJars("guava", getTestDataPath() + "/", "guava-stubs.jar");
    moduleBuilder.addJdk(IdeaTestUtil.getMockJdk18Path().getPath());
  }
  // Each test below loads <testName>.java from the test data directory,
  // applies the Guava migration quick fix (doTest: single fix;
  // doTestAllFile: cleanup of the whole file) and compares with
  // <testName>_after.java. Methods prefixed with "_" are disabled.
  public void testOptional() {
    doTest();
  }
  public void testOptional2() {
    doTest();
  }
  public void testOptional3() {
    doTest();
  }
  public void testSimpleFluentIterable() {
    doTest();
  }
  public void testChainedFluentIterable() {
    doTest();
  }
  public void testFluentIterableChainWithoutVariable() {
    doTestAllFile();
  }
  public void testChainedFluentIterableWithChainedInitializer() {
    doTest();
  }
  public void testFluentIterableChainWithOptional() {
    doTest();
  }
  public void testTransformAndConcat1() {
    doTest();
  }
  public void testTransformAndConcat2() {
    doTest();
  }
  public void testTransformAndConcat3() {
    doTest();
  }
  public void testTransformAndConcat4() {
    doTest();
  }
  public void testFilterIsInstance() {
    doTest();
  }
  public void testInsertTypeParameter() {
    doTest();
  }
  public void testRemoveMethodReferenceForFunctionalInterfaces() {
    doTest();
  }
  public void _testChainedFluentIterableWithOf() {
    doTest();
  }
  //needs Guava 18.0 as dependency
  public void _testAppend() {
    doTest();
  }
  public void testChainContainsStopMethods() {
    doTestNoQuickFixes(PsiMethodCallExpression.class);
  }
  public void testFluentIterableAndOptionalChain() {
    doTest();
  }
  public void testCopyInto() {
    doTestAllFile();
  }
  public void testToArray() {
    doTest();
  }
  public void testToArray2() {
    doTest();
  }
  public void testToArray3() {
    doTest();
  }
  public void testReturnType() {
    doTest();
  }
  public void testFluentIterableGet() {
    doTest();
  }
  public void testFluentIterableGet2() {
    doTest();
  }
  public void testIterableAssignment() {
    doTest();
  }
  public void testReturnIterable() {
    doTest();
  }
  public void testConvertFluentIterableAsIterableParameter() {
    doTest();
  }
  public void testConvertFluentIterableAsIterableParameter2() {
    doTest();
  }
  public void testConvertFunctionAsParameter() {
    doTest();
  }
  public void testFluentIterableMigrationInInheritance() {
    doTest();
  }
  public void testFluentIterableAndOptional() {
    doTest();
  }
  public void testFluentIterableContains() {
    doTest();
  }
  public void testFluentIterableChainSeparatedByMethods() {
    doTest();
  }
  public void testFluentIterableWithStaticallyImportedFrom() {
    doTest();
  }
  public void testTypeMigrationRootBackTraverse() {
    doTest();
  }
  public void testOptionalTransform() {
    doTest();
  }
  public void testOptionalTransform2() {
    doTest();
  }
  public void testRemoveMethodReference() {
    doTest();
  }
  public void testSimplifyOptionalComposition() {
    doTest();
  }
  public void testMigrateArrays() {
    doTest();
  }
  public void testConvertImmutableCollections() {
    doTestAllFile();
  }
  public void testUniqueIndex() {
    doTestAllFile();
  }
  public void testMigrateMethodAsChainQualifier() {
    doTest();
  }
  public void testFixAllProblems() {
    doTestAllFile();
  }
  public void testFixAllProblems2() {
    doTestAllFile();
  }
  public void testPredicates() {
    doTestAllFile();
  }
  public void testPredicates2() {
    doTestAllFile();
  }
  public void testPredicates3() {
    doTestAllFile();
  }
  public void testPredicates4() {
    doTestAllFile();
  }
  public void testFluentIterableElementTypeChanged() {
    doTest();
  }
  // for ex: javax.annotations.Nullable is runtime annotation
  public void testFunctionAnnotatedWithRuntimeAnnotation() {
    doTestAllFile();
  }
  public void testFunctionAnnotatedWithRuntimeAnnotation2() {
    // Temporarily flip the inspection setting; restore it even on failure.
    try {
      myInspection.ignoreJavaxNullable = false;
      doTestAllFile();
    } finally {
      myInspection.ignoreJavaxNullable = true;
    }
  }
  public void testFluentIterableFromAndParenthesises() {
    doTestAllFile();
  }
  public void testFunctionIsMethodReference() {
    doTest();
  }
  public void testFluentIterableLast() {
    doTest();
  }
  public void testLambdaImplementsBothInterfaces() {
    doTest();
  }
  /**
   * Verifies that no "migrate Guava type" quick fix is offered on elements
   * of the given PSI types after highlighting the test file.
   *
   * @param highlightedElements PSI element types that must NOT carry the fix
   */
  private void doTestNoQuickFixes(Class<? extends PsiElement>... highlightedElements) {
    myFixture.configureByFile(getTestName(true) + ".java");
    myFixture.doHighlighting();
    for (IntentionAction action : myFixture.getAvailableIntentions()) {
      if (GuavaInspection.MigrateGuavaTypeFix.FAMILY_NAME.equals(action.getFamilyName())) {
        final PsiElement element = ((GuavaInspection.MigrateGuavaTypeFix)action).getStartElement();
        if (PsiTreeUtil.instanceOf(element, highlightedElements)) {
          fail("Quick fix is found but not expected for types " + Arrays.toString(highlightedElements));
        }
      }
    }
  }
private void doTest() {
myFixture.configureByFile(getTestName(true) + ".java");
myFixture.enableInspections(new GuavaInspection());
boolean actionFound = false;
myFixture.doHighlighting();
for (IntentionAction action : myFixture.getAvailableIntentions()) {
if (GuavaInspection.MigrateGuavaTypeFix.FAMILY_NAME.equals(action.getFamilyName())) {
myFixture.launchAction(action);
actionFound = true;
break;
}
}
assertTrue("Quick fix isn't found", actionFound);
myFixture.checkResultByFile(getTestName(true) + "_after.java");
}
/**
 * Highlights "<testName>.java" with {@link GuavaInspection} enabled, finds the
 * first highlight matching the inspection's problem description, launches its
 * {@link CleanupInspectionIntention} ("fix all") option, and compares the file
 * against "<testName>_after.java".
 */
private void doTestAllFile() {
    myFixture.configureByFile(getTestName(true) + ".java");
    myFixture.enableInspections(new GuavaInspection());
    for (HighlightInfo info : myFixture.doHighlighting())
        if (GuavaInspection.PROBLEM_DESCRIPTION.equals(info.getDescription())) {
            // Take the first quick-fix marker attached to this highlight.
            final Pair<HighlightInfo.IntentionActionDescriptor, RangeMarker> marker = info.quickFixActionMarkers.get(0);
            // Any element works as context for resolving the fix options;
            // element at offset 0 is used for convenience.
            final PsiElement someElement = myFixture.getFile().findElementAt(0);
            assertNotNull(someElement);
            final List<IntentionAction> options = marker.getFirst().getOptions(someElement, myFixture.getEditor());
            assertNotNull(options);
            boolean doBreak = false;
            for (IntentionAction option : options) {
                if (option instanceof CleanupInspectionIntention) {
                    // "Fix all" cleanup handles the whole file in one action.
                    myFixture.launchAction(option);
                    doBreak = true;
                    break;
                }
            }
            if (doBreak) {
                break;
            }
        }
    myFixture.checkResultByFile(getTestName(true) + "_after.java");
}
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
* Copyright (C) 2011-2012 Eugene Fradkin (eugene.fradkin@gmail.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.erd.ui.editor;
import org.eclipse.core.runtime.IAdaptable;
import org.eclipse.draw2dl.PrintFigureOperation;
import org.eclipse.swt.SWT;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.IWorkbench;
import org.eclipse.ui.IWorkbenchPreferencePage;
import org.eclipse.ui.IWorkbenchPropertyPage;
import org.jkiss.dbeaver.erd.model.ERDAttributeVisibility;
import org.jkiss.dbeaver.erd.ui.ERDUIConstants;
import org.jkiss.dbeaver.erd.ui.internal.ERDUIActivator;
import org.jkiss.dbeaver.erd.ui.internal.ERDUIMessages;
import org.jkiss.dbeaver.model.preferences.DBPPreferenceStore;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.preferences.AbstractPrefPage;
import org.jkiss.dbeaver.utils.PrefUtils;
import org.jkiss.utils.ArrayUtils;
import java.util.ArrayList;
import java.util.List;
/**
* ERDPreferencePage
*/
public class ERDPreferencePage extends AbstractPrefPage implements IWorkbenchPreferencePage, IWorkbenchPropertyPage {
public static final String PAGE_ID = "org.jkiss.dbeaver.preferences.erd.general"; //$NON-NLS-1$
private IAdaptable element;
private Button contentsShowViews;
private Button contentsShowPartitions;
private Button changeBorderColors;
private Button changeHeaderColors;
private Combo modeCombo;
private Spinner spinnerMarginTop;
private Spinner spinnerMarginBottom;
private Spinner spinnerMarginLeft;
private Spinner spinnerMarginRight;
private Button gridCheck;
private Button snapCheck;
private Spinner spinnerGridWidth;
private Spinner spinnerGridHeight;
private List<Button> visibilityButtons = new ArrayList<>();
private List<Button> styleButtons = new ArrayList<>();
@Override
protected Control createContents(Composite parent)
{
DBPPreferenceStore store = ERDUIActivator.getDefault().getPreferences();
Composite composite = UIUtils.createPlaceholder(parent, 2, 5);
createContentsGroup(store, composite);
createColorPrefGroup(store, composite);
createVisibilityGroup(store, composite);
createStyleGroup(store, composite);
createGridGroup(store, composite);
createPrintGroup(store, composite);
return composite;
}
private void createContentsGroup(DBPPreferenceStore store, Composite composite)
{
Group contentsGroup = UIUtils.createControlGroup(composite, ERDUIMessages.erd_preference_page_title_diagram_contents, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
//((GridData)contentsGroup.getLayoutData()).horizontalSpan = 2;
contentsShowViews = UIUtils.createCheckbox(contentsGroup, ERDUIMessages.erd_preference_page_title_shows_views, store.getBoolean(ERDUIConstants.PREF_DIAGRAM_SHOW_VIEWS));
contentsShowPartitions = UIUtils.createCheckbox(contentsGroup, ERDUIMessages.erd_preference_page_title_shows_partitions, store.getBoolean(ERDUIConstants.PREF_DIAGRAM_SHOW_PARTITIONS));
}
private void createColorPrefGroup(DBPPreferenceStore store, Composite composite) {
Group contentsGroup = UIUtils.createControlGroup(composite, ERDUIMessages.erd_preference_page_title_color_pref, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
//((GridData)contentsGroup.getLayoutData()).horizontalSpan = 2;
changeBorderColors = UIUtils.createCheckbox(contentsGroup, ERDUIMessages.erd_preference_page_title_change_border_colors, store.getBoolean(ERDUIConstants.PREF_DIAGRAM_CHANGE_BORDER_COLORS));
changeHeaderColors = UIUtils.createCheckbox(contentsGroup, ERDUIMessages.erd_preference_page_title_change_header_colors, store.getBoolean(ERDUIConstants.PREF_DIAGRAM_CHANGE_HEADER_COLORS));
}
private void createVisibilityGroup(DBPPreferenceStore store, Composite composite)
{
ERDAttributeVisibility defaultVisibility = ERDAttributeVisibility.getDefaultVisibility(store);
Group elemsGroup = UIUtils.createControlGroup(composite, ERDUIMessages.erd_preference_page_title_attributes_visibility, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
for (ERDAttributeVisibility visibility : ERDAttributeVisibility.values()) {
Button radio = new Button(elemsGroup, SWT.RADIO);
radio.setData(visibility);
radio.setText(visibility.getTitle());
if (visibility == defaultVisibility) {
radio.setSelection(true);
}
visibilityButtons.add(radio);
}
}
private void createStyleGroup(DBPPreferenceStore store, Composite composite)
{
ERDViewStyle[] enabledStyles = ERDViewStyle.getDefaultStyles(store);
Group elemsGroup = UIUtils.createControlGroup(composite, ERDUIMessages.erd_preference_page_title_attribute_style, 1, GridData.VERTICAL_ALIGN_BEGINNING | GridData.FILL_HORIZONTAL, 0);
for (ERDViewStyle style : ERDViewStyle.values()) {
Button check = new Button(elemsGroup, SWT.CHECK);
check.setData(style);
check.setText(style.getTitle());
if (ArrayUtils.contains(enabledStyles, style)) {
check.setSelection(true);
}
styleButtons.add(check);
}
}
private void createGridGroup(DBPPreferenceStore store, Composite composite)
{
Group gridGroup = UIUtils.createControlGroup(composite, ERDUIMessages.pref_page_erd_group_grid, 2, GridData.VERTICAL_ALIGN_BEGINNING, 0);
gridCheck = UIUtils.createCheckbox(gridGroup, ERDUIMessages.pref_page_erd_checkbox_grid_enabled, null, store.getBoolean(ERDUIConstants.PREF_GRID_ENABLED), 2);
snapCheck = UIUtils.createCheckbox(gridGroup, ERDUIMessages.pref_page_erd_checkbox_snap_to_grid, null, store.getBoolean(ERDUIConstants.PREF_GRID_SNAP_ENABLED), 2);
spinnerGridWidth = UIUtils.createLabelSpinner(gridGroup, ERDUIMessages.pref_page_erd_spinner_grid_width, store.getInt(ERDUIConstants.PREF_GRID_WIDTH), 5, Short.MAX_VALUE);
spinnerGridHeight = UIUtils.createLabelSpinner(gridGroup, ERDUIMessages.pref_page_erd_spinner_grid_height, store.getInt(ERDUIConstants.PREF_GRID_HEIGHT), 5, Short.MAX_VALUE);
}
private void createPrintGroup(DBPPreferenceStore store, Composite composite)
{
Group printGroup = UIUtils.createControlGroup(composite, ERDUIMessages.pref_page_erd_group_print, 2, GridData.VERTICAL_ALIGN_BEGINNING, 0);
modeCombo = UIUtils.createLabelCombo(printGroup, ERDUIMessages.pref_page_erd_combo_page_mode, SWT.READ_ONLY | SWT.DROP_DOWN);
modeCombo.add(ERDUIMessages.pref_page_erd_item_tile);
modeCombo.add(ERDUIMessages.pref_page_erd_item_fit_page);
modeCombo.add(ERDUIMessages.pref_page_erd_item_fit_width);
modeCombo.add(ERDUIMessages.pref_page_erd_item_fit_height);
int modeIndex = 0;
switch (store.getInt(ERDUIConstants.PREF_PRINT_PAGE_MODE)) {
case PrintFigureOperation.FIT_PAGE: modeIndex = 1; break;
case PrintFigureOperation.FIT_WIDTH: modeIndex = 2; break;
case PrintFigureOperation.FIT_HEIGHT: modeIndex = 3; break;
}
modeCombo.select(modeIndex);
spinnerMarginTop = UIUtils.createLabelSpinner(printGroup, ERDUIMessages.pref_page_erd_spinner_margin_top, store.getInt(ERDUIConstants.PREF_PRINT_MARGIN_TOP), 0, Short.MAX_VALUE);
spinnerMarginBottom = UIUtils.createLabelSpinner(printGroup, ERDUIMessages.pref_page_erd_spinner_margin_bottom, store.getInt(ERDUIConstants.PREF_PRINT_MARGIN_BOTTOM), 0, Short.MAX_VALUE);
spinnerMarginLeft = UIUtils.createLabelSpinner(printGroup, ERDUIMessages.pref_page_erd_spinner_margin_left, store.getInt(ERDUIConstants.PREF_PRINT_MARGIN_LEFT), 0, Short.MAX_VALUE);
spinnerMarginRight = UIUtils.createLabelSpinner(printGroup, ERDUIMessages.pref_page_erd_spinner_margin_right, store.getInt(ERDUIConstants.PREF_PRINT_MARGIN_RIGHT), 0, Short.MAX_VALUE);
}
@Override
public void init(IWorkbench workbench)
{
}
@Override
protected void performDefaults()
{
super.performDefaults();
}
@Override
public boolean performOk()
{
DBPPreferenceStore store = ERDUIActivator.getDefault().getPreferences();
store.setValue(ERDUIConstants.PREF_DIAGRAM_SHOW_VIEWS, contentsShowViews.getSelection());
store.setValue(ERDUIConstants.PREF_DIAGRAM_SHOW_PARTITIONS, contentsShowPartitions.getSelection());
store.setValue(ERDUIConstants.PREF_DIAGRAM_CHANGE_BORDER_COLORS, changeBorderColors.getSelection());
store.setValue(ERDUIConstants.PREF_DIAGRAM_CHANGE_HEADER_COLORS, changeHeaderColors.getSelection());
store.setValue(ERDUIConstants.PREF_GRID_ENABLED, gridCheck.getSelection());
store.setValue(ERDUIConstants.PREF_GRID_SNAP_ENABLED, snapCheck.getSelection());
store.setValue(ERDUIConstants.PREF_GRID_WIDTH, spinnerGridWidth.getSelection());
store.setValue(ERDUIConstants.PREF_GRID_HEIGHT, spinnerGridHeight.getSelection());
int pageMode;
switch (modeCombo.getSelectionIndex()) {
case 1: pageMode = PrintFigureOperation.FIT_PAGE; break;
case 2: pageMode = PrintFigureOperation.FIT_WIDTH; break;
case 3: pageMode = PrintFigureOperation.FIT_HEIGHT; break;
default: pageMode = PrintFigureOperation.TILE; break;
}
store.setValue(ERDUIConstants.PREF_PRINT_PAGE_MODE, pageMode);
store.setValue(ERDUIConstants.PREF_PRINT_MARGIN_TOP, spinnerMarginTop.getSelection());
store.setValue(ERDUIConstants.PREF_PRINT_MARGIN_BOTTOM, spinnerMarginBottom.getSelection());
store.setValue(ERDUIConstants.PREF_PRINT_MARGIN_LEFT, spinnerMarginLeft.getSelection());
store.setValue(ERDUIConstants.PREF_PRINT_MARGIN_RIGHT, spinnerMarginRight.getSelection());
for (Button radio : visibilityButtons) {
if (radio.getSelection()) {
ERDAttributeVisibility.setDefaultVisibility(store, (ERDAttributeVisibility) radio.getData());
}
}
List<ERDViewStyle> enabledStyles = new ArrayList<>();
for (Button check : styleButtons) {
if (check.getSelection()) {
enabledStyles.add((ERDViewStyle) check.getData());
}
}
ERDViewStyle.setDefaultStyles(store, enabledStyles.toArray(new ERDViewStyle[enabledStyles.size()]));
PrefUtils.savePreferenceStore(store);
return true;
}
@Override
public IAdaptable getElement()
{
return element;
}
@Override
public void setElement(IAdaptable element)
{
this.element = element;
}
}
| |
/*
* Copyright 2015-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.util.cache;
import com.facebook.buck.io.ArchiveMemberPath;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.model.Pair;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.hash.HashCode;
import java.io.IOException;
import java.nio.file.NoSuchFileException;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Function;
/**
 * Wraps a collection of {@link ProjectFilesystem}-specific {@link ProjectFileHashCache}s as a
 * single cache, implementing a Chain of Responsibility to find and forward operations to the
 * correct inner cache. As this "multi"-cache is meant to handle paths across different
 * {@link ProjectFilesystem}s, as opposed to paths within the same {@link ProjectFilesystem}, it is
 * a distinct type from {@link ProjectFileHashCache}.
 * <p>
 * This "stacking" approach provides a few appealing properties:
 * 1) It makes it easier to module path roots with differing hash cached lifetime requirements.
 *    Hashes of paths from roots watched by watchman can be cached indefinitely, until a watchman
 *    event triggers invalidation. Hashes of paths under roots not watched by watchman, however,
 *    can only be cached for the duration of a single build (as we have no way to know when these
 *    paths are modified). By using separate {@link ProjectFileHashCache}s per path root, we can
 *    construct a new {@link StackedFileHashCache} on each build composed of either persistent or
 *    ephemeral per-root inner caches that properly manager the lifetime of cached hashes from their
 *    root.
 * 2) Modeling the hash cache around path root and sub-paths also works well with a current
 *    limitation with our watchman events in which they only store relative paths, with no reference
 *    to the path root they originated from. If we stored hashes internally indexed by absolute
 *    path, then we wouldn't know where to anchor the search to resolve the path that a watchman
 *    event refers to (e.g. a watch event for `foo.h` could refer to `/a/b/foo.h` or `/a/b/c/foo.h`,
 *    depending on where the project root is). By indexing hashes by pairs of project root and
 *    sub-path, it's easier to identity paths to invalidate (e.g. `foo.h` would invalidate
 *    (`/a/b/`,`foo.h`) and not (`/a/b/`,`c/foo.h`)).
 * 3) Since the current implementation of inner caches and callers generally use path root and
 *    sub-path pairs, it allows avoiding any overhead converting to/from absolute paths.
 */
public class StackedFileHashCache implements FileHashCache {

  /** Inner caches, consulted in order; the first willing cache wins. */
  private final ImmutableList<? extends ProjectFileHashCache> caches;

  public StackedFileHashCache(ImmutableList<? extends ProjectFileHashCache> caches) {
    this.caches = caches;
  }

  /**
   * @return the {@link ProjectFileHashCache} which handles the given relative {@link Path} under
   *     the given {@link ProjectFilesystem}.
   */
  private Optional<? extends ProjectFileHashCache> lookup(
      ProjectFilesystem filesystem,
      Path path) {
    for (ProjectFileHashCache cache : caches) {
      // TODO(agallagher): This should check for equal filesystems probably shouldn't be using the
      // root path, but we currently rely on this behavior.
      if (cache.getFilesystem().getRootPath().equals(filesystem.getRootPath()) &&
          cache.willGet(path)) {
        return Optional.of(cache);
      }
    }
    return Optional.empty();
  }

  /**
   * Resolves an absolute {@link Path} to the first inner cache whose filesystem contains it and
   * is willing to hash it, paired with the path relativized to that cache's root.
   */
  private Optional<Pair<ProjectFileHashCache, Path>> lookup(Path path) {
    Preconditions.checkArgument(path.isAbsolute());
    for (ProjectFileHashCache cache : caches) {
      Optional<Path> relativePath = cache.getFilesystem().getPathRelativeToProjectRoot(path);
      if (relativePath.isPresent() && cache.willGet(relativePath.get())) {
        return Optional.of(new Pair<>(cache, relativePath.get()));
      }
    }
    return Optional.empty();
  }

  /**
   * Same as {@link #lookup(Path)} but for archive members: only the archive path is
   * relativized; the member path inside the archive is preserved.
   */
  private Optional<Pair<ProjectFileHashCache, ArchiveMemberPath>> lookup(ArchiveMemberPath path) {
    Preconditions.checkArgument(path.isAbsolute());
    for (ProjectFileHashCache cache : caches) {
      Optional<ArchiveMemberPath> relativePath =
          cache.getFilesystem().getPathRelativeToProjectRoot(path.getArchivePath())
              .map(path::withArchivePath);
      if (relativePath.isPresent() && cache.willGet(relativePath.get())) {
        return Optional.of(new Pair<>(cache, relativePath.get()));
      }
    }
    return Optional.empty();
  }

  /** Invalidates the path in whichever inner cache owns it; no-op if none does. */
  @Override
  public void invalidate(Path path) {
    lookup(path).ifPresent(found -> found.getFirst().invalidate(found.getSecond()));
  }

  @Override
  public void invalidateAll() {
    for (ProjectFileHashCache cache : caches) {
      cache.invalidateAll();
    }
  }

  /**
   * @throws NoSuchFileException if no inner cache is willing to hash {@code path}
   */
  @Override
  public HashCode get(Path path) throws IOException {
    Pair<ProjectFileHashCache, Path> found =
        lookup(path).orElseThrow(() -> new NoSuchFileException(path.toString()));
    return found.getFirst().get(found.getSecond());
  }

  /**
   * @throws NoSuchFileException if no inner cache is willing to handle {@code path}
   */
  @Override
  public long getSize(Path path) throws IOException {
    Pair<ProjectFileHashCache, Path> found =
        lookup(path).orElseThrow(() -> new NoSuchFileException(path.toString()));
    return found.getFirst().getSize(found.getSecond());
  }

  /**
   * @throws NoSuchFileException if no inner cache is willing to handle the archive member
   */
  @Override
  public HashCode get(ArchiveMemberPath archiveMemberPath) throws IOException {
    Pair<ProjectFileHashCache, ArchiveMemberPath> found =
        lookup(archiveMemberPath)
            .orElseThrow(() -> new NoSuchFileException(archiveMemberPath.toString()));
    return found.getFirst().get(found.getSecond());
  }

  /** Records the hash in whichever inner cache owns the path; no-op if none does. */
  @Override
  public void set(Path path, HashCode hashCode) throws IOException {
    // Kept in isPresent()/get() form: set() throws a checked IOException,
    // which an ifPresent lambda cannot propagate.
    Optional<Pair<ProjectFileHashCache, Path>> found = lookup(path);
    if (found.isPresent()) {
      found.get().getFirst().set(found.get().getSecond(), hashCode);
    }
  }

  /** Aggregates verification results across all inner caches. */
  @Override
  public FileHashCacheVerificationResult verify() throws IOException {
    FileHashCacheVerificationResult.Builder builder = FileHashCacheVerificationResult.builder();
    // Starts at 1 — presumably counting this stacked cache itself; TODO confirm intent.
    int cachesExamined = 1;
    int filesExamined = 0;
    for (ProjectFileHashCache cache : caches) {
      FileHashCacheVerificationResult result = cache.verify();
      cachesExamined += result.getCachesExamined();
      filesExamined += result.getFilesExamined();
      builder.addAllVerificationErrors(result.getVerificationErrors());
    }
    return builder
        .setCachesExamined(cachesExamined)
        .setFilesExamined(filesExamined)
        .build();
  }

  @Override
  public HashCode get(ProjectFilesystem filesystem, Path path) throws IOException {
    return lookup(filesystem, path)
        .orElseThrow(() -> new NoSuchFileException(filesystem.resolve(path).toString()))
        .get(path);
  }

  @Override
  public HashCode get(ProjectFilesystem filesystem, ArchiveMemberPath path) throws IOException {
    return lookup(filesystem, path.getArchivePath())
        .orElseThrow(
            () -> new NoSuchFileException(filesystem.resolve(path.getArchivePath()).toString()))
        .get(path);
  }

  @Override
  public long getSize(ProjectFilesystem filesystem, Path path) throws IOException {
    return lookup(filesystem, path)
        .orElseThrow(() -> new NoSuchFileException(filesystem.resolve(path).toString()))
        .getSize(path);
  }

  @Override
  public void invalidate(ProjectFilesystem filesystem, Path path) {
    lookup(filesystem, path).ifPresent(cache -> cache.invalidate(path));
  }

  @Override
  public void set(ProjectFilesystem filesystem, Path path, HashCode hashCode) throws IOException {
    // Same isPresent() form as set(Path, HashCode): checked IOException from set().
    Optional<? extends ProjectFileHashCache> cache = lookup(filesystem, path);
    if (cache.isPresent()) {
      cache.get().set(path, hashCode);
    }
  }

  /**
   * @return a new stack in which every inner cache is wrapped by {@code decorateDelegate},
   *     preserving order.
   */
  public StackedFileHashCache newDecoratedFileHashCache(
      Function<ProjectFileHashCache, ProjectFileHashCache> decorateDelegate) {
    ImmutableList.Builder<ProjectFileHashCache> decoratedCaches = ImmutableList.builder();
    for (ProjectFileHashCache cache : caches) {
      decoratedCaches.add(decorateDelegate.apply(cache));
    }
    return new StackedFileHashCache(decoratedCaches.build());
  }
}
| |
package cz.metacentrum.perun.webgui.tabs.cabinettabs;
import com.google.gwt.core.client.JavaScriptObject;
import com.google.gwt.event.dom.client.ClickEvent;
import com.google.gwt.event.dom.client.ClickHandler;
import com.google.gwt.resources.client.ImageResource;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.cellview.client.CellTable;
import com.google.gwt.user.client.ui.*;
import cz.metacentrum.perun.webgui.client.PerunWebSession;
import cz.metacentrum.perun.webgui.client.UiElements;
import cz.metacentrum.perun.webgui.client.resources.ButtonType;
import cz.metacentrum.perun.webgui.client.resources.PerunEntity;
import cz.metacentrum.perun.webgui.client.resources.SmallIcons;
import cz.metacentrum.perun.webgui.json.GetEntityById;
import cz.metacentrum.perun.webgui.json.JsonCallbackEvents;
import cz.metacentrum.perun.webgui.json.JsonUtils;
import cz.metacentrum.perun.webgui.json.cabinetManager.CreateThanks;
import cz.metacentrum.perun.webgui.json.ownersManager.GetOwners;
import cz.metacentrum.perun.webgui.model.Owner;
import cz.metacentrum.perun.webgui.model.Publication;
import cz.metacentrum.perun.webgui.tabs.CabinetTabs;
import cz.metacentrum.perun.webgui.tabs.TabItem;
import cz.metacentrum.perun.webgui.tabs.TabItemWithUrl;
import cz.metacentrum.perun.webgui.tabs.UrlMapper;
import cz.metacentrum.perun.webgui.widgets.CustomButton;
import cz.metacentrum.perun.webgui.widgets.TabMenu;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
/**
 * Tab for adding new Thanks (acknowledgements) to a publication.
 *
 * @author Pavel Zlamal <256627@mail.muni.cz>
 */
public class CreateThanksTabItem implements TabItem, TabItemWithUrl{

    /**
     * Perun web session
     */
    private PerunWebSession session = PerunWebSession.getInstance();

    /**
     * Content widget - should be simple panel
     */
    private SimplePanel contentWidget = new SimplePanel();

    /**
     * Title widget
     */
    private Label titleWidget = new Label("Add acknowledgement");

    // data
    private int publicationId;
    private Publication publication;
    // Optional external events; null when the tab is created from an ID only
    // (e.g. via load()), so every use must be null-guarded.
    private JsonCallbackEvents events;
    // Running "Added: a, b, ..." summary shown above the owners table.
    private HTML alreadyAddedOwners = new HTML("");

    /**
     * Creates a tab instance; the publication is loaded asynchronously by ID.
     *
     * @param publicationId id
     */
    public CreateThanksTabItem(int publicationId){
        this.publicationId = publicationId;
        new GetEntityById(PerunEntity.PUBLICATION, publicationId, new JsonCallbackEvents(){
            public void onFinished(JavaScriptObject jso){
                publication = jso.cast();
            }
        }).retrieveData();
    }

    /**
     * Creates a tab instance
     *
     * @param publication
     */
    public CreateThanksTabItem(Publication publication){
        this.publication = publication;
        this.publicationId = publication.getId();
    }

    /**
     * Creates a tab instance
     *
     * @param publication
     * @param extEvents external events triggered when acknowledgements are created
     */
    public CreateThanksTabItem(Publication publication, JsonCallbackEvents extEvents){
        this.publication = publication;
        this.publicationId = publication.getId();
        this.events = extEvents;
    }

    // Tab is ready once the (possibly async-loaded) publication is available.
    public boolean isPrepared(){
        return !(publication == null);
    }

    @Override
    public boolean isRefreshParentOnClose() {
        return false;
    }

    @Override
    public void onClose() {
    }

    /** Builds the tab UI: add/close menu, "already added" summary, owners table. */
    public Widget draw() {
        // MAIN PANEL
        VerticalPanel vp = new VerticalPanel();
        vp.setSize("100%", "100%");
        // CALLBACK
        final GetOwners owners = new GetOwners();
        // FIXME - maybe make it configurable in a future
        List<String> names = Arrays.asList("MetaCentrum", "CERIT-SC", "ELIXIR");
        owners.setFilterByNames(names);
        // MENU
        TabMenu menu = new TabMenu();
        vp.add(menu);
        vp.setCellHeight(menu, "30px");
        // add button
        final CustomButton addButton = TabMenu.getPredefinedButton(ButtonType.ADD, "Add acknowledgement for selected owner(s)");
        final TabItem tab = this;
        // click handler
        addButton.addClickHandler(new ClickHandler(){
            @Override
            public void onClick(ClickEvent event) {
                ArrayList<Owner> list = owners.getTableSelectedList();
                if (UiElements.cantSaveEmptyListDialogBox(list)) {
                    for (int i=0; i<list.size(); i++) {
                        final String name = list.get(i).getName();
                        // per-owner events: append the owner's name to the summary
                        JsonCallbackEvents thanksEvents = new JsonCallbackEvents(){
                            public void onFinished(JavaScriptObject jso){
                                updateAlreadyAdded(name);
                            }
                        };
                        // merge external refresh events into the last request only
                        if (i == list.size()-1 && events != null) {
                            thanksEvents = JsonCallbackEvents.mergeEvents(thanksEvents, events);
                        }
                        CreateThanks request = new CreateThanks(publicationId, JsonCallbackEvents.disableButtonEvents(addButton, thanksEvents));
                        request.createThanks(list.get(i).getId());
                        if (i == list.size()-1) {
                            owners.clearTableSelectedSet();
                        }
                    }
                }
            }
        });
        menu.addWidget(addButton);
        menu.addWidget(TabMenu.getPredefinedButton(ButtonType.CLOSE, "", new ClickHandler() {
            @Override
            public void onClick(ClickEvent event) {
                // trigger refresh of sub-tab via event;
                // events may be null when the tab was opened by publication ID only
                if (events != null) {
                    events.onFinished(null);
                }
                session.getTabManager().closeTab(tab, isRefreshParentOnClose());
            }
        }));
        // add already added
        vp.add(alreadyAddedOwners);
        vp.setCellHeight(alreadyAddedOwners, "30px");
        // TABLE
        owners.setFilterByType("administrative"); // show only administrative contacts
        CellTable<Owner> table = owners.getTable();
        table.addStyleName("perun-table");
        ScrollPanel sp = new ScrollPanel();
        sp.add(table);
        sp.addStyleName("perun-tableScrollPanel");
        vp.add(sp);
        // resize small tab panel to correct size on screen
        session.getUiElements().resizeSmallTabPanel(sp, 350, this);
        addButton.setEnabled(false);
        JsonUtils.addTableManagedButton(owners, table, addButton);
        this.contentWidget.setWidget(vp);
        return getWidget();
    }

    /**
     * Appends an owner name to the "Added:" summary line; the name is
     * HTML-escaped before insertion.
     *
     * @param newlyAdded owner name just acknowledged
     */
    protected void updateAlreadyAdded(String newlyAdded)
    {
        String text = alreadyAddedOwners.getHTML();
        if(text.length() == 0){
            text += "<strong>Added:</strong> ";
        }else{
            text += ", ";
        }
        text += SafeHtmlUtils.fromString(newlyAdded).asString();
        alreadyAddedOwners.setHTML(text);
    }

    public Widget getWidget() {
        return this.contentWidget;
    }

    public Widget getTitle() {
        return this.titleWidget;
    }

    public ImageResource getIcon() {
        return SmallIcons.INSTANCE.addIcon();
    }

    @Override
    public int hashCode() {
        final int prime = 607;
        int result = 1;
        result = prime * result + publicationId;
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj)
            return true;
        if (obj == null)
            return false;
        if (getClass() != obj.getClass())
            return false;
        CreateThanksTabItem other = (CreateThanksTabItem)obj;
        return publicationId == other.publicationId;
    }

    public boolean multipleInstancesEnabled() {
        return false;
    }

    public void open() {
    }

    public boolean isAuthorized() {
        return session.isSelf();
    }

    public final static String URL = "create-thanks";

    public String getUrl()
    {
        return URL;
    }

    public String getUrlWithParameters()
    {
        return CabinetTabs.URL + UrlMapper.TAB_NAME_SEPARATOR + getUrl() + "?pubId=" + publicationId;
    }

    /** Restores the tab from URL parameters; expects an integer "pubId". */
    static public CreateThanksTabItem load(Map<String, String> parameters)
    {
        int publicationId = Integer.parseInt(parameters.get("pubId"));
        return new CreateThanksTabItem(publicationId);
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tom_roush.pdfbox.io;
import android.util.Log;
import java.io.EOFException;
import java.io.IOException;
import com.tom_roush.pdfbox.android.PDFBoxConfig;
import com.tom_roush.pdfbox.cos.COSStream;
/**
* Implementation of {@link RandomAccess} as sequence of multiple fixed size pages handled
* by {@link ScratchFile}.
*/
class ScratchFileBuffer implements RandomAccess
{
/** Size in bytes of every page, taken from the page handler's getPageSize(). */
private final int pageSize;
/**
 * The underlying page handler; {@code null} once this buffer is closed
 * (see {@code checkClosed()}).
 */
private ScratchFile pageHandler;
/**
 * The number of bytes of content in this buffer.
 */
private long size = 0;
/**
 * Index of current page in {@link #pageIndexes} (the nth page within this buffer).
 */
private int currentPagePositionInPageIndexes;
/**
 * The offset of the current page within this buffer.
 */
private long currentPageOffset;
/**
 * The current page data.
 */
private byte[] currentPage;
/**
 * The current position (for next read/write) of the buffer as an offset in the current page.
 */
private int positionInPage;
/**
 * <code>true</code> if current page was changed by a write method
 */
private boolean currentPageContentChanged = false;
/** contains ordered list of pages with the index the page is known by page handler ({@link ScratchFile}) */
private int[] pageIndexes = new int[16];
/** number of pages held by this buffer */
private int pageCount = 0;
/**
 * Creates a new buffer using pages handled by provided {@link ScratchFile}.
 *
 * @param pageHandler The {@link ScratchFile} managing the pages to be used by this buffer.
 *
 * @throws IOException If getting first page failed.
 */
ScratchFileBuffer(ScratchFile pageHandler) throws IOException
{
    // Fail fast if the scratch file is already closed.
    pageHandler.checkClosed();
    this.pageHandler = pageHandler;
    pageSize = this.pageHandler.getPageSize();
    // Start with one page so all read/write pointers are valid immediately.
    addPage();
}
/**
 * Checks if this buffer, or the underlying {@link ScratchFile} have been closed,
 * throwing {@link IOException} if so.
 *
 * <p>A {@code null} {@link #pageHandler} marks this buffer itself as closed.</p>
 *
 * @throws IOException If either this buffer, or the underlying {@link ScratchFile} have been closed.
 */
private void checkClosed() throws IOException
{
    if (pageHandler == null)
    {
        throw new IOException("Buffer already closed");
    }
    pageHandler.checkClosed();
}
/**
 * Adds a new page and positions all pointers to start of new page.
 *
 * @throws IOException if requesting a new page fails
 */
private void addPage() throws IOException
{
    // Grow the page-index array before it runs out of slots.
    if (pageCount+1 >= pageIndexes.length)
    {
        int newSize = pageIndexes.length*2;
        // check overflow: doubling past Integer.MAX_VALUE wraps negative,
        // making newSize smaller than the old length
        if (newSize<pageIndexes.length)
        {
            if (pageIndexes.length == Integer.MAX_VALUE)
            {
                throw new IOException("Maximum buffer size reached.");
            }
            // clamp to the largest possible array length
            newSize = Integer.MAX_VALUE;
        }
        int[] newPageIndexes = new int[newSize];
        System.arraycopy(pageIndexes, 0, newPageIndexes, 0, pageCount);
        pageIndexes = newPageIndexes;
    }
    // Reserve a fresh page from the handler and make it the current page.
    int newPageIdx = pageHandler.getNewPage();
    pageIndexes[pageCount] = newPageIdx;
    currentPagePositionInPageIndexes = pageCount;
    currentPageOffset = ((long)pageCount) * pageSize;
    pageCount++;
    currentPage = new byte[pageSize];
    positionInPage = 0;
}
/**
 * {@inheritDoc}
 *
 * @return the number of bytes of content written to this buffer
 */
@Override
public long length() throws IOException
{
    return size;
}
/**
 * Ensures the current page has at least one byte left
 * ({@link #positionInPage} &lt; {@link #pageSize}).
 *
 * <p>If this is not the case we go to next page (writing
 * current one if changed). If current buffer has no more
 * pages we add a new one.</p>
 *
 * @param addNewPageIfNeeded if <code>true</code> it is allowed to add a new page in case
 *                           we are currently at end of last buffer page
 *
 * @return <code>true</code> if we were successful positioning pointer before end of page;
 *         we might return <code>false</code> if it is not allowed to add another page
 *         and current pointer points at end of last page
 *
 * @throws IOException
 */
private boolean ensureAvailableBytesInPage(boolean addNewPageIfNeeded) throws IOException
{
    if (positionInPage >= pageSize)
    {
        // page full
        if (currentPageContentChanged)
        {
            // flush dirty page before leaving it
            pageHandler.writePage(pageIndexes[currentPagePositionInPageIndexes], currentPage);
            currentPageContentChanged = false;
        }
        // get new page
        if (currentPagePositionInPageIndexes+1 < pageCount)
        {
            // we already have more pages assigned (there was a backward seek before)
            currentPage = pageHandler.readPage(pageIndexes[++currentPagePositionInPageIndexes]);
            currentPageOffset = ((long)currentPagePositionInPageIndexes) * pageSize;
            positionInPage = 0;
        }
        else if (addNewPageIfNeeded)
        {
            // need new page; addPage() also resets positionInPage/offsets
            addPage();
        }
        else
        {
            // we are at last page and are not allowed to add new page
            return false;
        }
    }
    return true;
}
/**
 * {@inheritDoc}
 *
 * <p>Writes one byte at the current position, extending {@link #size}
 * if the write goes past the previous end of content.</p>
 */
@Override
public void write(int b) throws IOException
{
    checkClosed();
    // Always allowed to grow by a page when writing.
    ensureAvailableBytesInPage(true);
    currentPage[positionInPage++] = (byte) b;
    currentPageContentChanged = true;
    if(currentPageOffset + positionInPage > size)
    {
        size = currentPageOffset + positionInPage;
    }
}
/**
 * {@inheritDoc}
 *
 * <p>Convenience overload; delegates to {@link #write(byte[], int, int)}
 * with the whole array.</p>
 */
@Override
public void write(byte[] b) throws IOException
{
    write(b, 0, b.length);
}
/**
 * {@inheritDoc}
 *
 * <p>Copies the range page-by-page, flushing/allocating pages as needed,
 * and extends {@link #size} if the write goes past the previous end.</p>
 */
@Override
public void write(byte[] b, int off, int len) throws IOException
{
    checkClosed();
    int remain = len;
    int bOff = off;
    while (remain > 0)
    {
        // Move to (or allocate) a page with free space.
        ensureAvailableBytesInPage(true);
        // Copy at most what fits into the current page.
        int bytesToWrite = Math.min(remain, pageSize-positionInPage);
        System.arraycopy(b, bOff, currentPage, positionInPage, bytesToWrite);
        positionInPage += bytesToWrite;
        currentPageContentChanged = true;
        bOff += bytesToWrite;
        remain -= bytesToWrite;
    }
    if(currentPageOffset + positionInPage > size)
    {
        size = currentPageOffset + positionInPage;
    }
}
/**
 * {@inheritDoc}
 */
@Override
public final void clear() throws IOException
{
    checkClosed();
    // keep only the first page, discard all other pages
    pageHandler.markPagesAsFree(pageIndexes, 1, pageCount - 1);
    pageCount = 1;
    // change to first page if we are not already there
    if (currentPagePositionInPageIndexes > 0)
    {
        // no flush needed here: any pending content is being discarded anyway
        currentPage = pageHandler.readPage(pageIndexes[0]);
        currentPagePositionInPageIndexes = 0;
        currentPageOffset = 0;
    }
    // reset logical position and size to an empty buffer
    positionInPage = 0;
    size = 0;
    currentPageContentChanged = false;
}
/**
 * {@inheritDoc}
 */
@Override
public long getPosition() throws IOException
{
    checkClosed();
    // absolute position = start offset of the current page + offset within it
    return currentPageOffset + positionInPage;
}
/**
 * {@inheritDoc}
 */
@Override
public void seek(long seekToPosition) throws IOException
{
    checkClosed();
    /*
     * for now we won't allow to seek past end of buffer; this can be changed by adding new pages as needed
     */
    if (seekToPosition > size)
    {
        throw new EOFException();
    }
    if (seekToPosition < 0)
    {
        throw new IOException("Negative seek offset: " + seekToPosition);
    }
    // '<=' deliberately allows landing exactly at the end of the current page
    // (positionInPage == pageSize); the next read/write moves on via
    // ensureAvailableBytesInPage()
    if ((seekToPosition >= currentPageOffset) && (seekToPosition <= currentPageOffset + pageSize))
    {
        // within same page
        positionInPage = (int) (seekToPosition - currentPageOffset);
    }
    else
    {
        // have to go to another page
        // check if current page needs to be written to file
        if (currentPageContentChanged)
        {
            pageHandler.writePage(pageIndexes[currentPagePositionInPageIndexes], currentPage);
            currentPageContentChanged = false;
        }
        // load the page containing the target position and recompute offsets
        int newPagePosition = (int) (seekToPosition / pageSize);
        currentPage = pageHandler.readPage(pageIndexes[newPagePosition]);
        currentPagePositionInPageIndexes = newPagePosition;
        currentPageOffset = ((long)currentPagePositionInPageIndexes) * pageSize;
        positionInPage = (int) (seekToPosition - currentPageOffset);
    }
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isClosed()
{
    // close() nulls the page handler, so it doubles as the closed flag
    return pageHandler == null;
}
/**
 * {@inheritDoc}
 *
 * <p>Reads the next byte without consuming it: the byte is read and, unless
 * the end of the buffer was reached, the position is stepped back by one.</p>
 */
@Override
public int peek() throws IOException
{
    final int nextByte = read();
    // only step back when a byte was actually consumed; at EOF (-1) there is
    // nothing to undo
    if (nextByte >= 0)
    {
        rewind(1);
    }
    return nextByte;
}
/**
 * {@inheritDoc}
 */
@Override
public void rewind(int bytes) throws IOException
{
    // seek() performs the bounds checking (a negative target raises IOException)
    seek(currentPageOffset + positionInPage - bytes);
}
/**
 * {@inheritDoc}
 *
 * <p>Keeps reading until exactly {@code length} bytes were collected;
 * throws {@link EOFException} if the buffer runs out before that.</p>
 */
@Override
public byte[] readFully(int length) throws IOException
{
    final byte[] buffer = new byte[length];
    int filled = 0;
    do
    {
        final int readCount = read(buffer, filled, length - filled);
        // a negative count means the underlying buffer hit its end too early
        if (readCount < 0)
        {
            throw new EOFException();
        }
        filled += readCount;
    } while (filled < length);
    return buffer;
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isEOF() throws IOException
{
    checkClosed();
    // EOF when the absolute position has reached the logical size
    return currentPageOffset + positionInPage >= size;
}
/**
 * {@inheritDoc}
 */
@Override
public int available() throws IOException
{
    checkClosed();
    // remaining bytes, clamped to int range since size/position are longs
    return (int) Math.min(size - (currentPageOffset + positionInPage), Integer.MAX_VALUE);
}
/**
 * {@inheritDoc}
 */
@Override
public int read() throws IOException
{
    checkClosed();
    // end of buffer reached
    if (currentPageOffset + positionInPage >= size)
    {
        return -1;
    }
    // advance to the next page if the current one is exhausted (never adds pages)
    if (! ensureAvailableBytesInPage(false))
    {
        // should not happen, we checked it before
        throw new IOException("Unexpectedly no bytes available for read in buffer.");
    }
    // mask to return the byte as an unsigned value in [0, 255]
    return currentPage[positionInPage++] & 0xff;
}
/**
 * {@inheritDoc}
 */
@Override
public int read(byte[] b) throws IOException
{
    // delegate to the ranged read covering the whole array
    return read(b, 0, b.length);
}
/**
 * {@inheritDoc}
 *
 * @throws IndexOutOfBoundsException if {@code off} or {@code len} is negative,
 *         or {@code off + len} exceeds {@code b.length}, as required by the
 *         {@link java.io.InputStream#read(byte[], int, int)} contract
 */
@Override
public int read(byte[] b, int off, int len) throws IOException
{
    checkClosed();
    // validate the requested region up front; 'len > b.length - off' avoids
    // int overflow in the bounds comparison
    if ((off < 0) || (len < 0) || (len > b.length - off))
    {
        throw new IndexOutOfBoundsException("Offset " + off + " and length "
                + len + " do not fit into buffer of size " + b.length);
    }
    // per the InputStream contract a zero-length read returns 0, even at EOF
    if (len == 0)
    {
        return 0;
    }
    if (currentPageOffset + positionInPage >= size)
    {
        return -1;
    }
    // never read past the logical end of the buffer
    int remain = (int) Math.min(len, size - (currentPageOffset + positionInPage));
    int totalBytesRead = 0;
    int bOff = off;
    while (remain > 0)
    {
        if (! ensureAvailableBytesInPage(false))
        {
            // should not happen, we checked it before
            throw new IOException("Unexpectedly no bytes available for read in buffer.");
        }
        // copy as much as the current page still holds
        int readBytes = Math.min(remain, pageSize - positionInPage);
        System.arraycopy(currentPage, positionInPage, b, bOff, readBytes);
        positionInPage += readBytes;
        totalBytesRead += readBytes;
        bOff += readBytes;
        remain -= readBytes;
    }
    return totalBytesRead;
}
/**
 * {@inheritDoc}
 */
@Override
public void close() throws IOException
{
    // idempotent: a second close() is a no-op because pageHandler is nulled
    if (pageHandler != null) {
        // release all pages back to the scratch file; note that a dirty
        // current page is NOT flushed - its content is simply discarded
        pageHandler.markPagesAsFree(pageIndexes, 0, pageCount);
        pageHandler = null;
        pageIndexes = null;
        currentPage = null;
        currentPageOffset = 0;
        currentPagePositionInPageIndexes = -1;
        positionInPage = 0;
        size = 0;
    }
}
/**
 * While calling finalize is normally discouraged we will have to
 * use it here as long as closing a scratch file buffer is not
 * done in every case. Currently {@link COSStream} creates new
 * buffers without closing the old one - which might still be
 * used.
 *
 * <p>Enabling debugging one will see if there are still cases
 * where the buffer is not closed.</p>
 *
 * <p>NOTE(review): finalize() is deprecated in newer JDKs; consider a
 * Cleaner-based safety net once all callers close their buffers.</p>
 */
@Override
protected void finalize() throws Throwable
{
    try
    {
        // a non-null pageHandler means the buffer was never closed properly
        if ((pageHandler != null) && PDFBoxConfig.isDebugEnabled())
        {
            Log.d("PdfBox-Android","ScratchFileBuffer not closed!");
        }
        close();
    }
    finally
    {
        super.finalize();
    }
}
}
| |
/*
* Copyright 2006-2010 Virtual Laboratory for e-Science (www.vl-e.nl)
* Copyright 2012-2013 Netherlands eScience Center.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* You may obtain a copy of the License at the following location:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* For the full license, see: LICENSE.txt (located in the root folder of this distribution).
* ---
*/
// source:
package nl.esciencecenter.vlet.vfs.lfc;
import java.io.IOException;
import java.io.OutputStream;
import nl.esciencecenter.glite.lfc.LFCException;
import nl.esciencecenter.glite.lfc.LFCServer;
import nl.esciencecenter.glite.lfc.internal.FileDesc;
import nl.esciencecenter.ptk.task.ITaskMonitor;
import nl.esciencecenter.ptk.util.logging.ClassLogger;
import nl.esciencecenter.vbrowser.vrs.exceptions.VrsException;
import nl.esciencecenter.vlet.exception.ResourceNotFoundException;
import nl.esciencecenter.vlet.vrs.vfs.VChecksum;
import nl.esciencecenter.vlet.vrs.vfs.VFile;
import nl.esciencecenter.vlet.vrs.vfs.VReplicatable;
/**
 * OutputStream wrapper that writes data to an LFC replica and, when the
 * stream is closed, registers or updates the corresponding LFC catalogue
 * entry and replica metadata.
 */
public class LFCOutputStream extends OutputStream
{
    /** stream to the actual replica storage */
    private final OutputStream outputStream;

    /** resolved file to write to */
    private final LFCFile lfcFile;

    /** replica file backing this stream */
    private final VFile replica;

    /** client used to register/update the replica on close() */
    private final LFCClient client;

    /** true if the replica is new and must be registered on close() */
    private final boolean isNewReplica;

    /** monitor used for progress/log reporting during close() */
    private final ITaskMonitor monitor;

    /**
     * Opens an output stream to the given replica.
     *
     * @param monitor   task monitor for progress reporting
     * @param lfcFile   resolved LFC file the replica belongs to
     * @param replica   replica file to write the data to
     * @param client    LFC client used for registration/metadata updates
     * @param isNewRepl whether the replica is newly created
     * @throws IOException if the replica's output stream cannot be created
     */
    public LFCOutputStream(ITaskMonitor monitor, LFCFile lfcFile,
            VFile replica, LFCClient client, boolean isNewRepl)
            throws IOException
    {
        super();
        this.lfcFile = lfcFile;
        this.replica = replica;
        info("Opening OutputStream to replica:" + replica);
        this.outputStream = replica.createOutputStream();
        this.isNewReplica = isNewRepl;
        this.monitor = monitor;
        debug("Replica should be created in: " + replica.getVRL());
        this.client = client;
    }

    /** {@inheritDoc} — delegates to the replica stream. */
    @Override
    public void write(final int arg0) throws IOException
    {
        this.outputStream.write(arg0);
    }

    /** {@inheritDoc} — delegates to the replica stream. */
    @Override
    public void flush() throws IOException
    {
        this.outputStream.flush();
    }

    /** {@inheritDoc} — delegates to the replica stream. */
    @Override
    public void write(final byte[] arg0, final int arg1, final int arg2)
            throws IOException
    {
        this.outputStream.write(arg0, arg1, arg2);
        debug("Wrote " + arg2 + " bytes starting at offset " + arg1);
    }

    /** {@inheritDoc} — delegates to the replica stream. */
    @Override
    public void write(final byte[] arg0) throws IOException
    {
        this.outputStream.write(arg0);
    }

    /**
     * Closes the replica stream, then registers the LFC entry and the replica
     * (for a new replica) or updates the replica metadata (for an existing one).
     *
     * @throws IOException if any of the LFC registration/update steps fails;
     *         the original exception is preserved as the cause
     */
    @Override
    public void close() throws IOException
    {
        try
        {
            outputStream.close();
        }
        catch (Exception e)
        {
            // best effort: the stream may already have been closed elsewhere
            info("Couldn't close outputstream. Already closed ?:" + e);
        }

        try
        {
            // after closing a stream we can register the replica
            // together with a new LFC entry:
            if (!lfcFile.exists())
            {
                monitor.startSubTask("LFC: Registering new File entry", -1);
                monitor.logPrintf("LFC: Registering new File entry:\n - "
                        + lfcFile + "\n");
                lfcFile.create(); // register new entry without replicas
                monitor.endSubTask("LFC: Registering new File entry");
            }

            // as soon as the replica is created register it
            if (isNewReplica)
            {
                monitor.startSubTask("LFC: Registering new replica", -1);
                monitor.logPrintf("LFC: Registering new replica:\n - "
                        + replica.getVRL() + "\n");
                this.client.addReplica(monitor, lfcFile, replica, true);
                monitor.endSubTask("LFC: Registering new replica");
            }
            else
            {
                // if this is an existing replica just update metadata (checksum
                // , size). This also fixes the bug of getting the correct size
                // after writing twice
                monitor.startSubTask("LFC: Updating replica metadata", -1);
                monitor.logPrintf("LFC: Updating metadata for: \n - "
                        + replica.getVRL() + "\n");
                client.updateReplicaMetaData(monitor, lfcFile, replica);
                monitor.endSubTask("LFC: Updating replica metadata");
            }

            // LinkHandling new or existing file should be resolved link:
            FileDescWrapper wrapperDesc = client.queryPath(lfcFile.getPath(),
                    true);
            lfcFile.setWrapperDesc(wrapperDesc);

            debug("-------------LFC file is: " + lfcFile.getLength()
                    + " bytes");
            debug("-------------Replica file is " + replica.getLength()
                    + " bytes");

            // sanity check: catalogue size must match the bytes actually stored
            if (lfcFile.getLength() != replica.getLength())
            {
                error("LFC file and replica file sizes don't match!!! LFC file is "
                        + lfcFile.getLength() + " bytes and replica "
                        + replica.getLength() + " bytes");
            }

            monitor.logPrintf("LFC: Finalizing entry: setting new file size (updated from replica) to:"
                    + wrapperDesc.getFileDesc().getFileSize() + "\n");
            info("Closing OutputStream. Finalizing upload to:" + lfcFile);
        }
        catch (Exception e)
        {
            // wrap with the cause constructor so the full stack trace survives
            throw new IOException(e.getMessage(), e);
        }
    }

    /** Logs an error message for this class. */
    private static void error(String msg)
    {
        ClassLogger.getLogger(LFCOutputStream.class).errorPrintf("%s\n",msg);
    }

    /** Debug logging stub; currently disabled. */
    private static void debug(String msg)
    {
        // Global.debugPrintln(LFCOutputStream.class, msg);
    }

    /** Logs an informational message for this class. */
    private static void info(String msg)
    {
        ClassLogger.getLogger(LFCOutputStream.class).infoPrintf("%s\n",msg);
    }
}
| |
/******************************************************************************
* HierarchyServiceImplTest.java - created by aaronz on Jul 1, 2007
*
* Copyright (c) 2007 Centre for Academic Research in Educational Technologies
* Licensed under the Educational Community License version 1.0
*
* A copy of the Educational Community License has been included in this
* distribution and is available at: http://www.opensource.org/licenses/ecl1.php
*
*****************************************************************************/
package org.sakaiproject.hierarchy.impl.test;
import java.util.Map;
import java.util.Set;
import org.sakaiproject.genericdao.api.search.Search;
import org.sakaiproject.hierarchy.dao.HierarchyDao;
import org.sakaiproject.hierarchy.dao.model.HierarchyNodeMetaData;
import org.sakaiproject.hierarchy.impl.HierarchyServiceImpl;
import org.sakaiproject.hierarchy.impl.test.data.TestDataPreload;
import org.sakaiproject.hierarchy.model.HierarchyNode;
import org.springframework.test.AbstractTransactionalSpringContextTests;
/**
* Testing the hierarchy service
*
* @author Aaron Zeckoski (aaronz@vt.edu)
*/
public class HierarchyServiceImplTest extends AbstractTransactionalSpringContextTests {

    // service under test; constructed manually in onSetUpBeforeTransaction()
    protected HierarchyServiceImpl hierarchyService;

    // DAO bean retrieved from the Spring test application context
    private HierarchyDao dao;
    // preloaded fixture data (nodes and metadata) from the Spring context
    private TestDataPreload tdp;

    // private SessionManager sessionManager;
    // private MockControl sessionManagerControl;
    /**
     * Supplies the Spring config files used to build the test application context.
     */
    protected String[] getConfigLocations() {
        // point to the needed spring config files, must be on the classpath
        // (add component/src/webapp/WEB-INF to the build path in Eclipse),
        // they also need to be referenced in the project.xml file
        return new String[] {"hibernate-test.xml", "spring-hibernate.xml"};
    }
    // run this before each test starts
    /**
     * Fetches the DAO and fixture beans from the Spring context and wires up
     * the service under test; fails fast if either bean is missing.
     *
     * @throws Exception propagated from Spring context setup
     */
    protected void onSetUpBeforeTransaction() throws Exception {
        // load the spring created dao class bean from the Spring Application Context
        dao = (HierarchyDao) applicationContext.getBean("org.sakaiproject.hierarchy.dao.HierarchyDao");
        if (dao == null) {
            throw new NullPointerException("Dao could not be retrieved from spring context");
        }

        // load up the test data preloader from spring
        tdp = (TestDataPreload) applicationContext.getBean("org.sakaiproject.hierarchy.test.data.TestDataPreload");
        if (tdp == null) {
            throw new NullPointerException("TestDatePreload could not be retrieved from spring context");
        }

        // load up any other needed spring beans

        // // setup the mock objects if needed
        // sessionManagerControl = MockControl.createControl(SessionManager.class);
        // sessionManager = (SessionManager) sessionManagerControl.getMock();
        // //this mock object is simply keeping us from getting a null when getCurrentSessionUserId is called
        // sessionManager.getCurrentSessionUserId(); // expect this to be called
        // sessionManagerControl.setDefaultMatcher(MockControl.ALWAYS_MATCHER);
        // sessionManagerControl.setReturnValue(TestDataPreload.USER_ID, MockControl.ZERO_OR_MORE);
        // sessionManagerControl.replay();

        //create and setup the object to be tested
        hierarchyService = new HierarchyServiceImpl();
        hierarchyService.setDao(dao);
        // hierarchyService.setSessionManager(sessionManager);
    }
    // run this before each test starts and as part of the transaction
    /**
     * Hook for per-test, in-transaction data setup; intentionally empty.
     */
    protected void onSetUpInTransaction() {
        // preload additional data if desired
    }
/**
* ADD unit tests below here, use testMethod as the name of the unit test,
* Note that if a method is overloaded you should include the arguments in the
* test name like so: testMethodClassInt (for method(Class, int);
*/
public void testValidTestData() {
// ensure the test data is setup the way we think
assertEquals(new Long(1), tdp.pNode1.getId());
assertEquals(new Long(6), tdp.pNode6.getId());
assertEquals(new Long(9), tdp.pNode9.getId());
}
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#createHierarchy(java.lang.String)}.
     */
    public void testCreateHierarchy() {
        // test creating a valid hierarchy: a fresh root node with no links
        HierarchyNode node = hierarchyService.createHierarchy("hierarchyC");
        assertNotNull(node);
        assertEquals("hierarchyC", node.hierarchyId);
        assertNotNull(node.parentNodeIds);
        assertNotNull(node.childNodeIds);
        assertTrue(node.parentNodeIds.isEmpty());
        assertTrue(node.childNodeIds.isEmpty());

        // test creating a hierarchy that already exists
        try {
            hierarchyService.createHierarchy(TestDataPreload.HIERARCHYA);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }

        // test creating a hierarchy with too long an id (over the column limit)
        try {
            hierarchyService.createHierarchy("1234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890123456789012345678901234567890");
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#setHierarchyRootNode(java.lang.String, java.lang.String)}.
     */
    public void testSetHierarchyRootNode() {
        HierarchyNode node = null;

        // test reassigning existing rootnode is no problem
        node = hierarchyService.setHierarchyRootNode(TestDataPreload.HIERARCHYA, tdp.node1.id);
        assertNotNull(node);
        assertEquals(TestDataPreload.HIERARCHYA, node.hierarchyId);
        assertEquals(tdp.node1.id, node.id);

        // test reassigning a new node to be the parent node
        assertEquals(Boolean.FALSE, tdp.meta11.getIsRootNode());
        assertEquals(Boolean.TRUE, tdp.meta9.getIsRootNode());
        node = hierarchyService.setHierarchyRootNode(TestDataPreload.HIERARCHYB, tdp.node11.id);
        assertNotNull(node);
        assertEquals(TestDataPreload.HIERARCHYB, node.hierarchyId);
        assertEquals(tdp.node11.id, node.id);

        // test assigning a node which has parents causes failure
        try {
            hierarchyService.setHierarchyRootNode(TestDataPreload.HIERARCHYA, tdp.node3.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }

        // test assigning a root node from another hierarchy to this root causes failure
        try {
            hierarchyService.setHierarchyRootNode(TestDataPreload.HIERARCHYB, tdp.node1.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#destroyHierarchy(java.lang.String)}.
     */
    public void testDestroyHierarchy() {
        hierarchyService.destroyHierarchy(TestDataPreload.HIERARCHYB);
        // all node metadata for the destroyed hierarchy must be gone
        long count = dao.countBySearch(HierarchyNodeMetaData.class,
                new Search("hierarchyId", TestDataPreload.HIERARCHYB) );
        assertEquals(0, count);

        // test removing a non-existent hierarchy fails
        try {
            hierarchyService.destroyHierarchy(TestDataPreload.HIERARCHYB);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getRootLevelNode(java.lang.String)}.
     */
    public void testGetRootLevelNode() {
        HierarchyNode node = null;

        node = hierarchyService.getRootNode(TestDataPreload.HIERARCHYB);
        assertNotNull(node);
        assertEquals(tdp.node9, node);
        assertEquals(TestDataPreload.HIERARCHYB, node.hierarchyId);

        node = hierarchyService.getRootNode(TestDataPreload.HIERARCHYA);
        assertNotNull(node);
        assertEquals(tdp.node1, node);
        assertEquals(TestDataPreload.HIERARCHYA, node.hierarchyId);

        // fetching root from invalid hierarchy gets null (no exception)
        node = hierarchyService.getRootNode(TestDataPreload.INVALID_HIERARCHY);
        assertNull(node);
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getNodeById(java.lang.String)}.
     */
    public void testGetNodeById() {
        HierarchyNode node = null;

        node = hierarchyService.getNodeById(tdp.node4.id);
        assertNotNull(node);
        assertEquals(tdp.node4, node);
        assertEquals(tdp.node4.id, node.id);

        node = hierarchyService.getNodeById(tdp.node6.id);
        assertNotNull(node);
        assertEquals(tdp.node6, node);
        assertEquals(tdp.node6.id, node.id);

        // fetching node with invalid id should fail (unlike getRootNode, which returns null)
        try {
            node = hierarchyService.getNodeById(TestDataPreload.INVALID_NODE_ID);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getChildNodes(java.lang.String, boolean)}.
     * The boolean flag selects direct children only (true) versus all descendants (false).
     */
    public void testGetChildNodes() {
        Set<HierarchyNode> nodes;

        // check children for the root: 3 direct children, 7 total descendants
        nodes = hierarchyService.getChildNodes(tdp.node1.id, true);
        assertNotNull(nodes);
        assertEquals(3, nodes.size());
        assertTrue(nodes.contains(tdp.node2));
        assertTrue(nodes.contains(tdp.node3));
        assertTrue(nodes.contains(tdp.node4));

        nodes = hierarchyService.getChildNodes(tdp.node1.id, false);
        assertNotNull(nodes);
        assertEquals(7, nodes.size());
        assertTrue(nodes.contains(tdp.node2));
        assertTrue(nodes.contains(tdp.node3));
        assertTrue(nodes.contains(tdp.node4));
        assertTrue(nodes.contains(tdp.node5));
        assertTrue(nodes.contains(tdp.node6));
        assertTrue(nodes.contains(tdp.node7));
        assertTrue(nodes.contains(tdp.node8));

        // check children for the mid level nodes (direct == all for node4)
        nodes = hierarchyService.getChildNodes(tdp.node4.id, true);
        assertNotNull(nodes);
        assertEquals(3, nodes.size());
        assertTrue(nodes.contains(tdp.node6));
        assertTrue(nodes.contains(tdp.node7));
        assertTrue(nodes.contains(tdp.node8));

        nodes = hierarchyService.getChildNodes(tdp.node4.id, false);
        assertNotNull(nodes);
        assertEquals(3, nodes.size());
        assertTrue(nodes.contains(tdp.node6));
        assertTrue(nodes.contains(tdp.node7));
        assertTrue(nodes.contains(tdp.node8));

        // leaf nodes have no children
        nodes = hierarchyService.getChildNodes(tdp.node5.id, true);
        assertNotNull(nodes);
        assertEquals(0, nodes.size());

        nodes = hierarchyService.getChildNodes(tdp.node7.id, true);
        assertNotNull(nodes);
        assertEquals(0, nodes.size());

        // fetching children for invalid node id should fail
        try {
            nodes = hierarchyService.getChildNodes(TestDataPreload.INVALID_NODE_ID, true);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getParentNodes(java.lang.String, boolean)}.
     * The boolean flag selects direct parents only (true) versus all ancestors (false).
     */
    public void testGetParentNodes() {
        Set<HierarchyNode> nodes;

        // check parents for leaf nodes first
        nodes = hierarchyService.getParentNodes(tdp.node7.id, false);
        assertNotNull(nodes);
        assertEquals(2, nodes.size());
        assertTrue(nodes.contains(tdp.node1));
        assertTrue(nodes.contains(tdp.node4));

        nodes = hierarchyService.getParentNodes(tdp.node7.id, true);
        assertNotNull(nodes);
        assertEquals(1, nodes.size());
        assertTrue(nodes.contains(tdp.node4));

        nodes = hierarchyService.getParentNodes(tdp.node5.id, false);
        assertNotNull(nodes);
        assertEquals(2, nodes.size());
        assertTrue(nodes.contains(tdp.node1));
        assertTrue(nodes.contains(tdp.node3));

        // check one with multiple parents (node10 hangs under both node9 and node11)
        nodes = hierarchyService.getParentNodes(tdp.node10.id, false);
        assertNotNull(nodes);
        assertEquals(2, nodes.size());
        assertTrue(nodes.contains(tdp.node9));
        assertTrue(nodes.contains(tdp.node11));

        nodes = hierarchyService.getParentNodes(tdp.node10.id, true);
        assertNotNull(nodes);
        assertEquals(2, nodes.size());
        assertTrue(nodes.contains(tdp.node9));
        assertTrue(nodes.contains(tdp.node11));

        // root nodes have no parents
        nodes = hierarchyService.getParentNodes(tdp.node1.id, true);
        assertNotNull(nodes);
        assertEquals(0, nodes.size());

        nodes = hierarchyService.getParentNodes(tdp.node9.id, true);
        assertNotNull(nodes);
        assertEquals(0, nodes.size());

        // fetching children for invalid node id should fail
        try {
            nodes = hierarchyService.getParentNodes(TestDataPreload.INVALID_NODE_ID, true);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#addNode(java.lang.String, java.lang.String)}.
     * Verifies that adding a node updates both the new node's parent links and
     * the child links on the parent(s) and ancestor(s).
     */
    public void testAddNode() {
        HierarchyNode node = null;
        String newNodeId = null;

        // check we can insert a node in a normal tree and that the links are created correctly in this node
        node = hierarchyService.addNode(TestDataPreload.HIERARCHYA, tdp.node2.id);
        assertNotNull(node);
        newNodeId = node.id;
        assertNotNull(newNodeId);
        assertNotNull(node.directParentNodeIds);
        assertEquals(1, node.directParentNodeIds.size());
        assertTrue(node.directParentNodeIds.contains(tdp.node2.id));
        assertNotNull(node.parentNodeIds);
        assertEquals(2, node.parentNodeIds.size());
        assertTrue(node.parentNodeIds.contains(tdp.node2.id));
        assertTrue(node.parentNodeIds.contains(tdp.node1.id));
        assertNotNull(node.directChildNodeIds);
        assertTrue(node.directChildNodeIds.isEmpty());
        assertNotNull(node.childNodeIds);
        assertTrue(node.childNodeIds.isEmpty());

        // now check that the child links were updated correctly for the parent
        node = hierarchyService.getNodeById(tdp.node2.id);
        assertNotNull(node);
        assertEquals(tdp.node2.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(1, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(newNodeId));
        assertNotNull(node.childNodeIds);
        assertEquals(1, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));

        // and the root node (descendant count grows from 7 to 8)
        node = hierarchyService.getNodeById(tdp.node1.id);
        assertNotNull(node);
        assertEquals(tdp.node1.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(3, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node2.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node3.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node4.id));
        assertNotNull(node.childNodeIds);
        assertEquals(8, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));
        assertTrue(node.childNodeIds.contains(tdp.node2.id));
        assertTrue(node.childNodeIds.contains(tdp.node3.id));
        assertTrue(node.childNodeIds.contains(tdp.node4.id));
        assertTrue(node.childNodeIds.contains(tdp.node5.id));
        assertTrue(node.childNodeIds.contains(tdp.node6.id));
        assertTrue(node.childNodeIds.contains(tdp.node7.id));
        assertTrue(node.childNodeIds.contains(tdp.node8.id));

        // check we can insert a node in an upward tree and that the links are created correctly in this node
        // (node10 has two parents: node9 and node11, so the new node gets 3 ancestors)
        node = hierarchyService.addNode(TestDataPreload.HIERARCHYB, tdp.node10.id);
        assertNotNull(node);
        newNodeId = node.id;
        assertNotNull(newNodeId);
        assertNotNull(node.directParentNodeIds);
        assertEquals(1, node.directParentNodeIds.size());
        assertTrue(node.directParentNodeIds.contains(tdp.node10.id));
        assertNotNull(node.parentNodeIds);
        assertEquals(3, node.parentNodeIds.size());
        assertTrue(node.parentNodeIds.contains(tdp.node10.id));
        assertTrue(node.parentNodeIds.contains(tdp.node9.id));
        assertTrue(node.parentNodeIds.contains(tdp.node11.id));
        assertNotNull(node.directChildNodeIds);
        assertTrue(node.directChildNodeIds.isEmpty());
        assertNotNull(node.childNodeIds);
        assertTrue(node.childNodeIds.isEmpty());

        // now check that the child links were updated correctly for the parent
        node = hierarchyService.getNodeById(tdp.node10.id);
        assertNotNull(node);
        assertEquals(tdp.node10.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(1, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(newNodeId));
        assertNotNull(node.childNodeIds);
        assertEquals(1, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));

        // and the root node
        node = hierarchyService.getNodeById(tdp.node9.id);
        assertNotNull(node);
        assertEquals(tdp.node9.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(1, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node10.id));
        assertNotNull(node.childNodeIds);
        assertEquals(2, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));
        assertTrue(node.childNodeIds.contains(tdp.node10.id));

        // and the other higher parent node
        node = hierarchyService.getNodeById(tdp.node11.id);
        assertNotNull(node);
        assertEquals(tdp.node11.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(1, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node10.id));
        assertNotNull(node.childNodeIds);
        assertEquals(2, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));
        assertTrue(node.childNodeIds.contains(tdp.node10.id));

        // check we can insert a node next to others and that the links are created correctly in this node
        node = hierarchyService.addNode(TestDataPreload.HIERARCHYA, tdp.node3.id);
        assertNotNull(node);
        newNodeId = node.id;
        assertNotNull(newNodeId);
        assertNotNull(node.directParentNodeIds);
        assertEquals(1, node.directParentNodeIds.size());
        assertTrue(node.directParentNodeIds.contains(tdp.node3.id));
        assertNotNull(node.parentNodeIds);
        assertEquals(2, node.parentNodeIds.size());
        assertTrue(node.parentNodeIds.contains(tdp.node3.id));
        assertTrue(node.parentNodeIds.contains(tdp.node1.id));
        assertNotNull(node.directChildNodeIds);
        assertTrue(node.directChildNodeIds.isEmpty());
        assertNotNull(node.childNodeIds);
        assertTrue(node.childNodeIds.isEmpty());

        // now check that the child links were updated correctly for the parent
        node = hierarchyService.getNodeById(tdp.node3.id);
        assertNotNull(node);
        assertEquals(tdp.node3.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(2, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(newNodeId));
        assertTrue(node.directChildNodeIds.contains(tdp.node5.id));
        assertNotNull(node.childNodeIds);
        assertEquals(2, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(newNodeId));
        assertTrue(node.childNodeIds.contains(tdp.node5.id));

        // check that adding a node without a parent puts the node at the top of the hierarchy
        // NOTE: not currently supported, so this should die
        try {
            node = hierarchyService.addNode(TestDataPreload.HIERARCHYA, null);
            fail("Should have thrown exception");
        } catch (RuntimeException e) {
            assertNotNull(e);
        }

        // check that attempting to add a node to a non-existent node fails
        try {
            node = hierarchyService.addNode(TestDataPreload.HIERARCHYA, TestDataPreload.INVALID_NODE_ID);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e);
        }
    }
    /**
     * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#removeNode(java.lang.String)}.
     * NOTE(review): removeNode appears to return the PARENT node of the removed
     * node (the assertions below check the parent's remaining children) — confirm
     * against the service implementation.
     */
    public void testRemoveNode() {
        HierarchyNode node = null;

        // remove a node with no children (node8); the returned parent (node4)
        // should keep its other two children
        node = hierarchyService.removeNode(tdp.node8.id);
        assertNotNull(node);
        assertNotNull(node.directChildNodeIds);
        assertEquals(2, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node6.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node7.id));

        // also check the root was updated correctly (descendants 7 -> 6)
        node = hierarchyService.getNodeById(tdp.node1.id);
        assertNotNull(node);
        assertEquals(tdp.node1.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(3, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node2.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node3.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node4.id));
        assertNotNull(node.childNodeIds);
        assertEquals(6, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(tdp.node2.id));
        assertTrue(node.childNodeIds.contains(tdp.node3.id));
        assertTrue(node.childNodeIds.contains(tdp.node4.id));
        assertTrue(node.childNodeIds.contains(tdp.node5.id));
        assertTrue(node.childNodeIds.contains(tdp.node6.id));
        assertTrue(node.childNodeIds.contains(tdp.node7.id));

        // remove another node (node2, a childless direct child of the root)
        node = hierarchyService.removeNode(tdp.node2.id);
        assertNotNull(node);
        assertNotNull(node.directChildNodeIds);
        assertEquals(2, node.directChildNodeIds.size());
        assertTrue(node.childNodeIds.contains(tdp.node3.id));
        assertTrue(node.childNodeIds.contains(tdp.node4.id));

        // also check the root was updated correctly (descendants 6 -> 5)
        node = hierarchyService.getNodeById(tdp.node1.id);
        assertNotNull(node);
        assertEquals(tdp.node1.id, node.id);
        assertNotNull(node.directChildNodeIds);
        assertEquals(2, node.directChildNodeIds.size());
        assertTrue(node.directChildNodeIds.contains(tdp.node3.id));
        assertTrue(node.directChildNodeIds.contains(tdp.node4.id));
        assertNotNull(node.childNodeIds);
        assertEquals(5, node.childNodeIds.size());
        assertTrue(node.childNodeIds.contains(tdp.node3.id));
        assertTrue(node.childNodeIds.contains(tdp.node4.id));
        assertTrue(node.childNodeIds.contains(tdp.node5.id));
        assertTrue(node.childNodeIds.contains(tdp.node6.id));
        assertTrue(node.childNodeIds.contains(tdp.node7.id));

        // cannot remove root node
        try {
            node = hierarchyService.removeNode(tdp.node1.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e.getMessage());
        }

        // cannot remove nodes with children
        try {
            node = hierarchyService.removeNode(tdp.node4.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e.getMessage());
        }

        try {
            node = hierarchyService.removeNode(tdp.node3.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e.getMessage());
        }

        // cannot remove nodes with multiple parents
        try {
            node = hierarchyService.removeNode(tdp.node10.id);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e.getMessage());
        }

        // cannot use invalid node id (exception)
        try {
            node = hierarchyService.removeNode(TestDataPreload.INVALID_NODE_ID);
            fail("Should have thrown exception");
        } catch (IllegalArgumentException e) {
            assertNotNull(e.getMessage());
        }

        // cannot use null node id (exception)
        try {
            node = hierarchyService.removeNode(null);
            fail("Should have thrown exception");
        } catch (NullPointerException e) {
            assertNotNull(e.getMessage());
        }
    }
/**
 * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#saveNodeMetaData(java.lang.String, java.lang.String, java.lang.String)}.
 * Covers: a normal save, null fields (previous value is kept), empty strings
 * (field is blanked out to null), and invalid/null node ids.
 */
public void testSaveNodeMetaData() {
    HierarchyNode node = null;
    // saving node data
    node = hierarchyService.saveNodeMetaData(tdp.node2.id, "Node TWO", "this is a description!", "TOKEN2");
    assertNotNull(node);
    // assertEquals takes (expected, actual) - expected first so failure messages read correctly
    assertEquals(tdp.node2.id, node.id);
    assertEquals("Node TWO", node.title);
    assertEquals("this is a description!", node.description);
    assertEquals("TOKEN2", node.permToken);
    // saving some nulls (null title keeps the previous value, empty token blanks it out)
    node = hierarchyService.saveNodeMetaData(tdp.node2.id, null, "DESC", "");
    assertNotNull(node);
    assertEquals(tdp.node2.id, node.id);
    assertEquals("Node TWO", node.title);
    assertEquals("DESC", node.description);
    assertNull(node.permToken);
    // saving all nulls (all fields should keep their previous values)
    node = hierarchyService.saveNodeMetaData(tdp.node2.id, null, null, null);
    assertNotNull(node);
    assertEquals(tdp.node2.id, node.id);
    assertEquals("Node TWO", node.title);
    assertEquals("DESC", node.description);
    assertNull(node.permToken);
    // saving empty strings (should blank everything out)
    node = hierarchyService.saveNodeMetaData(tdp.node2.id, "", "", "");
    assertNotNull(node);
    assertEquals(tdp.node2.id, node.id);
    assertNull(node.title);
    assertNull(node.description);
    assertNull(node.permToken);
    // cannot use invalid node id (exception)
    try {
        node = hierarchyService.saveNodeMetaData(TestDataPreload.INVALID_NODE_ID, null, null, null);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // cannot use null node id (exception)
    // NOTE: null ids surface as NullPointerException, unlike invalid ids (IllegalArgumentException)
    try {
        node = hierarchyService.saveNodeMetaData(null, null, null, null);
        fail("Should have thrown exception");
    } catch (NullPointerException e) {
        assertNotNull(e.getMessage());
    }
}
/**
 * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#setNodeDisabled(java.lang.String, java.lang.Boolean)}.
 * Covers: the default enabled state after a metadata save, disabling a node,
 * and the idempotence of disabling an already-disabled node.
 */
public void testSetNodeDisabled() {
    HierarchyNode node = null;
    // basic node creation, default is enabled (i.e. isDisabled is false)
    node = hierarchyService.saveNodeMetaData(tdp.node2.id, "Node TWO", "this is a description!", "TOKEN2");
    assertNotNull(node);
    // assertEquals takes (expected, actual) - expected first so failure messages read correctly
    assertEquals(tdp.node2.id, node.id);
    assertEquals(Boolean.FALSE, node.isDisabled);
    // disabling a node
    node = hierarchyService.setNodeDisabled(tdp.node2.id, Boolean.TRUE);
    assertNotNull(node);
    assertEquals(tdp.node2.id, node.id);
    assertEquals(Boolean.TRUE, node.isDisabled);
    // disabling an already-disabled node is a no-op
    node = hierarchyService.setNodeDisabled(tdp.node2.id, Boolean.TRUE);
    assertNotNull(node);
    assertEquals(tdp.node2.id, node.id);
    assertEquals(Boolean.TRUE, node.isDisabled);
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#addChildRelation(java.lang.String, java.lang.String)}.
* Covers: adding new direct children, re-adding existing children (no change),
* self-linking, cycle creation, and invalid/null node ids.
* NOTE: state is cumulative - later assertions depend on earlier additions.
*/
public void testAddChildRelation() {
HierarchyNode node = null;
// add new children
// node2 starts with no children: both direct and cumulative sets become {node6}
node = hierarchyService.addChildRelation(tdp.node2.id, tdp.node6.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(1, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node6.id));
assertNotNull(node.childNodeIds);
assertEquals(1, node.childNodeIds.size());
assertTrue(node.childNodeIds.contains(tdp.node6.id));
// node3 already had node5 as a direct child; node7 is added alongside it
node = hierarchyService.addChildRelation(tdp.node3.id, tdp.node7.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(2, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node5.id));
assertTrue(node.directChildNodeIds.contains(tdp.node7.id));
// add children which are already there
// re-adding an existing relation leaves the node's child sets unchanged
node = hierarchyService.addChildRelation(tdp.node3.id, tdp.node5.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(2, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node5.id));
assertTrue(node.directChildNodeIds.contains(tdp.node7.id));
node = hierarchyService.addChildRelation(tdp.node4.id, tdp.node7.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(3, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node6.id));
// cannot add this node as a child of itself
try {
node = hierarchyService.addChildRelation(tdp.node7.id, tdp.node7.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot create a cycle by adding a child which is already a child or parent of this node
// (should probably check distance from the root...)
try {
node = hierarchyService.addChildRelation(tdp.node7.id, tdp.node4.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.addChildRelation(tdp.node7.id, tdp.node1.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.addChildRelation(tdp.node5.id, tdp.node3.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use invalid node ids (exception)
try {
node = hierarchyService.addChildRelation(TestDataPreload.INVALID_NODE_ID, tdp.node6.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.addChildRelation(tdp.node2.id, TestDataPreload.INVALID_NODE_ID);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use null node id (exception)
// NOTE: null ids surface as NullPointerException, unlike invalid ids (IllegalArgumentException)
try {
node = hierarchyService.addChildRelation(null, tdp.node6.id);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.addChildRelation(tdp.node2.id, null);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
//fail("Not yet implemented");
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#removeChildRelation(java.lang.String, java.lang.String)}.
* Covers: removing direct child links, removing non-existent links (allowed),
* self-removal, orphan-creating removals, and invalid/null node ids.
*/
public void testRemoveChildRelation() {
HierarchyNode node = null;
// create extra relation first
// give node6 a second parent (node2) first, presumably so removing it from node4
// below does not trigger the orphan check exercised later in this test
node = hierarchyService.addChildRelation(tdp.node2.id, tdp.node6.id);
// remove a child
node = hierarchyService.removeChildRelation(tdp.node11.id, tdp.node10.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(0, node.directChildNodeIds.size());
assertNotNull(node.childNodeIds);
assertEquals(0, node.childNodeIds.size());
node = hierarchyService.removeChildRelation(tdp.node4.id, tdp.node6.id);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(2, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node7.id));
assertTrue(node.directChildNodeIds.contains(tdp.node8.id));
assertNotNull(node.childNodeIds);
assertEquals(2, node.childNodeIds.size());
assertTrue(node.childNodeIds.contains(tdp.node7.id));
assertTrue(node.childNodeIds.contains(tdp.node8.id));
// remove child which is not a child (this is ok, no exception expected)
node = hierarchyService.removeChildRelation(tdp.node3.id, tdp.node6.id);
node = hierarchyService.removeChildRelation(tdp.node3.id, tdp.node2.id);
// cannot remove myself as a child of myself
try {
node = hierarchyService.removeChildRelation(tdp.node2.id, tdp.node2.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot orphan nodes by removing a child relation (must use remove node)
try {
node = hierarchyService.removeChildRelation(tdp.node1.id, tdp.node3.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.removeChildRelation(tdp.node3.id, tdp.node5.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use invalid node ids (exception)
try {
node = hierarchyService.removeChildRelation(TestDataPreload.INVALID_NODE_ID, tdp.node6.id);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.removeChildRelation(tdp.node2.id, TestDataPreload.INVALID_NODE_ID);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use null node id (exception)
// NOTE: null ids surface as NullPointerException, unlike invalid ids (IllegalArgumentException)
try {
node = hierarchyService.removeChildRelation(null, tdp.node6.id);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
try {
node = hierarchyService.removeChildRelation(tdp.node2.id, null);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
// fail("Not yet implemented");
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#addParentRelation(java.lang.String, java.lang.String)}.
*//**
public void testAddParentRelation() {
// add new parents
// add parents which are already there
// cannot remove all parents (must leave at least one)
// cannot add parents to the root node
// cannot create a cycle by adding a parent which is already a child or parent of this node
// cannot add parents nodes which do not exist (should fail)
// cannot use invalid node id (exception)
// cannot use invalid parent node id (exception)
// cannot use null node id (exception)
fail("Not yet implemented");
}**/
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#removeParentRelation(java.lang.String, java.lang.String)}.
*//**
public void testRemoveParentRelation() {
// cannot remove all parents (must leave at least one)
fail("Not yet implemented");
}**/
/**
 * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getNodesWithToken(java.lang.String)}.
 * Checks token lookups per hierarchy, an unknown token (empty result),
 * an unknown hierarchy (exception), and a null token (exception).
 */
public void testGetNodesWithToken() {
    // token 1 in hierarchy A is carried by nodes 2, 3 and 5
    Set<String> matched = hierarchyService.getNodesWithToken(TestDataPreload.HIERARCHYA, TestDataPreload.PERM_TOKEN_1);
    assertNotNull(matched);
    assertEquals(3, matched.size());
    assertTrue(matched.contains(tdp.node2.id));
    assertTrue(matched.contains(tdp.node3.id));
    assertTrue(matched.contains(tdp.node5.id));
    // the same token in hierarchy B only matches node 10
    matched = hierarchyService.getNodesWithToken(TestDataPreload.HIERARCHYB, TestDataPreload.PERM_TOKEN_1);
    assertNotNull(matched);
    assertEquals(1, matched.size());
    assertTrue(matched.contains(tdp.node10.id));
    // token 2 in hierarchy A only matches node 4
    matched = hierarchyService.getNodesWithToken(TestDataPreload.HIERARCHYA, TestDataPreload.PERM_TOKEN_2);
    assertNotNull(matched);
    assertEquals(1, matched.size());
    assertTrue(matched.contains(tdp.node4.id));
    // an unknown token yields an empty set, not null and not an exception
    matched = hierarchyService.getNodesWithToken(TestDataPreload.HIERARCHYA, TestDataPreload.INVALID_PERM_TOKEN);
    assertNotNull(matched);
    assertEquals(0, matched.size());
    // an unknown hierarchy id is rejected
    try {
        hierarchyService.getNodesWithToken(TestDataPreload.INVALID_HIERARCHY, TestDataPreload.PERM_TOKEN_1);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // a null token is rejected with NullPointerException
    try {
        hierarchyService.getNodesWithToken(TestDataPreload.HIERARCHYA, null);
        fail("Should have thrown exception");
    } catch (NullPointerException e) {
        assertNotNull(e.getMessage());
    }
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getNodesWithTokens(java.lang.String[])}.
* Covers: looking up several tokens at once, tokens with no matches in the
* hierarchy (mapped to an empty set rather than omitted), an unknown
* hierarchy (exception), and a null token array (exception).
*/
public void testGetNodesWithTokens() {
Set<String> nodeIds;
Map<String, Set<String>> tokenNodes;
// get nodes for tokens
tokenNodes = hierarchyService.getNodesWithTokens(TestDataPreload.HIERARCHYA,
new String[] {TestDataPreload.PERM_TOKEN_1, TestDataPreload.PERM_TOKEN_2});
assertNotNull(tokenNodes);
// one map entry per requested token
assertEquals(2, tokenNodes.size());
nodeIds = tokenNodes.get(TestDataPreload.PERM_TOKEN_1);
assertEquals(3, nodeIds.size());
assertTrue(nodeIds.contains(tdp.node2.id));
assertTrue(nodeIds.contains(tdp.node3.id));
assertTrue(nodeIds.contains(tdp.node5.id));
nodeIds = tokenNodes.get(TestDataPreload.PERM_TOKEN_2);
assertEquals(1, nodeIds.size());
assertTrue(nodeIds.contains(tdp.node4.id));
// mix valid and invalid tokens
// a token with no matches in this hierarchy still gets an entry, mapped to an empty set
tokenNodes = hierarchyService.getNodesWithTokens(TestDataPreload.HIERARCHYB,
new String[] {TestDataPreload.PERM_TOKEN_1, TestDataPreload.PERM_TOKEN_2});
assertNotNull(tokenNodes);
assertEquals(2, tokenNodes.size());
nodeIds = tokenNodes.get(TestDataPreload.PERM_TOKEN_1);
assertEquals(1, nodeIds.size());
assertTrue(nodeIds.contains(tdp.node10.id));
nodeIds = tokenNodes.get(TestDataPreload.PERM_TOKEN_2);
assertEquals(0, nodeIds.size());
// attempt to get nodes for invalid token
tokenNodes = hierarchyService.getNodesWithTokens(TestDataPreload.HIERARCHYA,
new String[] {TestDataPreload.INVALID_PERM_TOKEN});
assertNotNull(tokenNodes);
assertEquals(1, tokenNodes.size());
nodeIds = tokenNodes.get(TestDataPreload.INVALID_PERM_TOKEN);
assertEquals(0, nodeIds.size());
// cannot use invalid hierarchy
try {
hierarchyService.getNodesWithTokens(TestDataPreload.INVALID_HIERARCHY,
new String[] {TestDataPreload.PERM_TOKEN_1});
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot get null token
// NOTE: a null token array triggers NullPointerException, not IllegalArgumentException
try {
hierarchyService.getNodesWithTokens(TestDataPreload.HIERARCHYA, null);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#checkUserNodePerm(java.lang.String, java.lang.String, java.lang.String)}.
* Exhaustive truth table: every (user, node) pair is checked for PERM_ONE and
* then PERM_TWO against the preloaded permission assignments.
*/
public void testCheckUserNodePerm() {
// PERM_ONE: the regular user holds it on no node
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node1.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node5.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node6.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node7.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node8.id, TestDataPreload.PERM_ONE) );
// PERM_ONE: the access user holds it only on nodes 5 and 7
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node1.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node5.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node6.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node7.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node8.id, TestDataPreload.PERM_ONE) );
// PERM_ONE: the maint user holds it on nodes 2, 4, 6, 7 and 8
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node1.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node5.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node6.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node7.id, TestDataPreload.PERM_ONE) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node8.id, TestDataPreload.PERM_ONE) );
// PERM_TWO: the regular user holds it on nodes 3 and 6
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node1.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node2.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node3.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node4.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node5.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node6.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node7.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node8.id, TestDataPreload.PERM_TWO) );
// PERM_TWO: the access user holds it only on node 8
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node1.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node2.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node3.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node4.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node5.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node6.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node7.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.ACCESS_USER_ID, tdp.node8.id, TestDataPreload.PERM_TWO) );
// PERM_TWO: the maint user holds it on nodes 2, 3 and 5
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node1.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node4.id, TestDataPreload.PERM_TWO) );
assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node5.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node6.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node7.id, TestDataPreload.PERM_TWO) );
assertFalse( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node8.id, TestDataPreload.PERM_TWO) );
// null arguments are rejected with IllegalArgumentException
// NOTE: unlike the node-mutation methods above, nulls here do NOT raise NullPointerException
try {
hierarchyService.checkUserNodePerm(null, "BBBBBB", "CCCCCCCCCCCCCCC");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
hierarchyService.checkUserNodePerm("AAAAAAAAAA", null, "CCCCCCCCCCCCCCC");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
hierarchyService.checkUserNodePerm("AAAAAAAAAA", "BBBBBB", null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getNodesForUserPerm(java.lang.String, java.lang.String)}.
* Covers: node sets per user/perm combination, unknown users/perms (empty set,
* no exception), and null arguments (IllegalArgumentException).
*/
public void testGetNodesForUserPerm() {
Set<HierarchyNode> nodes = null;
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, TestDataPreload.PERM_ONE);
assertNotNull(nodes);
assertEquals(0, nodes.size());
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, TestDataPreload.PERM_TWO);
assertNotNull(nodes);
assertEquals(2, nodes.size());
assertTrue( nodes.contains(tdp.node3) );
assertTrue( nodes.contains(tdp.node6) );
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.ACCESS_USER_ID, TestDataPreload.PERM_ONE);
assertNotNull(nodes);
assertEquals(2, nodes.size());
assertTrue( nodes.contains(tdp.node5) );
assertTrue( nodes.contains(tdp.node7) );
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.ACCESS_USER_ID, TestDataPreload.PERM_TWO);
assertNotNull(nodes);
assertEquals(1, nodes.size());
assertTrue( nodes.contains(tdp.node8) );
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
assertNotNull(nodes);
assertEquals(5, nodes.size());
assertTrue( nodes.contains(tdp.node2) );
assertTrue( nodes.contains(tdp.node4) );
assertTrue( nodes.contains(tdp.node6) );
assertTrue( nodes.contains(tdp.node7) );
assertTrue( nodes.contains(tdp.node8) );
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_TWO);
assertNotNull(nodes);
assertEquals(3, nodes.size());
assertTrue( nodes.contains(tdp.node2) );
assertTrue( nodes.contains(tdp.node3) );
assertTrue( nodes.contains(tdp.node5) );
// invalids
// unknown perms or users return an empty set rather than throwing
nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, "XXXXXXXXX");
assertNotNull(nodes);
assertEquals(0, nodes.size());
nodes = hierarchyService.getNodesForUserPerm("XXXXXXX", TestDataPreload.PERM_ONE);
assertNotNull(nodes);
assertEquals(0, nodes.size());
// null arguments are rejected with IllegalArgumentException
try {
hierarchyService.getNodesForUserPerm(null, "XXXXXXXXX");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
hierarchyService.getNodesForUserPerm("XXXXXXXX", null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getUserIdsForNodesPerm(java.lang.String[], java.lang.String)}.
* Covers: user sets per single node for each perm, unions over several nodes,
* an empty node array (empty set), and null arguments (IllegalArgumentException).
*/
public void testGetUserIdsForNodesPerm() {
Set<String> userIds = null;
// PERM_ONE, one node at a time
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node1.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node2.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node3.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node4.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node5.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.ACCESS_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node6.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node7.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(2, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
assertTrue(userIds.contains(TestDataPreload.ACCESS_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node8.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
// PERM_TWO, one node at a time
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node1.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node2.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node3.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(2, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
assertTrue(userIds.contains(TestDataPreload.USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node4.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node5.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node6.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node7.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node8.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(1, userIds.size());
assertTrue(userIds.contains(TestDataPreload.ACCESS_USER_ID));
// multiple
// several nodes at once: result is the union of the per-node user sets
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node2.id, tdp.node3.id, tdp.node4.id, tdp.node5.id}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(2, userIds.size());
assertTrue(userIds.contains(TestDataPreload.ACCESS_USER_ID));
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {tdp.node2.id, tdp.node3.id, tdp.node4.id, tdp.node5.id}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(2, userIds.size());
assertTrue(userIds.contains(TestDataPreload.USER_ID));
assertTrue(userIds.contains(TestDataPreload.MAINT_USER_ID));
// invalids
// an empty node array returns an empty set rather than throwing
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {}, TestDataPreload.PERM_ONE);
assertNotNull(userIds);
assertEquals(0, userIds.size());
userIds = hierarchyService.getUserIdsForNodesPerm(new String[] {}, TestDataPreload.PERM_TWO);
assertNotNull(userIds);
assertEquals(0, userIds.size());
// null arguments are rejected with IllegalArgumentException
try {
hierarchyService.getUserIdsForNodesPerm(null, "XXXXXXXXX");
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
hierarchyService.getUserIdsForNodesPerm(new String[] {"XXXXXXXX"}, null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
}
/**
* Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#getPermsForUserNodes(java.lang.String, java.lang.String[])}.
* Covers: perms held by a user on single nodes, the union over several nodes,
* an empty node array (empty set), and null arguments (IllegalArgumentException).
*/
public void testGetPermsForUserNodes() {
Set<String> perms = null;
// maint user's perms, one node at a time
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node1.id});
assertNotNull(perms);
assertEquals(0, perms.size());
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node2.id});
assertNotNull(perms);
assertEquals(2, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
assertTrue(perms.contains(TestDataPreload.PERM_TWO));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node3.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_TWO));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node4.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node5.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_TWO));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node6.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node7.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node8.id});
assertNotNull(perms);
assertEquals(1, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
// multiple
// several nodes at once: result is the union of the per-node perm sets
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {tdp.node3.id, tdp.node4.id, tdp.node5.id, tdp.node6.id});
assertNotNull(perms);
assertEquals(2, perms.size());
assertTrue(perms.contains(TestDataPreload.PERM_ONE));
assertTrue(perms.contains(TestDataPreload.PERM_TWO));
// invalids
// an empty node array returns an empty set rather than throwing, for any user
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.ACCESS_USER_ID, new String[] {});
assertNotNull(perms);
assertEquals(0, perms.size());
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.MAINT_USER_ID, new String[] {});
assertNotNull(perms);
assertEquals(0, perms.size());
perms = hierarchyService.getPermsForUserNodes(TestDataPreload.USER_ID, new String[] {});
assertNotNull(perms);
assertEquals(0, perms.size());
// null arguments are rejected with IllegalArgumentException
try {
hierarchyService.getPermsForUserNodes(null, new String[] {"XXXXXXXX"});
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
try {
hierarchyService.getPermsForUserNodes("XXXXXXXXXXX", null);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
}
/**
 * Test method for HierarchyServiceImpl#getUsersAndPermsForNodes.
 * Covers: the node-to-user-to-perms mapping for one node, a null node id array,
 * and an empty node id list (both rejected with IllegalArgumentException).
 */
public void testGetUsersAndPermsForNodes() {
    Map<String, Map<String, Set<String>>> map = null;
    map = hierarchyService.getUsersAndPermsForNodes(tdp.node3.id);
    assertNotNull(map);
    assertEquals(1, map.size());
    Map<String, Set<String>> userPerms = map.get(tdp.node3.id);
    // guard against a missing mapping before dereferencing it below
    assertNotNull(userPerms);
    assertEquals(2, userPerms.size());
    // assertEquals takes (expected, actual) - expected first so failure messages read correctly
    assertEquals(1, userPerms.get(TestDataPreload.USER_ID).size());
    assertEquals(1, userPerms.get(TestDataPreload.MAINT_USER_ID).size());
    // a null node id array is rejected
    try {
        hierarchyService.getUsersAndPermsForNodes((String[])null);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // an empty node id list is rejected as well
    try {
        hierarchyService.getUsersAndPermsForNodes();
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
}
/**
 * Test method for HierarchyServiceImpl#getNodesAndPermsForUser.
 * Covers: the user-to-node-to-perms mapping for one user, a null user id array,
 * and an empty user id list (both rejected with IllegalArgumentException).
 */
public void testGetNodesAndPermsForUser() {
    // the access user holds permissions on exactly three nodes
    Map<String, Map<String, Set<String>>> permsByUser =
            hierarchyService.getNodesAndPermsForUser(TestDataPreload.ACCESS_USER_ID);
    assertNotNull(permsByUser);
    assertEquals(1, permsByUser.size());
    assertEquals(3, permsByUser.get(TestDataPreload.ACCESS_USER_ID).size());
    // a null user id array is rejected
    try {
        hierarchyService.getNodesAndPermsForUser((String[])null);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // an empty user id list is rejected as well
    try {
        hierarchyService.getNodesAndPermsForUser();
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
}
/**
 * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#assignUserNodePerm(java.lang.String, java.lang.String, java.lang.String, boolean)}.
 *
 * Assigns permissions with and without cascading and checks the resulting
 * node counts against the preloaded fixture data after each mutation, then
 * verifies the null-argument contract.
 */
public void testAssignUserNodePerm() {
    // fixture preloads MAINT_USER with PERM_ONE on 5 nodes
    Set<HierarchyNode> nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(5, nodes.size());
    // re-assign an existing perm (no cascade) - should be no change
    hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE, false);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(5, nodes.size());
    // re-assign an existing perm with cascade - still no change in count
    // (presumably node4's subtree already holds PERM_ONE - per fixture data)
    hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE, true);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(5, nodes.size());
    // now add some that do not exist already
    hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE, false);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(6, nodes.size());
    assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE) );
    // cascading from node3 adds exactly one more node; node5 now has the perm
    hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, TestDataPreload.PERM_ONE, true);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(7, nodes.size());
    assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node5.id, TestDataPreload.PERM_ONE) );
    // now test adding a completely different permission
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, TestDataPreload.PERM_THREE);
    assertEquals(0, nodes.size());
    hierarchyService.assignUserNodePerm(TestDataPreload.USER_ID, tdp.node1.id, TestDataPreload.PERM_THREE, false);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, TestDataPreload.PERM_THREE);
    assertEquals(1, nodes.size());
    assertTrue( hierarchyService.checkUserNodePerm(TestDataPreload.USER_ID, tdp.node1.id, TestDataPreload.PERM_THREE) );
    // cascading from node1 reaches 8 nodes in total
    hierarchyService.assignUserNodePerm(TestDataPreload.USER_ID, tdp.node1.id, TestDataPreload.PERM_THREE, true);
    nodes = hierarchyService.getNodesForUserPerm(TestDataPreload.USER_ID, TestDataPreload.PERM_THREE);
    assertEquals(8, nodes.size());
    // null userId is rejected
    try {
        hierarchyService.assignUserNodePerm(null, tdp.node3.id, TestDataPreload.PERM_ONE, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // null nodeId is rejected
    try {
        hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, null, TestDataPreload.PERM_ONE, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // null perm is rejected
    try {
        hierarchyService.assignUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, null, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
}
/**
 * Test method for {@link org.sakaiproject.hierarchy.impl.HierarchyServiceImpl#removeUserNodePerm(java.lang.String, java.lang.String, java.lang.String, boolean)}.
 *
 * Removes permissions with and without cascading, confirms that unknown
 * users/nodes/perms are silently ignored, and checks null-argument handling.
 */
public void testRemoveUserNodePerm() {
    // fixture preloads MAINT_USER with PERM_ONE on 5 nodes
    Set<HierarchyNode> permNodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(5, permNodes.size());
    // non-cascading removal drops exactly one node
    hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE, false);
    permNodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(4, permNodes.size());
    assertFalse(hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, TestDataPreload.PERM_ONE));
    // cascading removal from node4 clears everything that remains
    hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE, true);
    permNodes = hierarchyService.getNodesForUserPerm(TestDataPreload.MAINT_USER_ID, TestDataPreload.PERM_ONE);
    assertEquals(0, permNodes.size());
    assertFalse(hierarchyService.checkUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node4.id, TestDataPreload.PERM_ONE));
    // removing unknown perms/nodes/users is a silent no-op, not a failure
    hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node2.id, "XXXXX", false);
    hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, "XXXX", "XXXXX", false);
    hierarchyService.removeUserNodePerm("XXX", "XXXX", "XXXXX", false);
    // null userId is rejected
    try {
        hierarchyService.removeUserNodePerm(null, tdp.node3.id, TestDataPreload.PERM_ONE, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // null nodeId is rejected
    try {
        hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, null, TestDataPreload.PERM_ONE, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
    // null perm is rejected
    try {
        hierarchyService.removeUserNodePerm(TestDataPreload.MAINT_USER_ID, tdp.node3.id, null, false);
        fail("Should have thrown exception");
    } catch (IllegalArgumentException e) {
        assertNotNull(e.getMessage());
    }
}
/* DISABLED: the updateChildren() test scenarios below are commented out;
   re-enable them once updateChildren is supported, or delete this block.
HierarchyNode node = null;
Set<String> children = new HashSet<String>();;
// add new children
children.add(tdp.node6.id);
node = hierarchyService.updateChildren(tdp.node2.id, children);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(1, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node6.id));
children.add(tdp.node7.id);
children.add(tdp.node8.id);
node = hierarchyService.updateChildren(tdp.node2.id, children);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(3, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node6.id));
assertTrue(node.directChildNodeIds.contains(tdp.node7.id));
assertTrue(node.directChildNodeIds.contains(tdp.node8.id));
// remove some children
children.clear();
children.add(tdp.node7.id);
children.add(tdp.node8.id);
node = hierarchyService.updateChildren(tdp.node4.id, children);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(2, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node7.id));
assertTrue(node.directChildNodeIds.contains(tdp.node8.id));
// remove all children
children.clear();
node = hierarchyService.updateChildren(tdp.node4.id, children);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(0, node.directChildNodeIds.size());
// update children to the identical set
children.clear();
children.add(tdp.node5.id);
node = hierarchyService.updateChildren(tdp.node3.id, children);
assertNotNull(node);
assertNotNull(node.directChildNodeIds);
assertEquals(1, node.directChildNodeIds.size());
assertTrue(node.directChildNodeIds.contains(tdp.node5.id));
// cannot add children nodes which do not exist (even if some are valid)
children.add(TestDataPreload.INVALID_NODE_ID);
try {
node = hierarchyService.updateChildren(tdp.node3.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot add child node which is equal to this node
children.clear();
children.add(tdp.node5.id);
children.add(tdp.node3.id);
try {
node = hierarchyService.updateChildren(tdp.node3.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
children.clear();
children.add(tdp.node3.id);
try {
node = hierarchyService.updateChildren(tdp.node3.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot remove child node so that it becomes orphaned
children.clear();
children.add(tdp.node2.id);
children.add(tdp.node4.id);
try {
node = hierarchyService.updateChildren(tdp.node1.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
children.clear();
children.add(tdp.node3.id);
children.add(tdp.node4.id);
try {
node = hierarchyService.updateChildren(tdp.node1.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use invalid node id (exception)
children.clear();
children.add(tdp.node6.id);
try {
node = hierarchyService.updateChildren(TestDataPreload.INVALID_NODE_ID, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use invalid child node id (exception)
children.clear();
children.add(tdp.node6.id);
children.add(TestDataPreload.INVALID_NODE_ID);
try {
node = hierarchyService.updateChildren(tdp.node2.id, children);
fail("Should have thrown exception");
} catch (IllegalArgumentException e) {
assertNotNull(e.getMessage());
}
// cannot use null node id (exception)
children.clear();
try {
node = hierarchyService.updateChildren(null, children);
fail("Should have thrown exception");
} catch (NullPointerException e) {
assertNotNull(e.getMessage());
}
*/
}
| |
// Copyright 2015-2016 Stanford University
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package glade.grammar.synthesize;
import glade.grammar.GrammarUtils.AlternationNode;
import glade.grammar.GrammarUtils.ConstantNode;
import glade.grammar.GrammarUtils.Context;
import glade.grammar.GrammarUtils.MultiAlternationNode;
import glade.grammar.GrammarUtils.MultiConstantNode;
import glade.grammar.GrammarUtils.Node;
import glade.grammar.GrammarUtils.RepetitionNode;
import glade.util.CharacterUtils;
import glade.util.Log;
import glade.util.CharacterUtils.CharacterGeneralization;
import glade.util.OracleUtils.DiscriminativeOracle;
import glade.util.Utils.Maybe;
import glade.util.Utils.MultivalueMap;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
 * Post-processing passes over a synthesized grammar: a flattening pass that
 * collapses nested binary alternations into {@link MultiAlternationNode}s,
 * followed by a constant-generalization pass that widens character constants
 * by querying the discriminative oracle.
 */
public class GrammarTransformer {
    /**
     * Applies both transformation passes to the grammar rooted at {@code node}
     * and returns the transformed root.
     */
    public static Node getTransform(Node node, DiscriminativeOracle oracle) {
        Node transformFlatten = getTransform(node, new FlattenTransformer());
        return getTransform(transformFlatten, new ConstantTransformer(oracle, getMultiAlternationRepetitionConstantNodes(transformFlatten)));
    }
    /**
     * Visitor over the grammar node hierarchy; children are transformed
     * before being handed to their parent's callback.
     * (Nested interfaces are implicitly static; redundant modifiers removed.)
     */
    private interface NodeTransformer {
        Node transformConstant(ConstantNode node);
        Node transformMultiConstant(MultiConstantNode node);
        Node transformAlternation(AlternationNode node, Node newFirst, Node newSecond);
        Node transformRepetition(RepetitionNode node, Node newStart, Node newRep, Node newEnd);
        Node transformMultiAlternation(MultiAlternationNode node, List<Node> newChildren);
    }
    /**
     * Bottom-up dispatch: recursively transforms each node's children, then
     * passes the node plus transformed children to {@code transformer}.
     *
     * @throws RuntimeException for unknown node subtypes
     */
    private static Node getTransform(Node node, NodeTransformer transformer) {
        if(node instanceof ConstantNode) {
            return transformer.transformConstant((ConstantNode)node);
        } else if(node instanceof MultiConstantNode) {
            return transformer.transformMultiConstant((MultiConstantNode)node);
        } else if(node instanceof AlternationNode) {
            AlternationNode altNode = (AlternationNode)node;
            Node newFirst = getTransform(altNode.first, transformer);
            Node newSecond = getTransform(altNode.second, transformer);
            return transformer.transformAlternation(altNode, newFirst, newSecond);
        } else if(node instanceof MultiAlternationNode) {
            List<Node> newChildren = new ArrayList<Node>();
            for(Node child : node.getChildren()) {
                newChildren.add(getTransform(child, transformer));
            }
            return transformer.transformMultiAlternation((MultiAlternationNode)node, newChildren);
        } else if(node instanceof RepetitionNode) {
            RepetitionNode repNode = (RepetitionNode)node;
            Node newStart = getTransform(repNode.start, transformer);
            Node newRep = getTransform(repNode.rep, transformer);
            Node newEnd = getTransform(repNode.end, transformer);
            return transformer.transformRepetition(repNode, newStart, newRep, newEnd);
        } else {
            throw new RuntimeException("Invalid node type: " + node.getClass().getName());
        }
    }
    /**
     * Widens a constant node into a multi-constant node: for each character
     * position, every generalization triggered by the current character is
     * probed; when the oracle accepts all of the generalization's check
     * strings in context, the generalized character set is admitted at that
     * position.
     */
    private static MultiConstantNode generalizeConstant(ConstantNode node, DiscriminativeOracle oracle) {
        String example = node.getData().example;
        Context context = node.getData().context;
        if(example.length() != 0) {
            Log.info("GENERALIZING CONST: " + example + " ## " + context.pre + " ## " + context.post);
        }
        List<List<Character>> characterOptions = new ArrayList<List<Character>>();
        List<List<Character>> characterChecks = new ArrayList<List<Character>>();
        for(int i=0; i<example.length(); i++) {
            List<Character> characterOption = new ArrayList<Character>();
            List<Character> characterCheck = new ArrayList<Character>();
            char curC = example.charAt(i);
            // context for position i: everything before / after the character
            Context curContext = new Context(context, example.substring(0, i), example.substring(i+1), example.substring(0, i), example.substring(i+1));
            // the original character is always admitted
            characterOption.add(curC);
            characterCheck.add(curC);
            for(CharacterGeneralization generalization : CharacterUtils.getGeneralizations()) {
                if(generalization.triggers.contains(curC)) {
                    List<String> checks = new ArrayList<String>();
                    for(char c : generalization.checks) {
                        if(curC != c) {
                            checks.add("" + c);
                        }
                    }
                    if(GrammarSynthesis.getCheck(oracle, curContext, checks)) {
                        for(char c : generalization.characters) {
                            if(curC != c) {
                                characterOption.add(c);
                            }
                        }
                        for(char c : generalization.checks) {
                            if(curC != c) {
                                characterCheck.add(c);
                            }
                        }
                    }
                }
            }
            characterOptions.add(characterOption);
            characterChecks.add(characterCheck);
        }
        return new MultiConstantNode(node.getData(), characterOptions, characterChecks);
    }
    /** Returns true if every character of example is admitted position-wise by mconstNode. */
    private static boolean isContained(String example, MultiConstantNode mconstNode) {
        if(example.length() != mconstNode.characterOptions.size()) {
            return false;
        }
        for(int i=0; i<example.length(); i++) {
            if(!mconstNode.characterOptions.get(i).contains(example.charAt(i))) {
                return false;
            }
        }
        return true;
    }
    /** Returns true if example is contained in any of the given multi-constant nodes. */
    private static boolean isContained(String example, List<MultiConstantNode> mconstNodes) {
        for(MultiConstantNode mconstNode : mconstNodes) {
            if(isContained(example, mconstNode)) {
                return true;
            }
        }
        return false;
    }
    /**
     * Rebuilds a multi-alternation whose children are all constants:
     * each constant child is generalized unless an already-generalized
     * sibling subsumes its example (deduplication).
     */
    private static MultiAlternationNode generalizeMultiAlternationConstant(MultiAlternationNode node, MultivalueMap<MultiAlternationNode,ConstantNode> multiAlternationNodeConstantChildren, DiscriminativeOracle oracle) {
        List<MultiConstantNode> curConsts = new ArrayList<MultiConstantNode>();
        Log.info("GENERALIZING MULTI ALT: " + node.getData().example);
        for(Node child : multiAlternationNodeConstantChildren.get(node)) {
            if(!isContained(child.getData().example, curConsts)) {
                curConsts.add(generalizeConstant((ConstantNode)child, oracle));
            }
        }
        return new MultiAlternationNode(node.getData(), new ArrayList<Node>(curConsts));
    }
    /**
     * Second pass: generalizes constants via the oracle. Constants that are
     * children of a constant-only multi-alternation are handled at the
     * multi-alternation level instead (and ignored individually).
     */
    private static class ConstantTransformer implements NodeTransformer {
        private final DiscriminativeOracle oracle;
        private final MultivalueMap<MultiAlternationNode,ConstantNode> multiAlternationNodeConstantChildren;
        // fixed stray double semicolon on this field declaration
        private final Set<ConstantNode> ignoredConstants = new HashSet<ConstantNode>();
        private ConstantTransformer(DiscriminativeOracle oracle, MultivalueMap<MultiAlternationNode,ConstantNode> multiAlternationNodeConstantChildren) {
            this.oracle = oracle;
            this.multiAlternationNodeConstantChildren = multiAlternationNodeConstantChildren;
            for(MultiAlternationNode maltNode : multiAlternationNodeConstantChildren.keySet()) {
                this.ignoredConstants.addAll(multiAlternationNodeConstantChildren.get(maltNode));
            }
        }
        @Override
        public Node transformConstant(ConstantNode node) {
            return this.ignoredConstants.contains(node) ? node : generalizeConstant(node, this.oracle);
        }
        @Override
        public Node transformMultiConstant(MultiConstantNode node) {
            // flattening never produces multi-constants before this pass
            throw new RuntimeException("Invalid node: " + node);
        }
        @Override
        public Node transformAlternation(AlternationNode node, Node newFirst, Node newSecond) {
            return new AlternationNode(node.getData(), newFirst, newSecond);
        }
        @Override
        public Node transformMultiAlternation(MultiAlternationNode node, List<Node> newChildren) {
            return this.multiAlternationNodeConstantChildren.containsKey(node) ? generalizeMultiAlternationConstant(node, this.multiAlternationNodeConstantChildren, this.oracle) : new MultiAlternationNode(node.getData(), newChildren);
        }
        @Override
        public Node transformRepetition(RepetitionNode node, Node newStart, Node newRep, Node newEnd) {
            return new RepetitionNode(node.getData(), newStart, newRep, newEnd);
        }
    }
    /**
     * First pass: flattens nested binary alternations so that an alternation's
     * multi-alternation children are merged into a single multi-alternation.
     */
    private static class FlattenTransformer implements NodeTransformer {
        @Override
        public Node transformConstant(ConstantNode node) {
            return node;
        }
        @Override
        public Node transformMultiConstant(MultiConstantNode node) {
            return node;
        }
        @Override
        public Node transformAlternation(AlternationNode node, Node newFirst, Node newSecond) {
            List<Node> newChildren = new ArrayList<Node>();
            if(newFirst instanceof MultiAlternationNode) {
                newChildren.addAll(((MultiAlternationNode)newFirst).getChildren());
            } else {
                newChildren.add(newFirst);
            }
            if(newSecond instanceof MultiAlternationNode) {
                newChildren.addAll(((MultiAlternationNode)newSecond).getChildren());
            } else {
                newChildren.add(newSecond);
            }
            return new MultiAlternationNode(node.getData(), newChildren);
        }
        @Override
        public Node transformMultiAlternation(MultiAlternationNode node, List<Node> newChildren) {
            // the input grammar contains only binary alternations
            throw new RuntimeException("Invalid node: " + node);
        }
        @Override
        public Node transformRepetition(RepetitionNode node, Node newStart, Node newRep, Node newEnd) {
            return new RepetitionNode(node.getData(), newStart, newRep, newEnd);
        }
    }
    /**
     * Recursively collects multi-alternation nodes (under a repetition, per
     * GrammarSynthesis.getMultiAlternationRepetitionConstantChildren) whose
     * children are all constants, mapping each to those constant children.
     */
    private static void getMultiAlternationRepetitionConstantNodesHelper(Node node, MultivalueMap<MultiAlternationNode,ConstantNode> result, boolean isParentRep) {
        Maybe<List<Node>> constantChildren = GrammarSynthesis.getMultiAlternationRepetitionConstantChildren(node, isParentRep);
        if(constantChildren.hasT()) {
            for(Node child : constantChildren.getT()) {
                result.add((MultiAlternationNode)node, (ConstantNode)child);
            }
        } else if(node instanceof RepetitionNode) {
            RepetitionNode repNode = (RepetitionNode)node;
            // only the rep sub-node counts as being "under a repetition"
            getMultiAlternationRepetitionConstantNodesHelper(repNode.start, result, false);
            getMultiAlternationRepetitionConstantNodesHelper(repNode.rep, result, true);
            getMultiAlternationRepetitionConstantNodesHelper(repNode.end, result, false);
        } else {
            for(Node child : node.getChildren()) {
                getMultiAlternationRepetitionConstantNodesHelper(child, result, false);
            }
        }
    }
    /** Collects the map for the whole tree rooted at {@code root}. */
    private static MultivalueMap<MultiAlternationNode,ConstantNode> getMultiAlternationRepetitionConstantNodes(Node root) {
        MultivalueMap<MultiAlternationNode,ConstantNode> result = new MultivalueMap<MultiAlternationNode,ConstantNode>();
        getMultiAlternationRepetitionConstantNodesHelper(root, result, false);
        return result;
    }
}
| |
/*
* Copyright 1997-2011 teatrove.org
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package openmarker.trove.classfile;
import java.beans.IntrospectionException;
import java.beans.Introspector;
import java.beans.MethodDescriptor;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.lang.reflect.Type;
import java.lang.reflect.TypeVariable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import openmarker.trove.classfile.generics.GenericTypeFactory;
import openmarker.trove.classfile.generics.TypeVariableDesc;
/**
* A class used to create Java class files. Call the writeTo method
* to produce a class file.
*
* <p>See <i>The Java Virtual Machine Specification</i> (ISBN 0-201-63452-X)
* for information on how class files are structured. Section 4.1 describes
* the ClassFile structure.
*
* @author Brian S O'Neill, Nick Hagan
*/
public class ClassFile {
    // every class file begins with this magic number (JVM spec 4.1)
    private static final int MAGIC = 0xCAFEBABE;
    // NOTE(review): major version 50 is the Java 6 class-file version
    // (JDK 1.1 is 45); the constant name looks stale - confirm the
    // intended target version.
    private static final int JDK1_1_MAJOR_VERSION = 50;
    private static final int JDK1_1_MINOR_VERSION = 0;
    private int mMajorVersion = JDK1_1_MAJOR_VERSION;
    private int mMinorVersion = JDK1_1_MINOR_VERSION;
    private final String mClassName;
    private final String mSuperClassName;
    // short name when this is a non-anonymous inner class
    private String mInnerClassName;
    // lazily created by getType()
    private TypeDesc mType;
    private ConstantPool mCp;
    private Modifiers mModifiers;
    private ConstantClassInfo mThisClass;
    private ConstantClassInfo mSuperClass;
    // Holds ConstantClassInfo objects, one per implemented interface.
    private List<ConstantClassInfo> mInterfaces =
    new ArrayList<ConstantClassInfo>(2);
    // interface names, for duplicate detection in addInterface
    private Set<String> mInterfaceSet = new HashSet<String>(7);
    // Holds field, method and attribute objects.
    private List<FieldInfo> mFields = new ArrayList<FieldInfo>();
    private List<MethodInfo> mMethods = new ArrayList<MethodInfo>();
    private List<Attribute> mAttributes = new ArrayList<Attribute>();
    private SourceFileAttr mSource;
    private List<ClassFile> mInnerClasses;
    private int mAnonymousInnerClassCount = 0;
    private InnerClassesAttr mInnerClassesAttr;
    // Is non-null for inner classes.
    private ClassFile mOuterClass;
    // List of superclass and interface instances; consulted by
    // lookupTypeVariables when mirroring reflected methods.
    private Set<Class<?>> mParentClasses = new HashSet<Class<?>>();
    /**
     * By default, the ClassFile defines public, non-final, concrete classes.
     * This constructor creates a ClassFile for a class that extends
     * java.lang.Object.
     * <p>
     * Use the {@link #getModifiers modifiers} to change the default
     * modifiers for this class or to turn it into an interface.
     *
     * @param className Full class name of the form ex: "java.lang.String".
     */
    public ClassFile(String className) {
        // a null superclass name is substituted with java.lang.Object
        // by the target constructor
        this(className, (String)null);
    }
    /**
     * By default, the ClassFile defines public, non-final, concrete classes.
     * <p>
     * Use the {@link #getModifiers modifiers} to change the default
     * modifiers for this class or to turn it into an interface.
     *
     * @param className Full class name of the form ex: "java.lang.String".
     * @param superClass Super class.
     */
    public ClassFile(String className, Class<?> superClass) {
        this(className, superClass.getName());
        // remember the actual Class so type variables can be resolved later
        this.mParentClasses.add(superClass);
    }
/**
* By default, the ClassFile defines public, non-final, concrete classes.
* <p>
* Use the {@link #getModifiers modifiers} to change the default
* modifiers for this class or to turn it into an interface.
*
* @param className Full class name of the form ex: "java.lang.String".
* @param superClassName Full super class name.
*/
public ClassFile(String className, String superClassName) {
if (superClassName == null) {
if (!className.equals(Object.class.getName())) {
superClassName = Object.class.getName();
}
}
mCp = new ConstantPool();
// public, non-final, concrete class
mModifiers = new Modifiers(Modifier.PUBLIC);
mThisClass = ConstantClassInfo.make(mCp, className);
mSuperClass = ConstantClassInfo.make(mCp, superClassName);
mClassName = className;
mSuperClassName = superClassName;
}
/**
* Used to construct a ClassFile when read from a stream.
*/
private ClassFile(ConstantPool cp, Modifiers modifiers,
ConstantClassInfo thisClass,
ConstantClassInfo superClass,
ClassFile outerClass) {
mCp = cp;
mModifiers = modifiers;
mThisClass = thisClass;
mSuperClass = superClass;
mClassName = thisClass.getType().getRootName();
if (superClass == null) {
mSuperClassName = null;
}
else {
mSuperClassName = superClass.getType().getRootName();
}
mOuterClass = outerClass;
}
    /** Returns the full class name, e.g. "java.lang.String". */
    public String getClassName() {
        return mClassName;
    }
    /** Returns the full superclass name; may be null (e.g. for java.lang.Object). */
    public String getSuperClassName() {
        return mSuperClassName;
    }
    /**
     * Returns a TypeDesc for the type of this ClassFile.
     */
    public TypeDesc getType() {
        // lazily created and cached
        if (mType == null) {
            mType = TypeDesc.forClass(mClassName);
        }
        return mType;
    }
    /** Returns the class-level modifiers; mutate to change access or make an interface. */
    public Modifiers getModifiers() {
        return mModifiers;
    }
/**
* Returns the names of all the interfaces that this class implements.
*/
public String[] getInterfaces() {
int size = mInterfaces.size();
String[] names = new String[size];
for (int i=0; i<size; i++) {
names[i] = mInterfaces.get(i)
.getType().getRootName();
}
return names;
}
/**
* Returns all the fields defined in this class.
*/
public FieldInfo[] getFields() {
FieldInfo[] fields = new FieldInfo[mFields.size()];
return mFields.toArray(fields);
}
/**
* Returns all the methods defined in this class, not including
* constructors and static initializers.
*/
public MethodInfo[] getMethods() {
int size = mMethods.size();
List<MethodInfo> methodsOnly = new ArrayList<MethodInfo>(size);
for (int i=0; i<size; i++) {
MethodInfo method = mMethods.get(i);
String name = method.getName();
if (!"<init>".equals(name) && !"<clinit>".equals(name)) {
methodsOnly.add(method);
}
}
MethodInfo[] methodsArray = new MethodInfo[methodsOnly.size()];
return methodsOnly.toArray(methodsArray);
}
/**
* Returns all the constructors defined in this class.
*/
public MethodInfo[] getConstructors() {
int size = mMethods.size();
List<MethodInfo> ctorsOnly = new ArrayList<MethodInfo>(size);
for (int i=0; i<size; i++) {
MethodInfo method = mMethods.get(i);
if ("<init>".equals(method.getName())) {
ctorsOnly.add(method);
}
}
MethodInfo[] ctorsArray = new MethodInfo[ctorsOnly.size()];
return ctorsOnly.toArray(ctorsArray);
}
/**
* Returns the static initializer defined in this class or null if there
* isn't one.
*/
public MethodInfo getInitializer() {
int size = mMethods.size();
for (int i=0; i<size; i++) {
MethodInfo method = mMethods.get(i);
if ("<clinit>".equals(method.getName())) {
return method;
}
}
return null;
}
/**
* Returns all the inner classes defined in this class. If no inner classes
* are defined, then an array of length zero is returned.
*/
public ClassFile[] getInnerClasses() {
if (mInnerClasses == null) {
return new ClassFile[0];
}
ClassFile[] innerClasses = new ClassFile[mInnerClasses.size()];
return mInnerClasses.toArray(innerClasses);
}
    /**
     * Returns true if this ClassFile represents an inner class.
     */
    public boolean isInnerClass() {
        // mOuterClass is only set for inner classes
        return mOuterClass != null;
    }
    /**
     * If this ClassFile represents a non-anonymous inner class, returns its
     * short inner class name; otherwise null.
     */
    public String getInnerClassName() {
        return mInnerClassName;
    }
    /**
     * Returns the enclosing ClassFile, or null if this ClassFile does not
     * represent an inner class.
     *
     * @see #isInnerClass()
     */
    public ClassFile getOuterClass() {
        return mOuterClass;
    }
/**
* Returns a value indicating how deeply nested an inner class is with
* respect to its outermost enclosing class. For top level classes, 0
* is returned. For first level inner classes, 1 is returned, etc.
*/
public int getClassDepth() {
int depth = 0;
ClassFile outer = mOuterClass;
while (outer != null) {
depth++;
outer = outer.mOuterClass;
}
return depth;
}
/**
* Returns the source file of this class file or null if not set.
*/
public String getSourceFile() {
if (mSource == null) {
return null;
}
else {
return mSource.getFileName();
}
}
public boolean isSynthetic() {
for (int i = mAttributes.size(); --i >= 0; ) {
Object obj = mAttributes.get(i);
if (obj instanceof SyntheticAttr) {
return true;
}
}
return false;
}
public boolean isDeprecated() {
for (int i = mAttributes.size(); --i >= 0; ) {
Object obj = mAttributes.get(i);
if (obj instanceof DeprecatedAttr) {
return true;
}
}
return false;
}
    /**
     * Provides access to the ClassFile's ConstantPool.
     *
     * @return The constant pool for this class file.
     */
    public ConstantPool getConstantPool() {
        return mCp;
    }
/**
* Add an interface that this class implements.
*
* @param interfaceName Full interface name.
*/
public void addInterface(String interfaceName) {
if (!mInterfaceSet.contains(interfaceName)) {
mInterfaces.add(ConstantClassInfo.make(mCp, interfaceName));
mInterfaceSet.add(interfaceName);
}
}
    /**
     * Add an interface that this class implements. The Class instance is
     * also remembered in mParentClasses so lookupTypeVariables can resolve
     * type variables declared by it.
     */
    public void addInterface(Class<?> iface) {
        addInterface(iface.getName());
        this.mParentClasses.add(iface);
    }
    /**
     * Add a field to this class.
     *
     * @param modifiers access modifiers for the field
     * @param fieldName name of the field
     * @param type declared type of the field
     * @return the newly added FieldInfo
     */
    public FieldInfo addField(Modifiers modifiers,
                              String fieldName,
                              TypeDesc type) {
        FieldInfo fi = new FieldInfo(this, modifiers, fieldName, type);
        mFields.add(fi);
        return fi;
    }
    /**
     * Add a method to this class.
     *
     * @param ret Is null if method returns void.
     * @param params May be null if method accepts no parameters.
     */
    public MethodInfo addMethod(Modifiers modifiers,
                                String methodName,
                                TypeDesc ret,
                                TypeDesc... params) {
        // no explicit generic type parameters
        return addMethod(modifiers, methodName, null, ret, params);
    }
    /**
     * Add a method with explicit generic type parameters.
     *
     * @param typeParams type variables declared by the method; may be null
     * @param ret Is null if method returns void.
     * @param params May be null if method accepts no parameters.
     */
    public MethodInfo addMethod(Modifiers modifiers,
                                String methodName,
                                TypeVariableDesc[] typeParams,
                                TypeDesc ret,
                                TypeDesc[] params) {
        MethodDesc md = MethodDesc.forArguments(ret, params);
        SignatureDesc sd = SignatureDesc.forMethod(typeParams, ret, params);
        return addMethod(modifiers, methodName, md, sd);
    }
    /**
     * Add a method to this class, supplying parameter names.
     *
     * @param ret Is null if method returns void.
     * @param params May be null if method accepts no parameters.
     * @param paramNames names for the parameters, parallel to params
     */
    public MethodInfo addMethod(Modifiers modifiers,
                                String methodName,
                                TypeDesc ret,
                                TypeDesc[] params,
                                String[] paramNames) {
        // no explicit generic type parameters
        return addMethod(modifiers, methodName, null, ret, params, paramNames);
    }
    /**
     * Add a method with explicit generic type parameters and parameter names.
     *
     * @param typeParams type variables declared by the method; may be null
     */
    public MethodInfo addMethod(Modifiers modifiers,
                                String methodName,
                                TypeVariableDesc[] typeParams,
                                TypeDesc ret,
                                TypeDesc[] params,
                                String[] paramNames) {
        MethodDesc md = MethodDesc.forArguments(ret, params, paramNames);
        SignatureDesc sd = SignatureDesc.forMethod(typeParams, ret, params);
        return addMethod(modifiers, methodName, md, sd);
    }
    /**
     * Add a method to this class. This is the overload all other addMethod
     * variants ultimately funnel through.
     *
     * @param md descriptor of the method's return and parameter types
     * @param sd generic signature for the method
     * @return the newly added MethodInfo
     */
    public MethodInfo addMethod(Modifiers modifiers,
                                String methodName,
                                MethodDesc md,
                                SignatureDesc sd) {
        MethodInfo mi = new MethodInfo(this, modifiers, methodName, md, sd);
        mMethods.add(mi);
        return mi;
    }
/**
* Add a method to this class. This method is handy for implementing
* methods defined by a pre-existing interface.
*/
public MethodInfo addMethod(Method method) {
Modifiers modifiers = new Modifiers(method.getModifiers());
modifiers.setAbstract(this.getModifiers().isInterface());
TypeVariableDesc[] typeParams = lookupTypeVariables(method);
TypeDesc ret = TypeDesc.forClass(method.getReturnType(),
method.getGenericReturnType());
MethodDescriptor methodDescriptor = lookupMethodDescriptor(method);
Class<?>[] paramClasses = method.getParameterTypes();
Type[] paramTypes = method.getGenericParameterTypes();
TypeDesc[] params = new TypeDesc[paramClasses.length];
String[] paramNames = new String[paramClasses.length];
for (int i = 0; i < paramClasses.length; i++) {
params[i] = TypeDesc.forClass(paramClasses[i], paramTypes[i]);
if(methodDescriptor != null) {
paramNames[i] =
methodDescriptor.getParameterDescriptors()[i].getName();
} else {
paramNames[i] = "param$" + i;
}
}
MethodInfo mi = addMethod(modifiers, method.getName(), typeParams,
ret, params, paramNames);
// exception stuff...
// TODO: generic exceptions
Class<?>[] exceptions = method.getExceptionTypes();
for (int i=0; i<exceptions.length; i++) {
mi.addException(exceptions[i].getName());
}
return mi;
}
    /**
     * Add a method to this class. This method is handy for implementing
     * methods defined by a pre-existing interface.
     *
     * @param method reflected method to mirror in this class file
     * @param returnType return type to use in place of the declared one
     * @param paramClasses parameter types to use in place of the declared ones
     * @return the newly added MethodInfo, with the method's declared
     * exceptions copied over
     */
    public MethodInfo addMethod(Method method, Class<?> returnType,
                                Class<?>... paramClasses) {
        Modifiers modifiers = new Modifiers(method.getModifiers());
        // methods on an interface class file must be abstract
        modifiers.setAbstract(this.getModifiers().isInterface());
        TypeVariableDesc[] typeParams = lookupTypeVariables(method);
        TypeDesc ret = TypeDesc.forClass(returnType,
            method.getGenericReturnType());
        MethodDescriptor methodDescriptor = lookupMethodDescriptor(method);
        Type[] paramTypes = method.getGenericParameterTypes();
        TypeDesc[] params = new TypeDesc[paramClasses.length];
        String[] paramNames = new String[paramClasses.length];
        for (int i = 0; i < paramClasses.length; i++) {
            params[i] = TypeDesc.forClass(paramClasses[i], paramTypes[i]);
            // prefer real parameter names from the bean MethodDescriptor
            // when available, falling back to synthetic param$N names
            if(methodDescriptor != null) {
                paramNames[i] =
                    methodDescriptor.getParameterDescriptors()[i].getName();
            } else {
                paramNames[i] = "param$" + i;
            }
        }
        MethodInfo mi = addMethod(modifiers, method.getName(), typeParams,
            ret, params, paramNames);
        // exception stuff...
        // TODO: generic exceptions
        Class<?>[] exceptions = method.getExceptionTypes();
        for (int i=0; i<exceptions.length; i++) {
            mi.addException(exceptions[i].getName());
        }
        return mi;
    }
/**
 * Collects the type variables in scope for the given method: the declaring
 * class's type parameters (only when the declaring class is outside this
 * class's parent hierarchy) overlaid by the method's own type parameters.
 * Insertion order is preserved via a LinkedHashMap so the returned array
 * is deterministic.
 */
private TypeVariableDesc[] lookupTypeVariables(Method method) {
Map<String, TypeVariableDesc> args =
new LinkedHashMap<String, TypeVariableDesc>();
// TODO: better handle this by reading each return type and param
// type and analyzing for any type variables and attempting to resolve
// said type variables into declaring class type variables by looking
// up actual tree for the proper root type (ie: GenericType.getRaw):
//
// per ret type and param
// loop through per type variable
// if type variable declaring class is this class, do nothing
// else if declaring class is parent
// per type var of active class
// per interface/super
// if contains same type var
// get class def and associated type var index
// if class equals declaring class
// if type matches, use type var of initial var
// else keep walking
// if found, use that type
// else, declare
// check if declaring class of method within immediate hiearchy
// and assume class file type parameters match (see TODO above on how
// this really should work)
boolean valid = false;
Class<?> declaringClass = method.getDeclaringClass();
for (Class<?> clazz : mParentClasses) {
if (declaringClass.isAssignableFrom(clazz)) {
valid = true;
break;
}
}
// pull in class instances first if not in hiearchy
if (!valid) {
TypeVariable<?>[] cargs = declaringClass.getTypeParameters();
for (TypeVariable<?> carg : cargs) {
args.put(carg.getName(),
(TypeVariableDesc) GenericTypeFactory.fromType(carg));
}
}
// pull in method instances overriding class level
// NOTE: remove before put so a method-level variable that shadows a
// class-level one also moves to the end of the LinkedHashMap ordering.
TypeVariable<?>[] cargs = method.getTypeParameters();
for (TypeVariable<?> carg : cargs) {
args.remove(carg.getName());
args.put(carg.getName(),
(TypeVariableDesc) GenericTypeFactory.fromType(carg));
}
// return array
return args.values().toArray(new TypeVariableDesc[args.size()]);
}
/**
 * Locates the java.beans MethodDescriptor for the given method via bean
 * introspection, or returns null when the introspector reports none.
 * Callers treat a null result as "no real parameter names available" and
 * fall back to synthetic names.
 *
 * @throws RuntimeException if bean introspection itself fails
 */
private static MethodDescriptor lookupMethodDescriptor(Method method) {
    try {
        for (MethodDescriptor candidate :
                 Introspector.getBeanInfo(method.getDeclaringClass()).getMethodDescriptors()) {
            // Use equals rather than ==: reflection hands out fresh Method
            // copies, so an identity check can miss a matching descriptor.
            if (method.equals(candidate.getMethod())) {
                return candidate;
            }
        }
    } catch (IntrospectionException e) {
        throw new RuntimeException("Unable to find MethodDescriptor for method "+method, e);
    }
    return null;
}
/**
 * Add a constructor to this class, generating default parameter names.
 *
 * @param params May be null if constructor accepts no parameters.
 */
public MethodInfo addConstructor(Modifiers modifiers,
                                 TypeDesc... params) {
    String[] names = MethodDesc.createGenericParameterNames(params);
    return addConstructor(modifiers, params, names);
}
/**
 * Add a constructor to this class with explicit parameter names.
 *
 * @param params May be null if constructor accepts no parameters.
 * @param paramNames names matching the params array
 */
public MethodInfo addConstructor(Modifiers modifiers,
                                 TypeDesc[] params,
                                 String[] paramNames) {
    // Constructors have no return type; the JVM name is always "<init>".
    MethodDesc desc = MethodDesc.forArguments(null, params, paramNames);
    MethodInfo ctor = new MethodInfo(this, modifiers, "<init>", desc, null);
    mMethods.add(ctor);
    return ctor;
}
/**
 * Adds a public, no-arg constructor with the code buffer properly defined:
 * it simply invokes the superclass constructor and returns.
 */
public MethodInfo addDefaultConstructor() {
    Modifiers access = new Modifiers();
    access.setPublic(true);
    MethodInfo ctor = addConstructor(access, null, null);
    CodeBuilder code = new CodeBuilder(ctor);
    code.loadThis();
    code.invokeSuperConstructor();
    code.returnVoid();
    return ctor;
}
/**
 * Add a static initializer to this class (the JVM "<clinit>" method).
 */
public MethodInfo addInitializer() {
    Modifiers flags = new Modifiers();
    flags.setStatic(true);
    MethodDesc desc = MethodDesc.forArguments(null, null, null);
    MethodInfo init = new MethodInfo(this, flags, "<clinit>", desc, null);
    mMethods.add(init);
    return init;
}
/**
 * Add an inner class to this class. By default, inner classes are private
 * static. The inner class extends java.lang.Object.
 *
 * @param innerClassName Optional short inner class name.
 */
public ClassFile addInnerClass(String innerClassName) {
    String superClassName = null;
    return addInnerClass(innerClassName, superClassName);
}
/**
 * Add an inner class to this class. By default, inner classes are private
 * static.
 *
 * @param innerClassName Optional short inner class name.
 * @param superClass Super class.
 */
public ClassFile addInnerClass(String innerClassName, Class<?> superClass) {
    String superName = superClass.getName();
    return addInnerClass(innerClassName, superName);
}
/**
 * Add an inner class to this class. By default, inner classes are private
 * static.
 *
 * The new inner class is registered in three places: this class's inner
 * class list, this class's InnerClasses attribute, and the inner class's
 * own InnerClasses attribute.
 *
 * @param innerClassName Optional short inner class name; when null an
 * anonymous name "Outer$N" is synthesized from a running counter.
 * @param superClassName Full super class name.
 */
public ClassFile addInnerClass(String innerClassName,
String superClassName) {
String fullInnerClassName;
if (innerClassName == null) {
fullInnerClassName =
mClassName + '$' + (++mAnonymousInnerClassCount);
}
else {
fullInnerClassName = mClassName + '$' + innerClassName;
}
ClassFile inner = new ClassFile(fullInnerClassName, superClassName);
Modifiers access = inner.getModifiers();
access.setPrivate(true);
access.setStatic(true);
inner.mInnerClassName = innerClassName;
inner.mOuterClass = this;
// Lazily created; most classes declare no inner classes.
if (mInnerClasses == null) {
mInnerClasses = new ArrayList<ClassFile>();
}
mInnerClasses.add(inner);
// Record the inner class in this, the outer class.
// NOTE: addAttribute assigns mInnerClassesAttr as a side effect, which
// the dereference below depends on.
if (mInnerClassesAttr == null) {
addAttribute(new InnerClassesAttr(mCp));
}
mInnerClassesAttr.addInnerClass(fullInnerClassName, mClassName,
innerClassName, access);
// Record the inner class in itself.
inner.addAttribute(new InnerClassesAttr(inner.getConstantPool()));
inner.mInnerClassesAttr.addInnerClass(fullInnerClassName, mClassName,
innerClassName, access);
return inner;
}
/**
 * Set the source file of this class file by adding a source file
 * attribute. The source doesn't actually have to be a file,
 * but the virtual machine spec names the attribute "SourceFile_attribute".
 */
public void setSourceFile(String fileName) {
    SourceFileAttr attr = new SourceFileAttr(mCp, fileName);
    addAttribute(attr);
}
/**
 * Set the signature of this class file to include generics info per JDK 5.
 */
public void setSignature(String signature) {
    SignatureAttr attr = new SignatureAttr(mCp, signature);
    addAttribute(attr);
}
/**
 * Mark this class as being synthetic by adding a special attribute.
 */
public void markSynthetic() {
    SyntheticAttr attr = new SyntheticAttr(mCp);
    addAttribute(attr);
}
/**
 * Mark this class as being deprecated by adding a special attribute.
 */
public void markDeprecated() {
    DeprecatedAttr attr = new DeprecatedAttr(mCp);
    addAttribute(attr);
}
/**
 * Add an attribute to this class. SourceFile and InnerClasses attributes
 * are tracked as singletons: adding a new one displaces any previously
 * added instance from the attribute list.
 */
public void addAttribute(Attribute attr) {
    if (attr instanceof SourceFileAttr) {
        if (mSource != null) {
            mAttributes.remove(mSource);
        }
        mSource = (SourceFileAttr) attr;
    } else if (attr instanceof InnerClassesAttr) {
        if (mInnerClassesAttr != null) {
            mAttributes.remove(mInnerClassesAttr);
        }
        mInnerClassesAttr = (InnerClassesAttr) attr;
    }
    mAttributes.add(attr);
}
/** Returns all attributes of this class, in insertion order. */
public Attribute[] getAttributes() {
    return mAttributes.toArray(new Attribute[mAttributes.size()]);
}
/**
 * Sets the version to use when writing the generated ClassFile. Currently,
 * only version 45, 3 is supported, and is set by default.
 *
 * @exception IllegalArgumentException when the version isn't supported
 */
public void setVersion(int major, int minor)
    throws IllegalArgumentException {
    if (major == JDK1_1_MAJOR_VERSION && minor == JDK1_1_MINOR_VERSION) {
        mMajorVersion = major;
        mMinorVersion = minor;
    } else {
        throw new IllegalArgumentException("Version " + major + ", " +
                                           minor + " is not supported");
    }
}
/**
 * Returns all the runtime invisible annotations defined for this class
 * file, or an empty array if none. Scans the attribute list back to front
 * and returns the annotations of the last matching attribute.
 */
public Annotation[] getRuntimeInvisibleAnnotations() {
    for (int i = mAttributes.size() - 1; i >= 0; i--) {
        Attribute candidate = mAttributes.get(i);
        if (candidate instanceof RuntimeInvisibleAnnotationsAttr) {
            return ((AnnotationsAttr) candidate).getAnnotations();
        }
    }
    return new Annotation[0];
}
/**
 * Returns all the runtime visible annotations defined for this class file,
 * or an empty array if none. Scans the attribute list back to front and
 * returns the annotations of the last matching attribute.
 */
public Annotation[] getRuntimeVisibleAnnotations() {
    for (int i = mAttributes.size() - 1; i >= 0; i--) {
        Attribute candidate = mAttributes.get(i);
        if (candidate instanceof RuntimeVisibleAnnotationsAttr) {
            return ((AnnotationsAttr) candidate).getAnnotations();
        }
    }
    return new Annotation[0];
}
/**
 * Add a runtime invisible annotation of the given type, reusing the
 * earliest RuntimeInvisibleAnnotations attribute if one already exists.
 */
public Annotation addRuntimeInvisibleAnnotation(TypeDesc type) {
    AnnotationsAttr target = null;
    // First match in list order; equivalent to the historical backward
    // scan without break, which also ended on the lowest index.
    for (Attribute a : mAttributes) {
        if (a instanceof RuntimeInvisibleAnnotationsAttr) {
            target = (AnnotationsAttr) a;
            break;
        }
    }
    if (target == null) {
        target = new RuntimeInvisibleAnnotationsAttr(mCp);
        addAttribute(target);
    }
    Annotation ann = new Annotation(mCp);
    ann.setType(type);
    target.addAnnotation(ann);
    return ann;
}
/**
 * Add a runtime visible annotation of the given type, reusing the earliest
 * RuntimeVisibleAnnotations attribute if one already exists.
 */
public Annotation addRuntimeVisibleAnnotation(TypeDesc type) {
    AnnotationsAttr target = null;
    // First match in list order; equivalent to the historical backward
    // scan without break, which also ended on the lowest index.
    for (Attribute a : mAttributes) {
        if (a instanceof RuntimeVisibleAnnotationsAttr) {
            target = (AnnotationsAttr) a;
            break;
        }
    }
    if (target == null) {
        target = new RuntimeVisibleAnnotationsAttr(mCp);
        addAttribute(target);
    }
    Annotation ann = new Annotation(mCp);
    ann.setType(type);
    target.addAnnotation(ann);
    return ann;
}
/**
 * Writes the ClassFile to the given OutputStream. When finished, the
 * stream is flushed, but not closed.
 */
public void writeTo(OutputStream out) throws IOException {
    DataOutput dout = (out instanceof DataOutput)
        ? (DataOutput) out
        : new DataOutputStream(out);
    writeTo(dout);
    out.flush();
}
/**
 * Writes the ClassFile to the given DataOutput, emitting sections in the
 * order mandated by the JVM class file format: magic, minor/major version,
 * constant pool, access flags, this class, super class, interfaces,
 * fields, methods, and finally class-level attributes.
 *
 * @throws RuntimeException if any section exceeds its unsigned 16-bit
 * count limit of 65535 entries
 */
public void writeTo(DataOutput dout) throws IOException {
dout.writeInt(MAGIC);
// Minor version precedes major in the file format.
dout.writeShort(mMinorVersion);
dout.writeShort(mMajorVersion);
mCp.writeTo(dout);
int modifier = mModifiers.getModifier();
if (!mModifiers.isInterface()) {
// Set the ACC_SUPER flag for classes only.
// NOTE: we use SYNCHRONIZED which is the same value as
// ACC_SUPER, but Java does not have a constant field exposed
// for it, so we use SYNCHRONIZED instead
modifier |= Modifier.SYNCHRONIZED;
}
dout.writeShort(modifier);
dout.writeShort(mThisClass.getIndex());
if (mSuperClass != null) {
dout.writeShort(mSuperClass.getIndex());
}
else {
// No super class is encoded as constant pool index 0.
dout.writeShort(0);
}
int size = mInterfaces.size();
if (size > 65535) {
throw new RuntimeException
("Interfaces count cannot exceed 65535: " + size);
}
dout.writeShort(size);
for (int i=0; i<size; i++) {
int index = mInterfaces.get(i).getIndex();
dout.writeShort(index);
}
size = mFields.size();
if (size > 65535) {
throw new RuntimeException
("Field count cannot exceed 65535: " + size);
}
dout.writeShort(size);
for (int i=0; i<size; i++) {
FieldInfo field = mFields.get(i);
field.writeTo(dout);
}
size = mMethods.size();
if (size > 65535) {
throw new RuntimeException
("Method count cannot exceed 65535: " + size);
}
dout.writeShort(size);
for (int i=0; i<size; i++) {
MethodInfo method = mMethods.get(i);
method.writeTo(dout);
}
size = mAttributes.size();
if (size > 65535) {
throw new RuntimeException
("Attribute count cannot exceed 65535: " + size);
}
dout.writeShort(size);
for (int i=0; i<size; i++) {
Attribute attr = mAttributes.get(i);
attr.writeTo(dout);
}
}
/**
 * Reads a ClassFile from the given InputStream. With this method, inner
 * classes cannot be loaded, and custom attributes cannot be defined.
 *
 * @param in source of class file data
 * @throws IOException for I/O error or if classfile is invalid.
 * @throws ArrayIndexOutOfBoundsException if a constant pool index is out
 * of range.
 * @throws ClassCastException if a constant pool index references the
 * wrong type.
 */
public static ClassFile readFrom(InputStream in) throws IOException {
    ClassFileDataLoader noLoader = null;
    AttributeFactory noFactory = null;
    return readFrom(in, noLoader, noFactory);
}
/**
 * Reads a ClassFile from the given DataInput. With this method, inner
 * classes cannot be loaded, and custom attributes cannot be defined.
 *
 * @param din source of class file data
 * @throws IOException for I/O error or if classfile is invalid.
 * @throws ArrayIndexOutOfBoundsException if a constant pool index is out
 * of range.
 * @throws ClassCastException if a constant pool index references the
 * wrong type.
 */
public static ClassFile readFrom(DataInput din) throws IOException {
    ClassFileDataLoader noLoader = null;
    AttributeFactory noFactory = null;
    return readFrom(din, noLoader, noFactory);
}
/**
 * Reads a ClassFile from the given InputStream. A
 * {@link ClassFileDataLoader} may be provided, which allows inner class
 * definitions to be loaded. Also, an {@link AttributeFactory} may be
 * provided, which allows non-standard attributes to be read. All
 * remaining unknown attribute types are captured, but are not decoded.
 *
 * @param in source of class file data
 * @param loader optional loader for reading inner class definitions
 * @param attrFactory optional factory for reading custom attributes
 * @throws IOException for I/O error or if classfile is invalid.
 * @throws ArrayIndexOutOfBoundsException if a constant pool index is out
 * of range.
 * @throws ClassCastException if a constant pool index references the
 * wrong type.
 */
public static ClassFile readFrom(InputStream in,
                                 ClassFileDataLoader loader,
                                 AttributeFactory attrFactory)
    throws IOException
{
    DataInput din = (in instanceof DataInput)
        ? (DataInput) in
        : new DataInputStream(in);
    return readFrom(din, loader, attrFactory);
}
/**
 * Reads a ClassFile from the given DataInput. A
 * {@link ClassFileDataLoader} may be provided, which allows inner class
 * definitions to be loaded. Also, an {@link AttributeFactory} may be
 * provided, which allows non-standard attributes to be read. All
 * remaining unknown attribute types are captured, but are not decoded.
 *
 * @param din source of class file data
 * @param loader optional loader for reading inner class definitions
 * @param attrFactory optional factory for reading custom attributes
 * @throws IOException for I/O error or if classfile is invalid.
 * @throws ArrayIndexOutOfBoundsException if a constant pool index is out
 * of range.
 * @throws ClassCastException if a constant pool index references the
 * wrong type.
 */
public static ClassFile readFrom(DataInput din,
                                 ClassFileDataLoader loader,
                                 AttributeFactory attrFactory)
    throws IOException
{
    // Fresh cycle-detection map for this top-level read.
    Map<String, ClassFile> seen = new HashMap<String, ClassFile>(11);
    return readFrom(din, loader, attrFactory, seen, null);
}
/**
 * Core class file reader: parses every section of the class file and then
 * resolves inner/outer class relationships.
 *
 * @param loadedClassFiles Maps name to ClassFiles for classes already
 * loaded. This prevents infinite loop: inner loads outer loads inner...
 * @param outerClass the enclosing class when reading an inner class, else
 * null
 */
private static ClassFile readFrom(DataInput din,
ClassFileDataLoader loader,
AttributeFactory attrFactory,
Map<String, ClassFile> loadedClassFiles,
ClassFile outerClass)
throws IOException
{
int magic = din.readInt();
if (magic != MAGIC) {
throw new IOException("Incorrect magic number: 0x" +
Integer.toHexString(magic));
}
// Version numbers are read but deliberately not validated.
/*int minor =*/ din.readUnsignedShort();
/*
if (minor != JDK1_1_MINOR_VERSION) {
throw new IOException("Minor version " + minor +
" not supported, version " +
JDK1_1_MINOR_VERSION + " is.");
}
*/
/*int major =*/ din.readUnsignedShort();
/*
if (major != JDK1_1_MAJOR_VERSION) {
throw new IOException("Major version " + major +
"not supported, version " +
JDK1_1_MAJOR_VERSION + " is.");
}
*/
ConstantPool cp = ConstantPool.readFrom(din);
Modifiers modifiers = new Modifiers(din.readUnsignedShort());
// The access-flag bit shared with SYNCHRONIZED (ACC_SUPER) is cleared
// so it does not leak into the modifier view.
modifiers.setSynchronized(false);
int index = din.readUnsignedShort();
ConstantClassInfo thisClass = (ConstantClassInfo)cp.getConstant(index);
index = din.readUnsignedShort();
ConstantClassInfo superClass = null;
if (index > 0) {
superClass = (ConstantClassInfo)cp.getConstant(index);
}
ClassFile cf =
new ClassFile(cp, modifiers, thisClass, superClass, outerClass);
// Register before recursing so inner/outer loads cannot cycle forever.
loadedClassFiles.put(cf.getClassName(), cf);
// Read interfaces.
int size = din.readUnsignedShort();
for (int i=0; i<size; i++) {
index = din.readUnsignedShort();
ConstantClassInfo info = (ConstantClassInfo)cp.getConstant(index);
cf.addInterface(info.getType().getRootName());
}
// Read fields.
size = din.readUnsignedShort();
for (int i=0; i<size; i++) {
cf.mFields.add(FieldInfo.readFrom(cf, din, attrFactory));
}
// Read methods.
size = din.readUnsignedShort();
for (int i=0; i<size; i++) {
cf.mMethods.add(MethodInfo.readFrom(cf, din, attrFactory));
}
// Read attributes.
size = din.readUnsignedShort();
for (int i=0; i<size; i++) {
Attribute attr = Attribute.readFrom(cp, din, attrFactory);
cf.addAttribute(attr);
if (attr instanceof InnerClassesAttr) {
cf.mInnerClassesAttr = (InnerClassesAttr)attr;
}
}
// Load inner and outer classes. Only possible when a loader is given.
if (cf.mInnerClassesAttr != null && loader != null) {
InnerClassesAttr.Info[] infos =
cf.mInnerClassesAttr.getInnerClassesInfo();
for (int i=0; i<infos.length; i++) {
InnerClassesAttr.Info info = infos[i];
if (thisClass.equals(info.getInnerClass())) {
// This class is an inner class.
if (info.getInnerClassName() != null) {
cf.mInnerClassName = info.getInnerClassName();
}
ConstantClassInfo outer = info.getOuterClass();
if (cf.mOuterClass == null && outer != null) {
cf.mOuterClass = readOuterClass
(outer, loader, attrFactory, loadedClassFiles);
}
// Inner-class entries carry the authoritative access flags.
Modifiers innerFlags = info.getModifiers();
modifiers.setStatic(innerFlags.isStatic());
modifiers.setPrivate(innerFlags.isPrivate());
modifiers.setProtected(innerFlags.isProtected());
modifiers.setPublic(innerFlags.isPublic());
}
else if (thisClass.equals(info.getOuterClass())) {
// This class is an outer class.
ConstantClassInfo inner = info.getInnerClass();
if (inner != null) {
ClassFile innerClass = readInnerClass
(inner, loader, attrFactory, loadedClassFiles, cf);
if (innerClass != null) {
if (innerClass.getInnerClassName() == null) {
innerClass.mInnerClassName =
info.getInnerClassName();
}
if (cf.mInnerClasses == null) {
cf.mInnerClasses = new ArrayList<ClassFile>();
}
cf.mInnerClasses.add(innerClass);
}
}
}
}
}
return cf;
}
/**
 * Loads the enclosing class named by the given constant, consulting the
 * already-loaded cache first to break inner/outer load cycles. Returns
 * null if the loader has no data for the class.
 */
private static ClassFile readOuterClass(ConstantClassInfo outer,
                                        ClassFileDataLoader loader,
                                        AttributeFactory attrFactory,
                                        Map<String, ClassFile> loadedClassFiles)
    throws IOException
{
    String name = outer.getType().getRootName();
    ClassFile cached = loadedClassFiles.get(name);
    if (cached != null) {
        return cached;
    }
    InputStream in = loader.getClassData(name);
    if (in == null) {
        return null;
    }
    DataInput din = (in instanceof DataInput)
        ? (DataInput) in
        : new DataInputStream(in);
    return readFrom(din, loader, attrFactory, loadedClassFiles, null);
}
/**
 * Loads an inner class named by the given constant, consulting the
 * already-loaded cache first to break inner/outer load cycles. Returns
 * null if the loader has no data for the class.
 */
private static ClassFile readInnerClass(ConstantClassInfo inner,
                                        ClassFileDataLoader loader,
                                        AttributeFactory attrFactory,
                                        Map<String, ClassFile> loadedClassFiles,
                                        ClassFile outerClass)
    throws IOException
{
    String name = inner.getType().getRootName();
    ClassFile cached = loadedClassFiles.get(name);
    if (cached != null) {
        return cached;
    }
    InputStream in = loader.getClassData(name);
    if (in == null) {
        return null;
    }
    DataInput din = (in instanceof DataInput)
        ? (DataInput) in
        : new DataInputStream(in);
    return readFrom(din, loader, attrFactory, loadedClassFiles, outerClass);
}
}
| |
/*
Derby - Class org.apache.derby.iapi.types.SQLDouble
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.iapi.types;
import org.apache.derby.shared.common.reference.SQLState;
import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.iapi.services.io.Storable;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.services.cache.ClassSize;
import java.io.ObjectOutput;
import java.io.ObjectInput;
import java.io.IOException;
import java.math.BigDecimal;
import java.sql.ResultSet;
import java.sql.PreparedStatement;
import java.sql.SQLException;
/**
* SQLDouble satisfies the DataValueDescriptor
* interfaces (i.e., OrderableDataType). It implements a double column,
 * e.g. for storing a column value; it can be specified
* when constructed to not allow nulls. Nullability cannot be changed
* after construction, as it affects the storage size and mechanism.
* <p>
* Because OrderableDataType is a subtype of DataType,
* SQLDouble can play a role in either a DataType/Row
* or a OrderableDataType/Row, interchangeably.
* <p>
* We assume the store has a flag for nullness of the value,
* and simply return a 0-length array for the stored form
* when the value is null.
* <p>
 * PERFORMANCE: There are likely a lot of performance improvements
* possible for this implementation -- it new's Double
* more than it probably wants to.
* <p>
* This is modeled after SQLInteger.
* <p>
* We don't let doubles get constructed with NaN or Infinity values, and
* check for those values where they can occur on operations, so the
* set* operations do not check for them coming in.
*
*/
public final class SQLDouble extends NumberDataType
{
/*
* DataValueDescriptor interface
* (mostly implemented in DataType)
*/
// JDBC is lax in what it permits and what it
// returns, so we are similarly lax
// @see DataValueDescriptor
/**
 * Return this value as an INTEGER.
 *
 * @exception StandardException thrown on failure to convert
 */
public int getInt() throws StandardException
{
// REMIND: do we want to check for truncation?
// NOTE(review): the bound is MAX_VALUE + 1.0d, so values in
// (MAX_VALUE, MAX_VALUE + 1.0] pass the check and the cast clamps
// them to Integer.MAX_VALUE -- presumably intentional; confirm.
if ((value > (((double) Integer.MAX_VALUE) + 1.0d)) || (value < (((double) Integer.MIN_VALUE) - 1.0d)))
throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "INTEGER");
return (int)value;
}
/**
 * Return this value as a TINYINT.
 *
 * @exception StandardException thrown on failure to convert
 */
public byte getByte() throws StandardException
{
    double upper = ((double) Byte.MAX_VALUE) + 1.0d;
    double lower = ((double) Byte.MIN_VALUE) - 1.0d;
    if (value > upper || value < lower)
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "TINYINT");
    return (byte) value;
}
/**
 * Return this value as a SMALLINT.
 *
 * @exception StandardException thrown on failure to convert
 */
public short getShort() throws StandardException
{
    double upper = ((double) Short.MAX_VALUE) + 1.0d;
    double lower = ((double) Short.MIN_VALUE) - 1.0d;
    if (value > upper || value < lower)
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "SMALLINT");
    return (short) value;
}
/**
 * Return this value as a BIGINT.
 *
 * @exception StandardException thrown on failure to convert
 */
public long getLong() throws StandardException
{
    double upper = ((double) Long.MAX_VALUE) + 1.0d;
    double lower = ((double) Long.MIN_VALUE) - 1.0d;
    if (value > upper || value < lower)
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, "BIGINT");
    return (long) value;
}
/**
 * Return this value narrowed to a REAL.
 *
 * @exception StandardException if narrowing overflows to infinity
 */
public float getFloat() throws StandardException
{
    float narrowed = (float) value;
    if (Float.isInfinite(narrowed))
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.REAL_NAME);
    return narrowed;
}
/**
 * Return the raw double value without any null check.
 */
public double getDouble()
{
/* This value is bogus if the SQLDouble is null */
return value;
}
/**
 * DOUBLE implementation. Convert to a BigDecimal using getString.
 *
 * Returning java.sql.Types.CHAR directs callers to build the BigDecimal
 * from this value's string form rather than from a numeric accessor.
 */
public int typeToBigDecimal()
{
return java.sql.Types.CHAR;
}
// for lack of a specification: a zero value yields false, and the NULL
// flag is ignored entirely (a SQL NULL with value 0 also yields false)
public boolean getBoolean()
{
return (value != 0);
}
/** Return the decimal string form of this value, or null for SQL NULL. */
public String getString()
{
    return isNull() ? null : Double.toString(value);
}
/**
 * Return this value boxed as a Double, or null for SQL NULL.
 */
public Object getObject()
{
    // REMIND: could create one Double and reuse it?
    if (isNull()) {
        return null;
    }
    return value;
}
/**
 * Set the value from a correctly typed Double object.
 * @throws StandardException
 */
void setObject(Object theValue) throws StandardException
{
    double unwrapped = ((Double) theValue).doubleValue();
    setValue(unwrapped);
}
/** Copy the double value out of another descriptor into this one. */
protected void setFrom(DataValueDescriptor theValue) throws StandardException {
    double incoming = theValue.getDouble();
    setValue(incoming);
}
/** Storage length in bytes (the DOUBLE_LENGTH constant). */
public int getLength()
{
return DOUBLE_LENGTH;
}
// this is for DataType's error generator
/** SQL type name used in error messages. */
public String getTypeName()
{
return TypeId.DOUBLE_NAME;
}
/*
* Storable interface, implies Externalizable, TypedFormat
*/
/**
Return my format identifier.
@see org.apache.derby.iapi.services.io.TypedFormat#getTypeFormatId
*/
public int getTypeFormatId() {
return StoredFormatIds.SQL_DOUBLE_ID;
}
/*
 * see if the double value is null.
 */
/** @see Storable#isNull */
public boolean isNull()
{
return isnull;
}
/**
 * Externalizable write: emits the value as a single double. Callers
 * guarantee this is never invoked on a SQL NULL.
 */
public void writeExternal(ObjectOutput out) throws IOException {
// never called when value is null
if (SanityManager.DEBUG)
SanityManager.ASSERT(! isNull());
out.writeDouble(value);
}
/** @see java.io.Externalizable#readExternal */
public void readExternal(ObjectInput in) throws IOException {
value = in.readDouble();
// A serialized SQLDouble is always non-null; clear the flag.
isnull = false;
}
/**
 * Reset this descriptor to SQL NULL, zeroing the stored double.
 *
 * @see Storable#restoreToNull
 */
public void restoreToNull()
{
    isnull = true;
    value = 0;
}
/**
 * Three-way comparison against another non-null descriptor.
 *
 * @exception StandardException Thrown on error
 */
protected int typeCompare(DataValueDescriptor arg) throws StandardException
{
    /* neither are null, get the value */
    double mine = this.getDouble();
    double theirs = arg.getDouble();
    if (mine == theirs)
        return 0;
    return (mine > theirs) ? 1 : -1;
}
/*
* DataValueDescriptor interface
*/
/** @see DataValueDescriptor#cloneValue */
public DataValueDescriptor cloneValue(boolean forceMaterialization)
{
    try {
        return new SQLDouble(value, isnull);
    } catch (StandardException se) {
        // The private constructor normalizes the value, which can only
        // fail for NaN/infinity; report loudly in debug builds.
        if (SanityManager.DEBUG)
            SanityManager.THROWASSERT(
                "error on clone, " +
                " value = " + value +
                " isnull = " + isnull, se);
        return null;
    }
}
/**
 * Return a fresh SQL NULL SQLDouble.
 *
 * @see DataValueDescriptor#getNewNull
 */
public DataValueDescriptor getNewNull()
{
return new SQLDouble();
}
/**
 * Populate this value from a JDBC ResultSet column.
 *
 * @see DataValueDescriptor#setValueFromResultSet
 *
 * @exception StandardException Thrown on error
 * @exception SQLException Thrown on error
 */
public void setValueFromResultSet(ResultSet resultSet, int colNumber,
                                  boolean isNullable)
    throws StandardException, SQLException
{
    double fetched = resultSet.getDouble(colNumber);
    // wasNull is only consulted for nullable columns.
    isnull = isNullable && resultSet.wasNull();
    value = isnull ? 0 : NumberDataType.normalizeDOUBLE(fetched);
}
/**
Set the value into a PreparedStatement.
@exception SQLException Error setting value in PreparedStatement
*/
public final void setInto(PreparedStatement ps, int position) throws SQLException {
    if (!isNull()) {
        ps.setDouble(position, value);
    } else {
        ps.setNull(position, java.sql.Types.DOUBLE);
    }
}
/**
Set this value into a ResultSet for a subsequent ResultSet.insertRow
or ResultSet.updateRow. This method will only be called for non-null values.
@exception SQLException thrown by the ResultSet object
@exception StandardException thrown by me accessing my value.
*/
public final void setInto(ResultSet rs, int position) throws SQLException, StandardException {
rs.updateDouble(position, value);
}
/*
* class interface
*/
/*
* constructors
*/
/** no-arg constructor, required by Formattable; starts as SQL NULL */
// This constructor also gets used when we are
// allocating space for a double.
public SQLDouble() {
isnull = true;
}
/**
 * Construct a non-null SQLDouble from a primitive double.
 *
 * @exception StandardException if val is NaN or infinite
 */
public SQLDouble(double val) throws StandardException
{
    value = NumberDataType.normalizeDOUBLE(val);
}
/**
 * Construct from a boxed Double; a null argument yields SQL NULL.
 *
 * @exception StandardException if the value is NaN or infinite
 */
public SQLDouble(Double obj) throws StandardException {
    isnull = (obj == null);
    if (!isnull)
        value = NumberDataType.normalizeDOUBLE(obj.doubleValue());
}
/**
 * Internal constructor used by cloneValue: copies both the double and
 * the null flag.
 */
private SQLDouble(double val, boolean startsnull) throws StandardException
{
    isnull = startsnull;
    value = NumberDataType.normalizeDOUBLE(val); // maybe only do if !startsnull
}
/**
 * Parse and store a decimal string; null input sets SQL NULL.
 *
 * @exception StandardException throws NumberFormatException
 * when the String format is not recognized.
 */
public void setValue(String theValue) throws StandardException
{
    if (theValue == null) {
        value = 0;
        isnull = true;
        return;
    }

    double parsed;
    try {
        // ??? jsk: rounding???
        parsed = Double.parseDouble(theValue.trim());
    } catch (NumberFormatException nfe) {
        throw invalidFormat();
    }
    value = NumberDataType.normalizeDOUBLE(parsed);
    isnull = false;
}
/**
 * @exception StandardException on NaN or Infinite double
 */
public void setValue(double theValue) throws StandardException
{
// Normalize first: if it throws, the null flag is left untouched.
value = NumberDataType.normalizeDOUBLE(theValue);
isnull = false;
}
/**
 * @exception StandardException on NaN or Infinite float
 */
public void setValue(float theValue) throws StandardException
{
// Widening float->double is exact; normalization rejects NaN/infinity.
value = NumberDataType.normalizeDOUBLE(theValue);
isnull = false;
}
/** Store a long; every long fits in a double, so no range check needed. */
public void setValue(long theValue)
{
value = theValue; // no check needed
isnull = false;
}
/** Store an int; every int is exactly representable as a double. */
public void setValue(int theValue)
{
value = theValue; // no check needed
isnull = false;
}
/**
 * Store a boxed Number; null input sets SQL NULL via objectNull.
 * NOTE(review): assumes SanityManager.ASSERT is a debug-build flag
 * guarding the instanceof check -- confirm against SanityManager.
 */
public void setValue(Number theValue) throws StandardException
{
if (objectNull(theValue))
return;
if (SanityManager.ASSERT)
{
if (!(theValue instanceof java.lang.Double))
SanityManager.THROWASSERT("SQLDouble.setValue(Number) passed a " + theValue.getClass());
}
setValue(theValue.doubleValue());
}
/**
 * Called for an application setting this value using a BigDecimal.
 *
 * @param bigDecimal the value to store, or null for SQL NULL
 * @exception StandardException if the value does not fit in a DOUBLE
 * (overflow to infinity, or underflow of a non-zero value to zero)
 */
public void setBigDecimal(BigDecimal bigDecimal) throws StandardException
{
    if (objectNull(bigDecimal))
        return;

    // Note BigDecimal.doubleValue() handles the case where
    // its value is outside the range of a double. It returns
    // infinity values which should throw an exception in setValue(double).
    double v = bigDecimal.doubleValue();
    if (v == 0) {
        // We need to catch underflow here, since BigDecimal#doubleValue
        // just returns 0 (i.e. no exception).
        boolean isZero = bigDecimal.compareTo(BigDecimal.ZERO) == 0;
        if (!isZero) {
            // This is a DOUBLE column, so report DOUBLE_NAME (not
            // REAL_NAME) consistently with getTypeName and the other
            // range errors in this class.
            throw StandardException.
                newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE,
                             TypeId.DOUBLE_NAME);
        }
    }
    setValue(v);
}
/**
 * Store a boolean as 1 (true) or 0 (false).
 *
 * @see NumberDataValue#setValue
 */
public void setValue(boolean theValue)
{
    isnull = false;
    value = theValue ? 1 : 0;
}
/*
* DataValueDescriptor interface
*/
/** @see DataValueDescriptor#typePrecedence */
public int typePrecedence()
{
return TypeId.DOUBLE_PRECEDENCE;
}
/*
** SQL Operators
*/
/**
 * The = operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left The value on the left side of the =
 * @param right The value on the right side of the =
 *
 * @return A SQL boolean value telling whether the two parameters are equal
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue equals(DataValueDescriptor left,
                               DataValueDescriptor right)
    throws StandardException
{
    boolean comparison = left.getDouble() == right.getDouble();
    return SQLBoolean.truthValue(left, right, comparison);
}
/**
 * The <> operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left The value on the left side of the operator
 * @param right The value on the right side of the operator
 *
 * @return A SQL boolean value telling whether the two parameters
 * are not equal
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue notEquals(DataValueDescriptor left,
                                  DataValueDescriptor right)
    throws StandardException
{
    boolean comparison = left.getDouble() != right.getDouble();
    return SQLBoolean.truthValue(left, right, comparison);
}
/**
 * The < operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left The value on the left side of the operator
 * @param right The value on the right side of the operator
 *
 * @return A SQL boolean value telling whether the first operand is less
 * than the second operand
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue lessThan(DataValueDescriptor left,
                                 DataValueDescriptor right)
    throws StandardException
{
    boolean comparison = left.getDouble() < right.getDouble();
    return SQLBoolean.truthValue(left, right, comparison);
}
/**
 * The > operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left The value on the left side of the operator
 * @param right The value on the right side of the operator
 *
 * @return A SQL boolean value telling whether the first operand is greater
 * than the second operand
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue greaterThan(DataValueDescriptor left,
                                    DataValueDescriptor right)
    throws StandardException
{
    boolean comparison = left.getDouble() > right.getDouble();
    return SQLBoolean.truthValue(left, right, comparison);
}
/**
 * The &lt;= operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left  The value on the left side of the operator
 * @param right The value on the right side of the operator
 *
 * @return A SQL boolean value telling whether the first operand is less
 *         than or equal to the second operand
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue lessOrEquals(DataValueDescriptor left,
                                     DataValueDescriptor right)
    throws StandardException
{
    // Compare as doubles; truthValue() handles SQL NULL semantics for us.
    boolean lessOrEqual = left.getDouble() <= right.getDouble();
    return SQLBoolean.truthValue(left, right, lessOrEqual);
}
/**
 * The &gt;= operator as called from the language module, as opposed to
 * the storage module.
 *
 * @param left  The value on the left side of the operator
 * @param right The value on the right side of the operator
 *
 * @return A SQL boolean value telling whether the first operand is greater
 *         than or equal to the second operand
 *
 * @exception StandardException Thrown on error
 */
public BooleanDataValue greaterOrEquals(DataValueDescriptor left,
                                        DataValueDescriptor right)
    throws StandardException
{
    // Compare as doubles; truthValue() handles SQL NULL semantics for us.
    boolean greaterOrEqual = left.getDouble() >= right.getDouble();
    return SQLBoolean.truthValue(left, right, greaterOrEqual);
}
/**
 * This method implements the + operator for "double + double".
 *
 * @param addend1 One of the addends
 * @param addend2 The other addend
 * @param result  The result of a previous call to this method, null
 *                if not called yet (reused to avoid reallocation)
 *
 * @return A SQLDouble containing the result of the addition
 *
 * @exception StandardException Thrown on error
 */
public NumberDataValue plus(NumberDataValue addend1,
                            NumberDataValue addend2,
                            NumberDataValue result)
    throws StandardException
{
    // Lazily allocate the holder on first use.
    if (result == null)
    {
        result = new SQLDouble();
    }
    // SQL semantics: NULL + anything is NULL.
    if (addend1.isNull() || addend2.isNull())
    {
        result.setToNull();
        return result;
    }
    // No need to check underflow (result rounded to 0.0), since the
    // difference between two non-equal valid DB2 DOUBLE values is always
    // non-zero in java.lang.Double precision.
    result.setValue(addend1.getDouble() + addend2.getDouble());
    return result;
}
/**
 * This method implements the - operator for "double - double".
 *
 * @param left   The value to be subtracted from
 * @param right  The value to be subtracted
 * @param result The result of a previous call to this method, null
 *               if not called yet (reused to avoid reallocation)
 *
 * @return A SQLDouble containing the result of the subtraction
 *
 * @exception StandardException Thrown on error
 */
public NumberDataValue minus(NumberDataValue left,
                             NumberDataValue right,
                             NumberDataValue result)
    throws StandardException
{
    // Lazily allocate the holder on first use.
    if (result == null)
    {
        result = new SQLDouble();
    }
    // SQL semantics: NULL - anything (or anything - NULL) is NULL.
    if (left.isNull() || right.isNull())
    {
        result.setToNull();
        return result;
    }
    // No need to check underflow (result rounded to 0.0), since no
    // difference between two valid DB2 DOUBLE values can be rounded off
    // to 0.0 in java.lang.Double.
    result.setValue(left.getDouble() - right.getDouble());
    return result;
}
/**
 * This method implements the * operator for "double * double".
 *
 * @param left   The first value to be multiplied
 * @param right  The second value to be multiplied
 * @param result The result of a previous call to this method, null
 *               if not called yet (reused to avoid reallocation)
 *
 * @return A SQLDouble containing the result of the multiplication
 *
 * @exception StandardException Thrown on error if the product underflows
 *            the DOUBLE range
 */
public NumberDataValue times(NumberDataValue left,
                             NumberDataValue right,
                             NumberDataValue result)
    throws StandardException
{
    // Lazily allocate the holder on first use.
    if (result == null)
    {
        result = new SQLDouble();
    }
    // SQL semantics: NULL * anything is NULL.
    if (left.isNull() || right.isNull())
    {
        result.setToNull();
        return result;
    }
    double lhs = left.getDouble();
    double rhs = right.getDouble();
    double product = lhs * rhs;
    // Underflow check: a zero product from two non-zero factors means the
    // true result was too small to represent.
    if ((product == 0.0) && (lhs != 0.0) && (rhs != 0.0)) {
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME);
    }
    result.setValue(product);
    return result;
}
/**
 * This method implements the / operator for "double / double".
 *
 * @param dividend The numerator
 * @param divisor  The denominator
 * @param result   The result of a previous call to this method, null
 *                 if not called yet (reused to avoid reallocation)
 *
 * @return A SQLDouble containing the result of the division
 *
 * @exception StandardException Thrown on divide-by-zero or if the
 *            quotient underflows the DOUBLE range
 */
public NumberDataValue divide(NumberDataValue dividend,
                              NumberDataValue divisor,
                              NumberDataValue result)
    throws StandardException
{
    // Lazily allocate the holder on first use.
    if (result == null)
    {
        result = new SQLDouble();
    }
    // SQL semantics: NULL / anything (or anything / NULL) is NULL.
    if (dividend.isNull() || divisor.isNull())
    {
        result.setToNull();
        return result;
    }
    /*
    ** For double division, we can't catch divide by zero with Double.NaN;
    ** So we check the divisor before the division.
    */
    double denominator = divisor.getDouble();
    if (denominator == 0.0e0D)
    {
        throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO);
    }
    double numerator = dividend.getDouble();
    double quotient = numerator / denominator;
    // 0.0 / 0.0 style cases surface as NaN; treat them as divide-by-zero.
    if (Double.isNaN(quotient))
    {
        throw StandardException.newException(SQLState.LANG_DIVIDE_BY_ZERO);
    }
    // Underflow check (result rounded to 0.0d): a zero quotient from a
    // non-zero numerator means the true result was too small to represent.
    if ((quotient == 0.0d) && (numerator != 0.0d)) {
        throw StandardException.newException(SQLState.LANG_OUTSIDE_RANGE_FOR_DATATYPE, TypeId.DOUBLE_NAME);
    }
    result.setValue(quotient);
    return result;
}
/**
 * This method implements the unary minus operator for double.
 *
 * @param result The result of a previous call to this method, null
 *               if not called yet (reused to avoid reallocation)
 *
 * @return A SQLDouble containing the negation of this value
 *
 * @exception StandardException Thrown on error
 */
public NumberDataValue minus(NumberDataValue result)
    throws StandardException
{
    // Lazily allocate the holder on first use.
    if (result == null)
    {
        result = new SQLDouble();
    }
    // SQL semantics: -NULL is NULL.
    if (this.isNull())
    {
        result.setToNull();
        return result;
    }
    /*
    ** Doubles are assumed to be symmetric -- that is, their
    ** smallest negative value is representable as a positive
    ** value, and vice-versa.
    */
    result.setValue(-(this.getDouble()));
    return result;
}
/**
 * This method implements the isNegative method.
 *
 * @return A boolean. If this.value is negative, return true.
 *         For positive values or null, return false.
 */
protected boolean isNegative()
{
    // SQL NULL is never considered negative.
    if (isNull())
    {
        return false;
    }
    return value < 0.0d;
}
/*
 * String display of value: "NULL" for SQL NULL, otherwise the standard
 * java.lang.Double rendering.
 */
public String toString()
{
    return isNull() ? "NULL" : Double.toString(value);
}
/*
 * Hash code. For doubles that hold an exact integral value the hash is
 * derived from the truncated long; otherwise it falls back to the raw
 * IEEE-754 bit pattern.
 */
public int hashCode()
{
    // Truncate toward zero; doubleLongVal round-trips back to double so we
    // can tell whether the value was integral.
    long longVal = (long) value;
    double doubleLongVal = (double) longVal;
    /*
    ** NOTE: This is coded to work around a bug in Visual Cafe 3.0.
    ** If longVal is compared directly to value on that platform
    ** with the JIT enabled, the values will not always compare
    ** as equal even when they should be equal. This happens with
    ** the value Long.MAX_VALUE, for example.
    **
    ** Assigning the long value back to a double and then doing
    ** the comparison works around the bug.
    **
    ** This fixes Cloudscape bug number 1757.
    **
    ** - Jeff Lichtman
    */
    if (doubleLongVal != value)
    {
        // Non-integral value: hash the raw bit pattern instead.
        longVal = Double.doubleToLongBits(value);
    }
    // NOTE(review): uses signed >> rather than the conventional >>> (as in
    // java.lang.Double.hashCode); still a valid hash, but confirm this was
    // intentional before "fixing" it — changing it alters hash values.
    return (int) (longVal ^ (longVal >> 32));
}
/*
 * useful constants...
 */
// NOTE(review): the trailing comment says this must match the byte count of
// DataOutput.writeDouble(), which writes 8 bytes, yet the value is 32 —
// confirm the intended semantics before relying on this constant.
static final int DOUBLE_LENGTH = 32; // must match the number of bytes written by DataOutput.writeDouble()
// Per-instance memory estimate computed once from the ClassSize catalog.
private static final int BASE_MEMORY_USAGE = ClassSize.estimateBaseFromCatalog( SQLDouble.class);
/** Returns the estimated in-memory footprint of one SQLDouble instance. */
public int estimateMemoryUsage()
{
    return BASE_MEMORY_USAGE;
}
/*
 * object state
 */
private double value;   // the double payload; meaningful only when !isnull
private boolean isnull; // SQL NULL indicator
}
| |
package com.anly.githubapp.data.model;
import android.os.Parcel;
import android.os.Parcelable;
import com.google.gson.annotations.SerializedName;
/**
* Created by mingjun on 16/7/18.
*/
/**
 * Data model for a GitHub repository as returned by the GitHub REST API
 * (e.g. the search and repository endpoints). Field names intentionally
 * mirror the JSON property names (snake_case) so Gson can map them without
 * per-field annotations; the one exception is {@code private}, which is a
 * Java keyword and therefore mapped via {@link SerializedName}.
 *
 * Implements {@link Parcelable} so instances can be passed between Android
 * components. The field order in {@link #writeToParcel} and the
 * {@code Repo(Parcel)} constructor MUST stay in sync.
 */
public class Repo implements Parcelable {
    // --- Core identity ---
    private int id;
    private String name;
    private String full_name;
    // Owning user or organization (nested "owner" object in the JSON).
    private User owner;
    // JSON property is "private", a reserved word in Java.
    @SerializedName("private")
    private boolean privateFlag;
    private String html_url;
    private String description;
    private boolean fork;
    // --- API hypermedia URLs (one per GitHub endpoint, many templated) ---
    private String url;
    private String forks_url;
    private String keys_url;
    private String collaborators_url;
    private String teams_url;
    private String hooks_url;
    private String issue_events_url;
    private String events_url;
    private String assignees_url;
    private String branches_url;
    private String tags_url;
    private String blobs_url;
    private String git_tags_url;
    private String git_refs_url;
    private String trees_url;
    private String statuses_url;
    private String languages_url;
    private String stargazers_url;
    private String contributors_url;
    private String subscribers_url;
    private String subscription_url;
    private String commits_url;
    private String git_commits_url;
    private String comments_url;
    private String issue_comment_url;
    private String contents_url;
    private String compare_url;
    private String merges_url;
    private String archive_url;
    private String downloads_url;
    private String issues_url;
    private String pulls_url;
    private String milestones_url;
    private String notifications_url;
    private String labels_url;
    private String releases_url;
    private String deployments_url;
    // --- Timestamps (ISO-8601 strings as delivered by the API) ---
    private String created_at;
    private String updated_at;
    private String pushed_at;
    // --- Clone/browse URLs ---
    private String git_url;
    private String ssh_url;
    private String clone_url;
    private String svn_url;
    private String homepage;
    // --- Statistics and flags ---
    private int size;
    private int stargazers_count;
    private int watchers_count;
    private String language;
    private boolean has_issues;
    private boolean has_downloads;
    private boolean has_wiki;
    private boolean has_pages;
    private int forks_count;
    private String mirror_url;
    private int open_issues_count;
    private int forks;
    private int open_issues;
    private int watchers;
    private String default_branch;
    // Search relevance score (only populated by search endpoints).
    private double score;
    // Client-side state: whether the current user has starred this repo.
    // Not part of the GitHub payload.
    private boolean isStarred;
    public boolean isStarred() {
        return isStarred;
    }
    public void setStarred(boolean starred) {
        isStarred = starred;
    }
    public int getId() {
        return id;
    }
    public void setId(int id) {
        this.id = id;
    }
    public String getName() {
        return name;
    }
    public void setName(String name) {
        this.name = name;
    }
    public String getFull_name() {
        return full_name;
    }
    public void setFull_name(String full_name) {
        this.full_name = full_name;
    }
    public User getOwner() {
        return owner;
    }
    public void setOwner(User owner) {
        this.owner = owner;
    }
    public boolean isPrivateFlag() {
        return privateFlag;
    }
    public void setPrivateFlag(boolean privateFlag) {
        this.privateFlag = privateFlag;
    }
    public String getHtml_url() {
        return html_url;
    }
    public void setHtml_url(String html_url) {
        this.html_url = html_url;
    }
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    public boolean isFork() {
        return fork;
    }
    public void setFork(boolean fork) {
        this.fork = fork;
    }
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    public String getForks_url() {
        return forks_url;
    }
    public void setForks_url(String forks_url) {
        this.forks_url = forks_url;
    }
    public String getKeys_url() {
        return keys_url;
    }
    public void setKeys_url(String keys_url) {
        this.keys_url = keys_url;
    }
    public String getCollaborators_url() {
        return collaborators_url;
    }
    public void setCollaborators_url(String collaborators_url) {
        this.collaborators_url = collaborators_url;
    }
    public String getTeams_url() {
        return teams_url;
    }
    public void setTeams_url(String teams_url) {
        this.teams_url = teams_url;
    }
    public String getHooks_url() {
        return hooks_url;
    }
    public void setHooks_url(String hooks_url) {
        this.hooks_url = hooks_url;
    }
    public String getIssue_events_url() {
        return issue_events_url;
    }
    public void setIssue_events_url(String issue_events_url) {
        this.issue_events_url = issue_events_url;
    }
    public String getEvents_url() {
        return events_url;
    }
    public void setEvents_url(String events_url) {
        this.events_url = events_url;
    }
    public String getAssignees_url() {
        return assignees_url;
    }
    public void setAssignees_url(String assignees_url) {
        this.assignees_url = assignees_url;
    }
    public String getBranches_url() {
        return branches_url;
    }
    public void setBranches_url(String branches_url) {
        this.branches_url = branches_url;
    }
    public String getTags_url() {
        return tags_url;
    }
    public void setTags_url(String tags_url) {
        this.tags_url = tags_url;
    }
    public String getBlobs_url() {
        return blobs_url;
    }
    public void setBlobs_url(String blobs_url) {
        this.blobs_url = blobs_url;
    }
    public String getGit_tags_url() {
        return git_tags_url;
    }
    public void setGit_tags_url(String git_tags_url) {
        this.git_tags_url = git_tags_url;
    }
    public String getGit_refs_url() {
        return git_refs_url;
    }
    public void setGit_refs_url(String git_refs_url) {
        this.git_refs_url = git_refs_url;
    }
    public String getTrees_url() {
        return trees_url;
    }
    public void setTrees_url(String trees_url) {
        this.trees_url = trees_url;
    }
    public String getStatuses_url() {
        return statuses_url;
    }
    public void setStatuses_url(String statuses_url) {
        this.statuses_url = statuses_url;
    }
    public String getLanguages_url() {
        return languages_url;
    }
    public void setLanguages_url(String languages_url) {
        this.languages_url = languages_url;
    }
    public String getStargazers_url() {
        return stargazers_url;
    }
    public void setStargazers_url(String stargazers_url) {
        this.stargazers_url = stargazers_url;
    }
    public String getContributors_url() {
        return contributors_url;
    }
    public void setContributors_url(String contributors_url) {
        this.contributors_url = contributors_url;
    }
    public String getSubscribers_url() {
        return subscribers_url;
    }
    public void setSubscribers_url(String subscribers_url) {
        this.subscribers_url = subscribers_url;
    }
    public String getSubscription_url() {
        return subscription_url;
    }
    public void setSubscription_url(String subscription_url) {
        this.subscription_url = subscription_url;
    }
    public String getCommits_url() {
        return commits_url;
    }
    public void setCommits_url(String commits_url) {
        this.commits_url = commits_url;
    }
    public String getGit_commits_url() {
        return git_commits_url;
    }
    public void setGit_commits_url(String git_commits_url) {
        this.git_commits_url = git_commits_url;
    }
    public String getComments_url() {
        return comments_url;
    }
    public void setComments_url(String comments_url) {
        this.comments_url = comments_url;
    }
    public String getIssue_comment_url() {
        return issue_comment_url;
    }
    public void setIssue_comment_url(String issue_comment_url) {
        this.issue_comment_url = issue_comment_url;
    }
    public String getContents_url() {
        return contents_url;
    }
    public void setContents_url(String contents_url) {
        this.contents_url = contents_url;
    }
    public String getCompare_url() {
        return compare_url;
    }
    public void setCompare_url(String compare_url) {
        this.compare_url = compare_url;
    }
    public String getMerges_url() {
        return merges_url;
    }
    public void setMerges_url(String merges_url) {
        this.merges_url = merges_url;
    }
    public String getArchive_url() {
        return archive_url;
    }
    public void setArchive_url(String archive_url) {
        this.archive_url = archive_url;
    }
    public String getDownloads_url() {
        return downloads_url;
    }
    public void setDownloads_url(String downloads_url) {
        this.downloads_url = downloads_url;
    }
    public String getIssues_url() {
        return issues_url;
    }
    public void setIssues_url(String issues_url) {
        this.issues_url = issues_url;
    }
    public String getPulls_url() {
        return pulls_url;
    }
    public void setPulls_url(String pulls_url) {
        this.pulls_url = pulls_url;
    }
    public String getMilestones_url() {
        return milestones_url;
    }
    public void setMilestones_url(String milestones_url) {
        this.milestones_url = milestones_url;
    }
    public String getNotifications_url() {
        return notifications_url;
    }
    public void setNotifications_url(String notifications_url) {
        this.notifications_url = notifications_url;
    }
    public String getLabels_url() {
        return labels_url;
    }
    public void setLabels_url(String labels_url) {
        this.labels_url = labels_url;
    }
    public String getReleases_url() {
        return releases_url;
    }
    public void setReleases_url(String releases_url) {
        this.releases_url = releases_url;
    }
    public String getDeployments_url() {
        return deployments_url;
    }
    public void setDeployments_url(String deployments_url) {
        this.deployments_url = deployments_url;
    }
    public String getCreated_at() {
        return created_at;
    }
    public void setCreated_at(String created_at) {
        this.created_at = created_at;
    }
    public String getUpdated_at() {
        return updated_at;
    }
    public void setUpdated_at(String updated_at) {
        this.updated_at = updated_at;
    }
    public String getPushed_at() {
        return pushed_at;
    }
    public void setPushed_at(String pushed_at) {
        this.pushed_at = pushed_at;
    }
    public String getGit_url() {
        return git_url;
    }
    public void setGit_url(String git_url) {
        this.git_url = git_url;
    }
    public String getSsh_url() {
        return ssh_url;
    }
    public void setSsh_url(String ssh_url) {
        this.ssh_url = ssh_url;
    }
    public String getClone_url() {
        return clone_url;
    }
    public void setClone_url(String clone_url) {
        this.clone_url = clone_url;
    }
    public String getSvn_url() {
        return svn_url;
    }
    public void setSvn_url(String svn_url) {
        this.svn_url = svn_url;
    }
    public String getHomepage() {
        return homepage;
    }
    public void setHomepage(String homepage) {
        this.homepage = homepage;
    }
    public int getSize() {
        return size;
    }
    public void setSize(int size) {
        this.size = size;
    }
    public int getStargazers_count() {
        return stargazers_count;
    }
    public void setStargazers_count(int stargazers_count) {
        this.stargazers_count = stargazers_count;
    }
    public int getWatchers_count() {
        return watchers_count;
    }
    public void setWatchers_count(int watchers_count) {
        this.watchers_count = watchers_count;
    }
    public String getLanguage() {
        return language;
    }
    public void setLanguage(String language) {
        this.language = language;
    }
    public boolean isHas_issues() {
        return has_issues;
    }
    public void setHas_issues(boolean has_issues) {
        this.has_issues = has_issues;
    }
    public boolean isHas_downloads() {
        return has_downloads;
    }
    public void setHas_downloads(boolean has_downloads) {
        this.has_downloads = has_downloads;
    }
    public boolean isHas_wiki() {
        return has_wiki;
    }
    public void setHas_wiki(boolean has_wiki) {
        this.has_wiki = has_wiki;
    }
    public boolean isHas_pages() {
        return has_pages;
    }
    public void setHas_pages(boolean has_pages) {
        this.has_pages = has_pages;
    }
    public int getForks_count() {
        return forks_count;
    }
    public void setForks_count(int forks_count) {
        this.forks_count = forks_count;
    }
    public String getMirror_url() {
        return mirror_url;
    }
    public void setMirror_url(String mirror_url) {
        this.mirror_url = mirror_url;
    }
    public int getOpen_issues_count() {
        return open_issues_count;
    }
    public void setOpen_issues_count(int open_issues_count) {
        this.open_issues_count = open_issues_count;
    }
    public int getForks() {
        return forks;
    }
    public void setForks(int forks) {
        this.forks = forks;
    }
    public int getOpen_issues() {
        return open_issues;
    }
    public void setOpen_issues(int open_issues) {
        this.open_issues = open_issues;
    }
    public int getWatchers() {
        return watchers;
    }
    public void setWatchers(int watchers) {
        this.watchers = watchers;
    }
    public String getDefault_branch() {
        return default_branch;
    }
    public void setDefault_branch(String default_branch) {
        this.default_branch = default_branch;
    }
    public double getScore() {
        return score;
    }
    public void setScore(double score) {
        this.score = score;
    }
    /** No-arg constructor required by Gson deserialization. */
    public Repo() {
    }
    /** No special parcel contents (no file descriptors). */
    @Override
    public int describeContents() {
        return 0;
    }
    /**
     * Flattens this object into a Parcel. Booleans are encoded as single
     * bytes (1/0). The write order here MUST match the read order in
     * {@link #Repo(Parcel)}.
     */
    @Override
    public void writeToParcel(Parcel dest, int flags) {
        dest.writeInt(this.id);
        dest.writeString(this.name);
        dest.writeString(this.full_name);
        dest.writeParcelable(this.owner, flags);
        dest.writeByte(this.privateFlag ? (byte) 1 : (byte) 0);
        dest.writeString(this.html_url);
        dest.writeString(this.description);
        dest.writeByte(this.fork ? (byte) 1 : (byte) 0);
        dest.writeString(this.url);
        dest.writeString(this.forks_url);
        dest.writeString(this.keys_url);
        dest.writeString(this.collaborators_url);
        dest.writeString(this.teams_url);
        dest.writeString(this.hooks_url);
        dest.writeString(this.issue_events_url);
        dest.writeString(this.events_url);
        dest.writeString(this.assignees_url);
        dest.writeString(this.branches_url);
        dest.writeString(this.tags_url);
        dest.writeString(this.blobs_url);
        dest.writeString(this.git_tags_url);
        dest.writeString(this.git_refs_url);
        dest.writeString(this.trees_url);
        dest.writeString(this.statuses_url);
        dest.writeString(this.languages_url);
        dest.writeString(this.stargazers_url);
        dest.writeString(this.contributors_url);
        dest.writeString(this.subscribers_url);
        dest.writeString(this.subscription_url);
        dest.writeString(this.commits_url);
        dest.writeString(this.git_commits_url);
        dest.writeString(this.comments_url);
        dest.writeString(this.issue_comment_url);
        dest.writeString(this.contents_url);
        dest.writeString(this.compare_url);
        dest.writeString(this.merges_url);
        dest.writeString(this.archive_url);
        dest.writeString(this.downloads_url);
        dest.writeString(this.issues_url);
        dest.writeString(this.pulls_url);
        dest.writeString(this.milestones_url);
        dest.writeString(this.notifications_url);
        dest.writeString(this.labels_url);
        dest.writeString(this.releases_url);
        dest.writeString(this.deployments_url);
        dest.writeString(this.created_at);
        dest.writeString(this.updated_at);
        dest.writeString(this.pushed_at);
        dest.writeString(this.git_url);
        dest.writeString(this.ssh_url);
        dest.writeString(this.clone_url);
        dest.writeString(this.svn_url);
        dest.writeString(this.homepage);
        dest.writeInt(this.size);
        dest.writeInt(this.stargazers_count);
        dest.writeInt(this.watchers_count);
        dest.writeString(this.language);
        dest.writeByte(this.has_issues ? (byte) 1 : (byte) 0);
        dest.writeByte(this.has_downloads ? (byte) 1 : (byte) 0);
        dest.writeByte(this.has_wiki ? (byte) 1 : (byte) 0);
        dest.writeByte(this.has_pages ? (byte) 1 : (byte) 0);
        dest.writeInt(this.forks_count);
        dest.writeString(this.mirror_url);
        dest.writeInt(this.open_issues_count);
        dest.writeInt(this.forks);
        dest.writeInt(this.open_issues);
        dest.writeInt(this.watchers);
        dest.writeString(this.default_branch);
        dest.writeDouble(this.score);
        dest.writeByte(this.isStarred ? (byte) 1 : (byte) 0);
    }
    /**
     * Reconstructs a Repo from a Parcel. The read order here MUST match the
     * write order in {@link #writeToParcel(Parcel, int)}.
     */
    protected Repo(Parcel in) {
        this.id = in.readInt();
        this.name = in.readString();
        this.full_name = in.readString();
        this.owner = in.readParcelable(User.class.getClassLoader());
        this.privateFlag = in.readByte() != 0;
        this.html_url = in.readString();
        this.description = in.readString();
        this.fork = in.readByte() != 0;
        this.url = in.readString();
        this.forks_url = in.readString();
        this.keys_url = in.readString();
        this.collaborators_url = in.readString();
        this.teams_url = in.readString();
        this.hooks_url = in.readString();
        this.issue_events_url = in.readString();
        this.events_url = in.readString();
        this.assignees_url = in.readString();
        this.branches_url = in.readString();
        this.tags_url = in.readString();
        this.blobs_url = in.readString();
        this.git_tags_url = in.readString();
        this.git_refs_url = in.readString();
        this.trees_url = in.readString();
        this.statuses_url = in.readString();
        this.languages_url = in.readString();
        this.stargazers_url = in.readString();
        this.contributors_url = in.readString();
        this.subscribers_url = in.readString();
        this.subscription_url = in.readString();
        this.commits_url = in.readString();
        this.git_commits_url = in.readString();
        this.comments_url = in.readString();
        this.issue_comment_url = in.readString();
        this.contents_url = in.readString();
        this.compare_url = in.readString();
        this.merges_url = in.readString();
        this.archive_url = in.readString();
        this.downloads_url = in.readString();
        this.issues_url = in.readString();
        this.pulls_url = in.readString();
        this.milestones_url = in.readString();
        this.notifications_url = in.readString();
        this.labels_url = in.readString();
        this.releases_url = in.readString();
        this.deployments_url = in.readString();
        this.created_at = in.readString();
        this.updated_at = in.readString();
        this.pushed_at = in.readString();
        this.git_url = in.readString();
        this.ssh_url = in.readString();
        this.clone_url = in.readString();
        this.svn_url = in.readString();
        this.homepage = in.readString();
        this.size = in.readInt();
        this.stargazers_count = in.readInt();
        this.watchers_count = in.readInt();
        this.language = in.readString();
        this.has_issues = in.readByte() != 0;
        this.has_downloads = in.readByte() != 0;
        this.has_wiki = in.readByte() != 0;
        this.has_pages = in.readByte() != 0;
        this.forks_count = in.readInt();
        this.mirror_url = in.readString();
        this.open_issues_count = in.readInt();
        this.forks = in.readInt();
        this.open_issues = in.readInt();
        this.watchers = in.readInt();
        this.default_branch = in.readString();
        this.score = in.readDouble();
        this.isStarred = in.readByte() != 0;
    }
    /** Required Parcelable factory. */
    public static final Creator<Repo> CREATOR = new Creator<Repo>() {
        @Override
        public Repo createFromParcel(Parcel source) {
            return new Repo(source);
        }
        @Override
        public Repo[] newArray(int size) {
            return new Repo[size];
        }
    };
}
| |
package fr.logica.business;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.Map;
import fr.logica.business.Action.Input;
import fr.logica.business.Action.Persistence;
import fr.logica.business.Action.UserInterface;
import fr.logica.business.context.RequestContext;
import fr.logica.business.controller.Request;
import fr.logica.business.controller.Response;
import fr.logica.business.data.ListData;
import fr.logica.business.data.ScheduleEvent;
import fr.logica.db.DbQuery;
import fr.logica.db.DbQuery.Var;
import fr.logica.db.DbQuery.Visibility;
import fr.logica.reflect.DomainUtils;
public abstract class DomainLogic<E extends Entity> extends AbstractDomainLogic<E> {
/**
 * Hook for single-entity custom actions. Default implementation does
 * nothing and returns {@code null}.
 */
@Override
public List<Key> doCustomAction(Request<E> request, E entity, RequestContext ctx) {
    return null;
}
/**
 * Hook for custom actions applied to a selection of entities (by key).
 * Default implementation does nothing and returns {@code null}.
 */
@Override
public List<Key> doCustomAction(Request<E> request, E entity, List<Key> keys, RequestContext ctx) {
    return null;
}
/**
 * This method returns true if the bean is invalid. Be aware that no error message will be added to the context if this method returns
 * <code>true</code>, but the navigation will be interrupted. You should add yourself a message to the context.
 *
 * Default implementation considers every bean valid.
 */
@Override
public boolean doCheck(E bean, Action action, RequestContext ctx) throws FunctionalException {
    return false;
}
/**
 * Builds a human-readable description of the bean: the bean's own
 * description when available, otherwise its encoded primary key.
 * A null bean yields the empty string.
 */
@Override
public String doDescription(E bean, RequestContext ctx) {
    if (bean == null) {
        return "";
    }
    String description = bean.description();
    if (description != null) {
        return description;
    }
    // Fall back to the encoded primary key when no description is defined.
    return bean.getPrimaryKey().getEncodedValue();
}
/**
 * This method returns true if the variable is invalid in this specific situation. Be aware that no error message will be added to the
 * context if this method returns <code>true</code>, but the navigation will be interrupted. You should add yourself a message to the
 * context.
 *
 * Default implementation considers every variable valid.
 */
@Override
public boolean doVarCheck(E bean, String varName, Action action, RequestContext ctx) throws FunctionalException {
    return false;
}
/**
* You do not have to add a message to explain the problem in the context, one will be added automatically.
*/
@Override
public boolean doVarIsMandatory(E bean, String varName, Action action, RequestContext ctx) {
return bean.getModel().getField(varName).isMandatory();
}
@Deprecated
@Override
public Object doVarValue(Map<String, Object> vars, String domainName, String varName, RequestContext ctx) {
return null;
}
@Override
public Object doVarValue(E bean, String varName, RequestContext ctx) {
if ("internalCaption".equals(varName)) {
if (bean != null && bean.description() != null) {
return bean.invokeGetter(bean.description());
}
}
return null;
}
@Override
public Object uiListVarValue(Map<String, Object> vars, String queryName, String domainName, String varName, RequestContext ctx) {
return null;
}
@Override
public void dbSecure(DbQuery query, RequestContext ctx) {
}
@Override
public void dbOnSave(E bean, Action action, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public void dbOnDelete(E bean, Action action, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public void dbPostLoad(E bean, Action action, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public void dbPostSave(E bean, Action action, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public void dbPostDelete(E bean, Action action, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public String uiActionTitle(Response<E> response, RequestContext ctx) {
if (response.getAction() == null) {
return null;
}
Action action = response.getAction();
if (action.getInput() == Input.QUERY && action.getUi() == UserInterface.OUTPUT) {
return MessageUtils.getInstance(ctx).getListTitle(response.getQueryName());
}
return MessageUtils.getInstance(ctx).getTitle(response.getEntityName(), response.getAction().getCode());
}
@Override
public String uiVarCaption(E bean, String varName, Action action, RequestContext ctx) {
return null;
}
@Override
public String uiLinkCaption(E bean, String linkName, Action action, RequestContext ctx) {
return null;
}
@Override
public String uiListColumnCaption(DbQuery query, LinkModel link, String varName, RequestContext ctx) {
Var var = query.getOutVar(varName);
return MessageUtils.getInstance(ctx).getQryVarTitle(query.getName(), var.tableId, var.name);
}
@Override
public boolean uiListColumnIsVisible(DbQuery query, LinkModel link, String varName, RequestContext ctx) {
return query.getOutVar(varName).visibility == Visibility.VISIBLE;
}
@Override
public boolean uiVarIsVisible(Entity bean, String varName, Action action, RequestContext ctx) {
return true;
}
@Override
public boolean uiLinkIsVisible(Entity entity, String linkName, Action action, RequestContext ctx) {
return true;
}
@Override
public boolean uiGroupIsVisible(Entity bean, String groupName, Action action, RequestContext ctx) {
return true;
}
@Override
public boolean uiListIsProtected(Entity targetEntity, String linkName, String queryName, Action action, RequestContext ctx) {
if (action.getUi() == UserInterface.READONLY) {
return true;
}
return false;
}
@Override
public boolean uiListIsReadOnly(E bean, String linkName, String queryName, Action action, RequestContext ctx) {
return false;
}
@Override
public boolean uiVarIsProtected(Entity bean, String varName, Action action, RequestContext ctx) {
if (bean.getPrimaryKey().getModel().getFields().contains(varName)) {
if (action.getPersistence() != Persistence.INSERT && action.getPersistence() != Persistence.NONE) {
return true;
}
}
return false;
}
@Override
public void uiActionOnLoad(Response<E> response, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public void uiActionOnValidation(Request<E> request, RequestContext ctx) {
// Nothing to do on default behavior.
}
@Override
public boolean uiTabIsVisible(Entity bean, String tabName, Action action, RequestContext ctx) {
return true;
}
@Override
public String uiTabToOpen(Entity bean, String tabPanelName, Action action, RequestContext ctx) {
return null;
}
@Override
public Map<Key, String> uiLinkLoadCombo(Entity bean, LinkModel linkModel, DbQuery filterQuery, Action action, RequestContext ctx) {
return internalUiLinkLoadValues(bean, linkModel, filterQuery, false, ctx);
}
public Request<?> uiCtrlNextAction(Request<E> request, RequestContext ctx) {
Action action = request.getAction();
if (action == null || action.getNext() == null) {
return null;
}
Action nextAction = EntityManager.getEntityModel(request.getEntityName()).getAction(action.getNext());
Request<E> nextRequest = new Request<E>();
nextRequest.setAction(nextAction);
nextRequest.setBackRef(request.isBackRef());
// We don't set nextRequest entity to force entity full reload in next action.
// This will force entity post-load logic call in next action processing.
nextRequest.setEntity(null);
nextRequest.setEntityName(request.getEntityName());
nextRequest.setKeys(request.getKeys());
if (action.getInput() == Input.NONE && nextAction.getInput() == Input.ONE) {
// Previous action was an INPUT NONE, but now we may have an input
if (request.getEntity() != null && request.getEntity().getPrimaryKey().isFull()) {
if (nextRequest.getKeys() == null) {
nextRequest.setKeys(new ArrayList<Key>());
}
nextRequest.getKeys().add(request.getEntity().getPrimaryKey());
}
}
nextRequest.setLinkedEntity(request.getLinkedEntity());
nextRequest.setLinkName(request.getLinkName());
nextRequest.setQueryName(request.getQueryName());
nextRequest.setContext(request.getContext());
return nextRequest;
}
@Override
public Response<?> uiCtrlOverrideAction(Response<E> response, RequestContext ctx) {
return null;
}
@Override
public List<Key> uiCtrlMenuAction(Action action, RequestContext ctx) {
// No default selection on default behavior
return null;
}
@Override
public boolean uiListPrepare(DbQuery query, E criteria, Action action, String linkName, Entity linkedEntity, RequestContext ctx) {
return false;
}
@Override
public boolean uiListPrepare(DbQuery query, String criteria, Action action, String linkName, Entity linkedEntity, RequestContext ctx) {
return false;
}
@Override
public void uiListPrepare(DbQuery query, Entity parentEntity, String linkName, RequestContext ctx) {
// Nothing to do on default behavior
}
@Override
public boolean uiWizardCheckStep(E bean, Action action, String currentStep, String nextStep, RequestContext ctx) {
// Navigation is allowed on default behavior.
return true;
}
@Override
public ListData extQueryLoad(RequestContext context, String entityName, String queryName) {
return new ListData(entityName);
}
@Override
public E extActionLoad(String domainName, Key primaryKey, Action action, RequestContext context) {
@SuppressWarnings("unchecked")
E bean = (E) DomainUtils.newDomain(domainName);
return bean;
}
@Override
public void extActionExecute(Request<E> request, RequestContext context) {
}
@Override
public void uiSchedulePrepareEvent(ScheduleEvent event, E entity, String entityName, RequestContext ctx) {
EntityModel model = entity.getModel();
String start = uiScheduleEventStartName();
if (model.getField(start) != null) {
event.setStart((Date) entity.invokeGetter(start));
} else {
throw new TechnicalException("Variable " + start + " not found in the entity " + model.name());
}
String end = uiScheduleEventEndName();
if (model.getField(end) != null) {
event.setEnd((Date) entity.invokeGetter(end));
} else {
throw new TechnicalException("Variable " + end + " not found in the entity " + model.name());
}
}
/**
* {@inheritDoc}
* @return {@link Constants#EVENT_DATE_START}
*/
@Override
public String uiScheduleEventStartName() {
return Constants.EVENT_DATE_START;
}
/**
* {@inheritDoc}
* @return {@link Constants#EVENT_DATE_END}
*/
@Override
public String uiScheduleEventEndName() {
return Constants.EVENT_DATE_END;
}
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2014 Wiktor Lawski <wiktor.lawski@gmail.com>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package net.wiktorlawski.messageonthescreen.test;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import net.wiktorlawski.messageonthescreen.MessageOnTheScreen;
import android.app.Activity;
import android.app.Instrumentation;
import android.content.Intent;
import android.test.InstrumentationTestCase;
/**
 * Instrumentation tests for {@link MessageOnTheScreen}: singleton semantics of
 * {@code getInstance} and crash-safety of {@code addMessage}/{@code setText} for
 * null, empty, multi-line and very long inputs.
 */
public class MessageOnTheScreenTest extends InstrumentationTestCase {
    /** Name of the private static singleton field inside MessageOnTheScreen. */
    private static final String MOTS_INSTANCE_FIELD_NAME = "sInstance";

    private Activity activity;
    private Instrumentation instrumentation;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        instrumentation = getInstrumentation();
        Intent intent =
                new Intent(instrumentation.getContext(), DummyActivity.class);
        // NEW_TASK is required when starting an activity from an instrumentation context.
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        activity = instrumentation.startActivitySync(intent);
    }

    @Override
    protected void tearDown() throws Exception {
        activity.finish();
        // BUG FIX: the original omitted super.tearDown(). The JUnit3/Android TestCase
        // contract requires it so the framework can release its per-test state;
        // skipping it can leak instrumentation resources between tests.
        super.tearDown();
    }

    /** Returns the shared instance bound to the test activity. */
    private MessageOnTheScreen getMots(boolean showing) {
        return MessageOnTheScreen.getInstance(activity, showing);
    }

    /** Reflectively opens the private static {@code sInstance} field. */
    private static Field instanceField() throws Exception {
        Field mots =
                MessageOnTheScreen.class
                        .getDeclaredField(MOTS_INSTANCE_FIELD_NAME);
        mots.setAccessible(true);
        return mots;
    }

    /** Reflectively constructs an instance without going through getInstance(). */
    private MessageOnTheScreen newInstanceViaReflection(boolean showing)
            throws Exception {
        Constructor<MessageOnTheScreen> motsConstructor =
                MessageOnTheScreen.class.getDeclaredConstructor(Activity.class,
                        boolean.class);
        motsConstructor.setAccessible(true);
        return motsConstructor.newInstance(activity, showing);
    }

    /** Builds a 40000-character payload for the "very long text" tests. */
    private static String longText() {
        String text = "text";
        StringBuilder newText = new StringBuilder(10000 * text.length());
        for (int i = 0; i < 10000; i++) {
            newText.append(text);
        }
        return newText.toString();
    }

    /**
     * By default no MessageOnTheScreen should exist. This test has to be run
     * first because there is no synchronization for setUp() and tearDown()
     * methods between MessageOnTheScreenTest and SharedElementServiceTest
     * classes.
     */
    public void test__defaultNoInstance() throws Exception {
        MessageOnTheScreen actual = (MessageOnTheScreen) instanceField().get(null);
        MessageOnTheScreen expected = null;
        assertEquals(expected, actual);
    }

    /**
     * If MessageOnTheScreen object already exists, then
     * MessageOnTheScreen.getInstance() method should return reference to
     * this instance.
     */
    public void test_getInstance() throws Exception {
        MessageOnTheScreen expected = newInstanceViaReflection(false);
        instanceField().set(null, expected);
        MessageOnTheScreen actual = getMots(false);
        assertEquals(expected, actual);
    }

    /**
     * Multiple MessageOnTheScreen.getInstance() method calls should
     * return reference to the same object.
     */
    public void test_getInstance2() {
        MessageOnTheScreen expected = getMots(false);
        MessageOnTheScreen actual = getMots(false);
        assertEquals(expected, actual);
    }

    /**
     * Multiple MessageOnTheScreen.getInstance() method calls should
     * return reference to the same object even when showing parameter is
     * different.
     */
    public void test_getInstance3() {
        MessageOnTheScreen expected = getMots(false);
        MessageOnTheScreen actual = getMots(true);
        assertEquals(expected, actual);
    }

    /**
     * If MessageOnTheScreen object already exists, then
     * MessageOnTheScreen.getInstance() method should return reference to
     * this instance even when requested visibility is different.
     */
    public void test_getInstance4() throws Exception {
        MessageOnTheScreen expected = newInstanceViaReflection(false);
        instanceField().set(null, expected);
        MessageOnTheScreen actual = getMots(true);
        assertEquals(expected, actual);
    }

    /**
     * Multiple MessageOnTheScreen.getInstance() method calls should
     * return reference to the same object even for requested visibility set to
     * true.
     */
    public void test_getInstance5() {
        MessageOnTheScreen expected = getMots(true);
        MessageOnTheScreen actual = getMots(true);
        assertEquals(expected, actual);
    }

    /**
     * Adding null as the new message should not result in any type of
     * exception when list of debug messages is empty.
     */
    public void test_addMessage() {
        MessageOnTheScreen mots = getMots(false);
        mots.addMessage(activity, null);
    }

    /**
     * Adding null as the new message should not result in any type of
     * exception when list of debug messages is not empty.
     */
    public void test_addMessage2() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text");
        mots.addMessage(activity, null);
    }

    /**
     * Adding empty String as the new message should not result in any type of
     * exception when list of debug messages is empty.
     */
    public void test_addMessage3() {
        MessageOnTheScreen mots = getMots(false);
        mots.addMessage(activity, new String());
    }

    /**
     * Adding empty String as the new message should not result in any type of
     * exception when list of debug messages is not empty.
     */
    public void test_addMessage4() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text");
        mots.addMessage(activity, new String());
    }

    /**
     * Adding new message should not result in any type of exception when list
     * of debug messages is empty.
     */
    public void test_addMessage5() {
        MessageOnTheScreen mots = getMots(false);
        mots.addMessage(activity, "text");
    }

    /**
     * Adding new message should not result in any type of exception when list
     * of debug messages is not empty.
     */
    public void test_addMessage6() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text");
        mots.addMessage(activity, "text");
    }

    /**
     * Adding new message as multiple line text should not result in any type of
     * exception when list of debug messages is empty.
     */
    public void test_addMessage7() {
        MessageOnTheScreen mots = getMots(false);
        mots.addMessage(activity, "text\ntext");
    }

    /**
     * Adding new message as multiple line text should not result in any type of
     * exception when list of debug messages is not empty.
     */
    public void test_addMessage8() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text");
        mots.addMessage(activity, "text\ntext");
    }

    /**
     * Adding new messages as very long new text should not result in any type
     * of exception when list of debug messages is empty.
     */
    public void test_addMessage9() {
        MessageOnTheScreen mots = getMots(false);
        mots.addMessage(activity, longText());
    }

    /**
     * Adding new messages as very long new text should not result in any type
     * of exception when list of debug messages is not empty.
     */
    public void test_addMessage10() {
        MessageOnTheScreen mots = getMots(false);
        String payload = longText();
        mots.setText(activity, payload);
        mots.addMessage(activity, payload);
    }

    /**
     * Setting null as the new text should not result in any type of exception.
     */
    public void test_setText() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, null);
    }

    /**
     * Setting new text as empty String should not result in any type of
     * exception.
     */
    public void test_setText2() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, new String());
    }

    /**
     * Setting new text as single line text should not result in any type of
     * exception.
     */
    public void test_setText3() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text");
    }

    /**
     * Setting new text as multiple line text should not result in any type of
     * exception.
     */
    public void test_setText4() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, "text\ntext");
    }

    /**
     * Setting very long new text should not result in any type of exception.
     */
    public void test_setText5() {
        MessageOnTheScreen mots = getMots(false);
        mots.setText(activity, longText());
    }

    /**
     * Setting null as the new text should not result in any type of exception,
     * even when requested visibility should be set to true.
     */
    public void test_setText6() {
        MessageOnTheScreen mots = getMots(true);
        mots.setText(activity, null);
    }

    /**
     * Setting new text as empty String should not result in any type of
     * exception, even when requested visibility should be set to true.
     */
    public void test_setText7() {
        MessageOnTheScreen mots = getMots(true);
        mots.setText(activity, new String());
    }

    /**
     * Setting new text as single line text should not result in any type of
     * exception, even when requested visibility should be set to true.
     */
    public void test_setText8() {
        MessageOnTheScreen mots = getMots(true);
        mots.setText(activity, "text");
    }

    /**
     * Setting new text as multiple line text should not result in any type of
     * exception, even when requested visibility should be set to true.
     */
    public void test_setText9() {
        MessageOnTheScreen mots = getMots(true);
        mots.setText(activity, "text\ntext");
    }

    /**
     * Setting very long new text should not result in any type of exception,
     * even when requested visibility should be set to true.
     */
    public void test_setText10() {
        MessageOnTheScreen mots = getMots(true);
        mots.setText(activity, longText());
    }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.hive;
import com.facebook.presto.spi.ConnectorPageSink;
import com.facebook.presto.spi.ConnectorSession;
import com.facebook.presto.spi.Page;
import com.facebook.presto.spi.PageIndexer;
import com.facebook.presto.spi.PageIndexerFactory;
import com.facebook.presto.spi.PrestoException;
import com.facebook.presto.spi.block.Block;
import com.facebook.presto.spi.block.BlockBuilderStatus;
import com.facebook.presto.spi.block.DictionaryBlock;
import com.facebook.presto.spi.block.IntArrayBlockBuilder;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeManager;
import com.google.common.collect.ImmutableList;
import com.google.common.primitives.Ints;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import io.airlift.concurrent.MoreFutures;
import io.airlift.json.JsonCodec;
import io.airlift.log.Logger;
import io.airlift.slice.Slice;
import it.unimi.dsi.fastutil.ints.IntArraySet;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntSet;
import it.unimi.dsi.fastutil.objects.Object2IntMap;
import it.unimi.dsi.fastutil.objects.Object2IntOpenHashMap;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalInt;
import java.util.concurrent.Callable;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Executors;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_TOO_MANY_OPEN_PARTITIONS;
import static com.facebook.presto.hive.HiveErrorCode.HIVE_WRITER_CLOSE_ERROR;
import static com.facebook.presto.spi.type.IntegerType.INTEGER;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Verify.verify;
import static io.airlift.slice.SizeOf.sizeOf;
import static io.airlift.slice.Slices.wrappedBuffer;
import static java.util.Objects.requireNonNull;
import static java.util.stream.Collectors.toList;
public class HivePageSink
implements ConnectorPageSink
{
private static final Logger log = Logger.get(HivePageSink.class);
private static final int MAX_PAGE_POSITIONS = 4096;
private final HiveWriterFactory writerFactory;
private final int[] dataColumnInputIndex; // ordinal of columns (not counting sample weight column)
private final int[] partitionColumnsInputIndex; // ordinal of columns (not counting sample weight column)
private final int[] bucketColumns;
private final HiveBucketFunction bucketFunction;
private final HiveWriterPagePartitioner pagePartitioner;
private final HdfsEnvironment hdfsEnvironment;
private final int maxOpenWriters;
private final ListeningExecutorService writeVerificationExecutor;
private final JsonCodec<PartitionUpdate> partitionUpdateCodec;
private final List<HiveWriter> writers = new ArrayList<>();
private final List<WriterPositions> writerPositions = new ArrayList<>();
private final ConnectorSession session;
private long writtenBytes;
private long systemMemoryUsage;
public HivePageSink(
HiveWriterFactory writerFactory,
List<HiveColumnHandle> inputColumns,
Optional<HiveBucketProperty> bucketProperty,
PageIndexerFactory pageIndexerFactory,
TypeManager typeManager,
HdfsEnvironment hdfsEnvironment,
int maxOpenWriters,
ListeningExecutorService writeVerificationExecutor,
JsonCodec<PartitionUpdate> partitionUpdateCodec,
ConnectorSession session)
{
this.writerFactory = requireNonNull(writerFactory, "writerFactory is null");
requireNonNull(inputColumns, "inputColumns is null");
requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
this.hdfsEnvironment = requireNonNull(hdfsEnvironment, "hdfsEnvironment is null");
this.maxOpenWriters = maxOpenWriters;
this.writeVerificationExecutor = requireNonNull(writeVerificationExecutor, "writeVerificationExecutor is null");
this.partitionUpdateCodec = requireNonNull(partitionUpdateCodec, "partitionUpdateCodec is null");
requireNonNull(bucketProperty, "bucketProperty is null");
this.pagePartitioner = new HiveWriterPagePartitioner(
inputColumns,
bucketProperty.isPresent(),
pageIndexerFactory,
typeManager);
// determine the input index of the partition columns and data columns
// and determine the input index and type of bucketing columns
ImmutableList.Builder<Integer> partitionColumns = ImmutableList.builder();
ImmutableList.Builder<Integer> dataColumnsInputIndex = ImmutableList.builder();
Object2IntMap<String> dataColumnNameToIdMap = new Object2IntOpenHashMap<>();
Map<String, HiveType> dataColumnNameToTypeMap = new HashMap<>();
// sample weight column is passed separately, so index must be calculated without this column
for (int inputIndex = 0; inputIndex < inputColumns.size(); inputIndex++) {
HiveColumnHandle column = inputColumns.get(inputIndex);
if (column.isPartitionKey()) {
partitionColumns.add(inputIndex);
}
else {
dataColumnsInputIndex.add(inputIndex);
dataColumnNameToIdMap.put(column.getName(), inputIndex);
dataColumnNameToTypeMap.put(column.getName(), column.getHiveType());
}
}
this.partitionColumnsInputIndex = Ints.toArray(partitionColumns.build());
this.dataColumnInputIndex = Ints.toArray(dataColumnsInputIndex.build());
if (bucketProperty.isPresent()) {
int bucketCount = bucketProperty.get().getBucketCount();
bucketColumns = bucketProperty.get().getBucketedBy().stream()
.mapToInt(dataColumnNameToIdMap::get)
.toArray();
List<HiveType> bucketColumnTypes = bucketProperty.get().getBucketedBy().stream()
.map(dataColumnNameToTypeMap::get)
.collect(toList());
bucketFunction = new HiveBucketFunction(bucketCount, bucketColumnTypes);
}
else {
bucketColumns = null;
bucketFunction = null;
}
this.session = requireNonNull(session, "session is null");
}
@Override
public long getCompletedBytes()
{
return writtenBytes;
}
@Override
public long getSystemMemoryUsage()
{
return systemMemoryUsage;
}
@Override
public CompletableFuture<Collection<Slice>> finish()
{
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#commit -> RecordWriter#close
ListenableFuture<Collection<Slice>> result = hdfsEnvironment.doAs(session.getUser(), this::doFinish);
return MoreFutures.toCompletableFuture(result);
}
private ListenableFuture<Collection<Slice>> doFinish()
{
ImmutableList.Builder<Slice> partitionUpdates = ImmutableList.builder();
List<Callable<Object>> verificationTasks = new ArrayList<>();
for (HiveWriter writer : writers) {
writer.commit();
PartitionUpdate partitionUpdate = writer.getPartitionUpdate();
partitionUpdates.add(wrappedBuffer(partitionUpdateCodec.toJsonBytes(partitionUpdate)));
writer.getVerificationTask()
.map(Executors::callable)
.ifPresent(verificationTasks::add);
}
List<Slice> result = partitionUpdates.build();
writtenBytes = writers.stream()
.mapToLong(HiveWriter::getWrittenBytes)
.sum();
if (verificationTasks.isEmpty()) {
return Futures.immediateFuture(result);
}
try {
List<ListenableFuture<?>> futures = writeVerificationExecutor.invokeAll(verificationTasks).stream()
.map(future -> (ListenableFuture<?>) future)
.collect(toList());
return Futures.transform(Futures.allAsList(futures), input -> result);
}
catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException(e);
}
}
@Override
public void abort()
{
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#rollback -> RecordWriter#close
hdfsEnvironment.doAs(session.getUser(), this::doAbort);
}
private void doAbort()
{
Optional<Exception> rollbackException = Optional.empty();
for (HiveWriter writer : writers) {
// writers can contain nulls if an exception is thrown when doAppend expends the writer list
if (writer != null) {
try {
writer.rollback();
}
catch (Exception e) {
log.warn("exception '%s' while rollback on %s", e, writer);
rollbackException = Optional.of(e);
}
}
}
if (rollbackException.isPresent()) {
throw new PrestoException(HIVE_WRITER_CLOSE_ERROR, "Error rolling back write to Hive", rollbackException.get());
}
}
@Override
public CompletableFuture<?> appendPage(Page page)
{
if (page.getPositionCount() > 0) {
// Must be wrapped in doAs entirely
// Implicit FileSystem initializations are possible in HiveRecordWriter#addRow or #createWriter
hdfsEnvironment.doAs(session.getUser(), () -> doAppend(page));
}
return NOT_BLOCKED;
}
private void doAppend(Page page)
{
while (page.getPositionCount() > MAX_PAGE_POSITIONS) {
Page chunk = page.getRegion(0, MAX_PAGE_POSITIONS);
page = page.getRegion(MAX_PAGE_POSITIONS, page.getPositionCount() - MAX_PAGE_POSITIONS);
writePage(chunk);
}
writePage(page);
}
private void writePage(Page page)
{
int[] writerIndexes = getWriterIndexes(page);
// record which positions are used by which writer
for (int position = 0; position < page.getPositionCount(); position++) {
int writerIndex = writerIndexes[position];
writerPositions.get(writerIndex).add(position);
}
// invoke the writers
Page dataPage = getDataPage(page);
IntSet writersUsed = new IntArraySet(writerIndexes);
for (IntIterator iterator = writersUsed.iterator(); iterator.hasNext(); ) {
int writerIndex = iterator.nextInt();
WriterPositions currentWriterPositions = writerPositions.get(writerIndex);
if (currentWriterPositions.isEmpty()) {
continue;
}
// If write is partitioned across multiple writers, filter page using dictionary blocks
Page pageForWriter = dataPage;
if (currentWriterPositions.size() != dataPage.getPositionCount()) {
Block[] blocks = new Block[dataPage.getChannelCount()];
for (int channel = 0; channel < dataPage.getChannelCount(); channel++) {
blocks[channel] = new DictionaryBlock(currentWriterPositions.size(), dataPage.getBlock(channel), currentWriterPositions.getPositionsArray());
}
pageForWriter = new Page(currentWriterPositions.size(), blocks);
}
HiveWriter writer = writers.get(writerIndex);
long currentWritten = writer.getWrittenBytes();
long currentMemory = writer.getSystemMemoryUsage();
writer.append(pageForWriter);
writtenBytes += (writer.getWrittenBytes() - currentWritten);
systemMemoryUsage += (writer.getSystemMemoryUsage() - currentMemory);
currentWriterPositions.clear();
}
}
private int[] getWriterIndexes(Page page)
{
Page partitionColumns = extractColumns(page, partitionColumnsInputIndex);
Block bucketBlock = buildBucketBlock(page);
int[] writerIndexes = pagePartitioner.partitionPage(partitionColumns, bucketBlock);
if (pagePartitioner.getMaxIndex() >= maxOpenWriters) {
throw new PrestoException(HIVE_TOO_MANY_OPEN_PARTITIONS, "Too many open partitions");
}
// expand writers list to new size
while (writers.size() <= pagePartitioner.getMaxIndex()) {
writers.add(null);
WriterPositions newWriterPositions = new WriterPositions();
systemMemoryUsage += sizeOf(newWriterPositions.getPositionsArray());
writerPositions.add(newWriterPositions);
}
// create missing writers
for (int position = 0; position < page.getPositionCount(); position++) {
int writerIndex = writerIndexes[position];
if (writers.get(writerIndex) != null) {
continue;
}
OptionalInt bucketNumber = OptionalInt.empty();
if (bucketBlock != null) {
bucketNumber = OptionalInt.of(bucketBlock.getInt(position, 0));
}
HiveWriter writer = writerFactory.createWriter(partitionColumns, position, bucketNumber);
writers.set(writerIndex, writer);
}
verify(writers.size() == pagePartitioner.getMaxIndex() + 1);
verify(!writers.contains(null));
return writerIndexes;
}
private Page getDataPage(Page page)
{
Block[] blocks = new Block[dataColumnInputIndex.length];
for (int i = 0; i < dataColumnInputIndex.length; i++) {
int dataColumn = dataColumnInputIndex[i];
blocks[i] = page.getBlock(dataColumn);
}
return new Page(page.getPositionCount(), blocks);
}
private Block buildBucketBlock(Page page)
{
if (bucketFunction == null) {
return null;
}
IntArrayBlockBuilder bucketColumnBuilder = new IntArrayBlockBuilder(new BlockBuilderStatus(), page.getPositionCount());
Page bucketColumnsPage = extractColumns(page, bucketColumns);
for (int position = 0; position < page.getPositionCount(); position++) {
int bucket = bucketFunction.getBucket(bucketColumnsPage, position);
bucketColumnBuilder.writeInt(bucket);
}
return bucketColumnBuilder.build();
}
private static Page extractColumns(Page page, int[] columns)
{
Block[] blocks = new Block[columns.length];
for (int i = 0; i < columns.length; i++) {
int dataColumn = columns[i];
blocks[i] = page.getBlock(dataColumn);
}
return new Page(page.getPositionCount(), blocks);
}
private static class HiveWriterPagePartitioner
{
private final PageIndexer pageIndexer;
public HiveWriterPagePartitioner(
List<HiveColumnHandle> inputColumns,
boolean bucketed,
PageIndexerFactory pageIndexerFactory,
TypeManager typeManager)
{
requireNonNull(inputColumns, "inputColumns is null");
requireNonNull(pageIndexerFactory, "pageIndexerFactory is null");
List<Type> partitionColumnTypes = inputColumns.stream()
.filter(HiveColumnHandle::isPartitionKey)
.map(column -> typeManager.getType(column.getTypeSignature()))
.collect(toList());
if (bucketed) {
partitionColumnTypes.add(INTEGER);
}
this.pageIndexer = pageIndexerFactory.createPageIndexer(partitionColumnTypes);
}
public int[] partitionPage(Page partitionColumns, Block bucketBlock)
{
if (bucketBlock != null) {
Block[] blocks = new Block[partitionColumns.getChannelCount() + 1];
for (int i = 0; i < partitionColumns.getChannelCount(); i++) {
blocks[i] = partitionColumns.getBlock(i);
}
blocks[blocks.length - 1] = bucketBlock;
partitionColumns = new Page(partitionColumns.getPositionCount(), blocks);
}
return pageIndexer.indexPage(partitionColumns);
}
public int getMaxIndex()
{
return pageIndexer.getMaxIndex();
}
}
    /**
     * Fixed-capacity buffer of page positions destined for a single writer.
     * Capacity is bounded by MAX_PAGE_POSITIONS; add() rejects overflow.
     */
    private static final class WriterPositions
    {
        private final int[] positions = new int[MAX_PAGE_POSITIONS];
        // number of valid entries at the front of positions
        private int size;

        public boolean isEmpty()
        {
            return size == 0;
        }

        public int size()
        {
            return size;
        }

        // exposes the backing array directly (callers must respect size());
        // also used for memory accounting via sizeOf()
        public int[] getPositionsArray()
        {
            return positions;
        }

        public void add(int position)
        {
            checkArgument(size < positions.length, "Too many page positions");
            positions[size] = position;
            size++;
        }

        // logical reset only; the backing array is reused
        public void clear()
        {
            size = 0;
        }

        @Override
        public String toString()
        {
            return toStringHelper(this)
                    .add("size", size)
                    .toString();
        }
    }
}
| |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.compiler.impl;
import com.google.common.base.Throwables;
import com.intellij.compiler.impl.generic.GenericCompilerCache;
import com.intellij.compiler.impl.generic.GenericCompilerPersistentData;
import com.intellij.openapi.compiler.generic.*;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.RunResult;
import com.intellij.openapi.compiler.*;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.project.DumbService;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.util.CommonProcessors;
import com.intellij.util.Processor;
import com.intellij.util.ThrowableRunnable;
import com.intellij.util.io.KeyDescriptor;
import gnu.trove.THashSet;
import gnu.trove.TObjectHashingStrategy;
import org.jetbrains.annotations.NotNull;
import java.io.File;
import java.io.IOException;
import java.util.*;
/**
 * Drives {@link GenericCompiler} implementations during a compilation run: keeps each
 * compiler's per-target persistent cache in sync with the project, removes obsolete
 * targets and items, and delegates actual item processing to the compiler's
 * {@link GenericCompilerInstance}.
 *
 * @author nik
 */
public class GenericCompilerRunner {
  private static final Logger LOG = Logger.getInstance("#com.intellij.compiler.impl.GenericCompilerRunner");
  // NOTE(review): assigned only in the constructor; could be final
  private CompileContext myContext;
  private final boolean myForceCompile;
  private final boolean myOnlyCheckStatus;
  private final GenericCompiler<?,?,?>[] myCompilers;
  private final Project myProject;

  public GenericCompilerRunner(CompileContext context,
                               CompilerFilter compilerFilter,
                               CompilerManager compilerManager,
                               boolean forceCompile,
                               boolean onlyCheckStatus) {
    myContext = context;
    myForceCompile = forceCompile;
    myOnlyCheckStatus = onlyCheckStatus;
    myCompilers = compilerManager.getCompilers(GenericCompiler.class, compilerFilter);
    myProject = myContext.getProject();
  }

  /**
   * Runs every registered compiler whose order place matches {@code place}.
   *
   * @return true if at least one compiler did some work
   * @throws CompileDriver.ExitException when compilation must abort (cache I/O failure
   *         triggers a rebuild request and an ERRORS exit)
   */
  public boolean invokeCompilers(GenericCompiler.CompileOrderPlace place) throws CompileDriver.ExitException {
    boolean didSomething = false;
    try {
      for (GenericCompiler<?,?,?> compiler : myCompilers) {
        if (compiler.getOrderPlace().equals(place)) {
          didSomething = invokeCompiler(compiler);
        }
      }
    }
    catch (IOException e) {
      // cache I/O failure: caches may be inconsistent, force a rebuild next time
      LOG.info(e);
      myContext.requestRebuildNextTime(e.getMessage());
      throw new CompileDriver.ExitException(CompileDriver.ExitStatus.ERRORS);
    }
    // exit and cancellation are control-flow exceptions: rethrow untouched
    catch (CompileDriver.ExitException e) {
      throw e;
    }
    catch (ProcessCanceledException e) {
      throw e;
    }
    catch (Exception e) {
      // any other compiler failure becomes a build error message instead of aborting
      LOG.info(e);
      myContext.addMessage(CompilerMessageCategory.ERROR, CompilerBundle.message("compiler.error.exception", e.getMessage()), null, -1, -1);
    }
    return didSomething;
  }

  // Captures the compiler's type parameters so the instance and cache agree on them.
  private <Key, SourceState, OutputState> boolean invokeCompiler(GenericCompiler<Key, SourceState, OutputState> compiler) throws IOException, CompileDriver.ExitException {
    return invokeCompiler(compiler, compiler.createInstance(myContext));
  }

  /**
   * Runs a single compiler: wipes its cache on version change, removes targets that
   * no longer exist, then processes each currently-selected target.
   */
  private <T extends BuildTarget, Item extends CompileItem<Key, SourceState, OutputState>, Key, SourceState, OutputState>
  boolean invokeCompiler(GenericCompiler<Key, SourceState, OutputState> compiler, final GenericCompilerInstance<T, Item, Key, SourceState, OutputState> instance) throws IOException, CompileDriver.ExitException {
    final GenericCompilerCache<Key, SourceState, OutputState> cache = CompilerCacheManager.getInstance(myProject).getGenericCompilerCache(compiler);
    GenericCompilerPersistentData
      data = new GenericCompilerPersistentData(getGenericCompilerCacheDir(myProject, compiler), compiler.getVersion());
    if (data.isVersionChanged()) {
      // a compiler version bump invalidates everything it previously cached
      LOG.info("Clearing cache for " + compiler.getDescription());
      cache.wipe();
      data.save();
    }

    // targets recorded in persistent data but no longer reported by the compiler are obsolete
    final Set<String> targetsToRemove = new HashSet<String>(data.getAllTargets());
    // target enumeration touches the project model, so it runs inside a read action
    new ReadAction() {
      protected void run(final Result result) {
        for (T target : instance.getAllTargets()) {
          targetsToRemove.remove(target.getId());
        }
      }
    }.execute();
    if (!myOnlyCheckStatus) {
      for (final String target : targetsToRemove) {
        final int id = data.removeId(target);
        if (LOG.isDebugEnabled()) {
          LOG.debug("Removing obsolete target '" + target + "' (id=" + id + ")");
        }

        // collect the cached keys of the obsolete target, let the instance react,
        // then drop them from the cache
        final List<Key> keys = new ArrayList<Key>();
        CompilerUtil.runInContext(myContext, "Processing obsolete targets...", new ThrowableRunnable<IOException>() {
          @Override
          public void run() throws IOException {
            cache.processSources(id, new CommonProcessors.CollectProcessor<Key>(keys));
            List<GenericCompilerCacheState<Key, SourceState, OutputState>> obsoleteSources = new ArrayList<GenericCompilerCacheState<Key,SourceState,OutputState>>();
            for (Key key : keys) {
              final GenericCompilerCache.PersistentStateData<SourceState, OutputState> state = cache.getState(id, key);
              obsoleteSources.add(new GenericCompilerCacheState<Key,SourceState,OutputState>(key, state.mySourceState, state.myOutputState));
            }
            instance.processObsoleteTarget(target, obsoleteSources);
          }
        });
        checkForErrorsOrCanceled();
        for (Key key : keys) {
          cache.remove(id, key);
        }
      }
    }

    final List<T> selectedTargets = new ReadAction<List<T>>() {
      protected void run(final Result<List<T>> result) {
        result.setResult(instance.getSelectedTargets());
      }
    }.execute().getResultObject();

    boolean didSomething = false;
    for (T target : selectedTargets) {
      int id = data.getId(target.getId());
      didSomething |= processTarget(target, id, compiler, instance, cache);
    }
    data.save();
    return didSomething;
  }

  /** Aborts compilation if errors were reported or the user cancelled the progress. */
  private void checkForErrorsOrCanceled() throws CompileDriver.ExitException {
    if (myContext.getMessageCount(CompilerMessageCategory.ERROR) > 0) {
      throw new CompileDriver.ExitException(CompileDriver.ExitStatus.ERRORS);
    }
    if (myContext.getProgressIndicator().isCanceled()) {
      throw new CompileDriver.ExitException(CompileDriver.ExitStatus.CANCELLED);
    }
  }

  /** Directory under the project compiler cache store where this compiler keeps its data. */
  public static File getGenericCompilerCacheDir(Project project, GenericCompiler<?,?,?> compiler) {
    return new File(CompilerPaths.getCacheStoreDirectory(project), compiler.getId());
  }

  /**
   * Processes one target: determines which items are out of date (or forced),
   * which cached items are obsolete, lets the instance process them, and updates
   * the cache afterwards.
   *
   * @return true if any items were processed or removed
   * @throws CompileDriver.ExitException with CANCELLED when running in
   *         check-only mode and work would have been required
   */
  private <T extends BuildTarget, Item extends CompileItem<Key, SourceState, OutputState>, Key, SourceState, OutputState>
  boolean processTarget(T target, final int targetId, final GenericCompiler<Key, SourceState, OutputState> compiler, final GenericCompilerInstance<T, Item, Key, SourceState, OutputState> instance,
                        final GenericCompilerCache<Key, SourceState, OutputState> cache) throws IOException, CompileDriver.ExitException {
    if (LOG.isDebugEnabled()) {
      LOG.debug("Processing target '" + target + "' (id=" + targetId + ")");
    }
    final List<Item> items = instance.getItems(target);
    checkForErrorsOrCanceled();

    final List<GenericCompilerProcessingItem<Item, SourceState, OutputState>> toProcess = new ArrayList<GenericCompilerProcessingItem<Item,SourceState,OutputState>>();
    // hashing of keys is delegated to the compiler's KeyDescriptor (see SourceItemHashingStrategy)
    final THashSet<Key> keySet = new THashSet<Key>(new SourceItemHashingStrategy<Key>(compiler));
    final Ref<IOException> exception = Ref.create(null);
    // up-to-date checks may need indices, so wait until indexing finishes
    DumbService.getInstance(myProject).waitForSmartMode();
    final Map<Item, SourceState> sourceStates = new HashMap<Item,SourceState>();
    ApplicationManager.getApplication().runReadAction(new Runnable() {
      @Override
      public void run() {
        try {
          for (Item item : items) {
            final Key key = item.getKey();
            keySet.add(key);
            if (item.isExcluded()) continue;

            final GenericCompilerCache.PersistentStateData<SourceState, OutputState> data = cache.getState(targetId, key);
            SourceState sourceState = data != null ? data.mySourceState : null;
            final OutputState outputState = data != null ? data.myOutputState : null;
            // recompile when forced, never compiled, or either source or output is stale
            if (myForceCompile || sourceState == null || !item.isSourceUpToDate(sourceState)
                || outputState == null || !item.isOutputUpToDate(outputState)) {
              // capture the source state now, inside the read action, so the cache
              // update later records the state the decision was based on
              sourceStates.put(item, item.computeSourceState());
              toProcess.add(new GenericCompilerProcessingItem<Item,SourceState,OutputState>(item, sourceState, outputState));
            }
          }
        }
        catch (IOException e) {
          // runReadAction cannot throw checked exceptions; smuggle it out via a Ref
          exception.set(e);
        }
      }
    });
    if (!exception.isNull()) {
      throw exception.get();
    }

    // cached keys that no longer correspond to any current item must be removed
    final List<Key> toRemove = new ArrayList<Key>();
    cache.processSources(targetId, new Processor<Key>() {
      @Override
      public boolean process(Key key) {
        if (!keySet.contains(key)) {
          toRemove.add(key);
        }
        return true;
      }
    });

    if (LOG.isDebugEnabled()) {
      LOG.debug(toProcess.size() + " items will be processed, " + toRemove.size() + " items will be removed");
    }

    if (toProcess.isEmpty() && toRemove.isEmpty()) {
      return false;
    }

    if (myOnlyCheckStatus) {
      // check-only mode: signal that work would be needed without doing it
      throw new CompileDriver.ExitException(CompileDriver.ExitStatus.CANCELLED);
    }

    List<GenericCompilerCacheState<Key, SourceState, OutputState>> obsoleteItems = new ArrayList<GenericCompilerCacheState<Key,SourceState,OutputState>>();
    for (Key key : toRemove) {
      final GenericCompilerCache.PersistentStateData<SourceState, OutputState> data = cache.getState(targetId, key);
      obsoleteItems.add(new GenericCompilerCacheState<Key,SourceState,OutputState>(key, data.mySourceState, data.myOutputState));
    }

    final List<Item> processedItems = new ArrayList<Item>();
    final List<File> filesToRefresh = new ArrayList<File>();
    final List<File> dirsToRefresh = new ArrayList<File>();
    instance.processItems(target, toProcess, obsoleteItems, new GenericCompilerInstance.OutputConsumer<Item>() {
      @Override
      public void addFileToRefresh(@NotNull File file) {
        filesToRefresh.add(file);
      }

      @Override
      public void addDirectoryToRefresh(@NotNull File dir) {
        dirsToRefresh.add(dir);
      }

      @Override
      public void addProcessedItem(@NotNull Item sourceItem) {
        processedItems.add(sourceItem);
      }
    });
    checkForErrorsOrCanceled();

    CompilerUtil.runInContext(myContext, CompilerBundle.message("progress.updating.caches"), new ThrowableRunnable<IOException>() {
      @Override
      public void run() throws IOException {
        for (Key key : toRemove) {
          cache.remove(targetId, key);
        }
        CompilerUtil.refreshIOFiles(filesToRefresh);
        CompilerUtil.refreshIODirectories(dirsToRefresh);

        // executeSilently keeps the ReadAction from rethrowing; the IOException
        // (if any) is re-surfaced explicitly via propagateIfPossible below
        final RunResult runResult = new ReadAction() {
          protected void run(final Result result) throws Throwable {
            for (Item item : processedItems) {
              SourceState sourceState = sourceStates.get(item);
              if (sourceState == null) {
                sourceState = item.computeSourceState();
              }
              cache.putState(targetId, item.getKey(), sourceState, item.computeOutputState());
            }
          }
        }.executeSilently();
        Throwables.propagateIfPossible(runResult.getThrowable(), IOException.class);
      }
    });

    return true;
  }

  /** Adapts the compiler's KeyDescriptor to Trove's hashing strategy interface. */
  private class SourceItemHashingStrategy<S> implements TObjectHashingStrategy<S> {
    private KeyDescriptor<S> myKeyDescriptor;

    public SourceItemHashingStrategy(GenericCompiler<S, ?, ?> compiler) {
      myKeyDescriptor = compiler.getItemKeyDescriptor();
    }

    @Override
    public int computeHashCode(S object) {
      return myKeyDescriptor.getHashCode(object);
    }

    @Override
    public boolean equals(S o1, S o2) {
      return myKeyDescriptor.isEqual(o1, o2);
    }
  }
}
| |
/**
* Copyright (c) 2013 - 2016 YCSB contributors. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.yahoo.ycsb.db;
import com.couchbase.client.protocol.views.*;
import com.fasterxml.jackson.core.JsonFactory;
import com.fasterxml.jackson.core.JsonGenerator;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ObjectNode;
import com.yahoo.ycsb.ByteIterator;
import com.yahoo.ycsb.DB;
import com.yahoo.ycsb.DBException;
import com.yahoo.ycsb.Status;
import com.yahoo.ycsb.StringByteIterator;
import net.spy.memcached.PersistTo;
import net.spy.memcached.ReplicateTo;
import net.spy.memcached.internal.OperationFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.StringWriter;
import java.io.Writer;
import java.net.URI;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import java.util.Vector;
/**
* A class that wraps the CouchbaseClient to allow it to be interfaced with YCSB.
* This class extends {@link DB} and implements the database interface used by YCSB client.
*/
public class CouchbaseClient extends DB {
  // Configuration property names understood by this binding.
  public static final String URL_PROPERTY = "couchbase.url";
  public static final String BUCKET_PROPERTY = "couchbase.bucket";
  public static final String PASSWORD_PROPERTY = "couchbase.password";
  public static final String CHECKF_PROPERTY = "couchbase.checkFutures";
  public static final String PERSIST_PROPERTY = "couchbase.persistTo";
  public static final String REPLICATE_PROPERTY = "couchbase.replicateTo";
  public static final String JSON_PROPERTY = "couchbase.json";
  public static final String DESIGN_DOC_PROPERTY = "couchbase.ddoc";
  public static final String VIEW_PROPERTY = "couchbase.view";
  public static final String STALE_PROPERTY = "couchbase.stale";
  public static final String SCAN_PROPERTY = "scanproportion";

  public static final String STALE_PROPERTY_DEFAULT = Stale.OK.name();
  public static final String SCAN_PROPERTY_DEFAULT = "0.0";

  // ObjectMapper is thread-safe and expensive to build, so one shared instance suffices.
  protected static final ObjectMapper JSON_MAPPER = new ObjectMapper();

  private com.couchbase.client.CouchbaseClient client;
  private PersistTo persistTo;
  private ReplicateTo replicateTo;
  private boolean checkFutures;
  private boolean useJson;
  private String designDoc;
  private String viewName;
  private Stale stale;
  private View view;
  private final Logger log = LoggerFactory.getLogger(getClass());

  /**
   * Reads the configuration properties and connects to the Couchbase cluster.
   *
   * @throws DBException if the client cannot be created, or a scan workload is
   *         configured but the view cannot be resolved.
   */
  @Override
  public void init() throws DBException {
    Properties props = getProperties();

    String url = props.getProperty(URL_PROPERTY, "http://127.0.0.1:8091/pools");
    String bucket = props.getProperty(BUCKET_PROPERTY, "default");
    String password = props.getProperty(PASSWORD_PROPERTY, "");

    checkFutures = props.getProperty(CHECKF_PROPERTY, "true").equals("true");
    useJson = props.getProperty(JSON_PROPERTY, "true").equals("true");

    persistTo = parsePersistTo(props.getProperty(PERSIST_PROPERTY, "0"));
    replicateTo = parseReplicateTo(props.getProperty(REPLICATE_PROPERTY, "0"));

    designDoc = getProperties().getProperty(DESIGN_DOC_PROPERTY);
    viewName = getProperties().getProperty(VIEW_PROPERTY);
    stale = Stale.valueOf(getProperties().getProperty(STALE_PROPERTY, STALE_PROPERTY_DEFAULT).toUpperCase());

    Double scanproportion = Double.valueOf(props.getProperty(SCAN_PROPERTY, SCAN_PROPERTY_DEFAULT));

    // Route spymemcached's internal logging through SLF4J.
    Properties systemProperties = System.getProperties();
    systemProperties.put("net.spy.log.LoggerImpl", "net.spy.memcached.compat.log.SLF4JLogger");
    System.setProperties(systemProperties);

    try {
      client = new com.couchbase.client.CouchbaseClient(Arrays.asList(new URI(url)), bucket, password);
    } catch (Exception e) {
      throw new DBException("Could not create CouchbaseClient object.", e);
    }

    // The view is only needed (and therefore only validated) when the workload scans.
    if (scanproportion > 0) {
      try {
        view = client.getView(designDoc, viewName);
      } catch (Exception e) {
        throw new DBException(String.format("%s=%s and %s=%s provided, unable to connect to view.",
            DESIGN_DOC_PROPERTY, designDoc, VIEW_PROPERTY, viewName), e.getCause());
      }
    }
  }

  /**
   * Parse the replicate property into the correct enum.
   *
   * @param property the stringified property value.
   * @throws DBException if parsing the property did fail.
   * @return the correct enum.
   */
  private ReplicateTo parseReplicateTo(final String property) throws DBException {
    int value = Integer.parseInt(property);
    switch (value) {
    case 0:
      return ReplicateTo.ZERO;
    case 1:
      return ReplicateTo.ONE;
    case 2:
      return ReplicateTo.TWO;
    case 3:
      return ReplicateTo.THREE;
    default:
      throw new DBException(REPLICATE_PROPERTY + " must be between 0 and 3");
    }
  }

  /**
   * Parse the persist property into the correct enum.
   *
   * @param property the stringified property value.
   * @throws DBException if parsing the property did fail.
   * @return the correct enum.
   */
  private PersistTo parsePersistTo(final String property) throws DBException {
    int value = Integer.parseInt(property);
    switch (value) {
    case 0:
      return PersistTo.ZERO;
    case 1:
      return PersistTo.ONE;
    case 2:
      return PersistTo.TWO;
    case 3:
      return PersistTo.THREE;
    case 4:
      return PersistTo.FOUR;
    default:
      throw new DBException(PERSIST_PROPERTY + " must be between 0 and 4");
    }
  }

  /**
   * Shutdown the client.
   */
  @Override
  public void cleanup() {
    client.shutdown();
  }

  /**
   * Reads a single record and decodes the requested fields into {@code result}.
   *
   * @return {@link Status#OK} on success, {@link Status#ERROR} when missing or on failure.
   */
  @Override
  public Status read(final String table, final String key, final Set<String> fields,
      final Map<String, ByteIterator> result) {
    String formattedKey = formatKey(table, key);
    try {
      Object loaded = client.get(formattedKey);
      if (loaded == null) {
        return Status.ERROR;
      }
      decode(loaded, fields, result);
      return Status.OK;
    } catch (Exception e) {
      if (log.isErrorEnabled()) {
        log.error("Could not read value for key " + formattedKey, e);
      }
      return Status.ERROR;
    }
  }

  /**
   * Performs a range scan through the configured view, starting at {@code startkey}.
   */
  @Override
  public Status scan(final String table, final String startkey, final int recordcount, final Set<String> fields,
      final Vector<HashMap<String, ByteIterator>> result) {
    try {
      Query query = new Query().setRangeStart(startkey)
          .setLimit(recordcount)
          .setIncludeDocs(true)
          .setStale(stale);
      ViewResponse response = client.query(view, query);

      for (ViewRow row : response) {
        // parameterized instead of the raw HashMap used previously
        HashMap<String, ByteIterator> rowMap = new HashMap<String, ByteIterator>();
        decode(row.getDocument(), fields, rowMap);
        result.add(rowMap);
      }

      return Status.OK;
    } catch (Exception e) {
      log.error(e.getMessage());
    }
    return Status.ERROR;
  }

  /** Replaces an existing record with the encoded values. */
  @Override
  public Status update(final String table, final String key, final Map<String, ByteIterator> values) {
    String formattedKey = formatKey(table, key);
    try {
      final OperationFuture<Boolean> future = client.replace(formattedKey, encode(values), persistTo, replicateTo);
      return checkFutureStatus(future);
    } catch (Exception e) {
      if (log.isErrorEnabled()) {
        log.error("Could not update value for key " + formattedKey, e);
      }
      return Status.ERROR;
    }
  }

  /** Inserts a new record with the encoded values. */
  @Override
  public Status insert(final String table, final String key, final Map<String, ByteIterator> values) {
    String formattedKey = formatKey(table, key);
    try {
      final OperationFuture<Boolean> future = client.add(formattedKey, encode(values), persistTo, replicateTo);
      return checkFutureStatus(future);
    } catch (Exception e) {
      if (log.isErrorEnabled()) {
        log.error("Could not insert value for key " + formattedKey, e);
      }
      return Status.ERROR;
    }
  }

  /** Deletes a record by key. */
  @Override
  public Status delete(final String table, final String key) {
    String formattedKey = formatKey(table, key);
    try {
      final OperationFuture<Boolean> future = client.delete(formattedKey, persistTo, replicateTo);
      return checkFutureStatus(future);
    } catch (Exception e) {
      if (log.isErrorEnabled()) {
        log.error("Could not delete value for key " + formattedKey, e);
      }
      return Status.ERROR;
    }
  }

  /**
   * Prefix the key with the given prefix, to establish a unique namespace.
   *
   * @param prefix the prefix to use.
   * @param key the actual key.
   * @return the formatted and prefixed key.
   */
  private String formatKey(final String prefix, final String key) {
    return prefix + ":" + key;
  }

  /**
   * Wrapper method that either inspects the future or not.
   *
   * @param future the future to potentially verify.
   * @return the status of the future result.
   */
  private Status checkFutureStatus(final OperationFuture<?> future) {
    if (checkFutures) {
      return future.getStatus().isSuccess() ? Status.OK : Status.ERROR;
    } else {
      return Status.OK;
    }
  }

  /**
   * Decode the object from server into the storable result.
   *
   * @param source the loaded object.
   * @param fields the fields to check; null or empty means "all fields".
   * @param dest the result passed back to the ycsb core.
   */
  private void decode(final Object source, final Set<String> fields, final Map<String, ByteIterator> dest) {
    if (useJson) {
      try {
        JsonNode json = JSON_MAPPER.readTree((String) source);
        boolean checkFields = fields != null && !fields.isEmpty();
        for (Iterator<Map.Entry<String, JsonNode>> jsonFields = json.fields(); jsonFields.hasNext();) {
          Map.Entry<String, JsonNode> jsonField = jsonFields.next();
          String name = jsonField.getKey();
          // BUGFIX: skip fields that were NOT requested. The filter was inverted
          // before (missing '!'), which returned only the fields the caller did
          // not ask for and dropped every requested one.
          if (checkFields && !fields.contains(name)) {
            continue;
          }
          JsonNode jsonValue = jsonField.getValue();
          if (jsonValue != null && !jsonValue.isNull()) {
            dest.put(name, new StringByteIterator(jsonValue.asText()));
          }
        }
      } catch (Exception e) {
        // preserve the original failure as the cause for easier debugging
        throw new RuntimeException("Could not decode JSON", e);
      }
    } else {
      Map<String, String> converted = (HashMap<String, String>) source;
      for (Map.Entry<String, String> entry : converted.entrySet()) {
        dest.put(entry.getKey(), new StringByteIterator(entry.getValue()));
      }
    }
  }

  /**
   * Encode the object for couchbase storage.
   *
   * @param source the source value.
   * @return the storable object: a JSON string when couchbase.json=true, otherwise a plain map.
   */
  private Object encode(final Map<String, ByteIterator> source) {
    Map<String, String> stringMap = StringByteIterator.getStringMap(source);
    if (!useJson) {
      return stringMap;
    }

    ObjectNode node = JSON_MAPPER.createObjectNode();
    for (Map.Entry<String, String> pair : stringMap.entrySet()) {
      node.put(pair.getKey(), pair.getValue());
    }
    JsonFactory jsonFactory = new JsonFactory();
    Writer writer = new StringWriter();
    try {
      JsonGenerator jsonGenerator = jsonFactory.createGenerator(writer);
      JSON_MAPPER.writeTree(jsonGenerator, node);
    } catch (Exception e) {
      // preserve the original failure as the cause for easier debugging
      throw new RuntimeException("Could not encode JSON value", e);
    }
    return writer.toString();
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.zeppelin.jdbc;
import static org.apache.commons.lang.StringUtils.containsIgnoreCase;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.apache.zeppelin.interpreter.Interpreter;
import org.apache.zeppelin.interpreter.InterpreterContext;
import org.apache.zeppelin.interpreter.InterpreterException;
import org.apache.zeppelin.interpreter.InterpreterPropertyBuilder;
import org.apache.zeppelin.interpreter.InterpreterResult;
import org.apache.zeppelin.interpreter.InterpreterResult.Code;
import org.apache.zeppelin.scheduler.Scheduler;
import org.apache.zeppelin.scheduler.SchedulerFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Function;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import com.google.common.collect.Sets.SetView;
/**
* JDBC interpreter for Zeppelin. This interpreter can also be used for accessing HAWQ,
 * GreenplumDB, MariaDB, MySQL, Postgres and Redshift.
*
* <ul>
* <li>{@code default.url} - JDBC URL to connect to.</li>
 * <li>{@code default.user} - JDBC user name.</li>
 * <li>{@code default.password} - JDBC password.</li>
* <li>{@code default.driver.name} - JDBC driver name.</li>
* <li>{@code common.max.result} - Max number of SQL result to display.</li>
* </ul>
*
* <p>
* How to use: <br/>
* {@code %jdbc.sql} <br/>
* {@code
* SELECT store_id, count(*)
* FROM retail_demo.order_lineitems_pxf
* GROUP BY store_id;
* }
* </p>
*
*/
public class JDBCInterpreter extends Interpreter {
  // NOTE(review): could be static final — one logger per class is conventional
  private Logger logger = LoggerFactory.getLogger(JDBCInterpreter.class);

  // Property-name fragments; full keys are "<prefix>.<key>", e.g. "default.url".
  static final String COMMON_KEY = "common";
  static final String MAX_LINE_KEY = "max_count";
  static final String MAX_LINE_DEFAULT = "1000";

  static final String DEFAULT_KEY = "default";
  static final String DRIVER_KEY = "driver";
  static final String URL_KEY = "url";
  static final String USER_KEY = "user";
  static final String PASSWORD_KEY = "password";
  static final String DOT = ".";

  // Characters used to assemble Zeppelin's %table output format.
  private static final char WHITESPACE = ' ';
  private static final char NEWLINE = '\n';
  private static final char TAB = '\t';
  private static final String TABLE_MAGIC_TAG = "%table ";
  private static final String EXPLAIN_PREDICATE = "EXPLAIN ";
  private static final String UPDATE_COUNT_HEADER = "Update Count";

  static final String COMMON_MAX_LINE = COMMON_KEY + DOT + MAX_LINE_KEY;

  static final String DEFAULT_DRIVER = DEFAULT_KEY + DOT + DRIVER_KEY;
  static final String DEFAULT_URL = DEFAULT_KEY + DOT + URL_KEY;
  static final String DEFAULT_USER = DEFAULT_KEY + DOT + USER_KEY;
  static final String DEFAULT_PASSWORD = DEFAULT_KEY + DOT + PASSWORD_KEY;

  static final String EMPTY_COLUMN_VALUE = "";

  // prefix ("default", "mysql", ...) -> its connection Properties; populated in open()
  private final HashMap<String, Properties> propertiesMap;
  // paragraph id -> currently executing statement (used for cancellation/cleanup)
  private final Map<String, Statement> paragraphIdStatementMap;
  // prefix -> pool of idle connections available for reuse
  private final Map<String, ArrayList<Connection>> propertyKeyUnusedConnectionListMap;
  // paragraph id -> the connection bound to that paragraph
  private final Map<String, Connection> paragraphIdConnectionMap;
  // Registers this interpreter under the "sql" magic in the "jdbc" group, together
  // with the user-configurable connection settings and their defaults.
  static {
    Interpreter.register(
        "sql",
        "jdbc",
        JDBCInterpreter.class.getName(),
        new InterpreterPropertyBuilder()
            .add(DEFAULT_URL, "jdbc:postgresql://localhost:5432/", "The URL for JDBC.")
            .add(DEFAULT_USER, "gpadmin", "The JDBC user name")
            .add(DEFAULT_PASSWORD, "",
                "The JDBC user password")
            .add(DEFAULT_DRIVER, "org.postgresql.Driver", "JDBC Driver Name")
            .add(COMMON_MAX_LINE, MAX_LINE_DEFAULT,
                "Max number of SQL result to display.").build());
  }
  /**
   * Creates the interpreter with empty bookkeeping maps; the actual property
   * parsing and connection setup happen in {@link #open()}.
   *
   * @param property the flat Zeppelin interpreter properties
   */
  public JDBCInterpreter(Properties property) {
    super(property);
    propertiesMap = new HashMap<>();
    propertyKeyUnusedConnectionListMap = new HashMap<>();
    paragraphIdStatementMap = new HashMap<>();
    paragraphIdConnectionMap = new HashMap<>();
  }
  /**
   * Returns the per-prefix connection properties parsed by {@link #open()}.
   * NOTE(review): exposes the internal mutable map directly — callers can modify
   * interpreter state; consider returning an unmodifiable view.
   */
  public HashMap<String, Properties> getPropertiesMap() {
    return propertiesMap;
  }
@Override
public void open() {
for (String propertyKey : property.stringPropertyNames()) {
logger.debug("propertyKey: {}", propertyKey);
String[] keyValue = propertyKey.split("\\.", 2);
if (2 == keyValue.length) {
logger.info("key: {}, value: {}", keyValue[0], keyValue[1]);
Properties prefixProperties;
if (propertiesMap.containsKey(keyValue[0])) {
prefixProperties = propertiesMap.get(keyValue[0]);
} else {
prefixProperties = new Properties();
propertiesMap.put(keyValue[0], prefixProperties);
}
prefixProperties.put(keyValue[1], property.getProperty(propertyKey));
}
}
Set<String> removeKeySet = new HashSet<>();
for (String key : propertiesMap.keySet()) {
if (!COMMON_KEY.equals(key)) {
Properties properties = propertiesMap.get(key);
if (!properties.containsKey(DRIVER_KEY) || !properties.containsKey(URL_KEY)) {
logger.error("{} will be ignored. {}.{} and {}.{} is mandatory.",
key, DRIVER_KEY, key, key, URL_KEY);
removeKeySet.add(key);
}
}
}
for (String key : removeKeySet) {
propertiesMap.remove(key);
}
logger.debug("propertiesMap: {}", propertiesMap);
}
public Connection getConnection(String propertyKey) throws ClassNotFoundException, SQLException {
Connection connection = null;
if (propertyKey == null || propertiesMap.get(propertyKey) == null) {
return null;
}
if (propertyKeyUnusedConnectionListMap.containsKey(propertyKey)) {
ArrayList<Connection> connectionList = propertyKeyUnusedConnectionListMap.get(propertyKey);
if (0 != connectionList.size()) {
connection = propertyKeyUnusedConnectionListMap.get(propertyKey).remove(0);
if (null != connection && connection.isClosed()) {
connection.close();
connection = null;
}
}
}
if (null == connection) {
Properties properties = propertiesMap.get(propertyKey);
logger.info(properties.getProperty(DRIVER_KEY));
Class.forName(properties.getProperty(DRIVER_KEY));
String url = properties.getProperty(URL_KEY);
String user = properties.getProperty(USER_KEY);
String password = properties.getProperty(PASSWORD_KEY);
if (null != user && null != password) {
connection = DriverManager.getConnection(url, user, password);
} else {
connection = DriverManager.getConnection(url, properties);
}
}
return connection;
}
  /**
   * Returns a fresh {@link Statement} for the paragraph, reusing the paragraph's
   * cached connection when present, and records both in the per-paragraph maps.
   *
   * @return a new statement, or null when the prefix is unknown
   */
  public Statement getStatement(String propertyKey, String paragraphId)
      throws SQLException, ClassNotFoundException {
    Connection connection;
    if (paragraphIdConnectionMap.containsKey(paragraphId)) {
      // reuse the connection previously bound to this paragraph
      connection = paragraphIdConnectionMap.get(paragraphId);
    } else {
      connection = getConnection(propertyKey);
    }
    if (connection == null) {
      return null;
    }

    Statement statement = connection.createStatement();
    // NOTE(review): a just-created statement should never report closed, so this
    // check looks like dead code — it was presumably meant to guard a *cached*
    // statement/connection. Also, if the cached connection is itself closed,
    // createStatement above throws before this fallback can run. Verify intent.
    if (isStatementClosed(statement)) {
      connection = getConnection(propertyKey);
      statement = connection.createStatement();
    }
    paragraphIdConnectionMap.put(paragraphId, connection);
    paragraphIdStatementMap.put(paragraphId, statement);

    return statement;
  }
  /**
   * Best-effort check whether the statement is closed.
   * Statement#isClosed is a JDBC 4.0 method; older drivers may throw
   * AbstractMethodError, hence the broad Throwable catch and the optimistic
   * {@code false} fallback.
   */
  private boolean isStatementClosed(Statement statement) {
    try {
      return statement.isClosed();
    } catch (Throwable t) {
      logger.debug("{} doesn't support isClosed method", statement);
      return false;
    }
  }
@Override
public void close() {
try {
for (List<Connection> connectionList : propertyKeyUnusedConnectionListMap.values()) {
for (Connection c : connectionList) {
c.close();
}
}
for (Statement statement : paragraphIdStatementMap.values()) {
statement.close();
}
paragraphIdStatementMap.clear();
for (Connection connection : paragraphIdConnectionMap.values()) {
connection.close();
}
paragraphIdConnectionMap.clear();
} catch (SQLException e) {
logger.error("Error while closing...", e);
}
}
  /**
   * Executes {@code sql} on the connection selected by {@code propertyKey} and
   * renders the outcome: a tab/newline table for result sets, or an update
   * count for DML/DDL. EXPLAIN-style commands are rendered as plain text
   * (no table magic tag).
   *
   * @param propertyKey prefix selecting the connection/statement to use
   * @param sql the SQL text to execute
   * @param interpreterContext supplies the paragraph id used for caching
   * @return SUCCESS with the rendered output, or ERROR with the failure message
   */
  private InterpreterResult executeSql(String propertyKey, String sql,
      InterpreterContext interpreterContext) {
    String paragraphId = interpreterContext.getParagraphId();
    try {
      Statement statement = getStatement(propertyKey, paragraphId);
      if (statement == null) {
        return new InterpreterResult(Code.ERROR, "Prefix not found.");
      }
      statement.setMaxRows(getMaxResult());
      StringBuilder msg = null;
      boolean isTableType = false;
      // EXPLAIN output is emitted verbatim; everything else gets the table tag
      // and has reserved characters in cells replaced (see replaceReservedChars).
      if (containsIgnoreCase(sql, EXPLAIN_PREDICATE)) {
        msg = new StringBuilder();
      } else {
        msg = new StringBuilder(TABLE_MAGIC_TAG);
        isTableType = true;
      }
      ResultSet resultSet = null;
      try {
        // execute() returns true iff the statement produced a ResultSet.
        boolean isResultSetAvailable = statement.execute(sql);
        if (isResultSetAvailable) {
          resultSet = statement.getResultSet();
          ResultSetMetaData md = resultSet.getMetaData();
          // Header row: tab-separated column names (JDBC columns are 1-based).
          for (int i = 1; i < md.getColumnCount() + 1; i++) {
            if (i > 1) {
              msg.append(TAB);
            }
            msg.append(replaceReservedChars(isTableType, md.getColumnName(i)));
          }
          msg.append(NEWLINE);
          int displayRowCount = 0;
          // Cap displayed rows at getMaxResult() even if the driver ignored
          // setMaxRows above.
          while (resultSet.next() && displayRowCount < getMaxResult()) {
            for (int i = 1; i < md.getColumnCount() + 1; i++) {
              Object resultObject;
              String resultValue;
              resultObject = resultSet.getObject(i);
              if (resultObject == null) {
                // Render SQL NULL as the literal text "null".
                resultValue = "null";
              } else {
                resultValue = resultSet.getString(i);
              }
              msg.append(replaceReservedChars(isTableType, resultValue));
              if (i != md.getColumnCount()) {
                msg.append(TAB);
              }
            }
            msg.append(NEWLINE);
            displayRowCount++;
          }
        } else {
          // Response contains either an update count or there are no results.
          int updateCount = statement.getUpdateCount();
          msg.append(UPDATE_COUNT_HEADER).append(NEWLINE);
          msg.append(updateCount).append(NEWLINE);
        }
      } finally {
        // Always release the result set and statement, even on failure.
        // NOTE(review): the statement is also cached in paragraphIdStatementMap
        // by getStatement(); closing it here leaves a closed statement in that
        // map until the next run replaces it — confirm cancel() tolerates this.
        try {
          if (resultSet != null) {
            resultSet.close();
          }
          statement.close();
        } finally {
          statement = null;
        }
      }
      return new InterpreterResult(Code.SUCCESS, msg.toString());
    } catch (SQLException ex) {
      logger.error("Cannot run " + sql, ex);
      return new InterpreterResult(Code.ERROR, ex.getMessage());
    } catch (ClassNotFoundException e) {
      logger.error("Cannot run " + sql, e);
      return new InterpreterResult(Code.ERROR, e.getMessage());
    }
  }
/**
* For %table response replace Tab and Newline characters from the content.
*/
private String replaceReservedChars(boolean isTableResponseType, String str) {
if (str == null) {
return EMPTY_COLUMN_VALUE;
}
return (!isTableResponseType) ? str : str.replace(TAB, WHITESPACE).replace(NEWLINE, WHITESPACE);
}
@Override
public InterpreterResult interpret(String cmd, InterpreterContext contextInterpreter) {
logger.info("Run SQL command '{}'", cmd);
String propertyKey = getPropertyKey(cmd);
if (null != propertyKey && !propertyKey.equals(DEFAULT_KEY)) {
cmd = cmd.substring(propertyKey.length() + 2);
}
cmd = cmd.trim();
logger.info("PropertyKey: {}, SQL command: '{}'", propertyKey, cmd);
return executeSql(propertyKey, cmd, contextInterpreter);
}
@Override
public void cancel(InterpreterContext context) {
logger.info("Cancel current query statement.");
String paragraphId = context.getParagraphId();
try {
paragraphIdStatementMap.get(paragraphId).cancel();
} catch (SQLException e) {
logger.error("Error while cancelling...", e);
}
}
public String getPropertyKey(String cmd) {
boolean firstLineIndex = cmd.startsWith("(");
if (firstLineIndex) {
int configStartIndex = cmd.indexOf("(");
int configLastIndex = cmd.indexOf(")");
if (configStartIndex != -1 && configLastIndex != -1) {
return cmd.substring(configStartIndex + 1, configLastIndex);
} else {
return null;
}
} else {
return DEFAULT_KEY;
}
}
@Override
public FormType getFormType() {
return FormType.SIMPLE;
}
@Override
public int getProgress(InterpreterContext context) {
return 0;
}
@Override
public Scheduler getScheduler() {
return SchedulerFactory.singleton().createOrGetFIFOScheduler(
JDBCInterpreter.class.getName() + this.hashCode());
}
@Override
public List<String> completion(String buf, int cursor) {
return null;
}
public int getMaxResult() {
return Integer.valueOf(
propertiesMap.get(COMMON_KEY).getProperty(MAX_LINE_KEY, MAX_LINE_DEFAULT));
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.