gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.index;
import java.nio.ByteBuffer;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.Future;
import java.util.concurrent.FutureTask;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.db.BufferDecoratedKey;
import org.apache.cassandra.db.Cell;
import org.apache.cassandra.db.ColumnFamilyStore;
import org.apache.cassandra.db.DecoratedKey;
import org.apache.cassandra.db.SystemKeyspace;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.composites.CellName;
import org.apache.cassandra.db.composites.CellNameType;
import org.apache.cassandra.db.composites.SimpleDenseCellNameType;
import org.apache.cassandra.db.index.composites.CompositesIndex;
import org.apache.cassandra.db.index.keys.KeysIndex;
import org.apache.cassandra.db.marshal.AbstractType;
import org.apache.cassandra.db.marshal.BytesType;
import org.apache.cassandra.db.marshal.LocalByPartionerType;
import org.apache.cassandra.dht.LocalToken;
import org.apache.cassandra.exceptions.ConfigurationException;
import org.apache.cassandra.io.sstable.ReducingKeyIterator;
import org.apache.cassandra.io.sstable.SSTableReader;
import org.apache.cassandra.service.StorageService;
import org.apache.cassandra.utils.FBUtilities;
/**
* Abstract base class for different types of secondary indexes.
*
* Do not extend this directly, please pick from PerColumnSecondaryIndex or PerRowSecondaryIndex
*/
public abstract class SecondaryIndex
{
    protected static final Logger logger = LoggerFactory.getLogger(SecondaryIndex.class);

    /** Option key under which a CUSTOM index declares its implementation class. */
    public static final String CUSTOM_INDEX_OPTION_NAME = "class_name";

    /**
     * Comparator for index-row keys: raw bytes when the partitioner already
     * preserves byte order, otherwise an ordering local to the partitioner.
     */
    public static final AbstractType<?> keyComparator = StorageService.getPartitioner().preservesOrder()
                                                      ? BytesType.instance
                                                      : new LocalByPartionerType(StorageService.getPartitioner());

    /**
     * Base CF that has many indexes
     */
    protected ColumnFamilyStore baseCfs;

    /**
     * The column definitions which this index is responsible for.
     * Backed by a ConcurrentHashMap so definitions can be added or removed
     * concurrently with readers iterating the set.
     */
    protected final Set<ColumnDefinition> columnDefs = Collections.newSetFromMap(new ConcurrentHashMap<ColumnDefinition,Boolean>());

    /**
     * Perform any initialization work
     */
    public abstract void init();

    /**
     * Reload an existing index following a change to its configuration,
     * or that of the indexed column(s). Differs from init() in that we expect
     * expect new resources (such as CFS for a KEYS index) to be created by
     * init() but not here
     */
    public abstract void reload();

    /**
     * Validates the index_options passed in the ColumnDef
     * @throws ConfigurationException
     */
    public abstract void validateOptions() throws ConfigurationException;

    /**
     * @return The name of the index
     */
    abstract public String getIndexName();

    /**
     * All internal 2ndary indexes will return "_internal_" for this. Custom
     * 2ndary indexes will return their class name. This only matter for
     * SecondaryIndexManager.groupByIndexType.
     */
    String indexTypeForGrouping()
    {
        // Our internal indexes overwrite this
        return getClass().getCanonicalName();
    }

    /**
     * Return the unique name for this index and column
     * to be stored in the SystemKeyspace that tracks if each column is built
     *
     * @param columnName the name of the column
     * @return the unique name
     */
    abstract public String getNameForSystemKeyspace(ByteBuffer columnName);

    /**
     * Checks if the index for specified column is fully built
     *
     * @param columnName the column
     * @return true if the index is fully built
     */
    public boolean isIndexBuilt(ByteBuffer columnName)
    {
        return SystemKeyspace.isIndexBuilt(baseCfs.keyspace.getName(), getNameForSystemKeyspace(columnName));
    }

    /** Marks every column covered by this index as built in the system keyspace. */
    public void setIndexBuilt()
    {
        for (ColumnDefinition columnDef : columnDefs)
            SystemKeyspace.setIndexBuilt(baseCfs.keyspace.getName(), getNameForSystemKeyspace(columnDef.name.bytes));
    }

    /** Clears the built flag for every column covered by this index. */
    public void setIndexRemoved()
    {
        for (ColumnDefinition columnDef : columnDefs)
            SystemKeyspace.setIndexRemoved(baseCfs.keyspace.getName(), getNameForSystemKeyspace(columnDef.name.bytes));
    }

    /**
     * Called at query time
     * Creates a implementation specific searcher instance for this index type
     * @param columns the list of columns which belong to this index type
     * @return the secondary index search impl
     */
    protected abstract SecondaryIndexSearcher createSecondaryIndexSearcher(Set<ByteBuffer> columns);

    /**
     * Forces this indexes' in memory data to disk
     */
    public abstract void forceBlockingFlush();

    /**
     * Allow access to the underlying column family store if there is one
     * @return the underlying column family store or null
     */
    public abstract ColumnFamilyStore getIndexCfs();

    /**
     * Delete all files and references to this index
     * @param columnName the indexed column to remove
     */
    public abstract void removeIndex(ByteBuffer columnName);

    /**
     * Remove the index and unregisters this index's mbean if one exists
     */
    public abstract void invalidate();

    /**
     * Truncate all the data from the current index
     *
     * @param truncatedAt The truncation timestamp, all data before that timestamp should be rejected.
     */
    public abstract void truncateBlocking(long truncatedAt);

    /**
     * Builds the index using the data in the underlying CFS
     * Blocks till it's complete
     */
    protected void buildIndexBlocking()
    {
        logger.info(String.format("Submitting index build of %s for data in %s",
                                  getIndexName(), StringUtils.join(baseCfs.getSSTables(), ", ")));

        // Reference the current sstables so they cannot be deleted out from
        // under the build; released in the finally block below.
        Collection<SSTableReader> sstables = baseCfs.markCurrentSSTablesReferenced();
        try
        {
            SecondaryIndexBuilder builder = new SecondaryIndexBuilder(baseCfs,
                                                                      Collections.singleton(getIndexName()),
                                                                      new ReducingKeyIterator(sstables));
            Future<?> future = CompactionManager.instance.submitIndexBuild(builder);
            FBUtilities.waitOnFuture(future);

            // Flush before flagging as built so the flag never precedes the data.
            forceBlockingFlush();
            setIndexBuilt();
        }
        finally
        {
            SSTableReader.releaseReferences(sstables);
        }
        logger.info("Index build of {} complete", getIndexName());
    }

    /**
     * Builds the index using the data in the underlying CF, non blocking
     *
     *
     * @return A future object which the caller can block on (optional)
     */
    public Future<?> buildIndexAsync()
    {
        // if we're just linking in the index to indexedColumns on an already-built index post-restart, we're done
        boolean allAreBuilt = true;
        for (ColumnDefinition cdef : columnDefs)
        {
            if (!SystemKeyspace.isIndexBuilt(baseCfs.keyspace.getName(), getNameForSystemKeyspace(cdef.name.bytes)))
            {
                allAreBuilt = false;
                break;
            }
        }

        if (allAreBuilt)
            return null;

        // build it asynchronously; addIndex gets called by CFS open and schema update, neither of which
        // we want to block for a long period. (actual build is serialized on CompactionManager.)
        Runnable runnable = new Runnable()
        {
            public void run()
            {
                // Flush the base CFS first so the build sees all current data on disk.
                baseCfs.forceBlockingFlush();
                buildIndexBlocking();
            }
        };
        FutureTask<?> f = new FutureTask<Object>(runnable, null);
        new Thread(f, "Creating index: " + getIndexName()).start();
        return f;
    }

    public ColumnFamilyStore getBaseCfs()
    {
        return baseCfs;
    }

    private void setBaseCfs(ColumnFamilyStore baseCfs)
    {
        this.baseCfs = baseCfs;
    }

    public Set<ColumnDefinition> getColumnDefs()
    {
        return columnDefs;
    }

    void addColumnDef(ColumnDefinition columnDef)
    {
        columnDefs.add(columnDef);
    }

    /** Removes the definition (if any) whose column name equals {@code name}. */
    void removeColumnDef(ByteBuffer name)
    {
        Iterator<ColumnDefinition> it = columnDefs.iterator();
        while (it.hasNext())
        {
            if (it.next().name.bytes.equals(name))
                it.remove();
        }
    }

    /**
     * Returns the decoratedKey for a column value
     * @param value column value
     * @return decorated key
     */
    public DecoratedKey getIndexKeyFor(ByteBuffer value)
    {
        // FIXME: this imply one column definition per index
        ByteBuffer name = columnDefs.iterator().next().name.bytes;
        return new BufferDecoratedKey(new LocalToken(baseCfs.metadata.getColumnDefinition(name).type, value), value);
    }

    /**
     * Returns true if the provided cell name is indexed by this secondary index.
     */
    public abstract boolean indexes(CellName name);

    /**
     * This is the primary way to create a secondary index instance for a CF column.
     * It will validate the index_options before initializing.
     *
     * @param baseCfs the source of data for the Index
     * @param cdef the meta information about this column (index_type, index_options, name, etc...)
     *
     * @return The secondary index instance for this column
     * @throws ConfigurationException
     */
    public static SecondaryIndex createInstance(ColumnFamilyStore baseCfs, ColumnDefinition cdef) throws ConfigurationException
    {
        SecondaryIndex index;

        switch (cdef.getIndexType())
        {
            case KEYS:
                index = new KeysIndex();
                break;
            case COMPOSITES:
                index = CompositesIndex.create(cdef);
                break;
            case CUSTOM:
                assert cdef.getIndexOptions() != null;
                String className = cdef.getIndexOptions().get(CUSTOM_INDEX_OPTION_NAME);
                assert className != null;
                try
                {
                    index = (SecondaryIndex) Class.forName(className).newInstance();
                }
                catch (Exception e)
                {
                    throw new RuntimeException(e);
                }
                break;
            default:
                // Report the unrecognized *type*; the previous message printed the index name instead.
                throw new RuntimeException("Unknown index type: " + cdef.getIndexType());
        }

        // Validate options before attaching the base CFS.
        index.addColumnDef(cdef);
        index.validateOptions();
        index.setBaseCfs(baseCfs);

        return index;
    }

    public abstract boolean validate(Cell cell);

    public abstract long estimateResultRows();

    /**
     * Returns the index comparator for index backed by CFS, or null.
     *
     * Note: it would be cleaner to have this be a member method. However we need this when opening indexes
     * sstables, but by then the CFS won't be fully initiated, so the SecondaryIndex object won't be accessible.
     */
    public static CellNameType getIndexComparator(CFMetaData baseMetadata, ColumnDefinition cdef)
    {
        switch (cdef.getIndexType())
        {
            case KEYS:
                return new SimpleDenseCellNameType(keyComparator);
            case COMPOSITES:
                return CompositesIndex.getIndexComparator(baseMetadata, cdef);
            case CUSTOM:
                return null;
        }
        throw new AssertionError();
    }
}
| |
/*
* Copyright (c) 2011 Google, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.truth;
import static com.google.common.base.Preconditions.checkState;
import com.google.common.base.Function;
import com.google.common.base.Joiner;
import com.google.common.base.Objects;
import com.google.common.base.Splitter;
import com.google.common.primitives.Ints;
import java.util.List;
import org.checkerframework.checker.nullness.qual.Nullable;
/** {@link Correspondence} implementations for testing purposes. */
final class TestCorrespondences {

  /**
   * A correspondence between strings and integers which tests whether the string parses as the
   * integer. Parsing is as specified by {@link Integer#decode(String)}. It considers null to
   * correspond to null only.
   */
  static final Correspondence<String, Integer> STRING_PARSES_TO_INTEGER_CORRESPONDENCE =
      Correspondence.from(
          // If we were allowed to use method references, this would be:
          // TestCorrespondences::stringParsesToInteger,
          new Correspondence.BinaryPredicate<String, Integer>() {
            @Override
            public boolean apply(@Nullable String actual, @Nullable Integer expected) {
              return stringParsesToInteger(actual, expected);
            }
          },
          "parses to");

  private static boolean stringParsesToInteger(
      @Nullable String actual, @Nullable Integer expected) {
    if (actual == null) {
      return expected == null;
    }
    try {
      // Older versions of Android reject leading plus signs, per the pre-Java-7 contract:
      // https://docs.oracle.com/javase/6/docs/api/java/lang/Integer.html#decode(java.lang.String)
      // https://docs.oracle.com/javase/7/docs/api/java/lang/Integer.html#decode(java.lang.String)
      if (actual.startsWith("+")) {
        actual = actual.substring(1);
      }
      return Integer.decode(actual).equals(expected);
    } catch (NumberFormatException e) {
      return false;
    }
  }

  /** A formatter for the diffs between integers. */
  static final Correspondence.DiffFormatter<Integer, Integer> INT_DIFF_FORMATTER =
      // If we were allowed to use lambdas, this would be:
      // (a, e) -> Integer.toString(a - e));
      new Correspondence.DiffFormatter<Integer, Integer>() {
        @Override
        public String formatDiff(Integer actual, Integer expected) {
          return Integer.toString(actual - expected);
        }
      };

  /**
   * A correspondence between integers which tests whether they are within 10 of each other. Smart
   * diffing is enabled, with a formatted diff showing the actual value less the expected value.
   * Does not support null values.
   */
  static final Correspondence<Integer, Integer> WITHIN_10_OF =
      Correspondence.from(
          // If we were allowed to use lambdas, this would be:
          // (Integer a, Integer e) -> Math.abs(a - e) <= 10,
          new Correspondence.BinaryPredicate<Integer, Integer>() {
            @Override
            public boolean apply(Integer actual, Integer expected) {
              return Math.abs(actual - expected) <= 10;
            }
          },
          "is within 10 of")
          .formattingDiffsUsing(INT_DIFF_FORMATTER);

  /**
   * A correspondence between strings which tests for case-insensitive equality. Supports null
   * expected elements, but throws {@link NullPointerException} on null actual elements.
   */
  static final Correspondence<String, String> CASE_INSENSITIVE_EQUALITY =
      Correspondence.from(
          // If we were allowed to use method references, this would be String::equalsIgnoreCase.
          new Correspondence.BinaryPredicate<String, String>() {
            @Override
            public boolean apply(String actual, String expected) {
              return actual.equalsIgnoreCase(expected);
            }
          },
          "equals (ignoring case)");

  /**
   * A correspondence between strings which tests for case-insensitive equality, with a broken
   * attempt at null-safety. The {@link Correspondence#compare} implementation returns true for
   * (null, null) and false for (non-null, null), but throws {@link NullPointerException} for (null,
   * non-null).
   */
  static final Correspondence<String, String> CASE_INSENSITIVE_EQUALITY_HALF_NULL_SAFE =
      Correspondence.from(
          // If we were allowed to use method references, this would be:
          // TestCorrespondences::equalsIgnoreCaseHalfNullSafe,
          new Correspondence.BinaryPredicate<String, String>() {
            @Override
            public boolean apply(String actual, String expected) {
              return equalsIgnoreCaseHalfNullSafe(actual, expected);
            }
          },
          "equals (ignoring case)");

  private static boolean equalsIgnoreCaseHalfNullSafe(String actual, String expected) {
    if (actual == null && expected == null) {
      return true;
    }
    // Oops! We don't handle the case where actual == null but expected != null.
    return actual.equalsIgnoreCase(expected);
  }

  /**
   * An example value object. It has an optional {@code id} field and a required {@code score}
   * field, both positive integers.
   */
  static final class Record {
    private final int id;
    private final int score;

    static Record create(int id, int score) {
      checkState(id >= 0);
      checkState(score > 0);
      return new Record(id, score);
    }

    // NOTE(review): create requires score > 0 but this allows score == 0 — confirm intended.
    static Record createWithoutId(int score) {
      checkState(score >= 0);
      return new Record(-1, score);
    }

    // id of -1 encodes "no id"; see hasId().
    Record(int id, int score) {
      this.id = id;
      this.score = score;
    }

    boolean hasId() {
      return id >= 0;
    }

    int getId() {
      checkState(hasId());
      return id;
    }

    int getScore() {
      return score;
    }

    boolean hasSameId(Record that) {
      return this.id == that.id;
    }

    @Override
    public boolean equals(@Nullable Object o) {
      if (o instanceof Record) {
        Record that = (Record) o;
        return this.id == that.id && this.score == that.score;
      }
      return false;
    }

    @Override
    public int hashCode() {
      return Objects.hashCode(id, score);
    }

    /**
     * Returns the string form of the record, which is the {@code id} value or the literal {@code
     * none} if none, the literal {@code /}, and the {@code score} value concatenated.
     */
    @Override
    public String toString() {
      return Joiner.on('/').join(hasId() ? getId() : "none", getScore());
    }

    /**
     * If the argument is the string form of a record, returns that record; otherwise returns {@code
     * null}.
     */
    static @Nullable Record parse(String str) {
      List<String> parts = Splitter.on('/').splitToList(str);
      if (parts.size() != 2) {
        return null;
      }
      @Nullable Integer id = parts.get(0).equals("none") ? -1 : Ints.tryParse(parts.get(0));
      @Nullable Integer score = Ints.tryParse(parts.get(1));
      if (id == null || score == null) {
        return null;
      }
      return new Record(id, score);
    }
  }

  /**
   * A correspondence between {@link Record} instances which tests whether their {@code id} values
   * are equal and their {@code score} values are within 10 of each other. Smart diffing is not
   * supported.
   *
   * <p>The {@link Correspondence#compare} implementation supports nulls, such that null corresponds
   * to null only. The {@link Correspondence#formatDiff} implementation does not support nulls.
   */
  static final Correspondence<Record, Record> RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10_NO_DIFF =
      Correspondence.from(
          // If we were allowed to use method references, this would be:
          // TestCorrespondences::recordsAreCloseEnough,
          new Correspondence.BinaryPredicate<Record, Record>() {
            @Override
            public boolean apply(Record actual, Record expected) {
              return recordsAreCloseEnough(actual, expected);
            }
          },
          "has the same id as and a score within 10 of");

  /**
   * A formatter for diffs between records. If the records have the same key, it gives a string of
   * the form {@code "score:<score_diff>"}. If they have different keys, it gives null.
   */
  static final Correspondence.DiffFormatter<Record, Record> RECORD_DIFF_FORMATTER =
      // If we were allowed to use method references, this would be:
      // TestCorrespondences::formatRecordDiff);
      new Correspondence.DiffFormatter<Record, Record>() {
        @Override
        public String formatDiff(Record actual, Record expected) {
          return formatRecordDiff(actual, expected);
        }
      };

  /**
   * A correspondence between {@link Record} instances which tests whether their {@code id} values
   * are equal and their {@code score} values are within 10 of each other. Smart diffing is enabled
   * for records with equal {@code id} values, with a formatted diff showing the actual {@code
   * score} value less the expected {@code score} value preceded by the literal {@code score:}.
   *
   * <p>The {@link Correspondence#compare} implementation supports nulls, such that null corresponds
   * to null only. The {@link Correspondence#formatDiff} implementation does not support nulls.
   */
  static final Correspondence<Record, Record> RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10 =
      RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10_NO_DIFF.formattingDiffsUsing(RECORD_DIFF_FORMATTER);

  /**
   * A correspondence like {@link #RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10} except that the actual
   * values are strings which will be parsed before comparing. If the string does not parse to a
   * record then it does not correspond and is not diffed. Does not support null strings or records.
   */
  static final Correspondence<String, Record> PARSED_RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10 =
      Correspondence.from(
          // If we were allowed to use lambdas, this would be:
          // (String a, Record e) -> {
          //   @Nullable Record actualRecord = Record.parse(a);
          //   return actualRecord != null && recordsAreCloseEnough(actualRecord, e);
          // },
          new Correspondence.BinaryPredicate<String, Record>() {
            @Override
            public boolean apply(String actual, Record expected) {
              @Nullable Record actualRecord = Record.parse(actual);
              return actualRecord != null && recordsAreCloseEnough(actualRecord, expected);
            }
          },
          "parses to a record that " + RECORDS_EQUAL_WITH_SCORE_TOLERANCE_10)
          .formattingDiffsUsing(
              // If we were allowed to use lambdas, this would be:
              // (a, e) -> {
              //   @Nullable Record actualRecord = Record.parse(a);
              //   return actualRecord != null ? formatRecordDiff(actualRecord, e) : null;
              // });
              new Correspondence.DiffFormatter<String, Record>() {
                @Override
                public String formatDiff(String actual, Record expected) {
                  @Nullable Record actualRecord = Record.parse(actual);
                  return actualRecord != null ? formatRecordDiff(actualRecord, expected) : null;
                }
              });

  private static boolean recordsAreCloseEnough(@Nullable Record actual, @Nullable Record expected) {
    if (actual == null) {
      return expected == null;
    }
    if (expected == null) {
      return false;
    }
    return actual.hasSameId(expected) && Math.abs(actual.getScore() - expected.getScore()) <= 10;
  }

  private static String formatRecordDiff(Record actual, Record expected) {
    if (actual.hasId() && expected.hasId() && actual.getId() == expected.getId()) {
      return "score:" + (actual.getScore() - expected.getScore());
    } else {
      return null;
    }
  }

  /**
   * A key function for {@link Record} instances that keys records by their {@code id} values. The
   * key is null if the record has no {@code id}. Does not support null records.
   */
  static final Function<Record, Integer> RECORD_ID =
      new Function<Record, Integer>() {
        @Override
        public @Nullable Integer apply(Record record) {
          return record.hasId() ? record.getId() : null;
        }
      };

  /**
   * A key function for {@link Record} instances that keys records by their {@code id} values. The
   * key is null if the record has no {@code id}. Unlike {@link #RECORD_ID}, it supports null
   * records, mapping them to the key 0.
   */
  static final Function<Record, Integer> NULL_SAFE_RECORD_ID =
      new Function<Record, Integer>() {
        @Override
        public @Nullable Integer apply(Record record) {
          if (record == null) {
            return 0;
          }
          return record.hasId() ? record.getId() : null;
        }
      };

  /**
   * A key function for {@link String} instances that attempts to parse them as {@link Record}
   * instances and keys records by their {@code id} values. The key is null if the string does not
   * parse or the record has no {@code id}. Does not support null strings.
   */
  static final Function<String, Integer> PARSED_RECORD_ID =
      new Function<String, Integer>() {
        @Override
        public @Nullable Integer apply(String str) {
          Record record = Record.parse(str);
          return record != null ? RECORD_ID.apply(record) : null;
        }
      };

  private TestCorrespondences() {}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.myfaces.trinidadinternal.renderkit.core.ppr;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.Stack;

import org.apache.myfaces.trinidad.context.PartialPageContext;
import org.apache.myfaces.trinidad.context.RequestContext;
import org.apache.myfaces.trinidad.logging.TrinidadLogger;
import org.apache.myfaces.trinidadinternal.context.RequestContextImpl;
/**
* Context object which is used to track the targets of a partial
* page render during the partial page rendering pass.
* Clients never need to explicitly create PartialPageContext
* objects.
* <p>
* During the partial rendering pass, some Renderer implementations
* may modify the set of partial targets that are rendered.
* (For example, the FormRenderer adds a partial target for its
* shared hidden fields if any children of the form are rendered.)
* After the partial render pass, getPartialTargets() can be
* called to determine the actual set of partial targets that were
* rendered.
*
* @version $Name: $ ($Revision: adfrt/faces/adf-faces-impl/src/main/java/oracle/adfinternal/view/faces/renderkit/core/ppr/PartialPageContext.java#0 $) $Date: 10-nov-2005.19:02:58 $
*/
public class PartialPageContextImpl extends PartialPageContext
{
  PartialPageContextImpl()
  {
    _targets = new HashMap<String, Boolean>();
    _renderedTargets = new HashSet<String>();
    // Pre-allocate the rendered-target stack
    _currentTargetStack = new ArrayDeque<String>();
  }

  /**
   * Creates a PartialPageContext to use to render the partial targets with
   * the specified ids.
   */
  public PartialPageContextImpl(
    RequestContext reqContext)
  {
    this();

    // Components may add themselves to the partialTargets list in earlier
    // phases (we don't get here until render response). If so, the IDs have
    // been kept on the RequestContext. We'll grab them now and add them to the
    // target list.
    RequestContextImpl requestContext =
      (RequestContextImpl) reqContext;
    Iterator<String> targetIter = requestContext.getPartialTargets();
    while (targetIter.hasNext())
      _targets.put(targetIter.next(), Boolean.FALSE);

    if (_targets.isEmpty())
    {
      _LOG.fine("PPR is about to render without any targets");
    }
  }

  /**
   * Returns the set of partial targets for this rendering pass.
   */
  @Override
  public Iterator<String> getPartialTargets()
  {
    return _targets.keySet().iterator();
  }

  /**
   * Tests whether the specified id is the client id of a UIComponent that
   * should be rendered as part of the partial rendering pass.
   */
  @Override
  public boolean isPartialTarget(String id)
  {
    return (id != null) && _targets.containsKey(id);
  }

  /**
   * Tests whether the specified partial target has been rendered.
   */
  @Override
  public boolean isPartialTargetRendered(String id)
  {
    return _renderedTargets.contains(id);
  }

  /**
   * Adds a new partial target to render.
   * <p>
   * This method may be called during the partial rendering pass to
   * add to the set of partial targets, but only if the pass has
   * not yet been completed. Clients should first check to see
   * whether the partial rendering pass has finished by calling
   * isPartialPassComplete() before calling this method.
   *
   * @param id The id of the partial target to render
   * @see #isPartialPassComplete
   */
  @Override
  public void addPartialTarget(String id)
  {
    _targets.put(id, Boolean.FALSE);
  }

  /**
   * Returns true if we are inside of a partial target.
   */
  @Override
  public boolean isInsidePartialTarget()
  {
    return _getCurrentPartialTarget() != null;
  }

  /**
   * Adds a partial target that has already been rendered; this
   * is needed if the "clientId" of a component does not match
   * up to the top element (or elements).
   */
  @Override
  public void addRenderedPartialTarget(String id)
  {
    _renderedTargets.add(id);
  }

  @Override
  public Iterator<String> getRenderedPartialTargets()
  {
    return _renderedTargets.iterator();
  }

  /**
   * Notifies the PartialPageContext that the specified partial target is
   * about to be rendered.
   * <p>
   * This method is called automatically by Trinidad during the partial
   * rendering pass when a partial target is about to be rendered.
   * Clients should never need to call this method.
   *
   * @param id The ID of the partial target that is about to be rendered
   * @see #popRenderedPartialTarget
   */
  public void pushRenderedPartialTarget(
    String id
    )
  {
    if (_LOG.isFine())
    {
      if (!_targets.containsKey(id))
        _LOG.fine("Rendering partial target {0}, which was not requested", id);
    }

    _targets.put(id, Boolean.TRUE);
    _currentTargetStack.push(id);

    if (_LOG.isFiner())
    {
      _LOG.finer("Pushed rendered PPR target " + id);
    }
  }

  /**
   * Notifies the PartialPageContext that the current partial target
   * has finished rendering.
   * <p>
   * This method is called automatically by Trinidad during the partial
   * rendering pass when a partial target has finished rendering.
   * Clients should never need to call this method.
   */
  public void popRenderedPartialTarget()
  {
    _currentTargetStack.pop();
  }

  /**
   * Returns the ID of the partial target that is currently being
   * rendered, or null if none is being rendered.
   */
  private String _getCurrentPartialTarget()
  {
    // Deque.peek() returns null on an empty deque, matching the old
    // explicit empty() check.
    return _currentTargetStack.peek();
  }

  // Maps each requested target id to TRUE once it has been pushed for rendering
  private final Map<String, Boolean> _targets;
  private final Set<String> _renderedTargets;

  // The stack of partial targets that are currently being rendered.
  // ArrayDeque replaces the previous java.util.Stack (resolving the old
  // FIXME): same LIFO push/pop/peek semantics without Vector's
  // per-operation synchronization.
  private final Deque<String> _currentTargetStack;

  private static final TrinidadLogger _LOG = TrinidadLogger.createTrinidadLogger(PartialPageContextImpl.class);
}
| |
/*
* Copyright 2015-2018 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*/
package org.dbflute.mail.send.embedded.postie;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;

import javax.activation.DataHandler;
import javax.activation.DataSource;
import javax.mail.Address;
import javax.mail.BodyPart;
import javax.mail.MessagingException;
import javax.mail.NoSuchProviderException;
import javax.mail.Session;
import javax.mail.Transport;
import javax.mail.internet.AddressException;
import javax.mail.internet.InternetAddress;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import javax.mail.internet.MimePart;
import javax.mail.internet.MimeUtility;
import javax.mail.util.ByteArrayDataSource;

import org.dbflute.helper.message.ExceptionMessageBuilder;
import org.dbflute.mail.CardView;
import org.dbflute.mail.Postcard;
import org.dbflute.mail.send.SMailAddress;
import org.dbflute.mail.send.SMailPostalMotorbike;
import org.dbflute.mail.send.SMailPostie;
import org.dbflute.mail.send.exception.SMailIllegalStateException;
import org.dbflute.mail.send.exception.SMailMessageSettingFailureException;
import org.dbflute.mail.send.exception.SMailTransportFailureException;
import org.dbflute.mail.send.hook.SMailCallbackContext;
import org.dbflute.mail.send.hook.SMailPreparedMessageHook;
import org.dbflute.mail.send.supplement.async.SMailAsyncStrategy;
import org.dbflute.mail.send.supplement.async.SMailAsyncStrategyNone;
import org.dbflute.mail.send.supplement.attachment.SMailAttachment;
import org.dbflute.mail.send.supplement.filter.SMailAddressFilter;
import org.dbflute.mail.send.supplement.filter.SMailAddressFilterNone;
import org.dbflute.mail.send.supplement.filter.SMailBodyTextFilter;
import org.dbflute.mail.send.supplement.filter.SMailBodyTextFilterNone;
import org.dbflute.mail.send.supplement.filter.SMailCancelFilter;
import org.dbflute.mail.send.supplement.filter.SMailCancelFilterNone;
import org.dbflute.mail.send.supplement.filter.SMailSubjectFilter;
import org.dbflute.mail.send.supplement.filter.SMailSubjectFilterNone;
import org.dbflute.mail.send.supplement.header.SMailMailHeaderStrategy;
import org.dbflute.mail.send.supplement.header.SMailMailHeaderStrategyNone;
import org.dbflute.mail.send.supplement.inetaddr.SMailInternetAddressCreator;
import org.dbflute.mail.send.supplement.inetaddr.SMailNormalInternetAddressCreator;
import org.dbflute.mail.send.supplement.label.SMailLabelStrategy;
import org.dbflute.mail.send.supplement.label.SMailLabelStrategyNone;
import org.dbflute.mail.send.supplement.logging.SMailLoggingStrategy;
import org.dbflute.mail.send.supplement.logging.SMailTypicalLoggingStrategy;
import org.dbflute.mail.send.supplement.retry.SMailRetryStrategy;
import org.dbflute.mail.send.supplement.retry.SMailRetryStrategyNone;
import org.dbflute.optional.OptionalThing;
import org.dbflute.system.DBFluteSystem;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* @author jflute
* @since 0.4.0 (2015/05/05 Tuesday)
*/
public class SMailHonestPostie implements SMailPostie {
// ===================================================================================
// Definition
// ==========
private static final Logger logger = LoggerFactory.getLogger(SMailHonestPostie.class); // for normal logging
private static final SMailCancelFilter noneCancelFilter = new SMailCancelFilterNone();
private static final SMailAddressFilter noneAddressFilter = new SMailAddressFilterNone();
private static final SMailSubjectFilter noneSubjectFilter = new SMailSubjectFilterNone();
private static final SMailBodyTextFilter noneBodyTextFilter = new SMailBodyTextFilterNone();
private static final SMailAsyncStrategy noneAsyncStrategy = new SMailAsyncStrategyNone();
private static final SMailRetryStrategy noneRetryStrategy = new SMailRetryStrategyNone();
private static final SMailLabelStrategy noneLabelStrategy = new SMailLabelStrategyNone();
private static final SMailLoggingStrategy typicalLoggingStrategy = new SMailTypicalLoggingStrategy();
private static final SMailMailHeaderStrategy noneMailHeaderStrategy = new SMailMailHeaderStrategyNone();
private static final SMailInternetAddressCreator normalInternetAddressCreator = new SMailNormalInternetAddressCreator();
// ===================================================================================
// Attribute
// =========
protected final SMailPostalMotorbike motorbike; // not null
protected SMailCancelFilter cancelFilter = noneCancelFilter; // not null
protected SMailAddressFilter addressFilter = noneAddressFilter; // not null
protected SMailSubjectFilter subjectFilter = noneSubjectFilter; // not null
protected SMailBodyTextFilter bodyTextFilter = noneBodyTextFilter; // not null
protected SMailAsyncStrategy asyncStrategy = noneAsyncStrategy; // not null
protected SMailRetryStrategy retryStrategy = noneRetryStrategy; // not null
protected SMailLabelStrategy labelStrategy = noneLabelStrategy; // not null
protected SMailLoggingStrategy loggingStrategy = typicalLoggingStrategy; // not null
protected SMailMailHeaderStrategy mailHeaderStrategy = noneMailHeaderStrategy; // not null
protected SMailInternetAddressCreator internetAddressCreator = normalInternetAddressCreator; // not null
protected boolean training;
protected OptionalThing<String> textTransferEncoding = OptionalThing.empty();
// ===================================================================================
// Constructor
// ===========
public SMailHonestPostie(SMailPostalMotorbike motorbike) {
assertArgumentNotNull("motorbike", motorbike);
this.motorbike = motorbike;
}
public SMailHonestPostie withCancelFilter(SMailCancelFilter cancelFilter) {
assertArgumentNotNull("cancelFilter", cancelFilter);
this.cancelFilter = cancelFilter;
return this;
}
public SMailHonestPostie withAddressFilter(SMailAddressFilter addressFilter) {
assertArgumentNotNull("addressFilter", addressFilter);
this.addressFilter = addressFilter;
return this;
}
public SMailHonestPostie withSubjectFilter(SMailSubjectFilter subjectFilter) {
assertArgumentNotNull("subjectFilter", subjectFilter);
this.subjectFilter = subjectFilter;
return this;
}
public SMailHonestPostie withBodyTextFilter(SMailBodyTextFilter bodyTextFilter) {
assertArgumentNotNull("bodyTextFilter", bodyTextFilter);
this.bodyTextFilter = bodyTextFilter;
return this;
}
public SMailHonestPostie withAsyncStrategy(SMailAsyncStrategy asyncStrategy) {
assertArgumentNotNull("asyncStrategy", asyncStrategy);
this.asyncStrategy = asyncStrategy;
return this;
}
public SMailHonestPostie withRetryStrategy(SMailRetryStrategy retryStrategy) {
assertArgumentNotNull("retryStrategy", retryStrategy);
this.retryStrategy = retryStrategy;
return this;
}
public SMailHonestPostie withLabelStrategy(SMailLabelStrategy labelStrategy) {
assertArgumentNotNull("labelStrategy", labelStrategy);
this.labelStrategy = labelStrategy;
return this;
}
public SMailHonestPostie withLoggingStrategy(SMailLoggingStrategy loggingStrategy) {
assertArgumentNotNull("loggingStrategy", loggingStrategy);
this.loggingStrategy = loggingStrategy;
return this;
}
public SMailHonestPostie withMailHeaderStrategy(SMailMailHeaderStrategy mailHeaderStrategy) {
assertArgumentNotNull("mailHeaderStrategy", mailHeaderStrategy);
this.mailHeaderStrategy = mailHeaderStrategy;
return this;
}
public SMailHonestPostie withInternetAddressCreator(SMailInternetAddressCreator internetAddressCreator) {
assertArgumentNotNull("internetAddressCreator", internetAddressCreator);
this.internetAddressCreator = internetAddressCreator;
return this;
}
public SMailHonestPostie asTraining() {
training = true;
return this;
}
// ===================================================================================
// Deliver
// =======
// Delivers the postcard: builds the message, applies filters/strategies, then sends.
// Cancel, dryrun, and training (checked later in stagingSend()) can each stop actual sending.
@Override
public void deliver(Postcard postcard) {
final SMailPostingMessage message = createMailMessage(postcard); // wraps MimeMessage with logging info
if (isCancel(postcard)) {
return; // no logging here, only filter knows the reason
}
prepareAddress(postcard, message); // from/to/cc/bcc/reply-to, all through the address filter
prepareSubject(postcard, message);
prepareBody(postcard, message); // plain text (+ html text or attachments)
prepareAsync(postcard); // strategy may force async sending
prepareRetry(postcard); // strategy may set retry count/interval
disclosePostingState(postcard, message);
hookPreparedMessage(postcard, message); // thread-local user hook, if registered
if (postcard.isDryrun()) {
logger.debug("*dryrun: postcard={}", postcard); // normal logging here
return;
}
send(postcard, message);
}
protected SMailPostingMessage createMailMessage(CardView view) {
final MimeMessage mimeMessage = createMimeMessage(view, extractNativeSession(view, motorbike));
final Map<String, Object> pushedLoggingMap = view.getPushedLoggingMap();
final Map<String, Map<String, Object>> officeManagedLoggingMap = view.getOfficeManagedLoggingMap();
return new SMailPostingMessage(mimeMessage, motorbike, training, pushedLoggingMap, officeManagedLoggingMap);
}
protected Session extractNativeSession(CardView view, SMailPostalMotorbike motorbike) {
return motorbike.getNativeSession();
}
protected MimeMessage createMimeMessage(CardView view, Session session) {
return new MimeMessage(session);
}
protected boolean isCancel(CardView view) {
return cancelFilter.isCancel(view);
}
// ===================================================================================
// Prepare Address
// ===============
// Sets all addresses on the message, each passed through the address filter:
// from is required and must survive filtering; at least one to-address must survive;
// cc/bcc/reply-to entries are optional and silently dropped when filtered out (empty optional).
protected void prepareAddress(CardView view, SMailPostingMessage message) {
final SMailAddress from = view.getFrom().orElseThrow(() -> { /* already checked, but just in case */
return new SMailIllegalStateException("Not found the from address in the postcard: " + view);
});
final Address filteredFrom = addressFilter.filterFrom(view, toInternetAddress(view, from));
message.setFrom(verifyFilteredFromAddress(view, filteredFrom)); // the filter must not return null here
boolean existsToAddress = false;
for (SMailAddress to : view.getToList()) {
final OptionalThing<Address> opt = addressFilter.filterTo(view, toInternetAddress(view, to));
verifyFilteredOptionalAddress(view, opt).ifPresent(address -> message.addTo(address));
if (opt.isPresent()) {
existsToAddress = true; // at least one to-address survived filtering
}
}
verifyFilteredToAddressExists(view, existsToAddress); // all-filtered-out to-addresses is a failure
for (SMailAddress cc : view.getCcList()) {
final OptionalThing<Address> opt = addressFilter.filterCc(view, toInternetAddress(view, cc));
verifyFilteredOptionalAddress(view, opt).ifPresent(address -> message.addCc(address));
}
for (SMailAddress bcc : view.getBccList()) {
final OptionalThing<Address> opt = addressFilter.filterBcc(view, toInternetAddress(view, bcc));
verifyFilteredOptionalAddress(view, opt).ifPresent(address -> message.addBcc(address));
}
final List<SMailAddress> replyToList = view.getReplyToList();
if (!replyToList.isEmpty()) {
final List<Address> filteredList = new ArrayList<Address>(replyToList.size());
for (SMailAddress replyTo : replyToList) {
final OptionalThing<Address> opt = addressFilter.filterReplyTo(view, toInternetAddress(view, replyTo));
verifyFilteredOptionalAddress(view, opt).ifPresent(address -> filteredList.add(address));
}
message.setReplyTo(filteredList);
}
}
// -----------------------------------------------------
// Label Handling
// --------------
protected Address toInternetAddress(CardView view, SMailAddress address) {
return createAddress(view, address);
}
// Creates an InternetAddress from the SMailAddress; the optional personal
// (display name) is resolved by the label strategy using the receiver locale.
protected Address createAddress(CardView view, SMailAddress address) {
final InternetAddress internetAddress;
try {
internetAddress = createInternetAddress(view, address.getAddress(), isStrictAddress()); // strict syntax check by default
} catch (AddressException e) {
throw new IllegalStateException("Failed to create internet address: " + address, e);
}
address.getPersonal().ifPresent(personal -> {
final String encoding = getPersonalEncoding(); // basic encoding (UTF-8 by default)
try {
final Locale locale = view.getReceiverLocale().orElseGet(() -> getDefaultReceiverLocale());
final String resolved = labelStrategy.resolveLabel(view, locale, personal); // e.g. locale-aware label
internetAddress.setPersonal(resolved, encoding);
} catch (UnsupportedEncodingException e) {
throw new IllegalStateException("Unknown encoding for personal: encoding=" + encoding + " personal=" + personal, e);
}
});
return internetAddress;
}
protected boolean isStrictAddress() {
return true;
}
protected InternetAddress createInternetAddress(CardView view, String address, boolean strict) throws AddressException {
return internetAddressCreator.create(view, address, strict);
}
protected String getPersonalEncoding() {
return getBasicEncoding();
}
protected Locale getDefaultReceiverLocale() {
return DBFluteSystem.getFinalLocale();
}
// -----------------------------------------------------
// Verify Address
// --------------
protected Address verifyFilteredFromAddress(CardView view, Address filteredFrom) {
if (filteredFrom == null) {
String msg = "The filtered from-address should not be null: postcard=" + view;
throw new SMailIllegalStateException(msg);
}
return filteredFrom;
}
protected OptionalThing<Address> verifyFilteredOptionalAddress(CardView view, OptionalThing<Address> opt) {
if (opt == null) {
String msg = "The filtered optional should not be null: postcard=" + view;
throw new SMailIllegalStateException(msg);
}
return opt;
}
protected void verifyFilteredToAddressExists(CardView view, boolean existsToAddress) {
if (!existsToAddress) {
String msg = "Empty to-address by filtering: specifiedToAddress=" + view.getToList();
throw new SMailIllegalStateException(msg);
}
}
// ===================================================================================
// Prepare Subject
// ===============
protected void prepareSubject(CardView view, SMailPostingMessage message) {
message.setSubject(getSubject(view), getSubjectEncoding());
}
protected String getSubject(CardView view) {
return subjectFilter.filterSubject(view, view.getSubject().get());
}
protected String getSubjectEncoding() {
return getBasicEncoding();
}
// ===================================================================================
// Prepare Body
// ============
// Builds the mail body: plain text is required, html text is optional,
// and attachments switch the content to a mixed multipart.
// NOTE(review): html + attachment is explicitly unsupported here (throws).
protected void prepareBody(CardView view, SMailPostingMessage message) {
final String plainText = toCompletePlainText(view);
final OptionalThing<String> optHtmlText = toCompleteHtmlText(view);
message.savePlainTextForDisplay(plainText); // kept for display/logging purposes
message.saveHtmlTextForDisplay(optHtmlText);
final Map<String, SMailAttachment> attachmentMap = view.getAttachmentMap();
final MimeMessage nativeMessage = message.getMimeMessage();
if (attachmentMap.isEmpty()) { // normally here
setupTextPart(view, nativeMessage, plainText, TextType.PLAIN); // plain is required
optHtmlText.ifPresent(htmlText -> {
setupTextPart(view, nativeMessage, htmlText, TextType.HTML);
});
} else { // with attachment
if (optHtmlText.isPresent()) {
throw new SMailIllegalStateException("Unsupported HTML mail with attachment for now: postcard=" + view);
}
try {
final MimeMultipart multipart = createTextWithAttachmentMultipart(view, message, plainText, attachmentMap);
nativeMessage.setContent(multipart);
} catch (MessagingException e) {
String msg = "Failed to set attachment multipart content: postcard=" + view;
throw new SMailIllegalStateException(msg, e);
}
}
}
protected String toCompletePlainText(CardView view) {
return view.toCompletePlainText().map(plainText -> {
return bodyTextFilter.filterBody(view, plainText, /*html*/false);
}).get();
}
protected OptionalThing<String> toCompleteHtmlText(CardView view) {
return view.toCompleteHtmlText().map(htmlText -> {
return bodyTextFilter.filterBody(view, htmlText, /*html*/true);
});
}
// Builds a "multipart/mixed": first the plain text body part, then one part per attachment.
protected MimeMultipart createTextWithAttachmentMultipart(CardView view, SMailPostingMessage message, String plain,
Map<String, SMailAttachment> attachmentMap) throws MessagingException {
final MimeMultipart multipart = newMimeMultipart();
multipart.setSubType("mixed");
multipart.addBodyPart((BodyPart) setupTextPart(view, newMimeBodyPart(), plain, TextType.PLAIN));
for (Entry<String, SMailAttachment> entry : attachmentMap.entrySet()) {
final SMailAttachment attachment = entry.getValue();
multipart.addBodyPart((BodyPart) setupAttachmentPart(view, message, attachment));
}
return multipart;
}
protected MimeMultipart newMimeMultipart() {
return new MimeMultipart();
}
protected MimeBodyPart newMimeBodyPart() {
return new MimeBodyPart();
}
// ===================================================================================
// Text Part
// =========
// Sets the text (plain or html) on the part via a byte-array data source,
// with explicit Content-Transfer-Encoding (unless suppressed) and Content-Type headers.
protected MimePart setupTextPart(CardView view, MimePart part, String text, TextType textType) {
assertArgumentNotNull("view", view);
assertArgumentNotNull("part", part);
assertArgumentNotNull("text", text);
assertArgumentNotNull("textType", textType);
final String textEncoding = getTextEncoding(view); // strategy's encoding or basic (UTF-8)
final ByteBuffer buffer = prepareTextByteBuffer(view, text, textEncoding);
final DataSource source = prepareTextDataSource(view, buffer);
try {
part.setDataHandler(createDataHandler(source));
if (!isSuppressTextTransferEncoding(view)) {
part.setHeader("Content-Transfer-Encoding", getTextTransferEncoding(view)); // e.g. base64
}
part.setHeader("Content-Type", buildTextContentType(view, textType, textEncoding)); // e.g. text/plain; charset="UTF-8"
} catch (MessagingException e) {
throw new SMailMessageSettingFailureException("Failed to set headers: postcard=" + view, e);
}
return part;
}
protected String getTextEncoding(CardView view) {
return mailHeaderStrategy.getTextEncoding(view).orElseGet(() -> getBasicEncoding());
}
protected ByteBuffer prepareTextByteBuffer(CardView view, String text, String encoding) {
final ByteBuffer buffer;
try {
buffer = ByteBuffer.wrap(text.getBytes(encoding));
} catch (UnsupportedEncodingException e) {
throw new SMailMessageSettingFailureException("Unknown encoding: " + encoding, e);
}
return buffer;
}
protected ByteArrayDataSource prepareTextDataSource(CardView view, ByteBuffer buffer) {
return new ByteArrayDataSource(buffer.array(), getTextMimeType(view));
}
protected String getTextMimeType(CardView view) {
return mailHeaderStrategy.getTextMimeType(view).orElseGet(() -> {
return "application/octet-stream"; // as default of MailFlute
});
}
protected boolean isSuppressTextTransferEncoding(CardView view) {
return mailHeaderStrategy.isSuppressTextTransferEncoding();
}
protected String getTextTransferEncoding(CardView view) {
return mailHeaderStrategy.getTextTransferEncoding(view).orElseGet(() -> {
return "base64"; // as default of MailFlute (for UTF-8/base64)
});
}
protected String buildTextContentType(CardView view, TextType textType, String encoding) {
return "text/" + textType.code() + "; charset=\"" + encoding + "\"";
}
// Text part kind; code() is the sub-type used on the Content-Type header ("text/plain", "text/html").
protected static enum TextType {
PLAIN("plain"), HTML("html");
private final String code; // content-type sub-type
private TextType(String code) {
this.code = code;
}
public String code() {
return code;
}
}
// ===================================================================================
// Attachment Part
// ===============
// Builds one attachment body part: data source, transfer-encoding (unless suppressed),
// content-type (charset appended for text/plain) and content-disposition headers.
protected MimePart setupAttachmentPart(CardView view, SMailPostingMessage message, SMailAttachment attachment) {
assertArgumentNotNull("view", view);
assertArgumentNotNull("message", message);
assertArgumentNotNull("attachment", attachment);
final MimePart part = newMimeBodyPart();
final OptionalThing<String> textEncoding = getAttachmentTextEncoding(view, attachment); // exists if text/plain
final DataSource source = prepareAttachmentDataSource(view, message, attachment, textEncoding);
final String contentType = buildAttachmentContentType(view, attachment, textEncoding);
final String contentDisposition = buildAttachmentContentDisposition(view, attachment, textEncoding);
try {
part.setDataHandler(createDataHandler(source));
if (!isSuppressAttachmentTransferEncoding(view)) {
part.setHeader("Content-Transfer-Encoding", getAttachmentTransferEncoding(view)); // e.g. base64
}
part.setHeader("Content-Type", contentType);
part.setHeader("Content-Disposition", contentDisposition);
} catch (MessagingException e) {
String msg = "Failed to set headers: " + attachment;
throw new SMailMessageSettingFailureException(msg, e);
}
return part;
}
protected OptionalThing<String> getAttachmentTextEncoding(CardView view, SMailAttachment attachment) {
return attachment.getTextEncoding(); // always exists if text/plain
}
protected DataSource prepareAttachmentDataSource(CardView view, SMailPostingMessage message, SMailAttachment attachment,
OptionalThing<String> textEncoding) {
final byte[] attachedBytes = readAttachedBytes(view, attachment);
message.saveAttachmentForDisplay(attachment, attachedBytes, textEncoding);
return new ByteArrayDataSource(attachedBytes, getAttachmentMimeType(view));
}
/**
 * Read the attachment's resource stream fully into a byte array.
 * @param view The card view for the current posting. (NotNull)
 * @param attachment The attachment whose stream is read. (NotNull)
 * @return The read bytes. (NotNull)
 * @throws SMailIllegalStateException When reading (or closing) the stream fails.
 */
protected byte[] readAttachedBytes(CardView view, SMailAttachment attachment) {
    // try-with-resources guarantees both streams are closed even when reading fails
    // (the 'Reource' method name typo is the project's API, kept as-is)
    try (InputStream ins = attachment.getReourceStream(); ByteArrayOutputStream ous = new ByteArrayOutputStream()) {
        final byte[] buffer = new byte[8192];
        int length;
        while ((length = ins.read(buffer)) > 0) {
            ous.write(buffer, 0, length);
        }
        return ous.toByteArray();
    } catch (IOException e) {
        String msg = "Failed to read the attached stream as bytes: " + attachment;
        throw new SMailIllegalStateException(msg, e);
    }
}
protected String getAttachmentMimeType(CardView view) {
return mailHeaderStrategy.getAttachmentMimeType(view).orElseGet(() -> {
return "application/octet-stream"; // as default of MailFlute
});
}
/**
 * Build the Content-Type header value for the attachment part,
 * e.g. {@code text/plain; charset=UTF-8; name="sea.txt"}.
 * @param view The card view for the current posting. (NotNull)
 * @param attachment The attachment to build the header for. (NotNull)
 * @param textEncoding The text encoding, always present for text/plain. (NotNull)
 * @return The built header value. (NotNull)
 */
protected String buildAttachmentContentType(CardView view, SMailAttachment attachment, OptionalThing<String> textEncoding) {
    final String contentType = attachment.getContentType();
    final StringBuilder sb = new StringBuilder();
    sb.append(contentType);
    if (contentType.equals("text/plain")) {
        sb.append("; charset=").append(textEncoding.get()); // always exists for text/plain
    }
    final String encodedFilename = getEncodedFilename(view, attachment.getFilenameOnHeader());
    return sb.append("; name=\"").append(encodedFilename).append("\"").toString();
}
/**
 * Build the Content-Disposition header value for the attachment part,
 * e.g. {@code attachment; filename="sea.txt"}.
 * @param view The card view for the current posting. (NotNull)
 * @param attachment The attachment to build the header for. (NotNull)
 * @param textEncoding The text encoding for text attachments (unused here). (NotNull)
 * @return The built header value. (NotNull)
 */
protected String buildAttachmentContentDisposition(CardView view, SMailAttachment attachment, OptionalThing<String> textEncoding) {
    final String encodedFilename = getEncodedFilename(view, attachment.getFilenameOnHeader());
    return "attachment; filename=\"" + encodedFilename + "\"";
}
/**
 * MIME-encode the attachment file name for use in mail headers.
 * @param view The card view for the current posting. (NotNull)
 * @param filename The plain file name to encode. (NotNull)
 * @return The encoded file name. (NotNull)
 * @throws SMailMessageSettingFailureException When the file name encoding is unknown.
 */
protected String getEncodedFilename(CardView view, String filename) {
    final String filenameEncoding = getAttachmentFilenameEncoding(view);
    try {
        return MimeUtility.encodeText(filename, filenameEncoding, "B"); // uses 'B' for various characters
    } catch (UnsupportedEncodingException e) {
        throw new SMailMessageSettingFailureException("Unknown encoding: " + filenameEncoding, e);
    }
}
protected String getAttachmentFilenameEncoding(CardView view) {
return getBasicEncoding();
}
protected boolean isSuppressAttachmentTransferEncoding(CardView view) {
return mailHeaderStrategy.isSuppressAttachmentTransferEncoding();
}
protected String getAttachmentTransferEncoding(CardView view) {
return mailHeaderStrategy.getAttachmentTransferEncoding(view).orElseGet(() -> {
return "base64"; // as default of MailFlute (no change from the beginning)
});
}
// ===================================================================================
// Prepare Async/Retry
// ===================
protected void prepareAsync(Postcard postcard) {
if (asyncStrategy.alwaysAsync(postcard) && !postcard.isAsync()) {
logger.debug("...Calling async() automatically by strategy: {}", asyncStrategy);
postcard.async();
}
}
/**
 * Apply the retry strategy: when the strategy supplies retry settings and the
 * postcard has none of its own, register them on the postcard.
 */
protected void prepareRetry(Postcard postcard) {
    retryStrategy.retry(postcard, (retryCount, intervalMillis) -> {
        if (postcard.getRetryCount() == 0) { // not specified by the postcard itself
            // fixed copy-paste bug: log the retry strategy, not the async strategy
            logger.debug("...Calling retry({}, {}) automatically by strategy: {}", retryCount, intervalMillis, retryStrategy);
            postcard.retry(retryCount, intervalMillis);
        }
    });
}
// ===================================================================================
// Disclose
// ========
protected void disclosePostingState(Postcard postcard, SMailPostingMessage message) {
postcard.officeDisclosePostingState(message);
}
// ===================================================================================
// Callback Context
// ================
protected void hookPreparedMessage(Postcard postcard, final SMailPostingMessage message) {
if (SMailCallbackContext.isExistPreparedMessageHookOnThread()) {
final SMailCallbackContext context = SMailCallbackContext.getCallbackContextOnThread();
final SMailPreparedMessageHook hook = context.getPreparedMessageHook();
hook.hookPreparedMessage(postcard, message);
}
}
// ===================================================================================
// Send Message
// ============
// Sends now or hands the work to the async strategy, depending on the postcard.
protected void send(Postcard postcard, SMailPostingMessage message) {
if (needsAsync(postcard)) {
asyncStrategy.async(postcard, () -> doSend(postcard, message));
} else {
doSend(postcard, message);
}
}
// Async only when requested and not forced synchronous (definitely-sync wins).
protected boolean needsAsync(Postcard postcard) {
return postcard.isAsync() && !postcard.isDefinitelySync();
}
// -----------------------------------------------------
// with Logging
// ------------
// Sends with before/finally logging; when the postcard suppresses send failure,
// exceptions are logged instead of propagated (best-effort mode).
protected void doSend(Postcard postcard, SMailPostingMessage message) {
logMailBefore(postcard, message);
RuntimeException cause = null;
try {
retryableSend(postcard, message);
} catch (RuntimeException e) {
cause = e; // remembered for the finally logging
if (postcard.isSuppressSendFailure()) {
logSuppressedCause(postcard, message, e);
} else {
throw e;
}
} finally {
logMailFinally(postcard, message, cause); // cause is null on success
}
}
protected void logMailBefore(Postcard postcard, SMailPostingMessage message) {
loggingStrategy.logMailBefore(postcard, message); // you can also make EML file here by overriding
}
protected void logSuppressedCause(Postcard postcard, SMailPostingMessage message, RuntimeException e) {
loggingStrategy.logSuppressedCause(postcard, message, e);
}
protected void logMailFinally(Postcard postcard, SMailPostingMessage message, RuntimeException cause) {
loggingStrategy.logMailFinally(postcard, message, OptionalThing.ofNullable(cause, () -> {
throw new IllegalStateException("Not found the exception for the mail finally: " + postcard);
}));
}
// -----------------------------------------------------
// Retryable
// ---------
// Sends with retry: challengeCount 0 is the first (normal) try, so the loop runs
// at most retryCount + 1 times. The first exception is kept as the failure cause;
// when the limit is exceeded, handleSendFailure() throws with that cause.
protected void retryableSend(Postcard postcard, SMailPostingMessage message) {
final int retryCount = getRetryCount(postcard); // not negative, zero means no retry
final long intervalMillis = getIntervalMillis(postcard); // not negative
int challengeCount = 0;
Exception firstCause = null;
while (true) {
if (challengeCount > retryCount) { // over retry limit, cannot send
if (firstCause != null) { // just in case
handleSendFailure(postcard, message, firstCause); // always throws
}
break;
}
try {
if (challengeCount > 0) { // means retry sending
waitBeforeRetrySending(intervalMillis);
}
stagingSend(postcard, message); // no-op when training mode
if (challengeCount > 0) { // means retry success
logRetrySuccess(postcard, message, challengeCount, firstCause);
}
break;
} catch (RuntimeException | MessagingException e) {
if (firstCause == null) { // first cause may be most important
firstCause = e;
}
}
++challengeCount;
}
}
protected int getRetryCount(Postcard postcard) { // you can override if all mails needs retry
return postcard.getRetryCount();
}
protected long getIntervalMillis(Postcard postcard) { // you can as well
return postcard.getIntervalMillis();
}
/**
 * Wait for the retry interval before the next send attempt (no wait when zero).
 * @param intervalMillis The wait time in milliseconds, not negative.
 */
protected void waitBeforeRetrySending(long intervalMillis) {
    if (intervalMillis > 0) {
        try {
            Thread.sleep(intervalMillis);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the flag so callers can still observe the interrupt
        }
    }
}
protected void logRetrySuccess(Postcard postcard, SMailPostingMessage message, int challengeCount, Exception firstCause) {
loggingStrategy.logRetrySuccess(postcard, message, challengeCount, firstCause);
}
/**
 * Build a descriptive failure message (postcard and posting message) and throw.
 * @param postcard The postcard being sent. (NotNull)
 * @param message The posting message that failed. (NotNull)
 * @param e The first cause of the send failure. (NotNull)
 * @throws SMailTransportFailureException Always, wrapping the cause.
 */
protected void handleSendFailure(Postcard postcard, SMailPostingMessage message, Exception e) {
    final ExceptionMessageBuilder br = new ExceptionMessageBuilder();
    br.addNotice("Failed to send the mail message.");
    br.addItem("Postcard");
    br.addElement(postcard);
    br.addItem("Posting Message");
    br.addElement(message); // removed stray hash-code element (debug residue) that only added noise
    final String msg = br.buildExceptionMessage();
    throw new SMailTransportFailureException(msg, e);
}
// -----------------------------------------------------
// Staging
// -------
// you can override this to switch sender to e.g. remote api
protected void stagingSend(Postcard postcard, SMailPostingMessage message) throws MessagingException {
if (!training) {
actuallySend(message);
}
}
// -----------------------------------------------------
// Actually
// --------
// Actually connects and sends via the JavaMail transport; the transport is
// closed in any case (closeTransport() swallows close failures with a warning).
protected void actuallySend(SMailPostingMessage message) throws MessagingException {
final Transport transport = prepareTransport();
try {
final MimeMessage mimeMessage = message.getMimeMessage();
transport.connect(); // authenticated by session's authenticator
transport.sendMessage(mimeMessage, mimeMessage.getAllRecipients());
message.acceptSentTransport(transport); // keep e.g. last return code
} finally {
closeTransport(transport);
}
}
protected Transport prepareTransport() throws NoSuchProviderException {
return motorbike.getNativeSession().getTransport();
}
/**
 * Close the transport, logging (not rethrowing) any close failure
 * so a broken close never masks the send result.
 * @param transport The transport to close. (NotNull)
 */
protected void closeTransport(Transport transport) {
    try {
        transport.close();
    } catch (MessagingException continued) {
        // parameterized logging; the exception as last argument keeps the stack trace
        logger.warn("Failed to close the transport: {}", transport, continued);
    }
}
// ===================================================================================
// Assist Logic
// ============
protected String getBasicEncoding() {
return "UTF-8"; // as default of MailFlute
}
protected DataHandler createDataHandler(DataSource source) {
return new DataHandler(source);
}
// ===================================================================================
// General Helper
// ==============
/**
 * Assert that the argument is not null.
 * @param variableName The name of the asserted variable, used in the message. (NotNull)
 * @param value The argument value to assert. (NotNull)
 * @throws IllegalArgumentException When the variable name or the value is null.
 */
protected void assertArgumentNotNull(String variableName, Object value) {
    if (variableName == null) { // broken caller: even the name is missing
        throw new IllegalArgumentException("The variableName should not be null.");
    }
    if (value != null) {
        return; // normal case
    }
    throw new IllegalArgumentException("The argument '" + variableName + "' should not be null.");
}
// ===================================================================================
// Accessor
// ========
public boolean isTraining() {
return training;
}
}
| |
package org.apache.solr.cloud;
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.google.common.collect.Lists;
import org.apache.solr.client.solrj.SolrRequest;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.impl.CloudSolrClient;
import org.apache.solr.client.solrj.request.CollectionAdminRequest;
import org.apache.solr.client.solrj.request.QueryRequest;
import org.apache.solr.common.SolrException;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.cloud.ClusterState;
import org.apache.solr.common.cloud.Replica;
import org.apache.solr.common.cloud.Slice;
import org.apache.solr.common.params.CollectionParams;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.apache.solr.common.params.ShardParams;
import org.apache.solr.common.util.NamedList;
import org.apache.zookeeper.KeeperException;
import org.junit.Test;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARD_UNIQUE;
public class TestCollectionAPI extends ReplicaPropertiesBase {
public static final String COLLECTION_NAME = "testcollection";
public static final String COLLECTION_NAME1 = "testcollection1";
// Configures the base test: string-id schema and two slices for the base collections.
public TestCollectionAPI() {
schemaString = "schema15.xml"; // we need a string id
sliceCount = 2;
}
// Single entry point: creates two collections, waits for them to be active,
// then runs each Collection-API scenario in sequence (they share the cluster state,
// so later assertions depend on earlier setup).
@Test
@ShardsFixed(num = 2)
public void test() throws Exception {
try (CloudSolrClient client = createCloudClient(null)) {
createCollection(null, COLLECTION_NAME, 2, 2, 2, client, null, "conf1"); // 2 shards, 2 replicas
createCollection(null, COLLECTION_NAME1, 1, 1, 1, client, null, "conf1"); // minimal collection
}
waitForCollection(cloudClient.getZkStateReader(), COLLECTION_NAME, 2);
waitForCollection(cloudClient.getZkStateReader(), COLLECTION_NAME1, 1);
waitForRecoveriesToFinish(COLLECTION_NAME, false);
waitForRecoveriesToFinish(COLLECTION_NAME1, false);
listCollection();
clusterStatusNoCollection();
clusterStatusWithCollection();
clusterStatusWithCollectionAndShard();
clusterStatusWithRouteKey();
clusterStatusAliasTest();
clusterStatusRolesTest();
replicaPropTest();
clusterStatusZNodeVersion();
}
// CLUSTERSTATUS restricted to one collection and one shard:
// the response must contain only that collection and only that shard.
private void clusterStatusWithCollectionAndShard() throws IOException, SolrServerException {
try (CloudSolrClient client = createCloudClient(null)) {
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
params.set("collection", COLLECTION_NAME);
params.set("shard", SHARD1);
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> rsp = client.request(request);
NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
assertNotNull("Cluster state should not be null", cluster);
NamedList<Object> collections = (NamedList<Object>) cluster.get("collections");
assertNotNull("Collections should not be null in cluster state", collections);
assertNotNull(collections.get(COLLECTION_NAME));
assertEquals(1, collections.size()); // only the requested collection
Map<String, Object> collection = (Map<String, Object>) collections.get(COLLECTION_NAME);
Map<String, Object> shardStatus = (Map<String, Object>) collection.get("shards");
assertEquals(1, shardStatus.size()); // only the requested shard
Map<String, Object> selectedShardStatus = (Map<String, Object>) shardStatus.get(SHARD1);
assertNotNull(selectedShardStatus);
}
}
/**
 * Verifies that the LIST action reports every collection this test depends on.
 */
private void listCollection() throws IOException, SolrServerException {
  try (CloudSolrClient solrClient = createCloudClient(null)) {
    final ModifiableSolrParams listParams = new ModifiableSolrParams();
    listParams.set("action", CollectionParams.CollectionAction.LIST.toString());
    final SolrRequest listRequest = new QueryRequest(listParams);
    listRequest.setPath("/admin/collections");
    NamedList<Object> response = solrClient.request(listRequest);
    List<String> collectionNames = (List<String>) response.get("collections");
    // All four collections known to this suite must appear in the listing.
    assertTrue("control_collection was not found in list", collectionNames.contains("control_collection"));
    assertTrue(DEFAULT_COLLECTION + " was not found in list", collectionNames.contains(DEFAULT_COLLECTION));
    assertTrue(COLLECTION_NAME + " was not found in list", collectionNames.contains(COLLECTION_NAME));
    assertTrue(COLLECTION_NAME1 + " was not found in list", collectionNames.contains(COLLECTION_NAME1));
  }
}
/**
 * Verifies that an unfiltered CLUSTERSTATUS returns all collections and a
 * non-empty live_nodes list.
 */
private void clusterStatusNoCollection() throws Exception {
  try (CloudSolrClient solrClient = createCloudClient(null)) {
    final ModifiableSolrParams statusParams = new ModifiableSolrParams();
    statusParams.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
    final SolrRequest statusRequest = new QueryRequest(statusParams);
    statusRequest.setPath("/admin/collections");
    NamedList<Object> response = solrClient.request(statusRequest);
    NamedList<Object> clusterNode = (NamedList<Object>) response.get("cluster");
    assertNotNull("Cluster state should not be null", clusterNode);
    NamedList<Object> collectionList = (NamedList<Object>) clusterNode.get("collections");
    assertNotNull("Collections should not be null in cluster state", collectionList);
    assertNotNull(collectionList.get(COLLECTION_NAME1));
    // Presumably control_collection + DEFAULT_COLLECTION + the two created in
    // test() — 4 total.
    assertEquals(4, collectionList.size());
    List<String> liveNodes = (List<String>) clusterNode.get("live_nodes");
    assertNotNull("Live nodes should not be null", liveNodes);
    assertFalse(liveNodes.isEmpty());
  }
}
/**
 * Verifies that CLUSTERSTATUS filtered by collection returns only that
 * collection, including its configName.
 */
private void clusterStatusWithCollection() throws IOException, SolrServerException {
  try (CloudSolrClient solrClient = createCloudClient(null)) {
    final ModifiableSolrParams statusParams = new ModifiableSolrParams();
    statusParams.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
    statusParams.set("collection", COLLECTION_NAME);
    final SolrRequest statusRequest = new QueryRequest(statusParams);
    statusRequest.setPath("/admin/collections");
    NamedList<Object> response = solrClient.request(statusRequest);
    NamedList<Object> clusterNode = (NamedList<Object>) response.get("cluster");
    assertNotNull("Cluster state should not be null", clusterNode);
    NamedList<Object> collectionList = (NamedList<Object>) clusterNode.get("collections");
    assertNotNull("Collections should not be null in cluster state", collectionList);
    // Exactly the requested collection, created with config set "conf1".
    assertEquals(1, collectionList.size());
    Map<String, Object> collectionProps = (Map<String, Object>) collectionList.get(COLLECTION_NAME);
    assertNotNull(collectionProps);
    assertEquals("conf1", collectionProps.get("configName"));
  }
}
/**
 * Verifies that CLUSTERSTATUS exposes a znodeVersion for a collection and that
 * the version increases after a state-changing operation (ADDREPLICA).
 */
private void clusterStatusZNodeVersion() throws Exception {
String cname = "clusterStatusZNodeVersion";
try (CloudSolrClient client = createCloudClient(null)) {
// Create a fresh single-shard, single-replica collection so this test fully
// controls its state transitions.
CollectionAdminRequest.Create create = new CollectionAdminRequest.Create()
.setCollectionName(cname)
.setMaxShardsPerNode(1)
.setNumShards(1)
.setReplicationFactor(1)
.setConfigName("conf1");
create.process(client);
waitForRecoveriesToFinish(cname, true);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
params.set("collection", cname);
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> rsp = client.request(request);
NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
assertNotNull("Cluster state should not be null", cluster);
NamedList<Object> collections = (NamedList<Object>) cluster.get("collections");
assertNotNull("Collections should not be null in cluster state", collections);
assertEquals(1, collections.size());
Map<String, Object> collection = (Map<String, Object>) collections.get(cname);
assertNotNull(collection);
assertEquals("conf1", collection.get("configName"));
// Baseline version before mutating the collection.
Integer znodeVersion = (Integer) collection.get("znodeVersion");
assertNotNull(znodeVersion);
// Add a replica — a state change that should bump the state znode version.
CollectionAdminRequest.AddReplica addReplica = new CollectionAdminRequest.AddReplica()
.setCollectionName(cname)
.setShardName("shard1");
addReplica.process(client);
waitForRecoveriesToFinish(cname, true);
// Re-query the same request and confirm the version advanced.
rsp = client.request(request);
cluster = (NamedList<Object>) rsp.get("cluster");
collections = (NamedList<Object>) cluster.get("collections");
collection = (Map<String, Object>) collections.get(cname);
Integer newVersion = (Integer) collection.get("znodeVersion");
assertNotNull(newVersion);
assertTrue(newVersion > znodeVersion);
}
}
/**
 * Verifies that CLUSTERSTATUS with a _route_ key narrows the reported shards
 * to the single shard owning that routing key.
 */
private void clusterStatusWithRouteKey() throws IOException, SolrServerException {
  try (CloudSolrClient solrClient = createCloudClient(DEFAULT_COLLECTION)) {
    SolrInputDocument routedDoc = new SolrInputDocument();
    routedDoc.addField("id", "a!123"); // goes to shard2. see ShardRoutingTest for details
    solrClient.add(routedDoc);
    solrClient.commit();
    final ModifiableSolrParams statusParams = new ModifiableSolrParams();
    statusParams.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
    statusParams.set("collection", DEFAULT_COLLECTION);
    statusParams.set(ShardParams._ROUTE_, "a!");
    final SolrRequest statusRequest = new QueryRequest(statusParams);
    statusRequest.setPath("/admin/collections");
    NamedList<Object> response = solrClient.request(statusRequest);
    NamedList<Object> clusterNode = (NamedList<Object>) response.get("cluster");
    assertNotNull("Cluster state should not be null", clusterNode);
    NamedList<Object> collectionList = (NamedList<Object>) clusterNode.get("collections");
    assertNotNull("Collections should not be null in cluster state", collectionList);
    assertNotNull(collectionList.get(DEFAULT_COLLECTION));
    assertEquals(1, collectionList.size());
    Map<String, Object> collectionProps = (Map<String, Object>) collectionList.get(DEFAULT_COLLECTION);
    assertEquals("conf1", collectionProps.get("configName"));
    Map<String, Object> shardMap = (Map<String, Object>) collectionProps.get("shards");
    // Only the shard owning route key "a!" may be reported.
    assertEquals(1, shardMap.size());
    Map<String, Object> routedShard = (Map<String, Object>) shardMap.get(SHARD2);
    assertNotNull(routedShard);
  }
}
/**
 * Creates an alias spanning two collections, then verifies CLUSTERSTATUS
 * reports both the cluster-wide alias map and the per-collection "aliases"
 * list.
 */
private void clusterStatusAliasTest() throws Exception {
try (CloudSolrClient client = createCloudClient(null)) {
// First create the alias "myalias" -> DEFAULT_COLLECTION,COLLECTION_NAME.
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CREATEALIAS.toString());
params.set("name", "myalias");
params.set("collections", DEFAULT_COLLECTION + "," + COLLECTION_NAME);
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
client.request(request);
// Then ask for cluster status of one member collection.
params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
params.set("collection", DEFAULT_COLLECTION);
request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> rsp = client.request(request);
NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
assertNotNull("Cluster state should not be null", cluster);
// The cluster-level alias map must contain the full target list.
Map<String, String> aliases = (Map<String, String>) cluster.get("aliases");
assertNotNull("Aliases should not be null", aliases);
assertEquals("Alias: myalias not found in cluster status",
DEFAULT_COLLECTION + "," + COLLECTION_NAME, aliases.get("myalias"));
NamedList<Object> collections = (NamedList<Object>) cluster.get("collections");
assertNotNull("Collections should not be null in cluster state", collections);
assertNotNull(collections.get(DEFAULT_COLLECTION));
Map<String, Object> collection = (Map<String, Object>) collections.get(DEFAULT_COLLECTION);
assertEquals("conf1", collection.get("configName"));
// The member collection must also list the aliases that reference it.
List<String> collAlias = (List<String>) collection.get("aliases");
assertEquals("Aliases not found", Lists.newArrayList("myalias"), collAlias);
}
}
/**
 * Assigns the "overseer" role to the shard1 leader node via ADDROLE, then
 * verifies CLUSTERSTATUS reports that node under the cluster "roles" section.
 */
private void clusterStatusRolesTest() throws Exception {
try (CloudSolrClient client = createCloudClient(null)) {
client.connect();
// Use the current shard1 leader's node as the role target.
Replica replica = client.getZkStateReader().getLeaderRetry(DEFAULT_COLLECTION, SHARD1);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.ADDROLE.toString());
params.set("node", replica.getNodeName());
params.set("role", "overseer");
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
client.request(request);
// Now fetch cluster status and check the roles section.
params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.CLUSTERSTATUS.toString());
params.set("collection", DEFAULT_COLLECTION);
request = new QueryRequest(params);
request.setPath("/admin/collections");
NamedList<Object> rsp = client.request(request);
NamedList<Object> cluster = (NamedList<Object>) rsp.get("cluster");
assertNotNull("Cluster state should not be null", cluster);
Map<String, Object> roles = (Map<String, Object>) cluster.get("roles");
assertNotNull("Role information should not be null", roles);
List<String> overseer = (List<String>) roles.get("overseer");
assertNotNull(overseer);
// Exactly the one node added above should carry the overseer role.
assertEquals(1, overseer.size());
assertTrue(overseer.contains(replica.getNodeName()));
}
}
/**
 * Exercises ADDREPLICAPROP / DELETEREPLICAPROP end to end:
 * required-parameter validation, shard-unique handling of the
 * preferredLeader property, arbitrary user properties, and protection of
 * the reserved replica keys (state, core, node_name, base_url).
 */
private void replicaPropTest() throws Exception {
try (CloudSolrClient client = createCloudClient(null)) {
client.connect();
// Pick concrete shard/replica names out of live cluster state so the
// property commands below target real replicas.
Map<String, Slice> slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME).getSlicesMap();
List<String> sliceList = new ArrayList<>(slices.keySet());
String c1_s1 = sliceList.get(0);
List<String> replicasList = new ArrayList<>(slices.get(c1_s1).getReplicasMap().keySet());
String c1_s1_r1 = replicasList.get(0);
String c1_s1_r2 = replicasList.get(1);
String c1_s2 = sliceList.get(1);
replicasList = new ArrayList<>(slices.get(c1_s2).getReplicasMap().keySet());
String c1_s2_r1 = replicasList.get(0);
// NOTE(review): c1_s2_r2 is never used below — kept as-is.
String c1_s2_r2 = replicasList.get(1);
slices = client.getZkStateReader().getClusterState().getCollection(COLLECTION_NAME1).getSlicesMap();
sliceList = new ArrayList<>(slices.keySet());
String c2_s1 = sliceList.get(0);
replicasList = new ArrayList<>(slices.get(c2_s1).getReplicasMap().keySet());
String c2_s1_r1 = replicasList.get(0);
ModifiableSolrParams params = new ModifiableSolrParams();
params.set("action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString());
// Ensure we get error returns when omitting required parameters.
// Each missingParamsError call sends the params accumulated so far,
// which are still incomplete until property.value is set.
missingParamsError(client, params);
params.set("collection", COLLECTION_NAME);
missingParamsError(client, params);
params.set("shard", c1_s1);
missingParamsError(client, params);
params.set("replica", c1_s1_r1);
missingParamsError(client, params);
params.set("property", "preferredLeader");
missingParamsError(client, params);
params.set("property.value", "true");
SolrRequest request = new QueryRequest(params);
request.setPath("/admin/collections");
client.request(request);
// The above should have set exactly one preferredleader...
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r2,
"property", "preferredLeader",
"property.value", "true");
// The preferred leader property for shard1 should have switched to the other replica.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME,
"shard", c1_s2,
"replica", c1_s2_r1,
"property", "preferredLeader",
"property.value", "true");
// Now we should have a preferred leader in both shards...
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME1,
"shard", c2_s1,
"replica", c2_s1_r1,
"property", "preferredLeader",
"property.value", "true");
// Now we should have three preferred leaders.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME1, c2_s1_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toString(),
"collection", COLLECTION_NAME1,
"shard", c2_s1,
"replica", c2_s1_r1,
"property", "preferredLeader");
// Now we should have two preferred leaders.
// But first we have to wait for the overseer to finish the action
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
// Try adding an arbitrary property to one that has the leader property
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "testprop",
"property.value", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r2,
"property", "prop",
"property.value", "silly");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
// Overwriting an existing property with shardUnique=true should succeed.
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "testprop",
"property.value", "nonsense",
SHARD_UNIQUE, "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "nonsense");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
// Using the explicit "property."-prefixed name must address the same property.
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "property.testprop",
"property.value", "true",
SHARD_UNIQUE, "false");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.testprop", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "property.testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
// Setting shardUnique=false for preferredLeader must be rejected.
try {
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toString(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "preferredLeader",
"property.value", "true",
SHARD_UNIQUE, "false");
fail("Should have thrown an exception, setting shardUnique=false is not allowed for 'preferredLeader'.");
} catch (SolrException se) {
assertTrue("Should have received a specific error message",
se.getMessage().contains("with the shardUnique parameter set to something other than 'true'"));
}
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.preferredleader", "true");
verifyPropertyVal(client, COLLECTION_NAME, c1_s2_r1, "property.preferredleader", "true");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.testprop");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r2, "property.prop", "silly");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME, "property.preferredLeader");
verifyUniquePropertyWithinCollection(client, COLLECTION_NAME1, "property.preferredLeader");
// Snapshot the reserved replica keys, then try to set/delete properties
// with those names — the real replica values must survive untouched.
Map<String, String> origProps = getProps(client, COLLECTION_NAME, c1_s1_r1,
"state", "core", "node_name", "base_url");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "state",
"property.value", "state_bad");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "core",
"property.value", "core_bad");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "node_name",
"property.value", "node_name_bad");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.ADDREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "base_url",
"property.value", "base_url_bad");
// The above should be on new properties.
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.state", "state_bad");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.core", "core_bad");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.node_name", "node_name_bad");
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, "property.base_url", "base_url_bad");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "state");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "core");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "node_name");
doPropertyAction(client,
"action", CollectionParams.CollectionAction.DELETEREPLICAPROP.toLower(),
"collection", COLLECTION_NAME,
"shard", c1_s1,
"replica", c1_s1_r1,
"property", "base_url");
// They better not have been changed!
for (Map.Entry<String, String> ent : origProps.entrySet()) {
verifyPropertyVal(client, COLLECTION_NAME, c1_s1_r1, ent.getKey(), ent.getValue());
}
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.state");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.core");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.node_name");
verifyPropertyNotPresent(client, COLLECTION_NAME, c1_s1_r1, "property.base_url");
}
}
/**
 * Snapshots the current value of each named property for the given replica.
 * The returned map always contains every requested key; values may be blank.
 * Fails the test if the collection/replica pair cannot be found.
 */
private Map<String, String> getProps(CloudSolrClient client, String collectionName, String replicaName, String... props)
    throws KeeperException, InterruptedException {
  client.getZkStateReader().updateClusterState();
  ClusterState clusterState = client.getZkStateReader().getClusterState();
  Replica replica = clusterState.getReplica(collectionName, replicaName);
  if (replica == null) {
    fail("Could not find collection/replica pair! " + collectionName + "/" + replicaName);
  }
  Map<String, String> snapshot = new HashMap<>();
  for (String propName : props) {
    snapshot.put(propName, replica.getStr(propName));
  }
  return snapshot;
}
/**
 * Sends the given (deliberately incomplete) params and asserts the server
 * rejects the request with a "missing required parameter" SolrException.
 */
private void missingParamsError(CloudSolrClient client, ModifiableSolrParams origParams)
    throws IOException, SolrServerException {
  try {
    SolrRequest incompleteRequest = new QueryRequest(origParams);
    incompleteRequest.setPath("/admin/collections");
    client.request(incompleteRequest);
    fail("Should have thrown a SolrException due to lack of a required parameter.");
  } catch (SolrException se) {
    String lowered = se.getMessage().toLowerCase(Locale.ROOT);
    assertTrue("Should have gotten a specific message back mentioning 'missing required parameter'. Got: " + se.getMessage(),
        lowered.contains("missing required parameter:"));
  }
}
}
| |
/* ###
* IP: GHIDRA
* REVIEWED: YES
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package ghidra.app.util.xml;
import ghidra.app.util.importer.MessageLog;
import ghidra.framework.options.*;
import ghidra.program.model.address.*;
import ghidra.program.model.listing.*;
import ghidra.program.model.util.*;
import ghidra.util.XmlProgramUtilities;
import ghidra.util.exception.*;
import ghidra.util.task.TaskMonitor;
import ghidra.util.xml.*;
import ghidra.xml.XmlElement;
import ghidra.xml.XmlPullParser;
import java.awt.Color;
import java.awt.Font;
import java.io.File;
import java.util.*;
import javax.swing.KeyStroke;
import org.xml.sax.SAXParseException;
class PropertiesXmlMgr {
private final static String PROPERTY_LIST_CATEGORY_DELIMITER = Options.DELIMITER_STRING;
private Program program;
private PropertyMapManager propMapMgr;
private AddressFactory factory;
private MessageLog log;
/**
 * Creates a manager that reads/writes a program's properties as XML.
 * Caches the program's user property manager and address factory for use by
 * the read/write methods.
 * @param program the program whose properties are imported/exported
 * @param log sink for non-fatal messages produced during processing
 */
PropertiesXmlMgr(Program program, MessageLog log) {
this.program = program;
this.propMapMgr = program.getUsrPropertyManager();
this.factory = program.getAddressFactory();
this.log = log;
}
///////////////////////////////////////////////////////////////////////////////////////
// XML READ CURRENT DTD //
///////////////////////////////////////////////////////////////////////////////////////
/**
 * Reads a PROPERTIES element and all nested PROPERTY elements from the parser.
 * @param parser positioned just before the PROPERTIES start tag
 * @param overwrite true to allow replacing existing properties
 * @param monitor cancellation monitor
 * @throws SAXParseException if the expected start/end tags are not found
 * @throws CancelledException if the monitor is cancelled mid-read
 */
void read(XmlPullParser parser, boolean overwrite, TaskMonitor monitor)
    throws SAXParseException, CancelledException {
  XmlElement current = parser.next();
  boolean validStart = current.isStart() && current.getName().equals("PROPERTIES");
  if (!validStart) {
    throw new SAXParseException("Expected PROPERTIES start tag", null, null,
      parser.getLineNumber(), parser.getColumnNumber());
  }
  // Consume each PROPERTY child until some other element appears.
  for (current = parser.next(); current.getName().equals("PROPERTY"); current = parser.next()) {
    if (monitor.isCancelled()) {
      throw new CancelledException();
    }
    processProperty(current, parser, overwrite);
  }
  // Whatever broke the loop must be the PROPERTIES end tag.
  boolean validEnd = !current.isStart() && current.getName().equals("PROPERTIES");
  if (!validEnd) {
    throw new SAXParseException("Expected PROPERTY element or PROPERTIES end tag", null,
      null, parser.getLineNumber(), parser.getColumnNumber());
  }
}
/**
 * Processes one PROPERTY element.  An element carrying an ADDRESS attribute is
 * routed to the user property maps; one without is routed to a program options
 * list.  Processing failures are logged and the element's subtree is skipped
 * so the overall import can continue.
 * @throws SAXParseException if NAME is missing or the end tag is malformed
 */
private void processProperty(XmlElement element, XmlPullParser parser, boolean overwrite)
throws SAXParseException {
String name = element.getAttribute("NAME");
if (name == null) {
throw new SAXParseException("NAME attribute missing for PROPERTY element", null, null,
parser.getLineNumber(), parser.getColumnNumber());
}
String addrStr = element.getAttribute("ADDRESS");
try {
if (addrStr != null) {
Address addr = XmlProgramUtilities.parseAddress(factory, addrStr);
if (addr == null) {
throw new AddressFormatException("Incompatible Property [" + name +
"] Address " + addrStr + " at Line: " + parser.getLineNumber());
}
processPropertyMapEntry(addr, name, element, overwrite, parser);
}
else {
processPropertyListEntry(name, element, overwrite);
}
}
catch (Exception e) {
// Non-fatal: record the problem and skip past this property's subtree.
log.appendException(e);
parser.discardSubTree(element);
return;
}
// Consume the matching PROPERTY end tag.
element = parser.next();
if (element.isStart() || !element.getName().equals("PROPERTY")) {
throw new SAXParseException("Expected PROPERTY end tag", null, null,
parser.getLineNumber(), parser.getColumnNumber());
}
}
/**
 * Stores one address-based property into the appropriate typed property map
 * (void/int/long/string), or — for legacy "bookmarks" entries — into the
 * bookmark manager.  When overwrite is false, entries that would collide with
 * an existing property (or NOTE bookmark) are logged and skipped.
 * @param addr address the property applies to
 * @param name property map name (or bookmark category for "bookmarks")
 * @param element PROPERTY element supplying TYPE and VALUE attributes
 * @param overwrite true to replace existing values at the address
 * @param parser used only for error context
 */
private void processPropertyMapEntry(Address addr, String name, XmlElement element,
boolean overwrite, XmlPullParser parser) throws Exception {
String type = element.getAttribute("TYPE");
if (type != null) {
type = type.toLowerCase();
}
// Conflict check for ordinary property maps (bookmarks are checked below).
if (!overwrite && !"bookmarks".equals(type)) {
PropertyMap map = propMapMgr.getPropertyMap(name);
if (map != null && map.hasProperty(addr)) {
log.appendMsg("Conflicting '" + name + "' PROPERTY ignored at: " + addr);
return; // skip - property conflicts
}
}
// Missing TYPE is treated as a void (marker-only) property.
if (type == null || "void".equals(type)) {
if (element.getAttribute("VALUE") != null) {
log.appendMsg("VALUE attribute ignored for void property");
}
VoidPropertyMap voidMap = propMapMgr.getVoidPropertyMap(name);
if (voidMap == null) {
voidMap = propMapMgr.createVoidPropertyMap(name);
}
voidMap.add(addr);
}
else if ("int".equals(type)) {
int value = XmlUtilities.parseInt(element.getAttribute("VALUE"));
IntPropertyMap intMap = propMapMgr.getIntPropertyMap(name);
if (intMap == null) {
intMap = propMapMgr.createIntPropertyMap(name);
}
intMap.add(addr, value);
}
else if ("long".equals(type)) {
long value = XmlUtilities.parseLong(element.getAttribute("VALUE"));
LongPropertyMap longMap = propMapMgr.getLongPropertyMap(name);
if (longMap == null) {
longMap = propMapMgr.createLongPropertyMap(name);
}
longMap.add(addr, value);
}
else if ("string".equals(type)) {
String str = element.getAttribute("VALUE");
StringPropertyMap strMap = propMapMgr.getStringPropertyMap(name);
if (strMap == null) {
strMap = propMapMgr.createStringPropertyMap(name);
}
strMap.add(addr, str);
}
else if ("bookmarks".equals(type)) {
// Must retain for backward compatibility with old Ver-1 Note bookmarks which
// were saved as simple properties
BookmarkManager bmMgr = program.getBookmarkManager();
if (!overwrite) {
Bookmark[] bookmarks = bmMgr.getBookmarks(addr, BookmarkType.NOTE);
if (bookmarks.length != 0) {
log.appendMsg("Conflicting BOOKMARK ignored at: " + addr);
return; // skip - bookmark conflicts
}
}
bmMgr.setBookmark(addr, BookmarkType.NOTE, name, element.getAttribute("VALUE"));
}
else {
// Unrecognized TYPE value — log and drop.
log.appendMsg("Unsupported PROPERTY usage");
}
}
/**
 * Extracts the options-list (category) portion of a delimited property path,
 * i.e. the first token before the category delimiter.  Returns null when the
 * path yields no token at all.
 */
private String getPropertyList(String path) {
  StringTokenizer tokenizer = new StringTokenizer(path, PROPERTY_LIST_CATEGORY_DELIMITER);
  return tokenizer.hasMoreTokens() ? tokenizer.nextToken() : null;
}
/**
 * Extracts the property-name portion of a delimited property path: the text
 * after the first delimiter.  Returns the whole path when no delimiter is
 * present, or null when nothing follows the delimiter.
 */
private String getPropertyName(String path) {
  int delimIndex = path.indexOf(PROPERTY_LIST_CATEGORY_DELIMITER);
  if (delimIndex < 0) {
    return path; // no category prefix at all
  }
  return path.length() > delimIndex + 1 ? path.substring(delimIndex + 1) : null;
}
/**
 * Stores one address-less property into a program options list.  The NAME
 * attribute is a delimited path: everything before the first delimiter selects
 * the options list, the remainder is the option name.  The TYPE attribute
 * selects how VALUE is parsed; complex types (enum/font/keyStroke/custom/
 * bytes) are stored as XML-escaped strings decoded through OptionType.
 * Conflicts (when overwrite is false) and unsupported types are logged and
 * skipped.
 */
@SuppressWarnings("unchecked")
private void processPropertyListEntry(String pathname, XmlElement element, boolean overwrite)
throws Exception {
String listName = getPropertyList(pathname);
String name = getPropertyName(pathname);
if (listName == null || name == null) {
log.appendMsg("Property NAME attribute must contain both category prefix and property name");
return;
}
Options list = program.getOptions(listName);
if (!overwrite && list.contains(name)) {
log.appendMsg("Conflicting PROPERTY ignored: " + pathname);
return; // skip - property conflicts
}
String type = element.getAttribute("TYPE");
if (type != null) {
type = type.toLowerCase();
}
// "void" has no meaning for an options entry — only map properties may be void.
if (type == null || "void".equals(type)) {
log.appendMsg("Unsupported PROPERTY usage");
}
else if ("int".equals(type)) {
int value = XmlUtilities.parseInt(element.getAttribute("VALUE"));
list.setInt(name, value);
}
else if ("long".equals(type)) {
long value = XmlUtilities.parseLong(element.getAttribute("VALUE"));
list.setLong(name, value);
}
else if ("double".equals(type)) {
double value = Double.parseDouble(element.getAttribute("VALUE"));
list.setDouble(name, value);
}
else if ("float".equals(type)) {
float value = Float.parseFloat(element.getAttribute("VALUE"));
list.setFloat(name, value);
}
else if ("bool".equals(type)) {
boolean value = XmlUtilities.parseBoolean(element.getAttribute("VALUE"));
list.setBoolean(name, value);
}
else if ("string".equals(type)) {
String str = element.getAttribute("VALUE");
list.setString(name, str);
}
else if ("date".equals(type)) {
// Dates are stored as epoch milliseconds.
long value = XmlUtilities.parseLong(element.getAttribute("VALUE"));
list.setDate(name, new Date(value));
}
else if ("color".equals(type)) {
// Colors are stored as a packed RGB int.
Color color = new Color(XmlUtilities.parseInt(element.getAttribute("VALUE")));
list.setColor(name, color);
}
else if ("file".equals(type)) {
File file = new File(element.getAttribute("VALUE"));
list.setFile(name, file);
}
else if ("enum".equals(type)) {
String escapedXML = element.getAttribute("VALUE");
String xmlString = XmlUtilities.unEscapeElementEntities(escapedXML);
@SuppressWarnings("rawtypes")
Enum enuum = (Enum) OptionType.ENUM_TYPE.convertStringToObject(xmlString);
list.setEnum(name, enuum);
}
else if ("font".equals(type)) {
String escapedXML = element.getAttribute("VALUE");
String xmlString = XmlUtilities.unEscapeElementEntities(escapedXML);
Font font = (Font) OptionType.FONT_TYPE.convertStringToObject(xmlString);
list.setFont(name, font);
}
else if ("keyStroke".equals(type)) {
String escapedXML = element.getAttribute("VALUE");
String xmlString = XmlUtilities.unEscapeElementEntities(escapedXML);
KeyStroke keyStroke =
(KeyStroke) OptionType.KEYSTROKE_TYPE.convertStringToObject(xmlString);
list.setKeyStroke(name, keyStroke);
}
else if ("custom".equals(type)) {
String escapedXML = element.getAttribute("VALUE");
String xmlString = XmlUtilities.unEscapeElementEntities(escapedXML);
CustomOption custom =
(CustomOption) OptionType.CUSTOM_TYPE.convertStringToObject(xmlString);
list.setCustomOption(name, custom);
}
else if ("bytes".equals(type)) {
String escapedXML = element.getAttribute("VALUE");
String xmlString = XmlUtilities.unEscapeElementEntities(escapedXML);
byte[] bytes = (byte[]) OptionType.BYTE_ARRAY_TYPE.convertStringToObject(xmlString);
list.setByteArray(name, bytes);
}
else {
// Unrecognized TYPE value — log and drop.
log.appendMsg("Unsupported PROPERTY usage");
}
}
///////////////////////////////////////////////////////////////////////////////////////
// XML READ VERSION 1 DTD //
///////////////////////////////////////////////////////////////////////////////////////
/**
 * Reads properties written under the version-1 DTD.  The v1 format for this
 * element matches the current format, so this simply delegates to read().
 */
void readV1(XmlPullParser parser, boolean overwrite, TaskMonitor monitor)
throws SAXParseException, CancelledException {
read(parser, overwrite, monitor);
}
///////////////////////////////////////////////////////////////////////////////////////
// XML WRITE CURRENT DTD //
///////////////////////////////////////////////////////////////////////////////////////
/**
 * Writes the PROPERTIES element: all address-based property maps (limited to the
 * given address set) followed by all non-default program option values.
 *
 * @param writer XML writer to emit elements to
 * @param set address set restricting which property-map entries are written;
 *            may be null to write all entries
 * @param monitor task monitor used for progress and cancellation
 * @throws CancelledException if the monitor is cancelled
 */
void write(XmlWriter writer, AddressSetView set, TaskMonitor monitor) throws CancelledException {
monitor.setMessage("Writing PROPERTIES ...");
writer.startElement("PROPERTIES");
writePropertyMaps(writer, set, monitor);
writePropertyLists(writer, monitor);
writer.endElement("PROPERTIES");
}
/**
 * Writes each program option as a PROPERTY element. Options that are aliases of
 * other options, or that still hold their default value, are skipped. The NAME
 * attribute is the options-list name joined to the option name with
 * PROPERTY_LIST_CATEGORY_DELIMITER, matching what the reader expects.
 *
 * @param writer XML writer to emit PROPERTY elements to
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 * @throws AssertException if an option has NO_TYPE or an unknown type
 */
private void writePropertyLists(XmlWriter writer, TaskMonitor monitor)
		throws CancelledException {
	List<String> listNames = program.getOptionsNames();
	Collections.sort(listNames);
	for (String listName : listNames) {
		Options propList = program.getOptions(listName);
		List<String> propNames = propList.getOptionNames();
		Collections.sort(propNames);
		String prefix = listName + PROPERTY_LIST_CATEGORY_DELIMITER;
		for (String name : propNames) {
			if (monitor.isCancelled()) {
				throw new CancelledException();
			}
			if (propList.isAlias(name)) { // don't write out properties that are just mirrors of some other property
				continue;
			}
			if (propList.isDefaultValue(name)) { // don't write out default properties.
				continue;
			}
			OptionType type = propList.getType(name);
			XmlAttributes attrs = new XmlAttributes();
			attrs.addAttribute("NAME", prefix + name);
			switch (type) {
				case INT_TYPE:
					attrs.addAttribute("TYPE", "int");
					attrs.addAttribute("VALUE", propList.getInt(name, 0), true);
					break;
				case LONG_TYPE:
					attrs.addAttribute("TYPE", "long");
					attrs.addAttribute("VALUE", propList.getLong(name, 0), true);
					break;
				case STRING_TYPE:
					attrs.addAttribute("TYPE", "string");
					attrs.addAttribute("VALUE", propList.getString(name, ""));
					break;
				case BOOLEAN_TYPE:
					attrs.addAttribute("TYPE", "bool");
					attrs.addAttribute("VALUE", propList.getBoolean(name, true));
					break;
				case DOUBLE_TYPE:
					attrs.addAttribute("TYPE", "double");
					attrs.addAttribute("VALUE", propList.getDouble(name, 0));
					break;
				case FLOAT_TYPE:
					attrs.addAttribute("TYPE", "float");
					attrs.addAttribute("VALUE", propList.getFloat(name, 0f));
					break;
				case DATE_TYPE:
					attrs.addAttribute("TYPE", "date");
					// A missing date is written as epoch 0.
					Date date = propList.getDate(name, (Date) null);
					long time = date == null ? 0 : date.getTime();
					attrs.addAttribute("VALUE", time, true);
					break;
				case COLOR_TYPE:
					attrs.addAttribute("TYPE", "color");
					// NOTE(review): assumes a non-default color option always stores a
					// non-null Color; a null here would throw NPE — confirm.
					Color color = propList.getColor(name, null);
					int rgb = color.getRGB();
					attrs.addAttribute("VALUE", rgb, true);
					break;
				case ENUM_TYPE:
					attrs.addAttribute("TYPE", "enum");
					@SuppressWarnings({ "unchecked", "rawtypes" })
					Enum enuum = propList.getEnum(name, null);
					String xmlString = OptionType.ENUM_TYPE.convertObjectToString(enuum);
					attrs.addAttribute("VALUE", XmlUtilities.escapeElementEntities(xmlString));
					break;
				case FILE_TYPE:
					attrs.addAttribute("TYPE", "file");
					File file = propList.getFile(name, null);
					String path = file.getAbsolutePath();
					attrs.addAttribute("VALUE", path);
					break;
				case FONT_TYPE:
					attrs.addAttribute("TYPE", "font");
					Font font = propList.getFont(name, null);
					xmlString = OptionType.FONT_TYPE.convertObjectToString(font);
					attrs.addAttribute("VALUE", XmlUtilities.escapeElementEntities(xmlString));
					break;
				case KEYSTROKE_TYPE:
					attrs.addAttribute("TYPE", "keyStroke");
					KeyStroke keyStroke = propList.getKeyStroke(name, null);
					xmlString = OptionType.KEYSTROKE_TYPE.convertObjectToString(keyStroke);
					attrs.addAttribute("VALUE", XmlUtilities.escapeElementEntities(xmlString));
					break;
				case CUSTOM_TYPE:
					attrs.addAttribute("TYPE", "custom");
					CustomOption custom = propList.getCustomOption(name, null);
					// Bug fix: this previously serialized with KEYSTROKE_TYPE, producing
					// a value that CUSTOM_TYPE.convertStringToObject() (used by the
					// reader for TYPE="custom") could not parse back.
					xmlString = OptionType.CUSTOM_TYPE.convertObjectToString(custom);
					attrs.addAttribute("VALUE", XmlUtilities.escapeElementEntities(xmlString));
					break;
				case BYTE_ARRAY_TYPE:
					attrs.addAttribute("TYPE", "bytes");
					byte[] bytes = propList.getByteArray(name, null);
					xmlString = OptionType.BYTE_ARRAY_TYPE.convertObjectToString(bytes);
					attrs.addAttribute("VALUE", XmlUtilities.escapeElementEntities(xmlString));
					break;
				case NO_TYPE:
				default:
					throw new AssertException();
			}
			writer.startElement("PROPERTY", attrs);
			writer.endElement("PROPERTY");
		}
	}
}
/**
 * Writes every address-based property map as PROPERTY elements, dispatching on the
 * concrete map type. Only void, int, long and string maps are handled; any other
 * map type is silently skipped.
 *
 * @param writer XML writer to emit elements to
 * @param set address set restricting which entries are written; may be null
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 */
private void writePropertyMaps(XmlWriter writer, AddressSetView set, TaskMonitor monitor)
		throws CancelledException {
	for (Iterator<String> nameIter = propMapMgr.propertyManagers(); nameIter.hasNext();) {
		if (monitor.isCancelled()) {
			throw new CancelledException();
		}
		String name = nameIter.next();
		PropertyMap map = propMapMgr.getPropertyMap(name);
		if (map instanceof VoidPropertyMap) {
			writeVoidMap((VoidPropertyMap) map, writer, set, monitor);
		}
		else if (map instanceof IntPropertyMap) {
			writeIntMap((IntPropertyMap) map, writer, set, monitor);
		}
		else if (map instanceof LongPropertyMap) {
			writeLongMap((LongPropertyMap) map, writer, set, monitor);
		}
		else if (map instanceof StringPropertyMap) {
			writeStringMap((StringPropertyMap) map, writer, set, monitor);
		}
	}
}
/**
 * Writes one PROPERTY element (TYPE="string") for each address in the given string
 * property map, restricted to {@code set} when it is non-null.
 *
 * @param map string property map to export
 * @param writer XML writer to emit elements to
 * @param set address set restricting which entries are written; may be null
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 */
private void writeStringMap(StringPropertyMap map, XmlWriter writer, AddressSetView set,
		TaskMonitor monitor) throws CancelledException {
	AddressIterator addresses =
		set == null ? map.getPropertyIterator() : map.getPropertyIterator(set);
	while (addresses.hasNext()) {
		if (monitor.isCancelled()) {
			throw new CancelledException();
		}
		Address address = addresses.next();
		XmlAttributes attrs = new XmlAttributes();
		attrs.addAttribute("NAME", map.getName());
		attrs.addAttribute("ADDRESS", XmlProgramUtilities.toString(address));
		attrs.addAttribute("TYPE", "string");
		attrs.addAttribute("VALUE", map.getString(address));
		writer.startElement("PROPERTY", attrs);
		writer.endElement("PROPERTY");
	}
}
/**
 * Writes one PROPERTY element (TYPE="long") for each address in the given long
 * property map, restricted to {@code set} when it is non-null. Addresses for which
 * the map reports no value are skipped.
 *
 * @param map long property map to export
 * @param writer XML writer to emit elements to
 * @param set address set restricting which entries are written; may be null
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 */
private void writeLongMap(LongPropertyMap map, XmlWriter writer, AddressSetView set,
		TaskMonitor monitor) throws CancelledException {
	AddressIterator iter =
		set != null ? map.getPropertyIterator(set) : map.getPropertyIterator();
	while (iter.hasNext()) {
		if (monitor.isCancelled()) {
			throw new CancelledException();
		}
		Address addr = iter.next();
		long value;
		try {
			// Only getLong() can throw NoValueException; keep the try narrow so other
			// failures are not accidentally swallowed.
			value = map.getLong(addr);
		}
		catch (NoValueException e) {
			// Deliberately ignored: the iterator returned an address without a value;
			// there is simply nothing to write for it.
			continue;
		}
		XmlAttributes attrs = new XmlAttributes();
		attrs.addAttribute("NAME", map.getName());
		attrs.addAttribute("ADDRESS", XmlProgramUtilities.toString(addr));
		attrs.addAttribute("TYPE", "long");
		attrs.addAttribute("VALUE", value, true);
		writer.startElement("PROPERTY", attrs);
		writer.endElement("PROPERTY");
	}
}
/**
 * Writes one PROPERTY element (TYPE="int") for each address in the given int
 * property map, restricted to {@code set} when it is non-null. Addresses for which
 * the map reports no value are skipped.
 *
 * @param map int property map to export
 * @param writer XML writer to emit elements to
 * @param set address set restricting which entries are written; may be null
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 */
private void writeIntMap(IntPropertyMap map, XmlWriter writer, AddressSetView set,
		TaskMonitor monitor) throws CancelledException {
	AddressIterator iter =
		set != null ? map.getPropertyIterator(set) : map.getPropertyIterator();
	while (iter.hasNext()) {
		if (monitor.isCancelled()) {
			throw new CancelledException();
		}
		Address addr = iter.next();
		int value;
		try {
			// Only getInt() can throw NoValueException; keep the try narrow so other
			// failures are not accidentally swallowed.
			value = map.getInt(addr);
		}
		catch (NoValueException e) {
			// Deliberately ignored: no value at this address means nothing to write.
			continue;
		}
		XmlAttributes attrs = new XmlAttributes();
		attrs.addAttribute("NAME", map.getName());
		attrs.addAttribute("ADDRESS", XmlProgramUtilities.toString(addr));
		attrs.addAttribute("TYPE", "int");
		attrs.addAttribute("VALUE", value, true);
		writer.startElement("PROPERTY", attrs);
		writer.endElement("PROPERTY");
	}
}
/**
 * Writes one PROPERTY element (TYPE="void", no VALUE attribute) for each address in
 * the given void property map, restricted to {@code set} when it is non-null.
 *
 * @param map void property map to export
 * @param writer XML writer to emit elements to
 * @param set address set restricting which entries are written; may be null
 * @param monitor task monitor used for cancellation
 * @throws CancelledException if the monitor is cancelled
 */
private void writeVoidMap(VoidPropertyMap map, XmlWriter writer, AddressSetView set,
		TaskMonitor monitor) throws CancelledException {
	AddressIterator addresses =
		set == null ? map.getPropertyIterator() : map.getPropertyIterator(set);
	while (addresses.hasNext()) {
		if (monitor.isCancelled()) {
			throw new CancelledException();
		}
		Address address = addresses.next();
		XmlAttributes attrs = new XmlAttributes();
		attrs.addAttribute("NAME", map.getName());
		attrs.addAttribute("ADDRESS", XmlProgramUtilities.toString(address));
		attrs.addAttribute("TYPE", "void");
		writer.startElement("PROPERTY", attrs);
		writer.endElement("PROPERTY");
	}
}
}
| |
/*
* Copyright 2003-2017 Dave Griffith, Bas Leijdekkers
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.siyeh.ig.psiutils;
import com.intellij.psi.*;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.tree.IElementType;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.util.Processor;
import com.intellij.util.SmartList;
import org.jetbrains.annotations.Contract;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
/**
 * Static utility methods for analyzing how a {@link PsiVariable} is accessed within
 * a given PSI context: whether it is assigned, assigned from, passed as an argument,
 * returned, incremented/decremented, captured by an inner class, and so on.
 * Most methods delegate to dedicated visitor classes; all treat a null context as
 * "no access found". This class is not instantiable.
 */
public class VariableAccessUtils {
private VariableAccessUtils() {}
/**
 * Finds references to the specified variable in the specified context. This can be more than an order of magnitude faster for
 * finding local references, compared to using {@link ReferencesSearch}.
 * @param variable the variable to find references to
 * @param context the context to find references in
 * @return a list of found references
 */
public static List<PsiReferenceExpression> findReferences(@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return Collections.emptyList();
}
final List<PsiReferenceExpression> result = new SmartList<>();
context.acceptChildren(new JavaRecursiveElementVisitor() {
@Override
public void visitReferenceExpression(PsiReferenceExpression expression) {
super.visitReferenceExpression(expression);
// Qualified references cannot refer to a local/parameter-style variable
// directly; skip them to avoid needless resolve() calls.
if (expression.getQualifierExpression() != null) {
return;
}
if (variable == expression.resolve()) {
result.add(expression);
}
}
});
return result;
}
/**
 * Returns true if, within {@code context}, some element is assigned from
 * {@code variable}, as detected by {@link VariableAssignedFromVisitor}.
 */
public static boolean variableIsAssignedFrom(@NotNull PsiVariable variable,
@Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableAssignedFromVisitor visitor =
new VariableAssignedFromVisitor(variable);
context.accept(visitor);
return visitor.isAssignedFrom();
}
/**
 * Returns true if {@code variable} is passed as an argument to a method call
 * within {@code context}, as detected by {@link VariablePassedAsArgumentVisitor}.
 */
public static boolean variableIsPassedAsMethodArgument(
@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariablePassedAsArgumentVisitor visitor =
new VariablePassedAsArgumentVisitor(variable);
context.accept(visitor);
return visitor.isPassed();
}
/**
 * Overload of {@link #variableIsPassedAsMethodArgument(PsiVariable, PsiElement, boolean, Processor)}
 * with builder-pattern handling disabled.
 */
public static boolean variableIsPassedAsMethodArgument(@NotNull PsiVariable variable, @Nullable PsiElement context,
Processor<PsiCall> callProcessor) {
return variableIsPassedAsMethodArgument(variable, context, false, callProcessor);
}
/**
 * Returns true if {@code variable} is passed as an argument to a call within
 * {@code context}, allowing {@code callProcessor} to exclude individual calls.
 *
 * @param builderPattern if true, builder-style call chains are also considered
 */
public static boolean variableIsPassedAsMethodArgument(@NotNull PsiVariable variable, @Nullable PsiElement context,
boolean builderPattern, Processor<PsiCall> callProcessor) {
if (context == null) {
return false;
}
final VariablePassedAsArgumentExcludedVisitor visitor =
new VariablePassedAsArgumentExcludedVisitor(variable, builderPattern, callProcessor);
context.accept(visitor);
return visitor.isPassed();
}
/**
 * Returns true if {@code variable} is used inside an array initializer within
 * {@code context}, as detected by {@link VariableUsedInArrayInitializerVisitor}.
 */
public static boolean variableIsUsedInArrayInitializer(
@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableUsedInArrayInitializerVisitor visitor =
new VariableUsedInArrayInitializerVisitor(variable);
context.accept(visitor);
return visitor.isPassed();
}
/**
 * This method will return true if the specified variable is a field with greater than private visibility with a common name.
 * Finding usages for such fields is too expensive.
 * @param variable the variable to check assignments for
 * @return true, if the variable is assigned or too expensive to search. False otherwise.
 */
public static boolean variableIsAssigned(@NotNull PsiVariable variable) {
if (variable instanceof PsiField) {
// Private fields can only be assigned inside their top-level class.
if (variable.hasModifierProperty(PsiModifier.PRIVATE)) {
final PsiClass aClass = PsiUtil.getTopLevelClass(variable);
return variableIsAssigned(variable, aClass);
}
// Non-private field: fall back to a (possibly expensive) global usage search.
// forEach returns false as soon as a write access is found, hence the negation.
return DeclarationSearchUtils.isTooExpensiveToSearch(variable, false) || !ReferencesSearch.search(variable).forEach(reference -> {
final PsiElement element = reference.getElement();
if (!(element instanceof PsiExpression)) {
return true;
}
final PsiExpression expression = (PsiExpression)element;
return !PsiUtil.isAccessedForWriting(expression);
});
}
// Local variable/parameter: search only its enclosing scope.
final PsiElement context =
PsiTreeUtil.getParentOfType(variable, PsiCodeBlock.class, PsiMethod.class, PsiLambdaExpression.class,
PsiCatchSection.class, PsiForStatement.class, PsiForeachStatement.class);
return variableIsAssigned(variable, context);
}
/**
 * Returns true if {@code variable} is assigned anywhere in {@code context},
 * recursing into nested classes.
 */
public static boolean variableIsAssigned(@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableAssignedVisitor visitor = new VariableAssignedVisitor(variable, true);
context.accept(visitor);
return visitor.isAssigned();
}
/**
 * Returns true if {@code variable} is assigned anywhere in {@code context}.
 *
 * @param recurseIntoClasses whether nested classes inside the context are searched too
 */
public static boolean variableIsAssigned(
@NotNull PsiVariable variable, @Nullable PsiElement context,
boolean recurseIntoClasses) {
if (context == null) {
return false;
}
final VariableAssignedVisitor visitor =
new VariableAssignedVisitor(variable, recurseIntoClasses);
context.accept(visitor);
return visitor.isAssigned();
}
/**
 * Returns true if {@code variable} is returned from {@code context}; builder-pattern
 * handling disabled.
 */
public static boolean variableIsReturned(@NotNull PsiVariable variable, @Nullable PsiElement context) {
return variableIsReturned(variable, context, false);
}
/**
 * Returns true if {@code variable} is returned from {@code context}, as detected by
 * {@link VariableReturnedVisitor}.
 *
 * @param builderPattern if true, builder-style call chains on the variable also count
 */
public static boolean variableIsReturned(@NotNull PsiVariable variable, @Nullable PsiElement context, boolean builderPattern) {
if (context == null) {
return false;
}
final VariableReturnedVisitor visitor = new VariableReturnedVisitor(variable, builderPattern);
context.accept(visitor);
return visitor.isReturned();
}
/**
 * Returns true if the value of {@code variable} is used within {@code context},
 * as detected by {@link VariableValueUsedVisitor}.
 */
public static boolean variableValueIsUsed(
@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableValueUsedVisitor visitor =
new VariableValueUsedVisitor(variable);
context.accept(visitor);
return visitor.isVariableValueUsed();
}
/**
 * Returns true if elements of the array held by {@code variable} are assigned
 * within {@code context}, as detected by {@link ArrayContentsAssignedVisitor}.
 */
public static boolean arrayContentsAreAssigned(
@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final ArrayContentsAssignedVisitor visitor =
new ArrayContentsAssignedVisitor(variable);
context.accept(visitor);
return visitor.isAssigned();
}
/**
 * Returns true if {@code variable} is referenced from an inner (or anonymous/local)
 * class within {@code context}, as detected by {@link VariableUsedInInnerClassVisitor}.
 */
public static boolean variableIsUsedInInnerClass(
@NotNull PsiVariable variable, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableUsedInInnerClassVisitor visitor =
new VariableUsedInInnerClassVisitor(variable);
context.accept(visitor);
return visitor.isUsedInInnerClass();
}
/**
 * Returns true if {@code expression} may evaluate to {@code variable}; builder-pattern
 * handling disabled.
 */
public static boolean mayEvaluateToVariable(@Nullable PsiExpression expression, @NotNull PsiVariable variable) {
return mayEvaluateToVariable(expression, variable, false);
}
/**
 * Returns true if {@code expression} may evaluate to {@code variable}, looking
 * through parentheses, casts, conditional branches, array accesses on the variable's
 * own (multi-dimensional) array type and — when {@code builderPattern} is set —
 * builder-style call chains whose return type matches the variable's type.
 */
static boolean mayEvaluateToVariable(@Nullable PsiExpression expression, @NotNull PsiVariable variable, boolean builderPattern) {
if (expression == null) {
return false;
}
if (expression instanceof PsiParenthesizedExpression) {
final PsiParenthesizedExpression parenthesizedExpression = (PsiParenthesizedExpression)expression;
final PsiExpression containedExpression = parenthesizedExpression.getExpression();
return mayEvaluateToVariable(containedExpression, variable, builderPattern);
}
if (expression instanceof PsiTypeCastExpression) {
final PsiTypeCastExpression typeCastExpression = (PsiTypeCastExpression)expression;
final PsiExpression containedExpression = typeCastExpression.getOperand();
return mayEvaluateToVariable(containedExpression, variable, builderPattern);
}
if (expression instanceof PsiConditionalExpression) {
// Either branch of a ternary may produce the variable.
final PsiConditionalExpression conditional = (PsiConditionalExpression)expression;
final PsiExpression thenExpression = conditional.getThenExpression();
final PsiExpression elseExpression = conditional.getElseExpression();
return mayEvaluateToVariable(thenExpression, variable, builderPattern) ||
mayEvaluateToVariable(elseExpression, variable, builderPattern);
}
if (expression instanceof PsiArrayAccessExpression) {
// An array access can only yield the variable itself when the variable is a
// multi-dimensional array and fewer index operations are applied than the
// array has dimensions (i.e. the access yields a sub-array).
final PsiElement parent = expression.getParent();
if (parent instanceof PsiArrayAccessExpression) {
return false;
}
final PsiType type = variable.getType();
if (!(type instanceof PsiArrayType)) {
return false;
}
final PsiArrayType arrayType = (PsiArrayType)type;
final int dimensions = arrayType.getArrayDimensions();
if (dimensions <= 1) {
return false;
}
PsiArrayAccessExpression arrayAccessExpression = (PsiArrayAccessExpression)expression;
PsiExpression arrayExpression = arrayAccessExpression.getArrayExpression();
int count = 1;
// Walk down nested accesses, counting how many indices are applied.
while (arrayExpression instanceof PsiArrayAccessExpression) {
arrayAccessExpression = (PsiArrayAccessExpression)arrayExpression;
arrayExpression = arrayAccessExpression.getArrayExpression();
count++;
}
return count != dimensions && mayEvaluateToVariable(arrayExpression, variable, builderPattern);
}
if (builderPattern && expression instanceof PsiMethodCallExpression) {
// Builder pattern: x.foo().bar() may still "be" x if each call returns the
// variable's own type; recurse into the call qualifier.
final PsiMethodCallExpression methodCallExpression = (PsiMethodCallExpression)expression;
final PsiMethod method = methodCallExpression.resolveMethod();
if (method == null) {
return false;
}
final PsiType returnType = method.getReturnType();
final PsiType variableType = variable.getType();
if (!variableType.equals(returnType)) {
return false;
}
final PsiReferenceExpression methodExpression = methodCallExpression.getMethodExpression();
final PsiExpression qualifier = methodExpression.getQualifierExpression();
return mayEvaluateToVariable(qualifier, variable, true);
}
return evaluatesToVariable(expression, variable);
}
/**
 * Returns true if {@code expression} (ignoring parentheses) is a reference that
 * resolves to exactly {@code variable}.
 */
public static boolean evaluatesToVariable(
@Nullable PsiExpression expression,
@NotNull PsiVariable variable) {
expression = ParenthesesUtils.stripParentheses(expression);
if (!(expression instanceof PsiReferenceExpression)) {
return false;
}
final PsiReferenceExpression referenceExpression =
(PsiReferenceExpression)expression;
final PsiElement target = referenceExpression.resolve();
return variable.equals(target);
}
/**
 * Returns true if {@code variable} is used anywhere within {@code context}.
 */
@Contract("_, null -> false")
public static boolean variableIsUsed(@NotNull PsiVariable variable,
@Nullable PsiElement context) {
return context != null && VariableUsedVisitor.isVariableUsedIn(variable, context);
}
/**
 * Returns true if {@code statement} decrements {@code variable} by one
 * ({@code v--}, {@code --v}, {@code v -= 1}, {@code v = v - 1}, {@code v = 1 - v} is excluded).
 */
public static boolean variableIsDecremented(@NotNull PsiVariable variable, @Nullable PsiStatement statement) {
return variableIsIncrementedOrDecremented(variable, statement, false); }
/**
 * Returns true if {@code statement} increments {@code variable} by one
 * ({@code v++}, {@code ++v}, {@code v += 1}, {@code v = v + 1}, {@code v = 1 + v}).
 */
public static boolean variableIsIncremented(@NotNull PsiVariable variable, @Nullable PsiStatement statement) {
return variableIsIncrementedOrDecremented(variable, statement, true);
}
/**
 * Shared implementation for {@link #variableIsIncremented} / {@link #variableIsDecremented}:
 * recognizes unary ++/--, compound +=/-= with literal 1, and plain assignment of
 * {@code v ± 1} (with 1 on either side for +).
 */
private static boolean variableIsIncrementedOrDecremented(@NotNull PsiVariable variable, @Nullable PsiStatement statement,
boolean incremented) {
if (!(statement instanceof PsiExpressionStatement)) {
return false;
}
final PsiExpressionStatement expressionStatement =
(PsiExpressionStatement)statement;
PsiExpression expression = expressionStatement.getExpression();
expression = ParenthesesUtils.stripParentheses(expression);
if (expression instanceof PsiUnaryExpression) {
final PsiUnaryExpression unaryExpression =
(PsiUnaryExpression)expression;
final IElementType tokenType = unaryExpression.getOperationTokenType();
if (!tokenType.equals(incremented ? JavaTokenType.PLUSPLUS : JavaTokenType.MINUSMINUS)) {
return false;
}
final PsiExpression operand = unaryExpression.getOperand();
return evaluatesToVariable(operand, variable);
}
if (expression instanceof PsiAssignmentExpression) {
final PsiAssignmentExpression assignmentExpression =
(PsiAssignmentExpression)expression;
final IElementType tokenType =
assignmentExpression.getOperationTokenType();
final PsiExpression lhs = assignmentExpression.getLExpression();
if (!evaluatesToVariable(lhs, variable)) {
return false;
}
PsiExpression rhs = assignmentExpression.getRExpression();
rhs = ParenthesesUtils.stripParentheses(rhs);
if (tokenType == JavaTokenType.EQ) {
// Plain assignment: only "v = v + 1" / "v = 1 + v" / "v = v - 1" qualify.
if (!(rhs instanceof PsiBinaryExpression)) {
return false;
}
final PsiBinaryExpression binaryExpression =
(PsiBinaryExpression)rhs;
final IElementType binaryTokenType =
binaryExpression.getOperationTokenType();
if (binaryTokenType != (incremented ? JavaTokenType.PLUS : JavaTokenType.MINUS)) {
return false;
}
final PsiExpression lOperand = binaryExpression.getLOperand();
final PsiExpression rOperand = binaryExpression.getROperand();
if (ExpressionUtils.isOne(lOperand)) {
if (evaluatesToVariable(rOperand, variable)) {
return true;
}
}
else if (ExpressionUtils.isOne(rOperand)) {
if (evaluatesToVariable(lOperand, variable)) {
return true;
}
}
}
else if (tokenType == (incremented ? JavaTokenType.PLUSEQ : JavaTokenType.MINUSEQ)) {
if (ExpressionUtils.isOne(rhs)) {
return true;
}
}
}
return false;
}
/**
 * Returns true if the variable referenced by {@code referenceExpression} is assigned
 * somewhere in {@code context} before the reference itself is reached.
 */
public static boolean variableIsAssignedBeforeReference(@NotNull PsiReferenceExpression referenceExpression,
@Nullable PsiElement context) {
if (context == null) {
return false;
}
final PsiElement target = referenceExpression.resolve();
if (!(target instanceof PsiVariable)) {
return false;
}
final PsiVariable variable = (PsiVariable)target;
return variableIsAssignedAtPoint(variable, context, referenceExpression);
}
/**
 * Returns true if {@code variable} is assigned within {@code context} at or before
 * {@code point}: siblings preceding the branch that contains the point are searched
 * fully; the containing branch is searched recursively down to the point.
 */
public static boolean variableIsAssignedAtPoint(
@NotNull PsiVariable variable, @Nullable PsiElement context,
@NotNull PsiElement point) {
if (context == null) {
return false;
}
final PsiElement directChild =
getDirectChildWhichContainsElement(context, point);
if (directChild == null) {
return false;
}
final PsiElement[] children = context.getChildren();
for (PsiElement child : children) {
if (child == directChild) {
// Descend into the branch containing the point.
return variableIsAssignedAtPoint(variable, directChild, point);
}
// Siblings fully before the point are checked in their entirety.
if (variableIsAssigned(variable, child)) {
return true;
}
}
return false;
}
/**
 * Returns the direct child of {@code ancestor} on the path down to {@code descendant},
 * or null if {@code descendant} is not (strictly) below {@code ancestor}.
 */
@Nullable
private static PsiElement getDirectChildWhichContainsElement(
@NotNull PsiElement ancestor,
@NotNull PsiElement descendant) {
if (ancestor == descendant) {
return null;
}
PsiElement child = descendant;
PsiElement parent = child.getParent();
// Climb until the parent is the ancestor; the last child visited is the answer.
while (!parent.equals(ancestor)) {
child = parent;
parent = child.getParent();
if (parent == null) {
return null;
}
}
return child;
}
/**
 * Collects every variable referenced within {@code context}; returns an empty set
 * for a null context.
 */
public static Set<PsiVariable> collectUsedVariables(PsiElement context) {
if (context == null) {
return Collections.emptySet();
}
final VariableCollectingVisitor visitor = new VariableCollectingVisitor();
context.accept(visitor);
return visitor.getUsedVariables();
}
/**
 * Returns true if any variable in {@code variables} is assigned within
 * {@code context}, recursing into nested classes.
 */
public static boolean isAnyVariableAssigned(@NotNull Collection<PsiVariable> variables, @Nullable PsiElement context) {
if (context == null) {
return false;
}
final VariableAssignedVisitor visitor = new VariableAssignedVisitor(variables, true);
context.accept(visitor);
return visitor.isAssigned();
}
/** Visitor that accumulates every PsiVariable resolved from a reference expression. */
private static class VariableCollectingVisitor extends JavaRecursiveElementWalkingVisitor {
private final Set<PsiVariable> usedVariables = new HashSet<>();
@Override
public void visitReferenceExpression(
PsiReferenceExpression expression) {
super.visitReferenceExpression(expression);
final PsiElement target = expression.resolve();
if (!(target instanceof PsiVariable)) {
return;
}
final PsiVariable variable = (PsiVariable)target;
usedVariables.add(variable);
}
public Set<PsiVariable> getUsedVariables() {
return usedVariables;
}
}
}
| |
package pokeraidbot.infrastructure.botsupport.gymhuntr;
import main.BotServerMain;
import net.dv8tion.jda.api.EmbedBuilder;
import net.dv8tion.jda.api.entities.MessageChannel;
import net.dv8tion.jda.api.entities.MessageEmbed;
import net.dv8tion.jda.api.entities.SelfUser;
import net.dv8tion.jda.api.entities.User;
import net.dv8tion.jda.api.events.Event;
import net.dv8tion.jda.api.events.GenericEvent;
import net.dv8tion.jda.api.events.message.guild.GuildMessageReceivedEvent;
import net.dv8tion.jda.api.hooks.EventListener;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.Validate;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import pokeraidbot.BotService;
import pokeraidbot.Utils;
import pokeraidbot.commands.NewRaidGroupCommand;
import pokeraidbot.domain.config.ClockService;
import pokeraidbot.domain.config.LocaleService;
import pokeraidbot.domain.gym.Gym;
import pokeraidbot.domain.gym.GymRepository;
import pokeraidbot.domain.pokemon.Pokemon;
import pokeraidbot.domain.pokemon.PokemonRaidInfo;
import pokeraidbot.domain.pokemon.PokemonRepository;
import pokeraidbot.domain.raid.PokemonRaidStrategyService;
import pokeraidbot.domain.raid.Raid;
import pokeraidbot.domain.raid.RaidRepository;
import pokeraidbot.infrastructure.jpa.config.Config;
import pokeraidbot.infrastructure.jpa.config.ServerConfigRepository;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.LocalTime;
import java.util.*;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
import static pokeraidbot.Utils.*;
public class GymHuntrRaidEventListener implements EventListener {
private static final Logger LOGGER = LoggerFactory.getLogger(GymHuntrRaidEventListener.class);
private ServerConfigRepository serverConfigRepository;
private RaidRepository raidRepository;
private GymRepository gymRepository;
private PokemonRepository pokemonRepository;
private LocaleService localeService;
private ExecutorService executorService;
private final ClockService clockService;
private final BotService botService;
private final PokemonRaidStrategyService strategyService;
/**
 * Creates the listener with all collaborators it needs to turn bot-reported raid
 * embeds into pokeraidbot raids. All arguments are stored as-is; no validation or
 * other work is performed here.
 */
public GymHuntrRaidEventListener(ServerConfigRepository serverConfigRepository, RaidRepository raidRepository,
GymRepository gymRepository, PokemonRepository pokemonRepository,
LocaleService localeService, ExecutorService executorService,
ClockService clockService, BotService botService,
PokemonRaidStrategyService strategyService) {
this.serverConfigRepository = serverConfigRepository;
this.raidRepository = raidRepository;
this.gymRepository = gymRepository;
this.pokemonRepository = pokemonRepository;
this.localeService = localeService;
this.executorService = executorService;
this.clockService = clockService;
this.botService = botService;
this.strategyService = strategyService;
}
/**
 * Reacts to guild messages posted by the supported raid bots (GymHuntr / PokeAlarm).
 * When bot integration is enabled for the server, each embed in the message is
 * parsed into raid-creation arguments (gym, pokemon, end time) and handed to
 * {@link #handleRaidFromIntegration}. All failures are caught and logged so the
 * listener never propagates exceptions to JDA.
 */
@Override
public void onEvent(GenericEvent event) {
    if (!(event instanceof GuildMessageReceivedEvent)) {
        return;
    }
    final GuildMessageReceivedEvent guildEvent = (GuildMessageReceivedEvent) event;
    final User messageAuthor = guildEvent.getAuthor();
    try {
        if (!isUserGymhuntrBot(messageAuthor) && !isUserPokeAlarmBot(messageAuthor)) {
            return;
        }
        final String serverName = guildEvent.getGuild().getName().toLowerCase();
        final Config config = serverConfigRepository.getConfigForServer(serverName);
        if (config == null) {
            LOGGER.warn("Server configuration is null for this guild: " + guildEvent.getGuild().getName());
            return;
        }
        if (!config.useBotIntegration()) {
            if (LOGGER.isTraceEnabled()) {
                LOGGER.trace("Skipping trigger, since bot integration setting is false for server " +
                        guildEvent.getGuild().getName());
            }
            return;
        }
        final List<MessageEmbed> embeds = guildEvent.getMessage().getEmbeds();
        if (embeds == null || embeds.isEmpty()) {
            return;
        }
        for (MessageEmbed embed : embeds) {
            final LocalDateTime currentDateTime = clockService.getCurrentDateTime();
            final String description = embed.getDescription();
            final String title = embed.getTitle();
            final List<String> newRaidArguments;
            if (isUserGymhuntrBot(messageAuthor)) {
                newRaidArguments = gymhuntrArgumentsToCreateRaid(title, description, clockService);
            }
            else if (isUserPokeAlarmBot(messageAuthor)) {
                newRaidArguments = pokeAlarmArgumentsToCreateRaid(title, description, clockService);
            }
            else {
                newRaidArguments = new ArrayList<>();
            }
            try {
                if (newRaidArguments != null && newRaidArguments.size() > 0) {
                    // Argument order is fixed: gym name, pokemon name, end time.
                    final Iterator<String> iterator = newRaidArguments.iterator();
                    final String gym = iterator.next();
                    final String pokemon = iterator.next();
                    final String time = iterator.next();
                    final Pokemon raidBoss = pokemonRepository.getByName(pokemon);
                    final Gym raidGym = gymRepository.findByName(gym, config.getRegion());
                    final LocalDateTime endOfRaid = LocalDateTime.of(currentDateTime.toLocalDate(),
                            LocalTime.parse(time, Utils.timeParseFormatter));
                    final SelfUser botUser = botService.getBot().getSelfUser();
                    final PokemonRaidInfo raidInfo = strategyService.getRaidInfo(raidBoss);
                    handleRaidFromIntegration(botUser, guildEvent, raidBoss, raidGym, endOfRaid,
                            config, clockService, raidInfo, strategyService);
                }
                else if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("No arguments to create raid with for server " + config +
                            ", skipping. Raw command: " + guildEvent.getMessage().getContentRaw());
                }
            }
            catch (Throwable t) {
                LOGGER.warn("Exception when trying to get arguments for raid creation: " + t.getMessage());
            }
        }
    }
    catch (Throwable t) {
        LOGGER.warn("Exception thrown for event listener: " + t.getMessage());
        if (LOGGER.isDebugEnabled()) {
            LOGGER.debug("Stacktrace: ", t);
        }
    }
}
/**
 * Creates or updates a raid from bot-reported data. Raids with 10 minutes or less
 * remaining are skipped. If an active raid already exists at the gym: an egg is
 * hatched into the reported boss, otherwise the report is ignored. All failures
 * are caught and logged, never propagated.
 *
 * @param user the bot's own user, used as the acting user for repository calls
 * @param guildEvent the triggering guild message event
 * @param raidBoss the reported raid boss
 * @param raidGym the gym the raid is at
 * @param endOfRaid reported end time of the raid
 * @param config server configuration (region, locale, etc.)
 * @param clockService clock used to determine "now"
 * @param pokemonRaidInfo raid info for the boss, forwarded to raid creation
 * @param pokemonRaidStrategyService strategy service (currently unused here)
 */
public void handleRaidFromIntegration(User user, GuildMessageReceivedEvent guildEvent, Pokemon raidBoss, Gym raidGym,
                                      LocalDateTime endOfRaid, Config config, ClockService clockService,
                                      PokemonRaidInfo pokemonRaidInfo,
                                      PokemonRaidStrategyService pokemonRaidStrategyService) {
    Validate.notNull(user, "User");
    Validate.notNull(guildEvent, "Guild event");
    Validate.notNull(config, "Config");
    Validate.notNull(raidBoss, "Raid boss");
    Validate.notNull(raidGym, "Gym");
    // (Removed a duplicated Validate.notNull(user, "User") that was here.)
    final LocalDateTime now = clockService.getCurrentDateTime();
    final boolean moreThan10MinutesLeftOnRaid = endOfRaid.isAfter(now.plusMinutes(10));
    if (!moreThan10MinutesLeftOnRaid) {
        // Not worth creating a raid that is about to expire.
        LOGGER.debug("Skipped creating raid at " + raidGym +
                ", less than 10 minutes remaining on it.");
        return;
    }
    final Raid raidToCreate = new Raid(raidBoss,
            endOfRaid,
            raidGym,
            localeService, config.getRegion(), false);
    final MessageChannel channel = guildEvent.getChannel();
    try {
        if (raidRepository.isActiveOrExRaidAt(raidGym, config.getRegion())) {
            Raid existingRaid =
                    raidRepository.getActiveRaidOrFallbackToExRaid(raidGym, config.getRegion(), user);
            if (existingRaid.getPokemon().isEgg()) {
                // An egg at this gym: hatch it into the reported boss.
                existingRaid = raidRepository.changePokemon(existingRaid, raidBoss,
                        guildEvent.getGuild(), config, user,
                        "(bot) " +
                                "!raid hatch " + raidBoss.getName() + " " + existingRaid.getGym().getName());
                LOGGER.info("Hatched raid: " + existingRaid);
            }
            else {
                LOGGER.info("Raid already present, which is not an egg to hatch. " +
                        "Skipping raid at: " + raidGym.getName() + " for server " + config.getServer());
            }
        }
        else {
            createRaid(user, guildEvent, config, clockService, pokemonRaidInfo, now, raidToCreate, channel);
        }
    }
    catch (Throwable t) {
        LOGGER.warn("Exception when trying to create raid via botintegration for server " +
                config.getServer() + ", channel " + (channel != null ? channel.getName() : "NULL") + ": " +
                t.getMessage());
    }
}
/**
 * Creates a new raid from bot-reported data and posts a temporary confirmation
 * embed in the channel. EX raids reported via integration are skipped entirely.
 *
 * @param user acting user for the repository call
 * @param guildEvent the triggering guild message event
 * @param config server configuration (locale, etc.)
 * @param clockService clock service (kept for the disabled group-creation path)
 * @param pokemonRaidInfo raid info (kept for the disabled group-creation path)
 * @param now current date/time when the report was handled
 * @param raidToCreate the raid to persist
 * @param channel channel to post feedback in
 */
protected void createRaid(User user, GuildMessageReceivedEvent guildEvent, Config config,
                          ClockService clockService, PokemonRaidInfo pokemonRaidInfo,
                          LocalDateTime now, Raid raidToCreate, MessageChannel channel) {
    if (raidToCreate.isExRaid()) {
        LOGGER.debug("Got an EX raid to create from gym integration, skipping: " + raidToCreate);
        return;
    }
    // Declared-then-assigned split merged into a single final declaration.
    final Raid createdRaid = raidRepository.newRaid(user, raidToCreate, guildEvent.getGuild(), config,
            "(bot) !raid new " + raidToCreate.getPokemon().getName() + " " +
                    printTimeIfSameDay(raidToCreate.getEndOfRaid()) + " " + raidToCreate.getGym().getName());
    final Locale locale = config.getLocale();
    EmbedBuilder embedBuilder = new EmbedBuilder().setTitle(null, null);
    StringBuilder sb = new StringBuilder();
    sb.append(localeService.getMessageFor(LocaleService.NEW_RAID_CREATED,
            locale, createdRaid.toString(locale)));
    // NOTE(review): automatic group creation appears intentionally disabled; confirm
    // before deleting. Re-enable by restoring the call below.
    // if (user != null && channel != null) {
    // createGroupIfConfigSaysSo(user, guildEvent, config, clockService,
    // pokemonRaidInfo, now, createdRaid, channel);
    // } else {
    // LOGGER.warn("Could not create group, as some input values were null!");
    // }
    embedBuilder.setDescription(sb.toString());
    final MessageEmbed messageEmbed = embedBuilder.build();
    sendFeedbackThenCleanUp(createdRaid, channel, messageEmbed);
}
/**
 * Posts the raid-created feedback embed and schedules its automatic removal after
 * the server-wide feedback timeout. Failures to send are logged at debug level;
 * failures to schedule the cleanup are logged as warnings. Nothing propagates.
 */
private void sendFeedbackThenCleanUp(Raid createdRaid, MessageChannel channel, MessageEmbed messageEmbed) {
    LOGGER.info("Raid created via Bot integration for region " + createdRaid.getRegion() + ": " + createdRaid);
    try {
        channel.sendMessage(messageEmbed).queue(sentMessage -> {
            try {
                // Remove the feedback message once the configured delay has passed.
                channel.deleteMessageById(sentMessage.getId())
                        .queueAfter(BotServerMain.timeToRemoveFeedbackInSeconds, TimeUnit.SECONDS);
            }
            catch (Throwable t) {
                LOGGER.warn("Could not clean up feedback from raid creation: " + t.getMessage());
            }
        });
    }
    catch (Throwable t) {
        LOGGER.debug("Could not send feedback for raid creation: " + t.getMessage());
    }
}
private void createGroupIfConfigSaysSo(User user, GuildMessageReceivedEvent guildEvent, Config config,
ClockService clockService, PokemonRaidInfo pokemonRaidInfo,
LocalDateTime now, Raid createdRaid, MessageChannel channel) {
// Auto create group for tier 5 bosses, if server config says to do so
if (pokemonRaidInfo != null && pokemonRaidInfo.getBossTier() == 5) {
LocalTime groupStart = getAutoCreatedRaidGroupStart(now, createdRaid);
if (groupStart != null) {
MessageChannel chn = config.getGroupCreationChannel(guildEvent.getGuild());
MessageChannel channelToCreateGroupIn = channel;
if (chn != null &&
config.getGroupCreationStrategy() == Config.RaidGroupCreationStrategy.NAMED_CHANNEL) {
channelToCreateGroupIn = chn;
}
if (LOGGER.isDebugEnabled()) {
if (channel != null) {
LOGGER.debug("Channel to use to create group: " + channel.getName());
}
}
try {
NewRaidGroupCommand.createRaidGroup(channelToCreateGroupIn, guildEvent.getGuild(), config, user,
config.getLocale(), groupStart, createdRaid.getId(), localeService, raidRepository,
botService, serverConfigRepository, pokemonRepository, gymRepository,
clockService, executorService, strategyService);
} catch (Throwable t) {
LOGGER.warn("Could not create raid group for server " + config.getServer() + " and raid " +
createdRaid + ": " + t.getMessage());
}
}
} else {
if (pokemonRaidInfo == null) {
LOGGER.debug("PokeRaidInfo was null for pokemon " + createdRaid.getPokemon().getName());
}
}
}
protected static LocalTime getAutoCreatedRaidGroupStart(LocalDateTime now, Raid createdRaid) {
LocalTime groupStart = null;
final LocalDateTime endOfRaid = createdRaid.getEndOfRaid();
final LocalDateTime startOfRaid = getStartOfRaid(endOfRaid, createdRaid.isExRaid());
final int defaultNumberOfMinutesAfterHatchForGroupCreation = getDefaultNumberOfMinutesAfterHatchForGroupCreation();
if (now.isBefore(startOfRaid)) {
groupStart = startOfRaid.toLocalTime().plusMinutes(defaultNumberOfMinutesAfterHatchForGroupCreation);
} else if (now.isAfter(startOfRaid) && now.plusMinutes(defaultNumberOfMinutesAfterHatchForGroupCreation)
.plusMinutes(5)
.isBefore(endOfRaid)) {
groupStart = now.toLocalTime().plusMinutes(defaultNumberOfMinutesAfterHatchForGroupCreation);
} else if (now.isBefore(endOfRaid.minusMinutes(10))) {
groupStart = endOfRaid.toLocalTime().minusMinutes(5);
}
return groupStart;
}
protected static int getDefaultNumberOfMinutesAfterHatchForGroupCreation() {
return BotService.currentTier5Bosses.size() > 1 ? 30 : 10;
}
public static boolean isUserPokeAlarmBot(User user) {
return user.isBot() && (user.getName().equalsIgnoreCase("raid") ||
user.getName().equalsIgnoreCase("egg"));
}
public static boolean isUserGymhuntrBot(User user) {
return user.isBot() && StringUtils.containsIgnoreCase(
user.getName(), "gymhuntrbot");
}
public static List<String> pokeAlarmArgumentsToCreateRaid(String title, String description,
ClockService clockService) {
String gym, pokemon, timeString;
if (title.contains("raid is available against")) {
final String[] titleSplit = title.replaceAll("!", "").split(" ");
pokemon = titleSplit[titleSplit.length - 1];
final String[] descriptionSplitNewLines = description.split("\n");
final String[] descriptionSplit = descriptionSplitNewLines[0].split(" ");
timeString = printTime(LocalTime.parse(descriptionSplit[descriptionSplit.length - 3]));
final String[] gymSplit = title.split("raid is available against");
gym = gymSplit[0].trim();
} else if (title.contains("has a level 5") && description.contains("will hatch")) {
final String[] descriptionSplit = description.split(" ");
timeString = printTime(LocalTime.parse(descriptionSplit[descriptionSplit.length - 3])
.plusMinutes(Utils.RAID_DURATION_IN_MINUTES));
gym = title.split("has a level 5")[0].trim();
pokemon = getTier5RaidBossBasedOnSeason(clockService);
} else if (title.contains("has a level 6") && description.contains("will hatch")) {
final String[] descriptionSplit = description.split(" ");
timeString = printTime(LocalTime.parse(descriptionSplit[descriptionSplit.length - 3])
.plusMinutes(Utils.RAID_DURATION_IN_MINUTES));
gym = title.split("has a level 6")[0].trim();
pokemon = PokemonRepository.EGG_6;
} else {
return new ArrayList<>(); // We shouldn't create a raid for this case, non-tier 5 egg
}
return Arrays.asList(new String[]{gym, pokemon, timeString});
}
protected static String getTier5RaidBossBasedOnSeason(ClockService clockService) {
String pokemon;
final List<String> currentTier5Bosses = BotService.currentTier5Bosses;
if (currentTier5Bosses == null || currentTier5Bosses.size() != 1){
pokemon = "Egg5";
} else {
pokemon = currentTier5Bosses.iterator().next();
}
return pokemon;
}
public static List<String> gymhuntrArgumentsToCreateRaid(String title, String description,
ClockService clockService) {
String gym, pokemon, timeString;
if (title.contains("Raid has started!")) {
final String[] firstPass = description.replaceAll("[*]", "")
.replaceAll("[.]", "")
.replaceAll("Raid Ending: ", "").split("\n");
final String[] timeArguments = firstPass[3].replaceAll("hours ", "")
.replaceAll("min ", "").replaceAll("sec", "").split(" ");
timeString = printTime(clockService.getCurrentTime()
.plusHours(Long.parseLong(timeArguments[0]))
.plusMinutes(Long.parseLong(timeArguments[1]))
.plusSeconds(Long.parseLong(timeArguments[2])));
gym = firstPass[0].trim();
pokemon = firstPass[1].trim();
} else if (title.contains("Level 5 Raid is starting soon!")) {
final String[] firstPass = description.replaceAll("[*]", "")
.replaceAll("[.]", "")
.replaceAll("Raid Starting: ", "").split("\n");
gym = firstPass[0].trim();
final String[] timeArguments = firstPass[1].replaceAll("hours ", "")
.replaceAll("min ", "").replaceAll("sec", "").split(" ");
timeString = printTime(clockService.getCurrentTime()
.plusHours(Long.parseLong(timeArguments[0]))
.plusMinutes(Long.parseLong(timeArguments[1]))
.plusSeconds(Long.parseLong(timeArguments[2]))
.plusMinutes(Utils.RAID_DURATION_IN_MINUTES));
pokemon = getTier5RaidBossBasedOnSeason(clockService);
} else if (title.contains("Level 6 Raid is starting soon!")) {
final String[] firstPass = description.replaceAll("[*]", "")
.replaceAll("[.]", "")
.replaceAll("Raid Starting: ", "").split("\n");
gym = firstPass[0].trim();
final String[] timeArguments = firstPass[1].replaceAll("hours ", "")
.replaceAll("min ", "").replaceAll("sec", "").split(" ");
timeString = printTime(clockService.getCurrentTime()
.plusHours(Long.parseLong(timeArguments[0]))
.plusMinutes(Long.parseLong(timeArguments[1]))
.plusSeconds(Long.parseLong(timeArguments[2]))
.plusMinutes(Utils.RAID_DURATION_IN_MINUTES));
pokemon = PokemonRepository.EGG_6;
} else {
return new ArrayList<>(); // = We shouldn't create this raid, since it is a non-tier 5 egg
}
final String[] argumentsInOrder = new String[]{gym, pokemon, timeString};
return Arrays.asList(argumentsInOrder);
}
}
| |
package com.wookler.server.common.model;
import com.google.common.base.Preconditions;
import com.wookler.server.common.Configurable;
import com.wookler.server.common.ConfigurationException;
import com.wookler.server.common.DataNotFoundException;
import com.wookler.server.common.config.*;
import com.wookler.server.common.utils.LogUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.HashMap;
import java.util.List;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.ReentrantLock;
/**
* Environment registry containing all the maps for the serialization handlers.
* <p/>
* Created by Subho on 2/20/2015.
*/
public class SerializerRegistry implements Configurable {
public static final class Constants {
public static final String CONFIG_NODE_ROOT = "serializers";
public static final String CONFIG_NODE_NAME = "serializer";
public static final String CONFIG_ATTR_CLASS = "class";
public static final String CONFIG_ATTR_IMPL = "handler";
}
private HashMap<Class<?>, Serializer<?>> serializers = new HashMap<>();
private ReentrantLock registryLock = new ReentrantLock();
public SerializerRegistry() {
// Default serializer for byte array. Does nothing.
Serializer<byte[]> bs = new ByteSerializer();
serializers.put(byte[].class, bs);
// Default UTF-8 serializer for Strings.
Serializer<String> ss = new UTF8StringSerializer();
serializers.put(String.class, ss);
}
public void lock() {
registryLock.lock();
}
public boolean lock(long timeout, TimeUnit unit)
throws InterruptedException {
return registryLock.tryLock(timeout, unit);
}
public void unlock() {
registryLock.unlock();
}
/**
* Register a new serializer for the specified type.
*
* @param type
* - Class type handled by the serializer.
* @param serializer
* - Serializer implementation.
* @return - Self.
*/
public SerializerRegistry add(Class<?> type, Serializer<?> serializer) {
Preconditions.checkNotNull(type);
Preconditions.checkNotNull(serializer);
serializers.put(type, serializer);
return this;
}
/**
* Get the registered Serializer for this type.
*
* @param type
* - Class type generic.
* @return - Registered serializer or NULL if none found.
*/
public Serializer<?> get(Class<?> type) {
Preconditions.checkNotNull(type);
if (serializers.containsKey(type)) {
Serializer<?> serializer = serializers.get(type);
return serializer;
}
return null;
}
/**
* Find a serialization handler for the specified type. This also searches
* to check if any serializer registered for an ancestor class can handle
* this type.
*
* @param type
* - Class type.
* @return - Registered serializer or NULL if none found.
*/
public Serializer<?> find(Class<?> type) {
Preconditions.checkNotNull(type);
return find(type, type);
}
private Serializer<?> find(Class<?> current, Class<?> type) {
if (serializers.containsKey(current)) {
Serializer<?> s = serializers.get(type);
if (s.accept(type)) {
return s;
}
} else {
Class<?> parent = current.getSuperclass();
if (!parent.equals(Object.class)) {
return find(parent, type);
}
}
return null;
}
/**
* Configure this instance using the specified parameters.
*
* @param config
* - Configuration node for this instance.
* @throws -
* Configuration Exception
*/
@Override
public void configure(ConfigNode config) throws ConfigurationException {
Preconditions.checkNotNull(config);
try {
if (config.name().compareTo(Constants.CONFIG_NODE_NAME) != 0)
throw new ConfigurationException(String.format(
"Invalid configuration node specified. [name=%s]",
config.name()));
if (config instanceof ConfigPath) {
configSerializer(config);
} else if (config instanceof ConfigValueList) {
ConfigValueList cvl = (ConfigValueList) config;
List<ConfigNode> nodes = cvl.values();
if (nodes != null && !nodes.isEmpty()) {
for (ConfigNode node : nodes) {
configSerializer(node);
}
}
} else {
throw new ConfigurationException(String.format(
"Invalid config node type. [expected:%s][actual:%s]",
ConfigPath.class.getCanonicalName(),
config.getClass().getCanonicalName()));
}
} catch (ConfigurationException e) {
LogUtils.stacktrace(getClass(), e);
throw e;
}
}
public void configSerializer(ConfigNode node)
throws ConfigurationException {
try {
if (!(node instanceof ConfigPath))
throw new ConfigurationException(String.format(
"Invalid config node type. [expected:%s][actual:%s]",
ConfigPath.class.getCanonicalName(),
node.getClass().getCanonicalName()));
ConfigAttributes attrs = ConfigUtils.attributes(node);
String type = attrs.attribute(Constants.CONFIG_ATTR_CLASS);
if (StringUtils.isEmpty(type)) {
throw new ConfigurationException("Missing attribute. [name="
+ Constants.CONFIG_ATTR_CLASS + "]");
}
Class<?> cls = Class.forName(type);
String stype = attrs.attribute(Constants.CONFIG_ATTR_IMPL);
if (StringUtils.isEmpty(stype)) {
throw new ConfigurationException("Missing attribute. [name="
+ Constants.CONFIG_ATTR_IMPL + "]");
}
Class<?> scls = Class.forName(stype);
Object o = scls.newInstance();
if (!(o instanceof Serializer))
throw new ConfigurationException(
"Invalid Serializer implementation. [class="
+ scls.getCanonicalName() + "]");
Serializer<?> serializer = (Serializer<?>) o;
if (!serializer.accept(cls)) {
throw new ConfigurationException(String.format(
"Invalid Serializer being registered. Serializer [%s] does not handle type [%s]",
scls.getCanonicalName(), cls.getCanonicalName()));
}
serializers.put(cls, serializer);
} catch (DataNotFoundException e) {
throw new ConfigurationException("Missing attributes.", e);
} catch (ClassNotFoundException e) {
throw new ConfigurationException("Invalid attribute.", e);
} catch (InstantiationException e) {
throw new ConfigurationException("Invalid Serializer class.", e);
} catch (IllegalAccessException e) {
throw new ConfigurationException("Invalid Serializer class.", e);
}
}
/**
* Dispose this configured instance.
*/
@Override
public void dispose() {
serializers.clear();
}
private static final SerializerRegistry REGISTRY = new SerializerRegistry();
/**
* Get the handle to the Serializer registry.
*
* @return - Serializer registry singleton.
*/
public static final SerializerRegistry get() {
return REGISTRY;
}
public static final SerializerRegistry init(ConfigNode config)
throws ConfigurationException {
REGISTRY.configure(config);
return REGISTRY;
}
}
| |
package chav1961.purelib.fsys;
import java.io.IOException;
import java.net.URI;
import java.nio.channels.SeekableByteChannel;
import java.nio.file.AccessMode;
import java.nio.file.CopyOption;
import java.nio.file.DirectoryStream;
import java.nio.file.DirectoryStream.Filter;
import java.nio.file.FileStore;
import java.nio.file.FileSystem;
import java.nio.file.LinkOption;
import java.nio.file.OpenOption;
import java.nio.file.Path;
import java.nio.file.PathMatcher;
import java.nio.file.WatchService;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileAttribute;
import java.nio.file.attribute.FileAttributeView;
import java.nio.file.attribute.UserPrincipalLookupService;
import java.nio.file.spi.FileSystemProvider;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicBoolean;
import chav1961.purelib.fsys.interfaces.FileSystemInterface;
public class PureLibFileSystemProvider extends FileSystemProvider {
private static final Map<String,?> EMPTY_ENV = new HashMap<>();
@Override
public String getScheme() {
return FileSystemInterface.FILESYSTEM_URI_SCHEME;
}
@Override
public FileSystem newFileSystem(final URI uri, Map<String, ?> env) throws IOException {
if (uri == null) {
throw new NullPointerException("File system URI to create/get can't be null");
}
else {
return new PureLibFileSystem(this,FileSystemFactory.createFileSystem(uri));
}
}
@Override
public FileSystem getFileSystem(final URI uri) {
try{
return newFileSystem(uri,EMPTY_ENV);
} catch (IOException e) {
return null;
}
}
@Override
public Path getPath(final URI uri) {
// TODO Auto-generated method stub
return null;
}
@Override
public SeekableByteChannel newByteChannel(final Path path, final Set<? extends OpenOption> options, final FileAttribute<?>... attrs) throws IOException {
// TODO Auto-generated method stub
return null;
}
@Override
public DirectoryStream<Path> newDirectoryStream(final Path dir, final Filter<? super Path> filter) throws IOException {
// TODO Auto-generated method stub
return null;
}
@Override
public void createDirectory(final Path dir, final FileAttribute<?>... attrs) throws IOException {
// TODO Auto-generated method stub
}
@Override
public void delete(final Path path) throws IOException {
// TODO Auto-generated method stub
}
@Override
public void copy(final Path source, final Path target, final CopyOption... options) throws IOException {
// TODO Auto-generated method stub
}
@Override
public void move(final Path source, final Path target, final CopyOption... options) throws IOException {
// TODO Auto-generated method stub
}
@Override
public boolean isSameFile(final Path path, final Path path2) throws IOException {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isHidden(final Path path) throws IOException {
// TODO Auto-generated method stub
return false;
}
@Override
public FileStore getFileStore(final Path path) throws IOException {
// TODO Auto-generated method stub
return null;
}
@Override
public void checkAccess(final Path path, final AccessMode... modes) throws IOException {
// TODO Auto-generated method stub
}
@Override
public <V extends FileAttributeView> V getFileAttributeView(final Path path, final Class<V> type, final LinkOption... options) {
// TODO Auto-generated method stub
return null;
}
@Override
public <A extends BasicFileAttributes> A readAttributes(final Path path, final Class<A> type, final LinkOption... options) throws IOException {
// TODO Auto-generated method stub
return null;
}
@Override
public Map<String, Object> readAttributes(final Path path, final String attributes, final LinkOption... options) throws IOException {
// TODO Auto-generated method stub
return null;
}
@Override
public void setAttribute(final Path path, final String attribute, final Object value, final LinkOption... options) throws IOException {
// TODO Auto-generated method stub
}
public FileSystemInterface getWrappedFileSystem() {
// TODO Auto-generated method stub
return null;
}
private static class PureLibFileSystem extends FileSystem {
private final FileSystemProvider fsp;
private final FileSystemInterface fsi;
private final AtomicBoolean closed = new AtomicBoolean(false);
PureLibFileSystem(final FileSystemProvider provider, final FileSystemInterface wrappedFS) {
this.fsp = provider;
this.fsi = wrappedFS;
}
@Override
public FileSystemProvider provider() {
return fsp;
}
@Override
public void close() throws IOException {
if (!closed.getAndSet(true)) {
fsi.close();
}
}
@Override
public boolean isOpen() {
// TODO Auto-generated method stub
return false;
}
@Override
public boolean isReadOnly() {
// TODO Auto-generated method stub
return false;
}
@Override
public String getSeparator() {
// TODO Auto-generated method stub
return null;
}
@Override
public Iterable<Path> getRootDirectories() {
// TODO Auto-generated method stub
return null;
}
@Override
public Iterable<FileStore> getFileStores() {
// TODO Auto-generated method stub
return null;
}
@Override
public Set<String> supportedFileAttributeViews() {
// TODO Auto-generated method stub
return null;
}
@Override
public Path getPath(String first, String... more) {
// TODO Auto-generated method stub
return null;
}
@Override
public PathMatcher getPathMatcher(String syntaxAndPattern) {
// TODO Auto-generated method stub
return null;
}
@Override
public UserPrincipalLookupService getUserPrincipalLookupService() {
// TODO Auto-generated method stub
return null;
}
@Override
public WatchService newWatchService() throws IOException {
// TODO Auto-generated method stub
return null;
}
}
}
| |
/*
* Copyright (c) 2008-2017, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.internal.partition.operation;
import com.hazelcast.internal.partition.InternalPartition;
import com.hazelcast.internal.partition.InternalPartitionService;
import com.hazelcast.internal.partition.MigrationInfo;
import com.hazelcast.internal.partition.NonFragmentedServiceNamespace;
import com.hazelcast.internal.partition.PartitionReplicaVersionManager;
import com.hazelcast.internal.partition.ReplicaFragmentMigrationState;
import com.hazelcast.internal.partition.impl.InternalPartitionServiceImpl;
import com.hazelcast.internal.partition.impl.MigrationManager;
import com.hazelcast.internal.partition.impl.PartitionDataSerializerHook;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.nio.ObjectDataInput;
import com.hazelcast.nio.ObjectDataOutput;
import com.hazelcast.spi.FragmentedMigrationAwareService;
import com.hazelcast.spi.NodeEngine;
import com.hazelcast.spi.Operation;
import com.hazelcast.spi.PartitionReplicationEvent;
import com.hazelcast.spi.ServiceNamespace;
import com.hazelcast.spi.UrgentSystemOperation;
import com.hazelcast.spi.impl.NodeEngineImpl;
import com.hazelcast.spi.impl.PartitionSpecificRunnable;
import com.hazelcast.spi.impl.SimpleExecutionCallback;
import com.hazelcast.spi.impl.operationservice.InternalOperationService;
import com.hazelcast.spi.impl.servicemanager.ServiceInfo;
import java.io.IOException;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import static java.util.Collections.singleton;
/**
* Migration request operation used by Hazelcast version 3.9
* Sent from the master node to the partition owner. It will perform the migration by preparing the migration operations and
* sending them to the destination. A response with a value equal to {@link Boolean#TRUE} indicates a successful migration.
* It runs on the migration source and transfers the partition with multiple shots.
* It divides the partition data into fragments and send a group of fragments within each shot.
*/
public class MigrationRequestOperation extends BaseMigrationSourceOperation {
private boolean fragmentedMigrationEnabled;
private transient ServiceNamespacesContext namespacesContext;
public MigrationRequestOperation() {
}
public MigrationRequestOperation(MigrationInfo migrationInfo, int partitionStateVersion,
boolean fragmentedMigrationEnabled) {
super(migrationInfo, partitionStateVersion);
this.fragmentedMigrationEnabled = fragmentedMigrationEnabled;
}
@Override
public void run() {
verifyMasterOnMigrationSource();
NodeEngineImpl nodeEngine = (NodeEngineImpl) getNodeEngine();
Address source = migrationInfo.getSource();
Address destination = migrationInfo.getDestination();
verifyExistingTarget(nodeEngine, destination);
if (destination.equals(source)) {
getLogger().warning("Source and destination addresses are the same! => " + toString());
setFailed();
return;
}
InternalPartition partition = getPartition();
verifySource(nodeEngine.getThisAddress(), partition);
setActiveMigration();
if (!migrationInfo.startProcessing()) {
getLogger().warning("Migration is cancelled -> " + migrationInfo);
setFailed();
return;
}
try {
executeBeforeMigrations();
namespacesContext = new ServiceNamespacesContext(nodeEngine, getPartitionReplicationEvent());
ReplicaFragmentMigrationState migrationState
= fragmentedMigrationEnabled ? createNextReplicaFragmentMigrationState()
: createAllReplicaFragmentsMigrationState();
invokeMigrationOperation(destination, migrationState, true);
returnResponse = false;
} catch (Throwable e) {
logThrowable(e);
setFailed();
} finally {
migrationInfo.doneProcessing();
}
}
/**
* Invokes the {@link MigrationOperation} on the migration destination.
*/
private void invokeMigrationOperation(Address destination, ReplicaFragmentMigrationState migrationState,
boolean firstFragment) throws IOException {
boolean lastFragment = !fragmentedMigrationEnabled || !namespacesContext.hasNext();
Operation operation = new MigrationOperation(migrationInfo, partitionStateVersion, migrationState,
firstFragment, lastFragment);
ILogger logger = getLogger();
if (logger.isFinestEnabled()) {
Set<ServiceNamespace> namespaces = migrationState != null
? migrationState.getNamespaceVersionMap().keySet() : Collections.<ServiceNamespace>emptySet();
logger.finest("Invoking MigrationOperation for namespaces " + namespaces + " and " + migrationInfo
+ ", lastFragment: " + lastFragment);
}
NodeEngine nodeEngine = getNodeEngine();
InternalPartitionServiceImpl partitionService = getService();
nodeEngine.getOperationService()
.createInvocationBuilder(InternalPartitionService.SERVICE_NAME, operation, destination)
.setExecutionCallback(new MigrationCallback())
.setResultDeserialized(true)
.setCallTimeout(partitionService.getPartitionMigrationTimeout())
.setTryCount(InternalPartitionService.MIGRATION_RETRY_COUNT)
.setTryPauseMillis(InternalPartitionService.MIGRATION_RETRY_PAUSE)
.setReplicaIndex(getReplicaIndex())
.invoke();
}
private void trySendNewFragment() {
try {
assert fragmentedMigrationEnabled : "Fragmented migration should be enabled!";
verifyMasterOnMigrationSource();
NodeEngine nodeEngine = getNodeEngine();
Address destination = migrationInfo.getDestination();
verifyExistingTarget(nodeEngine, destination);
InternalPartitionServiceImpl partitionService = getService();
MigrationManager migrationManager = partitionService.getMigrationManager();
MigrationInfo currentActiveMigration = migrationManager.setActiveMigration(migrationInfo);
if (!migrationInfo.equals(currentActiveMigration)) {
throw new IllegalStateException("Current active migration " + currentActiveMigration
+ " is different than expected: " + migrationInfo);
}
ReplicaFragmentMigrationState migrationState = createNextReplicaFragmentMigrationState();
if (migrationState != null) {
invokeMigrationOperation(destination, migrationState, false);
} else {
getLogger().finest("All migration fragments done for " + migrationInfo);
completeMigration(true);
}
} catch (Throwable e) {
logThrowable(e);
completeMigration(false);
}
}
private ReplicaFragmentMigrationState createNextReplicaFragmentMigrationState() {
assert fragmentedMigrationEnabled : "Fragmented migration should be enabled!";
if (!namespacesContext.hasNext()) {
return null;
}
ServiceNamespace namespace = namespacesContext.next();
if (namespace.equals(NonFragmentedServiceNamespace.INSTANCE)) {
return createNonFragmentedReplicaFragmentMigrationState();
}
return createReplicaFragmentMigrationStateFor(namespace);
}
private ReplicaFragmentMigrationState createNonFragmentedReplicaFragmentMigrationState() {
PartitionReplicationEvent event = getPartitionReplicationEvent();
Collection<Operation> operations = createNonFragmentedReplicationOperations(event);
Collection<ServiceNamespace> namespaces =
Collections.<ServiceNamespace>singleton(NonFragmentedServiceNamespace.INSTANCE);
return createReplicaFragmentMigrationState(namespaces, operations);
}
private ReplicaFragmentMigrationState createReplicaFragmentMigrationStateFor(ServiceNamespace ns) {
PartitionReplicationEvent event = getPartitionReplicationEvent();
Collection<String> serviceNames = namespacesContext.getServiceNames(ns);
Collection<Operation> operations = createFragmentReplicationOperations(event, ns, serviceNames);
return createReplicaFragmentMigrationState(singleton(ns), operations);
}
private ReplicaFragmentMigrationState createAllReplicaFragmentsMigrationState() {
PartitionReplicationEvent event = getPartitionReplicationEvent();
Collection<Operation> operations = createAllReplicationOperations(event);
return createReplicaFragmentMigrationState(namespacesContext.allNamespaces, operations);
}
private ReplicaFragmentMigrationState createReplicaFragmentMigrationState(Collection<ServiceNamespace>
namespaces, Collection<Operation> operations) {
InternalPartitionService partitionService = getService();
PartitionReplicaVersionManager versionManager = partitionService.getPartitionReplicaVersionManager();
Map<ServiceNamespace, long[]> versions = new HashMap<ServiceNamespace, long[]>(namespaces.size());
for (ServiceNamespace namespace : namespaces) {
long[] v = versionManager.getPartitionReplicaVersions(getPartitionId(), namespace);
versions.put(namespace, v);
}
return new ReplicaFragmentMigrationState(versions, operations);
}
@Override
public int getId() {
return PartitionDataSerializerHook.MIGRATION_REQUEST;
}
@Override
protected void writeInternal(ObjectDataOutput out) throws IOException {
super.writeInternal(out);
out.writeBoolean(fragmentedMigrationEnabled);
}
@Override
protected void readInternal(ObjectDataInput in) throws IOException {
super.readInternal(in);
fragmentedMigrationEnabled = in.readBoolean();
}
/**
* Processes the migration result sent from the migration destination and sends the response to the caller of this operation.
* A response equal to {@link Boolean#TRUE} indicates successful migration.
*/
private final class MigrationCallback extends SimpleExecutionCallback<Object> {
private MigrationCallback() {
}
@Override
public void notify(Object result) {
if (Boolean.TRUE.equals(result)) {
if (fragmentedMigrationEnabled) {
InternalOperationService operationService = (InternalOperationService) getNodeEngine().getOperationService();
operationService.execute(new SendNewMigrationFragmentRunnable());
} else {
completeMigration(true);
}
} else {
completeMigration(false);
}
}
}
private final class SendNewMigrationFragmentRunnable implements PartitionSpecificRunnable, UrgentSystemOperation {
@Override
public int getPartitionId() {
return MigrationRequestOperation.this.getPartitionId();
}
@Override
public void run() {
trySendNewFragment();
}
}
private static class ServiceNamespacesContext {
final Collection<ServiceNamespace> allNamespaces = new HashSet<ServiceNamespace>();
final Map<ServiceNamespace, Collection<String>> namespaceToServices = new HashMap<ServiceNamespace, Collection<String>>();
final Iterator<ServiceNamespace> namespaceIterator;
ServiceNamespacesContext(NodeEngineImpl nodeEngine, PartitionReplicationEvent event) {
Collection<ServiceInfo> services = nodeEngine.getServiceInfos(FragmentedMigrationAwareService.class);
for (ServiceInfo serviceInfo : services) {
FragmentedMigrationAwareService service = serviceInfo.getService();
Collection<ServiceNamespace> namespaces = service.getAllServiceNamespaces(event);
if (namespaces != null) {
String serviceName = serviceInfo.getName();
allNamespaces.addAll(namespaces);
addNamespaceToServiceMappings(namespaces, serviceName);
}
}
allNamespaces.add(NonFragmentedServiceNamespace.INSTANCE);
namespaceIterator = allNamespaces.iterator();
}
private void addNamespaceToServiceMappings(Collection<ServiceNamespace> namespaces, String serviceName) {
for (ServiceNamespace ns : namespaces) {
Collection<String> serviceNames = namespaceToServices.get(ns);
if (serviceNames == null) {
// generally a namespace belongs to a single service only
namespaceToServices.put(ns, singleton(serviceName));
} else if (serviceNames.size() == 1) {
serviceNames = new HashSet<String>(serviceNames);
serviceNames.add(serviceName);
namespaceToServices.put(ns, serviceNames);
} else {
serviceNames.add(serviceName);
}
}
}
boolean hasNext() {
return namespaceIterator.hasNext();
}
ServiceNamespace next() {
return namespaceIterator.next();
}
Collection<String> getServiceNames(ServiceNamespace ns) {
return namespaceToServices.get(ns);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package org.apache.hadoop.hbase.io.encoding;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.util.ByteBufferUtils;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.RawComparator;
/**
* Encoder similar to {@link DiffKeyDeltaEncoder} but supposedly faster.
*
* Compress using:
* - store size of common prefix
* - save column family once in the first KeyValue
* - use integer compression for key, value and prefix (7-bit encoding)
* - use bits to avoid duplication key length, value length
* and type if it same as previous
* - store in 3 bits length of prefix timestamp
* with previous KeyValue's timestamp
* - one bit which allow to omit value if it is the same
*
* Format:
* - 1 byte: flag
* - 1-5 bytes: key length (only if FLAG_SAME_KEY_LENGTH is not set in flag)
* - 1-5 bytes: value length (only if FLAG_SAME_VALUE_LENGTH is not set in flag)
* - 1-5 bytes: prefix length
* - ... bytes: rest of the row (if prefix length is small enough)
* - ... bytes: qualifier (or suffix depending on prefix length)
* - 1-8 bytes: timestamp suffix
* - 1 byte: type (only if FLAG_SAME_TYPE is not set in the flag)
* - ... bytes: value (only if FLAG_SAME_VALUE is not set in the flag)
*
*/
public class FastDiffDeltaEncoder extends BufferedDataBlockEncoder {
  // Layout of the per-KeyValue flag byte: bits 0-2 hold the number of leading
  // timestamp bytes shared with the previous KeyValue (at most 7, see
  // findCommonTimestampPrefix); bits 3-6 mark fields identical to the previous
  // KeyValue, allowing them to be omitted from the encoded stream.
  final int MASK_TIMESTAMP_LENGTH = (1 << 0) | (1 << 1) | (1 << 2);
  final int SHIFT_TIMESTAMP_LENGTH = 0;
  final int FLAG_SAME_KEY_LENGTH = 1 << 3;
  final int FLAG_SAME_VALUE_LENGTH = 1 << 4;
  final int FLAG_SAME_TYPE = 1 << 5;
  final int FLAG_SAME_VALUE = 1 << 6;

  /**
   * Compression state extended with the previous KeyValue's raw timestamp
   * bytes, needed to compute (encode) and re-apply (decode) the shared
   * timestamp prefix.
   */
  private static class FastDiffCompressionState extends CompressionState {
    byte[] timestamp = new byte[KeyValue.TIMESTAMP_SIZE];
    // Offset of the previous KeyValue's timestamp inside the output buffer;
    // used when decoding to copy the shared prefix from already-written bytes.
    int prevTimestampOffset;

    @Override
    protected void readTimestamp(ByteBuffer in) {
      in.get(timestamp);
    }

    @Override
    void copyFrom(CompressionState state) {
      super.copyFrom(state);
      FastDiffCompressionState state2 = (FastDiffCompressionState) state;
      System.arraycopy(state2.timestamp, 0, timestamp, 0,
          KeyValue.TIMESTAMP_SIZE);
      prevTimestampOffset = state2.prevTimestampOffset;
    }

    /**
     * Copies the first key/value from the given stream, and initializes
     * decompression state based on it. Assumes that we have already read key
     * and value lengths. Does not set {@link #qualifierLength} (not used by
     * decompression) or {@link #prevOffset} (set by the caller afterwards).
     */
    private void decompressFirstKV(ByteBuffer out, DataInputStream in)
        throws IOException {
      int kvPos = out.position();
      out.putInt(keyLength);
      out.putInt(valueLength);
      // Timestamp sits at the end of the key, just before the 1-byte type.
      prevTimestampOffset = out.position() + keyLength -
          KeyValue.TIMESTAMP_TYPE_SIZE;
      ByteBufferUtils.copyFromStreamToBuffer(out, in, keyLength + valueLength);
      rowLength = out.getShort(kvPos + KeyValue.ROW_OFFSET);
      familyLength = out.get(kvPos + KeyValue.ROW_OFFSET +
          KeyValue.ROW_LENGTH_SIZE + rowLength);
      type = out.get(prevTimestampOffset + KeyValue.TIMESTAMP_SIZE);
    }
  }

  /**
   * Delta-encodes a single KeyValue from {@code in} onto {@code out} against
   * the previous KeyValue's state. The first KeyValue of a block is written in
   * full with a zero flag byte and zero common prefix.
   */
  private void compressSingleKeyValue(
      FastDiffCompressionState previousState,
      FastDiffCompressionState currentState,
      OutputStream out, ByteBuffer in) throws IOException {
    currentState.prevOffset = in.position();
    int keyLength = in.getInt();
    int valueOffset =
        currentState.prevOffset + keyLength + KeyValue.ROW_OFFSET;
    int valueLength = in.getInt();
    byte flag = 0;

    if (previousState.isFirst()) {
      // copy the key, there is no common prefix with none
      out.write(flag);
      ByteBufferUtils.putCompressedInt(out, keyLength);
      ByteBufferUtils.putCompressedInt(out, valueLength);
      ByteBufferUtils.putCompressedInt(out, 0);
      currentState.readKey(in, keyLength, valueLength);
      ByteBufferUtils.moveBufferToStream(out, in, keyLength + valueLength);
    } else {
      // find a common prefix and skip it
      // (timestamp and type are excluded from the prefix search; they are
      // delta-encoded separately below)
      int commonPrefix = ByteBufferUtils.findCommonPrefix(in, in.position(),
          previousState.prevOffset + KeyValue.ROW_OFFSET,
          Math.min(keyLength, previousState.keyLength) -
          KeyValue.TIMESTAMP_TYPE_SIZE);

      currentState.readKey(in, keyLength, valueLength,
          commonPrefix, previousState);

      if (keyLength == previousState.keyLength) {
        flag |= FLAG_SAME_KEY_LENGTH;
      }
      if (valueLength == previousState.valueLength) {
        flag |= FLAG_SAME_VALUE_LENGTH;
      }
      if (currentState.type == previousState.type) {
        flag |= FLAG_SAME_TYPE;
      }

      int commonTimestampPrefix = findCommonTimestampPrefix(
          currentState, previousState);
      flag |= commonTimestampPrefix << SHIFT_TIMESTAMP_LENGTH;

      // Check if current and previous values are the same. Compare value
      // length first as an optimization.
      if (valueLength == previousState.valueLength) {
        int previousValueOffset = previousState.prevOffset
            + previousState.keyLength + KeyValue.ROW_OFFSET;
        if (ByteBufferUtils.arePartsEqual(in,
            previousValueOffset, previousState.valueLength,
            valueOffset, valueLength)) {
          flag |= FLAG_SAME_VALUE;
        }
      }

      out.write(flag);
      if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
        ByteBufferUtils.putCompressedInt(out, keyLength);
      }
      if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
        ByteBufferUtils.putCompressedInt(out, valueLength);
      }
      ByteBufferUtils.putCompressedInt(out, commonPrefix);
      ByteBufferUtils.skip(in, commonPrefix);

      if (commonPrefix < currentState.rowLength + KeyValue.ROW_LENGTH_SIZE) {
        // Previous and current rows are different. Copy the differing part of
        // the row, skip the column family, and copy the qualifier.
        ByteBufferUtils.moveBufferToStream(out, in,
            currentState.rowLength + KeyValue.ROW_LENGTH_SIZE - commonPrefix);
        ByteBufferUtils.skip(in, currentState.familyLength +
            KeyValue.FAMILY_LENGTH_SIZE);
        ByteBufferUtils.moveBufferToStream(out, in,
            currentState.qualifierLength);
      } else {
        // The common part includes the whole row. As the column family is the
        // same across the whole file, it will automatically be included in the
        // common prefix, so we need not special-case it here.
        int restKeyLength = keyLength - commonPrefix -
            KeyValue.TIMESTAMP_TYPE_SIZE;
        ByteBufferUtils.moveBufferToStream(out, in, restKeyLength);
      }
      // Write only the timestamp suffix that differs from the previous KV.
      ByteBufferUtils.skip(in, commonTimestampPrefix);
      ByteBufferUtils.moveBufferToStream(out, in,
          KeyValue.TIMESTAMP_SIZE - commonTimestampPrefix);

      // Write the type if it is not the same as before.
      if ((flag & FLAG_SAME_TYPE) == 0) {
        out.write(currentState.type);
      }

      // Write the value if it is not the same as before.
      if ((flag & FLAG_SAME_VALUE) == 0) {
        ByteBufferUtils.copyBufferToStream(out, in, valueOffset, valueLength);
      }

      // Skip key type and value in the input buffer.
      ByteBufferUtils.skip(in, KeyValue.TYPE_SIZE + currentState.valueLength);
    }
  }

  /**
   * @return the number of leading timestamp bytes shared by the two states,
   *         capped at 7 so it fits in the 3-bit flag field
   */
  private int findCommonTimestampPrefix(FastDiffCompressionState left,
      FastDiffCompressionState right) {
    int prefixTimestamp = 0;
    while (prefixTimestamp < (KeyValue.TIMESTAMP_SIZE - 1) &&
        left.timestamp[prefixTimestamp]
            == right.timestamp[prefixTimestamp]) {
      prefixTimestamp++;
    }
    return prefixTimestamp; // has to be at most 7 bytes
  }

  /**
   * Decodes a single delta-encoded KeyValue from {@code source} into
   * {@code out}, reconstructing omitted fields from {@code state} (the
   * previously decoded KeyValue).
   */
  private void uncompressSingleKeyValue(DataInputStream source,
      ByteBuffer out, FastDiffCompressionState state)
      throws IOException, EncoderBufferTooSmallException {
    byte flag = source.readByte();
    int prevKeyLength = state.keyLength;

    if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
      state.keyLength = ByteBufferUtils.readCompressedInt(source);
    }
    if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
      state.valueLength = ByteBufferUtils.readCompressedInt(source);
    }
    int commonLength = ByteBufferUtils.readCompressedInt(source);

    ByteBufferUtils.ensureSpace(out, state.keyLength + state.valueLength +
        KeyValue.ROW_OFFSET);

    int kvPos = out.position();

    if (!state.isFirst()) {
      // copy the prefix
      // (when both lengths match the previous KV, the 8-byte length header is
      // part of the copyable prefix; otherwise the lengths are written
      // explicitly and the copy starts after them)
      int common;
      int prevOffset;

      if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
        out.putInt(state.keyLength);
        out.putInt(state.valueLength);
        prevOffset = state.prevOffset + KeyValue.ROW_OFFSET;
        common = commonLength;
      } else {
        if ((flag & FLAG_SAME_KEY_LENGTH) != 0) {
          prevOffset = state.prevOffset;
          common = commonLength + KeyValue.ROW_OFFSET;
        } else {
          out.putInt(state.keyLength);
          prevOffset = state.prevOffset + KeyValue.KEY_LENGTH_SIZE;
          common = commonLength + KeyValue.KEY_LENGTH_SIZE;
        }
      }

      ByteBufferUtils.copyFromBufferToBuffer(out, out, prevOffset, common);

      // copy the rest of the key from the buffer
      int keyRestLength;
      if (commonLength < state.rowLength + KeyValue.ROW_LENGTH_SIZE) {
        // omit the family part of the key, it is always the same
        int rowWithSizeLength;
        int rowRestLength;

        // check length of row
        if (commonLength < KeyValue.ROW_LENGTH_SIZE) {
          // not yet copied, do it now
          ByteBufferUtils.copyFromStreamToBuffer(out, source,
              KeyValue.ROW_LENGTH_SIZE - commonLength);
          rowWithSizeLength = out.getShort(out.position() -
              KeyValue.ROW_LENGTH_SIZE) + KeyValue.ROW_LENGTH_SIZE;
          rowRestLength = rowWithSizeLength - KeyValue.ROW_LENGTH_SIZE;
        } else {
          // already in kvBuffer, just read it
          rowWithSizeLength = out.getShort(kvPos + KeyValue.ROW_OFFSET) +
              KeyValue.ROW_LENGTH_SIZE;
          rowRestLength = rowWithSizeLength - commonLength;
        }

        // copy the rest of row
        ByteBufferUtils.copyFromStreamToBuffer(out, source, rowRestLength);

        // copy the column family from the previous KV (never in the stream)
        ByteBufferUtils.copyFromBufferToBuffer(out, out,
            state.prevOffset + KeyValue.ROW_OFFSET + KeyValue.ROW_LENGTH_SIZE
                + state.rowLength, state.familyLength
                + KeyValue.FAMILY_LENGTH_SIZE);
        state.rowLength = (short) (rowWithSizeLength -
            KeyValue.ROW_LENGTH_SIZE);

        keyRestLength = state.keyLength - rowWithSizeLength -
            state.familyLength -
            (KeyValue.FAMILY_LENGTH_SIZE + KeyValue.TIMESTAMP_TYPE_SIZE);
      } else {
        // prevRowWithSizeLength is the same as on previous row
        keyRestLength = state.keyLength - commonLength -
            KeyValue.TIMESTAMP_TYPE_SIZE;
      }
      // copy the rest of the key, after column family == column qualifier
      ByteBufferUtils.copyFromStreamToBuffer(out, source, keyRestLength);

      // copy timestamp: shared prefix from the previous KV, suffix from stream
      int prefixTimestamp =
          (flag & MASK_TIMESTAMP_LENGTH) >>> SHIFT_TIMESTAMP_LENGTH;
      ByteBufferUtils.copyFromBufferToBuffer(out, out,
          state.prevTimestampOffset, prefixTimestamp);
      state.prevTimestampOffset = out.position() - prefixTimestamp;
      ByteBufferUtils.copyFromStreamToBuffer(out, source,
          KeyValue.TIMESTAMP_SIZE - prefixTimestamp);

      // copy the type and value
      if ((flag & FLAG_SAME_TYPE) != 0) {
        out.put(state.type);
        if ((flag & FLAG_SAME_VALUE) != 0) {
          ByteBufferUtils.copyFromBufferToBuffer(out, out, state.prevOffset +
              KeyValue.ROW_OFFSET + prevKeyLength, state.valueLength);
        } else {
          ByteBufferUtils.copyFromStreamToBuffer(out, source,
              state.valueLength);
        }
      } else {
        if ((flag & FLAG_SAME_VALUE) != 0) {
          ByteBufferUtils.copyFromStreamToBuffer(out, source,
              KeyValue.TYPE_SIZE);
          ByteBufferUtils.copyFromBufferToBuffer(out, out, state.prevOffset +
              KeyValue.ROW_OFFSET + prevKeyLength, state.valueLength);
        } else {
          ByteBufferUtils.copyFromStreamToBuffer(out, source,
              state.valueLength + KeyValue.TYPE_SIZE);
        }
        // type changed, so re-read it from the bytes just written
        state.type = out.get(state.prevTimestampOffset +
            KeyValue.TIMESTAMP_SIZE);
      }
    } else { // this is the first element
      state.decompressFirstKV(out, source);
    }

    state.prevOffset = kvPos;
  }

  @Override
  public void compressKeyValues(DataOutputStream out,
      ByteBuffer in, boolean includesMemstoreTS) throws IOException {
    in.rewind();
    ByteBufferUtils.putInt(out, in.limit());
    FastDiffCompressionState previousState = new FastDiffCompressionState();
    FastDiffCompressionState currentState = new FastDiffCompressionState();
    while (in.hasRemaining()) {
      compressSingleKeyValue(previousState, currentState,
          out, in);
      afterEncodingKeyValue(in, out, includesMemstoreTS);

      // swap previousState <-> currentState
      FastDiffCompressionState tmp = previousState;
      previousState = currentState;
      currentState = tmp;
    }
  }

  @Override
  public ByteBuffer uncompressKeyValues(DataInputStream source,
      int allocHeaderLength, int skipLastBytes, boolean includesMemstoreTS)
      throws IOException {
    // First int of the stream is the total decompressed size (written by
    // compressKeyValues above).
    int decompressedSize = source.readInt();
    ByteBuffer buffer = ByteBuffer.allocate(decompressedSize +
        allocHeaderLength);
    buffer.position(allocHeaderLength);
    FastDiffCompressionState state = new FastDiffCompressionState();
    while (source.available() > skipLastBytes) {
      uncompressSingleKeyValue(source, buffer, state);
      afterDecodingKeyValue(source, buffer, includesMemstoreTS);
    }

    if (source.available() != skipLastBytes) {
      throw new IllegalStateException("Read too much bytes.");
    }

    return buffer;
  }

  @Override
  public ByteBuffer getFirstKeyInBlock(ByteBuffer block) {
    // Skip the decompressed-size int and the first KV's flag byte, then read
    // the key length; the first key always follows its three varint headers.
    block.mark();
    block.position(Bytes.SIZEOF_INT + Bytes.SIZEOF_BYTE);
    int keyLength = ByteBufferUtils.readCompressedInt(block);
    ByteBufferUtils.readCompressedInt(block); // valueLength
    ByteBufferUtils.readCompressedInt(block); // commonLength
    int pos = block.position();
    block.reset();
    return ByteBuffer.wrap(block.array(), pos, keyLength).slice();
  }

  @Override
  public String toString() {
    return FastDiffDeltaEncoder.class.getSimpleName();
  }

  /**
   * Seeker state that also remembers the previous KeyValue's timestamp+type
   * bytes and the cached row/family lengths (each including their size field).
   */
  protected static class FastDiffSeekerState extends SeekerState {
    private byte[] prevTimestampAndType =
        new byte[KeyValue.TIMESTAMP_TYPE_SIZE];
    private int rowLengthWithSize;
    private int familyLengthWithSize;

    @Override
    protected void copyFromNext(SeekerState that) {
      super.copyFromNext(that);
      FastDiffSeekerState other = (FastDiffSeekerState) that;
      System.arraycopy(other.prevTimestampAndType, 0,
          prevTimestampAndType, 0,
          KeyValue.TIMESTAMP_TYPE_SIZE);
      rowLengthWithSize = other.rowLengthWithSize;
      familyLengthWithSize = other.familyLengthWithSize;
    }
  }

  @Override
  public EncodedSeeker createSeeker(RawComparator<byte[]> comparator,
      final boolean includesMemstoreTS) {
    return new BufferedEncodedSeeker<FastDiffSeekerState>(comparator) {
      /**
       * Decodes the next KeyValue into {@code current}, mirroring the layout
       * produced by compressSingleKeyValue. {@code isFirst} selects the
       * full-copy path used for a block's first KeyValue.
       */
      private void decode(boolean isFirst) {
        byte flag = currentBuffer.get();
        if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
          // Key length changes, so stash the outgoing timestamp+type before
          // the key buffer is overwritten.
          if (!isFirst) {
            System.arraycopy(current.keyBuffer,
                current.keyLength - current.prevTimestampAndType.length,
                current.prevTimestampAndType, 0,
                current.prevTimestampAndType.length);
          }
          current.keyLength = ByteBufferUtils.readCompressedInt(currentBuffer);
        }
        if ((flag & FLAG_SAME_VALUE_LENGTH) == 0) {
          current.valueLength =
              ByteBufferUtils.readCompressedInt(currentBuffer);
        }
        current.lastCommonPrefix =
            ByteBufferUtils.readCompressedInt(currentBuffer);

        current.ensureSpaceForKey();

        if (isFirst) {
          // copy everything
          currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
              current.keyLength - current.prevTimestampAndType.length);
          current.rowLengthWithSize = Bytes.toShort(current.keyBuffer, 0) +
              Bytes.SIZEOF_SHORT;
          current.familyLengthWithSize =
              current.keyBuffer[current.rowLengthWithSize] + Bytes.SIZEOF_BYTE;
        } else if (current.lastCommonPrefix < Bytes.SIZEOF_SHORT) {
          // length of row is different, copy everything except family

          // copy the row size
          int oldRowLengthWithSize = current.rowLengthWithSize;
          currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
              Bytes.SIZEOF_SHORT - current.lastCommonPrefix);
          current.rowLengthWithSize = Bytes.toShort(current.keyBuffer, 0) +
              Bytes.SIZEOF_SHORT;

          // move the column family
          System.arraycopy(current.keyBuffer, oldRowLengthWithSize,
              current.keyBuffer, current.rowLengthWithSize,
              current.familyLengthWithSize);

          // copy the rest of row
          currentBuffer.get(current.keyBuffer, Bytes.SIZEOF_SHORT,
              current.rowLengthWithSize - Bytes.SIZEOF_SHORT);

          // copy the qualifier
          currentBuffer.get(current.keyBuffer, current.rowLengthWithSize
              + current.familyLengthWithSize, current.keyLength
              - current.rowLengthWithSize - current.familyLengthWithSize
              - current.prevTimestampAndType.length);
        } else if (current.lastCommonPrefix < current.rowLengthWithSize) {
          // We have to copy part of row and qualifier, but the column family
          // is in the right place.

          // before column family (rest of row)
          currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
              current.rowLengthWithSize - current.lastCommonPrefix);

          // after column family (qualifier)
          currentBuffer.get(current.keyBuffer, current.rowLengthWithSize
              + current.familyLengthWithSize, current.keyLength
              - current.rowLengthWithSize - current.familyLengthWithSize
              - current.prevTimestampAndType.length);
        } else {
          // copy just the ending
          currentBuffer.get(current.keyBuffer, current.lastCommonPrefix,
              current.keyLength - current.prevTimestampAndType.length
                  - current.lastCommonPrefix);
        }

        // timestamp
        int pos = current.keyLength - current.prevTimestampAndType.length;
        int commonTimestampPrefix = (flag & MASK_TIMESTAMP_LENGTH) >>>
            SHIFT_TIMESTAMP_LENGTH;
        if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
          // Shared timestamp prefix comes from the saved previous bytes.
          System.arraycopy(current.prevTimestampAndType, 0, current.keyBuffer,
              pos, commonTimestampPrefix);
        }
        pos += commonTimestampPrefix;
        currentBuffer.get(current.keyBuffer, pos,
            Bytes.SIZEOF_LONG - commonTimestampPrefix);
        pos += Bytes.SIZEOF_LONG - commonTimestampPrefix;

        // type
        if ((flag & FLAG_SAME_TYPE) == 0) {
          currentBuffer.get(current.keyBuffer, pos, Bytes.SIZEOF_BYTE);
        } else if ((flag & FLAG_SAME_KEY_LENGTH) == 0) {
          current.keyBuffer[pos] =
              current.prevTimestampAndType[Bytes.SIZEOF_LONG];
        }

        // handle value
        if ((flag & FLAG_SAME_VALUE) == 0) {
          current.valueOffset = currentBuffer.position();
          ByteBufferUtils.skip(currentBuffer, current.valueLength);
        }

        if (includesMemstoreTS) {
          current.memstoreTS = ByteBufferUtils.readVLong(currentBuffer);
        } else {
          current.memstoreTS = 0;
        }
        current.nextKvOffset = currentBuffer.position();
      }

      @Override
      protected void decodeFirst() {
        // Skip the leading decompressed-size int written by the encoder.
        ByteBufferUtils.skip(currentBuffer, Bytes.SIZEOF_INT);
        decode(true);
      }

      @Override
      protected void decodeNext() {
        decode(false);
      }

      @Override
      protected FastDiffSeekerState createSeekerState() {
        return new FastDiffSeekerState();
      }
    };
  }
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2012 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package org.appcelerator.titanium.proxy;
import java.lang.ref.WeakReference;
import java.lang.reflect.Array;
import java.util.ArrayList;
import java.util.ConcurrentModificationException;
import java.util.HashMap;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollFunction;
import org.appcelerator.kroll.KrollProxy;
import org.appcelerator.kroll.KrollRuntime;
import org.appcelerator.kroll.annotations.Kroll;
import org.appcelerator.kroll.common.AsyncResult;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.kroll.common.TiMessenger;
import org.appcelerator.titanium.TiApplication;
import org.appcelerator.titanium.TiBaseActivity;
import org.appcelerator.titanium.TiBlob;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiDimension;
import org.appcelerator.titanium.util.TiAnimationBuilder;
import org.appcelerator.titanium.util.TiConvert;
import org.appcelerator.titanium.util.TiUrl;
import org.appcelerator.titanium.util.TiUIHelper;
import org.appcelerator.titanium.view.TiAnimation;
import org.appcelerator.titanium.view.TiUIView;
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
import android.os.Build;
import android.os.Handler;
import android.os.Message;
import android.view.View;
import android.view.ViewAnimationUtils;
/**
* The parent class of view proxies.
*/
@Kroll.proxy(propertyAccessors={
// background properties
"backgroundImage", "backgroundRepeat", "backgroundSelectedImage",
"backgroundFocusedImage", "backgroundDisabledImage", "backgroundColor",
"backgroundSelectedColor", "backgroundFocusedColor", "backgroundDisabledColor",
"backgroundPadding", "backgroundGradient",
// border properties
"borderColor", "borderRadius", "borderWidth",
// layout / dimension (size/width/height have custom accessors)
"left", "top", "right", "bottom", "layout", "zIndex",
// accessibility
TiC.PROPERTY_ACCESSIBILITY_HINT, TiC.PROPERTY_ACCESSIBILITY_LABEL, TiC.PROPERTY_ACCESSIBILITY_VALUE,
TiC.PROPERTY_ACCESSIBILITY_HIDDEN,
// others
"focusable", "touchEnabled", "visible", "enabled", "opacity",
"softKeyboardOnFocus", "transform", "elevation", "touchTestId",
"translationX", "translationY", "translationZ",
TiC.PROPERTY_TRANSITION_NAME
})
public abstract class TiViewProxy extends KrollProxy implements Handler.Callback
{
private static final String TAG = "TiViewProxy";
private static final int MSG_FIRST_ID = KrollProxy.MSG_LAST_ID + 1;
private static final int MSG_GETVIEW = MSG_FIRST_ID + 100;
private static final int MSG_ADD_CHILD = MSG_FIRST_ID + 102;
private static final int MSG_REMOVE_CHILD = MSG_FIRST_ID + 103;
private static final int MSG_BLUR = MSG_FIRST_ID + 104;
private static final int MSG_FOCUS = MSG_FIRST_ID + 105;
private static final int MSG_SHOW = MSG_FIRST_ID + 106;
private static final int MSG_HIDE = MSG_FIRST_ID + 107;
private static final int MSG_ANIMATE = MSG_FIRST_ID + 108;
private static final int MSG_TOIMAGE = MSG_FIRST_ID + 109;
private static final int MSG_GETSIZE = MSG_FIRST_ID + 110;
private static final int MSG_GETRECT = MSG_FIRST_ID + 111;
private static final int MSG_FINISH_LAYOUT = MSG_FIRST_ID + 112;
private static final int MSG_UPDATE_LAYOUT = MSG_FIRST_ID + 113;
private static final int MSG_QUEUED_ANIMATE = MSG_FIRST_ID + 114;
private static final int MSG_INSERT_VIEW_AT = MSG_FIRST_ID + 115;
private static final int MSG_HIDE_KEYBOARD = MSG_FIRST_ID + 116;
protected static final int MSG_LAST_ID = MSG_FIRST_ID + 999;
protected ArrayList<TiViewProxy> children;
protected WeakReference<TiViewProxy> parent;
protected TiUIView view;
protected Object pendingAnimationLock;
protected TiAnimationBuilder pendingAnimation;
private boolean isDecorView = false;
private boolean overrideCurrentAnimation = false;
// TODO: Deprecated since Release 3.0.0
@Deprecated private AtomicBoolean layoutStarted = new AtomicBoolean();
/**
 * Constructs a new TiViewProxy instance.
 * @module.api
 */
public TiViewProxy()
{
    pendingAnimationLock = new Object();
    // Defaults used when the creation dictionary does not supply these
    // properties.
    defaultValues.put(TiC.PROPERTY_BACKGROUND_REPEAT, false);
    defaultValues.put(TiC.PROPERTY_VISIBLE, true);
    defaultValues.put(TiC.PROPERTY_ENABLED, true);
}
/**
 * Merges stylesheet-derived properties into the creation dictionary before
 * applying it, and captures the overrideCurrentAnimation flag if present.
 * @param options creation-time properties passed from JavaScript
 */
@Override
public void handleCreationDict(KrollDict options)
{
    options = handleStyleOptions(options);
    super.handleCreationDict(options);

    if (options.containsKey(TiC.PROPERTY_OVERRIDE_CURRENT_ANIMATION)) {
        overrideCurrentAnimation = TiConvert.toBoolean(options, TiC.PROPERTY_OVERRIDE_CURRENT_ANIMATION, false);
    }

    //TODO eventManager.addOnEventChangeListener(this);
}
/** @return the overrideCurrentAnimation flag captured from the creation dictionary. */
public boolean getOverrideCurrentAnimation() {
    return overrideCurrentAnimation;
}
// Cache mapping a proxy's creation URL to its stylesheet base name.
// NOTE(review): this static HashMap is read and written without
// synchronization — appears to assume single-threaded access; confirm.
private static HashMap<TiUrl,String> styleSheetUrlCache = new HashMap<TiUrl,String>(5);

/**
 * Derives the stylesheet lookup key for this proxy from its creation URL:
 * the last path segment with any ".js" occurrence removed (e.g.
 * "app://win/main.js" -> "main"). Falls back to "app://app.js" when the
 * creation URL is empty. Results are memoized in styleSheetUrlCache.
 * @return the base name used to look up stylesheet entries
 */
protected String getBaseUrlForStylesheet()
{
    TiUrl creationUrl = getCreationUrl();
    if (styleSheetUrlCache.containsKey(creationUrl)) {
        return styleSheetUrlCache.get(creationUrl);
    }

    String baseUrl = creationUrl.baseUrl;
    if (baseUrl == null || (baseUrl.equals("app://") && creationUrl.url.equals(""))) {
        baseUrl = "app://app.js";
    } else {
        baseUrl = creationUrl.resolve();
    }

    // Keep only the last path segment and strip the ".js" suffix.
    // NOTE(review): replace(".js", "") removes every ".js" occurrence, not
    // just a trailing extension — presumably file names contain it once.
    int idx = baseUrl.lastIndexOf("/");
    if (idx != -1) {
        baseUrl = baseUrl.substring(idx + 1).replace(".js", "");
    }

    styleSheetUrlCache.put(creationUrl,baseUrl);
    return baseUrl;
}
/**
 * Looks up stylesheet entries matching this proxy's id and class names and
 * merges them underneath the explicitly supplied creation options (explicit
 * options win over stylesheet values).
 * @param options the creation dictionary from JavaScript
 * @return the merged dictionary, or {@code options} unchanged when no
 *         stylesheet entry applies
 */
protected KrollDict handleStyleOptions(KrollDict options)
{
    String viewId = getProxyId();
    TreeSet<String> styleClasses = new TreeSet<String>();
    // TODO styleClasses.add(getShortAPIName().toLowerCase());

    if (options.containsKey(TiC.PROPERTY_ID)) {
        viewId = TiConvert.toString(options, TiC.PROPERTY_ID);
    }
    if (options.containsKey(TiC.PROPERTY_CLASS_NAME)) {
        // "className" is a space-separated list, CSS style.
        String className = TiConvert.toString(options, TiC.PROPERTY_CLASS_NAME);
        for (String clazz : className.split(" ")) {
            styleClasses.add(clazz);
        }
    }
    if (options.containsKey(TiC.PROPERTY_CLASS_NAMES)) {
        // "classNames" may arrive as an array of arbitrary objects.
        Object c = options.get(TiC.PROPERTY_CLASS_NAMES);
        if (c.getClass().isArray()) {
            int length = Array.getLength(c);
            for (int i = 0; i < length; i++) {
                Object clazz = Array.get(c, i);
                if (clazz != null) {
                    styleClasses.add(clazz.toString());
                }
            }
        }
    }

    String baseUrl = getBaseUrlForStylesheet();
    KrollDict dict = TiApplication.getInstance().getStylesheet(baseUrl, styleClasses, viewId);
    if (dict == null || dict.isEmpty()) {
        return options;
    }

    extend(dict);
    if (Log.isDebugModeEnabled()) {
        Log.d(TAG, "trying to get stylesheet for base:" + baseUrl + ",classes:" + styleClasses + ",id:" + viewId + ",dict:"
            + dict, Log.DEBUG_MODE);
    }
    // merge in our stylesheet details to the passed in dictionary
    // our passed in dictionary takes precedence over the stylesheet
    dict.putAll(options);
    return dict;
}
/**
 * @return the animation queued to run once the view is realized, or
 *         {@code null}; guarded by pendingAnimationLock
 */
public TiAnimationBuilder getPendingAnimation()
{
    synchronized (pendingAnimationLock) {
        return pendingAnimation;
    }
}
/**
 * Clears the pending animation, but only when it is exactly the given
 * builder — a newer animation queued by someone else is left untouched.
 * @param builder the animation builder believed to be pending
 */
public void clearAnimation(TiAnimationBuilder builder)
{
    synchronized (pendingAnimationLock) {
        boolean builderIsPending = (pendingAnimation != null) && (pendingAnimation == builder);
        if (builderIsPending) {
            pendingAnimation = null;
        }
    }
}
//This handler callback is tied to the UI thread.
/**
 * Dispatches view-lifecycle messages posted to the main-thread handler.
 * Messages carrying an AsyncResult are blocking calls from another thread;
 * their result must always be set so the caller can unblock.
 * @param msg the message posted via getMainHandler()
 * @return {@code true} when the message was consumed here
 */
@SuppressWarnings({ "unchecked", "rawtypes" })
public boolean handleMessage(Message msg)
{
    switch (msg.what) {
        case MSG_GETVIEW : {
            AsyncResult result = (AsyncResult) msg.obj;
            result.setResult(handleGetView());
            return true;
        }
        case MSG_ADD_CHILD : {
            AsyncResult result = (AsyncResult) msg.obj;
            handleAdd((TiViewProxy) result.getArg());
            result.setResult(null); //Signal added.
            return true;
        }
        case MSG_REMOVE_CHILD : {
            AsyncResult result = (AsyncResult) msg.obj;
            handleRemove((TiViewProxy) result.getArg());
            result.setResult(null); //Signal removed.
            return true;
        }
        case MSG_BLUR : {
            handleBlur();
            return true;
        }
        case MSG_HIDE_KEYBOARD : {
            handleHideKeyboard();
            return true;
        }
        case MSG_FOCUS : {
            handleFocus();
            return true;
        }
        case MSG_SHOW : {
            handleShow((KrollDict) msg.obj);
            return true;
        }
        case MSG_HIDE : {
            handleHide((KrollDict) msg.obj);
            return true;
        }
        case MSG_ANIMATE : {
            handleAnimate();
            return true;
        }
        case MSG_QUEUED_ANIMATE: {
            // An animation that was re-queued
            // because the view's height and width
            // were not yet known (i.e., not yet laid out)
            handleQueuedAnimate();
            return true;
        }
        case MSG_TOIMAGE: {
            AsyncResult result = (AsyncResult) msg.obj;
            result.setResult(handleToImage());
            return true;
        }
        case MSG_GETSIZE : {
            // Measures the native view; x/y are always 0 for size queries.
            AsyncResult result = (AsyncResult) msg.obj;
            KrollDict d = null;
            d = new KrollDict();
            d.put(TiC.PROPERTY_X, 0);
            d.put(TiC.PROPERTY_Y, 0);
            if (view != null) {
                View v = view.getNativeView();
                if (v != null) {
                    TiDimension nativeWidth = new TiDimension(v.getWidth(), TiDimension.TYPE_WIDTH);
                    TiDimension nativeHeight = new TiDimension(v.getHeight(), TiDimension.TYPE_HEIGHT);

                    // TiDimension needs a view to grab the window manager, so we'll just use the decorview of the current window
                    View decorView = TiApplication.getAppCurrentActivity().getWindow().getDecorView();

                    d.put(TiC.PROPERTY_WIDTH, nativeWidth.getAsDefault(decorView));
                    d.put(TiC.PROPERTY_HEIGHT, nativeHeight.getAsDefault(decorView));
                }
            }
            // Fall back to zero size when there is no realized native view.
            if (!d.containsKey(TiC.PROPERTY_WIDTH)) {
                d.put(TiC.PROPERTY_WIDTH, 0);
                d.put(TiC.PROPERTY_HEIGHT, 0);
            }

            result.setResult(d);
            return true;
        }
        case MSG_GETRECT: {
            // Like MSG_GETSIZE but also reports the view's left/top position.
            AsyncResult result = (AsyncResult) msg.obj;
            KrollDict d = null;
            d = new KrollDict();
            if (view != null) {
                View v = view.getOuterView();
                if (v != null) {
                    TiDimension nativeWidth = new TiDimension(v.getWidth(), TiDimension.TYPE_WIDTH);
                    TiDimension nativeHeight = new TiDimension(v.getHeight(), TiDimension.TYPE_HEIGHT);
                    TiDimension nativeLeft = new TiDimension(v.getLeft(), TiDimension.TYPE_LEFT);
                    TiDimension nativeTop = new TiDimension(v.getTop(), TiDimension.TYPE_TOP);

                    // TiDimension needs a view to grab the window manager, so we'll just use the decorview of the current window
                    View decorView = TiApplication.getAppCurrentActivity().getWindow().getDecorView();

                    d.put(TiC.PROPERTY_WIDTH, nativeWidth.getAsDefault(decorView));
                    d.put(TiC.PROPERTY_HEIGHT, nativeHeight.getAsDefault(decorView));
                    d.put(TiC.PROPERTY_X, nativeLeft.getAsDefault(decorView));
                    d.put(TiC.PROPERTY_Y, nativeTop.getAsDefault(decorView));
                }
            }
            if (!d.containsKey(TiC.PROPERTY_WIDTH)) {
                d.put(TiC.PROPERTY_WIDTH, 0);
                d.put(TiC.PROPERTY_HEIGHT, 0);
                d.put(TiC.PROPERTY_X, 0);
                d.put(TiC.PROPERTY_Y, 0);
            }

            result.setResult(d);
            return true;
        }
        case MSG_FINISH_LAYOUT : {
            handleFinishLayout();
            return true;
        }
        case MSG_UPDATE_LAYOUT : {
            handleUpdateLayout((HashMap) msg.obj);
            return true;
        }
        case MSG_INSERT_VIEW_AT : {
            handleInsertAt((HashMap) msg.obj);
            return true;
        }
    }
    return super.handleMessage(msg);
}
/*
public Context getContext()
{
return getActivity();
}
*/
/**
 * @return the view's position and size as a dictionary with x, y, width,
 *         height; blocks on the main thread to measure the native view
 */
@Kroll.getProperty @Kroll.method
public KrollDict getRect()
{
    return (KrollDict) TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_GETRECT), getActivity());
}
/**
 * @return the view's size as a dictionary with width and height (x and y are
 *         always 0); blocks on the main thread to measure the native view
 */
@Kroll.getProperty @Kroll.method
public KrollDict getSize()
{
    return (KrollDict) TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_GETSIZE), getActivity());
}
/**
 * @return the "width" property value, or KrollRuntime.UNDEFINED when it has
 *         never been set
 */
@Kroll.getProperty @Kroll.method
public Object getWidth()
{
    if (hasProperty(TiC.PROPERTY_WIDTH)) {
        return getProperty(TiC.PROPERTY_WIDTH);
    }

    return KrollRuntime.UNDEFINED;
}
/**
 * Sets the "width" property and fires the corresponding property-change.
 * @param width the new width value (number, string, or constant)
 */
@Kroll.setProperty(retain=false) @Kroll.method
public void setWidth(Object width)
{
    setPropertyAndFire(TiC.PROPERTY_WIDTH, width);
}
/**
 * @return the "height" property value, or KrollRuntime.UNDEFINED when it has
 *         never been set
 */
@Kroll.getProperty @Kroll.method
public Object getHeight()
{
    if (hasProperty(TiC.PROPERTY_HEIGHT)) {
        return getProperty(TiC.PROPERTY_HEIGHT);
    }

    return KrollRuntime.UNDEFINED;
}
/**
 * Sets the "height" property and fires the corresponding property-change.
 * @param height the new height value (number, string, or constant)
 */
@Kroll.setProperty(retain=false) @Kroll.method
public void setHeight(Object height)
{
    setPropertyAndFire(TiC.PROPERTY_HEIGHT, height);
}
/**
 * @return the "center" property value, or KrollRuntime.UNDEFINED when it has
 *         never been set
 */
@Kroll.getProperty @Kroll.method
public Object getCenter()
{
    return hasProperty(TiC.PROPERTY_CENTER)
        ? getProperty(TiC.PROPERTY_CENTER)
        : KrollRuntime.UNDEFINED;
}
/** Releases the native view (if any) and drops the reference to it. */
public void clearView()
{
    if (view != null) {
        view.release();
    }
    view = null;
}
/**
 * @return the TiUIView associated with this proxy, or {@code null} if the
 *         view has not been created yet (does not create one — see
 *         getOrCreateView for that).
 * @module.api
 */
public TiUIView peekView()
{
    return view;
}
/**
 * Replaces this proxy's view reference without releasing the previous view.
 * @param view the new view, or {@code null} to detach
 */
public void setView(TiUIView view)
{
    this.view = view;
}
/**
 * Drops the current view reference (without releasing it) and creates a
 * fresh one.
 * @return the newly created view
 */
public TiUIView forceCreateView()
{
    view = null;
    return getOrCreateView();
}
/**
 * Transfer an existing view to this view proxy.
 * Special use in tableView. Do not use anywhere else.
 * Called from TiTableViewRowProxyItem.java
 * @param transferview - The view to transfer
 * @param oldProxy - The currentProxy of the view
 */
public void transferView(TiUIView transferview, TiViewProxy oldProxy) {
    // Detach the view from its previous owner so only one proxy drives it.
    if (oldProxy != null) {
        oldProxy.setView(null);
        oldProxy.setModelListener(null);
    }
    view = transferview;
    modelListener = transferview;
    view.setProxy(this);
}
/**
 * Creates or retrieves the view associated with this proxy.
 * Returns the existing view when one is present, and {@code null} when the
 * proxy has no activity to create a view in. Creation happens on the UI
 * thread — off-thread callers block until the view exists.
 * @return a TiUIView instance.
 * @module.api
 */
public TiUIView getOrCreateView()
{
    if (activity == null || view != null) {
        return view;
    }

    if (TiApplication.isUIThread()) {
        return handleGetView();
    }

    return (TiUIView) TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_GETVIEW), 0);
}
/**
 * UI-thread worker for getOrCreateView: creates the view via createView,
 * wires it up as a decor view if flagged, realizes children and pending
 * animations, and registers input listeners.
 * @return the (possibly just-created) view
 */
protected TiUIView handleGetView()
{
    if (view == null) {
        if (Log.isDebugModeEnabled()) {
            Log.d(TAG, "getView: " + getClass().getSimpleName(), Log.DEBUG_MODE);
        }

        Activity activity = getActivity();
        view = createView(activity);
        if (isDecorView) {
            if (activity != null) {
                ((TiBaseActivity)activity).setViewProxy(view.getProxy());
            } else {
                Log.w(TAG, "Activity is null", Log.DEBUG_MODE);
            }
        }
        realizeViews(view);
        view.registerForTouch();
        view.registerForKeyPress();
    }
    return view;
}
/**
 * Attaches the model listener, realizes all child proxies into the given
 * view, and kicks off any animation that was queued before the view existed.
 * @param view the freshly created view for this proxy
 */
public void realizeViews(TiUIView view)
{
    setModelListener(view);

    // Use a copy so bundle can be modified as it passes up the inheritance
    // tree. Allows defaults to be added and keys removed.

    if (children != null) {
        try {
            for (TiViewProxy p : children) {
                TiUIView cv = p.getOrCreateView();

                view.add(cv);
            }
        } catch (ConcurrentModificationException e) {
            // Children changed underneath us; log and continue with what we have.
            Log.e(TAG, e.getMessage(), e);
        }
    }

    synchronized (pendingAnimationLock) {
        if (pendingAnimation != null) {
            handlePendingAnimation(true);
        }
    }
}
/**
 * Recursively releases this proxy's view and all descendant views, detaches
 * the model listener, and hints the runtime to garbage-collect.
 */
public void releaseViews()
{
    if (view != null) {
        if (children != null) {
            for (TiViewProxy p : children) {
                p.releaseViews();
            }
        }
        view.release();
        view = null;
    }
    setModelListener(null);
    KrollRuntime.suggestGC();
}
/**
 * Implementing classes should use this method to create and return the appropriate view.
 * Called by {@link #handleGetView()} when the proxy needs its native view.
 * @param activity the context activity.
 * @return a TiUIView instance.
 * @module.api
 */
public abstract TiUIView createView(Activity activity);
/**
 * Adds a child to this view proxy. When the native view already exists the
 * add is forwarded (blocking) to the UI thread; otherwise the child is just
 * recorded and realized later.
 * @param child The child view proxy to add.
 * @module.api
 */
@Kroll.method
public void add(TiViewProxy child)
{
    if (child == null) {
        Log.e(TAG, "Add called with a null child");
        return;
    }
    if (children == null) {
        children = new ArrayList<TiViewProxy>();
    }

    if (peekView() != null) {
        if (TiApplication.isUIThread()) {
            handleAdd(child);
            return;
        }

        TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_ADD_CHILD), child);
    } else {
        children.add(child);
        child.parent = new WeakReference<TiViewProxy>(this);
    }
    //TODO zOrder
}
/**
 * Replaces the child at the given position with the view supplied in the
 * "view" entry of the options dictionary, by inserting the new child first
 * and then removing the old one.
 * @param params a dictionary containing "view" (TiViewProxy) and "position" (int).
 */
@Kroll.method
public void replaceAt(Object params)
{
	if (!(params instanceof HashMap)) {
		Log.e(TAG, "Argument for replaceAt must be a dictionary");
		return;
	}
	@SuppressWarnings("rawtypes")
	HashMap options = (HashMap) params;
	Integer position = -1;
	if(options.containsKey("position")) {
		position = (Integer) options.get("position");
	}
	// Guard both bounds: the original only checked the upper bound, so a
	// missing/negative "position" (default -1) reached children.get(-1)
	// and threw an IndexOutOfBoundsException.
	if(children != null && position >= 0 && position < children.size()) {
		TiViewProxy childToRemove = children.get(position);
		insertAt(params);
		remove(childToRemove);
	}
}
/**
 * Adds a child to this view proxy in the specified position. This is useful for "vertical" and
 * "horizontal" layouts.
 * @param params A Dictionary containing a TiViewProxy for the view and an int for the position
 * @module.api
 */
@Kroll.method
public void insertAt(Object params)
{
	if (!(params instanceof HashMap)) {
		Log.e(TAG, "Argument for insertAt must be a dictionary");
		return;
	}
	@SuppressWarnings("rawtypes")
	HashMap options = (HashMap) params;
	if (children == null) {
		children = new ArrayList<TiViewProxy>();
	}
	if (view != null) {
		if (TiApplication.isUIThread()) {
			handleInsertAt(options);
			return;
		}
		// NOTE(review): unlike add(), this posts a non-blocking message to
		// the UI thread — callers cannot assume the insert happened on return.
		getMainHandler().obtainMessage(MSG_INSERT_VIEW_AT, options).sendToTarget();
	} else {
		handleInsertAt(options);
	}
}
/**
 * Worker for insertAt(): inserts the "view" child from the options
 * dictionary into this proxy's children at the requested "position"
 * (clamped into range; out-of-range appends) and attaches it to the
 * native view when one exists.
 * @param options dictionary with "view" (TiViewProxy) and optional "position" (int).
 */
private void handleInsertAt(@SuppressWarnings("rawtypes") HashMap options)
{
	TiViewProxy child = null;
	Integer position = -1;
	if(options.containsKey("view")) {
		child = (TiViewProxy) options.get("view");
	}
	if(options.containsKey("position")) {
		position = (Integer) options.get("position");
	}
	if(child == null) {
		// Fixed the garbled original message ("insertAt must be contain a view").
		Log.e(TAG, "insertAt must contain a view");
		return;
	}
	// Out-of-range positions append to the end.
	if(position < 0 || position > children.size()) {
		position = children.size();
	}
	children.add(position, child);
	child.parent = new WeakReference<TiViewProxy>(this);
	if (view != null) {
		child.setActivity(getActivity());
		if (this instanceof DecorViewProxy) {
			child.isDecorView = true;
		}
		TiUIView cv = child.getOrCreateView();
		view.insertAt(cv, position);
	}
}
/**
 * UI-thread worker for add(): records the child, points its parent
 * reference at this proxy and, when a native view exists, realizes and
 * attaches the child's view.
 * @param child the child proxy to attach; assumed non-null (checked by add()).
 */
private void handleAdd(TiViewProxy child)
{
	children.add(child);
	child.parent = new WeakReference<TiViewProxy>(this);
	if (view == null) {
		return;
	}
	child.setActivity(getActivity());
	if (this instanceof DecorViewProxy) {
		child.isDecorView = true;
	}
	view.add(child.getOrCreateView());
}
/**
 * Removes a view from this view proxy, releasing the underlying native view if it exists.
 * When the native view exists the removal is marshalled (blocking) to the
 * UI thread; otherwise only the parent/child bookkeeping is updated.
 * @param child The child to remove.
 * @module.api
 */
@Kroll.method
public void remove(TiViewProxy child)
{
	if (child == null) {
		// Fixed copy/paste error: original logged "Add called with null child".
		Log.e(TAG, "Remove called with null child");
		return;
	}
	if (peekView() != null) {
		if (TiApplication.isUIThread()) {
			handleRemove(child);
			return;
		}
		TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_REMOVE_CHILD), child);
	} else {
		if (children != null) {
			children.remove(child);
			// Only clear the back-reference if it still points at us.
			if (child.parent != null && child.parent.get() == this) {
				child.parent = null;
			}
		}
	}
}
/**
 * Removes all children views.
 * @module.api
 */
@Kroll.method
public void removeAllChildren()
{
	if (children == null) {
		return;
	}
	// Iterate a snapshot: children may be altered from another thread, and
	// remove() itself mutates the live list.
	for (TiViewProxy child : new ArrayList<TiViewProxy>(children)) {
		remove(child);
	}
}
/**
 * UI-thread worker for remove(): detaches the child from the children
 * list, removes its native view from this proxy's view, and releases the
 * child's views.
 * @param child the child proxy to remove; ignored when null.
 */
public void handleRemove(TiViewProxy child)
{
	// Null-check up front: the original dereferenced child.peekView()
	// before performing its own (too-late) child != null check.
	if (child == null || children == null) {
		return;
	}
	children.remove(child);
	if (view != null) {
		view.remove(child.peekView());
	}
	child.releaseViews();
}
/**
 * Makes this view visible, optionally with a circular-reveal animation
 * (see handleShow). Marshals to the UI thread when necessary.
 */
@Kroll.method
public void show(@Kroll.argument(optional=true) KrollDict options)
{
	setProperty(TiC.PROPERTY_VISIBLE, true);
	if (!TiApplication.isUIThread()) {
		getMainHandler().obtainMessage(MSG_SHOW, options).sendToTarget();
		return;
	}
	handleShow(options);
}
/**
 * UI-thread worker for show(). On Lollipop (API 21) and later, when the
 * options request animation, reveals the view with a circular animation
 * growing from its center; otherwise shows it immediately.
 * @param options may be null; "animated" (boolean) enables the reveal.
 */
protected void handleShow(KrollDict options)
{
	if (view != null) {
		if (Build.VERSION.SDK_INT >= 21 && TiConvert.toBoolean(options, TiC.PROPERTY_ANIMATED, false)) {
			View nativeView = view.getOuterView();
			int width = nativeView.getWidth();
			int height = nativeView.getHeight();
			// Radius covering the whole view from its center.
			int radius = Math.max(width, height);
			Animator anim = ViewAnimationUtils.createCircularReveal(nativeView, width/2, height/2, 0, radius);
			// Make the view visible before starting the reveal.
			view.show();
			anim.start();
			return;
		}
		view.show();
	}
}
/**
 * Hides this view, optionally with a circular-conceal animation
 * (see handleHide). Marshals to the UI thread when necessary.
 */
@Kroll.method
public void hide(@Kroll.argument(optional=true) KrollDict options)
{
	setProperty(TiC.PROPERTY_VISIBLE, false);
	if (!TiApplication.isUIThread()) {
		getMainHandler().obtainMessage(MSG_HIDE, options).sendToTarget();
		return;
	}
	handleHide(options);
}
/**
 * UI-thread worker for hide(). Cancels/applies any pending animation
 * first. On Lollipop (API 21) and later, when the options request
 * animation, conceals the view with a shrinking circular animation and
 * hides it when the animation ends; otherwise hides it immediately.
 * @param options may be null; "animated" (boolean) enables the conceal.
 */
protected void handleHide(KrollDict options)
{
	if (view != null) {
		synchronized(pendingAnimationLock) {
			if (pendingAnimation != null) {
				handlePendingAnimation(false);
			}
		}
		if (Build.VERSION.SDK_INT >= 21 && TiConvert.toBoolean(options, TiC.PROPERTY_ANIMATED, false)) {
			View nativeView = view.getOuterView();
			int width = nativeView.getWidth();
			int height = nativeView.getHeight();
			int radius = Math.max(width, height);
			// Reverse of the show() reveal: shrink from full radius to zero.
			Animator anim = ViewAnimationUtils.createCircularReveal(nativeView, width/2, height/2, radius, 0);
			anim.addListener(new AnimatorListenerAdapter() {
				@Override
				public void onAnimationEnd(Animator animation) {
					super.onAnimationEnd(animation);
					// Actually hide only after the conceal animation finishes.
					view.hide();
				}
			});
			anim.start();
			return;
		}
		view.hide();
	}
}
/**
 * Queues an animation on this view.
 * @param arg either an options dictionary or a TiAnimation object.
 * @param callback optional function invoked when the animation completes.
 * @throws IllegalArgumentException when arg is neither a HashMap nor a TiAnimation.
 */
@Kroll.method
public void animate(Object arg, @Kroll.argument(optional=true) KrollFunction callback)
{
	synchronized (pendingAnimationLock) {
		if (arg instanceof HashMap) {
			@SuppressWarnings("rawtypes")
			HashMap options = (HashMap) arg;
			pendingAnimation = new TiAnimationBuilder();
			pendingAnimation.applyOptions(options);
		} else if (arg instanceof TiAnimation) {
			TiAnimation anim = (TiAnimation) arg;
			pendingAnimation = new TiAnimationBuilder();
			pendingAnimation.applyAnimation(anim);
		} else {
			throw new IllegalArgumentException("Unhandled argument to animate: " + arg.getClass().getSimpleName());
		}
		if (callback != null) {
			pendingAnimation.setCallback(callback);
		}
		// Run (or queue) the animation we just built.
		handlePendingAnimation(false);
	}
}
/**
 * Starts the pending animation if one exists and the view is realized.
 * Off the UI thread (or when forceQueue is set) the work is posted to the
 * main handler; pre-Honeycomb a tiny delay works around TIMOB-9813.
 * @param forceQueue when true, always post via the handler instead of
 *                   running inline even on the UI thread.
 */
public void handlePendingAnimation(boolean forceQueue)
{
	if (pendingAnimation != null && peekView() != null) {
		if (forceQueue || !(TiApplication.isUIThread())) {
			if (Build.VERSION.SDK_INT < TiC.API_LEVEL_HONEYCOMB) {
				// Even this very small delay can help eliminate the bug
				// whereby the animated view's parent suddenly becomes
				// transparent (pre-honeycomb). cf. TIMOB-9813.
				getMainHandler().sendEmptyMessageDelayed(MSG_ANIMATE, 10);
			} else {
				getMainHandler().sendEmptyMessage(MSG_ANIMATE);
			}
		} else {
			handleAnimate();
		}
	}
}
/**
 * Runs the pending animation on the UI thread. If the native view has not
 * been laid out yet (zero size, or a layout pass is still pending — e.g.
 * animate() was called immediately upon window opening), the animation is
 * re-queued so it starts after layout completes.
 */
protected void handleAnimate()
{
	TiUIView tiv = peekView();
	if (tiv == null) {
		return;
	}
	View nativeView = tiv.getNativeView();
	boolean layoutNotReady = (nativeView == null)
		|| (nativeView.getWidth() == 0 && nativeView.getHeight() == 0)
		|| tiv.isLayoutPending();
	if (layoutNotReady) {
		// Try again once layout has happened.
		getMainHandler().sendEmptyMessage(MSG_QUEUED_ANIMATE);
	} else {
		tiv.animate();
	}
}
/** Runs a previously re-queued animation, if the view still exists. */
protected void handleQueuedAnimate()
{
	TiUIView tiv = peekView();
	if (tiv == null) {
		return;
	}
	tiv.animate();
}
/** Removes focus from this view, marshalling to the UI thread when needed. */
@Kroll.method
public void blur()
{
	if (!TiApplication.isUIThread()) {
		getMainHandler().sendEmptyMessage(MSG_BLUR);
		return;
	}
	handleBlur();
}
/** UI-thread worker for blur(). No-op when the view does not exist. */
protected void handleBlur()
{
	TiUIView v = view;
	if (v != null) {
		v.blur();
	}
}
/** Gives focus to this view, marshalling to the UI thread when needed. */
@Kroll.method
public void focus()
{
	if (!TiApplication.isUIThread()) {
		getMainHandler().sendEmptyMessage(MSG_FOCUS);
		return;
	}
	handleFocus();
}
/** UI-thread worker for focus(). No-op when the view does not exist. */
protected void handleFocus()
{
	TiUIView v = view;
	if (v != null) {
		v.focus();
	}
}
/**
 * Renders this view to an image, blocking until done.
 * @return a blob containing the rendered image.
 */
public TiBlob toImage()
{
	//backward compat with maps
	return toImage(null);
}
/**
 * Renders this view to an image. With no callback this blocks (on the UI
 * thread when necessary) and returns the rendered blob; with a callback it
 * renders asynchronously on a background thread, delivers the result to
 * the callback, and immediately returns a 1x1 placeholder blob.
 * @param callback optional function receiving the rendered blob.
 * @return the rendered blob, or a non-null placeholder in the async case.
 */
@Kroll.method
public TiBlob toImage(final @Kroll.argument(optional=true) KrollFunction callback)
{
	final boolean waitForFinish = (callback == null);
	TiBlob blob;
	/*
	 * Callback don't exist. Just render on main thread and return blob.
	 */
	if (waitForFinish) {
		if (TiApplication.isUIThread()) {
			blob = handleToImage();
		} else {
			blob = (TiBlob) TiMessenger.sendBlockingMainMessage(getMainHandler().obtainMessage(MSG_TOIMAGE), getActivity());
		}
	}
	/*
	 * Callback exists. Perform async rendering and return an empty blob.
	 */
	else {
		// Create a non-null empty blob to return.
		blob = TiBlob.blobFromImage(Bitmap.createBitmap(1, 1, Config.ARGB_8888));
		Runnable renderRunnable = new Runnable() {
			public void run() {
				callback.callAsync(getKrollObject(), new Object[] {handleToImage()});
			}
		};
		Thread renderThread = new Thread(renderRunnable);
		renderThread.setPriority(Thread.MAX_PRIORITY);
		renderThread.start();
	}
	return blob;
}
/**
 * Renders this proxy's view into an image blob.
 * @return the rendered image, or null when no view could be created.
 */
protected TiBlob handleToImage()
{
	TiUIView uiView = getOrCreateView();
	if (uiView == null) {
		return null;
	}
	return TiUIHelper.getImageFromDict(uiView.toImage());
}
/**
 * Fires an event that can optionally be "bubbled" to the parent view.
 *
 * @param eventName event to get dispatched to listeners
 * @param data data to include in the event
 * @param bubbles if true will send the event to the parent view after it has been dispatched to this view's listeners.
 * @return true if the event was handled
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
public boolean fireEvent(String eventName, Object data, boolean bubbles)
{
	Object payload = (data != null) ? data : new KrollDict();
	// Record the bubbling flag on the payload; the JavaScript side performs
	// the actual propagation to the parent.
	if (payload instanceof HashMap) {
		((HashMap) payload).put(TiC.PROPERTY_BUBBLES, bubbles);
	}
	return super.fireEvent(eventName, payload);
}
/**
 * Fires an event that will be bubbled to the parent view.
 * @param eventName event to get dispatched to listeners
 * @param data data to include in the event
 * @return true if the event was handled
 */
@Override
public boolean fireEvent(String eventName, Object data)
{
	// To remain compatible this override of fireEvent will always
	// bubble the event to the parent view. It should eventually be deprecated
	// in favor of using the fireEvent(String, Object, boolean) method.
	return fireEvent(eventName, data, true);
}
/**
 * @return The parent view proxy of this view proxy, or null when detached
 *         or when the weak reference has been collected.
 * @module.api
 */
@Kroll.getProperty @Kroll.method
public TiViewProxy getParent()
{
	WeakReference<TiViewProxy> parentRef = this.parent;
	return (parentRef == null) ? null : parentRef.get();
}
/**
 * Sets (or clears) this proxy's parent. The parent is held weakly to
 * avoid a retain cycle between parent and child proxies.
 */
public void setParent(TiViewProxy parent)
{
	this.parent = (parent == null) ? null : new WeakReference<TiViewProxy>(parent);
}
/**
 * Events dispatched to this proxy bubble to its parent view proxy.
 * @return the parent proxy, or null when detached.
 */
@Override
public KrollProxy getParentForBubbling()
{
	return getParent();
}
/**
 * Propagates the owning activity to this proxy and, recursively, to all
 * of its children.
 */
@Override
public void setActivity(Activity activity)
{
	super.setActivity(activity);
	if (children == null) {
		return;
	}
	for (TiViewProxy child : children) {
		child.setActivity(activity);
	}
}
/**
 * @return An array of the children view proxies of this view (empty when
 *         there are none — never null).
 * @module.api
 */
@Kroll.getProperty @Kroll.method
public TiViewProxy[] getChildren()
{
	if (children == null) {
		return new TiViewProxy[0];
	}
	TiViewProxy[] snapshot = new TiViewProxy[children.size()];
	return children.toArray(snapshot);
}
/**
 * When the first "click" listener is added to this (non-window) proxy and
 * touch is enabled, make the native view clickable so the listener can fire.
 */
@Override
public void eventListenerAdded(String eventName, int count, KrollProxy proxy)
{
	super.eventListenerAdded(eventName, count, proxy);
	if (eventName.equals(TiC.EVENT_CLICK) && proxy.equals(this) && count == 1 && !(proxy instanceof TiWindowProxy)) {
		// Absent touchEnabled property defaults to clickable.
		if (!proxy.hasProperty(TiC.PROPERTY_TOUCH_ENABLED)
			|| TiConvert.toBoolean(proxy.getProperty(TiC.PROPERTY_TOUCH_ENABLED))) {
			setClickable(true);
		}
	}
}
/**
 * When the last "click" listener is removed from this (non-window) proxy
 * and touch is explicitly disabled, make the native view non-clickable again.
 */
@Override
public void eventListenerRemoved(String eventName, int count, KrollProxy proxy)
{
	super.eventListenerRemoved(eventName, count, proxy);
	if (eventName.equals(TiC.EVENT_CLICK) && count == 0 && proxy.equals(this) && !(proxy instanceof TiWindowProxy)) {
		// Only disable clickability when touchEnabled is explicitly false.
		if (proxy.hasProperty(TiC.PROPERTY_TOUCH_ENABLED)
			&& !TiConvert.toBoolean(proxy.getProperty(TiC.PROPERTY_TOUCH_ENABLED))) {
			setClickable(false);
		}
	}
}
/**
 * Toggles the clickable flag on the native view, when one exists.
 * @param clickable the new clickable state.
 */
public void setClickable(boolean clickable)
{
	TiUIView v = peekView();
	if (v == null) {
		return;
	}
	View nativeView = v.getNativeView();
	if (nativeView == null) {
		return;
	}
	nativeView.setClickable(clickable);
}
/**
 * Applies stylesheet classes to this proxy by looking them up for the
 * current base URL and extending the proxy's properties with the result.
 * @param classNames style class names (converted to strings).
 */
@Kroll.method
public void addClass(Object[] classNames)
{
	// This is a pretty naive implementation right now,
	// but it will work for our current needs
	String baseUrl = getBaseUrlForStylesheet();
	ArrayList<String> classes = new ArrayList<String>();
	for (Object c : classNames) {
		classes.add(TiConvert.toString(c));
	}
	KrollDict options = TiApplication.getInstance().getStylesheet(baseUrl, classes, null);
	extend(options);
}
/**
 * Returns the effective keepScreenOn state, preferring the native view's
 * live value and keeping the proxy's stored property in sync with it.
 * @return the native view's keepScreenOn flag when available, otherwise the
 *         stored property value, otherwise false (Android default).
 */
@Kroll.method @Kroll.getProperty
public boolean getKeepScreenOn()
{
	Boolean keepScreenOn = null;
	TiUIView v = peekView();
	if (v != null) {
		View nv = v.getNativeView();
		if (nv != null) {
			keepScreenOn = nv.getKeepScreenOn();
		}
	}
	//Keep the proxy in the correct state
	Object current = getProperty(TiC.PROPERTY_KEEP_SCREEN_ON);
	if (current != null) {
		boolean currentValue = TiConvert.toBoolean(current);
		if (keepScreenOn == null) {
			// No native view to consult: fall back to the stored property.
			keepScreenOn = currentValue;
		} else {
			if (currentValue != keepScreenOn) {
				// Native view disagrees with the stored property; trust the view.
				setProperty(TiC.PROPERTY_KEEP_SCREEN_ON, keepScreenOn);
			} else {
				keepScreenOn = currentValue;
			}
		}
	} else {
		if (keepScreenOn == null) {
			keepScreenOn = false; // Android default
		}
		// Record the value so future reads are consistent.
		setProperty(TiC.PROPERTY_KEEP_SCREEN_ON, keepScreenOn);
	}
	return keepScreenOn;
}
/**
 * Sets the keepScreenOn property and fires a property-change so the view
 * applies it to the native View.
 * @param keepScreenOn true to keep the screen awake while this view is visible.
 */
@Kroll.method @Kroll.setProperty(retain=false)
public void setKeepScreenOn(boolean keepScreenOn)
{
	setPropertyAndFire(TiC.PROPERTY_KEEP_SCREEN_ON, keepScreenOn);
}
/**
 * Converts a point from this view's coordinate space into the coordinate
 * space of another view, using each native view's window location and
 * undoing the destination view's translation transforms.
 * @param point dictionary with required numeric "x" and "y" keys.
 * @param dest the view whose coordinate space to convert into.
 * @return a dictionary with converted integer "x"/"y", or null when either
 *         view is not attached to a window.
 * @throws IllegalArgumentException when point/dest are null or x/y missing.
 */
@Kroll.method
public KrollDict convertPointToView(KrollDict point, TiViewProxy dest)
{
	if (point == null) {
		throw new IllegalArgumentException("convertPointToView: point must not be null");
	}
	if (dest == null) {
		throw new IllegalArgumentException("convertPointToView: destinationView must not be null");
	}
	if (!point.containsKey(TiC.PROPERTY_X)) {
		throw new IllegalArgumentException("convertPointToView: required property \"x\" not found in point");
	}
	if (!point.containsKey(TiC.PROPERTY_Y)) {
		throw new IllegalArgumentException("convertPointToView: required property \"y\" not found in point");
	}
	// The spec says to throw an exception if x or y cannot be converted to numbers.
	// TiConvert does that automatically for us.
	int x = TiConvert.toInt(point, TiC.PROPERTY_X);
	int y = TiConvert.toInt(point, TiC.PROPERTY_Y);
	TiUIView view = peekView();
	TiUIView destView = dest.peekView();
	if (view == null) {
		Log.w(TAG, "convertPointToView: View has not been attached, cannot convert point");
		return null;
	}
	if (destView == null) {
		Log.w(TAG, "convertPointToView: DestinationView has not been attached, cannot convert point");
		return null;
	}
	View nativeView = view.getNativeView();
	View destNativeView = destView.getNativeView();
	if (nativeView == null || nativeView.getParent() == null) {
		Log.w(TAG, "convertPointToView: View has not been attached, cannot convert point");
		return null;
	}
	if (destNativeView == null || destNativeView.getParent() == null) {
		Log.w(TAG, "convertPointToView: DestinationView has not been attached, cannot convert point");
		return null;
	}
	int viewLocation[] = new int[2];
	int destLocation[] = new int[2];
	nativeView.getLocationInWindow(viewLocation);
	destNativeView.getLocationInWindow(destLocation);
	if (Log.isDebugModeEnabled()) {
		Log.d(TAG, "nativeView location in window, x: " + viewLocation[0] + ", y: " + viewLocation[1], Log.DEBUG_MODE);
		Log.d(TAG, "destNativeView location in window, x: " + destLocation[0] + ", y: " + destLocation[1], Log.DEBUG_MODE);
	}
	// Translate the point into window coordinates first.
	int pointWindowX = viewLocation[0] + x;
	int pointWindowY = viewLocation[1] + y;
	// Apply reverse transformation to get the original location
	float[] points = new float[] { pointWindowX - destLocation[0], pointWindowY - destLocation[1] };
	points = destView.getPreTranslationValue(points);
	KrollDict destPoint = new KrollDict();
	destPoint.put(TiC.PROPERTY_X, (int) points[0]);
	destPoint.put(TiC.PROPERTY_Y, (int) points[1]);
	return destPoint;
}
// TODO: Deprecated since Release 3.0.0
/**
 * Marks the start of a batched layout; property changes made until
 * finishLayout() will not each force a layout pass.
 * @deprecated layout batching is no longer needed.
 */
@Kroll.method @Deprecated
public void startLayout()
{
	Log.w(TAG, "startLayout() is deprecated.", Log.DEBUG_MODE);
	layoutStarted.set(true);
}
// TODO: Deprecated since Release 3.0.0
/**
 * Ends a batched layout started by startLayout() and forces one layout
 * pass (on the UI thread). No-op when startLayout() was never called.
 * @deprecated layout batching is no longer needed.
 */
@Kroll.method @Deprecated
public void finishLayout()
{
	Log.w(TAG, "finishLayout() is deprecated.", Log.DEBUG_MODE);
	// Don't force a layout if startLayout() was never called
	if (!isLayoutStarted()) {
		return;
	}
	if (TiApplication.isUIThread()) {
		handleFinishLayout();
	} else {
		getMainHandler().sendEmptyMessage(MSG_FINISH_LAYOUT);
	}
	layoutStarted.set(false);
}
// TODO: Deprecated since Release 3.0.0
/**
 * Applies a dictionary of property changes as a single batch, then forces
 * one layout pass.
 * @param params a dictionary of property name/value pairs.
 * @deprecated layout batching is no longer needed.
 */
@SuppressWarnings({ "rawtypes", "unchecked" })
@Kroll.method @Deprecated
public void updateLayout(Object params)
{
	Log.w(TAG, "updateLayout() is deprecated.", Log.DEBUG_MODE);
	HashMap<String, Object> paramsMap;
	if (!(params instanceof HashMap)) {
		Log.e(TAG, "Argument for updateLayout must be a dictionary");
		return;
	}
	paramsMap = (HashMap) params;
	// Temporarily enter "layout started" mode so the individual property
	// sets do not each trigger a layout pass.
	layoutStarted.set(true);
	if (TiApplication.isUIThread()) {
		handleUpdateLayout(paramsMap);
	} else {
		getMainHandler().obtainMessage(MSG_UPDATE_LAYOUT, paramsMap).sendToTarget();
	}
	layoutStarted.set(false);
}
/**
 * UI-thread worker for finishLayout(): forces one layout pass, asking for
 * a full re-layout only when the view's z-index changed during the batch.
 * NOTE(review): dereferences `view` without a null check — appears to rely
 * on callers only invoking this while a view exists; confirm view cannot
 * be null on these paths.
 */
private void handleFinishLayout()
{
	if (view.iszIndexChanged()) {
		view.forceLayoutNativeView(true);
		// Reset the flag so the next batch starts clean.
		view.setzIndexChanged(false);
	} else {
		view.forceLayoutNativeView(false);
	}
}
/**
 * UI-thread worker for updateLayout(): applies every property in the
 * batch, then forces the single layout pass.
 * @param params property name/value pairs to apply.
 */
private void handleUpdateLayout(HashMap<String, Object> params)
{
	for (String key : params.keySet()) {
		Object value = params.get(key);
		setPropertyAndFire(key, value);
	}
	handleFinishLayout();
}
// TODO: Deprecated since Release 3.0.0
// This is used to check if the user has called startLayout(). We mainly use this to perform a check before running
// deprecated behavior. (i.e. performing layout when a property has changed, and the user didn't call startLayout)
/**
 * @return true while a startLayout()/updateLayout() batch is in progress.
 * @deprecated see startLayout().
 */
@Deprecated
public boolean isLayoutStarted()
{
	return layoutStarted.get();
}
/** Hides the soft keyboard, marshalling to the UI thread when needed. */
@Kroll.method
public void hideKeyboard()
{
	if (!TiApplication.isUIThread()) {
		getMainHandler().sendEmptyMessage(MSG_HIDE_KEYBOARD);
		return;
	}
	handleHideKeyboard();
}
/** UI-thread worker for hideKeyboard(). No-op without a native view. */
protected void handleHideKeyboard()
{
	TiUIView v = peekView();
	if (v == null) {
		return;
	}
	View nativeView = v.getNativeView();
	if (nativeView != null) {
		TiUIHelper.showSoftKeyboard(nativeView, false);
	}
}
}
| |
/*
* Copyright (c) 2014, Cloudera and Intel, Inc. All Rights Reserved.
*
* Cloudera, Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"). You may not use this file except in
* compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for
* the specific language governing permissions and limitations under the
* License.
*/
package com.cloudera.oryx.app.batch.mllib.als;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import com.typesafe.config.Config;
import org.apache.hadoop.conf.Configuration;
import org.dmg.pmml.PMML;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.cloudera.oryx.app.als.ALSUtilsTest;
import com.cloudera.oryx.app.pmml.AppPMMLUtils;
import com.cloudera.oryx.common.collection.Pair;
import com.cloudera.oryx.common.io.IOUtils;
import com.cloudera.oryx.common.settings.ConfigUtils;
import com.cloudera.oryx.common.pmml.PMMLUtils;
import com.cloudera.oryx.ml.MLUpdate;
/**
 * End-to-end integration test for {@code ALSUpdate}: streams random rating
 * data through the batch layer, then verifies the produced model directories
 * (PMML file plus X/Y feature matrices) and the "UP"/"MODEL" messages on the
 * update topic against each other, generation by generation.
 */
public final class ALSUpdateIT extends AbstractALSIT {
	private static final Logger log = LoggerFactory.getLogger(ALSUpdateIT.class);
	private static final int DATA_TO_WRITE = 2000;
	private static final int WRITE_INTERVAL_MSEC = 10;
	private static final int FEATURES = 4;
	private static final double LAMBDA = 0.001;
	private static final int NUM_USERS_ITEMS = 1000;
	@Test
	public void testALS() throws Exception {
		Path tempDir = getTempDir();
		Path dataDir = tempDir.resolve("data");
		Path modelDir = tempDir.resolve("model");
		// Overlay test-specific settings on the base config.
		Map<String,Object> overlayConfig = new HashMap<>();
		overlayConfig.put("oryx.batch.update-class", ALSUpdate.class.getName());
		ConfigUtils.set(overlayConfig, "oryx.batch.storage.data-dir", dataDir);
		ConfigUtils.set(overlayConfig, "oryx.batch.storage.model-dir", modelDir);
		overlayConfig.put("oryx.batch.streaming.generation-interval-sec", GEN_INTERVAL_SEC);
		overlayConfig.put("oryx.als.implicit", false);
		overlayConfig.put("oryx.als.hyperparams.lambda", LAMBDA);
		overlayConfig.put("oryx.als.hyperparams.features", FEATURES);
		Config config = ConfigUtils.overlayOn(overlayConfig, getConfig());
		startMessaging();
		// Pump random ratings through the system and collect update-topic messages.
		List<Pair<String,String>> updates = startServerProduceConsumeTopics(
			config,
			new RandomALSDataGenerator(NUM_USERS_ITEMS, NUM_USERS_ITEMS, 1, 5),
			DATA_TO_WRITE,
			WRITE_INTERVAL_MSEC);
		List<Path> modelInstanceDirs = IOUtils.listFiles(modelDir, "*");
		int generations = modelInstanceDirs.size();
		checkIntervals(generations, DATA_TO_WRITE, WRITE_INTERVAL_MSEC, GEN_INTERVAL_SEC);
		// Per-generation ID sets seen in the X/Y outputs on disk.
		List<Collection<String>> userIDs = new ArrayList<>();
		userIDs.add(Collections.<String>emptySet()); // Add dummy empty set as prior value
		List<Collection<String>> productIDs = new ArrayList<>();
		productIDs.add(Collections.<String>emptySet()); // Add dummy empty set as prior value
		for (Path modelInstanceDir : modelInstanceDirs) {
			Path modelFile = modelInstanceDir.resolve(MLUpdate.MODEL_FILE_NAME);
			assertTrue("Model file should exist: " + modelFile, Files.exists(modelFile));
			assertTrue("Model file should not be empty: " + modelFile, Files.size(modelFile) > 0);
			PMMLUtils.read(modelFile); // Shouldn't throw exception
			Path xDir = modelInstanceDir.resolve("X");
			assertTrue(Files.exists(xDir));
			userIDs.add(checkFeatures(xDir, userIDs.get(userIDs.size() - 1)));
			Path yDir = modelInstanceDir.resolve("Y");
			assertTrue(Files.exists(yDir));
			productIDs.add(checkFeatures(yDir, productIDs.get(productIDs.size() - 1)));
		}
		// Remove dummy empty sets
		userIDs.remove(0);
		productIDs.remove(0);
		// Now cross-check the update-topic messages against the on-disk output.
		Collection<String> expectedUsers = null;
		Collection<String> expectedProducts = null;
		Collection<String> seenUsers = null;
		Collection<String> seenProducts = null;
		Collection<String> lastModelUsers = null;
		Collection<String> lastModelProducts = null;
		int whichGeneration = -1;
		for (Pair<String,String> km : updates) {
			String type = km.getFirst();
			String value = km.getSecond();
			log.debug("{} = {}", type, value);
			if ("UP".equals(type)) {
				// Vector updates must be preceded by a MODEL message that
				// initialized the seen* accumulators.
				assertNotNull(seenUsers);
				assertNotNull(seenProducts);
				List<?> update = MAPPER.readValue(value, List.class);
				// First field is X or Y, depending on whether it's a user or item vector
				String whichMatrixField = update.get(0).toString();
				boolean isUser = "X".equals(whichMatrixField);
				boolean isProduct = "Y".equals(whichMatrixField);
				// Next is user/item ID
				String id = update.get(1).toString();
				assertTrue(isUser || isProduct);
				if (isUser) {
					seenUsers.add(id);
				} else {
					seenProducts.add(id);
				}
				// Verify that feature vector are valid floats
				for (float f : MAPPER.convertValue(update.get(2), float[].class)) {
					assertTrue(!Float.isNaN(f) && !Float.isInfinite(f));
				}
				if (isUser) {
					// Only known-items for users exist now, not known users for items
					@SuppressWarnings("unchecked")
					Collection<String> knownUsersItems = (Collection<String>) update.get(3);
					assertFalse(knownUsersItems.isEmpty());
					for (String known : knownUsersItems) {
						int i = ALSUtilsTest.stringIDtoID(known);
						assertTrue(i >= 0 && i < NUM_USERS_ITEMS);
					}
				}
			} else {
				assertTrue("MODEL".equals(type) || "MODEL-REF".equals(type));
				PMML pmml = AppPMMLUtils.readPMMLFromUpdateKeyMessage(type, value, new Configuration());
				checkHeader(pmml.getHeader());
				assertEquals(7, pmml.getExtensions().size());
				Map<String,Object> expected = new HashMap<>();
				expected.put("features", FEATURES);
				expected.put("lambda", LAMBDA);
				expected.put("implicit", false);
				checkExtensions(pmml, expected);
				// See if users/item sets seen in updates match what was expected from output
				assertContainsSame(expectedUsers, seenUsers);
				assertContainsSame(expectedProducts, seenProducts);
				// Also check key sets reported in model
				assertContainsSame(expectedUsers, lastModelUsers);
				assertContainsSame(expectedProducts, lastModelProducts);
				// Update for next round
				whichGeneration++;
				expectedUsers = userIDs.get(whichGeneration);
				expectedProducts = productIDs.get(whichGeneration);
				seenUsers = new HashSet<>();
				seenProducts = new HashSet<>();
				lastModelUsers = AppPMMLUtils.getExtensionContent(pmml, "XIDs");
				lastModelProducts = AppPMMLUtils.getExtensionContent(pmml, "YIDs");
			}
		}
	}
	/**
	 * Reads the serialized feature vectors in the "part-*" files under the
	 * given directory, asserting each has FEATURES dimensions, and checks
	 * that the ID set grows monotonically across generations.
	 * @param path the X or Y directory of one model generation.
	 * @param previousIDs IDs present in the previous generation.
	 * @return the IDs present in this generation (superset of previousIDs).
	 */
	private static Collection<String> checkFeatures(Path path, Collection<String> previousIDs)
		throws IOException {
		Collection<String> seenIDs = new HashSet<>();
		for (Path file : IOUtils.listFiles(path, "part-*")) {
			for (String line : IOUtils.readLines(file)) {
				List<?> update = MAPPER.readValue(line, List.class);
				seenIDs.add(update.get(0).toString());
				assertEquals(FEATURES, MAPPER.convertValue(update.get(1), float[].class).length);
			}
		}
		assertFalse(seenIDs.isEmpty());
		assertTrue(seenIDs.containsAll(previousIDs));
		return seenIDs;
	}
}
| |
/**
* New BSD License
* http://www.opensource.org/licenses/bsd-license.php
* Copyright 2009-2011 RaptorProject (http://code.google.com/p/raptor-chess-interface/)
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
*
* Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
* Neither the name of the RaptorProject nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package raptor.pref;
import org.eclipse.jface.preference.PreferenceDialog;
import org.eclipse.jface.preference.PreferenceManager;
import org.eclipse.jface.preference.PreferenceNode;
import org.eclipse.jface.preference.PreferencePage;
import raptor.Raptor;
import raptor.action.RaptorAction.RaptorActionContainer;
import raptor.connector.Connector;
import raptor.international.L10n;
import raptor.pref.page.ActionContainerPage;
import raptor.pref.page.ActionKeyBindingsPage;
import raptor.pref.page.ActionScriptsPage;
import raptor.pref.page.BughousePage;
import raptor.pref.page.ChatConsoleBehaviorPage;
import raptor.pref.page.ChatConsoleChannelColorsPage;
import raptor.pref.page.ChatConsoleMessageColorsPage;
import raptor.pref.page.ChatConsolePage;
import raptor.pref.page.ChatConsoleRightClickScripts;
import raptor.pref.page.ChatConsoleTagsPage;
import raptor.pref.page.ChatConsoleToolbarsPage;
import raptor.pref.page.ChessBoardArrowsPage;
import raptor.pref.page.ChessBoardBehaviorPage;
import raptor.pref.page.ChessBoardColorsPage;
import raptor.pref.page.ChessBoardFontsPage;
import raptor.pref.page.ChessBoardHighlightsPage;
import raptor.pref.page.ChessBoardMouseActions;
import raptor.pref.page.ChessBoardPage;
import raptor.pref.page.ChessBoardResultsPage;
import raptor.pref.page.ChessBoardToolbarsPage;
import raptor.pref.page.ChessEnginesPage;
import raptor.pref.page.GeneralPage;
import raptor.pref.page.InactiveMouseActionsPage;
import raptor.pref.page.ObservingMouseActionsPage;
import raptor.pref.page.PlayingMouseActionsPage;
import raptor.pref.page.RaptorWindowPage;
import raptor.pref.page.RaptorWindowQuadrantsPage;
import raptor.pref.page.ChatEventScripts;
import raptor.pref.page.ScriptsPage;
import raptor.pref.page.SeekPage;
import raptor.pref.page.SoundPage;
import raptor.pref.page.SpeechPage;
import raptor.pref.page.UciEnginesPage;
import raptor.pref.page.XboardEnginesPage;
import raptor.service.ConnectorService;
/**
* A class containing utility methods for Preferences.
*/
public class PreferenceUtils {
protected static L10n local = L10n.getInstance();
private static PreferenceDialog dlg;
/**
 * Launches the preference dialog.
 *
 * All connectors in the ConnectorService have their preference nodes added.
 * Rebuilds the dialog from scratch on every call via create().
 */
public static void launchPreferenceDialog() {
	create();
	// Open the dialog
	dlg.open();
}
/**
 * Closes the currently open preference dialog (if any), rebuilds it so
 * newly registered pages are picked up, and reopens it.
 */
public static void restartDialog() {
	// Guard against a restart before the dialog was ever created; the
	// original unconditionally called dlg.close() and could NPE.
	if (dlg != null) {
		dlg.close();
	}
	create();
	dlg.open();
}
private static void create() {
// Create the preference manager
PreferenceManager mgr = new PreferenceManager('/');
mgr.addToRoot(new PreferenceNode("general", new GeneralPage()));
mgr.addTo("general", new PreferenceNode("layout1",
new RaptorWindowQuadrantsPage("1")));
mgr.addTo("general", new PreferenceNode("window",
new RaptorWindowPage()));
mgr.addToRoot(new PreferenceNode("bughouse", new BughousePage()));
mgr
.addTo(
"bughouse",
new PreferenceNode(
"buttons",
new ActionContainerPage(
local.getString("prefUtil1"),
local.getString("prefUtil2"),
RaptorActionContainer.BugButtons)));
mgr.addToRoot(new PreferenceNode("chessBoard", new ChessBoardPage()));
mgr.addTo("chessBoard", new PreferenceNode("arrows",
new ChessBoardArrowsPage()));
mgr.addTo("chessBoard", new PreferenceNode("behavior",
new ChessBoardBehaviorPage()));
mgr.addTo("chessBoard", new PreferenceNode("colors",
new ChessBoardColorsPage()));
mgr.addTo("chessBoard", new PreferenceNode("fonts",
new ChessBoardFontsPage()));
mgr.addTo("chessBoard", new PreferenceNode("highlights",
new ChessBoardHighlightsPage()));
mgr.addTo("chessBoard", new PreferenceNode("mouseActions",
new ChessBoardMouseActions()));
mgr.addTo("chessBoard/mouseActions", new PreferenceNode("inactive",
new InactiveMouseActionsPage()));
mgr.addTo("chessBoard/mouseActions", new PreferenceNode("playing",
new PlayingMouseActionsPage()));
mgr.addTo("chessBoard/mouseActions", new PreferenceNode("observing",
new ObservingMouseActionsPage()));
mgr.addTo("chessBoard", new PreferenceNode("toolbar",
new ChessBoardToolbarsPage()));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"bugSuggest",
new ActionContainerPage(
local.getString("prefUtil3"),
local.getString("prefUtil4"),
RaptorActionContainer.BughouseSuggestChessBoard)));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"examining",
new ActionContainerPage(
local.getString("prefUtil5"),
local.getString("prefUtil6"),
RaptorActionContainer.ExaminingChessBoard)));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"inactive",
new ActionContainerPage(
local.getString("prefUtil7"),
local.getString("prefUtil8"),
RaptorActionContainer.InactiveChessBoard)));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"observing",
new ActionContainerPage(
local.getString("prefUtil9"),
local.getString("prefUtil10"),
RaptorActionContainer.ObservingChessBoard)));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"playing",
new ActionContainerPage(
local.getString("prefUtil11"),
local.getString("prefUtil12"),
RaptorActionContainer.PlayingChessBoard)));
mgr
.addTo(
"chessBoard/toolbar",
new PreferenceNode(
"setup",
new ActionContainerPage(
local.getString("prefUtil13"),
local.getString("prefUtil14"),
RaptorActionContainer.SetupChessBoard)));
mgr.addTo("chessBoard", new PreferenceNode("results",
new ChessBoardResultsPage()));
mgr.addToRoot(new PreferenceNode("chatConsole", new ChatConsolePage()));
// Currently unused but keeping it around in case more options are
// added.
mgr.addTo("chatConsole", new PreferenceNode("behavior",
new ChatConsoleBehaviorPage()));
mgr.addTo("chatConsole", new PreferenceNode("channelColors",
new ChatConsoleChannelColorsPage()));
mgr.addTo("chatConsole", new PreferenceNode("messageColors",
new ChatConsoleMessageColorsPage()));
mgr.addTo("chatConsole", new PreferenceNode("tags",
new ChatConsoleTagsPage()));
mgr.addTo("chatConsole", new PreferenceNode("toolbar",
new ChatConsoleToolbarsPage()));
mgr
.addTo(
"chatConsole/toolbar",
new PreferenceNode(
"channel",
new ActionContainerPage(
local.getString("prefUtil15"),
local.getString("prefUtil16"),
RaptorActionContainer.ChannelChatConsole)));
mgr
.addTo(
"chatConsole/toolbar",
new PreferenceNode(
"main",
new ActionContainerPage(
local.getString("prefUtil17"),
local.getString("prefUtil18"),
RaptorActionContainer.MainChatConsole)));
mgr
.addTo(
"chatConsole/toolbar",
new PreferenceNode(
"partner",
new ActionContainerPage(
local.getString("prefUtil19"),
local.getString("prefUtil20"),
RaptorActionContainer.BughousePartnerChatConsole)));
mgr
.addTo(
"chatConsole/toolbar",
new PreferenceNode(
"person",
new ActionContainerPage(
local.getString("prefUtil21"),
local.getString("prefUtil22"),
RaptorActionContainer.PersonChatConsole)));
mgr
.addTo(
"chatConsole/toolbar",
new PreferenceNode(
"regex",
new ActionContainerPage(
local.getString("prefUtil23"),
local.getString("prefUtil24"),
RaptorActionContainer.RegExChatConsole)));
mgr.addToRoot(new PreferenceNode("engines", new ChessEnginesPage()));
mgr.addTo("engines", new PreferenceNode("uciEngines",
new UciEnginesPage()));
mgr.addTo("engines", new PreferenceNode("xboardEngines",
new XboardEnginesPage()));
mgr.addToRoot(new PreferenceNode("scripts", new ScriptsPage()));
mgr.addTo("scripts", new PreferenceNode("actionScripts",
new ActionScriptsPage()));
mgr.addTo("scripts", new PreferenceNode("actionScriptKeys",
new ActionKeyBindingsPage()));
mgr.addTo("scripts", new PreferenceNode("regex",
new ChatEventScripts()));
mgr.addTo("scripts", new PreferenceNode("rightClickScripts",
new ChatConsoleRightClickScripts()));
mgr.addToRoot(new PreferenceNode("seeks", new SeekPage()));
mgr.addToRoot(new PreferenceNode("sound", new SoundPage()));
mgr.addToRoot(new PreferenceNode("speech", new SpeechPage()));
// Add the connector preference nodes.
Connector[] connectors = ConnectorService.getInstance().getConnectors();
for (Connector connector : connectors) {
PreferencePage root = connector.getRootPreferencePage();
if (root != null) {
mgr
.addToRoot(new PreferenceNode(connector.getShortName(),
root));
PreferenceNode[] secondaries = connector
.getSecondaryPreferenceNodes();
if (secondaries != null && secondaries.length > 0) {
for (PreferenceNode node : secondaries) {
mgr.addTo(connector.getShortName(), node);
}
}
}
}
// Create the preferences dialog
dlg = new PreferenceDialog(Raptor.getInstance()
.getWindow().getShell(), mgr);
}
}
| |
// Copyright 2021 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.page_info;
import static androidx.test.espresso.Espresso.onView;
import static androidx.test.espresso.action.ViewActions.click;
import static androidx.test.espresso.assertion.ViewAssertions.matches;
import static androidx.test.espresso.matcher.ViewMatchers.isDisplayed;
import static androidx.test.espresso.matcher.ViewMatchers.withId;
import static androidx.test.espresso.matcher.ViewMatchers.withText;
import static org.hamcrest.CoreMatchers.allOf;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertEquals;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doReturn;
import static org.chromium.ui.test.util.ViewUtils.onViewWaiting;
import android.annotation.SuppressLint;
import androidx.annotation.NonNull;
import androidx.test.espresso.ViewAssertion;
import androidx.test.filters.MediumTest;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.MockitoAnnotations;
import org.chromium.base.StrictModeContext;
import org.chromium.base.metrics.RecordHistogram;
import org.chromium.base.test.util.Batch;
import org.chromium.base.test.util.CallbackHelper;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.JniMocker;
import org.chromium.chrome.R;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.flags.ChromeFeatureList;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.tab.Tab;
import org.chromium.chrome.browser.tab.TabLaunchType;
import org.chromium.chrome.browser.tabmodel.TabModel;
import org.chromium.chrome.browser.tabmodel.TabModelObserver;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.ChromeTabbedActivityTestRule;
import org.chromium.chrome.test.batch.BlankCTATabInitialStateRule;
import org.chromium.chrome.test.util.ChromeRenderTestRule;
import org.chromium.chrome.test.util.browser.Features;
import org.chromium.components.page_info.PageInfoAction;
import org.chromium.components.page_info.PageInfoController;
import org.chromium.components.page_info.proto.AboutThisSiteMetadataProto.Hyperlink;
import org.chromium.components.page_info.proto.AboutThisSiteMetadataProto.SiteDescription;
import org.chromium.components.page_info.proto.AboutThisSiteMetadataProto.SiteInfo;
import org.chromium.content_public.browser.BrowserContextHandle;
import org.chromium.content_public.browser.WebContents;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
import org.chromium.content_public.common.ContentSwitches;
import org.chromium.net.test.EmbeddedTestServerRule;
import org.chromium.ui.test.util.DisableAnimationsTestRule;
import org.chromium.url.GURL;
import java.io.IOException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeoutException;
/**
 * Tests for PageInfoAboutThisSite: the "About this site" row shown inside the
 * PageInfo bubble. The native endpoint that supplies the site description is
 * mocked via {@code mMockAboutThisSiteJni}, so these tests exercise only the
 * Java UI layer and the metrics it records.
 */
@RunWith(ChromeJUnit4ClassRunner.class)
@Features.EnableFeatures(ChromeFeatureList.PAGE_INFO_ABOUT_THIS_SITE)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE,
        ChromeSwitches.DISABLE_STARTUP_PROMOS,
        ContentSwitches.HOST_RESOLVER_RULES + "=MAP * 127.0.0.1"})
@Batch(Batch.PER_CLASS)
@SuppressLint("VisibleForTests")
public class PageInfoAboutThisSiteTest {
    // Path of the test page served by the embedded test server.
    private static final String sSimpleHtml = "/chrome/test/data/android/simple.html";

    @ClassRule
    public static final ChromeTabbedActivityTestRule sActivityTestRule =
            new ChromeTabbedActivityTestRule();

    // Disable animations so Espresso view assertions do not flake.
    @ClassRule
    public static DisableAnimationsTestRule sDisableAnimationsTestRule =
            new DisableAnimationsTestRule();

    @Rule
    public final BlankCTATabInitialStateRule mInitialStateRule =
            new BlankCTATabInitialStateRule(sActivityTestRule, false);

    @Rule
    public EmbeddedTestServerRule mTestServerRule = new EmbeddedTestServerRule();

    @Rule
    public JniMocker mMocker = new JniMocker();

    @Rule
    public ChromeRenderTestRule mRenderTestRule =
            ChromeRenderTestRule.Builder.withPublicCorpus().build();

    // Mocked native interface; returns the serialized SiteInfo proto (or null).
    @Mock
    private PageInfoAboutThisSiteController.Natives mMockAboutThisSiteJni;

    /**
     * Installs the JNI mock, loads the test page over HTTPS (the row is shown
     * only for secure origins — see testAboutThisSiteRowWithDataOnInsecureSite)
     * and resets every histogram inspected by the tests below.
     */
    @Before
    public void setUp() {
        MockitoAnnotations.initMocks(this);
        mMocker.mock(PageInfoAboutThisSiteControllerJni.TEST_HOOKS, mMockAboutThisSiteJni);
        mTestServerRule.setServerUsesHttps(true);
        sActivityTestRule.loadUrl(mTestServerRule.getServer().getURL(sSimpleHtml));
        RecordHistogram.forgetHistogramForTesting("Security.PageInfo.TimeOpen.AboutThisSiteShown");
        RecordHistogram.forgetHistogramForTesting(
                "Security.PageInfo.TimeOpen.AboutThisSiteNotShown");
        RecordHistogram.forgetHistogramForTesting("WebsiteSettings.Action");
    }

    /** Opens the PageInfo bubble for the current tab and waits until it is visible. */
    private void openPageInfo() {
        ChromeTabbedActivity activity = sActivityTestRule.getActivity();
        Tab tab = activity.getActivityTab();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            new ChromePageInfo(activity.getModalDialogManagerSupplier(), null,
                    PageInfoController.OpenedFromSource.TOOLBAR, null)
                    .show(tab, ChromePageInfoHighlight.noHighlight());
        });
        onViewWaiting(allOf(withId(org.chromium.chrome.R.id.page_info_url_wrapper), isDisplayed()));
    }

    /** Dismisses the PageInfo bubble and blocks until dismissal has completed. */
    private void dismissPageInfo() throws TimeoutException {
        CallbackHelper helper = new CallbackHelper();
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            PageInfoController.getLastPageInfoControllerForTesting().runAfterDismiss(
                    helper::notifyCalled);
        });
        helper.waitForCallback(0);
    }

    /**
     * Returns a ViewAssertion that renders the matched view under the given id
     * for render-test comparison; rethrows when the view was not matched.
     */
    @NonNull
    private ViewAssertion renderView(String renderId) {
        return (v, noMatchException) -> {
            if (noMatchException != null) throw noMatchException;
            // Allow disk writes and slow calls to render from UI thread.
            try (StrictModeContext ignored = StrictModeContext.allowAllThreadPolicies()) {
                mRenderTestRule.render(v, renderId);
            } catch (IOException e) {
                throw new RuntimeException(e);
            }
        };
    }

    /** Makes the mocked native getSiteInfo() return {@code bytes} for any arguments. */
    private void mockResponse(byte[] bytes) {
        doReturn(bytes)
                .when(mMockAboutThisSiteJni)
                .getSiteInfo(
                        any(BrowserContextHandle.class), any(GURL.class), any(WebContents.class));
    }

    /** Builds a serialized SiteInfo proto with a description plus a source hyperlink. */
    private byte[] createDescription() {
        String url = mTestServerRule.getServer().getURL(sSimpleHtml);
        SiteDescription.Builder description =
                SiteDescription.newBuilder()
                        .setDescription("Some description about example.com for testing purposes")
                        .setSource(Hyperlink.newBuilder().setUrl(url).setLabel("Example Source"));
        return SiteInfo.newBuilder().setDescription(description).build().toByteArray();
    }

    // Row and its description are shown when the native mock returns data, and
    // only the "shown" TimeOpen histogram is recorded on dismissal.
    @Test
    @MediumTest
    public void testAboutThisSiteRowWithData() throws TimeoutException {
        mockResponse(createDescription());
        openPageInfo();
        onView(withId(PageInfoAboutThisSiteController.ROW_ID)).check(matches(isDisplayed()));
        onView(withText(containsString("Some description"))).check(matches(isDisplayed()));
        dismissPageInfo();
        assertEquals(1,
                RecordHistogram.getHistogramTotalCountForTesting(
                        "Security.PageInfo.TimeOpen.AboutThisSiteShown"));
        assertEquals(0,
                RecordHistogram.getHistogramTotalCountForTesting(
                        "Security.PageInfo.TimeOpen.AboutThisSiteNotShown"));
    }

    // The row is suppressed on a host with an invalid certificate even when
    // the native mock has data for it.
    @Test
    @MediumTest
    public void testAboutThisSiteRowWithDataOnInsecureSite() {
        sActivityTestRule.loadUrl(
                mTestServerRule.getServer().getURLWithHostName("invalidcert.com", sSimpleHtml));
        mockResponse(createDescription());
        openPageInfo();
        onView(withId(PageInfoAboutThisSiteController.ROW_ID)).check(matches(not(isDisplayed())));
    }

    // Without data the row is hidden and only the "not shown" histogram fires.
    @Test
    @MediumTest
    public void testAboutThisSiteRowWithoutData() throws TimeoutException {
        mockResponse(null);
        openPageInfo();
        onView(withId(PageInfoAboutThisSiteController.ROW_ID)).check(matches(not(isDisplayed())));
        dismissPageInfo();
        assertEquals(0,
                RecordHistogram.getHistogramTotalCountForTesting(
                        "Security.PageInfo.TimeOpen.AboutThisSiteShown"));
        assertEquals(1,
                RecordHistogram.getHistogramTotalCountForTesting(
                        "Security.PageInfo.TimeOpen.AboutThisSiteNotShown"));
    }

    // Pixel test of the row itself.
    @Test
    @MediumTest
    @Feature({"RenderTest"})
    public void testAboutThisSiteRowRendering() {
        mockResponse(createDescription());
        openPageInfo();
        onView(withId(PageInfoAboutThisSiteController.ROW_ID))
                .check(renderView("page_info_about_this_site_row"));
    }

    // Pixel test of the subpage reached by clicking the row.
    @Test
    @MediumTest
    @Feature({"RenderTest"})
    public void testAboutThisSiteSubPageRendering() {
        mockResponse(createDescription());
        openPageInfo();
        onView(withId(PageInfoAboutThisSiteController.ROW_ID)).perform(click());
        onView(withId(R.id.page_info_wrapper))
                .check(renderView("page_info_about_this_site_subpage"));
    }

    // Clicking the source link on the subpage opens a new tab and records the
    // expected sequence of WebsiteSettings.Action samples (opened -> page
    // opened -> source link clicked).
    @Test
    @MediumTest
    public void testAboutThisSiteSubPageSourceClicked()
            throws ExecutionException, TimeoutException {
        assertEquals(0, RecordHistogram.getHistogramTotalCountForTesting("WebsiteSettings.Action"));
        mockResponse(createDescription());
        openPageInfo();
        assertEquals(1, RecordHistogram.getHistogramTotalCountForTesting("WebsiteSettings.Action"));
        assertEquals(1,
                RecordHistogram.getHistogramValueCountForTesting(
                        "WebsiteSettings.Action", PageInfoAction.PAGE_INFO_OPENED));
        onView(withId(PageInfoAboutThisSiteController.ROW_ID)).perform(click());
        assertEquals(2, RecordHistogram.getHistogramTotalCountForTesting("WebsiteSettings.Action"));
        assertEquals(1,
                RecordHistogram.getHistogramValueCountForTesting("WebsiteSettings.Action",
                        PageInfoAction.PAGE_INFO_ABOUT_THIS_SITE_PAGE_OPENED));
        // Observe the tab model so we can wait for the new tab spawned by the
        // source link instead of sleeping.
        final CallbackHelper onTabAdded = new CallbackHelper();
        final TabModelObserver observer = new TabModelObserver() {
            @Override
            public void willAddTab(Tab tab, @TabLaunchType int type) {
                onTabAdded.notifyCalled();
            }
        };
        final TabModel tabModel = sActivityTestRule.getActivity().getCurrentTabModel();
        TestThreadUtils.runOnUiThreadBlocking(() -> tabModel.addObserver(observer));
        final int callCount = onTabAdded.getCallCount();
        onView(withText(containsString("Example Source"))).perform(click());
        onTabAdded.waitForCallback(callCount);
        TestThreadUtils.runOnUiThreadBlocking(() -> tabModel.removeObserver(observer));
        assertEquals(3, RecordHistogram.getHistogramTotalCountForTesting("WebsiteSettings.Action"));
        assertEquals(1,
                RecordHistogram.getHistogramValueCountForTesting("WebsiteSettings.Action",
                        PageInfoAction.PAGE_INFO_ABOUT_THIS_SITE_SOURCE_LINK_CLICKED));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.common;
import static org.apache.solr.common.cloud.ZkStateReader.URL_SCHEME;
import static org.apache.solr.common.cloud.ZkStateReader.getCollectionPathRoot;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.BiConsumer;
import org.apache.solr.cluster.api.*;
import org.apache.solr.common.cloud.*;
import org.apache.solr.common.util.Utils;
import org.apache.solr.common.util.WrappedSimpleMap;
import org.apache.zookeeper.KeeperException;
/**
 * Reference implementation of {@link SolrCluster}. As much as possible, all
 * values are fetched lazily because the value of anything can change at any
 * moment. Creating an instance is a low-cost operation: it does not result in
 * a network call or large object creation.
 */
public class LazySolrCluster implements SolrCluster {
  final ZkStateReader zkStateReader;
  // Cache of collection wrappers; an entry is replaced whenever the
  // underlying DocCollection instance changes (see _collection()).
  private final Map<String, SolrCollectionImpl> cached = new ConcurrentHashMap<>();
  private final SimpleMap<SolrCollection> collections;
  private final SimpleMap<SolrCollection> collectionsAndAliases;
  private final SimpleMap<SolrNode> nodes;
  // Created lazily on first configs() call; see the note there about races.
  private SimpleMap<CollectionConfig> configs;

  public LazySolrCluster(ZkStateReader zkStateReader) {
    this.zkStateReader = zkStateReader;
    collections = lazyCollectionsMap(zkStateReader);
    collectionsAndAliases = lazyCollectionsWithAlias(zkStateReader);
    nodes = lazyNodeMap();
  }

  /**
   * Builds a lazy view over the config sets stored under /configs in ZK.
   * The set of names is snapshotted once, here; the ConfigImpl values are
   * created on demand.
   */
  private SimpleMap<CollectionConfig> lazyConfigMap() {
    Set<String> configNames = new HashSet<>();
    // Collect only top-level entries (config-set names, not files inside
    // them). NOTE(review): presumably returning FALSE aborts the iteration at
    // the first nested path — confirm against SimpleZkMap.abortableForEach.
    new SimpleZkMap(zkStateReader, ZkStateReader.CONFIGS_ZKNODE)
        .abortableForEach(
            (name, resource) -> {
              if (!name.contains("/")) {
                configNames.add(name);
                return Boolean.TRUE;
              }
              return Boolean.FALSE;
            });
    return new SimpleMap<CollectionConfig>() {
      @Override
      public CollectionConfig get(String key) {
        if (configNames.contains(key)) {
          return new ConfigImpl(key);
        } else {
          return null;
        }
      }

      @Override
      public void forEachEntry(BiConsumer<String, ? super CollectionConfig> fun) {
        for (String name : configNames) {
          fun.accept(name, new ConfigImpl(name));
        }
      }

      @Override
      public int size() {
        return configNames.size();
      }
    };
  }

  /** Lazy view over the live nodes of the cluster, backed by cluster state. */
  private SimpleMap<SolrNode> lazyNodeMap() {
    return new SimpleMap<SolrNode>() {
      @Override
      public SolrNode get(String key) {
        // Only live nodes are addressable.
        if (!zkStateReader.getClusterState().liveNodesContain(key)) {
          return null;
        }
        return new Node(key);
      }

      @Override
      public void forEachEntry(BiConsumer<String, ? super SolrNode> fun) {
        for (String s : zkStateReader.getClusterState().getLiveNodes()) {
          fun.accept(s, new Node(s));
        }
      }

      @Override
      public int size() {
        return zkStateReader.getClusterState().getLiveNodes().size();
      }
    };
  }

  /**
   * Lazy view over collections where a lookup by alias name resolves to the
   * first collection the alias points at.
   */
  private SimpleMap<SolrCollection> lazyCollectionsWithAlias(ZkStateReader zkStateReader) {
    return new SimpleMap<SolrCollection>() {
      @Override
      public SolrCollection get(String key) {
        // Prefer a real collection of that name; fall back to alias lookup.
        SolrCollection result = collections.get(key);
        if (result != null) return result;
        Aliases aliases = zkStateReader.getAliases();
        List<String> aliasNames = aliases.resolveAliases(key);
        if (aliasNames == null || aliasNames.isEmpty()) return null;
        // An alias may point at multiple collections; only the first is used.
        return _collection(aliasNames.get(0), null);
      }

      @Override
      public void forEachEntry(BiConsumer<String, ? super SolrCollection> fun) {
        collections.forEachEntry(fun);
        Aliases aliases = zkStateReader.getAliases();
        aliases.forEachAlias(
            (s, colls) -> {
              if (colls == null || colls.isEmpty()) return;
              fun.accept(s, _collection(colls.get(0), null));
            });
      }

      @Override
      public int size() {
        // NOTE(review): collections reachable both directly and via an alias
        // are counted twice here — confirm callers tolerate that.
        return collections.size() + zkStateReader.getAliases().size();
      }
    };
  }

  /** Lazy view over the collections present in the current cluster state. */
  private SimpleMap<SolrCollection> lazyCollectionsMap(ZkStateReader zkStateReader) {
    return new SimpleMap<SolrCollection>() {
      @Override
      public SolrCollection get(String key) {
        return _collection(key, null);
      }

      @Override
      public void forEachEntry(BiConsumer<String, ? super SolrCollection> fun) {
        zkStateReader
            .getClusterState()
            .forEachCollection(
                coll -> fun.accept(coll.getName(), _collection(coll.getName(), coll)));
      }

      @Override
      public int size() {
        return zkStateReader.getClusterState().size();
      }
    };
  }

  /**
   * Returns the cached wrapper for the given collection, refreshing it when
   * the underlying DocCollection instance has changed (identity comparison)
   * and evicting it when the collection no longer exists.
   *
   * @param key collection name
   * @param c the DocCollection if the caller already has it, else null
   */
  private SolrCollection _collection(String key, DocCollection c) {
    if (c == null) c = zkStateReader.getCollection(key);
    if (c == null) {
      // Collection is gone; drop any stale cache entry.
      cached.remove(key);
      return null;
    }
    SolrCollectionImpl existing = cached.get(key);
    if (existing == null || existing.coll != c) {
      cached.put(key, existing = new SolrCollectionImpl(c, zkStateReader));
    }
    return existing;
  }

  @Override
  public SimpleMap<SolrCollection> collections() throws SolrException {
    return collections;
  }

  @Override
  public SimpleMap<SolrCollection> collections(boolean includeAlias) throws SolrException {
    return includeAlias ? collectionsAndAliases : collections;
  }

  @Override
  public SimpleMap<SolrNode> nodes() throws SolrException {
    return nodes;
  }

  @Override
  public SimpleMap<CollectionConfig> configs() throws SolrException {
    if (configs == null) {
      // These are lightweight objects and we don't care even if multiple
      // objects are created because of a race condition.
      configs = lazyConfigMap();
    }
    return configs;
  }

  @Override
  public String overseerNode() throws SolrException {
    // Not supported by this implementation.
    return null;
  }

  @Override
  public String thisNode() {
    // Not supported by this implementation.
    return null;
  }

  /** Immutable-per-DocCollection wrapper; shards are materialized eagerly. */
  private class SolrCollectionImpl implements SolrCollection {
    final DocCollection coll;
    final SimpleMap<Shard> shards;
    final ZkStateReader zkStateReader;
    final Router router;
    // Config-set name, resolved lazily in config().
    String confName;

    private SolrCollectionImpl(DocCollection coll, ZkStateReader zkStateReader) {
      this.coll = coll;
      this.zkStateReader = zkStateReader;
      this.router = key -> coll.getRouter().getTargetSlice(key, null, null, null, null).getName();
      LinkedHashMap<String, Shard> map = new LinkedHashMap<>();
      for (Slice slice : coll.getSlices()) {
        map.put(slice.getName(), new ShardImpl(this, slice));
      }
      shards = new WrappedSimpleMap<>(map);
    }

    @Override
    public String name() {
      return coll.getName();
    }

    @Override
    public SimpleMap<Shard> shards() {
      return shards;
    }

    /**
     * Returns the name of the config set backing this collection, reading it
     * from the collection's ZK node on first use. Returns null if it cannot
     * be determined.
     */
    @Override
    public String config() {
      if (confName == null) {
        // do this lazily . It's usually not necessary
        try {
          byte[] d =
              zkStateReader
                  .getZkClient()
                  .getData(getCollectionPathRoot(coll.getName()), null, null, true);
          if (d == null || d.length == 0) return null;
          Map<?, ?> m = (Map<?, ?>) Utils.fromJSON(d);
          confName = (String) m.get("configName");
        } catch (KeeperException | InterruptedException e) {
          // throwZkExp rethrows (and re-interrupts) as appropriate.
          SimpleZkMap.throwZkExp(e);
          // cannot read from ZK
          return null;
        }
      }
      return confName;
    }

    @Override
    public Router router() {
      return router;
    }
  }

  /** Wrapper for one slice of a collection. */
  private class ShardImpl implements Shard {
    final SolrCollectionImpl collection;
    final Slice slice;
    final HashRange range;
    final SimpleMap<ShardReplica> replicas;

    private ShardImpl(SolrCollectionImpl collection, Slice slice) {
      this.collection = collection;
      this.slice = slice;
      range = _range(slice);
      replicas = _replicas();
    }

    // Materializes the replica wrappers for this slice.
    private SimpleMap<ShardReplica> _replicas() {
      Map<String, ShardReplica> replicas = new HashMap<>();
      slice.forEach(
          replica ->
              replicas.put(replica.getName(), new ShardReplicaImpl(ShardImpl.this, replica)));
      return new WrappedSimpleMap<>(replicas);
    }

    // Adapts the slice's hash range, if any, to the HashRange interface.
    private HashRange _range(Slice slice) {
      return slice.getRange() == null
          ? null
          : new HashRange() {
            @Override
            public int min() {
              return slice.getRange().min;
            }

            @Override
            public int max() {
              return slice.getRange().max;
            }
          };
    }

    @Override
    public String name() {
      return slice.getName();
    }

    @Override
    public String collection() {
      return collection.name();
    }

    @Override
    public HashRange range() {
      return range;
    }

    @Override
    public SimpleMap<ShardReplica> replicas() {
      return replicas;
    }

    @Override
    public String leader() {
      Replica leader = slice.getLeader();
      return leader == null ? null : leader.getName();
    }
  }

  /** Wrapper for one replica of a shard. */
  private class ShardReplicaImpl implements ShardReplica {
    private final ShardImpl shard;
    private final Replica replica;

    private ShardReplicaImpl(ShardImpl shard, Replica replica) {
      this.shard = shard;
      this.replica = replica;
    }

    @Override
    public String name() {
      return replica.getName();
    }

    @Override
    public String shard() {
      return shard.name();
    }

    @Override
    public String collection() {
      return shard.collection.name();
    }

    @Override
    public String node() {
      return replica.getNodeName();
    }

    @Override
    public String core() {
      return replica.getCoreName();
    }

    @Override
    public Replica.Type type() {
      return replica.getType();
    }

    // A replica is alive only if its node is live AND its own state is ACTIVE.
    @Override
    public boolean alive() {
      return zkStateReader.getClusterState().getLiveNodes().contains(node())
          && replica.getState() == Replica.State.ACTIVE;
    }

    @Override
    public long indexSize() {
      // todo implement later
      throw new UnsupportedOperationException("Not yet implemented");
    }

    @Override
    public boolean isLeader() {
      return Objects.equals(shard.leader(), name());
    }

    /** URL of this core; layout differs between the V1 and V2 APIs. */
    @Override
    public String url(ApiType type) {
      String base = nodes.get(node()).baseUrl(type);
      if (type == ApiType.V2) {
        return base + "/cores/" + core();
      } else {
        return base + "/" + core();
      }
    }
  }

  /** A live node, addressed by its node name. */
  private class Node implements SolrNode {
    private final String name;

    private Node(String name) {
      this.name = name;
    }

    @Override
    public String name() {
      return name;
    }

    @Override
    public String baseUrl(ApiType apiType) {
      // URL scheme defaults to http unless the cluster property overrides it.
      return Utils.getBaseUrlForNodeName(
          name, zkStateReader.getClusterProperty(URL_SCHEME, "http"), apiType == ApiType.V2);
    }

    @Override
    public SimpleMap<ShardReplica> cores() {
      // todo implement later
      // this requires a call to the node
      throw new UnsupportedOperationException("Not yet implemented");
    }
  }

  /** A config set; its resources are a lazy ZK-backed map under /configs/&lt;name&gt;. */
  private class ConfigImpl implements CollectionConfig {
    final String name;
    final SimpleMap<Resource> resources;
    final String path;

    private ConfigImpl(String name) {
      this.name = name;
      path = ZkStateReader.CONFIGS_ZKNODE + "/" + name;
      this.resources = new SimpleZkMap(zkStateReader, path);
    }

    @Override
    public SimpleMap<Resource> resources() {
      return resources;
    }

    @Override
    public String name() {
      return name;
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode.ha;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.ha.HAServiceProtocol.RequestSource;
import org.apache.hadoop.ha.HAServiceProtocol.StateChangeRequestInfo;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DFSClientAdapter;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSOutputStream;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.MiniDFSNNTopology;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManagerTestUtil;
import org.apache.hadoop.hdfs.server.namenode.FSImage;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.ipc.StandbyException;
import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.mockito.internal.util.reflection.Whitebox;
import com.google.common.base.Supplier;
import com.google.common.collect.Lists;
/**
* Tests that exercise safemode in an HA cluster.
*/
public class TestHASafeMode {
private static final Log LOG = LogFactory.getLog(TestHASafeMode.class);
private static final int BLOCK_SIZE = 1024;
private NameNode nn0;
private NameNode nn1;
private FileSystem fs;
private MiniDFSCluster cluster;
  // Crank NameNode and FSImage logging up to ALL so safemode transitions are
  // fully visible in the test output.
  static {
    DFSTestUtil.setNameNodeLogLevel(Level.ALL);
    GenericTestUtils.setLogLevel(FSImage.LOG, Level.ALL);
  }
  /**
   * Brings up a two-NameNode HA mini cluster with three datanodes, small
   * blocks and fast heartbeats/edit tailing, then transitions NN0 to active.
   * waitSafeMode(false) is used so individual tests can observe and steer
   * safemode themselves.
   */
  @Before
  public void setupCluster() throws Exception {
    Configuration conf = new Configuration();
    conf.setInt(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    conf.setInt(DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
      .nnTopology(MiniDFSNNTopology.simpleHATopology())
      .numDataNodes(3)
      .waitSafeMode(false)
      .build();
    cluster.waitActive();
    nn0 = cluster.getNameNode(0);
    nn1 = cluster.getNameNode(1);
    fs = HATestUtil.configureFailoverFs(cluster, conf);
    cluster.transitionToActive(0);
  }
@After
public void shutdownCluster() {
if (cluster != null) {
cluster.shutdown();
cluster = null;
}
}
  /**
   * Make sure the client retries when the active NN is in safemode: a mkdir
   * issued while NN0 is in (simulated) startup safemode must block/retry and
   * then succeed once safemode is left.
   */
  @Test (timeout=300000)
  public void testClientRetrySafeMode() throws Exception {
    final Map<Path, Boolean> results = Collections
        .synchronizedMap(new HashMap<Path, Boolean>());
    final Path test = new Path("/test");
    // let nn0 enter safemode
    cluster.getConfiguration(0).setInt(
        DFSConfigKeys.DFS_NAMENODE_SAFEMODE_MIN_DATANODES_KEY, 3);
    NameNodeAdapter.enterSafeMode(nn0, false);
    // Clear the manual flag and force startup safemode so the NN looks as if
    // it were still starting up (the case clients are expected to retry on).
    Whitebox.setInternalState(nn0.getNamesystem(), "manualSafeMode", false);
    BlockManagerTestUtil.setStartupSafeModeForTest(nn0.getNamesystem()
        .getBlockManager());
    assertTrue(nn0.getNamesystem().isInStartupSafeMode());
    LOG.info("enter safemode");
    // Issue the mkdir from a separate thread; it records its result and
    // notifies the test once the (retried) call finally completes.
    new Thread() {
      @Override
      public void run() {
        try {
          boolean mkdir = fs.mkdirs(test);
          LOG.info("mkdir finished, result is " + mkdir);
          synchronized (TestHASafeMode.this) {
            results.put(test, mkdir);
            TestHASafeMode.this.notifyAll();
          }
        } catch (Exception e) {
          LOG.info("Got Exception while calling mkdir", e);
        }
      }
    }.start();
    // make sure the client's call has actually been handled by the active NN
    assertFalse("The directory should not be created while NN in safemode",
        fs.exists(test));
    Thread.sleep(1000);
    // let nn0 leave safemode
    NameNodeAdapter.leaveSafeMode(nn0);
    LOG.info("leave safemode");
    // Wait for the background mkdir to post its result, then verify the
    // retried call succeeded.
    synchronized (this) {
      while (!results.containsKey(test)) {
        this.wait();
      }
      assertTrue(results.get(test));
    }
  }
private void restartStandby() throws IOException {
cluster.shutdownNameNode(1);
// Set the safemode extension to be lengthy, so that the tests
// can check the safemode message after the safemode conditions
// have been achieved, without being racy.
cluster.getConfiguration(1).setInt(
DFSConfigKeys.DFS_NAMENODE_SAFEMODE_EXTENSION_KEY, 30000);
cluster.getConfiguration(1).setInt(
DFSConfigKeys.DFS_HA_TAILEDITS_PERIOD_KEY, 1);
cluster.restartNameNode(1);
nn1 = cluster.getNameNode(1);
assertEquals(nn1.getNamesystem().getTransactionsSinceLastLogRoll(), 0L);
}
  /**
   * Test case for enter safemode in active namenode, when it is already in
   * startup safemode. It is a regression test for HDFS-2747.
   */
  @Test
  public void testEnterSafeModeInANNShouldNotThrowNPE() throws Exception {
    banner("Restarting active");
    DFSTestUtil
        .createFile(fs, new Path("/test"), 3 * BLOCK_SIZE, (short) 3, 1L);
    restartActive();
    nn0.getRpcServer().transitionToActive(
        new StateChangeRequestInfo(RequestSource.REQUEST_BY_USER));
    // After restart the NN should still be in (startup) safemode thanks to
    // the long extension configured in restartActive().
    FSNamesystem namesystem = nn0.getNamesystem();
    String status = namesystem.getSafemode();
    assertTrue("Bad safemode status: '" + status + "'", status
        .startsWith("Safe mode is ON."));
    // Entering safemode manually while already in startup safemode must not
    // throw (HDFS-2747); done twice to also cover the already-manual case.
    NameNodeAdapter.enterSafeMode(nn0, false);
    assertTrue("Failed to enter into safemode in active", namesystem
        .isInSafeMode());
    NameNodeAdapter.enterSafeMode(nn0, false);
    assertTrue("Failed to enter into safemode in active", namesystem
        .isInSafeMode());
  }
  /**
   * Test case for enter safemode in standby namenode, when it is already in
   * startup safemode. It is a regression test for HDFS-2747.
   */
  @Test
  public void testEnterSafeModeInSBNShouldNotThrowNPE() throws Exception {
    banner("Starting with NN0 active and NN1 standby, creating some blocks");
    DFSTestUtil
        .createFile(fs, new Path("/test"), 3 * BLOCK_SIZE, (short) 3, 1L);
    // Roll edit log so that, when the SBN restarts, it will load
    // the namespace during startup and enter safemode.
    nn0.getRpcServer().rollEditLog();
    banner("Creating some blocks that won't be in the edit log");
    DFSTestUtil.createFile(fs, new Path("/test2"), 5 * BLOCK_SIZE, (short) 3,
        1L);
    banner("Deleting the original blocks");
    fs.delete(new Path("/test"), true);
    banner("Restarting standby");
    restartStandby();
    // The restarted standby should be in startup safemode (long extension
    // configured in restartStandby()).
    FSNamesystem namesystem = nn1.getNamesystem();
    String status = namesystem.getSafemode();
    assertTrue("Bad safemode status: '" + status + "'", status
        .startsWith("Safe mode is ON."));
    // Entering safemode manually while already in startup safemode must not
    // throw (HDFS-2747); done twice to also cover the already-manual case.
    NameNodeAdapter.enterSafeMode(nn1, false);
    assertTrue("Failed to enter into safemode in standby", namesystem
        .isInSafeMode());
    NameNodeAdapter.enterSafeMode(nn1, false);
    assertTrue("Failed to enter into safemode in standby", namesystem
        .isInSafeMode());
  }
  /**
   * Restarts NN0 (the active) with a long (30s) safemode extension so tests
   * can inspect the safemode message without racing against automatic exit.
   *
   * @throws IOException if the NameNode restart fails
   */
  private void restartActive() throws IOException {
    cluster.shutdownNameNode(0);
    // Set the safemode extension to be lengthy, so that the tests
    // can check the safemode message after the safemode conditions
    // have been achieved, without being racy.
    cluster.getConfiguration(0).setInt(
        DFSConfigKeys.DFS_NAMENODE_SAFEMODE_EXTENSION_KEY, 30000);
    cluster.restartNameNode(0);
    nn0 = cluster.getNameNode(0);
  }
/**
* Tests the case where, while a standby is down, more blocks are
* added to the namespace, but not rolled. So, when it starts up,
* it receives notification about the new blocks during
* the safemode extension period.
*/
@Test
public void testBlocksAddedBeforeStandbyRestart() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 3*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  banner("Creating some blocks that won't be in the edit log");
  DFSTestUtil.createFile(fs, new Path("/test2"), 5*BLOCK_SIZE, (short) 3, 1L);
  banner("Restarting standby");
  restartStandby();
  // We expect it not to be stuck in safemode, since those blocks
  // that are already visible to the SBN should be processed
  // in the initial block reports.
  // Only the 3 blocks from the rolled edits are known, and all 3 are safe.
  assertSafeMode(nn1, 3, 3, 3, 0);
  banner("Waiting for standby to catch up to active namespace");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // After tailing the remaining edits the SBN knows all 8 blocks, and the
  // earlier block reports already cover them.
  assertSafeMode(nn1, 8, 8, 3, 0);
}
/**
* Similar to {@link #testBlocksAddedBeforeStandbyRestart()} except that
* the new blocks are allocated after the SBN has restarted. So, the
* blocks were not present in the original block reports at startup
* but are reported separately by blockReceived calls.
*/
@Test
public void testBlocksAddedWhileInSafeMode() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 3*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  banner("Restarting standby");
  restartStandby();
  assertSafeMode(nn1, 3, 3, 3, 0);
  // Create a few blocks which will send blockReceived calls to the
  // SBN.
  banner("Creating some blocks while SBN is in safe mode");
  DFSTestUtil.createFile(fs, new Path("/test2"), 5*BLOCK_SIZE, (short) 3, 1L);
  banner("Waiting for standby to catch up to active namespace");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // Once the edits are tailed, the blockReceived messages should account
  // for the 5 new blocks: 8 total, all safe.
  assertSafeMode(nn1, 8, 8, 3, 0);
}
/**
* Test for the following case proposed by ATM:
* 1. Both NNs are up, one is active. There are 100 blocks. Both are
* out of safemode.
* 2. 10 block deletions get processed by NN1. NN2 enqueues these DN messages
* until it next reads from a checkpointed edits file.
* 3. NN2 gets restarted. Its queues are lost.
* 4. NN2 comes up, reads from all the finalized edits files. Concludes there
* should still be 100 blocks.
* 5. NN2 receives a block report from all the DNs, which only accounts for
* 90 blocks. It doesn't leave safemode.
* 6. NN1 dies or is transitioned to standby.
* 7. NN2 is transitioned to active. It reads all the edits from NN1. It now
* knows there should only be 90 blocks, but it's still in safemode.
* 8. NN2 doesn't ever recheck whether it should leave safemode.
*
* This is essentially the inverse of {@link #testBlocksAddedBeforeStandbyRestart()}
*/
@Test
public void testBlocksRemovedBeforeStandbyRestart() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 5*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  // Delete those blocks again, so they won't get reported to the SBN
  // once it starts up
  banner("Removing the blocks without rolling the edit log");
  fs.delete(new Path("/test"), true);
  // Push the deletion work out to the DNs so the replicas are actually
  // gone before the SBN restarts.
  BlockManagerTestUtil.computeAllPendingWork(
      nn0.getNamesystem().getBlockManager());
  cluster.triggerHeartbeats();
  banner("Restarting standby");
  restartStandby();
  // The SBN still expects the 5 blocks from the edits, but none of them
  // get reported, so it stays in safemode.
  assertSafeMode(nn1, 0, 5, 3, 0);
  banner("Waiting for standby to catch up to active namespace");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // After tailing the deletion edits the expected total drops to 0, so the
  // safemode threshold can be satisfied.
  assertSafeMode(nn1, 0, 0, 3, 0);
}
/**
* Similar to {@link #testBlocksRemovedBeforeStandbyRestart()} except that
* the blocks are removed after the SBN has restarted. So, the
* blocks were present in the original block reports at startup
* but are deleted separately later by deletion reports.
*/
@Test
public void testBlocksRemovedWhileInSafeMode() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 10*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  banner("Restarting standby");
  restartStandby();
  // It will initially have all of the blocks necessary.
  assertSafeMode(nn1, 10, 10, 3, 0);
  // Delete those blocks while the SBN is in safe mode.
  // The deletions are not acknowledged to the SBN until it reads the
  // corresponding edits, so its safe/total counts stay unchanged for now.
  banner("Removing the blocks without rolling the edit log");
  fs.delete(new Path("/test"), true);
  BlockManagerTestUtil.computeAllPendingWork(
      nn0.getNamesystem().getBlockManager());
  banner("Triggering deletions on DNs and Deletion Reports");
  cluster.triggerHeartbeats();
  HATestUtil.waitForDNDeletions(cluster);
  cluster.triggerDeletionReports();
  // Deletion reports were delivered, but the SBN queues them while in
  // safemode, so the counts are still 10/10.
  assertSafeMode(nn1, 10, 10, 3, 0);
  // When we catch up to active namespace, it will restore back
  // to 0 blocks.
  banner("Waiting for standby to catch up to active namespace");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  assertSafeMode(nn1, 0, 0, 3, 0);
}
/**
* Tests that the standby node properly tracks the number of total
* and safe blocks while it is in safe mode. Since safe-mode only
* counts completed blocks, append needs to decrement the total
* number of blocks and then re-increment when the file is closed
* again.
*/
@Test
public void testAppendWhileInSafeMode() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  // Make 4.5 blocks so that append() will re-open an existing block
  // instead of just adding a new one
  DFSTestUtil.createFile(fs, new Path("/test"),
      4*BLOCK_SIZE + BLOCK_SIZE/2, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  banner("Restarting standby");
  restartStandby();
  // It will initially have all of the blocks necessary.
  assertSafeMode(nn1, 5, 5, 3, 0);
  // Append to a block while SBN is in safe mode. This should
  // not affect safemode initially, since the DN message
  // will get queued.
  FSDataOutputStream stm = fs.append(new Path("/test"));
  try {
    assertSafeMode(nn1, 5, 5, 3, 0);
    // if we roll edits now, the SBN should see that it's under construction
    // and change its total count and safe count down by one, since UC
    // blocks are not counted by safe mode.
    HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
    assertSafeMode(nn1, 4, 4, 3, 0);
  } finally {
    // Closing the stream completes the re-opened block again.
    IOUtils.closeStream(stm);
  }
  // Delete those blocks while the SBN is in safe mode.
  // This will not ACK the deletions to the SBN, so it won't
  // notice until we roll the edit log.
  banner("Removing the blocks without rolling the edit log");
  fs.delete(new Path("/test"), true);
  BlockManagerTestUtil.computeAllPendingWork(
      nn0.getNamesystem().getBlockManager());
  banner("Triggering deletions on DNs and Deletion Reports");
  cluster.triggerHeartbeats();
  HATestUtil.waitForDNDeletions(cluster);
  cluster.triggerDeletionReports();
  // Deletion reports are queued while in safemode; counts are unchanged.
  assertSafeMode(nn1, 4, 4, 3, 0);
  // When we roll the edit log, the deletions will go through.
  banner("Waiting for standby to catch up to active namespace");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  assertSafeMode(nn1, 0, 0, 3, 0);
}
/**
* Regression test for a bug experienced while developing
* HDFS-2742. The scenario here is:
* - image contains some blocks
* - edits log contains at least one block addition, followed
* by deletion of more blocks than were added.
* - When node starts up, some incorrect accounting of block
* totals caused an assertion failure.
*/
@Test
public void testBlocksDeletedInEditLog() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  // Make 4 blocks persisted in the image.
  DFSTestUtil.createFile(fs, new Path("/test"),
      4*BLOCK_SIZE, (short) 3, 1L);
  // Checkpoint so that those 4 blocks land in the fsimage.
  NameNodeAdapter.enterSafeMode(nn0, false);
  NameNodeAdapter.saveNamespace(nn0);
  NameNodeAdapter.leaveSafeMode(nn0);
  // OP_ADD for 2 blocks
  DFSTestUtil.createFile(fs, new Path("/test2"),
      2*BLOCK_SIZE, (short) 3, 1L);
  // OP_DELETE for 4 blocks
  fs.delete(new Path("/test"), true);
  // Restarting must not trip the block-total accounting assertion
  // (regression check for the HDFS-2742 development bug described above).
  restartActive();
}
/**
 * Assert that the given NN's safemode status message matches the expected
 * safe/total block counts and datanode counts. When {@code safe == total}
 * the NN is expected to be in the safemode extension period; otherwise it
 * must be reporting how many additional blocks it still needs.
 */
private static void assertSafeMode(NameNode nn, int safe, int total,
    int numNodes, int nodeThresh) {
  final String status = nn.getNamesystem().getSafemode();
  final String expectedPrefix;
  if (safe != total) {
    // Still short of the 0.999 threshold: the message reports the deficit.
    int additional = (int) (total * 0.9990) - safe;
    expectedPrefix = "Safe mode is ON. The reported blocks " + safe
        + " needs additional " + additional + " blocks";
  } else {
    // Threshold reached: the NN should be in the safemode extension.
    expectedPrefix = "Safe mode is ON. The reported blocks " + safe
        + " has reached the threshold 0.9990 of total blocks " + total
        + ". The number of live datanodes " + numNodes
        + " has reached the minimum number " + nodeThresh
        + ". In safe mode extension. "
        + "Safe mode will be turned off automatically";
  }
  assertTrue("Bad safemode status: '" + status + "'",
      status.startsWith(expectedPrefix));
}
/**
* Set up a namesystem with several edits, both deletions and
* additions, and failover to a new NN while that NN is in
* safemode. Ensure that it will exit safemode.
*/
@Test
public void testComplexFailoverIntoSafemode() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 3*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup and enter safemode.
  nn0.getRpcServer().rollEditLog();
  banner("Creating some blocks that won't be in the edit log");
  DFSTestUtil.createFile(fs, new Path("/test2"), 5*BLOCK_SIZE, (short) 3, 1L);
  banner("Deleting the original blocks");
  fs.delete(new Path("/test"), true);
  banner("Restarting standby");
  restartStandby();
  // We expect it to be on its way out of safemode, since all of the blocks
  // from the edit log have been reported.
  assertSafeMode(nn1, 3, 3, 3, 0);
  // Initiate a failover into it while it's in safemode
  banner("Initiating a failover into NN1 in safemode");
  NameNodeAdapter.abortEditLogs(nn0);
  cluster.transitionToActive(1);
  // After reading the remaining edits (5 new blocks added, the original 3
  // deleted) the new active NN should still satisfy the safemode threshold.
  assertSafeMode(nn1, 5, 5, 3, 0);
}
/**
* Similar to {@link #testBlocksRemovedWhileInSafeMode()} except that
* the OP_DELETE edits arrive at the SBN before the block deletion reports.
* The tracking of safe blocks needs to properly account for the removal
* of the blocks as well as the safe count. This is a regression test for
* HDFS-2742.
*/
@Test
public void testBlocksRemovedWhileInSafeModeEditsArriveFirst() throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some blocks");
  DFSTestUtil.createFile(fs, new Path("/test"), 10*BLOCK_SIZE, (short) 3, 1L);
  // Roll edit log so that, when the SBN restarts, it will load
  // the namespace during startup.
  nn0.getRpcServer().rollEditLog();
  banner("Restarting standby");
  restartStandby();
  // It will initially have all of the blocks necessary.
  String status = nn1.getNamesystem().getSafemode();
  assertTrue("Bad safemode status: '" + status + "'",
      status.startsWith(
          "Safe mode is ON. The reported blocks 10 has reached the threshold "
          + "0.9990 of total blocks 10. The number of live datanodes 3 has "
          + "reached the minimum number 0. In safe mode extension. "
          + "Safe mode will be turned off automatically"));
  // Delete those blocks while the SBN is in safe mode.
  // Immediately roll the edit log before the actual deletions are sent
  // to the DNs.
  banner("Removing the blocks without rolling the edit log");
  fs.delete(new Path("/test"), true);
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // Should see removal of the blocks as well as their contribution to safe block count.
  assertSafeMode(nn1, 0, 0, 3, 0);
  banner("Triggering sending deletions to DNs and Deletion Reports");
  BlockManagerTestUtil.computeAllPendingWork(
      nn0.getNamesystem().getBlockManager());
  cluster.triggerHeartbeats();
  HATestUtil.waitForDNDeletions(cluster);
  cluster.triggerDeletionReports();
  // No change in assertion status here, but some of the consistency checks
  // in safemode will fire here if we accidentally decrement safe block count
  // below 0 (the HDFS-2742 regression this test guards against).
  assertSafeMode(nn1, 0, 0, 3, 0);
}
/** Safe block tracking when the full block report arrives before any
 * incremental reports; see {@link #testSafeBlockTracking(boolean)}. */
@Test
public void testSafeBlockTracking() throws Exception {
  testSafeBlockTracking(false);
}
/** Safe block tracking when incremental block reports precede the first
 * full block report; see {@link #testSafeBlockTracking(boolean)}. */
@Test
public void testSafeBlockTracking2() throws Exception {
  testSafeBlockTracking(true);
}
/**
* Test that the number of safe blocks is accounted correctly even when
* blocks move between under-construction state and completed state.
* If a FINALIZED report arrives at the SBN before the block is marked
* COMPLETE, then when we get the OP_CLOSE we need to count it as "safe"
* at that point. This is a regression test for HDFS-2742.
*
* @param noFirstBlockReport If this is set to true, we shutdown NN1 before
* closing the writing streams. In this way, when NN1 restarts, all DNs will
* first send it incremental block report before the first full block report.
* And NN1 will not treat the full block report as the first block report
* in BlockManager#processReport.
*/
private void testSafeBlockTracking(boolean noFirstBlockReport)
    throws Exception {
  banner("Starting with NN0 active and NN1 standby, creating some " +
      "UC blocks plus some other blocks to force safemode");
  DFSTestUtil.createFile(fs, new Path("/other-blocks"), 10*BLOCK_SIZE, (short) 3, 1L);
  List<FSDataOutputStream> stms = Lists.newArrayList();
  try {
    // Open 5 files and leave their last blocks under construction.
    for (int i = 0; i < 5; i++) {
      FSDataOutputStream stm = fs.create(new Path("/test-uc-" + i));
      stms.add(stm);
      stm.write(1);
      stm.hflush();
    }
    // Roll edit log so that, when the SBN restarts, it will load
    // the namespace during startup and enter safemode.
    nn0.getRpcServer().rollEditLog();
  } finally {
    // Shut NN1 down before closing the streams so that, on restart, the
    // incremental block reports reach it before the first full report
    // (see the Javadoc above).
    if (noFirstBlockReport) {
      cluster.shutdownNameNode(1);
    }
    for (FSDataOutputStream stm : stms) {
      IOUtils.closeStream(stm);
    }
  }
  banner("Restarting SBN");
  restartStandby();
  // Only the 10 completed blocks count toward safemode; the 5 blocks that
  // were under construction at the time of the edit roll do not.
  assertSafeMode(nn1, 10, 10, 3, 0);
  banner("Allowing SBN to catch up");
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // Once the OP_CLOSE edits are read, the formerly-UC blocks become
  // COMPLETE and must also be counted as safe (HDFS-2742).
  assertSafeMode(nn1, 15, 15, 3, 0);
}
/**
* Regression test for HDFS-2753. In this bug, the following sequence was
* observed:
* - Some blocks are written to DNs while the SBN was down. This causes
* the blockReceived messages to get queued in the BPServiceActor on the
* DN.
* - When the SBN returns, the DN re-registers with the SBN, and then
* flushes its blockReceived queue to the SBN before it sends its
 * first block report. This caused the first block report to be
 * incorrectly ignored.
* - The SBN would become stuck in safemode.
*/
@Test
public void testBlocksAddedWhileStandbyIsDown() throws Exception {
  DFSTestUtil.createFile(fs, new Path("/test"), 3*BLOCK_SIZE, (short) 3, 1L);
  banner("Stopping standby");
  cluster.shutdownNameNode(1);
  // These blocks are written while the SBN is down, so the DNs queue up
  // blockReceived messages destined for it.
  DFSTestUtil.createFile(fs, new Path("/test2"), 3*BLOCK_SIZE, (short) 3, 1L);
  banner("Rolling edit log so standby gets all edits on restart");
  nn0.getRpcServer().rollEditLog();
  restartStandby();
  // The flushed blockReceived queue must not cause the first full block
  // report to be ignored (HDFS-2753): all 6 blocks should be safe.
  assertSafeMode(nn1, 6, 6, 3, 0);
}
/**
* Regression test for HDFS-2804: standby should not populate replication
* queues when exiting safe mode.
*/
@Test
public void testNoPopulatingReplQueuesWhenExitingSafemode() throws Exception {
  DFSTestUtil.createFile(fs, new Path("/test"), 15*BLOCK_SIZE, (short)3, 1L);
  HATestUtil.waitForStandbyToCatchUp(nn0, nn1);
  // get some blocks in the SBN's image
  nn1.getRpcServer().setSafeMode(SafeModeAction.SAFEMODE_ENTER, false);
  NameNodeAdapter.saveNamespace(nn1);
  nn1.getRpcServer().setSafeMode(SafeModeAction.SAFEMODE_LEAVE, false);
  // and some blocks in the edit logs
  DFSTestUtil.createFile(fs, new Path("/test2"), 15*BLOCK_SIZE, (short)3, 1L);
  nn0.getRpcServer().rollEditLog();
  // Take one DN down so some replicas appear missing to the restarted SBN.
  cluster.stopDataNode(1);
  cluster.shutdownNameNode(1);
  //Configuration sbConf = cluster.getConfiguration(1);
  //sbConf.setInt(DFSConfigKeys.DFS_NAMENODE_SAFEMODE_EXTENSION_KEY, 1);
  cluster.restartNameNode(1, false);
  nn1 = cluster.getNameNode(1);
  // Wait for the SBN to leave safemode on its own.
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      return !nn1.isInSafeMode();
    }
  }, 100, 10000);
  // A standby must not populate replication queues when exiting safemode
  // (HDFS-2804): nothing may be counted as under-replicated or pending.
  BlockManagerTestUtil.updateState(nn1.getNamesystem().getBlockManager());
  assertEquals(0L, nn1.getNamesystem().getUnderReplicatedBlocks());
  assertEquals(0L, nn1.getNamesystem().getPendingReplicationBlocks());
}
/**
* Make sure that when we transition to active in safe mode that we don't
* prematurely consider blocks missing just because not all DNs have reported
* yet.
*
* This is a regression test for HDFS-3921.
*/
@Test
public void testNoPopulatingReplQueuesWhenStartingActiveInSafeMode()
    throws IOException {
  DFSTestUtil.createFile(fs, new Path("/test"), 15*BLOCK_SIZE, (short)3, 1L);
  // Stop the DN so that when the NN restarts not all blocks will be reported
  // and the NN won't leave safe mode.
  cluster.stopDataNode(1);
  // Restart the namenode but don't wait for it to hear from all DNs (since
  // one DN is deliberately shut down.)
  cluster.restartNameNode(0, false);
  cluster.transitionToActive(0);
  assertTrue(cluster.getNameNode(0).isInSafeMode());
  // We shouldn't yet consider any blocks "missing" since we're in startup
  // safemode, i.e. not all DNs may have reported.
  assertEquals(0, cluster.getNamesystem(0).getMissingBlocksCount());
}
/**
* Print a big banner in the test log to make debug easier.
*/
static void banner(String string) {
  // Sandwich the message between blank lines and separator bars so it
  // stands out in a busy test log.
  final String decorated =
      "\n\n\n\n================================================\n" +
      string + "\n" +
      "==================================================\n\n";
  LOG.info(decorated);
}
/**
 * DFS#isInSafeMode should check the active NN's safemode in an HA enabled cluster. HDFS-3507
*
* @throws Exception
*/
@Test
public void testIsInSafemode() throws Exception {
  // First, talk to the standby NN directly (no client failover configured):
  // isInSafeMode must be rejected with a StandbyException.
  NameNode standby = cluster.getNameNode(1);
  assertTrue("nn2 should be in standby state", standby.isStandbyState());
  InetSocketAddress standbyAddr = standby.getNameNodeAddress();
  Configuration clientConf = new Configuration();
  DistributedFileSystem directDfs = new DistributedFileSystem();
  try {
    URI standbyUri = URI.create("hdfs://" + standbyAddr.getHostName() + ":"
        + standbyAddr.getPort());
    directDfs.initialize(standbyUri, clientConf);
    directDfs.isInSafeMode();
    fail("StandBy should throw exception for isInSafeMode");
  } catch (IOException e) {
    if (!(e instanceof RemoteException)) {
      throw e;
    }
    RemoteException remote = (RemoteException) e;
    assertEquals("RPC Error code should indicate app failure.",
        RpcErrorCodeProto.ERROR_APPLICATION, remote.getErrorCode());
    IOException sbException = remote.unwrapRemoteException();
    assertTrue("StandBy nn should not support isInSafeMode",
        sbException instanceof StandbyException);
  } finally {
    if (directDfs != null) {
      directDfs.close();
    }
  }
  // Now go through a client configured for failover: the isInSafeMode call
  // should be routed to whichever NN is currently active.
  cluster.transitionToStandby(0);
  cluster.transitionToActive(1);
  cluster.getNameNodeRpc(1).setSafeMode(SafeModeAction.SAFEMODE_ENTER, false);
  DistributedFileSystem dfsWithFailOver = (DistributedFileSystem) fs;
  assertTrue("ANN should be in SafeMode", dfsWithFailOver.isInSafeMode());
  cluster.getNameNodeRpc(1).setSafeMode(SafeModeAction.SAFEMODE_LEAVE, false);
  assertFalse("ANN should be out of SafeMode", dfsWithFailOver.isInSafeMode());
}
/** Test NN crash and client crash/stuck immediately after block allocation */
@Test(timeout = 100000)
public void testOpenFileWhenNNAndClientCrashAfterAddBlock() throws Exception {
  // Require 100% of blocks to be reported before leaving safemode, so the
  // dummy (never-written) block would be noticed if it were counted.
  cluster.getConfiguration(0).set(
      DFSConfigKeys.DFS_NAMENODE_SAFEMODE_THRESHOLD_PCT_KEY, "1.0f");
  String testData = "testData";
  // to make sure we write the full block before creating dummy block at NN.
  cluster.getConfiguration(0).setInt("io.bytes.per.checksum",
      testData.length());
  cluster.restartNameNode(0);
  try {
    cluster.waitActive();
    cluster.transitionToActive(0);
    cluster.transitionToStandby(1);
    DistributedFileSystem dfs = cluster.getFileSystem(0);
    String pathString = "/tmp1.txt";
    Path filePath = new Path(pathString);
    FSDataOutputStream create = dfs.create(filePath,
        FsPermission.getDefault(), true, 1024, (short) 3, testData.length(),
        null);
    create.write(testData.getBytes());
    create.hflush();
    long fileId = ((DFSOutputStream)create.
        getWrappedStream()).getFileId();
    DFSClient client = DFSClientAdapter.getClient(dfs);
    // add one dummy block at NN, but not write to DataNode
    ExtendedBlock previousBlock =
        DFSClientAdapter.getPreviousBlock(client, fileId);
    DFSClientAdapter.getNamenode(client).addBlock(
        pathString,
        client.getClientName(),
        new ExtendedBlock(previousBlock),
        new DatanodeInfo[0],
        DFSClientAdapter.getFileId((DFSOutputStream) create
            .getWrappedStream()), null, null);
    // Simulate the NN crash (restart) with the client "stuck": the output
    // stream is never closed, leaving the dummy last block dangling.
    cluster.restartNameNode(0, true);
    cluster.restartDataNode(0);
    cluster.transitionToActive(0);
    // let the block reports be processed.
    Thread.sleep(2000);
    // The file must still be readable despite the dangling last block.
    FSDataInputStream is = dfs.open(filePath);
    is.close();
    dfs.recoverLease(filePath);// initiate recovery
    assertTrue("Recovery also should be success", dfs.recoverLease(filePath));
  } finally {
    cluster.shutdown();
  }
}
@Test(timeout = 60000)
public void testSafeModeExitAfterTransition() throws Exception {
  DFSTestUtil.createFile(fs, new Path("/test"), 5 * BLOCK_SIZE, (short) 3,
      1L);
  banner("Stopping standby");
  cluster.shutdownNameNode(1);
  DFSTestUtil.createFile(fs, new Path("/test2"), 3 * BLOCK_SIZE, (short) 3,
      1L);
  // Roll edit logs to be read by standby
  nn0.getRpcServer().rollEditLog();
  fs.delete(new Path("/test"), true);
  // Wait till the blocks are deleted from all DNs
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      return cluster.getNamesystem(0).getBlockManager()
          .getPendingDeletionBlocksCount() == 0;
    }
  }, 1000, 10000);
  restartStandby();
  // Wait till all the datanodes are registered.
  GenericTestUtils.waitFor(new Supplier<Boolean>() {
    @Override
    public Boolean get() {
      return cluster.getNamesystem(1).getNumLiveDataNodes() == 3;
    }
  }, 1000, 10000);
  cluster.triggerBlockReports();
  // Fail over to NN1 while it is still in safemode. After reading NN0's
  // edits it should re-evaluate the safemode conditions and be able to
  // exit, rather than staying stuck.
  NameNodeAdapter.abortEditLogs(nn0);
  cluster.shutdownNameNode(0);
  banner(nn1.getNamesystem().getSafemode());
  cluster.transitionToActive(1);
  assertSafeMode(nn1, 3, 3, 3, 0);
}
}
| |
package com.longluo.demo.qrcode.zxing.client.android.encode;
import android.content.Context;
import android.content.Intent;
import android.graphics.Bitmap;
import android.net.Uri;
import android.os.Bundle;
import android.provider.ContactsContract;
import android.telephony.PhoneNumberUtils;
import android.util.Log;
import com.google.zxing.BarcodeFormat;
import com.google.zxing.EncodeHintType;
import com.google.zxing.MultiFormatWriter;
import com.google.zxing.Result;
import com.google.zxing.WriterException;
import com.google.zxing.client.result.AddressBookParsedResult;
import com.google.zxing.client.result.ParsedResult;
import com.google.zxing.client.result.ResultParser;
import com.google.zxing.common.BitMatrix;
import com.longluo.demo.R;
import com.longluo.demo.qrcode.zxing.client.android.Contents;
import com.longluo.demo.qrcode.zxing.client.android.Intents;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.EnumMap;
import java.util.List;
import java.util.Map;
/**
* This class does the work of decoding the user's request and extracting all the data
* to be encoded in a barcode.
*
* @author dswitkin@google.com (Daniel Switkin)
*/
final class QRCodeEncoder {
private static final String TAG = QRCodeEncoder.class.getSimpleName();
private static final int WHITE = 0xFFFFFFFF;
private static final int BLACK = 0xFF000000;
private final Context activity;
private String contents;
private String displayContents;
private String title;
private BarcodeFormat format;
private final int dimension;
private final boolean useVCard;
QRCodeEncoder(Context activity, Intent intent, int dimension, boolean useVCard) throws WriterException {
this.activity = activity;
this.dimension = dimension;
this.useVCard = useVCard;
String action = intent.getAction();
if (Intents.Encode.ACTION.equals(action)) {
encodeContentsFromZXingIntent(intent);
} else if (Intent.ACTION_SEND.equals(action)) {
encodeContentsFromShareIntent(intent);
}
}
String getContents() {
return contents;
}
String getDisplayContents() {
return displayContents;
}
String getTitle() {
return title;
}
boolean isUseVCard() {
return useVCard;
}
// It would be nice if the string encoding lived in the core ZXing library,
// but we use platform specific code like PhoneNumberUtils, so it can't.
private boolean encodeContentsFromZXingIntent(Intent intent) {
// Default to QR_CODE if no format given.
String formatString = intent.getStringExtra(Intents.Encode.FORMAT);
format = null;
if (formatString != null) {
try {
format = BarcodeFormat.valueOf(formatString);
} catch (IllegalArgumentException iae) {
// Ignore it then
}
}
if (format == null || format == BarcodeFormat.QR_CODE) {
String type = intent.getStringExtra(Intents.Encode.TYPE);
if (type == null || type.isEmpty()) {
return false;
}
this.format = BarcodeFormat.QR_CODE;
encodeQRCodeContents(intent, type);
} else {
String data = intent.getStringExtra(Intents.Encode.DATA);
if (data != null && !data.isEmpty()) {
contents = data;
displayContents = data;
title = activity.getString(R.string.contents_text);
}
}
return contents != null && !contents.isEmpty();
}
// Handles send intents from multitude of Android applications
private void encodeContentsFromShareIntent(Intent intent) throws WriterException {
// Check if this is a plain text encoding, or contact
if (intent.hasExtra(Intent.EXTRA_STREAM)) {
encodeFromStreamExtra(intent);
} else {
encodeFromTextExtras(intent);
}
}
private void encodeFromTextExtras(Intent intent) throws WriterException {
// Notice: Google Maps shares both URL and details in one text, bummer!
String theContents = ContactEncoder.trim(intent.getStringExtra(Intent.EXTRA_TEXT));
if (theContents == null) {
theContents = ContactEncoder.trim(intent.getStringExtra("android.intent.extra.HTML_TEXT"));
// Intent.EXTRA_HTML_TEXT
if (theContents == null) {
theContents = ContactEncoder.trim(intent.getStringExtra(Intent.EXTRA_SUBJECT));
if (theContents == null) {
String[] emails = intent.getStringArrayExtra(Intent.EXTRA_EMAIL);
if (emails != null) {
theContents = ContactEncoder.trim(emails[0]);
} else {
theContents = "?";
}
}
}
}
// Trim text to avoid URL breaking.
if (theContents == null || theContents.isEmpty()) {
throw new WriterException("Empty EXTRA_TEXT");
}
contents = theContents;
// We only do QR code.
format = BarcodeFormat.QR_CODE;
if (intent.hasExtra(Intent.EXTRA_SUBJECT)) {
displayContents = intent.getStringExtra(Intent.EXTRA_SUBJECT);
} else if (intent.hasExtra(Intent.EXTRA_TITLE)) {
displayContents = intent.getStringExtra(Intent.EXTRA_TITLE);
} else {
displayContents = contents;
}
title = activity.getString(R.string.contents_text);
}
// Handles send intents from the Contacts app, retrieving a contact as a VCARD.
private void encodeFromStreamExtra(Intent intent) throws WriterException {
format = BarcodeFormat.QR_CODE;
Bundle bundle = intent.getExtras();
if (bundle == null) {
throw new WriterException("No extras");
}
Uri uri = bundle.getParcelable(Intent.EXTRA_STREAM);
if (uri == null) {
throw new WriterException("No EXTRA_STREAM");
}
byte[] vcard;
String vcardString;
InputStream stream = null;
try {
stream = activity.getContentResolver().openInputStream(uri);
if (stream == null) {
throw new WriterException("Can't open stream for " + uri);
}
ByteArrayOutputStream baos = new ByteArrayOutputStream();
byte[] buffer = new byte[2048];
int bytesRead;
while ((bytesRead = stream.read(buffer)) > 0) {
baos.write(buffer, 0, bytesRead);
}
vcard = baos.toByteArray();
vcardString = new String(vcard, 0, vcard.length, "UTF-8");
} catch (IOException ioe) {
throw new WriterException(ioe);
} finally {
if (stream != null) {
try {
stream.close();
} catch (IOException e) {
// continue
}
}
}
Log.d(TAG, "Encoding share intent content:");
Log.d(TAG, vcardString);
Result result = new Result(vcardString, vcard, null, BarcodeFormat.QR_CODE);
ParsedResult parsedResult = ResultParser.parseResult(result);
if (!(parsedResult instanceof AddressBookParsedResult)) {
throw new WriterException("Result was not an address");
}
encodeQRCodeContents((AddressBookParsedResult) parsedResult);
if (contents == null || contents.isEmpty()) {
throw new WriterException("No content to encode");
}
}
private void encodeQRCodeContents(Intent intent, String type) {
switch (type) {
case Contents.Type.TEXT:
String textData = intent.getStringExtra(Intents.Encode.DATA);
if (textData != null && !textData.isEmpty()) {
contents = textData;
displayContents = textData;
title = activity.getString(R.string.contents_text);
}
break;
case Contents.Type.EMAIL:
String emailData = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA));
if (emailData != null) {
contents = "mailto:" + emailData;
displayContents = emailData;
title = activity.getString(R.string.contents_email);
}
break;
case Contents.Type.PHONE:
String phoneData = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA));
if (phoneData != null) {
contents = "tel:" + phoneData;
displayContents = PhoneNumberUtils.formatNumber(phoneData);
title = activity.getString(R.string.contents_phone);
}
break;
case Contents.Type.SMS:
String smsData = ContactEncoder.trim(intent.getStringExtra(Intents.Encode.DATA));
if (smsData != null) {
contents = "sms:" + smsData;
displayContents = PhoneNumberUtils.formatNumber(smsData);
title = activity.getString(R.string.contents_sms);
}
break;
case Contents.Type.CONTACT:
Bundle contactBundle = intent.getBundleExtra(Intents.Encode.DATA);
if (contactBundle != null) {
String name = contactBundle.getString(ContactsContract.Intents.Insert.NAME);
String organization = contactBundle.getString(ContactsContract.Intents.Insert.COMPANY);
String address = contactBundle.getString(ContactsContract.Intents.Insert.POSTAL);
List<String> phones = getAllBundleValues(contactBundle, Contents.PHONE_KEYS);
List<String> phoneTypes = getAllBundleValues(contactBundle, Contents.PHONE_TYPE_KEYS);
List<String> emails = getAllBundleValues(contactBundle, Contents.EMAIL_KEYS);
String url = contactBundle.getString(Contents.URL_KEY);
List<String> urls = url == null ? null : Collections.singletonList(url);
String note = contactBundle.getString(Contents.NOTE_KEY);
ContactEncoder encoder = useVCard ? new VCardContactEncoder() : new MECARDContactEncoder();
String[] encoded = encoder.encode(Collections.singletonList(name),
organization,
Collections.singletonList(address),
phones,
phoneTypes,
emails,
urls,
note);
// Make sure we've encoded at least one field.
if (!encoded[1].isEmpty()) {
contents = encoded[0];
displayContents = encoded[1];
title = activity.getString(R.string.contents_contact);
}
}
break;
case Contents.Type.LOCATION:
Bundle locationBundle = intent.getBundleExtra(Intents.Encode.DATA);
if (locationBundle != null) {
// These must use Bundle.getFloat(), not getDouble(), it's part of the API.
float latitude = locationBundle.getFloat("LAT", Float.MAX_VALUE);
float longitude = locationBundle.getFloat("LONG", Float.MAX_VALUE);
if (latitude != Float.MAX_VALUE && longitude != Float.MAX_VALUE) {
contents = "geo:" + latitude + ',' + longitude;
displayContents = latitude + "," + longitude;
title = activity.getString(R.string.contents_location);
}
}
break;
}
}
private static List<String> getAllBundleValues(Bundle bundle, String[] keys) {
List<String> values = new ArrayList<>(keys.length);
for (String key : keys) {
Object value = bundle.get(key);
values.add(value == null ? null : value.toString());
}
return values;
}
/**
 * Encodes a parsed address-book result (e.g. from a previously scanned contact
 * barcode) into QR code contents, using vCard or MECARD format depending on the
 * user's preference. On success this sets {@code contents}, {@code displayContents}
 * and {@code title}; otherwise those fields are left untouched.
 */
private void encodeQRCodeContents(AddressBookParsedResult contact) {
    ContactEncoder encoder;
    if (useVCard) {
        encoder = new VCardContactEncoder();
    } else {
        encoder = new MECARDContactEncoder();
    }
    // Phone types and a note are not available on a parsed result, hence the nulls.
    String[] encoded = encoder.encode(toList(contact.getNames()),
                                      contact.getOrg(),
                                      toList(contact.getAddresses()),
                                      toList(contact.getPhoneNumbers()),
                                      null,
                                      toList(contact.getEmails()),
                                      toList(contact.getURLs()),
                                      null);
    // Make sure we've encoded at least one field.
    if (!encoded[1].isEmpty()) {
        contents = encoded[0];
        displayContents = encoded[1];
        title = activity.getString(R.string.contents_contact);
    }
}
/**
 * Wraps the given array in a fixed-size list view, or returns null when the
 * array itself is null.
 */
private static List<String> toList(String[] values) {
    if (values == null) {
        return null;
    }
    return Arrays.asList(values);
}
/**
 * Renders the current {@code contents} into a square barcode bitmap of
 * {@code dimension} x {@code dimension} pixels. Returns null when there is nothing
 * to encode, or when the selected barcode format cannot represent the contents.
 *
 * @throws WriterException if the underlying writer fails for a supported format
 */
Bitmap encodeAsBitmap() throws WriterException {
    String contentsToEncode = contents;
    if (contentsToEncode == null) {
        return null;
    }
    // Only pass a character-set hint when the text falls outside ISO-8859-1.
    Map<EncodeHintType, Object> hints = null;
    String encoding = guessAppropriateEncoding(contentsToEncode);
    if (encoding != null) {
        hints = new EnumMap<>(EncodeHintType.class);
        hints.put(EncodeHintType.CHARACTER_SET, encoding);
    }
    BitMatrix matrix;
    try {
        matrix = new MultiFormatWriter().encode(contentsToEncode, format, dimension, dimension, hints);
    } catch (IllegalArgumentException iae) {
        // Unsupported format
        return null;
    }
    int width = matrix.getWidth();
    int height = matrix.getHeight();
    // Flatten the bit matrix row by row into an ARGB pixel buffer.
    int[] pixels = new int[width * height];
    int index = 0;
    for (int y = 0; y < height; y++) {
        for (int x = 0; x < width; x++) {
            pixels[index++] = matrix.get(x, y) ? BLACK : WHITE;
        }
    }
    Bitmap bitmap = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
    bitmap.setPixels(pixels, 0, width, 0, 0, width, height);
    return bitmap;
}
/**
 * Very crude encoding guess: returns "UTF-8" as soon as any character above U+00FF
 * is found, otherwise null (meaning the writer's default encoding suffices).
 */
private static String guessAppropriateEncoding(CharSequence contents) {
    int length = contents.length();
    int i = 0;
    while (i < length) {
        if (contents.charAt(i) > 0xFF) {
            return "UTF-8";
        }
        i++;
    }
    return null;
}
}
| |
/*
* The MIT License
*
* Copyright (c) 2004-2009, Sun Microsystems, Inc., Kohsuke Kawaguchi
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package hudson.model;
import hudson.Extension;
import hudson.ExtensionList;
import hudson.ExtensionPoint;
import hudson.ProxyConfiguration;
import hudson.util.FormValidation;
import hudson.util.FormValidation.Kind;
import hudson.util.QuotedStringTokenizer;
import hudson.util.TextFile;
import static hudson.util.TimeUnit2.DAYS;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.net.URLEncoder;
import java.util.logging.Logger;
import jenkins.model.DownloadSettings;
import jenkins.model.Jenkins;
import jenkins.util.JSONSignatureValidator;
import net.sf.json.JSONException;
import net.sf.json.JSONObject;
import org.apache.commons.io.IOUtils;
import org.kohsuke.accmod.Restricted;
import org.kohsuke.accmod.restrictions.NoExternalUse;
import org.kohsuke.stapler.Stapler;
import org.kohsuke.stapler.StaplerRequest;
import org.kohsuke.stapler.StaplerResponse;
/**
* Service for plugins to periodically retrieve update data files
* (like the one in the update center) through browsers.
*
* <p>
* Because the retrieval of the file goes through XmlHttpRequest,
* we cannot reliably pass around binary.
*
* @author Kohsuke Kawaguchi
*/
@Extension
public class DownloadService extends PageDecorator {
    /**
     * Builds up an HTML fragment that starts all the download jobs.
     * For every registered {@link Downloadable} that is due, this emits a script tag
     * asking the browser to fetch the metadata and post it back to
     * {@code .../byId/<id>/postBack}. Returns an empty string when browser-based
     * downloads are disabled, updates are globally disabled, or the browser is known
     * not to support {@code postMessage}.
     */
    public String generateFragment() {
        // Browser-based retrieval may be disabled in favor of server-side downloads.
        if (!DownloadSettings.get().isUseBrowser()) {
            return "";
        }
        if (neverUpdate) return "";
        if (doesNotSupportPostMessage()) return "";
        StringBuilder buf = new StringBuilder();
        if(Jenkins.getInstance().hasPermission(Jenkins.READ)) {
            long now = System.currentTimeMillis();
            for (Downloadable d : Downloadable.all()) {
                // Schedule a download only when it is due, throttled to at most one
                // attempt per Downloadable every 10 seconds.
                if(d.getDue()<now && d.lastAttempt+10*1000<now) {
                    buf.append("<script>")
                       .append("Behaviour.addLoadEvent(function() {")
                       .append(" downloadService.download(")
                       .append(QuotedStringTokenizer.quote(d.getId()))
                       .append(',')
                       .append(QuotedStringTokenizer.quote(mapHttps(d.getUrl())))
                       .append(',')
                       .append("{version:"+QuotedStringTokenizer.quote(Jenkins.VERSION)+'}')
                       .append(',')
                       .append(QuotedStringTokenizer.quote(Stapler.getCurrentRequest().getContextPath()+'/'+getUrl()+"/byId/"+d.getId()+"/postBack"))
                       .append(',')
                       .append("null);")
                       .append("});")
                       .append("</script>");
                    d.lastAttempt = now;
                }
            }
        }
        return buf.toString();
    }
    /**
     * Returns true when the requesting browser is known not to support
     * {@code postMessage}, in which case no download fragment should be emitted.
     * Errs on the side of returning false when the request or User-Agent is unknown.
     */
    private boolean doesNotSupportPostMessage() {
        StaplerRequest req = Stapler.getCurrentRequest();
        if (req==null) return false;
        String ua = req.getHeader("User-Agent");
        if (ua==null) return false;
        // according to http://caniuse.com/#feat=x-doc-messaging, IE <=7 doesn't support postMessage
        // see http://www.useragentstring.com/pages/Internet%20Explorer/ for user agents
        // we want to err on the cautious side here.
        // Because of JENKINS-15105, we can't serve signed metadata from JSON, which means we need to be
        // using a modern browser as a vehicle to request these data. This check is here to prevent Jenkins
        // from using older browsers that are known not to support postMessage as the vehicle.
        return ua.contains("Windows") && (ua.contains(" MSIE 5.") || ua.contains(" MSIE 6.") || ua.contains(" MSIE 7."));
    }
    /**
     * Rewrites the known update-center URL to HTTPS when this Jenkins instance
     * itself is served over HTTPS, to avoid mixed-content browser warnings.
     */
    private String mapHttps(String url) {
        /*
        HACKISH:
        Loading scripts in HTTP from HTTPS pages cause browsers to issue a warning dialog.
        The elegant way to solve the problem is to always load update center from HTTPS,
        but our backend mirroring scheme isn't ready for that. So this hack serves regular
        traffic in HTTP server, and only use HTTPS update center for Jenkins in HTTPS.
        We'll monitor the traffic to see if we can sustain this added traffic.
        */
        if (url.startsWith("http://updates.jenkins-ci.org/") && Jenkins.getInstance().isRootUrlSecure())
            return "https"+url.substring(4);
        return url;
    }
    /**
     * Gets {@link Downloadable} by its ID.
     * Used to bind them to URL.
     *
     * @return the matching {@link Downloadable}, or null if no extension has that ID
     */
    public Downloadable getById(String id) {
        for (Downloadable d : Downloadable.all())
            if(d.getId().equals(id))
                return d;
        return null;
    }
    /**
     * Loads JSON from a JSONP URL.
     * Metadata for downloadables and update centers is offered in two formats, both designed for download from the browser (predating {@link DownloadSettings}):
     * HTML using {@code postMessage} for newer browsers, and JSONP as a fallback.
     * Confusingly, the JSONP files are given the {@code *.json} file extension, when they are really JavaScript and should be {@code *.js}.
     * This method extracts the JSON from a JSONP URL, since that is what we actually want when we download from the server.
     * (Currently the true JSON is not published separately, and extracting from the {@code *.json.html} is more work.)
     * @param src a URL to a JSONP file (typically including {@code id} and {@code version} query parameters)
     * @return the embedded JSON text
     * @throws IOException if either downloading or processing failed
     */
    @Restricted(NoExternalUse.class)
    public static String loadJSON(URL src) throws IOException {
        InputStream is = ProxyConfiguration.open(src).getInputStream();
        try {
            String jsonp = IOUtils.toString(is, "UTF-8");
            // The payload is JSONP (a function call wrapping an object literal);
            // strip everything outside the outermost {...}.
            int start = jsonp.indexOf('{');
            int end = jsonp.lastIndexOf('}');
            if (start >= 0 && end > start) {
                return jsonp.substring(start, end + 1);
            } else {
                throw new IOException("Could not find JSON in " + src);
            }
        } finally {
            is.close();
        }
    }
    /**
     * Represents a periodically updated JSON data file obtained from a remote URL.
     *
     * <p>
     * This mechanism is one of the basis of the update center, which involves fetching
     * up-to-date data file.
     *
     * @since 1.305
     */
    public static class Downloadable implements ExtensionPoint {
        private final String id;
        private final String url;
        private final long interval;
        // Timestamp (ms) when the next retrieval becomes due; 0 means "not yet computed".
        private volatile long due=0;
        // Timestamp (ms) of the last browser-initiated download attempt; used by
        // generateFragment() to throttle retries.
        private volatile long lastAttempt=Long.MIN_VALUE;
        /**
         *
         * @param url
         * URL relative to {@link UpdateCenter#getDefaultBaseUrl()}.
         * So if this string is "foo.json", the ultimate URL will be
         * something like "http://updates.jenkins-ci.org/updates/foo.json"
         *
         * For security and privacy reasons, we don't allow the retrieval
         * from random locations.
         */
        public Downloadable(String id, String url, long interval) {
            this.id = id;
            this.url = url;
            this.interval = interval;
        }
        /**
         * Uses this extension's class name as the ID and {@code <id>.json} as the URL.
         */
        public Downloadable() {
            this.id = getClass().getName().replace('$','.');
            this.url = this.id+".json";
            this.interval = DEFAULT_INTERVAL;
        }
        /**
         * Uses the class name as an ID.
         */
        public Downloadable(Class id) {
            this(id.getName().replace('$','.'));
        }
        public Downloadable(String id) {
            this(id,id+".json");
        }
        public Downloadable(String id, String url) {
            this(id,url, DEFAULT_INTERVAL);
        }
        public String getId() {
            return id;
        }
        /**
         * URL to download.
         */
        public String getUrl() {
            return Jenkins.getInstance().getUpdateCenter().getDefaultBaseUrl()+"updates/"+url;
        }
        /**
         * How often do we retrieve the new image?
         *
         * @return
         * number of milliseconds between retrieval.
         */
        public long getInterval() {
            return interval;
        }
        /**
         * This is where the retrieved file will be stored.
         */
        public TextFile getDataFile() {
            return new TextFile(new File(Jenkins.getInstance().getRootDir(),"updates/"+id));
        }
        /**
         * When shall we retrieve this file next time?
         */
        public long getDue() {
            if(due==0)
                // if the file doesn't exist, this code should result
                // in a very small (but >0) due value, which should trigger
                // the retrieval immediately.
                due = getDataFile().file.lastModified()+interval;
            return due;
        }
        /**
         * Loads the current file into JSON and returns it, or null
         * if no data exists.
         */
        public JSONObject getData() throws IOException {
            TextFile df = getDataFile();
            if(df.exists())
                try {
                    return JSONObject.fromObject(df.read());
                } catch (JSONException e) {
                    df.delete(); // if we keep this file, it will cause repeated failures
                    throw new IOException("Failed to parse "+df+" into JSON",e);
                }
            return null;
        }
        /**
         * This is where the browser sends us the data.
         * Validates (and, if signature checking is enabled, verifies) the posted JSON
         * and persists it to {@link #getDataFile()}.
         */
        public void doPostBack(StaplerRequest req, StaplerResponse rsp) throws IOException {
            if (!DownloadSettings.get().isUseBrowser()) {
                throw new IOException("not allowed");
            }
            long dataTimestamp = System.currentTimeMillis();
            due = dataTimestamp+getInterval(); // success or fail, don't try too often
            String json = IOUtils.toString(req.getInputStream(),"UTF-8");
            FormValidation e = load(json, dataTimestamp);
            if (e.kind != Kind.OK) {
                // Log the failure, then rethrow the validation result to fail the request.
                LOGGER.severe(e.renderHtml());
                throw e;
            }
            rsp.setContentType("text/plain"); // So browser won't try to parse response
        }
        /**
         * Verifies (when {@link DownloadService#signatureCheck} is on) and persists the
         * given JSON, stamping the data file with the supplied timestamp so
         * {@link #getDue()} schedules the next retrieval correctly.
         */
        private FormValidation load(String json, long dataTimestamp) throws IOException {
            JSONObject o = JSONObject.fromObject(json);
            if (signatureCheck) {
                FormValidation e = new JSONSignatureValidator("downloadable '"+id+"'").verifySignature(o);
                if (e.kind!= Kind.OK) {
                    return e;
                }
            }
            TextFile df = getDataFile();
            df.write(json);
            df.file.setLastModified(dataTimestamp);
            LOGGER.info("Obtained the updated data file for "+id);
            return FormValidation.ok();
        }
        /**
         * Server-side retrieval path: downloads the metadata directly (bypassing the
         * browser) and loads it through the same validation as a browser post-back.
         */
        @Restricted(NoExternalUse.class)
        public FormValidation updateNow() throws IOException {
            return load(loadJSON(new URL(getUrl() + "?id=" + URLEncoder.encode(getId(), "UTF-8") + "&version=" + URLEncoder.encode(Jenkins.VERSION, "UTF-8"))), System.currentTimeMillis());
        }
        /**
         * Returns all the registered {@link Downloadable}s.
         */
        public static ExtensionList<Downloadable> all() {
            return Jenkins.getInstance().getExtensionList(Downloadable.class);
        }
        /**
         * Returns the {@link Downloadable} that has the given ID.
         */
        public static Downloadable get(String id) {
            for (Downloadable d : all()) {
                if(d.id.equals(id))
                    return d;
            }
            return null;
        }
        private static final Logger LOGGER = Logger.getLogger(Downloadable.class.getName());
        // Default retrieval interval; overridable via the system property
        // "<class name>.defaultInterval" (milliseconds).
        private static final long DEFAULT_INTERVAL =
            Long.getLong(Downloadable.class.getName()+".defaultInterval", DAYS.toMillis(1));
    }
    // Kill switch: set the system property "<class name>.never" to true to disable
    // all browser-based metadata downloads.
    public static boolean neverUpdate = Boolean.getBoolean(DownloadService.class.getName()+".never");
    /**
     * May be used to temporarily disable signature checking on {@link DownloadService} and {@link UpdateCenter}.
     * Useful when upstream signatures are broken, such as due to expired certificates.
     * Should only be used when {@link DownloadSettings#isUseBrowser};
     * disabling signature checks for in-browser downloads is <em>very dangerous</em> as unprivileged users could submit spoofed metadata!
     */
    public static boolean signatureCheck = !Boolean.getBoolean(DownloadService.class.getName()+".noSignatureCheck");
}
| |
/*
* Copyright 2013 Google Inc.
* Copyright 2014 Andreas Schildbach
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.bitcoinj.protocols.payments;
import org.bitcoinj.core.*;
import org.bitcoinj.crypto.X509Utils;
import org.bitcoinj.script.ScriptBuilder;
import com.google.common.base.MoreObjects;
import com.google.common.collect.ImmutableList;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import org.bitcoin.protocols.payments.Protos;
import javax.annotation.Nullable;
import java.io.Serializable;
import java.security.*;
import java.security.cert.*;
import java.security.cert.Certificate;
import java.util.ArrayList;
import java.util.List;
/**
* <p>Utility methods and constants for working with <a href="https://github.com/bitcoin/bips/blob/master/bip-0070.mediawiki">
* BIP 70 aka the payment protocol</a>. These are low level wrappers around the protocol buffers. If you're implementing
* a wallet app, look at {@link PaymentSession} for a higher level API that should simplify working with the protocol.</p>
*
* <p>BIP 70 defines a binary, protobuf based protocol that runs directly between sender and receiver of funds. Payment
* protocol data does not flow over the Bitcoin P2P network or enter the block chain. It's instead for data that is only
* of interest to the parties involved but isn't otherwise needed for consensus.</p>
*/
public class PaymentProtocol {
    // MIME types as defined in BIP71.
    public static final String MIMETYPE_PAYMENTREQUEST = "application/bitcoin-paymentrequest";
    public static final String MIMETYPE_PAYMENT = "application/bitcoin-payment";
    public static final String MIMETYPE_PAYMENTACK = "application/bitcoin-paymentack";
    /**
     * Create a payment request with one standard pay to address output. You may want to sign the request using
     * {@link #signPaymentRequest}. Use {@link Protos.PaymentRequest.Builder#build} to get the actual payment
     * request.
     *
     * @param params network parameters
     * @param amount amount of coins to request, or null
     * @param toAddress address to request coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param paymentUrl URL to send payment message to, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment request, in its builder form
     */
    public static Protos.PaymentRequest.Builder createPaymentRequest(NetworkParameters params,
            @Nullable Coin amount, Address toAddress, @Nullable String memo, @Nullable String paymentUrl,
            @Nullable byte[] merchantData) {
        // Delegates to the multi-output variant with a single pay-to-address output.
        return createPaymentRequest(params, ImmutableList.of(createPayToAddressOutput(amount, toAddress)), memo,
                paymentUrl, merchantData);
    }
    /**
     * Create a payment request. You may want to sign the request using {@link #signPaymentRequest}. Use
     * {@link Protos.PaymentRequest.Builder#build} to get the actual payment request.
     *
     * @param params network parameters
     * @param outputs list of outputs to request coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param paymentUrl URL to send payment message to, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment request, in its builder form
     */
    public static Protos.PaymentRequest.Builder createPaymentRequest(NetworkParameters params,
            List<Protos.Output> outputs, @Nullable String memo, @Nullable String paymentUrl,
            @Nullable byte[] merchantData) {
        final Protos.PaymentDetails.Builder paymentDetails = Protos.PaymentDetails.newBuilder();
        paymentDetails.setNetwork(params.getPaymentProtocolId());
        for (Protos.Output output : outputs)
            paymentDetails.addOutputs(output);
        if (memo != null)
            paymentDetails.setMemo(memo);
        if (paymentUrl != null)
            paymentDetails.setPaymentUrl(paymentUrl);
        if (merchantData != null)
            paymentDetails.setMerchantData(ByteString.copyFrom(merchantData));
        // Stamp the request with its creation time (in seconds).
        paymentDetails.setTime(Utils.currentTimeSeconds());
        final Protos.PaymentRequest.Builder paymentRequest = Protos.PaymentRequest.newBuilder();
        paymentRequest.setSerializedPaymentDetails(paymentDetails.build().toByteString());
        return paymentRequest;
    }
    /**
     * Parse a payment request.
     *
     * @param paymentRequest payment request to parse
     * @return instance of {@link PaymentSession}, used as a value object
     * @throws PaymentProtocolException
     */
    public static PaymentSession parsePaymentRequest(Protos.PaymentRequest paymentRequest)
            throws PaymentProtocolException {
        return new PaymentSession(paymentRequest, false, null);
    }
    /**
     * Sign the provided payment request.
     *
     * @param paymentRequest Payment request to sign, in its builder form.
     * @param certificateChain Certificate chain to send with the payment request, ordered from client certificate to root
     * certificate. The root certificate itself may be omitted.
     * @param privateKey The key to sign with. Must match the public key from the first certificate of the certificate chain.
     */
    public static void signPaymentRequest(Protos.PaymentRequest.Builder paymentRequest,
            X509Certificate[] certificateChain, PrivateKey privateKey) {
        try {
            final Protos.X509Certificates.Builder certificates = Protos.X509Certificates.newBuilder();
            for (final Certificate certificate : certificateChain)
                certificates.addCertificate(ByteString.copyFrom(certificate.getEncoded()));
            paymentRequest.setPkiType("x509+sha256");
            paymentRequest.setPkiData(certificates.build().toByteString());
            // The signature is computed over the serialized message with an EMPTY
            // signature field; the real signature is filled in afterwards.
            paymentRequest.setSignature(ByteString.EMPTY);
            final Protos.PaymentRequest paymentRequestToSign = paymentRequest.build();
            final String algorithm;
            if ("RSA".equalsIgnoreCase(privateKey.getAlgorithm()))
                algorithm = "SHA256withRSA";
            else
                // Only RSA keys are supported by this signing path.
                throw new IllegalStateException(privateKey.getAlgorithm());
            final Signature signature = Signature.getInstance(algorithm);
            signature.initSign(privateKey);
            signature.update(paymentRequestToSign.toByteArray());
            paymentRequest.setSignature(ByteString.copyFrom(signature.sign()));
        } catch (final GeneralSecurityException x) {
            // Should never happen so don't make users have to think about it.
            throw new RuntimeException(x);
        }
    }
    /**
     * Uses the provided PKI method to find the corresponding public key and verify the provided signature.
     *
     * @param paymentRequest Payment request to verify.
     * @param trustStore KeyStore of trusted root certificate authorities.
     * @return verification data, or null if no PKI method was specified in the {@link Protos.PaymentRequest}.
     * @throws PaymentProtocolException if payment request could not be verified.
     */
    @Nullable
    public static PkiVerificationData verifyPaymentRequestPki(Protos.PaymentRequest paymentRequest, KeyStore trustStore)
            throws PaymentProtocolException {
        List<X509Certificate> certs = null;
        try {
            final String pkiType = paymentRequest.getPkiType();
            if ("none".equals(pkiType))
                // Nothing to verify. Everything is fine. Move along.
                return null;
            String algorithm;
            if ("x509+sha256".equals(pkiType))
                algorithm = "SHA256withRSA";
            else if ("x509+sha1".equals(pkiType))
                algorithm = "SHA1withRSA";
            else
                throw new PaymentProtocolException.InvalidPkiType("Unsupported PKI type: " + pkiType);
            Protos.X509Certificates protoCerts = Protos.X509Certificates.parseFrom(paymentRequest.getPkiData());
            if (protoCerts.getCertificateCount() == 0)
                throw new PaymentProtocolException.InvalidPkiData("No certificates provided in message: server config error");
            // Parse the certs and turn into a certificate chain object. Cert factories can parse both DER and base64.
            // The ordering of certificates is defined by the payment protocol spec to be the same as what the Java
            // crypto API requires - convenient!
            CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            certs = new ArrayList<>();
            for (ByteString bytes : protoCerts.getCertificateList())
                certs.add((X509Certificate) certificateFactory.generateCertificate(bytes.newInput()));
            CertPath path = certificateFactory.generateCertPath(certs);
            // Retrieves the most-trusted CAs from keystore.
            PKIXParameters params = new PKIXParameters(trustStore);
            // Revocation not supported in the current version.
            params.setRevocationEnabled(false);
            // Now verify the certificate chain is correct and trusted. This lets us get an identity linked pubkey.
            CertPathValidator validator = CertPathValidator.getInstance("PKIX");
            PKIXCertPathValidatorResult result = (PKIXCertPathValidatorResult) validator.validate(path, params);
            PublicKey publicKey = result.getPublicKey();
            // OK, we got an identity, now check it was used to sign this message.
            Signature signature = Signature.getInstance(algorithm);
            // Note that we don't use signature.initVerify(certs.get(0)) here despite it being the most obvious
            // way to set it up, because we don't care about the constraints specified on the certificates: any
            // cert that links a key to a domain name or other identity will do for us.
            signature.initVerify(publicKey);
            // Re-serialize the request with an empty signature field, mirroring how
            // signPaymentRequest() produced the signed bytes.
            Protos.PaymentRequest.Builder reqToCheck = paymentRequest.toBuilder();
            reqToCheck.setSignature(ByteString.EMPTY);
            signature.update(reqToCheck.build().toByteArray());
            if (!signature.verify(paymentRequest.getSignature().toByteArray()))
                throw new PaymentProtocolException.PkiVerificationException("Invalid signature, this payment request is not valid.");
            // Signature verifies, get the names from the identity we just verified for presentation to the user.
            final X509Certificate cert = certs.get(0);
            String displayName = X509Utils.getDisplayNameFromCertificate(cert, true);
            if (displayName == null)
                throw new PaymentProtocolException.PkiVerificationException("Could not extract name from certificate");
            // Everything is peachy. Return some useful data to the caller.
            return new PkiVerificationData(displayName, publicKey, result.getTrustAnchor());
        } catch (InvalidProtocolBufferException e) {
            // Data structures are malformed.
            throw new PaymentProtocolException.InvalidPkiData(e);
        } catch (CertificateException e) {
            // The X.509 certificate data didn't parse correctly.
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (NoSuchAlgorithmException e) {
            // Should never happen so don't make users have to think about it. PKIX is always present.
            throw new RuntimeException(e);
        } catch (InvalidAlgorithmParameterException e) {
            throw new RuntimeException(e);
        } catch (CertPathValidatorException e) {
            // The certificate chain isn't known or trusted, probably, the server is using an SSL root we don't
            // know about and the user needs to upgrade to a new version of the software (or import a root cert).
            throw new PaymentProtocolException.PkiVerificationException(e, certs);
        } catch (InvalidKeyException e) {
            // Shouldn't happen if the certs verified correctly.
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (SignatureException e) {
            // Something went wrong during hashing (yes, despite the name, this does not mean the sig was invalid).
            throw new PaymentProtocolException.PkiVerificationException(e);
        } catch (KeyStoreException e) {
            throw new RuntimeException(e);
        }
    }
    /**
     * Information about the X.509 signature's issuer and subject.
     */
    public static class PkiVerificationData {
        /** Display name of the payment requestor, could be a domain name, email address, legal name, etc */
        public final String displayName;
        /** SSL public key that was used to sign. */
        public final PublicKey merchantSigningKey;
        /** Object representing the CA that verified the merchant's ID */
        public final TrustAnchor rootAuthority;
        /** String representing the display name of the CA that verified the merchant's ID */
        public final String rootAuthorityName;
        /**
         * Constructed by {@link #verifyPaymentRequestPki} after a successful chain
         * validation; derives the root authority's display name from the trust anchor.
         */
        private PkiVerificationData(@Nullable String displayName, PublicKey merchantSigningKey,
                TrustAnchor rootAuthority) throws PaymentProtocolException.PkiVerificationException {
            try {
                this.displayName = displayName;
                this.merchantSigningKey = merchantSigningKey;
                this.rootAuthority = rootAuthority;
                this.rootAuthorityName = X509Utils.getDisplayNameFromCertificate(rootAuthority.getTrustedCert(), true);
            } catch (CertificateParsingException x) {
                throw new PaymentProtocolException.PkiVerificationException(x);
            }
        }
        @Override
        public String toString() {
            return MoreObjects.toStringHelper(this)
                    .add("displayName", displayName)
                    .add("rootAuthorityName", rootAuthorityName)
                    .add("merchantSigningKey", merchantSigningKey)
                    .add("rootAuthority", rootAuthority)
                    .toString();
        }
    }
    /**
     * Create a payment message with one standard pay to address output.
     *
     * @param transactions one or more transactions that satisfy the requested outputs.
     * @param refundAmount amount of coins to request as a refund, or null if no refund.
     * @param refundAddress address to refund coins to
     * @param memo arbitrary, user readable memo, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment message
     */
    public static Protos.Payment createPaymentMessage(List<Transaction> transactions,
            @Nullable Coin refundAmount, @Nullable Address refundAddress, @Nullable String memo,
            @Nullable byte[] merchantData) {
        if (refundAddress != null) {
            // A refund address without an amount is ambiguous, so reject it.
            if (refundAmount == null)
                throw new IllegalArgumentException("Specify refund amount if refund address is specified.");
            return createPaymentMessage(transactions,
                    ImmutableList.of(createPayToAddressOutput(refundAmount, refundAddress)), memo, merchantData);
        } else {
            return createPaymentMessage(transactions, null, memo, merchantData);
        }
    }
    /**
     * Create a payment message. This wraps up transaction data along with anything else useful for making a payment.
     *
     * @param transactions transactions to include with the payment message
     * @param refundOutputs list of outputs to refund coins to, or null
     * @param memo arbitrary, user readable memo, or null if none
     * @param merchantData arbitrary merchant data, or null if none
     * @return created payment message
     */
    public static Protos.Payment createPaymentMessage(List<Transaction> transactions,
            @Nullable List<Protos.Output> refundOutputs, @Nullable String memo, @Nullable byte[] merchantData) {
        Protos.Payment.Builder builder = Protos.Payment.newBuilder();
        for (Transaction transaction : transactions) {
            // Sanity-check each transaction before serializing it into the message.
            transaction.verify();
            builder.addTransactions(ByteString.copyFrom(transaction.unsafeBitcoinSerialize()));
        }
        if (refundOutputs != null) {
            for (Protos.Output output : refundOutputs)
                builder.addRefundTo(output);
        }
        if (memo != null)
            builder.setMemo(memo);
        if (merchantData != null)
            builder.setMerchantData(ByteString.copyFrom(merchantData));
        return builder.build();
    }
    /**
     * Parse transactions from payment message.
     *
     * @param params network parameters (needed for transaction deserialization)
     * @param paymentMessage payment message to parse
     * @return list of transactions
     */
    public static List<Transaction> parseTransactionsFromPaymentMessage(NetworkParameters params,
            Protos.Payment paymentMessage) {
        final List<Transaction> transactions = new ArrayList<>(paymentMessage.getTransactionsCount());
        for (final ByteString transaction : paymentMessage.getTransactionsList())
            transactions.add(params.getDefaultSerializer().makeTransaction(transaction.toByteArray()));
        return transactions;
    }
    /**
     * Message returned by the merchant in response to a Payment message.
     */
    public static class Ack {
        @Nullable private final String memo;
        Ack(@Nullable String memo) {
            this.memo = memo;
        }
        /**
         * Returns the memo included by the merchant in the payment ack. This message is typically displayed to the user
         * as a notification (e.g. "Your payment was received and is being processed"). If none was provided, returns
         * null.
         */
        @Nullable public String getMemo() {
            return memo;
        }
    }
    /**
     * Create a payment ack.
     *
     * @param paymentMessage payment message to send with the ack
     * @param memo arbitrary, user readable memo, or null if none
     * @return created payment ack
     */
    public static Protos.PaymentACK createPaymentAck(Protos.Payment paymentMessage, @Nullable String memo) {
        final Protos.PaymentACK.Builder builder = Protos.PaymentACK.newBuilder();
        builder.setPayment(paymentMessage);
        if (memo != null)
            builder.setMemo(memo);
        return builder.build();
    }
    /**
     * Parse payment ack into an object.
     */
    public static Ack parsePaymentAck(Protos.PaymentACK paymentAck) {
        final String memo = paymentAck.hasMemo() ? paymentAck.getMemo() : null;
        return new Ack(memo);
    }
    /**
     * Create a standard pay to address output for usage in {@link #createPaymentRequest} and
     * {@link #createPaymentMessage}.
     *
     * @param amount amount to pay, or null
     * @param address address to pay to
     * @return output
     */
    public static Protos.Output createPayToAddressOutput(@Nullable Coin amount, Address address) {
        Protos.Output.Builder output = Protos.Output.newBuilder();
        if (amount != null) {
            final NetworkParameters params = address.getParameters();
            if (params.hasMaxMoney() && amount.compareTo(params.getMaxMoney()) > 0)
                throw new IllegalArgumentException("Amount too big: " + amount);
            output.setAmount(amount.value);
        } else {
            // No explicit amount requested; encode as zero.
            output.setAmount(0);
        }
        output.setScript(ByteString.copyFrom(ScriptBuilder.createOutputScript(address).getProgram()));
        return output.build();
    }
    /**
     * Value object to hold amount/script pairs.
     */
    public static class Output implements Serializable {
        @Nullable public final Coin amount;
        public final byte[] scriptData;
        /** Creates an output with the given optional amount and raw script bytes. */
        public Output(@Nullable Coin amount, byte[] scriptData) {
            this.amount = amount;
            this.scriptData = scriptData;
        }
    }
}
| |
package com.github.wovnio.wovnjava;
import java.util.ArrayList;
import java.util.Map;
import java.util.LinkedHashMap;
import junit.framework.TestCase;
public class PathUrlLanguagePatternHandlerTest extends TestCase {
private Lang english;
private Lang japanese;
private Lang french;
private Lang chinese;
private Lang defaultLang;
private ArrayList<Lang> supportedLangs;
/**
 * Prepares the four supported languages used by every test; the default language
 * is chosen later by the factory helpers.
 */
protected void setUp() throws Exception {
    this.english = Lang.get("en");
    this.japanese = Lang.get("ja");
    this.french = Lang.get("fr");
    this.chinese = Lang.get("zh-cht");
    ArrayList<Lang> langs = new ArrayList<Lang>();
    langs.add(this.english);
    langs.add(this.japanese);
    langs.add(this.french);
    langs.add(this.chinese);
    this.supportedLangs = langs;
}
/**
 * Builds a handler with NO language-code aliases configured, using the given
 * default language and site prefix path.
 */
private PathUrlLanguagePatternHandler create(Lang defaultLanguage, String sitePrefixPath) {
    this.defaultLang = defaultLanguage;
    Map<Lang, String> emptyAliasSetting = new LinkedHashMap<Lang, String>();
    LanguageAliases aliases = new LanguageAliases(this.supportedLangs, emptyAliasSetting, this.defaultLang);
    return new PathUrlLanguagePatternHandler(this.defaultLang, aliases, sitePrefixPath);
}
private PathUrlLanguagePatternHandler create(String sitePrefixPath) {
return create(this.english, sitePrefixPath);
}
private PathUrlLanguagePatternHandler createWithAliases(Lang defaultLanguage, String sitePrefixPath) {
this.defaultLang = defaultLanguage;
Map<Lang, String> langCodeAliasSetting = new LinkedHashMap<Lang, String>();
langCodeAliasSetting.put(this.english, "us");
langCodeAliasSetting.put(this.japanese, "japan");
LanguageAliases languageAliasesConfigured = new LanguageAliases(this.supportedLangs, langCodeAliasSetting, this.defaultLang);
return new PathUrlLanguagePatternHandler(this.defaultLang, languageAliasesConfigured, sitePrefixPath);
}
private PathUrlLanguagePatternHandler createWithAliases(String sitePrefixPath) {
return createWithAliases(this.english, sitePrefixPath);
}
public void testGetLang__NonMatchingPath__ReturnDefaultLang() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals(this.defaultLang, sut.getLang(""));
assertEquals(this.defaultLang, sut.getLang("/"));
assertEquals(this.defaultLang, sut.getLang("?query"));
assertEquals(this.defaultLang, sut.getLang("/page"));
assertEquals(this.defaultLang, sut.getLang("site.com/page/index.html"));
assertEquals(this.defaultLang, sut.getLang("en.site.com/pre/fix/index.html"));
assertEquals(this.defaultLang, sut.getLang("/page?wovn=en"));
assertEquals(this.defaultLang, sut.getLang("site.com/French/"));
assertEquals(this.defaultLang, sut.getLang("http://site.com/Suomi/page/index.html"));
}
public void testGetLang__MatchingPath__ValidSupportedLang__ReturnTargetLangObject() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals(this.french, sut.getLang("/fr"));
assertEquals(this.french, sut.getLang("/fr/"));
assertEquals(this.french, sut.getLang("/fr?wovn=en"));
assertEquals(this.french, sut.getLang("/fr/?wovn=en"));
assertEquals(this.french, sut.getLang("http://site.com/fr/page"));
assertEquals(this.french, sut.getLang("https://site.com/fr/page/index.html"));
assertEquals(this.french, sut.getLang("en.site.com/fr/page/index.html?wovn=es"));
}
public void testGetLang__MatchingPath__ValidSupportedLangRegionalLang__ReturnTargetLangObject() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals(this.chinese, sut.getLang("/zh-CHT"));
assertEquals(this.chinese, sut.getLang("/zh-CHT/"));
assertEquals(this.chinese, sut.getLang("/zh-CHT?wovn=en"));
assertEquals(this.chinese, sut.getLang("/zh-CHT/?wovn=en"));
assertEquals(this.chinese, sut.getLang("http://site.com/zh-CHT/page"));
assertEquals(this.chinese, sut.getLang("https://site.com/zh-CHT/page/index.html"));
assertEquals(this.chinese, sut.getLang("en.site.com/zh-CHT/page/index.html?wovn=es"));
}
public void testGetLang__MatchingPath__NotSupportedLang__ReturnDefaultLang() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals(this.defaultLang, sut.getLang("/no"));
assertEquals(this.defaultLang, sut.getLang("/sv/"));
assertEquals(this.defaultLang, sut.getLang("/pl?wovn=en"));
assertEquals(this.defaultLang, sut.getLang("/th/?wovn=en"));
assertEquals(this.defaultLang, sut.getLang("http://site.com/vi/page"));
assertEquals(this.defaultLang, sut.getLang("https://site.com/es/page/index.html"));
assertEquals(this.defaultLang, sut.getLang("en.site.com/it/page/index.html?wovn=es"));
}
public void testGetLang__HasLanguageAliases__NonMatchingPath__ReturnNull() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals(null, sut.getLang(""));
assertEquals(null, sut.getLang("/"));
assertEquals(null, sut.getLang("?query"));
assertEquals(null, sut.getLang("/page"));
assertEquals(null, sut.getLang("site.com/page/index.html"));
assertEquals(null, sut.getLang("en.site.com/pre/fix/index.html"));
assertEquals(null, sut.getLang("/page?wovn=en"));
assertEquals(null, sut.getLang("site.com/French/"));
assertEquals(null, sut.getLang("http://site.com/Suomi/page/index.html"));
assertEquals(null, sut.getLang("http://site.com/en/page/"));
assertEquals(null, sut.getLang("http://site.com/ja/page/"));
assertEquals(null, sut.getLang("http://site.com/th/page/"));
}
public void testGetLang__HasLanguageAliases__MatchingPath__ReturnTargetLangObject() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals(this.english, sut.getLang("http://site.com/us"));
assertEquals(this.english, sut.getLang("http://site.com/us?u=Tom"));
assertEquals(this.english, sut.getLang("http://site.com/us/page"));
assertEquals(this.french, sut.getLang("http://site.com/fr/page"));
assertEquals(this.japanese, sut.getLang("http://site.com/japan/page"));
assertEquals(this.chinese, sut.getLang("http://site.com/zh-CHT/page"));
}
public void testGetLang__SitePrefixPath__NonMatchingPath__ReturnNull() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals(null, sut.getLang("site.com/fr"));
assertEquals(null, sut.getLang("site.com/fr"));
assertEquals(null, sut.getLang("en.site.com/en/?wovn=en"));
assertEquals(null, sut.getLang("/es/pre/fix/page/index.html"));
assertEquals(null, sut.getLang("/pre/fr/fix/page/index.html"));
assertEquals(null, sut.getLang("/pre/en/fix/page/index.html"));
assertEquals(null, sut.getLang("https://en.site.com/en/page/"));
}
public void testGetLang__SitePrefixPath__MatchingPath__ValidSupportedLang__ReturnTargetLangObject() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals(this.french, sut.getLang("site.com/pre/fix/fr"));
assertEquals(this.french, sut.getLang("site.com/pre/fix/fr/"));
assertEquals(this.french, sut.getLang("site.com/pre/fix/fr?query"));
assertEquals(this.french, sut.getLang("site.com/pre/fix/fr/?query"));
assertEquals(this.french, sut.getLang("en.site.com/pre/fix/fr/index.html?wovn=es"));
assertEquals(this.french, sut.getLang("/pre/fix/fr/index.html"));
assertEquals(this.french, sut.getLang("/pre/fix/fr/page/index.html"));
assertEquals(this.french, sut.getLang("https://en.site.com/pre/fix/fr/page/"));
}
public void testGetLang__SitePrefixPath__MatchingPath__ValidSupportedLang__ReturnTargetLangObjectOfChinese() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals(this.chinese, sut.getLang("site.com/pre/fix/zh-CHT"));
assertEquals(this.chinese, sut.getLang("site.com/pre/fix/zh-CHT/"));
assertEquals(this.chinese, sut.getLang("site.com/pre/fix/zh-CHT?query"));
assertEquals(this.chinese, sut.getLang("site.com/pre/fix/zh-CHT/?query"));
assertEquals(this.chinese, sut.getLang("en.site.com/pre/fix/zh-CHT/index.html?wovn=es"));
assertEquals(this.chinese, sut.getLang("/pre/fix/zh-CHT/index.html"));
assertEquals(this.chinese, sut.getLang("/pre/fix/zh-CHT/page/index.html"));
assertEquals(this.chinese, sut.getLang("https://en.site.com/pre/fix/zh-CHT/page/"));
}
public void testGetLang__SitePrefixPath__MatchingPath__NotSupportedLang__ReturnDefaultLang() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals(this.defaultLang, sut.getLang("site.com/pre/fix/vi"));
assertEquals(this.defaultLang, sut.getLang("https://en.site.com/pre/fix/th/page/"));
assertEquals(this.defaultLang, sut.getLang("/pre/fix/page/en/index.html"));
assertEquals(this.defaultLang, sut.getLang("/pre/fix/french/page/index.html"));
}
public void testGetLang__SitePrefixPath__HasLanguageAliases__NonMatchingPath__ReturnNull() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals(null, sut.getLang("/"));
assertEquals(null, sut.getLang("site.com/pre/fix/"));
assertEquals(null, sut.getLang("site.com/pre/fix/ja"));
assertEquals(null, sut.getLang("site.com/pre/fix/en/?query"));
assertEquals(null, sut.getLang("site.com/us/pre/fix/"));
assertEquals(null, sut.getLang("https://site.com/ja/pre/fix/"));
assertEquals(null, sut.getLang("https://site.com/japan/pre/fix/"));
}
public void testGetLang__SitePrefixPath__HasLanguageAliases__MatchingPath__ReturnLanguageObject() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals(this.french, sut.getLang("site.com/pre/fix/fr/"));
assertEquals(this.english, sut.getLang("site.com/pre/fix/us?query"));
assertEquals(this.japanese, sut.getLang("en.site.com/pre/fix/japan/index.html?wovn=es"));
assertEquals(this.chinese, sut.getLang("en.site.com/pre/fix/zh-CHT/index.html?wovn=es"));
}
public void testConvertToDefaultLanguage__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals("", sut.convertToDefaultLanguage(""));
assertEquals("?query", sut.convertToDefaultLanguage("?query"));
assertEquals("/", sut.convertToDefaultLanguage("/"));
assertEquals("/?query", sut.convertToDefaultLanguage("/?query"));
assertEquals("site.com", sut.convertToDefaultLanguage("site.com"));
assertEquals("site.com?query", sut.convertToDefaultLanguage("site.com?query"));
assertEquals("site.com/", sut.convertToDefaultLanguage("site.com/"));
assertEquals("site.com/page/", sut.convertToDefaultLanguage("site.com/page/"));
assertEquals("/global/en/page/", sut.convertToDefaultLanguage("/global/en/page/"));
assertEquals("site.com/ru/page/", sut.convertToDefaultLanguage("site.com/ru/page/"));
assertEquals("site.com/english/page/", sut.convertToDefaultLanguage("site.com/english/page/"));
assertEquals("site.com/ru/ja/page/", sut.convertToDefaultLanguage("site.com/ru/ja/page/"));
assertEquals("/global/page/ja/index.html", sut.convertToDefaultLanguage("/global/page/ja/index.html"));
assertEquals("http://www.site.com/global/ja", sut.convertToDefaultLanguage("http://www.site.com/global/ja"));
assertEquals("https://test.com/ru/path/", sut.convertToDefaultLanguage("https://test.com/ru/path/"));
}
public void testConvertToDefaultLanguage__MatchingSupportedLang__RemoveLangCode() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals("", sut.convertToDefaultLanguage("/ja"));
assertEquals("", sut.convertToDefaultLanguage("/zh-CHT"));
assertEquals("/", sut.convertToDefaultLanguage("/ja/"));
assertEquals("/", sut.convertToDefaultLanguage("/zh-CHT/"));
assertEquals("?query", sut.convertToDefaultLanguage("/ja?query"));
assertEquals("?query", sut.convertToDefaultLanguage("/zh-CHT?query"));
assertEquals("/?query", sut.convertToDefaultLanguage("/ja/?query"));
assertEquals("/?query", sut.convertToDefaultLanguage("/zh-CHT/?query"));
assertEquals("site.com", sut.convertToDefaultLanguage("site.com/en"));
assertEquals("site.com", sut.convertToDefaultLanguage("site.com/zh-CHT"));
assertEquals("site.com/", sut.convertToDefaultLanguage("site.com/ja/"));
assertEquals("site.com/", sut.convertToDefaultLanguage("site.com/zh-CHT/"));
assertEquals("site.com/?query", sut.convertToDefaultLanguage("site.com/ja/?query"));
assertEquals("site.com/?query", sut.convertToDefaultLanguage("site.com/zh-CHT/?query"));
assertEquals("site.com/index.html", sut.convertToDefaultLanguage("site.com/fr/index.html"));
assertEquals("site.com/index.html", sut.convertToDefaultLanguage("site.com/zh-CHT/index.html"));
assertEquals("site.com/page/index.html", sut.convertToDefaultLanguage("site.com/en/page/index.html"));
assertEquals("site.com/page/index.html", sut.convertToDefaultLanguage("site.com/zh-CHT/page/index.html"));
assertEquals("/page/index.html", sut.convertToDefaultLanguage("/en/page/index.html"));
assertEquals("/page/index.html", sut.convertToDefaultLanguage("/zh-CHT/page/index.html"));
assertEquals("/page/index.html?query", sut.convertToDefaultLanguage("/en/page/index.html?query"));
assertEquals("/page/index.html?query", sut.convertToDefaultLanguage("/zh-CHT/page/index.html?query"));
assertEquals("site.com/en/page/", sut.convertToDefaultLanguage("site.com/ja/en/page/"));
assertEquals("site.com/ja/page/", sut.convertToDefaultLanguage("site.com/ja/ja/page/"));
assertEquals("site.com/zh-CHT/page/", sut.convertToDefaultLanguage("site.com/zh-CHT/zh-CHT/page/"));
assertEquals("http://www.site.com", sut.convertToDefaultLanguage("http://www.site.com/ja"));
assertEquals("http://www.site.com", sut.convertToDefaultLanguage("http://www.site.com/zh-CHT"));
assertEquals("https://test.com/path/index.html", sut.convertToDefaultLanguage("https://test.com/en/path/index.html"));
assertEquals("https://test.com/path/index.html", sut.convertToDefaultLanguage("https://test.com/zh-CHT/path/index.html"));
}
public void testConvertToDefaultLanguage__SitePrefixPath__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals("/", sut.convertToDefaultLanguage("/"));
assertEquals("site.com", sut.convertToDefaultLanguage("site.com"));
assertEquals("site.com?query", sut.convertToDefaultLanguage("site.com?query"));
assertEquals("site.com/pre/fix/", sut.convertToDefaultLanguage("site.com/pre/fix/"));
assertEquals("site.com/pre/fix/#section", sut.convertToDefaultLanguage("site.com/pre/fix/#section"));
assertEquals("site.com/no/index.html", sut.convertToDefaultLanguage("site.com/no/index.html"));
assertEquals("site.com/no/index.html#section", sut.convertToDefaultLanguage("site.com/no/index.html#section"));
assertEquals("site.com/no/index.html?query", sut.convertToDefaultLanguage("site.com/no/index.html?query"));
assertEquals("site.com/fr/pre/fix/", sut.convertToDefaultLanguage("site.com/fr/pre/fix/"));
assertEquals("site.com/pre/ja/fix/", sut.convertToDefaultLanguage("site.com/pre/ja/fix/"));
assertEquals("site.com/prefix/fr", sut.convertToDefaultLanguage("site.com/prefix/fr"));
assertEquals("/pre/fix/page/en/index.html", sut.convertToDefaultLanguage("/pre/fix/page/en/index.html"));
assertEquals("/pre/fix/ru/page/index.html", sut.convertToDefaultLanguage("/pre/fix/ru/page/index.html"));
assertEquals("http://www.site.com/ja", sut.convertToDefaultLanguage("http://www.site.com/ja"));
}
public void testConvertToDefaultLanguage__SitePrefixPath__MatchingSupportedLang__RemoveLangCode() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals("/pre/fix", sut.convertToDefaultLanguage("/pre/fix/ja"));
assertEquals("/pre/fix", sut.convertToDefaultLanguage("/pre/fix/zh-CHT"));
assertEquals("/pre/fix?query", sut.convertToDefaultLanguage("/pre/fix/ja?query"));
assertEquals("/pre/fix?query", sut.convertToDefaultLanguage("/pre/fix/zh-CHT?query"));
assertEquals("/pre/fix/", sut.convertToDefaultLanguage("/pre/fix/ja/"));
assertEquals("/pre/fix/", sut.convertToDefaultLanguage("/pre/fix/zh-CHT/"));
assertEquals("/pre/fix/?query", sut.convertToDefaultLanguage("/pre/fix/ja/?query"));
assertEquals("/pre/fix/?query", sut.convertToDefaultLanguage("/pre/fix/zh-CHT/?query"));
assertEquals("http://site.com/pre/fix/", sut.convertToDefaultLanguage("http://site.com/pre/fix/en/"));
assertEquals("http://site.com/pre/fix/", sut.convertToDefaultLanguage("http://site.com/pre/fix/zh-CHT/"));
assertEquals("site.com/pre/fix/page/index.html", sut.convertToDefaultLanguage("site.com/pre/fix/fr/page/index.html"));
assertEquals("site.com/pre/fix/page/index.html", sut.convertToDefaultLanguage("site.com/pre/fix/zh-CHT/page/index.html"));
assertEquals("site.com/pre/fix/page/index.html?query", sut.convertToDefaultLanguage("site.com/pre/fix/fr/page/index.html?query"));
assertEquals("site.com/pre/fix/page/index.html?query", sut.convertToDefaultLanguage("site.com/pre/fix/zh-CHT/page/index.html?query"));
assertEquals("http://www.site.com/pre/fix", sut.convertToDefaultLanguage("http://www.site.com/pre/fix/ja"));
assertEquals("http://www.site.com/pre/fix", sut.convertToDefaultLanguage("http://www.site.com/pre/fix/zh-CHT"));
}
public void testConvertToDefaultLanguage__HasLanguageAliases__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals("/ja", sut.convertToDefaultLanguage("/ja"));
assertEquals("/ja/", sut.convertToDefaultLanguage("/ja/"));
assertEquals("http://site.com/en/page/", sut.convertToDefaultLanguage("http://site.com/en/page/"));
assertEquals("http://site.com/th/page/", sut.convertToDefaultLanguage("http://site.com/th/page/"));
}
public void testConvertToDefaultLanguage__HasLanguageAliases__MatchingPath__InsertDefaultLanguageAlias() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals("/us?query", sut.convertToDefaultLanguage("/japan?query"));
assertEquals("/us/?query", sut.convertToDefaultLanguage("/japan/?query"));
assertEquals("http://site.com/us/page/", sut.convertToDefaultLanguage("http://site.com/us/page/"));
assertEquals("http://site.com/us/page/", sut.convertToDefaultLanguage("http://site.com/japan/page/"));
assertEquals("http://site.com/us/page/", sut.convertToDefaultLanguage("http://site.com/fr/page/"));
}
public void testConvertToDefaultLanguage__SitePrefixPath__HasLanguageAliases__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals("/pre/fix/ja", sut.convertToDefaultLanguage("/pre/fix/ja"));
assertEquals("http://site.com/pre/fix/en/page/", sut.convertToDefaultLanguage("http://site.com/pre/fix/en/page/"));
assertEquals("http://site.com/pre/fix/th/page/", sut.convertToDefaultLanguage("http://site.com/pre/fix/th/page/"));
assertEquals("http://site.com/us/pre/fix/page/", sut.convertToDefaultLanguage("http://site.com/us/pre/fix/page/"));
assertEquals("http://site.com/japan/pre/fix/page/", sut.convertToDefaultLanguage("http://site.com/japan/pre/fix/page/"));
}
public void testConvertToDefaultLanguage__SitePrefixPath__HasLanguageAliases__MatchingPath__InsertDefaultLanguageAlias() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals("/pre/fix/us", sut.convertToDefaultLanguage("/pre/fix/japan"));
assertEquals("/pre/fix/us?query", sut.convertToDefaultLanguage("/pre/fix/fr?query"));
assertEquals("http://site.com/pre/fix/us/page/", sut.convertToDefaultLanguage("http://site.com/pre/fix/us/page/"));
assertEquals("http://site.com/pre/fix/us/page/", sut.convertToDefaultLanguage("http://site.com/pre/fix/japan/page/"));
assertEquals("http://site.com/pre/fix/us/page/", sut.convertToDefaultLanguage("http://site.com/pre/fix/fr/page/"));
}
public void testConvertToTargetLanguage__DefaultSettings() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals("/ja", sut.convertToTargetLanguage("", this.japanese));
assertEquals("/zh-CHT", sut.convertToTargetLanguage("", this.chinese));
assertEquals("/ja/", sut.convertToTargetLanguage("/", this.japanese));
assertEquals("/ja/path/index.html", sut.convertToTargetLanguage("/path/index.html", this.japanese));
assertEquals("site.com/ja/", sut.convertToTargetLanguage("site.com/", this.japanese));
assertEquals("site.com/ja/", sut.convertToTargetLanguage("site.com/ja/", this.japanese));
assertEquals("site.com/ja/", sut.convertToTargetLanguage("site.com/fr/", this.japanese));
assertEquals("site.com/ja/ru/", sut.convertToTargetLanguage("site.com/ru/", this.japanese));
assertEquals("http://site.com/ja/home", sut.convertToTargetLanguage("http://site.com/home", this.japanese));
assertEquals("https://fr.site.co.uk/ja?query", sut.convertToTargetLanguage("https://fr.site.co.uk?query", this.japanese));
}
public void testConvertToTargetLanguage__SitePrefixPath__MatchingPath__ConvertUrl() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals("/pre/fix/ja", sut.convertToTargetLanguage("/pre/fix", this.japanese));
assertEquals("/pre/fix/zh-CHT", sut.convertToTargetLanguage("/pre/fix", this.chinese));
assertEquals("/pre/fix/ja/", sut.convertToTargetLanguage("/pre/fix/", this.japanese));
assertEquals("/pre/fix/ja/path/index.html", sut.convertToTargetLanguage("/pre/fix/path/index.html", this.japanese));
assertEquals("site.com/pre/fix/ja/", sut.convertToTargetLanguage("site.com/pre/fix/", this.japanese));
assertEquals("http://site.com/pre/fix/ja?query", sut.convertToTargetLanguage("http://site.com/pre/fix?query", this.japanese));
}
public void testConvertToTargetLanguage__SitePrefixPath__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = create("/pre/fix");
assertEquals("", sut.convertToTargetLanguage("", this.japanese));
assertEquals("/", sut.convertToTargetLanguage("/", this.japanese));
assertEquals("/path/index.html", sut.convertToTargetLanguage("/path/index.html", this.japanese));
assertEquals("site.com/", sut.convertToTargetLanguage("site.com/", this.japanese));
assertEquals("http://site.com/home", sut.convertToTargetLanguage("http://site.com/home", this.japanese));
assertEquals("https://fr.site.co.uk?query", sut.convertToTargetLanguage("https://fr.site.co.uk?query", this.japanese));
}
public void testConvertToTargetLanguage__HasLanguageAliases__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals("http://site.com", sut.convertToTargetLanguage("http://site.com", this.japanese));
assertEquals("http://site.com/", sut.convertToTargetLanguage("http://site.com/", this.japanese));
assertEquals("http://site.com/page/index.html", sut.convertToTargetLanguage("http://site.com/page/index.html", this.japanese));
assertEquals("http://site.com/en/page/index.html", sut.convertToTargetLanguage("http://site.com/en/page/index.html", this.japanese));
}
public void testConvertToTargetLanguage__HasLanguageAliases__MatchingPath__ConvertUrl() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals("http://site.com/japan", sut.convertToTargetLanguage("http://site.com/us", this.japanese));
assertEquals("http://site.com/japan/", sut.convertToTargetLanguage("http://site.com/fr/", this.japanese));
assertEquals("http://site.com/japan/page/index.html", sut.convertToTargetLanguage("http://site.com/japan/page/index.html", this.japanese));
}
public void testConvertToTargetLanguage__SitePrefixPath__HasLanguageAliases__NonMatchingPath__DoNotModify() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals("http://site.com/us/", sut.convertToTargetLanguage("http://site.com/us/", this.japanese));
assertEquals("http://site.com/en/", sut.convertToTargetLanguage("http://site.com/en/", this.japanese));
assertEquals("http://site.com/pre/fix/en/index.html", sut.convertToTargetLanguage("http://site.com/pre/fix/en/index.html", this.japanese));
assertEquals("http://site.com/pre/fix/ja/index.html", sut.convertToTargetLanguage("http://site.com/pre/fix/ja/index.html", this.japanese));
}
public void testConvertToTargetLanguage__SitePrefixPath__HasLanguageAliases__MatchingPath__ConvertUrl() {
PathUrlLanguagePatternHandler sut = createWithAliases("/pre/fix");
assertEquals("http://site.com/pre/fix/japan", sut.convertToTargetLanguage("http://site.com/pre/fix/us", this.japanese));
assertEquals("http://site.com/pre/fix/japan/", sut.convertToTargetLanguage("http://site.com/pre/fix/fr/", this.japanese));
assertEquals("http://site.com/pre/fix/japan/page/index.html", sut.convertToTargetLanguage("http://site.com/pre/fix/japan/page/index.html", this.japanese));
}
public void testShouldRedirectExplicitDefaultLangUrl() {
PathUrlLanguagePatternHandler sut = create("");
assertEquals(true, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/en"));
assertEquals(true, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/en/"));
assertEquals(true, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/en/home"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/ja"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/ja/home"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/path/en/home"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://en.site.com/home"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/home?wovn=en"));
}
public void testShouldRedirectExplicitDefaultLangUrl__HasLanguageAliases() {
PathUrlLanguagePatternHandler sut = createWithAliases("");
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/en"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/en/"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/us"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/us/"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/page"));
assertEquals(false, sut.shouldRedirectExplicitDefaultLangUrl("http://site.com/ja"));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.binary;
import java.io.Externalizable;
import java.io.IOException;
import java.io.ObjectInput;
import java.io.ObjectOutput;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import java.sql.Time;
import java.sql.Timestamp;
import java.util.Date;
import java.util.UUID;
import org.apache.ignite.IgniteCheckedException;
import org.apache.ignite.binary.BinaryObject;
import org.apache.ignite.binary.BinaryObjectException;
import org.apache.ignite.binary.BinaryType;
import org.apache.ignite.internal.GridDirectTransient;
import org.apache.ignite.internal.IgniteCodeGeneratingFail;
import org.apache.ignite.internal.binary.streams.BinaryHeapInputStream;
import org.apache.ignite.internal.processors.cache.CacheObject;
import org.apache.ignite.internal.processors.cache.CacheObjectAdapter;
import org.apache.ignite.internal.processors.cache.CacheObjectContext;
import org.apache.ignite.internal.processors.cache.CacheObjectValueContext;
import org.apache.ignite.internal.processors.cache.KeyCacheObject;
import org.apache.ignite.internal.processors.cache.binary.CacheObjectBinaryProcessorImpl;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.apache.ignite.plugin.extensions.communication.MessageReader;
import org.apache.ignite.plugin.extensions.communication.MessageWriter;
import org.jetbrains.annotations.Nullable;
import static java.nio.charset.StandardCharsets.UTF_8;
/**
* Binary object implementation.
*/
@IgniteCodeGeneratingFail // Fields arr and start should not be generated by MessageCodeGenerator.
public final class BinaryObjectImpl extends BinaryObjectExImpl implements Externalizable, KeyCacheObject {
/** Serialization version (class is {@link Externalizable}). */
private static final long serialVersionUID = 0L;

/** Binary context; not sent over the wire, restored in {@code finishUnmarshal}. */
@GridDirectTransient
private BinaryContext ctx;

/** Array holding the serialized binary form; may be a larger shared buffer (see {@code start}). */
private byte[] arr;

/** Offset of this object's header within {@code arr}. */
private int start;

/** Lazily-cached deserialized value; never transferred directly. */
@GridDirectTransient
private Object obj;

/** Whether {@code detach()} is allowed to copy this object out of a shared buffer. */
@GridDirectTransient
private boolean detachAllowed;

/** Partition this key is mapped to, or -1 when not assigned. */
private int part = -1;

/**
 * Default constructor, required for {@link Externalizable}.
 */
public BinaryObjectImpl() {
    // No-op.
}

/**
 * Creates a binary object backed by the given array region.
 *
 * @param ctx Context.
 * @param arr Array containing the serialized form.
 * @param start Start offset of the object's header within {@code arr}.
 */
public BinaryObjectImpl(BinaryContext ctx, byte[] arr, int start) {
    assert ctx != null;
    assert arr != null;

    this.ctx = ctx;
    this.arr = arr;
    this.start = start;
}
/** {@inheritDoc} */
@Override public KeyCacheObject copy(int part) {
    // Same partition: no copy needed. Otherwise clone the wrapper only;
    // the underlying byte array is shared, not duplicated.
    if (this.part == part)
        return this;

    BinaryObjectImpl cp = new BinaryObjectImpl(ctx, arr, start);
    cp.part = part;

    return cp;
}

/** {@inheritDoc} */
@Override public int partition() {
    return part;
}

/** {@inheritDoc} */
@Override public void partition(int part) {
    this.part = part;
}

/** {@inheritDoc} */
@Override public byte cacheObjectType() {
    return TYPE_BINARY;
}

/** {@inheritDoc} */
@Override public boolean isPlatformType() {
    // Binary objects are never "platform" (plain Java) values.
    return false;
}

/** {@inheritDoc} */
@Override public boolean internal() {
    return false;
}
/** {@inheritDoc} */
@Nullable @Override public <T> T value(CacheObjectValueContext ctx, boolean cpy) {
    // Reuse the cached deserialized value unless the caller requires a
    // private copy (cpy) and the context says copying is needed.
    Object obj0 = obj;

    if (obj0 == null || (cpy && needCopy(ctx)))
        obj0 = deserializeValue(ctx);

    return (T)obj0;
}

/** {@inheritDoc} */
@Override public byte[] valueBytes(CacheObjectValueContext ctx) throws IgniteCheckedException {
    // A detached object already owns exactly its own bytes; otherwise copy
    // this object's region out of the (possibly shared) backing array.
    if (detached())
        return array();

    int len = length();

    byte[] arr0 = new byte[len];

    U.arrayCopy(arr, start, arr0, 0, len);

    return arr0;
}

/** {@inheritDoc} */
@Override public boolean putValue(ByteBuffer buf) throws IgniteCheckedException {
    return putValue(buf, 0, CacheObjectAdapter.objectPutSize(length()));
}

/** {@inheritDoc} */
@Override public int putValue(long addr) throws IgniteCheckedException {
    return CacheObjectAdapter.putValue(addr, cacheObjectType(), arr, start);
}

/** {@inheritDoc} */
@Override public boolean putValue(final ByteBuffer buf, int off, int len) throws IgniteCheckedException {
    return CacheObjectAdapter.putValue(cacheObjectType(), buf, off, len, arr, start);
}

/** {@inheritDoc} */
@Override public int valueBytesLength(CacheObjectContext ctx) throws IgniteCheckedException {
    return CacheObjectAdapter.objectPutSize(length());
}
/** {@inheritDoc} */
@Override public CacheObject prepareForCache(CacheObjectContext ctx) {
    // Ensure the stored object does not retain a reference to a larger
    // shared buffer: detach (copy out) unless already detached.
    if (detached())
        return this;

    return (BinaryObjectImpl)detach();
}

/** {@inheritDoc} */
@Override public void finishUnmarshal(CacheObjectValueContext ctx, ClassLoader ldr) throws IgniteCheckedException {
    // Restore the transient binary context from the local node and wait for
    // any pending metadata write for this type before the object is usable.
    CacheObjectBinaryProcessorImpl binaryProc = (CacheObjectBinaryProcessorImpl)ctx.kernalContext().cacheObjects();

    this.ctx = binaryProc.binaryContext();

    binaryProc.waitMetadataWriteIfNeeded(typeId());
}

/** {@inheritDoc} */
@Override public void prepareMarshal(CacheObjectValueContext ctx) throws IgniteCheckedException {
    // No-op.
}

/** {@inheritDoc} */
@Override public int length() {
    // Total object length is stored in the header at TOTAL_LEN_POS.
    return BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.TOTAL_LEN_POS);
}
/**
 * Copies this object's bytes out of a shared buffer into a dedicated array,
 * if detaching is allowed and not already done.
 *
 * @return Detached binary object.
 */
public BinaryObjectImpl detach() {
    if (!detachAllowed || detached())
        return this;

    int len = length();

    byte[] arr0 = new byte[len];

    U.arrayCopy(arr, start, arr0, 0, len);

    return new BinaryObjectImpl(ctx, arr0, 0);
}

/**
 * An object is detached when its backing array contains exactly its own
 * bytes (starts at offset 0 and spans the whole array).
 *
 * @return Detached or not.
 */
public boolean detached() {
    return start == 0 && length() == arr.length;
}

/**
 * Enables or disables detaching for this instance.
 *
 * @param detachAllowed Detach allowed flag.
 */
public void detachAllowed(boolean detachAllowed) {
    this.detachAllowed = detachAllowed;
}
/** {@inheritDoc} */
@Override public BinaryContext context() {
    return ctx;
}

/**
 * Sets the binary context (e.g. after deserialization on a new node).
 *
 * @param ctx Context.
 */
public void context(BinaryContext ctx) {
    this.ctx = ctx;
}

/** {@inheritDoc} */
@Override public byte[] array() {
    // Returns the backing array itself, not a copy.
    return arr;
}

/** {@inheritDoc} */
@Override public int start() {
    return start;
}

/** {@inheritDoc} */
@Override public long offheapAddress() {
    // Heap-backed implementation: no off-heap address.
    return 0;
}

/** {@inheritDoc} */
@Override public boolean hasArray() {
    return true;
}

/** {@inheritDoc} */
@Override public boolean isFlagSet(short flag) {
    // Flags live in the header at FLAGS_POS.
    short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS);

    return BinaryUtils.isFlagSet(flags, flag);
}
/** {@inheritDoc} */
@Override public int typeId() {
    int off = start + GridBinaryMarshaller.TYPE_ID_POS;

    int typeId = BinaryPrimitives.readInt(arr, off);

    if (typeId == GridBinaryMarshaller.UNREGISTERED_TYPE_ID) {
        // Unregistered type: the class name is written right after the
        // default header as [STRING marker byte][4-byte length][UTF-8 bytes];
        // resolve the type id from that name via the binary context.
        off = start + GridBinaryMarshaller.DFLT_HDR_LEN;

        assert arr[off] == GridBinaryMarshaller.STRING : arr[off];

        int len = BinaryPrimitives.readInt(arr, ++off);

        String clsName = new String(arr, off + 4, len, UTF_8);

        typeId = ctx.typeId(clsName);
    }

    return typeId;
}
/** {@inheritDoc} */
@Nullable @Override public BinaryType type() throws BinaryObjectException {
    // Lazy proxy: metadata is resolved on demand.
    return BinaryUtils.typeProxy(ctx, this);
}

/** {@inheritDoc} */
@Nullable @Override public BinaryType rawType() throws BinaryObjectException {
    // Direct (non-proxy) type lookup.
    return BinaryUtils.type(ctx, this);
}

/** {@inheritDoc} */
@Nullable @Override public <F> F field(String fieldName) throws BinaryObjectException {
    return (F) reader(null, false).unmarshalField(fieldName);
}

/** {@inheritDoc} */
@Nullable @Override public <F> F field(int fieldId) throws BinaryObjectException {
    return (F) reader(null, false).unmarshalField(fieldId);
}
/** {@inheritDoc} */
@Override public BinarySerializedFieldComparator createFieldComparator() {
    int schemaOff = BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.SCHEMA_OR_RAW_OFF_POS);

    short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS);

    // Compact footers omit per-field ids, so the id slot width is zero.
    int fieldIdLen = BinaryUtils.isCompactFooter(flags) ? 0 : BinaryUtils.FIELD_ID_LEN;
    int fieldOffLen = BinaryUtils.fieldOffsetLength(flags);

    // Footer entry layout: [field id (0 or FIELD_ID_LEN)][field offset (fieldOffLen)].
    int orderBase = start + schemaOff + fieldIdLen;
    int orderMultiplier = fieldIdLen + fieldOffLen;

    return new BinarySerializedFieldComparator(this, arr, 0L, start, orderBase, orderMultiplier, fieldOffLen);
}
/** {@inheritDoc} */
@Override public int dataStartOffset() {
int typeId = BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.TYPE_ID_POS);
if (typeId == GridBinaryMarshaller.UNREGISTERED_TYPE_ID) {
// Unregistered type: skip the inlined class name string after the header.
// '+ 1' skips the STRING marker byte before the 4-byte length.
int len = BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.DFLT_HDR_LEN + 1);
// '+ 5' accounts for the marker byte (1) plus the length field (4).
return start + GridBinaryMarshaller.DFLT_HDR_LEN + len + 5;
} else
return start + GridBinaryMarshaller.DFLT_HDR_LEN;
}
/** {@inheritDoc} */
@Override public int footerStartOffset() {
    short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS);

    // Without a schema there is no footer: report the position just past the object.
    return BinaryUtils.hasSchema(flags)
        ? start + BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.SCHEMA_OR_RAW_OFF_POS)
        : start + length();
}
/** {@inheritDoc} */
@Nullable @Override public <F> F fieldByOrder(int order) {
if (order == BinarySchema.ORDER_NOT_FOUND)
return null;
Object val;
// Calculate field position.
int schemaOff = BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.SCHEMA_OR_RAW_OFF_POS);
short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS);
// Compact footers store offsets only; full footers prepend a field ID per entry.
int fieldIdLen = BinaryUtils.isCompactFooter(flags) ? 0 : BinaryUtils.FIELD_ID_LEN;
int fieldOffLen = BinaryUtils.fieldOffsetLength(flags);
int fieldOffsetPos = start + schemaOff + order * (fieldIdLen + fieldOffLen) + fieldIdLen;
int fieldPos;
// Offsets are stored in 1, 2 or 4 bytes depending on object size; mask to
// treat the narrower forms as unsigned.
if (fieldOffLen == BinaryUtils.OFFSET_1)
fieldPos = start + ((int)BinaryPrimitives.readByte(arr, fieldOffsetPos) & 0xFF);
else if (fieldOffLen == BinaryUtils.OFFSET_2)
fieldPos = start + ((int)BinaryPrimitives.readShort(arr, fieldOffsetPos) & 0xFFFF);
else
fieldPos = start + BinaryPrimitives.readInt(arr, fieldOffsetPos);
// Read header and try performing fast lookup for well-known types (the most common types go first).
byte hdr = BinaryPrimitives.readByte(arr, fieldPos);
switch (hdr) {
case GridBinaryMarshaller.INT:
val = BinaryPrimitives.readInt(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.LONG:
val = BinaryPrimitives.readLong(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.BOOLEAN:
val = BinaryPrimitives.readBoolean(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.SHORT:
val = BinaryPrimitives.readShort(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.BYTE:
val = BinaryPrimitives.readByte(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.CHAR:
val = BinaryPrimitives.readChar(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.FLOAT:
val = BinaryPrimitives.readFloat(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.DOUBLE:
val = BinaryPrimitives.readDouble(arr, fieldPos + 1);
break;
case GridBinaryMarshaller.STRING: {
// Layout: marker (1) + length (4) + UTF-8 bytes.
int dataLen = BinaryPrimitives.readInt(arr, fieldPos + 1);
val = new String(arr, fieldPos + 5, dataLen, UTF_8);
break;
}
case GridBinaryMarshaller.DATE: {
long time = BinaryPrimitives.readLong(arr, fieldPos + 1);
val = new Date(time);
break;
}
case GridBinaryMarshaller.TIMESTAMP: {
// Millis (8 bytes) followed by the extra sub-millisecond nanos (4 bytes).
long time = BinaryPrimitives.readLong(arr, fieldPos + 1);
int nanos = BinaryPrimitives.readInt(arr, fieldPos + 1 + 8);
Timestamp ts = new Timestamp(time);
ts.setNanos(ts.getNanos() + nanos);
val = ts;
break;
}
case GridBinaryMarshaller.TIME: {
long time = BinaryPrimitives.readLong(arr, fieldPos + 1);
val = new Time(time);
break;
}
case GridBinaryMarshaller.UUID: {
long most = BinaryPrimitives.readLong(arr, fieldPos + 1);
long least = BinaryPrimitives.readLong(arr, fieldPos + 1 + 8);
val = new UUID(most, least);
break;
}
case GridBinaryMarshaller.DECIMAL: {
// Layout: marker (1) + scale (4) + length (4) + unscaled magnitude bytes.
int scale = BinaryPrimitives.readInt(arr, fieldPos + 1);
int dataLen = BinaryPrimitives.readInt(arr, fieldPos + 5);
byte[] data = BinaryPrimitives.readByteArray(arr, fieldPos + 9, dataLen);
// Sign is encoded in the top bit of the first magnitude byte.
boolean negative = data[0] < 0;
if (negative)
data[0] &= 0x7F;
BigInteger intVal = new BigInteger(data);
if (negative)
intVal = intVal.negate();
val = new BigDecimal(intVal, scale);
break;
}
case GridBinaryMarshaller.NULL:
val = null;
break;
default:
// Not a primitive wire type: fall back to full unmarshalling.
val = BinaryUtils.unmarshal(BinaryHeapInputStream.create(arr, fieldPos), ctx, null);
break;
}
return (F)val;
}
/** {@inheritDoc} */
@Override public boolean writeFieldByOrder(int order, ByteBuffer buf) {
// Calculate field position.
int schemaOffset = BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.SCHEMA_OR_RAW_OFF_POS);
short flags = BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS);
// Compact footers store offsets only; full footers prepend a field ID per entry.
int fieldIdLen = BinaryUtils.isCompactFooter(flags) ? 0 : BinaryUtils.FIELD_ID_LEN;
int fieldOffsetLen = BinaryUtils.fieldOffsetLength(flags);
int fieldOffsetPos = start + schemaOffset + order * (fieldIdLen + fieldOffsetLen) + fieldIdLen;
int fieldPos;
// Offsets are 1, 2 or 4 bytes wide; mask narrower forms to read them unsigned.
if (fieldOffsetLen == BinaryUtils.OFFSET_1)
fieldPos = start + ((int)BinaryPrimitives.readByte(arr, fieldOffsetPos) & 0xFF);
else if (fieldOffsetLen == BinaryUtils.OFFSET_2)
fieldPos = start + ((int)BinaryPrimitives.readShort(arr, fieldOffsetPos) & 0xFFFF);
else
fieldPos = start + BinaryPrimitives.readInt(arr, fieldOffsetPos);
// Read header and try performing fast lookup for well-known types (the most common types go first).
byte hdr = BinaryPrimitives.readByte(arr, fieldPos);
// Total serialized size of the field including its 1-byte type marker.
int totalLen;
switch (hdr) {
case GridBinaryMarshaller.NULL:
totalLen = 1;
break;
case GridBinaryMarshaller.INT:
case GridBinaryMarshaller.FLOAT:
totalLen = 5;
break;
case GridBinaryMarshaller.LONG:
case GridBinaryMarshaller.DOUBLE:
case GridBinaryMarshaller.DATE:
case GridBinaryMarshaller.TIME:
totalLen = 9;
break;
case GridBinaryMarshaller.BOOLEAN:
totalLen = 2;
break;
case GridBinaryMarshaller.SHORT:
totalLen = 3;
break;
case GridBinaryMarshaller.BYTE:
totalLen = 2;
break;
case GridBinaryMarshaller.CHAR:
totalLen = 3;
break;
case GridBinaryMarshaller.STRING: {
// marker (1) + length (4) + data bytes.
int dataLen = BinaryPrimitives.readInt(arr, fieldPos + 1);
totalLen = dataLen + 5;
break;
}
case GridBinaryMarshaller.TIMESTAMP:
totalLen = 13;
break;
case GridBinaryMarshaller.UUID:
totalLen = 17;
break;
case GridBinaryMarshaller.DECIMAL: {
// marker (1) + scale (4) + length (4) + magnitude bytes.
int dataLen = BinaryPrimitives.readInt(arr, fieldPos + 5);
totalLen = dataLen + 9;
break;
}
case GridBinaryMarshaller.OBJ:
// Nested objects embed their own total length in the header.
totalLen = BinaryPrimitives.readInt(arr, fieldPos + GridBinaryMarshaller.TOTAL_LEN_POS);
break;
case GridBinaryMarshaller.OPTM_MARSH:
totalLen = BinaryPrimitives.readInt(arr, fieldPos + 1);
break;
default:
throw new UnsupportedOperationException("Failed to write field of the given type " +
"(field type is not supported): " + hdr);
}
// Report back-pressure instead of overflowing the caller's buffer.
if (buf.remaining() < totalLen)
return false;
buf.put(arr, fieldPos, totalLen);
return true;
}
/** {@inheritDoc} */
@Nullable @Override protected <F> F field(BinaryReaderHandles rCtx, String fieldName) {
// Variant used when a shared handles context must be threaded through nested reads.
return (F)reader(rCtx, false).unmarshalField(fieldName);
}
/** {@inheritDoc} */
@Override public boolean hasField(String fieldName) {
// Only probes the footer for the field; does not unmarshal its value.
return reader(null, false).findFieldByName(fieldName);
}
/** {@inheritDoc} */
@Nullable @Override public <T> T deserialize(@Nullable ClassLoader ldr) throws BinaryObjectException {
if (ldr == null)
return deserialize();
// Disable the class-descriptor cache while deserializing with a custom loader.
GridBinaryMarshaller.USE_CACHE.set(Boolean.FALSE);
try {
return (T)reader(null, ldr, true).deserialize();
}
finally {
// NOTE(review): restores TRUE unconditionally rather than the previous
// value — assumes this method is never nested inside another
// USE_CACHE=FALSE section; confirm against callers.
GridBinaryMarshaller.USE_CACHE.set(Boolean.TRUE);
}
}
/** {@inheritDoc} */
@Nullable @Override public <T> T deserialize() throws BinaryObjectException {
// Use the cached deserialized value if one was stored by deserializeValue().
Object obj0 = obj;
if (obj0 == null)
obj0 = deserializeValue(null);
return (T)obj0;
}
/** {@inheritDoc} */
@Override public BinaryObject clone() throws CloneNotSupportedException {
return super.clone(); // Shallow copy; the backing array is shared, not duplicated.
}
/** {@inheritDoc} */
@Override public int hashCode() {
// Hash code is precomputed at write time and stored in the object header.
return BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.HASH_CODE_POS);
}
/** {@inheritDoc} */
@Override public boolean hasSchema() {
    // Read the header flags word and test the schema bit.
    return BinaryUtils.hasSchema(
        BinaryPrimitives.readShort(arr, start + GridBinaryMarshaller.FLAGS_POS));
}
/** {@inheritDoc} */
@Override public int schemaId() {
return BinaryPrimitives.readInt(arr, start + GridBinaryMarshaller.SCHEMA_ID_POS); // Fixed header slot.
}
/** {@inheritDoc} */
@Override public BinarySchema createSchema() {
// Delegates to the reader, which builds (or reuses) the schema from the footer.
return reader(null, false).getOrCreateSchema();
}
/** {@inheritDoc} */
@Override public void onAckReceived() {
// No-op.
}
/** {@inheritDoc} */
@Override public void writeExternal(ObjectOutput out) throws IOException {
if (detachAllowed) {
// Detached form: write only this object's own byte range; the receiving
// side then sees a start offset of 0 within the transferred bytes.
int len = length();
out.writeInt(len);
out.write(arr, start, len);
out.writeInt(0);
}
else {
// Attached form: write the whole backing array plus the real start offset.
out.writeInt(arr.length);
out.write(arr);
out.writeInt(start);
}
}
/** {@inheritDoc} */
@Override public void readExternal(ObjectInput in) throws IOException, ClassNotFoundException {
// Context is not serialized; pick it up from the marshaller's thread-local state.
ctx = GridBinaryMarshaller.threadLocalContext();
// Mirror of writeExternal(): length, raw bytes, then the start offset.
arr = new byte[in.readInt()];
in.readFully(arr);
start = in.readInt();
}
/** {@inheritDoc} */
@Override public boolean writeTo(ByteBuffer buf, MessageWriter writer) {
writer.setBuffer(buf);
if (!writer.isHeaderWritten()) {
if (!writer.writeHeader(directType(), fieldsCount()))
return false;
writer.onHeaderWritten();
}
// Intentional fall-through: resumes from the last incomplete field on re-entry
// and continues writing the remaining ones.
switch (writer.state()) {
case 0:
// When detach is allowed, ship only this object's own byte range.
if (!writer.writeByteArray("arr",
arr,
detachAllowed ? start : 0,
detachAllowed ? length() : arr.length))
return false;
writer.incrementState();
case 1:
if (!writer.writeInt("part", part))
return false;
writer.incrementState();
case 2:
// A detached payload starts at offset 0 on the receiving side.
if (!writer.writeInt("start", detachAllowed ? 0 : start))
return false;
writer.incrementState();
}
return true;
}
/** {@inheritDoc} */
@Override public boolean readFrom(ByteBuffer buf, MessageReader reader) {
reader.setBuffer(buf);
if (!reader.beforeMessageRead())
return false;
// Intentional fall-through: resumes reading from the last incomplete field
// on re-entry. Field order mirrors writeTo().
switch (reader.state()) {
case 0:
arr = reader.readByteArray("arr");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 1:
part = reader.readInt("part");
if (!reader.isLastRead())
return false;
reader.incrementState();
case 2:
start = reader.readInt("start");
if (!reader.isLastRead())
return false;
reader.incrementState();
}
return reader.afterMessageRead(BinaryObjectImpl.class);
}
/** {@inheritDoc} */
@Override public short directType() {
return 113; // Direct-marshallable message type code for this class.
}
/** {@inheritDoc} */
@Override public byte fieldsCount() {
return 3; // arr, part, start — must match writeTo()/readFrom().
}
/**
* Runs value deserialization regardless of whether obj already has the deserialized value.
* Will set obj if descriptor is configured to keep deserialized values.
* @param coCtx CacheObjectContext.
* @return Object.
*/
private Object deserializeValue(@Nullable CacheObjectValueContext coCtx) {
// Prefer the cache-configured class loader when a cache context is supplied.
BinaryReaderExImpl reader = reader(null, coCtx != null ?
coCtx.kernalContext().config().getClassLoader() : ctx.configuration().getClassLoader(), true);
Object obj0 = reader.deserialize();
// NOTE(review): desc is only sanity-checked here, never consulted for the
// caching decision below — confirm whether desc.keepDeserialized() should
// also gate the assignment to obj.
BinaryClassDescriptor desc = reader.descriptor();
assert desc != null;
// Cache the deserialized value on this instance when the context asks for it.
if (coCtx != null && coCtx.storeValue())
obj = obj0;
return obj0;
}
/**
* @param ctx Context.
* @return {@code True} need to copy value returned to user.
*/
private boolean needCopy(CacheObjectValueContext ctx) {
// Copy only when copy-on-get is enabled, a cached value exists, and it is mutable.
return ctx.copyOnGet() && obj != null && !ctx.kernalContext().cacheObjects().immutable(obj);
}
/**
* Create new reader for this object.
*
* @param rCtx Reader context.
* @param ldr Class loader; falls back to the context-configured loader when null.
* @param forUnmarshal {@code True} if reader is need to unmarshal object.
* @return Reader.
*/
private BinaryReaderExImpl reader(@Nullable BinaryReaderHandles rCtx, @Nullable ClassLoader ldr,
boolean forUnmarshal) {
if (ldr == null)
ldr = ctx.configuration().getClassLoader();
// Reader is positioned at this object's header within the backing array.
return new BinaryReaderExImpl(ctx,
BinaryHeapInputStream.create(arr, start),
ldr,
rCtx,
false,
forUnmarshal);
}
/**
* Create new reader for this object.
*
* Convenience overload that uses the context-configured class loader.
*
* @param rCtx Reader context.
* @param forUnmarshal {@code True} if reader is need to unmarshal object.
* @return Reader.
*/
private BinaryReaderExImpl reader(@Nullable BinaryReaderHandles rCtx, boolean forUnmarshal) {
return reader(rCtx, null, forUnmarshal);
}
/**
 * Compare two objects for DML operation.
 *
 * Binary objects are ordered after plain values; two binary objects are
 * compared structurally, two plain values via natural ordering.
 *
 * @param first First.
 * @param second Second.
 * @return Comparison result.
 */
@SuppressWarnings("unchecked")
public static int compareForDml(Object first, Object second) {
    boolean firstBinary = first instanceof BinaryObjectImpl;
    boolean secondBinary = second instanceof BinaryObjectImpl;

    if (firstBinary && secondBinary)
        return compareForDml0((BinaryObjectImpl)first, (BinaryObjectImpl)second);

    if (firstBinary)
        return 1; // Binary objects go to the right part.

    if (secondBinary)
        return -1; // Plain values go to the left part.

    // Neither side is binary: fall back to natural ordering.
    return ((Comparable)first).compareTo(second);
}
/**
* Internal DML comparison routine.
*
* Orders by type ID, then hash code, and only falls back to a byte-wise
* comparison of the data sections when both of those collide.
*
* @param first First item.
* @param second Second item.
* @return Comparison result.
*/
private static int compareForDml0(BinaryObjectImpl first, BinaryObjectImpl second) {
int res = Integer.compare(first.typeId(), second.typeId());
if (res == 0) {
res = Integer.compare(first.hashCode(), second.hashCode());
if (res == 0) {
// Pessimistic case: need to perform binary comparison.
int firstDataStart = first.dataStartOffset();
int secondDataStart = second.dataStartOffset();
int firstLen = first.footerStartOffset() - firstDataStart;
int secondLen = second.footerStartOffset() - secondDataStart;
// Shorter data section sorts first; equal lengths compare byte by byte.
res = Integer.compare(firstLen, secondLen);
if (res == 0) {
for (int i = 0; i < firstLen; i++) {
byte firstByte = first.arr[firstDataStart + i];
byte secondByte = second.arr[secondDataStart + i];
res = Byte.compare(firstByte, secondByte);
if (res != 0)
break;
}
}
}
}
return res;
}
/** {@inheritDoc} */
@Override public String toString() {
// Guard against a partially-initialized instance (e.g. mid-deserialization):
// report field presence instead of dereferencing null state.
if (arr == null || ctx == null)
return "BinaryObjectImpl [arr= " + (arr != null) + ", ctx=" + (ctx != null) + ", start=" + start + "]";
return super.toString();
}
}
| |
package trendli.me.makhana.common.net.msg;
import java.io.IOException;
import java.security.InvalidKeyException;
import java.security.Key;
import java.security.NoSuchAlgorithmException;
import java.util.Base64;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.crypto.BadPaddingException;
import javax.crypto.Cipher;
import javax.crypto.IllegalBlockSizeException;
import javax.crypto.KeyGenerator;
import javax.crypto.NoSuchPaddingException;
import javax.crypto.SealedObject;
import javax.crypto.SecretKey;
import javax.crypto.spec.SecretKeySpec;
import trendli.me.makhana.common.data.LoginCredentials;
import com.jme3.network.AbstractMessage;
import com.jme3.network.serializing.Serializable;
/**
 * Login request message carrying {@link LoginCredentials} sealed with a freshly
 * generated 128-bit AES session key; the AES key itself is encrypted with the
 * receiver's public key and transported Base64-encoded.
 *
 * @author Elliott Butler
 */
@Serializable
public class SecureLoginRequest extends AbstractMessage
{
    /**
     * The logger for this class.
     */
    private final static Logger logger = Logger.getLogger( SecureLoginRequest.class.getName( ) );

    /** AES session key, encrypted with the receiver's public key, Base64-encoded. */
    private String encodedEncryptedAESKey;

    /** Credentials encrypted with the AES session key. */
    private SealedObject sealedCredentials;

    /**
     * The login type
     */
    private boolean isServer;

    /**
     * Used by the serializer.
     */
    public SecureLoginRequest( )
    {
    }

    /**
     * Generates a 128-bit AES key, seals the credentials with it, and encrypts
     * the AES key with the supplied public key.
     *
     * @param loginCredentials the credentials to protect; must not be null
     * @param publicKey the receiver's public key used to wrap the AES key; must not be null
     * @param isServer the login type (true = server)
     * @throws IllegalStateException if an argument is null or any crypto/IO step fails
     */
    public SecureLoginRequest( LoginCredentials loginCredentials, Key publicKey, boolean isServer ) throws IllegalStateException
    {
        if ( loginCredentials == null )
            throw new IllegalStateException( "LoginCredentials can not be null" );
        if ( publicKey == null )
            throw new IllegalStateException( "Public key can not be null " );
        try
        {
            Cipher keyCipher = Cipher.getInstance( publicKey.getAlgorithm( ) );
            keyCipher.init( Cipher.ENCRYPT_MODE, publicKey );
            KeyGenerator kgen = KeyGenerator.getInstance( "AES" );
            kgen.init( 128 );
            SecretKey key = kgen.generateKey( );
            byte[ ] aesKey = key.getEncoded( );
            SecretKeySpec aeskeySpec = new SecretKeySpec( aesKey, "AES" );
            Cipher credentialCipher = Cipher.getInstance( "AES" );
            credentialCipher.init( Cipher.ENCRYPT_MODE, aeskeySpec );
            this.encodedEncryptedAESKey = Base64.getEncoder( ).encodeToString( keyCipher.doFinal( aesKey ) );
            // TODO: Investigate implementing SerializableSerializer to handle
            // this class type, or look into extending it and using jMonkey's
            // @Serializable
            this.sealedCredentials = new SealedObject( loginCredentials, credentialCipher );
            this.isServer = isServer;
        }
        catch ( NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException
                | IllegalBlockSizeException | BadPaddingException | IOException e )
        {
            // Fail loudly and preserve the cause. Previously BadPaddingException
            // was swallowed (printStackTrace), leaving a half-constructed
            // message, and the other handlers dropped the cause chain.
            throw new IllegalStateException( "Failed to build secure login request: " + e.getMessage( ), e );
        }
    }

    /**
     * Decrypts and returns the AES session key, or null if decryption fails.
     *
     * @param privateKey the private key matching the public key used at construction
     * @return the unwrapped AES key, or null on failure
     */
    public Key getAESKey( Key privateKey )
    {
        Key key = null;
        try
        {
            Cipher keyCipher = Cipher.getInstance( privateKey.getAlgorithm( ) );
            keyCipher.init( Cipher.DECRYPT_MODE, privateKey );
            byte[ ] encryptedAESKeyBytes = Base64.getDecoder( ).decode( encodedEncryptedAESKey );
            byte[ ] aesKey = keyCipher.doFinal( encryptedAESKeyBytes );
            key = new SecretKeySpec( aesKey, "AES" );
        }
        catch ( NoSuchAlgorithmException | NoSuchPaddingException | InvalidKeyException
                | IllegalBlockSizeException | BadPaddingException e )
        {
            // Log the full throwable so the stack trace is not lost.
            logger.log( Level.WARNING, "Failed to unwrap AES key: " + e.getMessage( ), e );
        }
        return key;
    }

    /**
     * Decrypts and returns the LoginCredentials contained. If any failure
     * occurs during decryption, returns null;
     *
     * @param privateKey
     *            the private key to be used for decryption
     * @return the decrypted LoginCredentials or null
     */
    public LoginCredentials getLoginCredentials( Key privateKey )
    {
        try
        {
            return ( LoginCredentials ) sealedCredentials.getObject( getAESKey( privateKey ) );
        }
        catch ( InvalidKeyException | ClassNotFoundException | NoSuchAlgorithmException | IOException e )
        {
            // Log the full throwable so the stack trace is not lost.
            logger.log( Level.WARNING, "Failed to decrypt credentials: " + e.getMessage( ), e );
        }
        return null;
    }

    /**
     *
     * Returns the SealedObject containing the LoginCredentials object.
     *
     * @return the Sealed LoginCredentials
     */
    public SealedObject getSealedCredentials( )
    {
        return sealedCredentials;
    }

    /**
     * Gets the type associated with this login request.
     *
     * @return the type (true = server)
     */
    public boolean isServer( )
    {
        return isServer;
    }

    /**
     * Returns a string representation of this DAO.
     */
    public String toString( )
    {
        return "sealedCredentials=" + sealedCredentials + ", isServer=" + isServer;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.drill.exec.vector.complex.writer;
import static org.apache.drill.test.TestBuilder.listOf;
import static org.apache.drill.test.TestBuilder.mapOf;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.io.BufferedOutputStream;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Paths;
import java.util.List;
import java.util.zip.GZIPOutputStream;
import org.apache.drill.test.BaseTestQuery;
import org.apache.drill.common.expression.SchemaPath;
import org.apache.drill.common.util.DrillFileUtils;
import org.apache.drill.exec.exception.SchemaChangeException;
import org.apache.drill.exec.proto.UserBitShared;
import org.apache.drill.exec.record.RecordBatchLoader;
import org.apache.drill.exec.record.VectorWrapper;
import org.apache.drill.exec.rpc.user.QueryDataBatch;
import org.apache.drill.exec.store.easy.json.JSONRecordReader;
import org.apache.drill.exec.vector.IntVector;
import org.apache.drill.exec.vector.RepeatedBigIntVector;
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import com.google.common.base.Charsets;
import com.google.common.io.Files;
public class TestJsonReader extends BaseTestQuery {
private static final boolean VERBOSE_DEBUG = false;
@BeforeClass
public static void setupTestFiles() {
// Stage the JSON fixtures used by the tests under the dfs test root.
dirTestWatcher.copyResourceToRoot(Paths.get("store", "json"));
dirTestWatcher.copyResourceToRoot(Paths.get("vector","complex", "writer"));
}
@Test
public void testEmptyList() throws Exception {
    final String root = "store/json/emptyLists";

    // Counting a[0] must skip documents whose "a" list is empty/null.
    testBuilder()
        // Fix: the format string has a single %s but was passed `root` twice.
        .sqlQuery("select count(a[0]) as ct from dfs.`%s`", root)
        .ordered()
        .baselineColumns("ct")
        .baselineValues(6l)
        .build()
        .run();
}
@Test
public void schemaChange() throws Exception {
// Smoke test: query must survive a schema change across files in the directory.
test("select b from dfs.`vector/complex/writer/schemaChange/`");
}
@Test
public void testFieldSelectionBug() throws Exception {
// Selecting both a nested field and its parent map must not corrupt either
// projection; runs under all_text_mode so ints and strings unify as text.
try {
testBuilder()
.sqlQuery("select t.field_4.inner_3 as col_1, t.field_4 as col_2 from cp.`store/json/schema_change_int_to_string.json` t")
.unOrdered()
.optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
.baselineColumns("col_1", "col_2")
.baselineValues(
mapOf(),
mapOf(
"inner_1", listOf(),
"inner_3", mapOf()))
.baselineValues(
mapOf("inner_object_field_1", "2"),
mapOf(
"inner_1", listOf("1", "2", "3"),
"inner_2", "3",
"inner_3", mapOf("inner_object_field_1", "2")))
.baselineValues(
mapOf(),
mapOf(
"inner_1", listOf("4", "5", "6"),
"inner_2", "3",
"inner_3", mapOf()))
.go();
} finally {
// Restore the session option even if the assertion fails.
test("alter session set `store.json.all_text_mode` = false");
}
}
@Test
public void testSplitAndTransferFailure() throws Exception {
// Flatten over lists that may be null/empty exercises the vector
// split-and-transfer path across three fixture layouts.
final String testVal = "a string";
testBuilder()
.sqlQuery("select flatten(config) as flat from cp.`store/json/null_list.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(listOf())
.baselineValues(listOf(testVal))
.go();
test("select flatten(config) as flat from cp.`store/json/null_list_v2.json`");
testBuilder()
.sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v2.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(mapOf("repeated_varchar", listOf()))
.baselineValues(mapOf("repeated_varchar", listOf(testVal)))
.go();
testBuilder()
.sqlQuery("select flatten(config) as flat from cp.`store/json/null_list_v3.json`")
.ordered()
.baselineColumns("flat")
.baselineValues(mapOf("repeated_map", listOf(mapOf("repeated_varchar", listOf()))))
.baselineValues(mapOf("repeated_map", listOf(mapOf("repeated_varchar", listOf(testVal)))))
.go();
}
@Test
@Ignore("DRILL-1824")
public void schemaChangeValidate() throws Exception {
// Validates schema-change results against a JSON baseline; disabled pending DRILL-1824.
testBuilder()
.sqlQuery("select b from dfs.`vector/complex/writer/schemaChange/`")
.unOrdered()
.jsonBaselineFile("/vector/complex/writer/expected.json")
.build()
.run();
}
/**
 * Runs each query against the given resource file and asserts its row count.
 *
 * @param filename classpath resource (printed when VERBOSE_DEBUG is on)
 * @param queryType SQL or PHYSICAL
 * @param queries queries to execute, matched by index to rowCounts
 * @param rowCounts expected row count per query
 */
public void runTestsOnFile(String filename, UserBitShared.QueryType queryType, String[] queries, long[] rowCounts) throws Exception {
if (VERBOSE_DEBUG) {
System.out.println("===================");
System.out.println("source data in json");
System.out.println("===================");
System.out.println(Files.toString(DrillFileUtils.getResourceAsFile(filename), Charsets.UTF_8));
}
int i = 0;
for (String query : queries) {
if (VERBOSE_DEBUG) {
System.out.println("=====");
System.out.println("query");
System.out.println("=====");
System.out.println(query);
System.out.println("======");
System.out.println("result");
System.out.println("======");
}
int rowCount = testRunAndPrint(queryType, query);
assertEquals(rowCounts[i], rowCount);
System.out.println();
i++;
}
}
@Test
public void testReadCompressed() throws Exception {
    String filepath = "compressed_json.json";
    File f = new File(dirTestWatcher.getRootDir(), filepath);

    // try-with-resources guarantees the writer is closed (data flushed) even if
    // println throws; the original leaked the stream on failure.
    try (PrintWriter out = new PrintWriter(f)) {
        out.println("{\"a\" :5}");
    }
    gzipIt(f);
    testBuilder()
        .sqlQuery("select * from dfs.`%s.gz`", filepath)
        .unOrdered()
        .baselineColumns("a")
        .baselineValues(5l)
        .build().run();

    // test reading the uncompressed version as well
    testBuilder()
        .sqlQuery("select * from dfs.`%s`", filepath)
        .unOrdered()
        .baselineColumns("a")
        .baselineValues(5l)
        .build().run();
}
/**
 * Gzip-compresses {@code sourceFile} into a sibling file with a ".gz" suffix.
 *
 * @param sourceFile file to compress; left untouched
 * @throws IOException if reading or writing fails
 */
public static void gzipIt(File sourceFile) throws IOException {
    // modified from: http://www.mkyong.com/java/how-to-compress-a-file-in-gzip-format/
    byte[] buffer = new byte[1024];

    // try-with-resources closes both streams even when a read/write throws;
    // the original leaked them on any I/O error. Closing the GZIPOutputStream
    // also finishes the gzip trailer, so the explicit finish() is unnecessary.
    try (FileInputStream in = new FileInputStream(sourceFile);
         GZIPOutputStream gzos = new GZIPOutputStream(new FileOutputStream(sourceFile.getPath() + ".gz"))) {
        int len;
        while ((len = in.read(buffer)) > 0) {
            gzos.write(buffer, 0, len);
        }
    }
}
@Test
public void testDrill_1419() throws Exception {
// Regression: indexing into a repeated field inside a map (DRILL-1419).
String[] queries = {"select t.trans_id, t.trans_info.prod_id[0],t.trans_info.prod_id[1] from cp.`store/json/clicks.json` t limit 5"};
long[] rowCounts = {5};
String filename = "/store/json/clicks.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testRepeatedCount() throws Exception {
// repeated_count over varchar, int, float, varchar and bit repeated columns.
test("select repeated_count(str_list) from cp.`store/json/json_basic_repeated_varchar.json`");
test("select repeated_count(INT_col) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_count(FLOAT4_col) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_count(VARCHAR_col) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_count(BIT_col) from cp.`parquet/alltypes_repeated.json`");
}
@Test
public void testRepeatedContains() throws Exception {
// repeated_contains across the supported repeated column types, including
// both boolean literals for the bit column.
test("select repeated_contains(str_list, 'asdf') from cp.`store/json/json_basic_repeated_varchar.json`");
test("select repeated_contains(INT_col, -2147483648) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_contains(FLOAT4_col, -1000000000000.0) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_contains(VARCHAR_col, 'qwerty' ) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_contains(BIT_col, true) from cp.`parquet/alltypes_repeated.json`");
test("select repeated_contains(BIT_col, false) from cp.`parquet/alltypes_repeated.json`");
}
@Test
public void testSingleColumnRead_vector_fill_bug() throws Exception {
// Large single-column file exercises vector refill across batch boundaries.
String[] queries = {"select * from cp.`store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testNonExistentColumnReadAlone() throws Exception {
// Projecting only a missing column must still return every row (as nulls).
String[] queries = {"select non_existent_column from cp.`store/json/single_column_long_file.json`"};
long[] rowCounts = {13512};
String filename = "/store/json/single_column_long_file.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
}
@Test
public void testAllTextMode() throws Exception {
// all_text_mode reads every scalar as varchar, so the int->string schema
// change in the fixture becomes a non-event.
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`store/json/schema_change_int_to_string.json`"};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void readComplexWithStar() throws Exception {
List<QueryDataBatch> results = testSqlWithResults("select * from cp.`store/json/test_complex_read_with_star.json`");
assertEquals(1, results.size());
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
QueryDataBatch batch = results.get(0);
assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
assertEquals(3, batchLoader.getSchema().getFieldCount());
testExistentColumns(batchLoader);
// Release direct memory held by the batch and the loader.
batch.release();
batchLoader.clear();
}
@Test
public void testNullWhereListExpected() throws Exception {
// A null appearing where earlier records had a list must not fail the read.
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`store/json/null_where_list_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_list_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void testNullWhereMapExpected() throws Exception {
// A null appearing where earlier records had a map must not fail the read.
test("alter system set `store.json.all_text_mode` = true");
String[] queries = {"select * from cp.`store/json/null_where_map_expected.json`"};
long[] rowCounts = {3};
String filename = "/store/json/null_where_map_expected.json";
runTestsOnFile(filename, UserBitShared.QueryType.SQL, queries, rowCounts);
test("alter system set `store.json.all_text_mode` = false");
}
@Test
public void ensureProjectionPushdown() throws Exception {
// Tests to make sure that we are correctly eliminating schema changing columns. If completes, means that the projection pushdown was successful.
test("alter system set `store.json.all_text_mode` = false; "
+ "select t.field_1, t.field_3.inner_1, t.field_3.inner_2, t.field_4.inner_1 "
+ "from cp.`store/json/schema_change_int_to_string.json` t");
}
// The project pushdown rule is correctly adding the projected columns to the scan, however it is not removing
// the redundant project operator after the scan, this tests runs a physical plan generated from one of the tests to
// ensure that the project is filtering out the correct data in the scan alone
@Test
public void testProjectPushdown() throws Exception {
String[] queries = {Files.toString(DrillFileUtils.getResourceAsFile("/store/json/project_pushdown_json_physical_plan.json"), Charsets.UTF_8)};
long[] rowCounts = {3};
String filename = "/store/json/schema_change_int_to_string.json";
test("alter system set `store.json.all_text_mode` = false");
runTestsOnFile(filename, UserBitShared.QueryType.PHYSICAL, queries, rowCounts);
List<QueryDataBatch> results = testPhysicalWithResults(queries[0]);
assertEquals(1, results.size());
// "`field_1`", "`field_3`.`inner_1`", "`field_3`.`inner_2`", "`field_4`.`inner_1`"
RecordBatchLoader batchLoader = new RecordBatchLoader(getAllocator());
QueryDataBatch batch = results.get(0);
assertTrue(batchLoader.load(batch.getHeader().getDef(), batch.getData()));
// this used to be five. It is now three. This is because the plan doesn't have a project.
// Scanners are not responsible for projecting non-existent columns (as long as they project one column)
assertEquals(3, batchLoader.getSchema().getFieldCount());
testExistentColumns(batchLoader);
// Release direct memory held by the batch and the loader.
batch.release();
batchLoader.clear();
}
@Test
public void testJsonDirectoryWithEmptyFile() throws Exception {
// A zero-byte file in the directory must be skipped, not fail the scan.
testBuilder()
.sqlQuery("select * from dfs.`store/json/jsonDirectoryWithEmpyFile`")
.unOrdered()
.baselineColumns("a")
.baselineValues(1l)
.build()
.run();
}
// Verifies the values of the three projected rows, column by column, against the
// known contents of schema_change_int_to_string.json. Rows where a column is
// absent in the source file must come back as null (scalars) or empty (lists).
private void testExistentColumns(RecordBatchLoader batchLoader) throws SchemaChangeException {
// field_1 is a repeated bigint column.
VectorWrapper<?> vw = batchLoader.getValueAccessorById(
RepeatedBigIntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_1")).getFieldIds() //
);
assertEquals("[1]", vw.getValueVector().getAccessor().getObject(0).toString());
assertEquals("[5]", vw.getValueVector().getAccessor().getObject(1).toString());
assertEquals("[5,10,15]", vw.getValueVector().getAccessor().getObject(2).toString());
// field_3.inner_1: missing in row 0, present in rows 1-2.
vw = batchLoader.getValueAccessorById(
IntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_1")).getFieldIds() //
);
assertNull(vw.getValueVector().getAccessor().getObject(0));
assertEquals(2l, vw.getValueVector().getAccessor().getObject(1));
assertEquals(5l, vw.getValueVector().getAccessor().getObject(2));
// field_3.inner_2: only the last row carries a value.
vw = batchLoader.getValueAccessorById(
IntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_3", "inner_2")).getFieldIds() //
);
assertNull(vw.getValueVector().getAccessor().getObject(0));
assertNull(vw.getValueVector().getAccessor().getObject(1));
assertEquals(3l, vw.getValueVector().getAccessor().getObject(2));
// field_4.inner_1 is a repeated bigint; absent in row 0 surfaces as an empty list.
vw = batchLoader.getValueAccessorById(
RepeatedBigIntVector.class, //
batchLoader.getValueVectorId(SchemaPath.getCompoundPath("field_4", "inner_1")).getFieldIds() //
);
assertEquals("[]", vw.getValueVector().getAccessor().getObject(0).toString());
assertEquals("[1,2,3]", vw.getValueVector().getAccessor().getObject(1).toString());
assertEquals("[4,5,6]", vw.getValueVector().getAccessor().getObject(2).toString());
}
@Test
public void testSelectStarWithUnionType() throws Exception {
// select * over a file whose columns change type row to row; with the union type
// enabled each row may carry a different runtime shape (scalar, list, map).
try {
testBuilder()
.sqlQuery("select * from cp.`jsoninput/union/a.json`")
.ordered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.baselineColumns("field1", "field2")
// Row 1: plain scalars.
.baselineValues(
1L, 1.2
)
// Row 2: field1 becomes a list.
.baselineValues(
listOf(2L), 1.2
)
// Row 3: field1 is a map, field2 a heterogeneous list.
.baselineValues(
mapOf("inner1", 3L, "inner2", 4L), listOf(3L, 4.0, "5")
)
// Row 4: deeply nested maps and lists mixed inside both columns.
.baselineValues(
mapOf("inner1", 3L,
"inner2", listOf(
mapOf(
"innerInner1", 1L,
"innerInner2",
listOf(
3L,
"a"
)
)
)
),
listOf(
mapOf("inner3", 7L),
4.0,
"5",
mapOf("inner4", 9L),
listOf(
mapOf(
"inner5", 10L,
"inner6", 11L
),
mapOf(
"inner5", 12L,
"inner7", 13L
)
)
)
).go();
} finally {
// Restore the session default even if the assertion fails.
testNoResult("alter session set `exec.enable_union_type` = false");
}
}
@Test
public void testSelectFromListWithCase() throws Exception {
  // CASE over a union-typed column: digs an element out of a nested list value and
  // verifies both the value and its reported runtime type.
  final String query = "select a, typeOf(a) `type` from " +
      "(select case when is_list(field2) then field2[4][1].inner7 end a " +
      "from cp.`jsoninput/union/a.json`) where a is not null";
  try {
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("a", "type")
        .baselineValues(13L, "BIGINT")
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void testTypeCase() throws Exception {
  // Uses the is_* type-inspection functions to normalize a union-typed column:
  // scalars pass through, lists contribute their first element, maps their inner1.
  final String query = "select case when is_bigint(field1) " +
      "then field1 when is_list(field1) then field1[0] " +
      "when is_map(field1) then t.field1.inner1 end f1 from cp.`jsoninput/union/a.json` t";
  try {
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("f1")
        .baselineValues(1L)
        .baselineValues(2L)
        .baselineValues(3L)
        .baselineValues(3L)
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void testSumWithTypeCase() throws Exception {
  // Aggregates over the normalized union column from testTypeCase: 1 + 2 + 3 + 3 = 9.
  final String query = "select sum(cast(f1 as bigint)) sum_f1 from " +
      "(select case when is_bigint(field1) then field1 " +
      "when is_list(field1) then field1[0] when is_map(field1) then t.field1.inner1 end f1 " +
      "from cp.`jsoninput/union/a.json` t)";
  try {
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("sum_f1")
        .baselineValues(9L)
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void testUnionExpressionMaterialization() throws Exception {
  // a + b over union-typed operands: an all-bigint row stays integral (3L) while
  // rows with a double operand materialize as doubles.
  final String query = "select a + b c from cp.`jsoninput/union/b.json`";
  try {
    testBuilder()
        .sqlQuery(query)
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("c")
        .baselineValues(3L)
        .baselineValues(7.0)
        .baselineValues(11.0)
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void testSumMultipleBatches() throws Exception {
  // Writes 20k interleaved rows of two shapes so the scan spans multiple record
  // batches, then checks that the union-typed aggregation is correct across
  // batch boundaries: 10k map rows + 10k bigint rows, each contributing 1.
  File tableDir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_batch"));
  // try-with-resources: the original leaked the stream if a write threw before close().
  try (BufferedOutputStream os =
      new BufferedOutputStream(new FileOutputStream(new File(tableDir, "a.json")))) {
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
    }
    os.flush();
  }
  try {
    testBuilder()
        .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_batch t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("sum")
        .baselineValues(20000L)
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void testSumFilesWithDifferentSchema() throws Exception {
  // Two files with different schemas (map rows vs. bigint rows) in one directory;
  // the union-typed aggregation must merge them: 10k + 10k rows of value 1.
  File tableDir = dirTestWatcher.makeTestTmpSubDir(Paths.get("multi_file"));
  // try-with-resources: the original leaked each stream if a write threw before close().
  try (BufferedOutputStream os =
      new BufferedOutputStream(new FileOutputStream(new File(tableDir, "a.json")))) {
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"map\", data : { a : 1 } }\n".getBytes());
    }
    os.flush();
  }
  try (BufferedOutputStream os =
      new BufferedOutputStream(new FileOutputStream(new File(tableDir, "b.json")))) {
    for (int i = 0; i < 10000; i++) {
      os.write("{ type : \"bigint\", data : 1 }\n".getBytes());
    }
    os.flush();
  }
  try {
    testBuilder()
        .sqlQuery("select sum(cast(case when `type` = 'map' then t.data.a else data end as bigint)) `sum` from dfs.tmp.multi_file t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
        .baselineColumns("sum")
        .baselineValues(20000L)
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `exec.enable_union_type` = false");
  }
}
@Test
public void drill_4032() throws Exception {
  // DRILL-4032: selecting a nested field when some files only ever carry null for
  // the parent column must not fail. Only checks the query completes.
  File tableDir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4032"));
  // presumably redundant after makeTestTmpSubDir — TODO confirm; kept for safety
  tableDir.mkdir();
  // try-with-resources: the original leaked the stream if a write threw before close().
  try (BufferedOutputStream os =
      new BufferedOutputStream(new FileOutputStream(new File(tableDir, "a.json")))) {
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
    os.write("{\"col1\": \"val1\",\"col2\": {\"col3\":\"abc\", \"col4\":\"xyz\"}}".getBytes());
    os.flush();
  }
  try (BufferedOutputStream os =
      new BufferedOutputStream(new FileOutputStream(new File(tableDir, "b.json")))) {
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
    os.write("{\"col1\": \"val1\",\"col2\": null}".getBytes());
    os.flush();
  }
  testNoResult("select t.col2.col3 from dfs.tmp.drill_4032 t");
}
@Test
public void drill_4479() throws Exception {
  // DRILL-4479: a full batch of all-null rows followed by one typed row; with
  // all_text_mode the late-arriving values must surface as text in aggregations.
  try {
    File tableDir = dirTestWatcher.makeTestTmpSubDir(Paths.get("drill_4479"));
    // presumably redundant after makeTestTmpSubDir — TODO confirm; kept for safety
    tableDir.mkdir();
    // try-with-resources: the original leaked the stream if a write threw before close().
    try (BufferedOutputStream os =
        new BufferedOutputStream(new FileOutputStream(new File(tableDir, "mostlynulls.json")))) {
      // Create an entire batch of null values for 3 columns
      for (int i = 0; i < JSONRecordReader.DEFAULT_ROWS_PER_BATCH; i++) {
        os.write("{\"a\": null, \"b\": null, \"c\": null}".getBytes());
      }
      // Add a row with {bigint, float, string} values
      os.write("{\"a\": 123456789123, \"b\": 99.999, \"c\": \"Hello World\"}".getBytes());
      os.flush();
    }
    // Group by the string column alone.
    testBuilder()
        .sqlQuery("select c, count(*) as cnt from dfs.tmp.drill_4479 t group by c")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("c", "cnt")
        .baselineValues(null, 4096L)
        .baselineValues("Hello World", 1L)
        .go();
    // Group by all three columns; all-text mode renders the numerics as strings.
    testBuilder()
        .sqlQuery("select a, b, c, count(*) as cnt from dfs.tmp.drill_4479 t group by a, b, c")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("a", "b", "c", "cnt")
        .baselineValues(null, null, null, 4096L)
        .baselineValues("123456789123", "99.999", "Hello World", 1L)
        .go();
    // max() must skip the nulls and find the single typed row.
    testBuilder()
        .sqlQuery("select max(a) as x, max(b) as y, max(c) as z from dfs.tmp.drill_4479 t")
        .ordered()
        .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
        .baselineColumns("x", "y", "z")
        .baselineValues("123456789123", "99.999", "Hello World")
        .go();
  } finally {
    // Always restore the session default.
    testNoResult("alter session set `store.json.all_text_mode` = false");
  }
}
@Test
public void testFlattenEmptyArrayWithAllTextMode() throws Exception {
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "empty_array_all_text_mode.json")))) {
    writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
  }
  final String query = "select flatten(t.a.b.c) as c from dfs.`empty_array_all_text_mode.json` t";
  try {
    // Flattening an empty array must yield an empty result set whether or not
    // all-text mode is enabled; run the identical query under both settings.
    for (String mode : new String[] {"true", "false"}) {
      testBuilder()
          .sqlQuery(query)
          .unOrdered()
          .optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = " + mode)
          .expectsEmptyResultSet()
          .go();
    }
  } finally {
    // Reset rather than force a value, so the session returns to its default.
    testNoResult("alter session reset `store.json.all_text_mode`");
  }
}
@Test
public void testFlattenEmptyArrayWithUnionType() throws Exception {
// Same empty-array fixture as the all-text-mode test, but exercised with the
// union type enabled — alone first, then combined with all-text mode.
try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), "empty_array.json")))) {
writer.write("{ \"a\": { \"b\": { \"c\": [] }, \"c\": [] } }");
}
try {
String query = "select flatten(t.a.b.c) as c from dfs.`empty_array.json` t";
// Union type only.
testBuilder()
.sqlQuery(query)
.unOrdered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.expectsEmptyResultSet()
.go();
// Union type plus all-text mode together.
testBuilder()
.sqlQuery(query)
.unOrdered()
.optionSettingQueriesForTestQuery("alter session set `exec.enable_union_type` = true")
.optionSettingQueriesForTestQuery("alter session set `store.json.all_text_mode` = true")
.expectsEmptyResultSet()
.go();
} finally {
// Reset both options back to their defaults.
testNoResult("alter session reset `store.json.all_text_mode`");
testNoResult("alter session reset `exec.enable_union_type`");
}
}
@Test // DRILL-5521
public void testKvgenWithUnionAll() throws Exception {
  // kvgen over the output of a UNION ALL whose second branch produces a map with
  // a null value; only the resulting row count is verified.
  final String fileName = "map.json";
  File target = new File(dirTestWatcher.getRootDir(), fileName);
  try (BufferedWriter writer = new BufferedWriter(new FileWriter(target))) {
    writer.write("{\"rk\": \"a\", \"m\": {\"a\":\"1\"}}");
  }
  String query = String.format("select kvgen(m) as res from (select m from dfs.`%s` union all " +
      "select convert_from('{\"a\" : null}' ,'json') as m from (values(1)))", fileName);
  assertEquals("Row count should match", 2, testSql(query));
}
@Test // DRILL-4264
public void testFieldWithDots() throws Exception {
// Field names containing literal dots must be distinguishable from nested paths:
// back-quoted `a.b` addresses the dotted key, while t.m.a.b walks nested maps.
String fileName = "table.json";
try (BufferedWriter writer = new BufferedWriter(new FileWriter(new File(dirTestWatcher.getRootDir(), fileName)))) {
writer.write("{\"rk.q\": \"a\", \"m\": {\"a.b\":\"1\", \"a\":{\"b\":\"2\"}, \"c\":\"3\"}}");
}
testBuilder()
.sqlQuery("select t.m.`a.b` as a,\n" +
"t.m.a.b as b,\n" +
"t.m['a.b'] as c,\n" +
"t.rk.q as d,\n" +
"t.`rk.q` as e\n" +
"from dfs.`%s` t", fileName)
.unOrdered()
.baselineColumns("a", "b", "c", "d", "e")
// d is null: t.rk.q is a nested path and "rk" has no child "q";
// e resolves the literal dotted key "rk.q".
.baselineValues("1", "2", "1", null, "a")
.go();
}
}
| |
// Copyright (c) 2003-present, Jodd Team (http://jodd.org)
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package jodd.json;
import jodd.json.meta.JSON;
import jodd.json.meta.JsonAnnotationManager;
import jodd.json.model.State;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import static jodd.util.ArraysUtil.bytes;
import static jodd.util.ArraysUtil.ints;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
 * Unit tests for Jodd's {@code JsonSerializer}: maps, beans, lists, arrays,
 * strings, strict-mode annotations, include/exclude path rules, null exclusion
 * and circular-reference breaking.
 */
public class JsonSerializerTest {
// Simple two-property bean; nested inside Bar in several tests.
public static class Foo {
protected String name;
protected Long id;
public Long getId() {
return id;
}
public void setId(Long id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
}
// Bean holding a nested Foo plus a primitive.
public static class Bar {
private Foo foo;
private int number;
public Foo getFoo() {
return foo;
}
public void setFoo(Foo foo) {
this.foo = foo;
}
public int getNumber() {
return number;
}
public void setNumber(int number) {
this.number = number;
}
}
// Self-referencing array holder used by the circular-array test; the @JSON
// annotation marks the array property for serialization.
public static class WhiteBar {
@JSON
private WhiteBar[] bars;
private int sum;
public WhiteBar[] getBars() {
return bars;
}
public void setBars(WhiteBar[] bars) {
this.bars = bars;
}
public int getSum() {
return sum;
}
public void setSum(int sum) {
this.sum = sum;
}
}
// White <-> Black form a two-bean reference cycle.
public static class White {
private int intensity;
private Black black;
public int getIntensity() {
return intensity;
}
public void setIntensity(int intensity) {
this.intensity = intensity;
}
public Black getBlack() {
return black;
}
public void setBlack(Black black) {
this.black = black;
}
}
public static class Black {
private int darkness;
private White white;
public int getDarkness() {
return darkness;
}
public void setDarkness(int darkness) {
this.darkness = darkness;
}
public White getWhite() {
return white;
}
public void setWhite(White white) {
this.white = white;
}
}
// ---------------------------------------------------------------- tests
@Test
public void testSimpleMap() {
// Map entries serialize in iteration order; LinkedHashMap makes that deterministic.
Map map = new LinkedHashMap();
map.put("one", "uno");
map.put("two", "duo");
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(map);
assertEquals("{\"one\":\"uno\",\"two\":\"duo\"}", json);
// Mixed value types: number, double, boolean, null, and a string needing escaping.
map = new LinkedHashMap();
map.put("one", Long.valueOf(173));
map.put("two", Double.valueOf(7.89));
map.put("three", Boolean.TRUE);
map.put("four", null);
map.put("five", "new\nline");
jsonSerializer = new JsonSerializer();
json = jsonSerializer.serialize(map);
assertEquals("{\"one\":173,\"two\":7.89,\"three\":true,\"four\":null,\"five\":\"new\\nline\"}", json);
}
// Bean with a map property and a collection property, used below to compare
// how maps/collections serialize depending on where they appear.
public static class InBean {
HashMap<String, Object> params = new HashMap<>();
ArrayList<String> names = new ArrayList<>();
public HashMap<String, Object> getParams() {
return params;
}
public void setParams(HashMap<String, Object> params) {
this.params = params;
}
public ArrayList<String> getNames() {
return names;
}
public void setNames(ArrayList<String> names) {
this.names = names;
}
}
@Test
public void testInMapVsInBeanbsInList() {
// NOTE(review): "bs" in the method name looks like a typo for "vs" — renaming
// would be cosmetic only; left as-is.
HashMap<String, Object> params = new HashMap<>();
params.put("myid", Integer.valueOf(4343));
ArrayList<String> names = new ArrayList<>();
names.add("veqna");
// in map: only the nested map survives; the collection value is dropped.
LinkedHashMap<String, Object> rootMap = new LinkedHashMap<>();
rootMap.put("params", params);
rootMap.put("names", names);
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(rootMap);
Assert.assertEquals("{\"params\":{\"myid\":4343}}", json);
// in bean: both map and collection properties are omitted by default.
InBean inBean = new InBean();
inBean.setParams(params);
inBean.setNames(names);
jsonSerializer = new JsonSerializer();
json = jsonSerializer.serialize(inBean);
Assert.assertEquals("{}", json);
// in list
ArrayList list = new ArrayList();
list.add(params);
list.add(names);
jsonSerializer = new JsonSerializer();
// NOTE(review): this serializes inBean again, not the freshly built 'list' —
// looks like a copy-paste slip, so the "in list" case is never actually
// exercised. Confirm the intended target (and expected JSON) before changing.
json = jsonSerializer.serialize(inBean);
Assert.assertEquals("{}", json);
}
@Test
public void testSimpleObjects() {
Foo foo = new Foo();
foo.setName("jodd");
foo.setId(Long.valueOf(976));
Bar bar = new Bar();
bar.setFoo(foo);
bar.setNumber(575);
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(bar);
// Nested beans are serialized recursively, properties in alphabetical order.
assertEquals("{\"foo\":{\"id\":976,\"name\":\"jodd\"},\"number\":575}", json);
}
@Test
public void testSimpleList() {
// Heterogeneous list: string, bean with default values, and a large double.
List list = new LinkedList();
list.add("one");
list.add(new Bar());
list.add(Double.valueOf(31E302));
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(list);
assertEquals("[\"one\",{\"foo\":null,\"number\":0},3.1E303]", json);
}
@Test
public void testSimpleArray() {
// Primitive int and byte arrays, plus a 2-D array, serialize as JSON arrays.
int[] numbers = ints(1, 2, 3, 4, 5);
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(numbers);
assertEquals("[1,2,3,4,5]", json);
byte[] numbers2 = bytes((byte)1, (byte)2, (byte)3, (byte)4, (byte)5);
json = jsonSerializer.serialize(numbers2);
assertEquals("[1,2,3,4,5]", json);
int[][] matrix = new int[][] {
ints(1,2,3),
ints(7,8,9)
};
json = jsonSerializer.serialize(matrix);
assertEquals("[[1,2,3],[7,8,9]]", json);
}
@Test
public void testEscapeChars() {
// Round trip: parse a string containing every JSON escape, then serialize it
// back and expect the identical escaped form.
String json = "\"1\\\" 2\\\\ 3\\/ 4\\b 5\\f 6\\n 7\\r 8\\t\"";
String str = new JsonParser().parse(json);
assertEquals("1\" 2\\ 3/ 4\b 5\f 6\n 7\r 8\t", str);
String jsonStr = new JsonSerializer().serialize(str);
assertEquals(json, jsonStr);
}
@Test
public void testStrings() {
// CharSequence implementations serialize the same as plain String.
String text = "Hello";
String json = new JsonSerializer().serialize(new StringBuilder(text));
assertEquals("\"Hello\"", json);
json = new JsonSerializer().serialize(new StringBuffer(text));
assertEquals("\"Hello\"", json);
}
@Test
public void testChar() {
// A single character serializes as a one-character JSON string.
Character character = Character.valueOf('J');
String json = new JsonSerializer().serialize(character);
assertEquals("\"J\"", json);
}
@Test
public void testClass() {
// Class objects serialize as their fully-qualified name.
String json = new JsonSerializer().serialize(JoddJson.class);
assertEquals("\"" + JoddJson.class.getName() + "\"", json);
}
// Non-strict: all properties serialize unless excluded; bbb is explicitly
// excluded, ccc explicitly included.
@JSON(strict = false)
public static class Cook {
// no annotation
private String aaa = "AAA";
private String bbb = "BBB";
private String ccc = "CCC";
public String getAaa() {
return aaa;
}
public void setAaa(String aaa) {
this.aaa = aaa;
}
@JSON(include = false)
public String getBbb() {
return bbb;
}
public void setBbb(String bbb) {
this.bbb = bbb;
}
@JSON(include = true)
public String getCcc() {
return ccc;
}
public void setCcc(String ccc) {
this.ccc = ccc;
}
}
// Strict subclass: only properties annotated for inclusion serialize.
@JSON(strict = true)
public static class MasterCook extends Cook {
}
@Test
public void testStrictMode() {
Cook cook = new Cook();
JsonAnnotationManager jam = JoddJson.annotationManager;
JsonAnnotationManager.TypeData typeData = jam.lookupTypeData(Cook.class);
// One include rule (ccc) and one exclude rule (bbb) from the annotations above.
assertEquals(1, typeData.rules.totalIncludeRules());
assertEquals(1, typeData.rules.totalExcludeRules());
assertEquals("ccc", typeData.rules.getRule(0));
assertEquals("bbb", typeData.rules.getRule(1));
JsonSerializer jsonSerializer = new JsonSerializer();
String json = jsonSerializer.serialize(cook);
assertTrue(json.contains("\"aaa\""));
assertFalse(json.contains("\"bbb\""));
assertTrue(json.contains("\"ccc\""));
// now, strict = true, serialize only annotated properties!
MasterCook masterCook = new MasterCook();
typeData = jam.lookupTypeData(MasterCook.class);
assertEquals(1, typeData.rules.totalIncludeRules());
assertEquals(1, typeData.rules.totalExcludeRules());
assertEquals("ccc", typeData.rules.getRule(0));
assertEquals("bbb", typeData.rules.getRule(1));
json = jsonSerializer.serialize(masterCook);
assertFalse(json.contains("\"aaa\""));
assertFalse(json.contains("\"bbb\""));
assertTrue(json.contains("\"ccc\""));
}
@Test
public void testCuriousModeOfSerialization() {
// Builds a map with a collection and a list of maps, then uses include/exclude
// path rules; the round trip through the parser must reproduce the original map.
Map<String, Object> map = new HashMap<>();
List<Integer> numbers = new ArrayList<>();
numbers.add(Integer.valueOf(8));
numbers.add(Integer.valueOf(4));
numbers.add(Integer.valueOf(2));
map.put("array", numbers);
map.put("value", "BIG");
List<Map<String, Object>> list = new ArrayList<>();
map.put("list", list);
Map<String, Object> val = new HashMap<>();
val.put("name", "Root");
val.put("value", "Hack");
list.add(val);
val = new HashMap<>();
val.put("name", "John");
val.put("value", "Protected");
list.add(val);
// serialize
JsonSerializer jsonSerializer = new JsonSerializer();
jsonSerializer.exclude("list"); // not applied
jsonSerializer.include("array");
// jsonSerializer.include("list"); // not needed, will be included by next two
jsonSerializer.include("list.name");
jsonSerializer.include("list.value");
String str = jsonSerializer.serialize(map);
Map<String, Object> result = new JsonParser().parse(str);
assertEquals(map, result);
}
@Test
public void testCircularDependenciesBean() {
// White -> Black -> White cycle: the serializer must break the cycle, so the
// deserialized inner White comes back null.
White white = new White();
white.setIntensity(20);
Black black = new Black();
black.setDarkness(80);
black.setWhite(white);
white.setBlack(black);
String json = new JsonSerializer().serialize(white);
White whiteNew = new JsonParser().parse(json, White.class);
assertEquals(white.getIntensity(), whiteNew.getIntensity());
assertEquals(white.getBlack().getDarkness(), whiteNew.getBlack().getDarkness());
assertNull(whiteNew.getBlack().getWhite());
}
@Test
public void testCircularDependenciesMap() {
// Same cycle expressed with maps: the back-reference key is dropped entirely.
Map<String, Object> white = new HashMap<>();
white.put("intensity", Integer.valueOf(20));
Map<String, Object> black = new HashMap<>();
black.put("darkness", Integer.valueOf(80));
black.put("white", white);
white.put("black", black);
String json = new JsonSerializer().serialize(white);
Map<String, Object> whiteNew = new JsonParser().parse(json);
assertEquals(white.get("intensity"), whiteNew.get("intensity"));
assertEquals(
((Map<String, Object>)(white.get("black"))).get("darkness"),
((Map<String, Object>)(whiteNew.get("black"))).get("darkness"));
assertNull(((Map<String, Object>) (whiteNew.get("black"))).get("black"));
assertFalse(((Map<String, Object>) (whiteNew.get("black"))).containsKey("white"));
}
@Test
public void testCircularDependenciesArray() {
// An array whose element points back at the array: the self-referencing
// property is silently omitted.
WhiteBar[] whiteBars = new WhiteBar[1];
WhiteBar white = new WhiteBar();
white.setSum(1);
white.setBars(whiteBars);
whiteBars[0] = white;
String json = new JsonSerializer().serialize(whiteBars);
assertEquals("[{\"sum\":1}]", json);
}
@Test
public void testExcludingNulls() {
// excludeNulls(true) drops null-valued entries and null bean properties.
Map<String, Object> map = new HashMap<>();
map.put("one", null);
String json = new JsonSerializer().serialize(map);
assertEquals("{\"one\":null}", json);
json = new JsonSerializer().excludeNulls(true).serialize(map);
assertEquals("{}", json);
State state = new State();
map.put("one", state);
json = new JsonSerializer().serialize(map);
assertTrue(json.startsWith("{\"one\":"));
json = new JsonSerializer().excludeNulls(true).serialize(map);
assertEquals("{\"one\":{}}", json);
state.setId(1);
json = new JsonSerializer().excludeNulls(true).serialize(map);
assertEquals("{\"one\":{\"id\":1}}", json);
}
}
| |
/*
* Copyright (C) 2017-2019 Dremio Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.dremio.sabot.rpc.user;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Optional;
import java.util.Set;
import java.util.concurrent.Executor;
import javax.net.ssl.SSLException;
import org.apache.arrow.memory.BufferAllocator;
import com.dremio.common.config.SabotConfig;
import com.dremio.exec.proto.CoordinationProtos.NodeEndpoint;
import com.dremio.exec.proto.GeneralRPCProtos.Ack;
import com.dremio.exec.proto.UserBitShared;
import com.dremio.exec.proto.UserBitShared.QueryData;
import com.dremio.exec.proto.UserBitShared.QueryId;
import com.dremio.exec.proto.UserBitShared.QueryResult;
import com.dremio.exec.proto.UserBitShared.RpcEndpointInfos;
import com.dremio.exec.proto.UserProtos.BitToUserHandshake;
import com.dremio.exec.proto.UserProtos.CreatePreparedStatementResp;
import com.dremio.exec.proto.UserProtos.GetCatalogsResp;
import com.dremio.exec.proto.UserProtos.GetColumnsResp;
import com.dremio.exec.proto.UserProtos.GetQueryPlanFragments;
import com.dremio.exec.proto.UserProtos.GetSchemasResp;
import com.dremio.exec.proto.UserProtos.GetServerMetaResp;
import com.dremio.exec.proto.UserProtos.GetTablesResp;
import com.dremio.exec.proto.UserProtos.HandshakeStatus;
import com.dremio.exec.proto.UserProtos.QueryPlanFragments;
import com.dremio.exec.proto.UserProtos.RecordBatchFormat;
import com.dremio.exec.proto.UserProtos.RecordBatchType;
import com.dremio.exec.proto.UserProtos.RpcType;
import com.dremio.exec.proto.UserProtos.RunQuery;
import com.dremio.exec.proto.UserProtos.UserProperties;
import com.dremio.exec.proto.UserProtos.UserToBitHandshake;
import com.dremio.exec.rpc.Acks;
import com.dremio.exec.rpc.BasicClientWithConnection;
import com.dremio.exec.rpc.ConnectionThrottle;
import com.dremio.exec.rpc.MessageDecoder;
import com.dremio.exec.rpc.Response;
import com.dremio.exec.rpc.RpcConnectionHandler;
import com.dremio.exec.rpc.RpcException;
import com.dremio.exec.rpc.RpcFuture;
import com.dremio.ssl.SSLConfig;
import com.dremio.ssl.SSLEngineFactory;
import com.google.common.collect.Sets;
import com.google.protobuf.MessageLite;
import io.netty.buffer.ByteBuf;
import io.netty.channel.EventLoopGroup;
public class UserClient extends BasicClientWithConnection<RpcType, UserToBitHandshake, BitToUserHandshake> {
private static final org.slf4j.Logger logger = org.slf4j.LoggerFactory.getLogger(UserClient.class);
private final QueryResultHandler queryResultHandler = new QueryResultHandler();
private final boolean supportComplexTypes;
private final String clientName;
private volatile RpcEndpointInfos serverInfos = null;
private volatile Set<RpcType> supportedMethods = null;
public UserClient(String clientName, SabotConfig config, boolean supportComplexTypes, BufferAllocator alloc,
EventLoopGroup eventLoopGroup, Executor eventExecutor, Optional<SSLConfig> sslConfig) throws RpcException {
super(
UserRpcConfig.getMapping(config, eventExecutor, sslConfig),
alloc,
eventLoopGroup,
RpcType.HANDSHAKE,
BitToUserHandshake.class,
BitToUserHandshake.PARSER,
"user client",
newSSLEngineFactory(sslConfig)
);
this.clientName = checkNotNull(clientName);
this.supportComplexTypes = supportComplexTypes;
}
private static Optional<SSLEngineFactory> newSSLEngineFactory(Optional<SSLConfig> sslConfig) throws RpcException {
try {
return SSLEngineFactory.create(sslConfig);
} catch (SSLException e) {
throw new RpcException(e);
}
}
public RpcEndpointInfos getServerInfos() {
return serverInfos;
}
public Set<RpcType> getSupportedMethods() {
return supportedMethods;
}
public void submitQuery(UserResultsListener resultsListener, RunQuery query) {
send(queryResultHandler.getWrappedListener(resultsListener), RpcType.RUN_QUERY, query, QueryId.class);
}
public void connect(RpcConnectionHandler<ServerConnection> handler, NodeEndpoint endpoint,
UserProperties props, UserBitShared.UserCredentials credentials) {
UserToBitHandshake.Builder hsBuilder = UserToBitHandshake.newBuilder()
.setRpcVersion(UserRpcConfig.RPC_VERSION)
.setSupportListening(true)
.setSupportComplexTypes(supportComplexTypes)
.setSupportTimeout(true)
.setCredentials(credentials)
.setRecordBatchType(RecordBatchType.DREMIO)
.addSupportedRecordBatchFormats(RecordBatchFormat.DREMIO_1_4)
.addSupportedRecordBatchFormats(RecordBatchFormat.DREMIO_0_9)
.setClientInfos(UserRpcUtils.getRpcEndpointInfos(clientName));
if (props != null) {
hsBuilder.setProperties(props);
}
this.connectAsClient(queryResultHandler.getWrappedConnectionHandler(handler),
hsBuilder.build(), endpoint.getAddress(), endpoint.getUserPort());
}
@Override
protected MessageLite getResponseDefaultInstance(int rpcType) throws RpcException {
switch (rpcType) {
case RpcType.ACK_VALUE:
return Ack.getDefaultInstance();
case RpcType.HANDSHAKE_VALUE:
return BitToUserHandshake.getDefaultInstance();
case RpcType.QUERY_HANDLE_VALUE:
return QueryId.getDefaultInstance();
case RpcType.QUERY_RESULT_VALUE:
return QueryResult.getDefaultInstance();
case RpcType.QUERY_DATA_VALUE:
return QueryData.getDefaultInstance();
case RpcType.QUERY_PLAN_FRAGMENTS_VALUE:
return QueryPlanFragments.getDefaultInstance();
case RpcType.CATALOGS_VALUE:
return GetCatalogsResp.getDefaultInstance();
case RpcType.SCHEMAS_VALUE:
return GetSchemasResp.getDefaultInstance();
case RpcType.TABLES_VALUE:
return GetTablesResp.getDefaultInstance();
case RpcType.COLUMNS_VALUE:
return GetColumnsResp.getDefaultInstance();
case RpcType.PREPARED_STATEMENT_VALUE:
return CreatePreparedStatementResp.getDefaultInstance();
case RpcType.SERVER_META_VALUE:
return GetServerMetaResp.getDefaultInstance();
}
throw new RpcException(String.format("Unable to deal with RpcType of %d", rpcType));
}
@Override
protected Response handleReponse(ConnectionThrottle throttle, int rpcType, byte[] pBody, ByteBuf dBody) throws RpcException {
switch (rpcType) {
case RpcType.QUERY_DATA_VALUE:
queryResultHandler.batchArrived(throttle, pBody, dBody);
return new Response(RpcType.ACK, Acks.OK);
case RpcType.QUERY_RESULT_VALUE:
queryResultHandler.resultArrived(pBody);
return new Response(RpcType.ACK, Acks.OK);
default:
throw new RpcException(String.format("Unknown Rpc Type %d. ", rpcType));
}
}
@Override
protected void validateHandshake(BitToUserHandshake inbound) throws RpcException {
// logger.debug("Handling handshake from bit to user. {}", inbound);
if (inbound.getStatus() != HandshakeStatus.SUCCESS) {
final String errMsg = String.format("Status: %s, Error Id: %s, Error message: %s",
inbound.getStatus(), inbound.getErrorId(), inbound.getErrorMessage());
logger.error(errMsg);
throw new RpcException(errMsg, inbound.getStatus().toString(), inbound.getErrorId());
}
// Successful connection...
if (inbound.hasServerInfos()) {
serverInfos = inbound.getServerInfos();
}
supportedMethods = Sets.immutableEnumSet(inbound.getSupportedMethodsList());
// Older servers don't return record batch format: assume pre-1.4 servers
RecordBatchFormat recordBatchFormat = inbound.hasRecordBatchFormat() ? inbound.getRecordBatchFormat() : RecordBatchFormat.DREMIO_0_9;
switch(recordBatchFormat) {
case DREMIO_1_4:
break;
case DREMIO_0_9:
{
/*
* From Dremio 1.4 onwards we have moved to Little Endian Decimal format. We need to
* add a new decoder in the netty pipeline when talking to old (1.3 and less) Dremio
* servers.
*/
final BufferAllocator bcAllocator = connection.getAllocator()
.newChildAllocator("dremio09-backward", 0, Long.MAX_VALUE);
logger.debug("Adding dremio 09 backwards compatibility decoder");
connection.getChannel()
.pipeline()
.addAfter(PROTOCOL_DECODER, UserRpcUtils.DREMIO09_COMPATIBILITY_ENCODER,
new BackwardsCompatibilityDecoder(bcAllocator, new Dremio09BackwardCompatibilityHandler(bcAllocator)));
}
break;
case UNKNOWN:
default:
throw new RpcException("Unsupported record batch format: " + recordBatchFormat);
}
}
/**
 * Hook invoked once the handshake has been validated. Intentionally a no-op here: all
 * connection setup this client needs (including the backwards-compatibility decoder) is done
 * in {@code validateHandshake}.
 */
@Override
protected void finalizeConnection(BitToUserHandshake handshake, BasicClientWithConnection.ServerConnection connection) {
}
/**
 * Creates the message decoder used to frame inbound user-protocol messages.
 *
 * @param allocator the allocator backing buffers created by the decoder
 * @return a new {@link UserProtobufLengthDecoder} bound to {@code allocator}
 */
@Override
public MessageDecoder newDecoder(BufferAllocator allocator) {
  final UserProtobufLengthDecoder lengthDecoder = new UserProtobufLengthDecoder(allocator);
  return lengthDecoder;
}
/**
 * Plans a query without executing it.
 *
 * @param req the data necessary to plan the query
 * @return a future resolving to the {@code QueryPlanFragments} that can later be submitted for
 *     execution
 */
public RpcFuture<QueryPlanFragments> planQuery(GetQueryPlanFragments req) {
  return send(RpcType.GET_QUERY_PLAN_FRAGMENTS, req, QueryPlanFragments.class);
}
}
| |
/*
* Copyright (C) 2017 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.android.exoplayer2.castdemo;
import android.content.Context;
import android.net.Uri;
import android.view.KeyEvent;
import android.view.View;
import com.google.android.exoplayer2.C;
import com.google.android.exoplayer2.DefaultRenderersFactory;
import com.google.android.exoplayer2.ExoPlayerFactory;
import com.google.android.exoplayer2.Player;
import com.google.android.exoplayer2.Player.DefaultEventListener;
import com.google.android.exoplayer2.Player.DiscontinuityReason;
import com.google.android.exoplayer2.Player.TimelineChangeReason;
import com.google.android.exoplayer2.RenderersFactory;
import com.google.android.exoplayer2.SimpleExoPlayer;
import com.google.android.exoplayer2.Timeline;
import com.google.android.exoplayer2.Timeline.Period;
import com.google.android.exoplayer2.castdemo.DemoUtil.Sample;
import com.google.android.exoplayer2.ext.cast.CastPlayer;
import com.google.android.exoplayer2.source.ConcatenatingMediaSource;
import com.google.android.exoplayer2.source.ExtractorMediaSource;
import com.google.android.exoplayer2.source.MediaSource;
import com.google.android.exoplayer2.source.dash.DashMediaSource;
import com.google.android.exoplayer2.source.dash.DefaultDashChunkSource;
import com.google.android.exoplayer2.source.hls.HlsMediaSource;
import com.google.android.exoplayer2.source.smoothstreaming.DefaultSsChunkSource;
import com.google.android.exoplayer2.source.smoothstreaming.SsMediaSource;
import com.google.android.exoplayer2.trackselection.DefaultTrackSelector;
import com.google.android.exoplayer2.ui.PlayerControlView;
import com.google.android.exoplayer2.ui.PlayerView;
import com.google.android.exoplayer2.upstream.DefaultBandwidthMeter;
import com.google.android.exoplayer2.upstream.DefaultHttpDataSourceFactory;
import com.google.android.gms.cast.MediaInfo;
import com.google.android.gms.cast.MediaMetadata;
import com.google.android.gms.cast.MediaQueueItem;
import com.google.android.gms.cast.framework.CastContext;
import java.util.ArrayList;
/**
* Manages players and an internal media queue for the ExoPlayer/Cast demo app.
*/
/* package */ final class PlayerManager extends DefaultEventListener
    implements CastPlayer.SessionAvailabilityListener {

  /**
   * Listener for changes in the media queue playback position.
   */
  public interface QueuePositionListener {

    /**
     * Called when the currently played item of the media queue changes.
     *
     * @param previousIndex The index of the previously played item, or {@code C.INDEX_UNSET}.
     * @param newIndex The index of the newly played item, or {@code C.INDEX_UNSET}.
     */
    void onQueuePositionChanged(int previousIndex, int newIndex);
  }

  private static final String USER_AGENT = "ExoCastDemoPlayer";
  private static final DefaultBandwidthMeter BANDWIDTH_METER = new DefaultBandwidthMeter();
  private static final DefaultHttpDataSourceFactory DATA_SOURCE_FACTORY =
      new DefaultHttpDataSourceFactory(USER_AGENT, BANDWIDTH_METER);

  private final PlayerView localPlayerView;
  private final PlayerControlView castControlView;
  private final SimpleExoPlayer exoPlayer;
  private final CastPlayer castPlayer;
  // Local source of truth for the queue; the concatenating source and (when casting) the remote
  // cast queue are kept in sync with it.
  private final ArrayList<DemoUtil.Sample> mediaQueue;
  private final QueuePositionListener queuePositionListener;
  private final ConcatenatingMediaSource concatenatingMediaSource;

  // Set when the cast media queue must be (re)built before playback (player switch, or the cast
  // timeline became empty). Consumed in setCurrentItem.
  private boolean castMediaQueueCreationPending;
  private int currentItemIndex;
  private Player currentPlayer;

  /**
   * Creates and initializes a new {@link PlayerManager}.
   *
   * @param queuePositionListener A {@link QueuePositionListener} for queue position changes.
   * @param localPlayerView The {@link PlayerView} for local playback.
   * @param castControlView The {@link PlayerControlView} to control remote playback.
   * @param context A {@link Context}.
   * @param castContext The {@link CastContext}.
   */
  public static PlayerManager createPlayerManager(
      QueuePositionListener queuePositionListener,
      PlayerView localPlayerView,
      PlayerControlView castControlView,
      Context context,
      CastContext castContext) {
    PlayerManager playerManager =
        new PlayerManager(
            queuePositionListener, localPlayerView, castControlView, context, castContext);
    playerManager.init();
    return playerManager;
  }

  private PlayerManager(
      QueuePositionListener queuePositionListener,
      PlayerView localPlayerView,
      PlayerControlView castControlView,
      Context context,
      CastContext castContext) {
    this.queuePositionListener = queuePositionListener;
    this.localPlayerView = localPlayerView;
    this.castControlView = castControlView;
    mediaQueue = new ArrayList<>();
    currentItemIndex = C.INDEX_UNSET;
    concatenatingMediaSource = new ConcatenatingMediaSource();
    DefaultTrackSelector trackSelector = new DefaultTrackSelector(BANDWIDTH_METER);
    RenderersFactory renderersFactory = new DefaultRenderersFactory(context);
    exoPlayer = ExoPlayerFactory.newSimpleInstance(renderersFactory, trackSelector);
    exoPlayer.addListener(this);
    localPlayerView.setPlayer(exoPlayer);
    castPlayer = new CastPlayer(castContext);
    castPlayer.addListener(this);
    castPlayer.setSessionAvailabilityListener(this);
    castControlView.setPlayer(castPlayer);
  }

  // Queue manipulation methods.

  /**
   * Plays a specified queue item in the current player.
   *
   * @param itemIndex The index of the item to play.
   */
  public void selectQueueItem(int itemIndex) {
    setCurrentItem(itemIndex, C.TIME_UNSET, true);
  }

  /**
   * Returns the index of the currently played item, or {@code C.INDEX_UNSET} if none.
   */
  public int getCurrentItemIndex() {
    return currentItemIndex;
  }

  /**
   * Appends {@code sample} to the media queue.
   *
   * @param sample The {@link Sample} to append.
   */
  public void addItem(Sample sample) {
    mediaQueue.add(sample);
    concatenatingMediaSource.addMediaSource(buildMediaSource(sample));
    if (currentPlayer == castPlayer) {
      // Mirror the addition onto the remote cast queue.
      castPlayer.addItems(buildMediaQueueItem(sample));
    }
  }

  /**
   * Returns the size of the media queue.
   */
  public int getMediaQueueSize() {
    return mediaQueue.size();
  }

  /**
   * Returns the item at the given index in the media queue.
   *
   * @param position The index of the item.
   * @return The item at the given index in the media queue.
   */
  public Sample getItem(int position) {
    return mediaQueue.get(position);
  }

  /**
   * Removes the item at the given index from the media queue.
   *
   * @param itemIndex The index of the item to remove.
   * @return Whether the removal was successful.
   */
  public boolean removeItem(int itemIndex) {
    // FIX: perform the cast-side validation/removal BEFORE mutating any local state. Previously
    // the concatenating media source was modified first, so returning false here left the local
    // source out of sync with mediaQueue and the remote queue.
    if (currentPlayer == castPlayer && castPlayer.getPlaybackState() != Player.STATE_IDLE) {
      Timeline castTimeline = castPlayer.getCurrentTimeline();
      if (castTimeline.getPeriodCount() <= itemIndex) {
        // The remote timeline does not contain this item (yet); abort without touching state.
        return false;
      }
      castPlayer.removeItem((int) castTimeline.getPeriod(itemIndex, new Period()).id);
    }
    concatenatingMediaSource.removeMediaSource(itemIndex);
    mediaQueue.remove(itemIndex);
    if (itemIndex == currentItemIndex && itemIndex == mediaQueue.size()) {
      // Removed the last item while it was playing: nothing left to play.
      maybeSetCurrentItemAndNotify(C.INDEX_UNSET);
    } else if (itemIndex < currentItemIndex) {
      // An earlier item was removed: the current item shifted down by one.
      maybeSetCurrentItemAndNotify(currentItemIndex - 1);
    }
    return true;
  }

  /**
   * Moves an item within the queue.
   *
   * @param fromIndex The index of the item to move.
   * @param toIndex The target index of the item in the queue.
   * @return Whether the item move was successful.
   */
  public boolean moveItem(int fromIndex, int toIndex) {
    // Player update.
    concatenatingMediaSource.moveMediaSource(fromIndex, toIndex);
    if (currentPlayer == castPlayer && castPlayer.getPlaybackState() != Player.STATE_IDLE) {
      Timeline castTimeline = castPlayer.getCurrentTimeline();
      int periodCount = castTimeline.getPeriodCount();
      if (periodCount <= fromIndex || periodCount <= toIndex) {
        return false;
      }
      int elementId = (int) castTimeline.getPeriod(fromIndex, new Period()).id;
      castPlayer.moveItem(elementId, toIndex);
    }
    mediaQueue.add(toIndex, mediaQueue.remove(fromIndex));
    // Index update: keep currentItemIndex pointing at the same logical item.
    if (fromIndex == currentItemIndex) {
      maybeSetCurrentItemAndNotify(toIndex);
    } else if (fromIndex < currentItemIndex && toIndex >= currentItemIndex) {
      maybeSetCurrentItemAndNotify(currentItemIndex - 1);
    } else if (fromIndex > currentItemIndex && toIndex <= currentItemIndex) {
      maybeSetCurrentItemAndNotify(currentItemIndex + 1);
    }
    return true;
  }

  // Miscellaneous methods.

  /**
   * Dispatches a given {@link KeyEvent} to the corresponding view of the current player.
   *
   * @param event The {@link KeyEvent}.
   * @return Whether the event was handled by the target view.
   */
  public boolean dispatchKeyEvent(KeyEvent event) {
    if (currentPlayer == exoPlayer) {
      return localPlayerView.dispatchKeyEvent(event);
    } else /* currentPlayer == castPlayer */ {
      return castControlView.dispatchKeyEvent(event);
    }
  }

  /**
   * Releases the manager and the players that it holds.
   */
  public void release() {
    currentItemIndex = C.INDEX_UNSET;
    mediaQueue.clear();
    concatenatingMediaSource.clear();
    // Detach the session listener before releasing to avoid callbacks during teardown.
    castPlayer.setSessionAvailabilityListener(null);
    castPlayer.release();
    localPlayerView.setPlayer(null);
    exoPlayer.release();
  }

  // Player.EventListener implementation.

  @Override
  public void onPlayerStateChanged(boolean playWhenReady, int playbackState) {
    updateCurrentItemIndex();
  }

  @Override
  public void onPositionDiscontinuity(@DiscontinuityReason int reason) {
    updateCurrentItemIndex();
  }

  @Override
  public void onTimelineChanged(
      Timeline timeline, Object manifest, @TimelineChangeReason int reason) {
    updateCurrentItemIndex();
    if (timeline.isEmpty()) {
      // The media queue must be rebuilt on the cast player before playback can resume there.
      castMediaQueueCreationPending = true;
    }
  }

  // CastPlayer.SessionAvailabilityListener implementation.

  @Override
  public void onCastSessionAvailable() {
    setCurrentPlayer(castPlayer);
  }

  @Override
  public void onCastSessionUnavailable() {
    setCurrentPlayer(exoPlayer);
  }

  // Internal methods.

  private void init() {
    // Start on the cast player if a session is already up, otherwise play locally.
    setCurrentPlayer(castPlayer.isCastSessionAvailable() ? castPlayer : exoPlayer);
  }

  private void updateCurrentItemIndex() {
    int playbackState = currentPlayer.getPlaybackState();
    maybeSetCurrentItemAndNotify(
        playbackState != Player.STATE_IDLE && playbackState != Player.STATE_ENDED
            ? currentPlayer.getCurrentWindowIndex() : C.INDEX_UNSET);
  }

  /**
   * Switches playback to {@code currentPlayer}, transferring position, window index and
   * play-when-ready state from the previous player where applicable.
   */
  private void setCurrentPlayer(Player currentPlayer) {
    if (this.currentPlayer == currentPlayer) {
      return;
    }
    // View management.
    if (currentPlayer == exoPlayer) {
      localPlayerView.setVisibility(View.VISIBLE);
      castControlView.hide();
    } else /* currentPlayer == castPlayer */ {
      localPlayerView.setVisibility(View.GONE);
      castControlView.show();
    }
    // Player state management: capture the old player's state BEFORE stopping it.
    long playbackPositionMs = C.TIME_UNSET;
    int windowIndex = C.INDEX_UNSET;
    boolean playWhenReady = false;
    if (this.currentPlayer != null) {
      int playbackState = this.currentPlayer.getPlaybackState();
      if (playbackState != Player.STATE_ENDED) {
        playbackPositionMs = this.currentPlayer.getCurrentPosition();
        playWhenReady = this.currentPlayer.getPlayWhenReady();
        windowIndex = this.currentPlayer.getCurrentWindowIndex();
        if (windowIndex != currentItemIndex) {
          // The old player was on a different item than the tracked one; restart the tracked
          // item from its beginning rather than resuming a mismatched position.
          playbackPositionMs = C.TIME_UNSET;
          windowIndex = currentItemIndex;
        }
      }
      this.currentPlayer.stop(true);
    } else {
      // This is the initial setup. No need to save any state.
    }
    this.currentPlayer = currentPlayer;
    // Media queue management.
    castMediaQueueCreationPending = currentPlayer == castPlayer;
    if (currentPlayer == exoPlayer) {
      exoPlayer.prepare(concatenatingMediaSource);
    }
    // Playback transition.
    if (windowIndex != C.INDEX_UNSET) {
      setCurrentItem(windowIndex, playbackPositionMs, playWhenReady);
    }
  }

  /**
   * Starts playback of the item at the given position.
   *
   * @param itemIndex The index of the item to play.
   * @param positionMs The position at which playback should start.
   * @param playWhenReady Whether the player should proceed when ready to do so.
   */
  private void setCurrentItem(int itemIndex, long positionMs, boolean playWhenReady) {
    maybeSetCurrentItemAndNotify(itemIndex);
    if (castMediaQueueCreationPending) {
      // Rebuild the entire remote queue from the local one, then start at itemIndex.
      MediaQueueItem[] items = new MediaQueueItem[mediaQueue.size()];
      for (int i = 0; i < items.length; i++) {
        items[i] = buildMediaQueueItem(mediaQueue.get(i));
      }
      castMediaQueueCreationPending = false;
      castPlayer.loadItems(items, itemIndex, positionMs, Player.REPEAT_MODE_OFF);
    } else {
      currentPlayer.seekTo(itemIndex, positionMs);
      currentPlayer.setPlayWhenReady(playWhenReady);
    }
  }

  /** Updates {@code currentItemIndex} and notifies the listener if the index changed. */
  private void maybeSetCurrentItemAndNotify(int currentItemIndex) {
    if (this.currentItemIndex != currentItemIndex) {
      int oldIndex = this.currentItemIndex;
      this.currentItemIndex = currentItemIndex;
      queuePositionListener.onQueuePositionChanged(oldIndex, currentItemIndex);
    }
  }

  /** Builds the local {@link MediaSource} for {@code sample} based on its mime type. */
  private static MediaSource buildMediaSource(DemoUtil.Sample sample) {
    Uri uri = Uri.parse(sample.uri);
    switch (sample.mimeType) {
      case DemoUtil.MIME_TYPE_SS:
        return new SsMediaSource.Factory(
            new DefaultSsChunkSource.Factory(DATA_SOURCE_FACTORY), DATA_SOURCE_FACTORY)
            .createMediaSource(uri);
      case DemoUtil.MIME_TYPE_DASH:
        return new DashMediaSource.Factory(
            new DefaultDashChunkSource.Factory(DATA_SOURCE_FACTORY), DATA_SOURCE_FACTORY)
            .createMediaSource(uri);
      case DemoUtil.MIME_TYPE_HLS:
        return new HlsMediaSource.Factory(DATA_SOURCE_FACTORY).createMediaSource(uri);
      case DemoUtil.MIME_TYPE_VIDEO_MP4:
        return new ExtractorMediaSource.Factory(DATA_SOURCE_FACTORY).createMediaSource(uri);
      default: {
        throw new IllegalStateException("Unsupported type: " + sample.mimeType);
      }
    }
  }

  /** Builds the remote {@link MediaQueueItem} representation of {@code sample} for casting. */
  private static MediaQueueItem buildMediaQueueItem(DemoUtil.Sample sample) {
    MediaMetadata movieMetadata = new MediaMetadata(MediaMetadata.MEDIA_TYPE_MOVIE);
    movieMetadata.putString(MediaMetadata.KEY_TITLE, sample.name);
    MediaInfo mediaInfo = new MediaInfo.Builder(sample.uri)
        .setStreamType(MediaInfo.STREAM_TYPE_BUFFERED).setContentType(sample.mimeType)
        .setMetadata(movieMetadata).build();
    return new MediaQueueItem.Builder(mediaInfo).build();
  }
}
| |
/*
* Copyright (c) Facebook, Inc. and its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.buck.parser;
import com.facebook.buck.core.cell.Cell;
import com.facebook.buck.core.exceptions.DependencyStack;
import com.facebook.buck.core.exceptions.HumanReadableException;
import com.facebook.buck.core.filesystems.AbsPath;
import com.facebook.buck.core.model.BuildTarget;
import com.facebook.buck.core.model.TargetConfiguration;
import com.facebook.buck.core.model.targetgraph.TargetGraph;
import com.facebook.buck.core.model.targetgraph.TargetGraphCreationResult;
import com.facebook.buck.core.model.targetgraph.TargetNode;
import com.facebook.buck.core.model.targetgraph.TargetNodeMaybeIncompatible;
import com.facebook.buck.core.util.graph.AcyclicDepthFirstPostOrderTraversalWithPayloadAndDependencyStack;
import com.facebook.buck.core.util.graph.CycleException;
import com.facebook.buck.core.util.graph.GraphTraversableWithPayloadAndDependencyStack;
import com.facebook.buck.core.util.graph.MutableDirectedGraph;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.parser.api.BuildFileManifest;
import com.facebook.buck.parser.exceptions.BuildFileParseException;
import com.facebook.buck.parser.exceptions.BuildTargetException;
import com.facebook.buck.parser.spec.TargetNodeSpec;
import com.facebook.buck.parser.temporarytargetuniquenesschecker.TemporaryUnconfiguredTargetToTargetUniquenessChecker;
import com.facebook.buck.util.MoreMaps;
import com.facebook.buck.util.types.Pair;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Throwables;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.util.concurrent.ListenableFuture;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.SortedMap;
import java.util.concurrent.atomic.AtomicLong;
import javax.annotation.Nullable;
/**
* Evaluates build files using one of the supported interpreters and provides information about
* build targets defined in them.
*
* <p>Computed targets are cached but are automatically invalidated if Watchman reports any
* filesystem changes that may affect computed results.
*/
abstract class AbstractParser implements Parser {

  protected final PerBuildStateFactory perBuildStateFactory;
  // Long-lived parser cache shared across builds.
  protected final DaemonicParserState permState;
  protected final BuckEventBus eventBus;
  // Forwarded to the unconfigured-target uniqueness checker in buildTargetGraph.
  private final boolean buckOutIncludeTargetConfigHash;

  AbstractParser(
      DaemonicParserState daemonicParserState,
      PerBuildStateFactory perBuildStateFactory,
      BuckEventBus eventBus,
      boolean buckOutIncludeTargetConfigHash) {
    this.perBuildStateFactory = perBuildStateFactory;
    this.permState = daemonicParserState;
    this.eventBus = eventBus;
    this.buckOutIncludeTargetConfigHash = buckOutIncludeTargetConfigHash;
  }

  @Override
  public DaemonicParserState getPermState() {
    return permState;
  }

  @Override
  public PerBuildStateFactory getPerBuildStateFactory() {
    return perBuildStateFactory;
  }

  /** Returns the raw manifest produced by parsing {@code buildFile} in {@code cell}. */
  @VisibleForTesting
  static BuildFileManifest getTargetNodeRawAttributes(
      PerBuildState state, Cell cell, AbsPath buildFile) throws BuildFileParseException {
    return state.getBuildFileManifest(cell, buildFile);
  }

  @Override
  public ImmutableList<TargetNodeMaybeIncompatible> getAllTargetNodes(
      PerBuildState perBuildState,
      Cell cell,
      AbsPath buildFile,
      Optional<TargetConfiguration> targetConfiguration)
      throws BuildFileParseException {
    return perBuildState.getAllTargetNodes(cell, buildFile, targetConfiguration);
  }

  @Override
  public TargetNode<?> getTargetNodeAssertCompatible(
      ParsingContext parsingContext, BuildTarget target, DependencyStack dependencyStack)
      throws BuildFileParseException {
    // Creates (and closes) a fresh PerBuildState per call; prefer the PerBuildState overload
    // below when calling in a loop.
    try (PerBuildState state = perBuildStateFactory.create(parsingContext, permState)) {
      return state.getTargetNodeAssertCompatible(target, dependencyStack);
    }
  }

  @Override
  public TargetNode<?> getTargetNodeAssertCompatible(
      PerBuildState perBuildState, BuildTarget target, DependencyStack dependencyStack)
      throws BuildFileParseException {
    return perBuildState.getTargetNodeAssertCompatible(target, dependencyStack);
  }

  @Override
  public ListenableFuture<TargetNode<?>> getTargetNodeJobAssertCompatible(
      PerBuildState perBuildState, BuildTarget target, DependencyStack dependencyStack)
      throws BuildTargetException {
    return perBuildState.getTargetNodeJobAssertCompatible(target, dependencyStack);
  }

  /**
   * @deprecated Prefer {@link Parser#getTargetNodeRawAttributes(PerBuildState, Cell, TargetNode,
   *     DependencyStack)} and reusing a PerBuildState instance, especially when calling in a loop.
   */
  @Nullable
  @Deprecated
  @Override
  public SortedMap<String, Object> getTargetNodeRawAttributes(
      ParsingContext parsingContext, TargetNode<?> targetNode, DependencyStack dependencyStack)
      throws BuildFileParseException {
    try (PerBuildState state = perBuildStateFactory.create(parsingContext, permState)) {
      return getTargetNodeRawAttributes(
          state, parsingContext.getCell(), targetNode, dependencyStack);
    }
  }

  /**
   * Rethrows well-known checked causes wrapped inside a RuntimeException (the graph traversal
   * callback below wraps BuildFileParseException this way); returns the original exception for
   * the caller to rethrow if no known cause is found.
   */
  private RuntimeException propagateRuntimeCause(RuntimeException e)
      throws IOException, InterruptedException, BuildFileParseException {
    Throwables.throwIfInstanceOf(e, HumanReadableException.class);
    Throwable t = e.getCause();
    if (t != null) {
      Throwables.throwIfInstanceOf(t, IOException.class);
      Throwables.throwIfInstanceOf(t, InterruptedException.class);
      Throwables.throwIfInstanceOf(t, BuildFileParseException.class);
      Throwables.throwIfInstanceOf(t, BuildTargetException.class);
    }
    return e;
  }

  @Override
  public TargetGraphCreationResult buildTargetGraph(
      ParsingContext parsingContext, ImmutableSet<BuildTarget> toExplore)
      throws IOException, InterruptedException, BuildFileParseException {
    AtomicLong processedBytes = new AtomicLong();
    try (PerBuildState state =
        perBuildStateFactory.create(parsingContext, permState, processedBytes)) {
      return buildTargetGraph(state, toExplore, processedBytes);
    }
  }

  /**
   * Builds the target graph rooted at {@code toExplore} via an acyclic depth-first post-order
   * traversal, posting {@code ParseEvent} started/finished events on the event bus.
   */
  private TargetGraphCreationResult buildTargetGraph(
      PerBuildState state, ImmutableSet<BuildTarget> toExplore, AtomicLong processedBytes)
      throws IOException, InterruptedException, BuildFileParseException {
    if (toExplore.isEmpty()) {
      return TargetGraphCreationResult.of(TargetGraph.EMPTY, toExplore);
    }
    MutableDirectedGraph<TargetNode<?>> graph = new MutableDirectedGraph<>();
    Map<BuildTarget, TargetNode<?>> index = new HashMap<>();
    TemporaryUnconfiguredTargetToTargetUniquenessChecker checker =
        TemporaryUnconfiguredTargetToTargetUniquenessChecker.create(buckOutIncludeTargetConfigHash);
    ParseEvent.Started parseStart = ParseEvent.started(toExplore);
    eventBus.post(parseStart);
    GraphTraversableWithPayloadAndDependencyStack<BuildTarget, TargetNode<?>> traversable =
        (target, dependencyStack) -> {
          TargetNode<?> node;
          try {
            TargetNodeMaybeIncompatible nodeMaybe = state.getTargetNode(target, dependencyStack);
            node = assertTargetIsCompatible(state, nodeMaybe, dependencyStack);
          } catch (BuildFileParseException e) {
            // Checked exceptions cannot escape the traversable lambda; they are unwrapped again
            // in propagateRuntimeCause.
            throw new RuntimeException(e);
          } catch (HumanReadableException e) {
            eventBus.post(ParseEvent.finished(parseStart, processedBytes.get(), Optional.empty()));
            throw e;
          }
          // this second lookup loop may *seem* pointless, but it allows us to report which node is
          // referring to a node we can't find - something that's very difficult in this Traversable
          // visitor pattern otherwise.
          // it's also work we need to do anyways. the getTargetNode() result is cached, so that
          // when we come around and re-visit that node there won't actually be any work performed.
          for (BuildTarget dep : node.getTotalDeps()) {
            try {
              state.getTargetNode(dep, dependencyStack.child(dep));
            } catch (BuildFileParseException e) {
              throw ParserMessages.createReadableExceptionWithWhenSuffix(target, dep, e);
            } catch (HumanReadableException e) {
              if (e.getDependencyStack().isEmpty()) {
                // we don't have a proper stack, use simple message as fallback
                throw ParserMessages.createReadableExceptionWithWhenSuffix(target, dep, e);
              } else {
                throw e;
              }
            }
          }
          return new Pair<>(node, node.getTotalDeps().iterator());
        };
    AcyclicDepthFirstPostOrderTraversalWithPayloadAndDependencyStack<BuildTarget, TargetNode<?>>
        targetNodeTraversal =
            new AcyclicDepthFirstPostOrderTraversalWithPayloadAndDependencyStack<>(
                traversable, DependencyStack::child);
    TargetGraph targetGraph = null;
    try {
      for (Map.Entry<BuildTarget, Pair<TargetNode<?>, DependencyStack>> targetAndNode :
          targetNodeTraversal.traverse(toExplore).entrySet()) {
        BuildTarget target = targetAndNode.getKey();
        TargetNode<?> targetNode = targetAndNode.getValue().getFirst();
        DependencyStack dependencyStack = targetAndNode.getValue().getSecond();
        graph.addNode(targetNode);
        MoreMaps.putCheckEquals(index, target, targetNode);
        checker.addTarget(target, dependencyStack);
        if (target.isFlavored()) {
          // Also index the unflavored form so lookups by base target succeed.
          BuildTarget unflavoredTarget = target.withoutFlavors();
          MoreMaps.putCheckEquals(
              index,
              unflavoredTarget,
              state.getTargetNodeAssertCompatible(unflavoredTarget, dependencyStack));
          // NOTE: do not use the uniqueness checker for the unflavored target
          // because `target.withoutFlavors()` does not switch unconfigured target
        }
        for (BuildTarget dep : targetNode.getParseDeps()) {
          graph.addEdge(
              targetNode, state.getTargetNodeAssertCompatible(dep, dependencyStack.child(dep)));
        }
      }
      targetGraph = new TargetGraph(graph, ImmutableMap.copyOf(index));
      return TargetGraphCreationResult.of(targetGraph, toExplore);
    } catch (CycleException e) {
      throw new HumanReadableException(e.getMessage());
    } catch (RuntimeException e) {
      throw propagateRuntimeCause(e);
    } finally {
      // Always post the finished event; targetGraph is null if graph construction failed.
      eventBus.post(
          ParseEvent.finished(parseStart, processedBytes.get(), Optional.ofNullable(targetGraph)));
    }
  }

  @Override
  public synchronized TargetGraphCreationResult buildTargetGraphWithoutTopLevelConfigurationTargets(
      ParsingContext parsingContext,
      Iterable<? extends TargetNodeSpec> targetNodeSpecs,
      Optional<TargetConfiguration> targetConfiguration)
      throws BuildFileParseException, IOException, InterruptedException {
    return buildTargetGraphForTargetNodeSpecs(
        parsingContext, targetNodeSpecs, targetConfiguration, true);
  }

  @Override
  public synchronized TargetGraphCreationResult buildTargetGraphWithTopLevelConfigurationTargets(
      ParsingContext parsingContext,
      Iterable<? extends TargetNodeSpec> targetNodeSpecs,
      Optional<TargetConfiguration> targetConfiguration)
      throws BuildFileParseException, IOException, InterruptedException {
    return buildTargetGraphForTargetNodeSpecs(
        parsingContext, targetNodeSpecs, targetConfiguration, false);
  }

  /**
   * Resolves {@code targetNodeSpecs} to concrete build targets and builds their target graph,
   * optionally excluding top-level configuration targets.
   */
  private synchronized TargetGraphCreationResult buildTargetGraphForTargetNodeSpecs(
      ParsingContext parsingContext,
      Iterable<? extends TargetNodeSpec> targetNodeSpecs,
      Optional<TargetConfiguration> targetConfiguration,
      boolean excludeConfigurationTargets)
      throws BuildFileParseException, IOException, InterruptedException {
    AtomicLong processedBytes = new AtomicLong();
    try (PerBuildState state =
        perBuildStateFactory.create(parsingContext, permState, processedBytes)) {
      ImmutableSet<BuildTarget> buildTargets =
          collectBuildTargetsFromTargetNodeSpecs(
              parsingContext,
              state,
              targetNodeSpecs,
              targetConfiguration,
              excludeConfigurationTargets);
      return buildTargetGraph(state, buildTargets, processedBytes);
    }
  }

  /** Resolves the given specs to the concrete set of build targets to parse. */
  protected abstract ImmutableSet<BuildTarget> collectBuildTargetsFromTargetNodeSpecs(
      ParsingContext parsingContext,
      PerBuildState state,
      Iterable<? extends TargetNodeSpec> targetNodeSpecs,
      Optional<TargetConfiguration> targetConfiguration,
      boolean excludeConfigurationTargets)
      throws InterruptedException;

  /**
   * Verifies that the provided target node is compatible with the target platform.
   *
   * @throws com.facebook.buck.core.exceptions.HumanReadableException if the target not is not
   *     compatible with the target platform.
   */
  protected abstract TargetNode<?> assertTargetIsCompatible(
      PerBuildState state, TargetNodeMaybeIncompatible targetNode, DependencyStack dependencyStack);

  @Override
  public String toString() {
    return permState.toString();
  }
}
| |
/**
*
* Apache License
* Version 2.0, January 2004
* http://www.apache.org/licenses/
*
* TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
*
* 1. Definitions.
*
* "License" shall mean the terms and conditions for use, reproduction,
* and distribution as defined by Sections 1 through 9 of this document.
*
* "Licensor" shall mean the copyright owner or entity authorized by
* the copyright owner that is granting the License.
*
* "Legal Entity" shall mean the union of the acting entity and all
* other entities that control, are controlled by, or are under common
* control with that entity. For the purposes of this definition,
* "control" means (i) the power, direct or indirect, to cause the
* direction or management of such entity, whether by contract or
* otherwise, or (ii) ownership of fifty percent (50%) or more of the
* outstanding shares, or (iii) beneficial ownership of such entity.
*
* "You" (or "Your") shall mean an individual or Legal Entity
* exercising permissions granted by this License.
*
* "Source" form shall mean the preferred form for making modifications,
* including but not limited to software source code, documentation
* source, and configuration files.
*
* "Object" form shall mean any form resulting from mechanical
* transformation or translation of a Source form, including but
* not limited to compiled object code, generated documentation,
* and conversions to other media types.
*
* "Work" shall mean the work of authorship, whether in Source or
* Object form, made available under the License, as indicated by a
* copyright notice that is included in or attached to the work
* (an example is provided in the Appendix below).
*
* "Derivative Works" shall mean any work, whether in Source or Object
* form, that is based on (or derived from) the Work and for which the
* editorial revisions, annotations, elaborations, or other modifications
* represent, as a whole, an original work of authorship. For the purposes
* of this License, Derivative Works shall not include works that remain
* separable from, or merely link (or bind by name) to the interfaces of,
* the Work and Derivative Works thereof.
*
* "Contribution" shall mean any work of authorship, including
* the original version of the Work and any modifications or additions
* to that Work or Derivative Works thereof, that is intentionally
* submitted to Licensor for inclusion in the Work by the copyright owner
* or by an individual or Legal Entity authorized to submit on behalf of
* the copyright owner. For the purposes of this definition, "submitted"
* means any form of electronic, verbal, or written communication sent
* to the Licensor or its representatives, including but not limited to
* communication on electronic mailing lists, source code control systems,
* and issue tracking systems that are managed by, or on behalf of, the
* Licensor for the purpose of discussing and improving the Work, but
* excluding communication that is conspicuously marked or otherwise
* designated in writing by the copyright owner as "Not a Contribution."
*
* "Contributor" shall mean Licensor and any individual or Legal Entity
* on behalf of whom a Contribution has been received by Licensor and
* subsequently incorporated within the Work.
*
* 2. Grant of Copyright License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* copyright license to reproduce, prepare Derivative Works of,
* publicly display, publicly perform, sublicense, and distribute the
* Work and such Derivative Works in Source or Object form.
*
* 3. Grant of Patent License. Subject to the terms and conditions of
* this License, each Contributor hereby grants to You a perpetual,
* worldwide, non-exclusive, no-charge, royalty-free, irrevocable
* (except as stated in this section) patent license to make, have made,
* use, offer to sell, sell, import, and otherwise transfer the Work,
* where such license applies only to those patent claims licensable
* by such Contributor that are necessarily infringed by their
* Contribution(s) alone or by combination of their Contribution(s)
* with the Work to which such Contribution(s) was submitted. If You
* institute patent litigation against any entity (including a
* cross-claim or counterclaim in a lawsuit) alleging that the Work
* or a Contribution incorporated within the Work constitutes direct
* or contributory patent infringement, then any patent licenses
* granted to You under this License for that Work shall terminate
* as of the date such litigation is filed.
*
* 4. Redistribution. You may reproduce and distribute copies of the
* Work or Derivative Works thereof in any medium, with or without
* modifications, and in Source or Object form, provided that You
* meet the following conditions:
*
* (a) You must give any other recipients of the Work or
* Derivative Works a copy of this License; and
*
* (b) You must cause any modified files to carry prominent notices
* stating that You changed the files; and
*
* (c) You must retain, in the Source form of any Derivative Works
* that You distribute, all copyright, patent, trademark, and
* attribution notices from the Source form of the Work,
* excluding those notices that do not pertain to any part of
* the Derivative Works; and
*
* (d) If the Work includes a "NOTICE" text file as part of its
* distribution, then any Derivative Works that You distribute must
* include a readable copy of the attribution notices contained
* within such NOTICE file, excluding those notices that do not
* pertain to any part of the Derivative Works, in at least one
* of the following places: within a NOTICE text file distributed
* as part of the Derivative Works; within the Source form or
* documentation, if provided along with the Derivative Works; or,
* within a display generated by the Derivative Works, if and
* wherever such third-party notices normally appear. The contents
* of the NOTICE file are for informational purposes only and
* do not modify the License. You may add Your own attribution
* notices within Derivative Works that You distribute, alongside
* or as an addendum to the NOTICE text from the Work, provided
* that such additional attribution notices cannot be construed
* as modifying the License.
*
* You may add Your own copyright statement to Your modifications and
* may provide additional or different license terms and conditions
* for use, reproduction, or distribution of Your modifications, or
* for any such Derivative Works as a whole, provided Your use,
* reproduction, and distribution of the Work otherwise complies with
* the conditions stated in this License.
*
* 5. Submission of Contributions. Unless You explicitly state otherwise,
* any Contribution intentionally submitted for inclusion in the Work
* by You to the Licensor shall be under the terms and conditions of
* this License, without any additional terms or conditions.
* Notwithstanding the above, nothing herein shall supersede or modify
* the terms of any separate license agreement you may have executed
* with Licensor regarding such Contributions.
*
* 6. Trademarks. This License does not grant permission to use the trade
* names, trademarks, service marks, or product names of the Licensor,
* except as required for reasonable and customary use in describing the
* origin of the Work and reproducing the content of the NOTICE file.
*
* 7. Disclaimer of Warranty. Unless required by applicable law or
* agreed to in writing, Licensor provides the Work (and each
* Contributor provides its Contributions) on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied, including, without limitation, any warranties or conditions
* of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
* PARTICULAR PURPOSE. You are solely responsible for determining the
* appropriateness of using or redistributing the Work and assume any
* risks associated with Your exercise of permissions under this License.
*
* 8. Limitation of Liability. In no event and under no legal theory,
* whether in tort (including negligence), contract, or otherwise,
* unless required by applicable law (such as deliberate and grossly
* negligent acts) or agreed to in writing, shall any Contributor be
* liable to You for damages, including any direct, indirect, special,
* incidental, or consequential damages of any character arising as a
* result of this License or out of the use or inability to use the
* Work (including but not limited to damages for loss of goodwill,
* work stoppage, computer failure or malfunction, or any and all
* other commercial damages or losses), even if such Contributor
* has been advised of the possibility of such damages.
*
* 9. Accepting Warranty or Additional Liability. While redistributing
* the Work or Derivative Works thereof, You may choose to offer,
* and charge a fee for, acceptance of support, warranty, indemnity,
* or other liability obligations and/or rights consistent with this
* License. However, in accepting such obligations, You may act only
* on Your own behalf and on Your sole responsibility, not on behalf
* of any other Contributor, and only if You agree to indemnify,
* defend, and hold each Contributor harmless for any liability
* incurred by, or claims asserted against, such Contributor by reason
* of your accepting any such warranty or additional liability.
*
* END OF TERMS AND CONDITIONS
*
* APPENDIX: How to apply the Apache License to your work.
*
* To apply the Apache License to your work, attach the following
* boilerplate notice, with the fields enclosed by brackets "[]"
* replaced with your own identifying information. (Don't include
* the brackets!) The text should be enclosed in the appropriate
* comment syntax for the file format. We also recommend that a
* file or class name and description of purpose be included on the
* same "printed page" as the copyright notice for easier
* identification within third-party archives.
*
* Copyright 2016 Alibaba Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.taobao.weex.ui.view.border;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.ColorFilter;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PixelFormat;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.Shader;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.annotation.NonNull;
import android.support.annotation.Nullable;
import android.util.SparseArray;
import android.util.SparseIntArray;
import com.taobao.weex.dom.flex.FloatUtil;
import com.taobao.weex.dom.flex.Spacing;
import com.taobao.weex.utils.WXLogUtils;
import com.taobao.weex.utils.WXViewUtils;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Locale;
/**
* A subclass of
* {@link Drawable} used for background of {@link com.taobao.weex.ui.component.WXComponent}.
* It supports drawing background color and borders (including rounded borders) by providing a react
* friendly API (setter for each of those properties). The implementation tries to allocate as few
* objects as possible depending on which properties are set. E.g. for views with rounded
* background/borders we allocate {@code mPathForBorderDrawn} and {@code mTempRectForBorderRadius}.
* In case when view have a rectangular borders we allocate {@code mBorderWidthResult} and similar.
* When only background color is set we won't allocate any extra/unnecessary objects.
*/
public class BorderDrawable extends Drawable {

  /** Corner indices used as keys into the radius arrays. */
  public static final int BORDER_TOP_LEFT_RADIUS = 0;
  public static final int BORDER_TOP_RIGHT_RADIUS = 1;
  public static final int BORDER_BOTTOM_RIGHT_RADIUS = 2;
  public static final int BORDER_BOTTOM_LEFT_RADIUS = 3;
  /** Key meaning "all four corners at once". */
  public static final int BORDER_RADIUS_ALL = 8;

  static final int DEFAULT_BORDER_COLOR = Color.BLACK;
  static final float DEFAULT_BORDER_WIDTH = 0;
  private static final float DEFAULT_BORDER_RADIUS = 0;
  private static final BorderStyle DEFAULT_BORDER_STYLE = BorderStyle.SOLID;
  private static final String TAG = "Border";

  /** Shared paint, reused for both the background fill and the border strokes. */
  private final Paint mPaint = new Paint(Paint.ANTI_ALIAS_FLAG);

  // Per-edge / per-corner values, lazily allocated so that a view with only a
  // plain background color pays for no border bookkeeping. Keys are the
  // Spacing.* edge constants (widths/colors/styles) or the BORDER_*_RADIUS
  // corner constants (radii); Spacing.ALL holds the catch-all default.
  private @Nullable SparseArray<Float> mBorderWidth;
  private @Nullable SparseArray<Float> mBorderRadius;
  // Radii after the corner-overlap correction, see prepareBorderRadius().
  private @Nullable SparseArray<Float> mOverlappingBorderRadius;
  private @Nullable SparseIntArray mBorderColor;
  private @Nullable SparseIntArray mBorderStyle;

  // Cached outline path for the current bounds; rebuilt lazily when
  // mNeedUpdatePath is set (bounds or radius changes).
  private @Nullable Path mPathForBorderOutline;
  private boolean mNeedUpdatePath = false;

  private int mColor = Color.TRANSPARENT;
  /** background-image shader (e.g. linear-gradient); drawn instead of mColor when set. */
  private Shader mShader = null;
  private int mAlpha = 255;

  public BorderDrawable() {
  }

  @Override
  public void draw(@NonNull Canvas canvas) {
    canvas.save();
    updateBorderOutline();
    if (mPathForBorderOutline != null) {
      int useColor = WXViewUtils.multiplyColorAlpha(mColor, mAlpha);
      if (mShader != null) {
        // A background-image shader takes precedence over the background color.
        mPaint.setShader(mShader);
        mPaint.setStyle(Paint.Style.FILL);
        canvas.drawPath(mPathForBorderOutline, mPaint);
        mPaint.setShader(null);
      } else if ((useColor >>> 24) != 0) {
        // Only fill when the alpha-multiplied color is not fully transparent.
        // (No shader reset needed here: mShader is null on this path and the
        // paint's shader is cleared at the end of every draw() pass — the
        // former setShader(null) on this branch was redundant.)
        mPaint.setColor(useColor);
        mPaint.setStyle(Paint.Style.FILL);
        canvas.drawPath(mPathForBorderOutline, mPaint);
      }
    }
    mPaint.setStyle(Paint.Style.STROKE);
    mPaint.setStrokeJoin(Paint.Join.ROUND);
    drawBorders(canvas);
    // Clear any line shader installed by preparePaint() so it cannot leak
    // into the next draw pass.
    mPaint.setShader(null);
    canvas.restore();
  }

  @Override
  protected void onBoundsChange(Rect bounds) {
    super.onBoundsChange(bounds);
    // The outline path depends on the bounds; rebuild it lazily on next draw.
    mNeedUpdatePath = true;
  }

  @Override
  public void setAlpha(int alpha) {
    if (alpha != mAlpha) {
      mAlpha = alpha;
      invalidateSelf();
    }
  }

  @Override
  public int getAlpha() {
    return mAlpha;
  }

  /**
   * Do not support Color Filter
   */
  @Override
  public void setColorFilter(ColorFilter cf) {
  }

  @SuppressWarnings("WrongConstant")
  @Override
  public int getOpacity() {
    // A shader background is reported as fully opaque; otherwise the opacity
    // is derived from the effective (alpha-multiplied) background color.
    return mShader != null ? PixelFormat.OPAQUE :
           WXViewUtils.getOpacityFromColor(WXViewUtils.multiplyColorAlpha(mColor, mAlpha));
  }

  /* Android's elevation implementation requires this to be implemented to know where to draw the
     shadow. */
  @Override
  public void getOutline(@NonNull Outline outline) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
      if (mPathForBorderOutline == null) {
        // Force updateBorderOutline() to allocate and build the path.
        mNeedUpdatePath = true;
      }
      updateBorderOutline();
      outline.setConvexPath(mPathForBorderOutline);
    }
  }

  /**
   * Sets the border width of one edge (or of every edge via Spacing.ALL).
   *
   * @param position one of the Spacing.* edge constants
   * @param width    the new width in pixels
   */
  public void setBorderWidth(int position, float width) {
    if (mBorderWidth == null) {
      mBorderWidth = new SparseArray<>(5);
      mBorderWidth.put(Spacing.ALL, DEFAULT_BORDER_WIDTH);
    }
    if (!FloatUtil.floatsEqual(getBorderWidth(position), width)) {
      // updateSparseArray() already stores `width` at `position` (the sibling
      // setters below rely on it alone), so the extra mBorderWidth.put() the
      // old code performed here was redundant and has been dropped.
      BorderUtil.updateSparseArray(mBorderWidth, position, width);
      mNeedUpdatePath = true;
      invalidateSelf();
    }
  }

  float getBorderWidth(int position) {
    return BorderUtil.fetchFromSparseArray(mBorderWidth, position, DEFAULT_BORDER_WIDTH);
  }

  /**
   * Sets the raw (pre-overlap-correction) radius of one corner
   * (or of every corner via Spacing.ALL).
   *
   * @param position one of the BORDER_*_RADIUS corner constants
   * @param radius   the new radius in pixels
   */
  public void setBorderRadius(int position, float radius) {
    if (mBorderRadius == null) {
      mBorderRadius = new SparseArray<>(5);
      mBorderRadius.put(Spacing.ALL, DEFAULT_BORDER_RADIUS);
    }
    if (!FloatUtil.floatsEqual(getBorderRadius(mBorderRadius, position), radius)) {
      BorderUtil.updateSparseArray(mBorderRadius, position, radius, true);
      mNeedUpdatePath = true;
      invalidateSelf();
    }
  }

  /**
   * This method is only used for Unit test, do not call this method, use
   * {@link #getBorderRadius(SparseArray, int)} instead.
   *
   * @param position the index of the edge
   * @return the radius considering border-overlapping of the corner.
   */
  @Deprecated
  float getBorderRadius(int position) {
    return getBorderRadius(mOverlappingBorderRadius, position);
  }

  /**
   * Returns the overlap-corrected corner radii for {@code borderBox} as the
   * eight-element {x, y} array expected by
   * {@link Path#addRoundRect(RectF, float[], Path.Direction)}:
   * top-left, top-right, bottom-right, bottom-left.
   */
  public @NonNull float[] getBorderRadius(RectF borderBox) {
    prepareBorderRadius(borderBox);
    float topLeftRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_LEFT_RADIUS);
    float topRightRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_RIGHT_RADIUS);
    float bottomRightRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS);
    float bottomLeftRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_BOTTOM_LEFT_RADIUS);
    return new float[]{topLeftRadius, topLeftRadius,
                       topRightRadius, topRightRadius,
                       bottomRightRadius, bottomRightRadius,
                       bottomLeftRadius, bottomLeftRadius};
  }

  /**
   * Sets the border color of one edge (or of every edge via Spacing.ALL).
   *
   * @param position one of the Spacing.* edge constants
   * @param color    the new ARGB color
   */
  public void setBorderColor(int position, int color) {
    if (mBorderColor == null) {
      mBorderColor = new SparseIntArray(5);
      mBorderColor.put(Spacing.ALL, DEFAULT_BORDER_COLOR);
    }
    if (getBorderColor(position) != color) {
      BorderUtil.updateSparseArray(mBorderColor, position, color);
      invalidateSelf();
    }
  }

  int getBorderColor(int position) {
    return BorderUtil.fetchFromSparseArray(mBorderColor, position, DEFAULT_BORDER_COLOR);
  }

  /**
   * Sets the border style of one edge; an unrecognized style name is logged
   * and ignored rather than thrown to the caller.
   *
   * @param position one of the Spacing.* edge constants
   * @param style    case-insensitive name of a {@link BorderStyle} constant
   */
  public void setBorderStyle(int position, @NonNull String style) {
    if (mBorderStyle == null) {
      mBorderStyle = new SparseIntArray(5);
      mBorderStyle.put(Spacing.ALL, DEFAULT_BORDER_STYLE.ordinal());
    }
    try {
      int borderStyle = BorderStyle.valueOf(style.toUpperCase(Locale.US)).ordinal();
      if (getBorderStyle(position) != borderStyle) {
        BorderUtil.updateSparseArray(mBorderStyle, position, borderStyle);
        invalidateSelf();
      }
    } catch (IllegalArgumentException e) {
      WXLogUtils.e(TAG, WXLogUtils.getStackTrace(e));
    }
  }

  int getBorderStyle(int position) {
    return BorderUtil.fetchFromSparseArray(mBorderStyle, position, BorderStyle.SOLID.ordinal());
  }

  public int getColor() {
    return mColor;
  }

  public void setColor(int color) {
    mColor = color;
    invalidateSelf();
  }

  /**
   * set background-image linear-gradient
   */
  public void setImage(Shader shader) {
    mShader = shader;
    invalidateSelf();
  }

  /** Returns true when any corner has a non-zero raw (uncorrected) radius. */
  public boolean isRounded() {
    return mBorderRadius != null &&
           (!FloatUtil.floatsEqual(getBorderRadius(mBorderRadius, BORDER_TOP_LEFT_RADIUS), 0) ||
            !FloatUtil.floatsEqual(getBorderRadius(mBorderRadius, BORDER_TOP_RIGHT_RADIUS), 0) ||
            !FloatUtil.floatsEqual(getBorderRadius(mBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS), 0) ||
            !FloatUtil.floatsEqual(getBorderRadius(mBorderRadius, BORDER_BOTTOM_LEFT_RADIUS), 0));
  }

  /** Builds a fresh clip path matching the (possibly rounded) given box. */
  public @NonNull Path getContentPath(@NonNull RectF borderBox) {
    Path contentClip = new Path();
    prepareBorderPath(0, 0, 0, 0, borderBox, contentClip);
    return contentClip;
  }

  private float getBorderRadius(@Nullable SparseArray<Float> borderRadius, int position) {
    return BorderUtil.fetchFromSparseArray(borderRadius, position, DEFAULT_BORDER_RADIUS);
  }

  /** Rebuilds mPathForBorderOutline from the current bounds when marked dirty. */
  private void updateBorderOutline() {
    if (mNeedUpdatePath) {
      mNeedUpdatePath = false;
      if (mPathForBorderOutline == null) {
        mPathForBorderOutline = new Path();
      }
      mPathForBorderOutline.reset();
      prepareBorderPath(0, 0, 0, 0, new RectF(getBounds()), mPathForBorderOutline);
    }
  }

  /**
   * Appends the (possibly rounded) rectangle for {@code rectF} to {@code path},
   * shrinking each corner radius by the paddings of its two adjacent edges.
   */
  private void prepareBorderPath(int topPadding,
                                 int rightPadding,
                                 int bottomPadding,
                                 int leftPadding,
                                 @NonNull RectF rectF,
                                 @NonNull Path path) {
    if (mBorderRadius != null) {
      prepareBorderRadius(rectF);
      float topLeftRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_LEFT_RADIUS);
      float topRightRadius = getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_RIGHT_RADIUS);
      float bottomRightRadius = getBorderRadius(mOverlappingBorderRadius,
                                                BORDER_BOTTOM_RIGHT_RADIUS);
      float bottomLeftRadius = getBorderRadius(mOverlappingBorderRadius,
                                               BORDER_BOTTOM_LEFT_RADIUS);
      path.addRoundRect(
          rectF,
          new float[]{
              topLeftRadius - leftPadding,
              topLeftRadius - topPadding,
              topRightRadius - rightPadding,
              topRightRadius - topPadding,
              bottomRightRadius - rightPadding,
              bottomRightRadius - bottomPadding,
              bottomLeftRadius - leftPadding,
              bottomLeftRadius - bottomPadding
          },
          Path.Direction.CW);
    } else {
      path.addRect(rectF, Path.Direction.CW);
    }
  }

  /**
   * Process overlapping curve according to https://www.w3.org/TR/css3-background/#corner-overlap .
   * When adjacent radii would overlap (factor < 1) every radius is scaled by
   * the same factor; otherwise the raw radii are copied through unchanged.
   */
  private void prepareBorderRadius(@NonNull RectF borderBox) {
    if (mBorderRadius != null) {
      float factor = getScaleFactor(borderBox);
      if (mOverlappingBorderRadius == null) {
        mOverlappingBorderRadius = new SparseArray<>(5);
        mOverlappingBorderRadius.put(Spacing.ALL, 0f);
      }
      if (!Float.isNaN(factor) && factor < 1) {
        mOverlappingBorderRadius.put(BORDER_TOP_LEFT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_TOP_LEFT_RADIUS)
                                     * factor);
        mOverlappingBorderRadius.put(BORDER_TOP_RIGHT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_TOP_RIGHT_RADIUS)
                                     * factor);
        mOverlappingBorderRadius.put(BORDER_BOTTOM_RIGHT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS)
                                     * factor);
        mOverlappingBorderRadius.put(BORDER_BOTTOM_LEFT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_BOTTOM_LEFT_RADIUS)
                                     * factor);
      } else {
        mOverlappingBorderRadius.put(BORDER_TOP_LEFT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_TOP_LEFT_RADIUS));
        mOverlappingBorderRadius.put(BORDER_TOP_RIGHT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_TOP_RIGHT_RADIUS));
        mOverlappingBorderRadius.put(BORDER_BOTTOM_RIGHT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS));
        mOverlappingBorderRadius.put(BORDER_BOTTOM_LEFT_RADIUS,
                                     getBorderRadius(mBorderRadius, BORDER_BOTTOM_LEFT_RADIUS));
      }
    }
  }

  /**
   * Returns the common scale factor for the corner radii: the smallest ratio
   * of a side length to the sum of its two adjacent radii, or NaN when every
   * adjacent-radius sum is zero (nothing can overlap).
   */
  private float getScaleFactor(@NonNull RectF borderBox) {
    final float topRadius = getBorderRadius(mBorderRadius, BORDER_TOP_LEFT_RADIUS)
                            + getBorderRadius(mBorderRadius, BORDER_TOP_RIGHT_RADIUS);
    final float rightRadius = getBorderRadius(mBorderRadius, BORDER_TOP_RIGHT_RADIUS)
                              + getBorderRadius(mBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS);
    final float bottomRadius = getBorderRadius(mBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS)
                               + getBorderRadius(mBorderRadius, BORDER_BOTTOM_LEFT_RADIUS);
    final float leftRadius = getBorderRadius(mBorderRadius, BORDER_BOTTOM_LEFT_RADIUS)
                             + getBorderRadius(mBorderRadius, BORDER_TOP_LEFT_RADIUS);
    List<Float> factors = new ArrayList<>(4);
    updateFactor(factors, borderBox.width(), topRadius);
    updateFactor(factors, borderBox.height(), rightRadius);
    updateFactor(factors, borderBox.width(), bottomRadius);
    updateFactor(factors, borderBox.height(), leftRadius);
    float factor;
    if (factors.isEmpty()) {
      factor = Float.NaN;
    } else {
      factor = Collections.min(factors);
    }
    return factor;
  }

  /** Adds numerator/denominator to {@code list} unless the denominator is ~0. */
  private void updateFactor(@NonNull List<Float> list, float numerator, float denominator) {
    if (!FloatUtil.floatsEqual(denominator, 0)) {
      list.add(numerator / denominator);
    }
  }

  /**
   * Strokes the four edges, each clipped between its two corner geometries so
   * that adjacent edges with different colors/styles meet cleanly.
   */
  private void drawBorders(Canvas canvas) {
    RectF rectBounds = new RectF(getBounds());
    BorderCorner topLeft = new TopLeftCorner(
        getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_LEFT_RADIUS),
        getBorderWidth(Spacing.LEFT),
        getBorderWidth(Spacing.TOP),
        rectBounds);
    BorderCorner topRight = new TopRightCorner(
        getBorderRadius(mOverlappingBorderRadius, BORDER_TOP_RIGHT_RADIUS),
        getBorderWidth(Spacing.TOP),
        getBorderWidth(Spacing.RIGHT),
        rectBounds);
    BorderCorner bottomRight = new BottomRightCorner(
        getBorderRadius(mOverlappingBorderRadius, BORDER_BOTTOM_RIGHT_RADIUS),
        getBorderWidth(Spacing.RIGHT),
        getBorderWidth(Spacing.BOTTOM),
        rectBounds);
    BorderCorner bottomLeft = new BottomLeftCorner(
        getBorderRadius(mOverlappingBorderRadius, BORDER_BOTTOM_LEFT_RADIUS),
        getBorderWidth(Spacing.BOTTOM),
        getBorderWidth(Spacing.LEFT),
        rectBounds);
    drawOneSide(canvas, new BorderEdge(topLeft, topRight, Spacing.TOP,
                                       getBorderWidth(Spacing.TOP)));
    drawOneSide(canvas, new BorderEdge(topRight, bottomRight, Spacing.RIGHT,
                                       getBorderWidth(Spacing.RIGHT)));
    drawOneSide(canvas, new BorderEdge(bottomRight, bottomLeft, Spacing.BOTTOM,
                                       getBorderWidth(Spacing.BOTTOM)));
    drawOneSide(canvas, new BorderEdge(bottomLeft, topLeft, Spacing.LEFT,
                                       getBorderWidth(Spacing.LEFT)));
  }

  /** Draws one edge, skipping it entirely when its width is (approximately) zero. */
  private void drawOneSide(Canvas canvas, @NonNull BorderEdge borderEdge) {
    if (!FloatUtil.floatsEqual(0, getBorderWidth(borderEdge.getEdge()))) {
      preparePaint(borderEdge.getEdge());
      borderEdge.drawEdge(canvas, mPaint);
    }
  }

  /**
   * Configures mPaint for stroking the given edge: width, alpha-multiplied
   * color, and the style's line shader (e.g. for dashed/dotted styles).
   */
  private void preparePaint(int side) {
    float borderWidth = getBorderWidth(side);
    int color = WXViewUtils.multiplyColorAlpha(getBorderColor(side), mAlpha);
    BorderStyle borderStyle = BorderStyle.values()[getBorderStyle(side)];
    Shader shader = borderStyle.getLineShader(borderWidth, color, side);
    mPaint.setShader(shader);
    mPaint.setColor(color);
    mPaint.setStrokeWidth(borderWidth);
    mPaint.setStrokeCap(Paint.Cap.ROUND);
  }
}
| |
/*
* Copyright 2019 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.TargetSslProxyClient.ListTargetSslProxiesPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.DeleteTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.GetTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.InsertTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.ListTargetSslProxiesHttpRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.SetBackendServiceTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.SetProxyHeaderTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.SetSslCertificatesTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.SetSslPolicyTargetSslProxyHttpRequest;
import com.google.cloud.compute.v1.TargetSslProxy;
import com.google.cloud.compute.v1.TargetSslProxyList;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.util.List;
import javax.annotation.Generated;
import org.threeten.bp.Duration;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
* Settings class to configure an instance of {@link TargetSslProxyStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (https://www.googleapis.com/compute/v1/projects/) and default
* port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object. For
* example, to set the total timeout of deleteTargetSslProxy to 30 seconds:
*
* <pre>
* <code>
* TargetSslProxyStubSettings.Builder targetSslProxySettingsBuilder =
* TargetSslProxyStubSettings.newBuilder();
* targetSslProxySettingsBuilder.deleteTargetSslProxySettings().getRetrySettings().toBuilder()
* .setTotalTimeout(Duration.ofSeconds(30));
* TargetSslProxyStubSettings targetSslProxySettings = targetSslProxySettingsBuilder.build();
* </code>
* </pre>
*/
@Generated("by gapic-generator")
@BetaApi
public class TargetSslProxyStubSettings extends StubSettings<TargetSslProxyStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder()
.add("https://www.googleapis.com/auth/cloud-platform")
.add("https://www.googleapis.com/auth/compute")
.add("https://www.googleapis.com/auth/compute.readonly")
.add("https://www.googleapis.com/auth/devstorage.full_control")
.add("https://www.googleapis.com/auth/devstorage.read_only")
.add("https://www.googleapis.com/auth/devstorage.read_write")
.build();
private final UnaryCallSettings<DeleteTargetSslProxyHttpRequest, Operation>
deleteTargetSslProxySettings;
private final UnaryCallSettings<GetTargetSslProxyHttpRequest, TargetSslProxy>
getTargetSslProxySettings;
private final UnaryCallSettings<InsertTargetSslProxyHttpRequest, Operation>
insertTargetSslProxySettings;
private final PagedCallSettings<
ListTargetSslProxiesHttpRequest, TargetSslProxyList, ListTargetSslProxiesPagedResponse>
listTargetSslProxiesSettings;
private final UnaryCallSettings<SetBackendServiceTargetSslProxyHttpRequest, Operation>
setBackendServiceTargetSslProxySettings;
private final UnaryCallSettings<SetProxyHeaderTargetSslProxyHttpRequest, Operation>
setProxyHeaderTargetSslProxySettings;
private final UnaryCallSettings<SetSslCertificatesTargetSslProxyHttpRequest, Operation>
setSslCertificatesTargetSslProxySettings;
private final UnaryCallSettings<SetSslPolicyTargetSslProxyHttpRequest, Operation>
setSslPolicyTargetSslProxySettings;
/**
 * Returns the object with the settings used for calls to deleteTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<DeleteTargetSslProxyHttpRequest, Operation>
deleteTargetSslProxySettings() {
return deleteTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to getTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<GetTargetSslProxyHttpRequest, TargetSslProxy>
getTargetSslProxySettings() {
return getTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to insertTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<InsertTargetSslProxyHttpRequest, Operation>
insertTargetSslProxySettings() {
return insertTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to listTargetSslProxies.
 * A paged-call settings object: besides retry/timeout configuration it carries
 * the response factory that assembles ListTargetSslProxiesPagedResponse pages.
 */
public PagedCallSettings<
ListTargetSslProxiesHttpRequest, TargetSslProxyList, ListTargetSslProxiesPagedResponse>
listTargetSslProxiesSettings() {
return listTargetSslProxiesSettings;
}
/**
 * Returns the object with the settings used for calls to setBackendServiceTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<SetBackendServiceTargetSslProxyHttpRequest, Operation>
setBackendServiceTargetSslProxySettings() {
return setBackendServiceTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to setProxyHeaderTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<SetProxyHeaderTargetSslProxyHttpRequest, Operation>
setProxyHeaderTargetSslProxySettings() {
return setProxyHeaderTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to setSslCertificatesTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<SetSslCertificatesTargetSslProxyHttpRequest, Operation>
setSslCertificatesTargetSslProxySettings() {
return setSslCertificatesTargetSslProxySettings;
}
/**
 * Returns the object with the settings used for calls to setSslPolicyTargetSslProxy.
 * Built once from the builder in the constructor; includes per-call retry and
 * timeout configuration for this RPC.
 */
public UnaryCallSettings<SetSslPolicyTargetSslProxyHttpRequest, Operation>
setSslPolicyTargetSslProxySettings() {
return setSslPolicyTargetSslProxySettings;
}
/**
 * Creates the transport-specific stub configured by these settings. Only the
 * HTTP/JSON transport is supported; any other configured transport results in
 * an {@link UnsupportedOperationException}.
 *
 * @throws IOException if the underlying HTTP/JSON stub cannot be created
 */
@BetaApi("A restructuring of stub classes is planned, so this may break in the future")
public TargetSslProxyStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonTargetSslProxyStub.create(this);
} else {
throw new UnsupportedOperationException(
"Transport not supported: " + getTransportChannelProvider().getTransportName());
}
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint (Compute v1 REST base URL). */
public static String getDefaultEndpoint() {
return "https://www.googleapis.com/compute/v1/projects/";
}
/** Returns the default service port (443, HTTPS). */
public static int getDefaultServicePort() {
return 443;
}
/** Returns the default service scopes (the immutable DEFAULT_SERVICE_SCOPES list). */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/**
 * Returns a builder for the default credentials for this service, pre-configured
 * with the service's default OAuth scopes.
 */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder().setScopesToApply(DEFAULT_SERVICE_SCOPES);
}
/** Returns a builder for the default (HTTP/JSON) ChannelProvider for this service. */
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
/** Returns the default TransportChannelProvider: a built HTTP/JSON channel provider. */
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultHttpJsonTransportProviderBuilder().build();
}
/**
 * Returns a builder for the default API client headers: a "gapic" token carrying
 * this generated library's version plus the HTTP/JSON transport token/version.
 */
@BetaApi("The surface for customizing headers is not stable yet and may change in the future.")
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(TargetSslProxyStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
/** Returns a new builder for this class, initialized with the default settings. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new builder for this class that takes its defaults from the given ClientContext. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
/**
 * Constructs an immutable settings instance by calling {@code build()} on each
 * per-method settings builder held by {@code settingsBuilder}.
 */
protected TargetSslProxyStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
deleteTargetSslProxySettings = settingsBuilder.deleteTargetSslProxySettings().build();
getTargetSslProxySettings = settingsBuilder.getTargetSslProxySettings().build();
insertTargetSslProxySettings = settingsBuilder.insertTargetSslProxySettings().build();
listTargetSslProxiesSettings = settingsBuilder.listTargetSslProxiesSettings().build();
setBackendServiceTargetSslProxySettings =
settingsBuilder.setBackendServiceTargetSslProxySettings().build();
setProxyHeaderTargetSslProxySettings =
settingsBuilder.setProxyHeaderTargetSslProxySettings().build();
setSslCertificatesTargetSslProxySettings =
settingsBuilder.setSslCertificatesTargetSslProxySettings().build();
setSslPolicyTargetSslProxySettings =
settingsBuilder.setSslPolicyTargetSslProxySettings().build();
}
// Paging descriptor for ListTargetSslProxies: tells the paging machinery where
// the page token and page size live on the request, and how to extract the next
// page token and the TargetSslProxy resources from a response.
private static final PagedListDescriptor<
ListTargetSslProxiesHttpRequest, TargetSslProxyList, TargetSslProxy>
LIST_TARGET_SSL_PROXIES_PAGE_STR_DESC =
new PagedListDescriptor<
ListTargetSslProxiesHttpRequest, TargetSslProxyList, TargetSslProxy>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListTargetSslProxiesHttpRequest injectToken(
ListTargetSslProxiesHttpRequest payload, String token) {
// Requests are immutable; rebuild with the new page token.
return ListTargetSslProxiesHttpRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public ListTargetSslProxiesHttpRequest injectPageSize(
ListTargetSslProxiesHttpRequest payload, int pageSize) {
// Compute expresses page size as "maxResults" on the request.
return ListTargetSslProxiesHttpRequest.newBuilder(payload)
.setMaxResults(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListTargetSslProxiesHttpRequest payload) {
return payload.getMaxResults();
}
@Override
public String extractNextToken(TargetSslProxyList payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<TargetSslProxy> extractResources(TargetSslProxyList payload) {
// Guard against a null items list so callers always iterate safely.
return payload.getItemsList() != null
? payload.getItemsList()
: ImmutableList.<TargetSslProxy>of();
}
};
// Factory that wraps a raw TargetSslProxyList future into the paged response
// type, wiring in the paging descriptor above so subsequent pages can be
// fetched through the same callable.
private static final PagedListResponseFactory<
ListTargetSslProxiesHttpRequest, TargetSslProxyList, ListTargetSslProxiesPagedResponse>
LIST_TARGET_SSL_PROXIES_PAGE_STR_FACT =
new PagedListResponseFactory<
ListTargetSslProxiesHttpRequest,
TargetSslProxyList,
ListTargetSslProxiesPagedResponse>() {
@Override
public ApiFuture<ListTargetSslProxiesPagedResponse> getFuturePagedResponse(
UnaryCallable<ListTargetSslProxiesHttpRequest, TargetSslProxyList> callable,
ListTargetSslProxiesHttpRequest request,
ApiCallContext context,
ApiFuture<TargetSslProxyList> futureResponse) {
PageContext<ListTargetSslProxiesHttpRequest, TargetSslProxyList, TargetSslProxy>
pageContext =
PageContext.create(
callable, LIST_TARGET_SSL_PROXIES_PAGE_STR_DESC, request, context);
return ListTargetSslProxiesPagedResponse.createAsync(pageContext, futureResponse);
}
};
/**
 * Builder for TargetSslProxyStubSettings.
 *
 * <p>Holds one mutable settings builder per RPC method plus the static retry
 * profiles applied to them in {@link #initDefaults}.
 */
public static class Builder extends StubSettings.Builder<TargetSslProxyStubSettings, Builder> {
  // All unary method settings builders, collected so applyToAllUnaryMethods()
  // can update every method's settings in a single pass.
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final UnaryCallSettings.Builder<DeleteTargetSslProxyHttpRequest, Operation>
      deleteTargetSslProxySettings;
  private final UnaryCallSettings.Builder<GetTargetSslProxyHttpRequest, TargetSslProxy>
      getTargetSslProxySettings;
  private final UnaryCallSettings.Builder<InsertTargetSslProxyHttpRequest, Operation>
      insertTargetSslProxySettings;
  private final PagedCallSettings.Builder<
          ListTargetSslProxiesHttpRequest, TargetSslProxyList, ListTargetSslProxiesPagedResponse>
      listTargetSslProxiesSettings;
  private final UnaryCallSettings.Builder<SetBackendServiceTargetSslProxyHttpRequest, Operation>
      setBackendServiceTargetSslProxySettings;
  private final UnaryCallSettings.Builder<SetProxyHeaderTargetSslProxyHttpRequest, Operation>
      setProxyHeaderTargetSslProxySettings;
  private final UnaryCallSettings.Builder<SetSslCertificatesTargetSslProxyHttpRequest, Operation>
      setSslCertificatesTargetSslProxySettings;
  private final UnaryCallSettings.Builder<SetSslPolicyTargetSslProxyHttpRequest, Operation>
      setSslPolicyTargetSslProxySettings;

  // Named sets of retryable status codes, keyed by the profile names
  // referenced from initDefaults().
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    // "idempotent" calls retry on DEADLINE_EXCEEDED/UNAVAILABLE;
    // "non_idempotent" calls retry on nothing.
    definitions.put(
        "idempotent",
        ImmutableSet.copyOf(
            Lists.<StatusCode.Code>newArrayList(
                StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
    definitions.put("non_idempotent", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }

  // Named retry/backoff parameter sets, keyed by profile name.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    // "default" profile: exponential retry delay 100ms -> 60s (x1.3),
    // fixed 20s per-RPC timeout, 10-minute total deadline.
    settings =
        RetrySettings.newBuilder()
            .setInitialRetryDelay(Duration.ofMillis(100L))
            .setRetryDelayMultiplier(1.3)
            .setMaxRetryDelay(Duration.ofMillis(60000L))
            .setInitialRpcTimeout(Duration.ofMillis(20000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeout(Duration.ofMillis(20000L))
            .setTotalTimeout(Duration.ofMillis(600000L))
            .build();
    definitions.put("default", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }

  protected Builder() {
    this((ClientContext) null);
  }

  protected Builder(ClientContext clientContext) {
    super(clientContext);

    // Fresh, unconfigured builders for every method; retry defaults are
    // layered on by initDefaults(this) below.
    deleteTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    insertTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    listTargetSslProxiesSettings =
        PagedCallSettings.newBuilder(LIST_TARGET_SSL_PROXIES_PAGE_STR_FACT);
    setBackendServiceTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    setProxyHeaderTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    setSslCertificatesTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    setSslPolicyTargetSslProxySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();

    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            deleteTargetSslProxySettings,
            getTargetSslProxySettings,
            insertTargetSslProxySettings,
            listTargetSslProxiesSettings,
            setBackendServiceTargetSslProxySettings,
            setProxyHeaderTargetSslProxySettings,
            setSslCertificatesTargetSslProxySettings,
            setSslPolicyTargetSslProxySettings);

    initDefaults(this);
  }

  // Creates a Builder pre-populated with the service's default transport,
  // credentials, headers, endpoint, and per-method retry settings.
  private static Builder createDefault() {
    Builder builder = new Builder((ClientContext) null);
    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setEndpoint(getDefaultEndpoint());
    return initDefaults(builder);
  }

  // Applies the retry profiles above to each method: delete/get/list use the
  // "idempotent" code set, the mutating set* and insert methods use
  // "non_idempotent"; all share the "default" backoff parameters.
  private static Builder initDefaults(Builder builder) {
    builder
        .deleteTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .getTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .insertTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .listTargetSslProxiesSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .setBackendServiceTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .setProxyHeaderTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .setSslCertificatesTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    builder
        .setSslPolicyTargetSslProxySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("non_idempotent"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("default"));

    return builder;
  }

  // Copy constructor: rebuilds mutable builders from an existing immutable
  // settings instance (used by toBuilder()).
  protected Builder(TargetSslProxyStubSettings settings) {
    super(settings);

    deleteTargetSslProxySettings = settings.deleteTargetSslProxySettings.toBuilder();
    getTargetSslProxySettings = settings.getTargetSslProxySettings.toBuilder();
    insertTargetSslProxySettings = settings.insertTargetSslProxySettings.toBuilder();
    listTargetSslProxiesSettings = settings.listTargetSslProxiesSettings.toBuilder();
    setBackendServiceTargetSslProxySettings =
        settings.setBackendServiceTargetSslProxySettings.toBuilder();
    setProxyHeaderTargetSslProxySettings =
        settings.setProxyHeaderTargetSslProxySettings.toBuilder();
    setSslCertificatesTargetSslProxySettings =
        settings.setSslCertificatesTargetSslProxySettings.toBuilder();
    setSslPolicyTargetSslProxySettings = settings.setSslPolicyTargetSslProxySettings.toBuilder();

    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            deleteTargetSslProxySettings,
            getTargetSslProxySettings,
            insertTargetSslProxySettings,
            listTargetSslProxiesSettings,
            setBackendServiceTargetSslProxySettings,
            setProxyHeaderTargetSslProxySettings,
            setSslCertificatesTargetSslProxySettings,
            setSslPolicyTargetSslProxySettings);
  }

  // NEXT_MAJOR_VER: remove 'throws Exception'
  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) throws Exception {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the mutable settings builders for every unary method. */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to deleteTargetSslProxy. */
  public UnaryCallSettings.Builder<DeleteTargetSslProxyHttpRequest, Operation>
      deleteTargetSslProxySettings() {
    return deleteTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to getTargetSslProxy. */
  public UnaryCallSettings.Builder<GetTargetSslProxyHttpRequest, TargetSslProxy>
      getTargetSslProxySettings() {
    return getTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to insertTargetSslProxy. */
  public UnaryCallSettings.Builder<InsertTargetSslProxyHttpRequest, Operation>
      insertTargetSslProxySettings() {
    return insertTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to listTargetSslProxies. */
  public PagedCallSettings.Builder<
          ListTargetSslProxiesHttpRequest, TargetSslProxyList, ListTargetSslProxiesPagedResponse>
      listTargetSslProxiesSettings() {
    return listTargetSslProxiesSettings;
  }

  /** Returns the builder for the settings used for calls to setBackendServiceTargetSslProxy. */
  public UnaryCallSettings.Builder<SetBackendServiceTargetSslProxyHttpRequest, Operation>
      setBackendServiceTargetSslProxySettings() {
    return setBackendServiceTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to setProxyHeaderTargetSslProxy. */
  public UnaryCallSettings.Builder<SetProxyHeaderTargetSslProxyHttpRequest, Operation>
      setProxyHeaderTargetSslProxySettings() {
    return setProxyHeaderTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to setSslCertificatesTargetSslProxy. */
  public UnaryCallSettings.Builder<SetSslCertificatesTargetSslProxyHttpRequest, Operation>
      setSslCertificatesTargetSslProxySettings() {
    return setSslCertificatesTargetSslProxySettings;
  }

  /** Returns the builder for the settings used for calls to setSslPolicyTargetSslProxy. */
  public UnaryCallSettings.Builder<SetSslPolicyTargetSslProxyHttpRequest, Operation>
      setSslPolicyTargetSslProxySettings() {
    return setSslPolicyTargetSslProxySettings;
  }

  /** Builds the immutable settings object from this builder's current state. */
  @Override
  public TargetSslProxyStubSettings build() throws IOException {
    return new TargetSslProxyStubSettings(this);
  }
}
}
| |
/**
* Copyright 2005-2014 The Kuali Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl2.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.rice.krad.service.impl;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.mockito.Matchers.anyListOf;
import static org.mockito.Matchers.anyMapOf;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyZeroInteractions;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import javax.xml.namespace.QName;
import org.apache.ojb.broker.metadata.ClassDescriptor;
import org.apache.ojb.broker.metadata.MetadataManager;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.kuali.rice.core.api.CoreConstants;
import org.kuali.rice.core.api.config.property.ConfigContext;
import org.kuali.rice.core.api.resourceloader.GlobalResourceLoader;
import org.kuali.rice.core.api.resourceloader.ResourceLoader;
import org.kuali.rice.core.framework.config.property.SimpleConfig;
import org.kuali.rice.core.framework.resourceloader.BeanFactoryResourceLoader;
import org.kuali.rice.krad.bo.PersistableBusinessObject;
import org.kuali.rice.krad.bo.PersistableBusinessObjectBase;
import org.kuali.rice.krad.data.metadata.MetadataRepository;
import org.kuali.rice.krad.document.Document;
import org.kuali.rice.krad.document.DocumentBase;
import org.kuali.rice.krad.service.DataDictionaryService;
import org.kuali.rice.krad.service.LegacyDataAdapter;
import org.kuali.rice.krad.util.KRADConstants;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.runners.MockitoJUnitRunner;
import org.springframework.beans.factory.support.StaticListableBeanFactory;
/**
* Unit test for the {@link LegacyDataAdapterImpl}. Tests that the various methods delegate to KNS or KRAD under the
* appropriate circumstances. Also tests some of the internal code in this class is functioning properly.
*
* @author Kuali Rice Team (rice.collab@kuali.org)
*/
@RunWith(MockitoJUnitRunner.class)
public class LegacyDataAdapterImplTest {

    // Mocked delegates: each test verifies the adapter routes a call to exactly
    // one of these two, never both.
    @Mock private LegacyDataAdapter knsLegacyDataAdapter;
    @Mock private LegacyDataAdapter kradLegacyDataAdapter;
    @Mock private DataDictionaryService dataDictionaryService;
    @Mock private MetadataRepository metadataRepository;

    // System under test; Mockito injects the mocks above.
    @InjectMocks private LegacyDataAdapterImpl lda = new LegacyDataAdapterImpl();
    /**
     * Resets the global resource loader and configuration before every test so the
     * adapter's legacy-detection flags start from a clean slate, then exposes the
     * mocks through a bean-factory-backed resource loader and registers the Legacy
     * test classes in the OJB metadata repository.
     */
    @Before
    public void setup() throws Exception {
        GlobalResourceLoader.stop();
        SimpleConfig config = new SimpleConfig();
        config.putProperty(CoreConstants.Config.APPLICATION_ID, getClass().getName());
        ConfigContext.init(config);
        // Clear both legacy-framework switches; individual tests opt in via enableLegacy().
        ConfigContext.getCurrentContextConfig().removeProperty(KRADConstants.Config.ENABLE_LEGACY_DATA_FRAMEWORK);
        ConfigContext.getCurrentContextConfig().removeProperty(KRADConstants.Config.KNS_ENABLED);

        // Publish the mocks under the bean names the adapter looks up from the GRL.
        StaticListableBeanFactory testBf = new StaticListableBeanFactory();
        testBf.addBean("metadataRepository", metadataRepository);
        testBf.addBean("dataDictionaryService", dataDictionaryService);
        testBf.addBean("knsLegacyDataAdapter", knsLegacyDataAdapter);
        testBf.addBean("kradLegacyDataAdapter", kradLegacyDataAdapter);
        ResourceLoader rl = new BeanFactoryResourceLoader(new QName(getClass().getName()), testBf);
        GlobalResourceLoader.addResourceLoader(rl);
        GlobalResourceLoader.start();

        // Register the two "legacy" test classes with OJB metadata — presumably
        // this is what makes the adapter treat them as legacy-mapped types;
        // TODO(review): confirm against LegacyDataAdapterImpl's detection logic.
        MetadataManager mm = MetadataManager.getInstance();
        // register Legacy object
        ClassDescriptor legacyDescriptor = new ClassDescriptor(mm.getGlobalRepository());
        legacyDescriptor.setClassOfObject(Legacy.class);
        mm.getGlobalRepository().put(Legacy.class, legacyDescriptor);
        // register LegacyDocument object
        ClassDescriptor legacyDocumentDescriptor = new ClassDescriptor(mm.getGlobalRepository());
        legacyDocumentDescriptor.setClassOfObject(LegacyDocument.class);
        mm.getGlobalRepository().put(LegacyDocument.class, legacyDocumentDescriptor);
    }
    /**
     * Turns on the KNS-enabled flag so subsequent adapter calls with legacy-mapped
     * types route to the KNS adapter. Cleared again for each test in {@link #setup()}.
     */
    protected void enableLegacy() {
        ConfigContext.getCurrentContextConfig().putProperty(KRADConstants.Config.KNS_ENABLED, "true");
    }
    /** Creates a plain object that is NOT registered in the OJB metadata (non-legacy). */
    protected NonLegacy newNonLegacyObject() {
        return new NonLegacy();
    }

    /** Creates a non-legacy object that implements PersistableBusinessObject. */
    protected NonLegacyPersistableBusinessObject newNonLegacyPersistableBusinessObject() {
        return new NonLegacyPersistableBusinessObject();
    }

    /** Creates an object registered with OJB in setup(), i.e. treated as legacy. */
    protected Legacy newLegacyObject() {
        return new Legacy();
    }

    /** Creates a document type NOT registered in the OJB metadata (non-legacy). */
    protected NonLegacyDocument newNonLegacyDocument() {
        return new NonLegacyDocument();
    }

    /** Creates a document type registered with OJB in setup(), i.e. treated as legacy. */
    protected LegacyDocument newLegacyDocument() {
        return new LegacyDocument();
    }
    // --- Delegation tests -------------------------------------------------
    // Each pair below checks the same routing contract: by default the call
    // goes to the KRAD adapter and the KNS adapter is untouched; with
    // enableLegacy() and a legacy-mapped type, the call goes to the KNS
    // adapter and the KRAD adapter is untouched.

    @Test
    public void testSave() throws Exception {
        Serializable object = newNonLegacyObject();
        lda.save(object);
        verify(kradLegacyDataAdapter).save(object);
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testSave_Legacy() throws Exception {
        enableLegacy();
        PersistableBusinessObject object = newLegacyObject();
        lda.save(object);
        verify(knsLegacyDataAdapter).save(object);
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testLinkAndSave() throws Exception {
        Serializable object = newNonLegacyObject();
        lda.linkAndSave(object);
        verify(kradLegacyDataAdapter).linkAndSave(object);
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testLinkAndSave_Legacy() throws Exception {
        enableLegacy();
        Serializable object = newLegacyObject();
        lda.linkAndSave(object);
        verify(knsLegacyDataAdapter).linkAndSave(object);
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testSaveDocument() throws Exception {
        Document document = newNonLegacyDocument();
        lda.saveDocument(document);
        verify(kradLegacyDataAdapter).saveDocument(document);
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testSaveDocument_Legacy() throws Exception {
        enableLegacy();
        Document document = newLegacyDocument();
        lda.saveDocument(document);
        verify(knsLegacyDataAdapter).saveDocument(document);
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testFindByPrimaryKey() throws Exception {
        lda.findByPrimaryKey(NonLegacy.class, new HashMap<String, Object>());
        verify(kradLegacyDataAdapter).findByPrimaryKey(eq(NonLegacy.class), anyMapOf(String.class, Object.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindByPrimaryKey_Legacy() throws Exception {
        enableLegacy();
        lda.findByPrimaryKey(Legacy.class, new HashMap<String, Object>());
        verify(knsLegacyDataAdapter).findByPrimaryKey(eq(Legacy.class), anyMapOf(String.class, Object.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testFindBySinglePrimaryKey() throws Exception {
        lda.findBySinglePrimaryKey(NonLegacy.class, new Object());
        verify(kradLegacyDataAdapter).findBySinglePrimaryKey(eq(NonLegacy.class), anyObject());
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindBySinglePrimaryKey_Legacy() throws Exception {
        enableLegacy();
        lda.findBySinglePrimaryKey(Legacy.class, new Object());
        verify(knsLegacyDataAdapter).findBySinglePrimaryKey(eq(Legacy.class), anyObject());
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testDelete() throws Exception {
        Object object = newNonLegacyObject();
        lda.delete(object);
        verify(kradLegacyDataAdapter).delete(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testDelete_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.delete(object);
        verify(knsLegacyDataAdapter).delete(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testDeleteMatching() throws Exception {
        lda.deleteMatching(NonLegacy.class, new HashMap<String, String>());
        verify(kradLegacyDataAdapter).deleteMatching(eq(NonLegacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testDeleteMatching_Legacy() throws Exception {
        enableLegacy();
        lda.deleteMatching(Legacy.class, new HashMap<String, String>());
        verify(knsLegacyDataAdapter).deleteMatching(eq(Legacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testRetrieve() throws Exception {
        Object object = newNonLegacyObject();
        lda.retrieve(object);
        verify(kradLegacyDataAdapter).retrieve(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testRetrieve_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.retrieve(object);
        verify(knsLegacyDataAdapter).retrieve(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testFindAll() throws Exception {
        lda.findAll(NonLegacy.class);
        verify(kradLegacyDataAdapter).findAll(eq(NonLegacy.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }
@Test
public void testFindAll_Legacy() throws Exception {
enableLegacy();
lda.findAll(Legacy.class);
verify(knsLegacyDataAdapter).findAll(eq(Legacy.class));;
verifyZeroInteractions(kradLegacyDataAdapter);
}
    // Routing checks for the query/reflection-style adapter methods; same
    // KRAD-by-default / KNS-when-legacy contract as the tests above.

    @Test
    public void testFindMatching() throws Exception {
        lda.findMatching(NonLegacy.class, new HashMap<String, String>());
        verify(kradLegacyDataAdapter).findMatching(eq(NonLegacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindMatching_Legacy() throws Exception {
        enableLegacy();
        lda.findMatching(Legacy.class, new HashMap<String, String>());
        verify(knsLegacyDataAdapter).findMatching(eq(Legacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testFindMatchingOrderBy() throws Exception {
        lda.findMatchingOrderBy(NonLegacy.class, new HashMap<String, String>(), "a", true);
        verify(kradLegacyDataAdapter).findMatchingOrderBy(eq(NonLegacy.class), anyMapOf(String.class, String.class), eq(
                "a"), eq(Boolean.TRUE));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindMatchingOrderBy_Legacy() throws Exception {
        enableLegacy();
        lda.findMatchingOrderBy(Legacy.class, new HashMap<String, String>(), "a", true);
        verify(knsLegacyDataAdapter).findMatchingOrderBy(eq(Legacy.class), anyMapOf(String.class, String.class), eq(
                "a"), eq(Boolean.TRUE));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testGetPrimaryKeyFieldValues() throws Exception {
        Object object = newNonLegacyObject();
        lda.getPrimaryKeyFieldValues(object);
        verify(kradLegacyDataAdapter).getPrimaryKeyFieldValues(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testGetPrimaryKeyFieldValues_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.getPrimaryKeyFieldValues(object);
        verify(knsLegacyDataAdapter).getPrimaryKeyFieldValues(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testRetrieveNonKeyFields() throws Exception {
        Object object = newNonLegacyObject();
        lda.retrieveNonKeyFields(object);
        verify(kradLegacyDataAdapter).retrieveNonKeyFields(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testRetrieveNonKeyFields_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.retrieveNonKeyFields(object);
        verify(knsLegacyDataAdapter).retrieveNonKeyFields(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testRetrieveReferenceObject() throws Exception {
        Object object = newNonLegacyObject();
        lda.retrieveReferenceObject(object, "blah");
        verify(kradLegacyDataAdapter).retrieveReferenceObject(eq(object), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testRetrieveReferenceObject_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.retrieveReferenceObject(object, "blah");
        verify(knsLegacyDataAdapter).retrieveReferenceObject(eq(object), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testRefreshAllNonUpdatingReferences() throws Exception {
        Object object = newNonLegacyObject();
        lda.refreshAllNonUpdatingReferences(object);
        verify(kradLegacyDataAdapter).refreshAllNonUpdatingReferences(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testRefreshAllNonUpdatingReferences_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.refreshAllNonUpdatingReferences(object);
        verify(knsLegacyDataAdapter).refreshAllNonUpdatingReferences(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testIsProxied() throws Exception {
        Object object = newNonLegacyObject();
        lda.isProxied(object);
        verify(kradLegacyDataAdapter).isProxied(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testIsProxied_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.isProxied(object);
        verify(knsLegacyDataAdapter).isProxied(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testResolveProxy() throws Exception {
        Object object = newNonLegacyObject();
        lda.resolveProxy(object);
        verify(kradLegacyDataAdapter).resolveProxy(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testResolveProxy_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.resolveProxy(object);
        verify(knsLegacyDataAdapter).resolveProxy(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }
    // Routing checks for the lookup/metadata adapter methods; same
    // KRAD-by-default / KNS-when-legacy contract as the tests above.

    @Test
    public void testFindCollectionBySearchHelper() throws Exception {
        lda.findCollectionBySearchHelper(NonLegacy.class, new HashMap<String, String>(), true, true, 50);
        verify(kradLegacyDataAdapter).findCollectionBySearchHelper(eq(NonLegacy.class), anyMapOf(String.class,
                String.class), eq(true), eq(true), eq(50));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindCollectionBySearchHelper_Legacy() throws Exception {
        enableLegacy();
        lda.findCollectionBySearchHelper(Legacy.class, new HashMap<String, String>(), true, true, 50);
        verify(knsLegacyDataAdapter).findCollectionBySearchHelper(eq(Legacy.class), anyMapOf(String.class,
                String.class), eq(true), eq(true), eq(50));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testFindObjectBySearch() throws Exception {
        lda.findObjectBySearch(NonLegacy.class, new HashMap<String, String>());
        verify(kradLegacyDataAdapter).findObjectBySearch(eq(NonLegacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testFindObjectBySearch_Legacy() throws Exception {
        enableLegacy();
        lda.findObjectBySearch(Legacy.class, new HashMap<String, String>());
        verify(knsLegacyDataAdapter).findObjectBySearch(eq(Legacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testAllPrimaryKeyValuesPresentAndNotWildcard() throws Exception {
        lda.allPrimaryKeyValuesPresentAndNotWildcard(NonLegacy.class, new HashMap<String, String>());
        verify(kradLegacyDataAdapter).allPrimaryKeyValuesPresentAndNotWildcard(eq(NonLegacy.class), anyMapOf(
                String.class, String.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testAllPrimaryKeyValuesPresentAndNotWildcard_Legacy() throws Exception {
        enableLegacy();
        lda.allPrimaryKeyValuesPresentAndNotWildcard(Legacy.class, new HashMap<String, String>());
        verify(knsLegacyDataAdapter).allPrimaryKeyValuesPresentAndNotWildcard(eq(Legacy.class), anyMapOf(String.class, String.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testListPrimaryKeyFieldNames() throws Exception {
        lda.listPrimaryKeyFieldNames(NonLegacy.class);
        verify(kradLegacyDataAdapter).listPrimaryKeyFieldNames(eq(NonLegacy.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testListPrimaryKeyFieldNames_Legacy() throws Exception {
        enableLegacy();
        lda.listPrimaryKeyFieldNames(Legacy.class);
        verify(knsLegacyDataAdapter).listPrimaryKeyFieldNames(eq(Legacy.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testDetermineCollectionObjectType() throws Exception {
        lda.determineCollectionObjectType(NonLegacy.class, "blah");
        verify(kradLegacyDataAdapter).determineCollectionObjectType(eq(NonLegacy.class), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testDetermineCollectionObjectType_Legacy() throws Exception {
        enableLegacy();
        lda.determineCollectionObjectType(Legacy.class, "blah");
        verify(knsLegacyDataAdapter).determineCollectionObjectType(eq(Legacy.class), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testHasReference() throws Exception {
        lda.hasReference(NonLegacy.class, "blah");
        verify(kradLegacyDataAdapter).hasReference(eq(NonLegacy.class), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testHasReference_Legacy() throws Exception {
        enableLegacy();
        lda.hasReference(Legacy.class, "blah");
        verify(knsLegacyDataAdapter).hasReference(eq(Legacy.class), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testHasCollection() throws Exception {
        lda.hasCollection(NonLegacy.class, "blah");
        verify(kradLegacyDataAdapter).hasCollection(eq(NonLegacy.class), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testHasCollection_Legacy() throws Exception {
        enableLegacy();
        lda.hasCollection(Legacy.class, "blah");
        verify(knsLegacyDataAdapter).hasCollection(eq(Legacy.class), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testIsExtensionAttribute() throws Exception {
        lda.isExtensionAttribute(NonLegacy.class, "blah", NonLegacy.class);
        verify(kradLegacyDataAdapter).isExtensionAttribute(eq(NonLegacy.class), eq("blah"), eq(NonLegacy.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testIsExtensionAttribute_Legacy() throws Exception {
        enableLegacy();
        lda.isExtensionAttribute(Legacy.class, "blah", Legacy.class);
        verify(knsLegacyDataAdapter).isExtensionAttribute(eq(Legacy.class), eq("blah"), eq(Legacy.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testGetExtensionAttributeClass() throws Exception {
        lda.getExtensionAttributeClass(NonLegacy.class, "blah");
        verify(kradLegacyDataAdapter).getExtensionAttributeClass(eq(NonLegacy.class), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testGetExtensionAttributeClass_Legacy() throws Exception {
        enableLegacy();
        lda.getExtensionAttributeClass(Legacy.class, "blah");
        verify(knsLegacyDataAdapter).getExtensionAttributeClass(eq(Legacy.class), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testGetPrimaryKeyFieldValuesDOMDS() throws Exception {
        Object object = newNonLegacyObject();
        lda.getPrimaryKeyFieldValuesDOMDS(object);
        verify(kradLegacyDataAdapter).getPrimaryKeyFieldValuesDOMDS(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testGetPrimaryKeyFieldValuesDOMDS_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.getPrimaryKeyFieldValuesDOMDS(object);
        verify(knsLegacyDataAdapter).getPrimaryKeyFieldValuesDOMDS(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testEqualsByPrimaryKeys() throws Exception {
        Object object1 = newNonLegacyObject();
        Object object2 = newNonLegacyObject();
        lda.equalsByPrimaryKeys(object1, object2);
        verify(kradLegacyDataAdapter).equalsByPrimaryKeys(eq(object1), eq(object2));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testEqualsByPrimaryKeys_Legacy() throws Exception {
        enableLegacy();
        Object object1 = newLegacyObject();
        Object object2 = newLegacyObject();
        lda.equalsByPrimaryKeys(object1, object2);
        verify(knsLegacyDataAdapter).equalsByPrimaryKeys(eq(object1), eq(object2));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }
// @Test
// public void testToPersistableBusinessObject() throws Exception {
// Object object = newNonLegacyObject();
// lda.toPersistableBusinessObject(object);
// verify(kradLegacyDataAdapter).toPersistableBusinessObject(eq(object));
// verifyZeroInteractions(knsLegacyDataAdapter);
// }
//
// @Test
// public void testToPersistableBusinessObject_Legacy() throws Exception {
// enableLegacy();
// Object object = newLegacyObject();
// lda.toPersistableBusinessObject(object);
// verify(knsLegacyDataAdapter).toPersistableBusinessObject(eq(object));
// verifyZeroInteractions(kradLegacyDataAdapter);
// }
    // Routing checks for the remaining object-service adapter methods; same
    // KRAD-by-default / KNS-when-legacy contract as the tests above.

    @Test
    public void testMaterializeAllSubObjects() throws Exception {
        Object object = newNonLegacyObject();
        lda.materializeAllSubObjects(object);
        verify(kradLegacyDataAdapter).materializeAllSubObjects(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testMaterializeAllSubObjects_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.materializeAllSubObjects(object);
        verify(knsLegacyDataAdapter).materializeAllSubObjects(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testGetPropertyType() throws Exception {
        Object object = newNonLegacyObject();
        lda.getPropertyType(object, "blah");
        verify(kradLegacyDataAdapter).getPropertyType(eq(object), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testGetPropertyType_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.getPropertyType(object, "blah");
        verify(knsLegacyDataAdapter).getPropertyType(eq(object), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testGetExtension() throws Exception {
        lda.getExtension(NonLegacyPersistableBusinessObject.class);
        verify(kradLegacyDataAdapter).getExtension(eq(NonLegacyPersistableBusinessObject.class));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testGetExtension_Legacy() throws Exception {
        enableLegacy();
        lda.getExtension(Legacy.class);
        verify(knsLegacyDataAdapter).getExtension(eq(Legacy.class));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testRefreshReferenceObject() throws Exception {
        PersistableBusinessObject object = newNonLegacyPersistableBusinessObject();
        lda.refreshReferenceObject(object, "blah");
        verify(kradLegacyDataAdapter).refreshReferenceObject(eq(object), eq("blah"));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testRefreshReferenceObject_Legacy() throws Exception {
        enableLegacy();
        PersistableBusinessObject object = newLegacyObject();
        lda.refreshReferenceObject(object, "blah");
        verify(knsLegacyDataAdapter).refreshReferenceObject(eq(object), eq("blah"));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testIsLockable() throws Exception {
        Object object = newNonLegacyObject();
        lda.isLockable(object);
        verify(kradLegacyDataAdapter).isLockable(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testIsLockable_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.isLockable(object);
        verify(knsLegacyDataAdapter).isLockable(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }

    @Test
    public void testVerifyVersionNumber() throws Exception {
        Object object = newNonLegacyObject();
        lda.verifyVersionNumber(object);
        verify(kradLegacyDataAdapter).verifyVersionNumber(eq(object));
        verifyZeroInteractions(knsLegacyDataAdapter);
    }

    @Test
    public void testVerifyVersionNumber_Legacy() throws Exception {
        enableLegacy();
        Object object = newLegacyObject();
        lda.verifyVersionNumber(object);
        verify(knsLegacyDataAdapter).verifyVersionNumber(eq(object));
        verifyZeroInteractions(kradLegacyDataAdapter);
    }
@Test
public void testCreateQuickFinder() throws Exception {
lda.createQuickFinder(NonLegacy.class, "blah");
verify(kradLegacyDataAdapter).createQuickFinder(eq(NonLegacy.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testCreateQuickFinder_Legacy() throws Exception {
enableLegacy();
lda.createQuickFinder(Legacy.class, "blah");
verify(knsLegacyDataAdapter).createQuickFinder(eq(Legacy.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testIsReferenceUpdatable() throws Exception {
lda.isReferenceUpdatable(NonLegacy.class, "blah");
verify(kradLegacyDataAdapter).isReferenceUpdatable(eq(NonLegacy.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testIsReferenceUpdatable_Legacy() throws Exception {
enableLegacy();
lda.isReferenceUpdatable(Legacy.class, "blah");
verify(knsLegacyDataAdapter).isReferenceUpdatable(eq(Legacy.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testListReferenceObjectFields() throws Exception {
lda.listReferenceObjectFields(NonLegacy.class);
verify(kradLegacyDataAdapter).listReferenceObjectFields(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testListReferenceObjectFields_Legacy() throws Exception {
enableLegacy();
lda.listReferenceObjectFields(Legacy.class);
verify(knsLegacyDataAdapter).listReferenceObjectFields(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testIsCollectionUpdatable() throws Exception {
lda.isCollectionUpdatable(NonLegacy.class, "blah");
verify(kradLegacyDataAdapter).isCollectionUpdatable(eq(NonLegacy.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testIsCollectionUpdatable_Legacy() throws Exception {
enableLegacy();
lda.isCollectionUpdatable(Legacy.class, "blah");
verify(knsLegacyDataAdapter).isCollectionUpdatable(eq(Legacy.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testListCollectionObjectTypes() throws Exception {
lda.listCollectionObjectTypes(NonLegacy.class);
verify(kradLegacyDataAdapter).listCollectionObjectTypes(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testListCollectionObjectTypes_Legacy() throws Exception {
enableLegacy();
lda.listCollectionObjectTypes(Legacy.class);
verify(knsLegacyDataAdapter).listCollectionObjectTypes(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetReferenceIfExists() throws Exception {
PersistableBusinessObject object = newNonLegacyPersistableBusinessObject();
lda.getReferenceIfExists(object, "blah");
verify(kradLegacyDataAdapter).getReferenceIfExists(eq(object), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetReferenceIfExists_Legacy() throws Exception {
enableLegacy();
PersistableBusinessObject object = newLegacyObject();
lda.getReferenceIfExists(object, "blah");
verify(knsLegacyDataAdapter).getReferenceIfExists(eq(object), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testAllForeignKeyValuesPopulatedForReference() throws Exception {
PersistableBusinessObject object = newNonLegacyPersistableBusinessObject();
lda.allForeignKeyValuesPopulatedForReference(object, "blah");
verify(kradLegacyDataAdapter).allForeignKeyValuesPopulatedForReference(eq(object), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testAllForeignKeyValuesPopulatedForReference_Legacy() throws Exception {
enableLegacy();
PersistableBusinessObject object = newLegacyObject();
lda.allForeignKeyValuesPopulatedForReference(object, "blah");
verify(knsLegacyDataAdapter).allForeignKeyValuesPopulatedForReference(eq(object), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetDictionaryRelationship() throws Exception {
lda.getDictionaryRelationship(NonLegacy.class, "blah");
verify(kradLegacyDataAdapter).getDictionaryRelationship(eq(NonLegacy.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetDictionaryRelationship_Legacy() throws Exception {
enableLegacy();
lda.getDictionaryRelationship(Legacy.class, "blah");
verify(knsLegacyDataAdapter).getDictionaryRelationship(eq(Legacy.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetTitleAttribute() throws Exception {
lda.getTitleAttribute(NonLegacy.class);
verify(kradLegacyDataAdapter).getTitleAttribute(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetTitleAttribute_Legacy() throws Exception {
enableLegacy();
lda.getTitleAttribute(Legacy.class);
verify(knsLegacyDataAdapter).getTitleAttribute(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testAreNotesSupported() throws Exception {
lda.areNotesSupported(NonLegacy.class);
verify(kradLegacyDataAdapter).areNotesSupported(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testAreNotesSupported_Legacy() throws Exception {
enableLegacy();
lda.areNotesSupported(Legacy.class);
verify(knsLegacyDataAdapter).areNotesSupported(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetDataObjectIdentifierString() throws Exception {
Object object = newNonLegacyObject();
lda.getDataObjectIdentifierString(object);
verify(kradLegacyDataAdapter).getDataObjectIdentifierString(eq(object));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetDataObjectIdentifierString_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getDataObjectIdentifierString(object);
verify(knsLegacyDataAdapter).getDataObjectIdentifierString(eq(object));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetInquiryObjectClassIfNotTitle() throws Exception {
Object object = newNonLegacyObject();
lda.getInquiryObjectClassIfNotTitle(object, "blah");
verify(kradLegacyDataAdapter).getInquiryObjectClassIfNotTitle(eq(object), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetInquiryObjectClassIfNotTitle_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getInquiryObjectClassIfNotTitle(object, "blah");
verify(knsLegacyDataAdapter).getInquiryObjectClassIfNotTitle(eq(object), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetInquiryParameters() throws Exception {
Object object = newNonLegacyObject();
lda.getInquiryParameters(object, new ArrayList<String>(), "blah");
verify(kradLegacyDataAdapter).getInquiryParameters(eq(object), anyListOf(String.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetInquiryParameters_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getInquiryParameters(object, new ArrayList<String>(), "blah");
verify(knsLegacyDataAdapter).getInquiryParameters(eq(object), anyListOf(String.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testHasLocalLookup() throws Exception {
lda.hasLocalLookup(NonLegacy.class);
verify(kradLegacyDataAdapter).hasLocalLookup(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testHasLocalLookup_Legacy() throws Exception {
enableLegacy();
lda.hasLocalLookup(Legacy.class);
verify(knsLegacyDataAdapter).hasLocalLookup(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testHasLocalInquiry() throws Exception {
lda.hasLocalInquiry(NonLegacy.class);
verify(kradLegacyDataAdapter).hasLocalInquiry(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testHasLocalInquiry_Legacy() throws Exception {
enableLegacy();
lda.hasLocalInquiry(Legacy.class);
verify(knsLegacyDataAdapter).hasLocalInquiry(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetDataObjectRelationship() throws Exception {
Object object = newNonLegacyObject();
lda.getDataObjectRelationship(object, NonLegacy.class, "blah", "prefix", true, true, true);
verify(kradLegacyDataAdapter).getDataObjectRelationship(eq(object), eq(NonLegacy.class), eq("blah"), eq(
"prefix"), eq(true), eq(true), eq(true));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetDataObjectRelationship_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getDataObjectRelationship(object, Legacy.class, "blah", "prefix", true, true, true);
verify(knsLegacyDataAdapter).getDataObjectRelationship(eq(object), eq(Legacy.class), eq("blah"), eq(
"prefix"), eq(true), eq(true), eq(true));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testIsPersistable() throws Exception {
lda.isPersistable(NonLegacy.class);
verify(kradLegacyDataAdapter).isPersistable(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testIsPersistable_Legacy() throws Exception {
enableLegacy();
lda.isPersistable(Legacy.class);
verify(knsLegacyDataAdapter).isPersistable(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetForeignKeyFieldsPopulationState() throws Exception {
Object object = newNonLegacyObject();
lda.getForeignKeyFieldsPopulationState(object, "blah");
verify(kradLegacyDataAdapter).getForeignKeyFieldsPopulationState(eq(object), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetForeignKeyFieldsPopulationState_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getForeignKeyFieldsPopulationState(object, "blah");
verify(knsLegacyDataAdapter).getForeignKeyFieldsPopulationState(eq(object), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetForeignKeysForReference() throws Exception {
lda.getForeignKeysForReference(NonLegacy.class, "blah");
verify(kradLegacyDataAdapter).getForeignKeysForReference(eq(NonLegacy.class), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetForeignKeysForReference_Legacy() throws Exception {
enableLegacy();
lda.getForeignKeysForReference(Legacy.class, "blah");
verify(knsLegacyDataAdapter).getForeignKeysForReference(eq(Legacy.class), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testHasPrimaryKeyFieldValues() throws Exception {
Object object = newNonLegacyObject();
lda.hasPrimaryKeyFieldValues(object);
verify(kradLegacyDataAdapter).hasPrimaryKeyFieldValues(eq(object));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testHasPrimaryKeyFieldValues_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.hasPrimaryKeyFieldValues(object);
verify(knsLegacyDataAdapter).hasPrimaryKeyFieldValues(eq(object));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testSetObjectPropertyDeep() throws Exception {
Object object = newNonLegacyObject();
lda.setObjectPropertyDeep(object, "blahName", NonLegacy.class, "blahValue");
verify(kradLegacyDataAdapter).setObjectPropertyDeep(eq(object), eq("blahName"), eq(NonLegacy.class), eq(
"blahValue"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testSetObjectPropertyDeep_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.setObjectPropertyDeep(object, "blahName", Legacy.class, "blahValue");
verify(knsLegacyDataAdapter).setObjectPropertyDeep(eq(object), eq("blahName"), eq(Legacy.class), eq(
"blahValue"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testMaterializeClassForProxiedObject() throws Exception {
Object object = newNonLegacyObject();
lda.materializeClassForProxiedObject(object);
verify(kradLegacyDataAdapter).materializeClassForProxiedObject(eq(object));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testMaterializeClassForProxiedObject_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.materializeClassForProxiedObject(object);
verify(knsLegacyDataAdapter).materializeClassForProxiedObject(eq(object));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testGetNestedValue() throws Exception {
Object object = newNonLegacyObject();
lda.getNestedValue(object, "blah");
verify(kradLegacyDataAdapter).getNestedValue(eq(object), eq("blah"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testGetNestedValue_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.getNestedValue(object, "blah");
verify(knsLegacyDataAdapter).getNestedValue(eq(object), eq("blah"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testCreateNewObjectFromClass() throws Exception {
lda.createNewObjectFromClass(NonLegacy.class);
verify(kradLegacyDataAdapter).createNewObjectFromClass(eq(NonLegacy.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testCreateNewObjectFromClass_Legacy() throws Exception {
enableLegacy();
lda.createNewObjectFromClass(Legacy.class);
verify(knsLegacyDataAdapter).createNewObjectFromClass(eq(Legacy.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testIsNull() throws Exception {
Object object = newNonLegacyObject();
lda.isNull(object);
verify(kradLegacyDataAdapter).isNull(eq(object));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testIsNull_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.isNull(object);
verify(knsLegacyDataAdapter).isNull(eq(object));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testSetObjectProperty() throws Exception {
Object object = newNonLegacyObject();
lda.setObjectProperty(object, "blahName", NonLegacy.class, "blahValue");
verify(kradLegacyDataAdapter).setObjectProperty(eq(object), eq("blahName"), eq(NonLegacy.class),
eq("blahValue"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testSetObjectProperty_Legacy() throws Exception {
enableLegacy();
Object object = newLegacyObject();
lda.setObjectProperty(object, "blahName", Legacy.class, "blahValue");
verify(knsLegacyDataAdapter).setObjectProperty(eq(object), eq("blahName"), eq(Legacy.class), eq("blahValue"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testFindByDocumentHeaderId() throws Exception {
lda.findByDocumentHeaderId(NonLegacyDocument.class, "1234");
verify(kradLegacyDataAdapter).findByDocumentHeaderId(eq(NonLegacyDocument.class), eq("1234"));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testFindByDocumentHeaderId_Legacy() throws Exception {
enableLegacy();
lda.findByDocumentHeaderId(LegacyDocument.class, "1234");
verify(knsLegacyDataAdapter).findByDocumentHeaderId(eq(LegacyDocument.class), eq("1234"));
verifyZeroInteractions(kradLegacyDataAdapter);
}
@Test
public void testFindByDocumentHeaderIds() throws Exception {
lda.findByDocumentHeaderIds(NonLegacyDocument.class, new ArrayList<String>());
verify(kradLegacyDataAdapter).findByDocumentHeaderIds(eq(NonLegacyDocument.class), anyListOf(String.class));
verifyZeroInteractions(knsLegacyDataAdapter);
}
@Test
public void testFindByDocumentHeaderIds_Legacy() throws Exception {
enableLegacy();
lda.findByDocumentHeaderIds(LegacyDocument.class, new ArrayList<String>());
verify(knsLegacyDataAdapter).findByDocumentHeaderIds(eq(LegacyDocument.class), anyListOf(String.class));
verifyZeroInteractions(kradLegacyDataAdapter);
}
// Plain accessor coverage: getters return the injected collaborators and
// setters overwrite them (null is accepted without validation).
@Test
public void testGetKnsLegacyDataAdapter() throws Exception {
assertEquals(knsLegacyDataAdapter, lda.getKnsLegacyDataAdapter());
}
@Test
public void testSetKnsLegacyDataAdapter() throws Exception {
// setter performs no null check — documents current behavior
lda.setKnsLegacyDataAdapter(null);
assertNull(lda.getKnsLegacyDataAdapter());
}
@Test
public void testGetKradLegacyDataAdapter() throws Exception {
assertEquals(kradLegacyDataAdapter, lda.getKradLegacyDataAdapter());
}
@Test
public void testSetKradLegacyDataAdapter() throws Exception {
// setter performs no null check — documents current behavior
lda.setKradLegacyDataAdapter(null);
assertNull(lda.getKradLegacyDataAdapter());
}
// Exercises the central routing decision used by every delegation test above:
// selectAdapter() picks KNS only when legacy mode is enabled AND the argument
// is a legacy class/object; otherwise it falls back to KRAD.
@Test
public void testSelectAdapter() throws Exception {
// Scenario 1: KNS is not enabled, in this case it will always default to KRAD adapter
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(NonLegacy.class));
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(newNonLegacyObject()));
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(Legacy.class));
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(newLegacyObject()));
// Now let's enable the KNS
enableLegacy();
// Scenario 2: Using a Class which is a valid legacy Class, should use KNS adapter
assertEquals(knsLegacyDataAdapter, lda.selectAdapter(Legacy.class));
// Scenario 3: Using an Object which is a valid legacy Object, should use KNS adapter
assertEquals(knsLegacyDataAdapter, lda.selectAdapter(newLegacyObject()));
// Scenario 4: Using a Class which is a not a legacy Class, should fall back to KRAD adapter even though legacy is enabled
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(NonLegacy.class));
// Scenario 5: Using an Object which is a not a legacy Object, should fall back to KRAD adapter even though legacy is enabled
assertEquals(kradLegacyDataAdapter, lda.selectAdapter(newNonLegacyObject()));
}
// Fixture types driving adapter selection in the tests above:
// NonLegacy — plain serializable object, never routed to the KNS adapter.
public static final class NonLegacy implements Serializable {}
// Legacy — persistable business object treated as legacy once enableLegacy() ran.
public static final class Legacy extends PersistableBusinessObjectBase {}
// Persistable but presumably not registered as legacy — TODO confirm against
// the enableLegacy() setup outside this view.
public static final class NonLegacyPersistableBusinessObject extends PersistableBusinessObjectBase {}
// Document fixtures for the findByDocumentHeaderId* tests.
public static final class NonLegacyDocument extends DocumentBase {}
public static final class LegacyDocument extends DocumentBase {}
}
| |
package net.instantcom.keiko.bittorrent.protocol.encryption;
import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.math.BigInteger;
import java.security.InvalidAlgorithmParameterException;
import java.security.InvalidKeyException;
import java.security.KeyFactory;
import java.security.KeyPair;
import java.security.KeyPairGenerator;
import java.security.NoSuchAlgorithmException;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Random;

import javax.crypto.KeyAgreement;
import javax.crypto.interfaces.DHPublicKey;
import javax.crypto.spec.DHParameterSpec;
import javax.crypto.spec.DHPublicKeySpec;

import org.bouncycastle.crypto.StreamCipher;
import org.bouncycastle.crypto.engines.RC4Engine;
import org.bouncycastle.crypto.params.KeyParameter;

import net.instantcom.keiko.Server;
import net.instantcom.keiko.bittorrent.protocol.HandshakeException;
import net.instantcom.keiko.bittorrent.protocol.Torrent;
import net.instantcom.util.SHA1Util;
public class EncryptedHandshake {
// PRNG for pad lengths/contents. NOTE(review): java.util.Random is not a
// SecureRandom; pads are obfuscation only, but confirm this is intentional.
private static final Random random = new Random();
// 768-bit Diffie-Hellman prime from the Message Stream Encryption spec.
private static final BigInteger P =
new BigInteger(
"FFFFFFFFFFFFFFFFC90FDAA22168C234C4C6628B80DC1CD129024E088A67CC74020BBEA63B139B22514A08798E3404DDEF9519B3CD3A431B302B0A6DF25F14374FE1356D6D51C245E485B576625E7EC6F44C42E9A63A36210000000000090563",
16);
// DH generator G = 2.
private static final BigInteger G = BigInteger.valueOf(2);
// crypto_provide / crypto_select bit flags (MSE handshake).
private static final int CRYPTO_PLAINTEXT = 0x01;
private static final int CRYPTO_RC4 = 0x02;
// azureus supports CRYPTO_XOR and CRYPTO_AES but no specification is
// available, utorrent supports only same things as we do
private static final int MY_CRYPTO_SUPPORTED =
CRYPTO_PLAINTEXT | CRYPTO_RC4;
/**
* Creates new encrypted handshake. Input and output streams will be
* automatically buffered if they aren't already. En/decryption on the
* wrapped streams stays disabled until doHandshake() turns it on.
*
* @param is
* input stream
* @param os
* output stream
*/
public EncryptedHandshake(InputStream is, OutputStream os) {
{
// sanity check: P must serialize to exactly 96 bytes (768 bits)
byte[] test = bigIntegerToByteArray(P, 96);
if (96 != test.length) {
throw new IllegalArgumentException(
"P is NOT 768 bits long! Length of P is "
+ (8 * test.length));
}
}
// create encrypted input and output streams with uninitialized ciphers
eis =
new EncryptedInputStream(is instanceof BufferedInputStream ? is
: new BufferedInputStream(is), new RC4Engine());
eos =
new EncryptedOutputStream(os instanceof BufferedOutputStream ? os
: new BufferedOutputStream(os), new RC4Engine());
// en/decryption is disabled by default
eis.setEnabled(false);
eos.setEnabled(false);
// all protocol I/O goes through these wrappers
dis = new DataInputStream(eis);
dos = new DataOutputStream(eos);
}
/**
* Gets data input stream. Reads are transparently decrypted once
* doHandshake() has enabled the underlying cipher.
*
* @return data input stream
*/
public DataInputStream getDataInputStream() {
return dis;
}
/**
* Gets data output stream. Writes are transparently encrypted once
* doHandshake() has enabled the underlying cipher.
*
* @return data output stream
*/
public DataOutputStream getDataOutputStream() {
return dos;
}
/**
 * Attempts the encrypted (Message Stream Encryption) handshake.
 *
 * @param infoHash
 *            info hash of torrent for outgoing connections, null for
 *            incoming connections
 * @return true if encrypted handshake was completed, false if incoming
 *         connection is plaintext (it's still possible to proceed with
 *         normal bt handshake)
 * @throws HandshakeException
 *             if encrypted handshake fails
 */
public boolean doHandshake(byte[] infoHash) throws HandshakeException {
    try {
        // a null infoHash means the remote side initiated the connection
        final boolean incoming = null == infoHash;
        // create my DH key pair (also initializes keyAgreement)
        DHPublicKey myPublicKey = startKeyExchange();
        // PadA/PadB: 1..512 random bytes appended to the public key
        byte[] myPad = new byte[1 + random.nextInt(512)];
        random.nextBytes(myPad);
        // PadC/PadD: 1..512 zero bytes (length is sent, content ignored)
        byte[] myPadCD = new byte[1 + random.nextInt(512)];
        // VC is always zeroed
        byte[] myVC = new byte[8];
        // we don't have initial payload
        byte[] myIA = null; // zero length
        if (incoming) {
            // from now on i'm B
            boolean markSupported = dis.markSupported();
            if (markSupported) {
                // check if it's unencrypted protocol: peek for "\19Bit"
                dis.mark(5);
                if (0x13426974 == dis.readInt()) {
                    dis.reset();
                    return false;
                }
                dis.reset();
            }
            // 1 A->B: Diffie Hellman Ya, PadA
            byte[] tmp = new byte[96]; // 768 bits
            dis.readFully(tmp);
            // finish key exchange
            finishKeyExchange(byteArrayToBigInteger(tmp));
            // 2 B->A: Diffie Hellman Yb, PadB
            dos.write(bigIntegerToByteArray(myPublicKey.getY(), 96));
            dos.write(myPad);
            dos.flush();
            // 3 A->B: HASH('req1', S), HASH('req2', SKEY) xor HASH('req3',
            // S), ENCRYPT(VC, crypto_provide, len(PadC), PadC, len(IA)),
            // ENCRYPT(IA)
            // Resynchronize on HASH('req1', S) within at most 628 bytes
            // (96-byte key + 512-byte pad + 20-byte hash). A sliding window
            // is used instead of a prefix counter: the prefix counter
            // silently swallowed bytes after a partial match (e.g. when
            // PadA ends with the hash's first byte), failing roughly 1 in
            // 256 otherwise-valid handshakes.
            if (!synchronizeOn(SHA1Util.getSHA1("req1".getBytes(),
                    secretBytes), 628)) {
                throw new HandshakeException("can't find HASH('req1', S)");
            }
            Torrent foundTorrent = null;
            {
                byte[] req2Hash = new byte[20];
                dis.readFully(req2Hash);
                {
                    // unxor with HASH('req3', S) to recover HASH('req2', SKEY)
                    byte[] xored =
                        SHA1Util.getSHA1("req3".getBytes(), secretBytes);
                    for (int i = 0; i < 20; i++) {
                        req2Hash[i] ^= xored[i];
                    }
                }
                // check if torrent is known: compare against every served
                // torrent's HASH('req2', info_hash)
                HashMap<String, Torrent> torrents = Server.getTorrents();
                for (Torrent torrent : torrents.values()) {
                    byte[] expected =
                        SHA1Util.getSHA1("req2".getBytes(), torrent
                            .getMetaInfo().getInfoHash());
                    if (Arrays.equals(expected, req2Hash)) {
                        foundTorrent = torrent;
                        break;
                    }
                }
                if (null == foundTorrent) {
                    throw new HandshakeException("no such torrent");
                }
            }
            // from now on everything is encrypted
            setupStreamCiphers(incoming, foundTorrent.getMetaInfo()
                .getInfoHash());
            eis.setEnabled(true);
            eos.setEnabled(true);
            {
                // let it be whatever it is, azureus doesn't check it either
                byte[] hisVC = new byte[8];
                dis.readFully(hisVC);
            }
            int hisCryptoProvide = dis.readInt();
            if (0 == (hisCryptoProvide & MY_CRYPTO_SUPPORTED)) {
                throw new HandshakeException("unknown cryptoProvide: "
                    + hisCryptoProvide + " ("
                    + Integer.toBinaryString(hisCryptoProvide) + ")");
            }
            // skip PadC; readUnsignedShort() can't be negative, only the
            // spec's upper bound (512) needs checking
            int size = dis.readUnsignedShort();
            if (size > 512) {
                throw new IllegalArgumentException(
                    "padC has incorrect size: " + size);
            }
            while (size > 0) {
                dis.readByte();
                --size;
            }
            int hisIASize = dis.readUnsignedShort();
            // this is the tricky part, if it exists it is definitely
            // encrypted so we need to read and decrypt it now and save it
            // for later
            byte[] hisIA = null;
            if (hisIASize > 0) {
                hisIA = new byte[hisIASize];
                dis.readFully(hisIA);
            }
            // 4 B->A: ENCRYPT(VC, crypto_select, len(padD), padD),
            // ENCRYPT2(Payload Stream)
            dos.write(myVC);
            int myCryptoSelect;
            if (0 != (hisCryptoProvide & CRYPTO_PLAINTEXT)) {
                // prefer plain text from now on
                myCryptoSelect = CRYPTO_PLAINTEXT;
                fullyEncrypted = false;
            } else {
                // rc4 continues
                myCryptoSelect = CRYPTO_RC4;
                fullyEncrypted = true;
            }
            dos.writeInt(myCryptoSelect);
            // myPadCD is always non-null (allocated above)
            dos.writeShort(myPadCD.length);
            dos.write(myPadCD);
            if (CRYPTO_PLAINTEXT == myCryptoSelect) {
                // protocol continues unencrypted
                eis.setEnabled(false);
                eos.setEnabled(false);
            } else {
                // protocol continues encrypted
                eis.setEnabled(true);
                eos.setEnabled(true);
            }
            dos.flush();
            // make sure eis serves the already-decrypted initial payload
            // before the rest of the stream
            eis.setInitialPayload(null == hisIA ? null
                : new ByteArrayInputStream(hisIA));
            // 5 A->B: ENCRYPT2(Payload Stream)
            // payload stream is usual bt protocol including handshake;
            // PeerConnection.doHandshake() should take care of the rest as
            // encryption and decryption are totally transparent
        } else {
            // from now on i'm A
            // 1 A->B: Diffie Hellman Ya, PadA
            dos.write(bigIntegerToByteArray(myPublicKey.getY(), 96));
            dos.write(myPad);
            dos.flush();
            // 2 B->A: Diffie Hellman Yb, PadB
            byte[] tmp = new byte[96]; // 768 bits
            dis.readFully(tmp);
            // finish key exchange
            finishKeyExchange(byteArrayToBigInteger(tmp));
            // 3 A->B: HASH('req1', S), HASH('req2', SKEY) xor HASH('req3',
            // S), ENCRYPT(VC, crypto_provide, len(PadC), PadC, len(IA)),
            // ENCRYPT(IA)
            dos.write(SHA1Util.getSHA1("req1".getBytes(), secretBytes));
            byte[] hashReq2SKey =
                SHA1Util.getSHA1("req2".getBytes(), infoHash);
            {
                // xor with HASH('req3', S) so only a peer knowing S can
                // recognize which torrent is requested
                byte[] hashReq3S =
                    SHA1Util.getSHA1("req3".getBytes(), secretBytes);
                for (int i = 0; i < hashReq2SKey.length; i++) {
                    hashReq2SKey[i] ^= hashReq3S[i];
                }
            }
            dos.write(hashReq2SKey);
            // from now on outgoing traffic is encrypted
            setupStreamCiphers(incoming, infoHash);
            eos.setEnabled(true);
            dos.write(myVC);
            dos.writeInt(MY_CRYPTO_SUPPORTED);
            // myPadCD is always non-null (allocated above)
            dos.writeShort(myPadCD.length);
            dos.write(myPadCD);
            if (null != myIA) {
                dos.writeShort(myIA.length);
                dos.write(myIA);
            } else {
                dos.writeShort(0);
            }
            dos.flush();
            // 4 B->A: ENCRYPT(VC, crypto_select, len(padD), padD),
            // ENCRYPT2(Payload Stream)
            // B's VC arrives encrypted: predict its ciphertext by cloning
            // B's cipher setup, then scan the raw stream for it
            byte[] expectedVC = new byte[8];
            {
                StreamCipher syncCipher = new RC4Engine();
                syncCipher.init(true,
                    keyParameterUsedToSetupOutputStreamCipher);
                skip1024(syncCipher);
                syncCipher.processBytes(new byte[8], 0, 8, expectedVC, 0);
            }
            // sliding-window resync (see synchronizeOn) over at most
            // 616 bytes (96-byte key + 512-byte pad + 8-byte VC)
            if (!synchronizeOn(expectedVC, 616)) {
                throw new HandshakeException("can't find VC");
            }
            // from now on incoming traffic is encrypted
            eis.setEnabled(true);
            // make sure input stream cipher is synced to remote output
            // cipher (the 8 VC bytes above were consumed undecrypted)
            eis.getStreamCipher().processBytes(new byte[8], 0, 8,
                new byte[8], 0);
            int hisCryptoSelect = dis.readInt();
            // len(padD) is an unsigned 16-bit field; the previous
            // readShort() turned values > 32767 into a negative length and
            // silently desynced the stream instead of failing cleanly
            int size = dis.readUnsignedShort();
            if (size > 512) {
                throw new IllegalArgumentException(
                    "padD has incorrect size: " + size);
            }
            if (size > 0) {
                // padD content is ignored, but must be consumed
                byte[] hisPadD = new byte[size];
                dis.readFully(hisPadD);
            }
            if (CRYPTO_PLAINTEXT == hisCryptoSelect) {
                // protocol continues unencrypted
                eis.setEnabled(false);
                eos.setEnabled(false);
                fullyEncrypted = false;
            } else if (CRYPTO_RC4 == hisCryptoSelect) {
                // protocol continues encrypted
                eis.setEnabled(true);
                eos.setEnabled(true);
                fullyEncrypted = true;
            } else {
                throw new HandshakeException("unknown cryptoSelect: "
                    + hisCryptoSelect + " ("
                    + Integer.toBinaryString(hisCryptoSelect) + ")");
            }
            // 5 A->B: ENCRYPT2(Payload Stream)
            // payload stream is usual bt protocol including handshake;
            // PeerConnection.doHandshake() should take care of the rest as
            // encryption and decryption are totally transparent
        }
    } catch (HandshakeException e) {
        // don't wrap an already-specific failure a second time
        throw e;
    } catch (Exception e) {
        throw new HandshakeException(e);
    }
    return true;
}

/**
 * Reads bytes from the input stream until the last {@code pattern.length}
 * bytes seen equal {@code pattern}, or {@code maxBytes} bytes have been
 * consumed. The sliding window guarantees no byte is ever skipped, unlike
 * a prefix counter that resets to zero on mismatch.
 *
 * @param pattern
 *            byte sequence to synchronize on
 * @param maxBytes
 *            maximum number of bytes to consume while searching
 * @return true if the pattern was found
 */
private boolean synchronizeOn(byte[] pattern, int maxBytes)
        throws IOException {
    byte[] window = new byte[pattern.length];
    int consumed = 0;
    while (consumed < maxBytes) {
        byte x = dis.readByte();
        ++consumed;
        // shift window left by one and append the new byte
        System.arraycopy(window, 1, window, 0, window.length - 1);
        window[window.length - 1] = x;
        if (consumed >= window.length && Arrays.equals(window, pattern)) {
            return true;
        }
    }
    return false;
}
// Serializes a non-negative BigInteger into exactly numBytes big-endian
// bytes, left-padded with zeros. BigInteger.toByteArray() alone is
// unsuitable: it may prepend a 0x00 sign byte or return fewer bytes than
// required, so the magnitude is re-packed into a fixed-size buffer. This
// replaces the previous hex-string round trip, which allocated a string per
// call and broke on negative values (the '-' sign).
private byte[] bigIntegerToByteArray(BigInteger big, int numBytes) {
    if (big.signum() < 0) {
        throw new IllegalArgumentException("value must be non-negative");
    }
    // minimal two's-complement form; at most one leading 0x00 sign byte
    byte[] raw = big.toByteArray();
    int offset = (raw.length > 1 && 0 == raw[0]) ? 1 : 0;
    int significant = raw.length - offset;
    if (significant > numBytes) {
        throw new IllegalArgumentException("value needs " + significant
            + " bytes but only " + numBytes + " are available");
    }
    byte[] array = new byte[numBytes];
    // right-align the magnitude; leading bytes stay zero
    System.arraycopy(raw, offset, array, numBytes - significant, significant);
    return array;
}
// Interprets the array as an unsigned big-endian integer. The
// (signum, magnitude) constructor replaces the former hex-string round trip
// through SHA1Util and guarantees a non-negative result even when the high
// bit of array[0] is set.
private BigInteger byteArrayToBigInteger(byte[] array) {
    return new BigInteger(1, array);
}
// Generates an ephemeral DH key pair over (P, G) and initializes
// keyAgreement with the private half; returns the public key to send to the
// peer. The 160-bit private-exponent size keeps key generation cheap.
private DHPublicKey startKeyExchange() throws NoSuchAlgorithmException,
InvalidAlgorithmParameterException, InvalidKeyException {
KeyPairGenerator keyPairGenerator = KeyPairGenerator.getInstance("DH");
keyPairGenerator.initialize(new DHParameterSpec(P, G, 160));
KeyPair keyPair = keyPairGenerator.generateKeyPair();
keyAgreement = KeyAgreement.getInstance("DH");
keyAgreement.init(keyPair.getPrivate());
return (DHPublicKey) keyPair.getPublic();
}
// Completes the DH exchange with the peer's public value Y and stores the
// shared secret S in secretBytes for later key and hash derivation.
// Must be called after startKeyExchange() (keyAgreement is set there).
private void finishKeyExchange(BigInteger hisY)
throws NoSuchAlgorithmException, InvalidKeySpecException,
InvalidKeyException {
KeyFactory keyFactory = KeyFactory.getInstance("DH");
PublicKey hisPublicKey =
keyFactory.generatePublic(new DHPublicKeySpec(hisY, P, G));
keyAgreement.doPhase(hisPublicKey, true);
secretBytes = keyAgreement.generateSecret();
}
/**
 * Derives the two stream-cipher keys from the DH secret and the torrent
 * info hash, initialises the input/output ciphers and discards their first
 * 1024 keystream bytes.
 *
 * The connection initiator encrypts with keyA and decrypts with keyB; for
 * an incoming connection the roles are swapped.
 *
 * @param incoming true if the peer initiated the connection
 * @param infoHash the torrent's info hash, mixed into the key derivation
 */
private void setupStreamCiphers(boolean incoming, byte[] infoHash)
        throws NoSuchAlgorithmException {
    KeyParameter keyA = new KeyParameter(
            SHA1Util.getSHA1("keyA".getBytes(), secretBytes, infoHash));
    KeyParameter keyB = new KeyParameter(
            SHA1Util.getSHA1("keyB".getBytes(), secretBytes, infoHash));
    KeyParameter decryptKey = incoming ? keyA : keyB;
    KeyParameter encryptKey = incoming ? keyB : keyA;
    eis.getStreamCipher().init(false, decryptKey);
    eos.getStreamCipher().init(true, encryptKey);
    skip1024(eis.getStreamCipher());
    skip1024(eos.getStreamCipher());
    // NOTE(review): despite the field's name, this stores the key the *input*
    // cipher was initialised with, not the output cipher's key — confirm this
    // is what later users of the field expect.
    keyParameterUsedToSetupOutputStreamCipher = incoming ? keyA : keyB;
}
/**
 * Discards the first 1024 bytes of the cipher's keystream to protect
 * against the Fluhrer, Mantin and Shamir attack on RC4.
 *
 * @param streamCipher an already-initialised cipher to advance
 */
private void skip1024(StreamCipher streamCipher) {
    byte[] discard = new byte[1024];
    // encrypt a scratch buffer in place purely to advance the keystream
    streamCipher.processBytes(discard, 0, discard.length, discard, 0);
}
/**
 * @return true if the negotiated crypto mode encrypts the whole payload
 *         stream (as opposed to header-only/plaintext)
 */
public boolean isFullyEncrypted() {
return fullyEncrypted;
}
// DH agreement object; created in startKeyExchange(), consumed in finishKeyExchange()
private KeyAgreement keyAgreement;
// raw shared secret produced by the DH exchange
private byte[] secretBytes;
// NOTE(review): setupStreamCiphers() assigns this the *input* cipher's key,
// not the output cipher's — the name may be misleading; verify usage
private KeyParameter keyParameterUsedToSetupOutputStreamCipher;
// cipher-wrapped streams for the encrypted connection
private final EncryptedInputStream eis;
private final EncryptedOutputStream eos;
// plain data views layered over the encrypted streams
private final DataInputStream dis;
private final DataOutputStream dos;
// true once a full-payload-encryption mode has been negotiated
private boolean fullyEncrypted;
}
| |
package com.sharepast.domain.user;
import com.sharepast.domain.AppTimeZone;
import org.hibernate.annotations.AccessType;
import javax.persistence.Column;
import javax.persistence.Embeddable;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import java.io.Serializable;
/**
 * Embeddable geographic/postal location value object, persisted inline in
 * the owning entity's table via field-access JPA mapping. All accessors are
 * synchronized, preserving the original thread-safety choice.
 */
@Embeddable
@AccessType("field")
public class Location implements Serializable {

    // country code, max 3 chars (presumably ISO 3166 — confirm with callers)
    @Column(length = 3)
    private String country;

    // language code, max 3 chars, stored in column "lang"
    @Column(length = 3, name = "lang")
    private String language;

    @Column(name = "postal_code", length = 25)
    private String postalCode;

    // time zone persisted by enum name, so renaming enum constants is a schema change
    @Column(name = "time_zone")
    @Enumerated(EnumType.STRING)
    private AppTimeZone timeZone;

    @Column
    private double latitude;

    @Column
    private double longitude;

    @Column
    private double altitude;

    // free-form street address lines
    @Column
    private String line1;

    @Column
    private String line2;

    @Column(length = 100)
    private String city;

    @Column(length = 100)
    private String state;

    @Column(length = 100)
    private String province;

    @Column(name = "neighborhood_name", length = 100)
    private String neighborhoodName;

    // designated-market-area id; never exposed as null (getter/setter map null to 0)
    @Column(name = "dma_id")
    private Integer dmaId = 0;

    /** No-arg constructor required by JPA. */
    public Location() {
    }

    public Location(String country, String language, String postalCode,
                    AppTimeZone timeZone, double latitude, double longitude,
                    double altitude) {
        this.country = country;
        this.language = language;
        this.postalCode = postalCode;
        this.timeZone = timeZone;
        this.latitude = latitude;
        this.longitude = longitude;
        this.altitude = altitude;
    }

    public synchronized String getCountry() {
        return country;
    }

    public synchronized void setCountry(String country) {
        this.country = country;
    }

    public synchronized String getLanguage() {
        return language;
    }

    public synchronized void setLanguage(String language) {
        this.language = language;
    }

    public synchronized String getPostalCode() {
        return postalCode;
    }

    public synchronized void setPostalCode(String postalCode) {
        this.postalCode = postalCode;
    }

    public synchronized AppTimeZone getTimeZone() {
        return timeZone;
    }

    public synchronized void setTimeZone(AppTimeZone timeZone) {
        this.timeZone = timeZone;
    }

    public synchronized double getLatitude() {
        return latitude;
    }

    public synchronized void setLatitude(double latitude) {
        this.latitude = latitude;
    }

    public synchronized double getLongitude() {
        return longitude;
    }

    public synchronized void setLongitude(double longitude) {
        this.longitude = longitude;
    }

    public synchronized double getAltitude() {
        return altitude;
    }

    /**
     * Sets the altitude. The parameter type is widened from {@code float}
     * to {@code double} to match the field, the getter and the constructor;
     * existing float-valued callers still compile via implicit widening.
     */
    public synchronized void setAltitude(double altitude) {
        this.altitude = altitude;
    }

    public synchronized String getNeighborhoodName() {
        return neighborhoodName;
    }

    public synchronized void setNeighborhoodName(String neighborhoodName) {
        this.neighborhoodName = neighborhoodName;
    }

    public synchronized String getProvince() {
        return province;
    }

    public synchronized void setProvince(String province) {
        this.province = province;
    }

    public synchronized String getState() {
        return state;
    }

    public synchronized void setState(String state) {
        this.state = state;
    }

    public synchronized String getCity() {
        return city;
    }

    public synchronized void setCity(String city) {
        this.city = city;
    }

    public synchronized String getLine2() {
        return line2;
    }

    public synchronized void setLine2(String line2) {
        this.line2 = line2;
    }

    public synchronized String getLine1() {
        return line1;
    }

    public synchronized void setLine1(String line1) {
        this.line1 = line1;
    }

    /** @return the DMA id, never null (null is normalised to 0) */
    public synchronized Integer getDmaId() {
        if (dmaId == null)
            return 0;
        return dmaId;
    }

    /** Stores the DMA id, normalising null to 0. */
    public synchronized void setDmaId(Integer dmaId) {
        if (dmaId == null)
            dmaId = 0;
        this.dmaId = dmaId;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Properties;
import org.apache.pig.EvalFunc;
import org.apache.pig.PigServer;
import org.apache.pig.backend.hadoop.executionengine.mapReduceLayer.MRConfiguration;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.data.DataBag;
import org.apache.pig.data.DefaultDataBag;
import org.apache.pig.data.Tuple;
import org.apache.pig.impl.PigContext;
import org.apache.pig.impl.io.FileLocalizer;
import org.junit.AfterClass;
import org.junit.Assume;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestCombiner {
private static MiniGenericCluster cluster;
private static Properties properties;
@BeforeClass
public static void oneTimeSetUp() throws Exception {
cluster = MiniGenericCluster.buildCluster();
properties = cluster.getProperties();
}
@AfterClass
public static void oneTimeTearDown() throws Exception {
cluster.shutDown();
}
@Before
public void setUp() throws Exception {
Util.resetStateForExecModeSwitch();
}
@Test
public void testSuccessiveUserFuncs1() throws Exception {
String query = "a = load 'students.txt' as (c1,c2,c3,c4); " +
"c = group a by c2; " +
"f = foreach c generate COUNT(org.apache.pig.builtin.Distinct($1.$2)); " +
"store f into 'out';";
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
PigContext pc = pigServer.getPigContext();
assertTrue((Util.buildMRPlan(Util.buildPp(pigServer, query), pc).getRoots().get(0).combinePlan
.isEmpty()));
pigServer.shutdown();
}
@Test
public void testSuccessiveUserFuncs2() throws Exception {
String dummyUDF = JiraPig1030.class.getName();
String query = "a = load 'students.txt' as (c1,c2,c3,c4); " +
"c = group a by c2; " +
"f = foreach c generate COUNT(" + dummyUDF + "" +
"(org.apache.pig.builtin.Distinct($1.$2)," + dummyUDF + "())); " +
"store f into 'out';";
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
PigContext pc = pigServer.getPigContext();
assertTrue((Util.buildMRPlan(Util.buildPp(pigServer, query), pc).getRoots().get(0).combinePlan
.isEmpty()));
pigServer.shutdown();
}
@Test
public void testOnCluster() throws Exception {
// run the test on cluster
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
String inputFileName = runTest(pigServer);
Util.deleteFile(cluster, inputFileName);
pigServer.shutdown();
}
@Test
public void testLocal() throws Exception {
// run the test locally
FileLocalizer.deleteTempFiles();
runTest(new PigServer("local"));
FileLocalizer.deleteTempFiles();
}
private String runTest(PigServer pig) throws IOException {
List<String> inputLines = new ArrayList<String>();
inputLines.add("a,b,1");
inputLines.add("a,b,1");
inputLines.add("a,c,1");
String inputFileName = loadWithTestLoadFunc("A", pig, inputLines);
pig.registerQuery("B = foreach A generate $0 as (c0:chararray), $1 as (c1:chararray), $2 as (c2:int);");
pig.registerQuery("C = group B by ($0, $1);");
pig.registerQuery("D = foreach C generate flatten(group), COUNT($1) as int;");
// Since the input has no schema, using Util.getTuplesFromConstantTupleStrings fails assert.
List<Tuple> resultTuples = Util.getTuplesFromConstantTupleStrings(
new String[]{
"('a','b',2)",
"('a','c',1)",
});
Iterator<Tuple> resultIterator = pig.openIterator("D");
Util.checkQueryOutputsAfterSort(resultIterator, resultTuples);
return inputFileName;
}
private String loadWithTestLoadFunc(String loadAlias, PigServer pig,
List<String> inputLines) throws IOException {
File inputFile = File.createTempFile("test", "txt");
inputFile.deleteOnExit();
String inputFileName = inputFile.getAbsolutePath();
if (pig.getPigContext().getExecType().isLocal()) {
PrintStream ps = new PrintStream(new FileOutputStream(inputFile));
for (String line : inputLines) {
ps.println(line);
}
ps.close();
} else {
inputFileName = Util.removeColon(inputFileName);
Util.createInputFile(cluster, inputFileName, inputLines.toArray(new String[] {}));
}
pig.registerQuery(loadAlias + " = load '"
+ Util.encodeEscape(inputFileName) + "' using "
+ PigStorage.class.getName() + "(',');");
return inputFileName;
}
@Test
public void testNoCombinerUse() {
// To simulate this, we will have two input files
// with exactly one input record - this should result
// in two map tasks and each would process only one record
// hence the combiner would not get called.
}
@Test
public void testMultiCombinerUse() throws Exception {
// test the scenario where the combiner is called multiple
// times - this can happen when the output of the map > io.sort.mb
// let's set the io.sort.mb to 1MB and > 1 MB map data.
String[] input = new String[500 * 1024];
for (int i = 0; i < input.length; i++) {
if (i % 2 == 0) {
input[i] = Integer.toString(1);
} else {
input[i] = Integer.toString(0);
}
}
Util.createInputFile(cluster, "MultiCombinerUseInput.txt", input);
String oldValue = properties.getProperty("io.sort.mb");
properties.setProperty("io.sort.mb", "1");
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.getPigContext().getProperties().setProperty(MRConfiguration.CHILD_JAVA_OPTS, "-Xmx1024m");
pigServer.registerQuery("a = load 'MultiCombinerUseInput.txt' as (x:int);");
pigServer.registerQuery("b = group a all;");
pigServer.registerQuery("c = foreach b generate COUNT(a), SUM(a.$0), " +
"MIN(a.$0), MAX(a.$0), AVG(a.$0), ((double)SUM(a.$0))/COUNT(a.$0)," +
" COUNT(a.$0) + SUM(a.$0) + MAX(a.$0);");
// make sure there is a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
checkCombinerUsed(pigServer, "c", true);
Iterator<Tuple> it = pigServer.openIterator("c");
Tuple t = it.next();
assertEquals(512000L, t.get(0));
assertEquals(256000L, t.get(1));
assertEquals(0, t.get(2));
assertEquals(1, t.get(3));
assertEquals(0.5, t.get(4));
assertEquals(0.5, t.get(5));
assertEquals(512000L + 256000L + 1, t.get(6));
assertFalse(it.hasNext());
Util.deleteFile(cluster, "MultiCombinerUseInput.txt");
// Reset io.sort.mb to the original value before exit
if (oldValue == null) {
properties.remove("io.sort.mb");
} else {
properties.setProperty("io.sort.mb", oldValue);
}
pigServer.shutdown();
}
@Test
public void testDistinctAggs1() throws Exception {
// test the use of combiner for distinct aggs:
String input[] = {
"pig1\t18\t2.1",
"pig2\t24\t3.3",
"pig5\t45\t2.4",
"pig1\t18\t2.1",
"pig1\t19\t2.1",
"pig2\t24\t4.5",
"pig1\t20\t3.1" };
Util.createInputFile(cluster, "distinctAggs1Input.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'distinctAggs1Input.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a by name;");
pigServer.registerQuery("c = foreach b {" +
" x = distinct a.age;" +
" y = distinct a.gpa;" +
" z = distinct a;" +
" generate group, COUNT(x), SUM(x.age), SUM(y.gpa), SUM(a.age), " +
" SUM(a.gpa), COUNT(z.age), COUNT(z), SUM(z.age);};");
// make sure there is a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
checkCombinerUsed(pigServer, "c", true);
HashMap<String, Object[]> results = new HashMap<String, Object[]>();
results.put("pig1", new Object[] { "pig1", 3L, 57L, 5.2, 75L, 9.4, 3L, 3L, 57L });
results.put("pig2", new Object[] { "pig2", 1L, 24L, 7.8, 48L, 7.8, 2L, 2L, 48L });
results.put("pig5", new Object[] { "pig5", 1L, 45L, 2.4, 45L, 2.4, 1L, 1L, 45L });
Iterator<Tuple> it = pigServer.openIterator("c");
while (it.hasNext()) {
Tuple t = it.next();
List<Object> fields = t.getAll();
Object[] expected = results.get(fields.get(0));
int i = 0;
for (Object field : fields) {
assertEquals(expected[i++], field);
}
}
Util.deleteFile(cluster, "distinctAggs1Input.txt");
pigServer.shutdown();
}
@Test
public void testGroupAndUnion() throws Exception {
// test use of combiner when group elements are accessed in the foreach
String input1[] = {
"ABC\t1\ta\t1",
"ABC\t1\tb\t2",
"ABC\t1\ta\t3",
"ABC\t2\tb\t4",
};
Util.createInputFile(cluster, "testGroupElements1.txt", input1);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.debugOn();
pigServer.registerQuery("a1 = load 'testGroupElements1.txt' " +
"as (str:chararray, num1:int, alph : chararray, num2 : int);");
pigServer.registerQuery("b1 = group a1 by str;");
// check if combiner is present or not for various forms of foreach
pigServer.registerQuery("c1 = foreach b1 generate flatten(group), COUNT(a1.alph), SUM(a1.num2); ");
String input2[] = {
"DEF\t2\ta\t3",
"DEF\t2\td\t5",
};
Util.createInputFile(cluster, "testGroupElements2.txt", input2);
pigServer.registerQuery("a2 = load 'testGroupElements2.txt' " +
"as (str:chararray, num1:int, alph : chararray, num2 : int);");
pigServer.registerQuery("b2 = group a2 by str;");
// check if combiner is present or not for various forms of foreach
pigServer.registerQuery("c2 = foreach b2 generate flatten(group), COUNT(a2.alph), SUM(a2.num2); ");
pigServer.registerQuery("c = union c1, c2;");
List<Tuple> expectedRes =
Util.getTuplesFromConstantTupleStrings(
new String[]{
"('ABC',4L,10L)",
"('DEF',2L,8L)",
});
Iterator<Tuple> it = pigServer.openIterator("c");
Util.checkQueryOutputsAfterSort(it, expectedRes);
Util.deleteFile(cluster, "testGroupElements1.txt");
Util.deleteFile(cluster, "testGroupElements2.txt");
pigServer.shutdown();
}
@Test
public void testGroupElements() throws Exception {
// test use of combiner when group elements are accessed in the foreach
String input[] = {
"ABC\t1\ta\t1",
"ABC\t1\tb\t2",
"ABC\t1\ta\t3",
"ABC\t2\tb\t4",
"DEF\t1\td\t1",
"XYZ\t1\tx\t2"
};
Util.createInputFile(cluster, "testGroupElements.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'testGroupElements.txt' as (str:chararray, num1:int, alph : chararray, num2 : int);");
pigServer.registerQuery("b = group a by (str, num1);");
// check if combiner is present or not for various forms of foreach
pigServer.registerQuery("c = foreach b generate flatten(group), COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", true);
pigServer.registerQuery("c = foreach b generate group, COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", true);
// projecting bag - combiner should not be used
pigServer.registerQuery("c = foreach b generate group, a, COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", false);
// projecting bag - combiner should not be used
pigServer.registerQuery("c = foreach b generate group, a.num2, COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", false);
pigServer.registerQuery("c = foreach b generate group.$0, group.$1, COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", true);
pigServer.registerQuery("c = foreach b generate group.$0, group.$1 + COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", true);
pigServer.registerQuery("c = foreach b generate group.str, group.$1, COUNT(a.alph), SUM(a.num2); ");
checkCombinerUsed(pigServer, "c", true);
pigServer.registerQuery("c = foreach b generate group.str, group.$1, COUNT(a.alph), SUM(a.num2), "
+ " (group.num1 == 1 ? (COUNT(a.num2) + 1) : (SUM(a.num2) + 10)) ; ");
checkCombinerUsed(pigServer, "c", true);
List<Tuple> expectedRes =
Util.getTuplesFromConstantTupleStrings(
new String[] {
"('ABC',1,3L,6L,4L)",
"('ABC',2,1L,4L,14L)",
"('DEF',1,1L,1L,2L)",
"('XYZ',1,1L,2L,2L)",
});
Iterator<Tuple> it = pigServer.openIterator("c");
Util.checkQueryOutputsAfterSort(it, expectedRes);
Util.deleteFile(cluster, "distinctAggs1Input.txt");
pigServer.shutdown();
}
@Test
public void testGroupByLimit() throws Exception {
Assume.assumeFalse("Skip this test for Tez till PIG-5249 is fixed",
Util.isTezExecType(cluster.getExecType()));
// test use of combiner when group elements are accessed in the foreach
String input[] = {
"ABC 1",
"ABC 2",
"DEF 1",
"XYZ 1",
"XYZ 2",
"XYZ 3",
};
Util.createInputFile(cluster, "testGroupLimit.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'testGroupLimit.txt' using PigStorage(' ') " +
"as (str:chararray, num1:int) ;");
pigServer.registerQuery("b = group a by str;");
pigServer.registerQuery("c = foreach b generate group, COUNT(a.num1) ; ");
// check if combiner is present
pigServer.registerQuery("d = limit c 2 ; ");
checkCombinerUsed(pigServer, "d", true);
List<Tuple> expectedRes =
Util.getTuplesFromConstantTupleStrings(
new String[] {
"('ABC',2L)",
"('DEF',1L)",
});
Iterator<Tuple> it = pigServer.openIterator("d");
Util.checkQueryOutputsAfterSort(it, expectedRes);
pigServer.shutdown();
}
private void checkCombinerUsed(PigServer pigServer, String alias, boolean combineExpected)
throws IOException {
// make sure there is a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain(alias, ps);
boolean combinerFound;
if (pigServer.getPigContext().getExecType().name().equalsIgnoreCase("spark")) {
combinerFound = baos.toString().contains("Reduce By");
} else {
combinerFound = baos.toString().matches("(?si).*combine plan.*");
}
System.out.println(baos.toString());
assertEquals("is combiner present as expected", combineExpected, combinerFound);
}
@Test
public void testDistinctNoCombiner() throws Exception {
// test that combiner is NOT invoked when
// one of the elements in the foreach generate
// is a distinct() as the leaf
String input[] = {
"pig1\t18\t2.1",
"pig2\t24\t3.3",
"pig5\t45\t2.4",
"pig1\t18\t2.1",
"pig1\t19\t2.1",
"pig2\t24\t4.5",
"pig1\t20\t3.1" };
Util.createInputFile(cluster, "distinctNoCombinerInput.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'distinctNoCombinerInput.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a by name;");
pigServer.registerQuery("c = foreach b {" +
" z = distinct a;" +
" generate group, z, SUM(a.age), SUM(a.gpa);};");
// make sure there is a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
assertFalse(baos.toString().matches("(?si).*combine plan.*"));
HashMap<String, Object[]> results = new HashMap<String, Object[]>();
results.put("pig1", new Object[] { "pig1", "bag-place-holder", 75L, 9.4 });
results.put("pig2", new Object[] { "pig2", "bag-place-holder", 48L, 7.8 });
results.put("pig5", new Object[] { "pig5", "bag-place-holder", 45L, 2.4 });
Iterator<Tuple> it = pigServer.openIterator("c");
while (it.hasNext()) {
Tuple t = it.next();
List<Object> fields = t.getAll();
Object[] expected = results.get(fields.get(0));
int i = 0;
for (Object field : fields) {
if (i == 1) {
// ignore the second field which is a bag
// for comparison here
continue;
}
assertEquals(expected[i++], field);
}
}
Util.deleteFile(cluster, "distinctNoCombinerInput.txt");
pigServer.shutdown();
}
@Test
public void testForEachNoCombiner() throws Exception {
// test that combiner is NOT invoked when
// one of the elements in the foreach generate
// has a foreach in the plan without a distinct agg
String input[] = {
"pig1\t18\t2.1",
"pig2\t24\t3.3",
"pig5\t45\t2.4",
"pig1\t18\t2.1",
"pig1\t19\t2.1",
"pig2\t24\t4.5",
"pig1\t20\t3.1" };
Util.createInputFile(cluster, "forEachNoCombinerInput.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'forEachNoCombinerInput.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a by name;");
pigServer.registerQuery("c = foreach b {" +
" z = a.age;" +
" generate group, z, SUM(a.age), SUM(a.gpa);};");
// make sure there is a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
assertFalse(baos.toString().matches("(?si).*combine plan.*"));
HashMap<String, Object[]> results = new HashMap<String, Object[]>();
results.put("pig1", new Object[] { "pig1", "bag-place-holder", 75L, 9.4 });
results.put("pig2", new Object[] { "pig2", "bag-place-holder", 48L, 7.8 });
results.put("pig5", new Object[] { "pig5", "bag-place-holder", 45L, 2.4 });
Iterator<Tuple> it = pigServer.openIterator("c");
while (it.hasNext()) {
Tuple t = it.next();
List<Object> fields = t.getAll();
Object[] expected = results.get(fields.get(0));
int i = 0;
for (Object field : fields) {
if (i == 1) {
// ignore the second field which is a bag
// for comparison here
continue;
}
assertEquals(expected[i++], field);
}
}
Util.deleteFile(cluster, "forEachNoCombinerInput.txt");
pigServer.shutdown();
}
@Test
public void testJiraPig746() throws Exception {
// test that combiner is NOT invoked when
// one of the elements in the foreach generate
// has a foreach in the plan without a distinct agg
String input[] = {
"pig1\t18\t2.1",
"pig2\t24\t3.3",
"pig5\t45\t2.4",
"pig1\t18\t2.1",
"pig1\t19\t2.1",
"pig2\t24\t4.5",
"pig1\t20\t3.1" };
String expected[] = {
"(pig1,75,{(pig1,18,2.1),(pig1,18,2.1),(pig1,19,2.1),(pig1,20,3.1)})",
"(pig2,48,{(pig2,24,3.3),(pig2,24,4.5)})",
"(pig5,45,{(pig5,45,2.4)})"
};
try {
Util.createInputFile(cluster, "forEachNoCombinerInput.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'forEachNoCombinerInput.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a by name;");
pigServer.registerQuery("c = foreach b generate group, SUM(a.age), a;");
// make sure there isn't a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
assertFalse(baos.toString().matches("(?si).*combine plan.*"));
Iterator<Tuple> it = pigServer.openIterator("c");
Util.checkQueryOutputsAfterSortRecursive(it, expected,
"group:chararray,age:long,b:{t:(name:chararray,age:int,gpa:double)}");
pigServer.shutdown();
} finally {
Util.deleteFile(cluster, "forEachNoCombinerInput.txt");
}
}
public static class JiraPig1030 extends EvalFunc<DataBag> {
@Override
public DataBag exec(Tuple input) throws IOException {
return new DefaultDataBag();
}
}
@Test
public void testJiraPig1030() throws Exception {
// test that combiner is NOT invoked when
// one of the elements in the foreach generate
// has a non-algebraic UDF that have multiple inputs
// (one of them is distinct).
String input[] = {
"pig1\t18\t2.1",
"pig2\t24\t3.3",
"pig5\t45\t2.4",
"pig1\t18\t2.1",
"pig1\t19\t2.1",
"pig2\t24\t4.5",
"pig1\t20\t3.1" };
try {
Util.createInputFile(cluster, "forEachNoCombinerInput.txt", input);
PigServer pigServer = new PigServer(cluster.getExecType(), properties);
pigServer.registerQuery("a = load 'forEachNoCombinerInput.txt' as (name:chararray, age:int, gpa:double);");
pigServer.registerQuery("b = group a all;");
pigServer.registerQuery("c = foreach b {" +
" d = distinct a.age;" +
" generate group, " + JiraPig1030.class.getName() + "(d, 0);};");
// make sure there isn't a combine plan in the explain output
ByteArrayOutputStream baos = new ByteArrayOutputStream();
PrintStream ps = new PrintStream(baos);
pigServer.explain("c", ps);
assertFalse(baos.toString().matches("(?si).*combine plan.*"));
pigServer.shutdown();
} finally {
Util.deleteFile(cluster, "forEachNoCombinerInput.txt");
}
}
}
| |
/**
* Copyright (C) 2012 KRM Associates, Inc. healtheme@krminc.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package com.krminc.phr.domain.carenotebook;
import com.krminc.phr.web.HealthSummary;
import java.io.Serializable;
import java.math.BigInteger;
import java.util.Date;
import javax.persistence.*;
/**
*
* @author cmccall
*/
@Entity
@Table(name = "carenotebook_pharmacies", catalog = "phr", schema = "")
@NamedQueries({
@NamedQuery(name = "Pharmacy.findAll", query = "SELECT p FROM Pharmacy p"),
@NamedQuery(name = "Pharmacy.findByPharmacyId", query = "SELECT p FROM Pharmacy p WHERE p.pharmacyId = :pharmacyId"),
@NamedQuery(name = "Pharmacy.findByPharmacyName", query = "SELECT p FROM Pharmacy p WHERE p.pharmacyName = :pharmacyName"),
@NamedQuery(name = "Pharmacy.findByPharmacyContact", query = "SELECT p FROM Pharmacy p WHERE p.pharmacyContact = :pharmacyContact"),
@NamedQuery(name = "Pharmacy.findByPhoneNumber", query = "SELECT p FROM Pharmacy p WHERE p.phoneNumber = :phoneNumber"),
@NamedQuery(name = "Pharmacy.findByFaxNumber", query = "SELECT p FROM Pharmacy p WHERE p.faxNumber = :faxNumber"),
@NamedQuery(name = "Pharmacy.findByPharmacyEmail", query = "SELECT p FROM Pharmacy p WHERE p.pharmacyEmail = :pharmacyEmail"),
@NamedQuery(name = "Pharmacy.findByPharmacyMedications", query = "SELECT p FROM Pharmacy p WHERE p.pharmacyMedications = :pharmacyMedications"),
@NamedQuery(name = "Pharmacy.findByHealthRecordId", query = "SELECT p FROM Pharmacy p WHERE p.healthRecordId = :healthRecordId"),
@NamedQuery(name = "Pharmacy.findByDataSourceId", query = "SELECT p FROM Pharmacy p WHERE p.dataSourceId = :dataSourceId"),
@NamedQuery(name = "Pharmacy.findByCareDocumentId", query = "SELECT p FROM Pharmacy p WHERE p.careDocumentId = :careDocumentId"),
@NamedQuery(name = "Pharmacy.findBySourceId", query = "SELECT p FROM Pharmacy p WHERE p.sourceId = :sourceId"),
@NamedQuery(name = "Pharmacy.findByDateAdded", query = "SELECT p FROM Pharmacy p WHERE p.dateAdded = :dateAdded"),
@NamedQuery(name = "Pharmacy.findByComments", query = "SELECT p FROM Pharmacy p WHERE p.comments = :comments"),
@NamedQuery(name = "Pharmacy.findByPrimaryKeyForRecord", query = "SELECT d FROM Pharmacy d WHERE d.pharmacyId = :pharmacyId AND d.healthRecordId = :healthRecordId"),
@NamedQuery(name = "Pharmacy.findByMask", query = "SELECT p FROM Pharmacy p WHERE p.mask = :mask")})
public class Pharmacy extends HealthSummary implements Serializable {
private static final long serialVersionUID = 1L;
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
@Basic(optional = false)
@Column(name = "pharmacy_id", nullable = false)
private Long pharmacyId;
@Column(name = "pharmacy_name", length = 50)
private String pharmacyName;
@Column(name = "pharmacy_contact", length = 50)
private String pharmacyContact;
@Column(name = "phone_number", length = 25)
private String phoneNumber;
@Column(name = "fax_number", length = 25)
private String faxNumber;
@Column(name = "pharmacy_email", length = 100)
private String pharmacyEmail;
@Column(name = "pharmacy_medications", length = 512)
private String pharmacyMedications;
@Basic(optional = false)
@Column(name = "rec_id", nullable = false)
private long healthRecordId;
@Basic(optional = false)
@Column(name = "data_source_id", nullable = false)
private long dataSourceId;
@Column(name = "care_document_id")
private BigInteger careDocumentId;
@Column(name = "source_id")
private BigInteger sourceId;
@Basic(optional = false)
@Column(name = "date_added", nullable = false)
@Temporal(TemporalType.TIMESTAMP)
private Date dateAdded;
@Column(name = "comments", length = 512)
private String comments;
@Column(name = "mask", length = 50)
private String mask;
public Pharmacy() {
}
public Pharmacy(Long hrid) {
super(hrid);
this.healthRecordId = hrid;
}
public Long getPharmacyId() {
return pharmacyId;
}
public void setPharmacyId(Long pharmacyId) {
this.pharmacyId = pharmacyId;
}
/** needed to map existing entities by carenotebook form processor **/
public void setPharmacyId(String pharmacyId){
this.pharmacyId = Long.parseLong(pharmacyId);
}
public String getPharmacyName() {
return pharmacyName;
}
public void setPharmacyName(String pharmacyName) {
this.pharmacyName = pharmacyName;
}
public String getPharmacyContact() {
return pharmacyContact;
}
public void setPharmacyContact(String pharmacyContact) {
this.pharmacyContact = pharmacyContact;
}
public String getPhoneNumber() {
return phoneNumber;
}
public void setPhoneNumber(String phoneNumber) {
this.phoneNumber = phoneNumber;
}
public String getFaxNumber() {
return faxNumber;
}
public void setFaxNumber(String faxNumber) {
this.faxNumber = faxNumber;
}
public String getPharmacyEmail() {
return pharmacyEmail;
}
public void setPharmacyEmail(String pharmacyEmail) {
this.pharmacyEmail = pharmacyEmail;
}
public String getPharmacyMedications() {
return pharmacyMedications;
}
public void setPharmacyMedications(String pharmacyMedications) {
this.pharmacyMedications = pharmacyMedications;
}
/**
*
* @return
*/
@Override
public Long getHealthRecordId() {
return healthRecordId;
}
// public void setHealthRecordId(Long healthRecordId) {
// this.healthRecordId = healthRecordId;
// }
public long getDataSourceId() {
return dataSourceId;
}
public void setDataSourceId(long dataSourceId) {
this.dataSourceId = dataSourceId;
}
public BigInteger getCareDocumentId() {
return careDocumentId;
}
public void setCareDocumentId(BigInteger careDocumentId) {
this.careDocumentId = careDocumentId;
}
public BigInteger getSourceId() {
return sourceId;
}
public void setSourceId(BigInteger sourceId) {
this.sourceId = sourceId;
}
public Date getDateAdded() {
return dateAdded;
}
public void setDateAdded(Date dateAdded) {
this.dateAdded = dateAdded;
}
public String getComments() {
return comments;
}
public void setComments(String comments) {
this.comments = comments;
}
public String getMask() {
return mask;
}
public void setMask(String mask) {
this.mask = mask;
}
@Override
public int hashCode() {
int hash = 0;
hash += ( pharmacyId != null ? pharmacyId.hashCode() : 0 );
return hash;
}
/**
 * Equality is based only on the primary key.
 */
@Override
public boolean equals(Object object) {
    // TODO: Warning - this method won't work in the case the id fields are not set
    if (!(object instanceof Pharmacy)) {
        return false;
    }
    Pharmacy other = (Pharmacy) object;
    if (this.pharmacyId == null) {
        // Both ids must be null to be considered equal.
        return other.pharmacyId == null;
    }
    return this.pharmacyId.equals(other.pharmacyId);
}
/** Debug representation containing only the primary key. */
@Override
public String toString() {
    StringBuilder text = new StringBuilder("com.krminc.phr.domain.carenotebook.Pharmacy[pharmacyId=");
    text.append(pharmacyId);
    text.append(']');
    return text.toString();
}
}
| |
/**
* Copyright 2010-present Facebook.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.widget;
import android.app.Activity;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.ContextWrapper;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.TypedArray;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.content.LocalBroadcastManager;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.View;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import android.widget.TextView;
import com.facebook.android.R;
import com.facebook.internal.*;
/**
* This class provides the UI for displaying the Facebook Like button and its associated components.
*/
public class LikeView extends FrameLayout {
// ***
// Keep all the enum values in sync with attrs.xml
// ***
/**
 * Encapsulates the valid values for the facebook:style attribute for a LikeView
 */
public enum Style {
    /**
     * Setting the attribute to this value will display the button and a sentence near it that describes the
     * social sentence for the associated object.
     *
     * This is the default value
     */
    STANDARD("standard", 0),
    /**
     * Setting the attribute to this value will display the button by itself, with no other components
     */
    BUTTON("button", 1),
    /**
     * Setting the attribute to this value will display the button and a box near it with the number of likes
     * for the associated object
     */
    BOX_COUNT("box_count", 2);

    // Default used when the XML attribute is absent. Left non-final because it
    // is package-visible; NOTE(review): confirm nothing reassigns it before
    // tightening to static final.
    static Style DEFAULT = STANDARD;

    /**
     * Maps the integer declared in attrs.xml back to its enum constant.
     *
     * @param enumValue integer value of the attribute
     * @return the matching Style, or null if the value is unknown
     */
    static Style fromInt(int enumValue) {
        for (Style style : values()) {
            if (style.getValue() == enumValue) {
                return style;
            }
        }
        return null;
    }

    // Both values are assigned exactly once in the constructor, so they are
    // declared final (immutability by default).
    private final String stringValue;
    private final int intValue;

    private Style(String stringValue, int value) {
        this.stringValue = stringValue;
        this.intValue = value;
    }

    @Override
    public String toString() {
        return stringValue;
    }

    private int getValue() {
        return intValue;
    }
}
/**
 * Encapsulates the valid values for the facebook:horizontal_alignment attribute for a LikeView.
 */
public enum HorizontalAlignment {
    /**
     * Setting the attribute to this value will center the button and auxiliary view in the parent view.
     *
     * This is the default value
     */
    CENTER("center", 0),
    /**
     * Setting the attribute to this value will left-justify the button and auxiliary view in the parent view.
     */
    LEFT("left", 1),
    /**
     * Setting the attribute to this value will right-justify the button and auxiliary view in the parent view.
     * If the facebook:auxiliary_view_position is set to INLINE, then the auxiliary view will be on the
     * left of the button
     */
    RIGHT("right", 2);

    // Default used when the XML attribute is absent. NOTE(review): left
    // non-final for consistency with the other enums; confirm before changing.
    static HorizontalAlignment DEFAULT = CENTER;

    /**
     * Maps the integer declared in attrs.xml back to its enum constant.
     *
     * @param enumValue integer value of the attribute
     * @return the matching HorizontalAlignment, or null if the value is unknown
     */
    static HorizontalAlignment fromInt(int enumValue) {
        for (HorizontalAlignment horizontalAlignment : values()) {
            if (horizontalAlignment.getValue() == enumValue) {
                return horizontalAlignment;
            }
        }
        return null;
    }

    // Assigned once in the constructor; declared final.
    private final String stringValue;
    private final int intValue;

    private HorizontalAlignment(String stringValue, int value) {
        this.stringValue = stringValue;
        this.intValue = value;
    }

    @Override
    public String toString() {
        return stringValue;
    }

    private int getValue() {
        return intValue;
    }
}
/**
 * Encapsulates the valid values for the facebook:auxiliary_view_position attribute for a LikeView.
 */
public enum AuxiliaryViewPosition {
    /**
     * Setting the attribute to this value will put the social-sentence or box-count below the like button.
     * If the facebook:style is set to BUTTON, then this has no effect.
     *
     * This is the default value
     */
    BOTTOM("bottom", 0),
    /**
     * Setting the attribute to this value will put the social-sentence or box-count inline with the like button.
     * The auxiliary view will be to the left of the button if the facebook:horizontal_alignment is set to RIGHT.
     * In all other cases, it will be to the right of the button.
     * If the facebook:style is set to BUTTON, then this has no effect.
     */
    INLINE("inline", 1),
    /**
     * Setting the attribute to this value will put the social-sentence or box-count above the like button.
     * If the facebook:style is set to BUTTON, then this has no effect.
     */
    TOP("top", 2);

    // Default used when the XML attribute is absent. NOTE(review): left
    // non-final for consistency with the other enums; confirm before changing.
    static AuxiliaryViewPosition DEFAULT = BOTTOM;

    /**
     * Maps the integer declared in attrs.xml back to its enum constant.
     *
     * @param enumValue integer value of the attribute
     * @return the matching AuxiliaryViewPosition, or null if the value is unknown
     */
    static AuxiliaryViewPosition fromInt(int enumValue) {
        for (AuxiliaryViewPosition auxViewPosition : values()) {
            if (auxViewPosition.getValue() == enumValue) {
                return auxViewPosition;
            }
        }
        return null;
    }

    // Assigned once in the constructor; declared final.
    private final String stringValue;
    private final int intValue;

    private AuxiliaryViewPosition(String stringValue, int value) {
        this.stringValue = stringValue;
        this.intValue = value;
    }

    @Override
    public String toString() {
        return stringValue;
    }

    private int getValue() {
        return intValue;
    }
}
// Sentinel meaning "no explicit foreground color was supplied via XML".
private static final int NO_FOREGROUND_COLOR = -1;
// Graph object id this view is currently bound to (null when unbound).
private String objectId;
// Child views: a linear container holding the button plus one auxiliary view.
private LinearLayout containerView;
private LikeButton likeButton;
private LikeBoxCountView likeBoxCountView;
private TextView socialSentenceView;
// Controller that performs the actual like/unlike work for objectId;
// null until the async creation callback delivers one.
private LikeActionController likeActionController;
private OnErrorListener onErrorListener;
// Receives controller broadcasts while bound; null when unbound.
private BroadcastReceiver broadcastReceiver;
// Pending async controller-creation callback; cancelled on rebind/teardown.
private LikeActionControllerCreationCallback creationCallback;
// Presentation settings, overridable via the facebook:* XML attributes.
private Style likeViewStyle = Style.DEFAULT;
private HorizontalAlignment horizontalAlignment = HorizontalAlignment.DEFAULT;
private AuxiliaryViewPosition auxiliaryViewPosition = AuxiliaryViewPosition.DEFAULT;
private int foregroundColor = NO_FOREGROUND_COLOR;
// Paddings in pixels, resolved from dimen resources in initialize().
private int edgePadding;
private int internalPadding;
// Optional hosting fragment; used to route onActivityResult to the fragment.
private Fragment parentFragment;
/**
 * If your app does not use UiLifeCycleHelper, then you must call this method in the calling activity's
 * onActivityResult method, to process any pending like actions, where tapping the button had resulted in
 * the Like dialog being shown in the Facebook application.
 *
 * @param context Hosting context
 * @param requestCode From the originating call to onActivityResult
 * @param resultCode From the originating call to onActivityResult
 * @param data From the originating call to onActivityResult
 * @return Indication of whether the Intent was handled
 */
public static boolean handleOnActivityResult(Context context,
                                             int requestCode,
                                             int resultCode,
                                             Intent data) {
    // Pure delegation: LikeActionController owns the pending-request bookkeeping.
    return LikeActionController.handleOnActivityResult(context, requestCode, resultCode, data);
}
/**
 * Constructor
 *
 * @param context Context for this View
 */
public LikeView(Context context) {
    super(context);
    // No AttributeSet, so all presentation fields keep their defaults.
    initialize(context);
}
/**
 * Constructor
 *
 * @param context Context for this View
 * @param attrs AttributeSet for this View.
 */
public LikeView(Context context, AttributeSet attrs) {
    super(context, attrs);
    // Attributes must be parsed before initialize() so the child views are
    // created with the configured style, alignment, and color.
    parseAttributes(attrs);
    initialize(context);
}
/**
 * Sets the associated object for this LikeView. Can be changed during runtime.
 * @param objectId Object Id
 */
public void setObjectId(String objectId) {
    String normalizedId = Utility.coerceValueIfNullOrEmpty(objectId, null);
    if (Utility.areObjectsEqual(normalizedId, this.objectId)) {
        // Already bound to this object; nothing to do.
        return;
    }
    setObjectIdForced(normalizedId);
    updateLikeStateAndLayout();
}
/**
 * Sets the facebook:style for this LikeView. Can be changed during runtime.
 * @param likeViewStyle Should be either LikeView.STANDARD, LikeView.BUTTON or LikeView.BOX_COUNT
 */
public void setLikeViewStyle(Style likeViewStyle) {
    Style newStyle = (likeViewStyle == null) ? Style.DEFAULT : likeViewStyle;
    if (newStyle == this.likeViewStyle) {
        return; // unchanged
    }
    this.likeViewStyle = newStyle;
    updateLayout();
}
/**
 * Sets the facebook:auxiliary_view_position for this LikeView. Can be changed during runtime.
 * @param auxiliaryViewPosition Should be either LikeView.TOP, LikeView.INLINE or LikeView.BOTTOM
 */
public void setAuxiliaryViewPosition(AuxiliaryViewPosition auxiliaryViewPosition) {
    AuxiliaryViewPosition newPosition =
            (auxiliaryViewPosition == null) ? AuxiliaryViewPosition.DEFAULT : auxiliaryViewPosition;
    if (newPosition == this.auxiliaryViewPosition) {
        return; // unchanged
    }
    this.auxiliaryViewPosition = newPosition;
    updateLayout();
}
/**
 * Sets the facebook:horizontal_alignment for this LikeView. Can be changed during runtime.
 * @param horizontalAlignment Should be either LikeView.LEFT, LikeView.CENTER or LikeView.RIGHT
 */
public void setHorizontalAlignment(HorizontalAlignment horizontalAlignment) {
    HorizontalAlignment newAlignment =
            (horizontalAlignment == null) ? HorizontalAlignment.DEFAULT : horizontalAlignment;
    if (newAlignment == this.horizontalAlignment) {
        return; // unchanged
    }
    this.horizontalAlignment = newAlignment;
    updateLayout();
}
/**
 * Sets the facebook:foreground_color for this LikeView. Can be changed during runtime.
 * The color is only used for the social sentence text.
 * @param foregroundColor Any valid android.graphics.Color value.
 */
public void setForegroundColor(int foregroundColor) {
    if (this.foregroundColor != foregroundColor) {
        // Bug fix: the field was previously never updated, so the change-check
        // compared against a stale value and setting the view back to its
        // stored color was silently ignored.
        this.foregroundColor = foregroundColor;
        socialSentenceView.setTextColor(foregroundColor);
    }
}
/**
 * Sets the parent Fragment which is hosting this LikeView. This allows the LikeView to be
 * embedded inside a Fragment, and will allow the fragment to receive the
 * {@link Fragment#onActivityResult(int, int, android.content.Intent) onActivityResult}
 * call rather than the Activity, upon completion of Likes from this view.
 *
 * @param fragment Fragment that is hosting the LikeView.
 */
public void setFragment(Fragment fragment) {
    // Stored only; consulted later by toggleLike() to pick the result target.
    this.parentFragment = fragment;
}
/**
 * Sets an OnErrorListener for this instance of LikeView to call into when
 * certain exceptions occur.
 *
 * @param onErrorListener The listener object to set
 */
public void setOnErrorListener(OnErrorListener onErrorListener) {
    this.onErrorListener = onErrorListener;
}
/**
 * Returns the current OnErrorListener for this instance of LikeView.
 *
 * @return The OnErrorListener
 */
public OnErrorListener getOnErrorListener() {
    return onErrorListener;
}
@Override
protected void onDetachedFromWindow() {
    // Disassociate from the object. Via setObjectIdForced this unregisters the
    // broadcast receiver and cancels any pending controller-creation callback,
    // preventing leaks once the view leaves the window.
    setObjectId(null);
    super.onDetachedFromWindow();
}
// Reads the facebook:* XML attributes into fields. Must be called before
// initialize() (see the note on initialize) so the child views pick up the
// configured values. Unknown enum values are rejected loudly.
private void parseAttributes(AttributeSet attrs) {
    if (attrs == null || getContext() == null) {
        // Programmatic construction or no context: keep defaults.
        return;
    }
    TypedArray a = getContext().obtainStyledAttributes(attrs, R.styleable.com_facebook_like_view);
    if (a == null) {
        return;
    }
    objectId = Utility.coerceValueIfNullOrEmpty(a.getString(R.styleable.com_facebook_like_view_object_id), null);
    likeViewStyle = Style.fromInt(
            a.getInt(R.styleable.com_facebook_like_view_style,
                    Style.DEFAULT.getValue()));
    if (likeViewStyle == null) {
        throw new IllegalArgumentException("Unsupported value for LikeView 'style'");
    }
    auxiliaryViewPosition = AuxiliaryViewPosition.fromInt(
            a.getInt(R.styleable.com_facebook_like_view_auxiliary_view_position,
                    AuxiliaryViewPosition.DEFAULT.getValue()));
    if (auxiliaryViewPosition == null) {
        throw new IllegalArgumentException("Unsupported value for LikeView 'auxiliary_view_position'");
    }
    horizontalAlignment = HorizontalAlignment.fromInt(
            a.getInt(R.styleable.com_facebook_like_view_horizontal_alignment,
                    HorizontalAlignment.DEFAULT.getValue()));
    if (horizontalAlignment == null) {
        throw new IllegalArgumentException("Unsupported value for LikeView 'horizontal_alignment'");
    }
    // NO_FOREGROUND_COLOR keeps the "use resource default" behavior in initialize().
    foregroundColor = a.getColor(R.styleable.com_facebook_like_view_foreground_color, NO_FOREGROUND_COLOR);
    a.recycle();
}
// If attributes were present, parseAttributes MUST be called before initialize() to ensure proper behavior
// Builds the child view hierarchy (button + social sentence + box count inside
// a LinearLayout) and binds to any object id parsed from XML.
private void initialize(Context context) {
    // Resolve paddings and, if no XML color was given, the default text color.
    edgePadding = getResources().getDimensionPixelSize(R.dimen.com_facebook_likeview_edge_padding);
    internalPadding = getResources().getDimensionPixelSize(R.dimen.com_facebook_likeview_internal_padding);
    if (foregroundColor == NO_FOREGROUND_COLOR) {
        foregroundColor = getResources().getColor(R.color.com_facebook_likeview_text_color);
    }
    setBackgroundColor(Color.TRANSPARENT);
    containerView = new LinearLayout(context);
    LayoutParams containerViewLayoutParams = new LayoutParams(
            LayoutParams.WRAP_CONTENT,
            LayoutParams.WRAP_CONTENT);
    containerView.setLayoutParams(containerViewLayoutParams);
    initializeLikeButton(context);
    initializeSocialSentenceView(context);
    initializeLikeCountView(context);
    // All three children are added; updateLayout() later toggles visibility
    // and ordering based on style/alignment/position.
    containerView.addView(likeButton);
    containerView.addView(socialSentenceView);
    containerView.addView(likeBoxCountView);
    addView(containerView);
    setObjectIdForced(this.objectId);
    updateLikeStateAndLayout();
}
// Creates the like button, seeded with the current liked state (false if no
// controller is available yet) and wired to toggle the like on click.
private void initializeLikeButton(Context context) {
    likeButton = new LikeButton(
            context,
            likeActionController != null ? likeActionController.isObjectLiked() : false);
    likeButton.setOnClickListener(new OnClickListener() {
        @Override
        public void onClick(View v) {
            toggleLike();
        }
    });
    LinearLayout.LayoutParams buttonLayout = new LinearLayout.LayoutParams(
            LayoutParams.WRAP_CONTENT,
            LayoutParams.WRAP_CONTENT);
    likeButton.setLayoutParams(buttonLayout);
}
// Creates the social-sentence text view (used by Style.STANDARD), capped at
// two centered lines in the configured foreground color.
private void initializeSocialSentenceView(Context context) {
    socialSentenceView = new TextView(context);
    socialSentenceView.setTextSize(
            TypedValue.COMPLEX_UNIT_PX,
            getResources().getDimension(R.dimen.com_facebook_likeview_text_size));
    socialSentenceView.setMaxLines(2);
    socialSentenceView.setTextColor(foregroundColor);
    socialSentenceView.setGravity(Gravity.CENTER);
    LinearLayout.LayoutParams socialSentenceViewLayout = new LinearLayout.LayoutParams(
            LayoutParams.WRAP_CONTENT,
            LayoutParams.MATCH_PARENT);
    socialSentenceView.setLayoutParams(socialSentenceViewLayout);
}
// Creates the box-count view (used by Style.BOX_COUNT).
private void initializeLikeCountView(Context context) {
    likeBoxCountView = new LikeBoxCountView(context);
    LinearLayout.LayoutParams likeCountViewLayout = new LinearLayout.LayoutParams(
            LayoutParams.MATCH_PARENT,
            LayoutParams.MATCH_PARENT);
    likeBoxCountView.setLayoutParams(likeCountViewLayout);
}
// Flips the like state through the controller, resolving the hosting Activity
// (directly, via the parent fragment, or by unwrapping a ContextWrapper) so
// the Like dialog result can be routed back correctly.
private void toggleLike() {
    if (likeActionController == null) {
        return;
    }
    Activity activity = (parentFragment != null)
            ? parentFragment.getActivity()
            : activityFromContext(getContext());
    likeActionController.toggleLike(
            activity,
            parentFragment,
            getAnalyticsParameters());
}

// Returns the Activity backing the given context, unwrapping a single level of
// ContextWrapper if necessary; null when no Activity can be found.
private static Activity activityFromContext(Context context) {
    if (context instanceof Activity) {
        return (Activity) context;
    }
    if (context instanceof ContextWrapper) {
        Context baseContext = ((ContextWrapper) context).getBaseContext();
        if (baseContext instanceof Activity) {
            return (Activity) baseContext;
        }
    }
    return null;
}
// Builds the analytics payload describing this view's current configuration;
// passed along with like toggles.
private Bundle getAnalyticsParameters() {
    Bundle params = new Bundle();
    params.putString(AnalyticsEvents.PARAMETER_LIKE_VIEW_STYLE, likeViewStyle.toString());
    params.putString(AnalyticsEvents.PARAMETER_LIKE_VIEW_AUXILIARY_POSITION, auxiliaryViewPosition.toString());
    params.putString(AnalyticsEvents.PARAMETER_LIKE_VIEW_HORIZONTAL_ALIGNMENT, horizontalAlignment.toString());
    params.putString(AnalyticsEvents.PARAMETER_LIKE_VIEW_OBJECT_ID, Utility.coerceValueIfNullOrEmpty(objectId, ""));
    return params;
}
// Rebinds the view to newObjectId without the equality short-circuit in
// setObjectId(): tears down any existing association, then asynchronously
// requests a LikeActionController for the new id (unless it is null/empty).
private void setObjectIdForced(String newObjectId) {
    tearDownObjectAssociations();
    objectId = newObjectId;
    if (Utility.isNullOrEmpty(newObjectId)) {
        return;
    }
    // Controller creation is async; the callback object is kept so a later
    // rebind/teardown can cancel it before it registers receivers.
    creationCallback = new LikeActionControllerCreationCallback();
    LikeActionController.getControllerForObjectId(
            getContext(),
            newObjectId,
            creationCallback);
}
// Adopts the given controller and starts listening for its update/error/reset
// broadcasts via LocalBroadcastManager.
private void associateWithLikeActionController(LikeActionController likeActionController) {
    this.likeActionController = likeActionController;
    this.broadcastReceiver = new LikeControllerBroadcastReceiver();
    LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(getContext());
    // add the broadcast receiver
    IntentFilter filter = new IntentFilter();
    filter.addAction(LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_UPDATED);
    filter.addAction(LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_DID_ERROR);
    filter.addAction(LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_DID_RESET);
    localBroadcastManager.registerReceiver(broadcastReceiver, filter);
}
// Reverses associateWithLikeActionController: unregisters the broadcast
// receiver, cancels any in-flight controller creation, and drops the controller.
private void tearDownObjectAssociations() {
    if (broadcastReceiver != null) {
        LocalBroadcastManager localBroadcastManager = LocalBroadcastManager.getInstance(getContext());
        localBroadcastManager.unregisterReceiver(broadcastReceiver);
        broadcastReceiver = null;
    }
    // If we were already waiting on a controller to be given back, make sure we aren't waiting anymore.
    // Otherwise when that controller is given back to the callback, it will go and register a broadcast receiver
    // for it.
    if (creationCallback != null) {
        creationCallback.cancel();
        creationCallback = null;
    }
    likeActionController = null;
}
// Pushes the controller's current state (or cleared state when unbound) into
// the child views, then recomputes the layout.
private void updateLikeStateAndLayout() {
    boolean bound = likeActionController != null;
    likeButton.setLikeState(bound ? likeActionController.isObjectLiked() : false);
    socialSentenceView.setText(bound ? likeActionController.getSocialSentence() : null);
    likeBoxCountView.setText(bound ? likeActionController.getLikeCountString() : null);
    updateLayout();
}
// Recomputes alignment, which auxiliary view (social sentence vs. box count)
// is visible, child ordering, orientation, and padding. Statement order
// matters: visibility is reset before choosing the aux view, and re-adding a
// child moves it to the end of the LinearLayout.
private void updateLayout() {
    // Make sure the container is horizontally aligned according to specifications.
    LayoutParams containerViewLayoutParams = (LayoutParams)containerView.getLayoutParams();
    LinearLayout.LayoutParams buttonLayoutParams = (LinearLayout.LayoutParams)likeButton.getLayoutParams();
    int viewGravity =
            horizontalAlignment == HorizontalAlignment.LEFT ? Gravity.LEFT :
            horizontalAlignment == HorizontalAlignment.CENTER ? Gravity.CENTER_HORIZONTAL : Gravity.RIGHT;
    containerViewLayoutParams.gravity = viewGravity | Gravity.TOP;
    buttonLayoutParams.gravity = viewGravity;
    // Choose the right auxiliary view to make visible.
    socialSentenceView.setVisibility(GONE);
    likeBoxCountView.setVisibility(GONE);
    View auxView;
    if (likeViewStyle == Style.STANDARD &&
            likeActionController != null &&
            !Utility.isNullOrEmpty(likeActionController.getSocialSentence())) {
        auxView = socialSentenceView;
    } else if (likeViewStyle == Style.BOX_COUNT &&
            likeActionController != null &&
            !Utility.isNullOrEmpty(likeActionController.getLikeCountString())) {
        updateBoxCountCaretPosition();
        auxView = likeBoxCountView;
    } else {
        // No more work to be done.
        return;
    }
    auxView.setVisibility(VISIBLE);
    // Now position the auxiliary view properly
    LinearLayout.LayoutParams auxViewLayoutParams = (LinearLayout.LayoutParams)auxView.getLayoutParams();
    auxViewLayoutParams.gravity = viewGravity;
    containerView.setOrientation(
            auxiliaryViewPosition == AuxiliaryViewPosition.INLINE ?
                    LinearLayout.HORIZONTAL :
                    LinearLayout.VERTICAL);
    if (auxiliaryViewPosition == AuxiliaryViewPosition.TOP ||
            (auxiliaryViewPosition == AuxiliaryViewPosition.INLINE &&
                    horizontalAlignment == HorizontalAlignment.RIGHT)) {
        // Button comes after the auxiliary view. Make sure it is at the end
        containerView.removeView(likeButton);
        containerView.addView(likeButton);
    } else {
        // In all other cases, the button comes first
        containerView.removeView(auxView);
        containerView.addView(auxView);
    }
    // Edge padding on outward-facing sides, internal padding on the side that
    // touches the button.
    switch (auxiliaryViewPosition) {
        case TOP:
            auxView.setPadding(edgePadding, edgePadding, edgePadding, internalPadding);
            break;
        case BOTTOM:
            auxView.setPadding(edgePadding, internalPadding, edgePadding, edgePadding);
            break;
        case INLINE:
            if (horizontalAlignment == HorizontalAlignment.RIGHT) {
                auxView.setPadding(edgePadding, edgePadding, internalPadding, edgePadding);
            } else {
                auxView.setPadding(internalPadding, edgePadding, edgePadding, edgePadding);
            }
            break;
    }
}
// Points the box-count caret toward the like button: opposite the aux view's
// position, and for INLINE, toward whichever side the button sits on.
private void updateBoxCountCaretPosition() {
    if (auxiliaryViewPosition == AuxiliaryViewPosition.TOP) {
        likeBoxCountView.setCaretPosition(LikeBoxCountView.LikeBoxCountViewCaretPosition.BOTTOM);
    } else if (auxiliaryViewPosition == AuxiliaryViewPosition.BOTTOM) {
        likeBoxCountView.setCaretPosition(LikeBoxCountView.LikeBoxCountViewCaretPosition.TOP);
    } else if (auxiliaryViewPosition == AuxiliaryViewPosition.INLINE) {
        LikeBoxCountView.LikeBoxCountViewCaretPosition caret =
                horizontalAlignment == HorizontalAlignment.RIGHT
                        ? LikeBoxCountView.LikeBoxCountViewCaretPosition.RIGHT
                        : LikeBoxCountView.LikeBoxCountViewCaretPosition.LEFT;
        likeBoxCountView.setCaretPosition(caret);
    }
}
/**
 * Callback interface that will be called when a network or other error is encountered
 * while logging in.
 */
public interface OnErrorListener {
    /**
     * Called when a network or other error is encountered.
     * @param errorBundle a Bundle of extras describing the error that was encountered.
     *                    (NOTE(review): previous doc called this a FacebookException,
     *                    but the parameter is the broadcast's error Bundle.)
     */
    void onError(Bundle errorBundle);
}
// Listens for LikeActionController broadcasts (updated / error / reset) and
// refreshes this view accordingly. Broadcasts carrying an object id that does
// not match ours are ignored.
private class LikeControllerBroadcastReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        String intentAction = intent.getAction();
        Bundle extras = intent.getExtras();
        boolean shouldRespond = true;
        if (extras != null) {
            // See if an Id was set in the broadcast Intent. If it was, treat it as a filter.
            String broadcastObjectId = extras.getString(LikeActionController.ACTION_OBJECT_ID_KEY);
            shouldRespond = Utility.isNullOrEmpty(broadcastObjectId) ||
                    Utility.areObjectsEqual(objectId, broadcastObjectId);
        }
        if (!shouldRespond) {
            return;
        }
        if (LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_UPDATED.equals(intentAction)) {
            updateLikeStateAndLayout();
        } else if (LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_DID_ERROR.equals(intentAction)) {
            // Forward the error extras to the app-provided listener, if any.
            if (onErrorListener != null) {
                onErrorListener.onError(extras);
            }
        } else if (LikeActionController.ACTION_LIKE_ACTION_CONTROLLER_DID_RESET.equals(intentAction)) {
            // This will recreate the controller and associated objects
            setObjectIdForced(objectId);
            updateLikeStateAndLayout();
        }
    }
}
// Receives the asynchronously-created LikeActionController. cancel() lets a
// rebind or teardown discard a stale result instead of letting it register
// broadcast receivers for an object we no longer display.
private class LikeActionControllerCreationCallback implements LikeActionController.CreationCallback {
    private boolean isCancelled;
    public void cancel() {
        isCancelled = true;
    }
    @Override
    public void onComplete(LikeActionController likeActionController) {
        if (isCancelled) {
            return; // superseded by a newer bind; drop the controller
        }
        associateWithLikeActionController(likeActionController);
        updateLikeStateAndLayout();
        // Creation finished; clear the pending-callback marker on the outer view.
        LikeView.this.creationCallback = null;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.compiler;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.junit.Assert;
import org.junit.Test;
import org.apache.flink.api.common.Plan;
import org.apache.flink.api.java.DataSet;
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.operators.IterativeDataSet;
import org.apache.flink.api.common.functions.RichJoinFunction;
import org.apache.flink.api.java.record.operators.BulkIteration;
import org.apache.flink.api.java.record.operators.CoGroupOperator;
import org.apache.flink.api.java.record.operators.CrossOperator;
import org.apache.flink.api.java.record.operators.DeltaIteration;
import org.apache.flink.api.java.record.operators.FileDataSink;
import org.apache.flink.api.java.record.operators.FileDataSource;
import org.apache.flink.api.java.record.operators.JoinOperator;
import org.apache.flink.api.java.record.operators.MapOperator;
import org.apache.flink.api.java.record.operators.ReduceOperator;
import org.apache.flink.compiler.plan.OptimizedPlan;
import org.apache.flink.compiler.plan.SinkPlanNode;
import org.apache.flink.compiler.plantranslate.NepheleJobGraphGenerator;
import org.apache.flink.compiler.testfunctions.IdentityGroupReducer;
import org.apache.flink.compiler.testfunctions.IdentityKeyExtractor;
import org.apache.flink.compiler.testfunctions.IdentityMapper;
import org.apache.flink.compiler.testfunctions.Top1GroupReducer;
import org.apache.flink.compiler.util.DummyCoGroupStub;
import org.apache.flink.compiler.util.DummyCrossStub;
import org.apache.flink.compiler.util.DummyInputFormat;
import org.apache.flink.compiler.util.DummyMatchStub;
import org.apache.flink.compiler.util.DummyNonPreservingMatchStub;
import org.apache.flink.compiler.util.DummyOutputFormat;
import org.apache.flink.compiler.util.IdentityMap;
import org.apache.flink.compiler.util.IdentityReduce;
import org.apache.flink.types.IntValue;
import org.apache.flink.types.LongValue;
@SuppressWarnings({"serial", "deprecation"})
public class BranchingPlansCompilerTest extends CompilerTestBase {
/**
 * Verifies that a plan with many sinks branching off two chained maps can be
 * cost-computed and translated to a job graph without errors.
 */
@Test
public void testCostComputationWithMultipleDataSinks() {
    final int SINKS = 5;
    try {
        // construct the plan: source -> Map A -> Map C, with SINKS sinks on each map
        final String out1Path = "file:///test/1";
        final String out2Path = "file:///test/2";
        FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE);
        MapOperator mapA = MapOperator.builder(IdentityMap.class).input(sourceA).name("Map A").build();
        MapOperator mapC = MapOperator.builder(IdentityMap.class).input(mapA).name("Map C").build();
        List<FileDataSink> sinks = new ArrayList<FileDataSink>();
        for (int sink = 0; sink < SINKS; sink++) {
            // Previously the sinks were also stored in two arrays (sinkA/sinkB)
            // that were never read; the list alone is sufficient.
            sinks.add(new FileDataSink(DummyOutputFormat.class, out1Path, mapA, "Sink A:" + sink));
            sinks.add(new FileDataSink(DummyOutputFormat.class, out2Path, mapC, "Sink B:" + sink));
        }
        // return the PACT plan
        Plan plan = new Plan(sinks, "Plans With Multiple Data Sinks");
        OptimizedPlan oPlan = compileNoStats(plan);
        // ---------- compile plan to nephele job graph to verify that no error is thrown ----------
        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 *
 * <pre>
 *                (SRC A)
 *                   |
 *                (MAP A)
 *             /         \
 *          (MAP B)      (MAP C)
 *           /           /     \
 *        (SINK A)    (SINK B)  (SINK C)
 * </pre>
 */
@Test
public void testBranchingWithMultipleDataSinks2() {
    try {
        final String out1Path = "file:///test/1";
        final String out2Path = "file:///test/2";
        final String out3Path = "file:///test/3";
        // build the branching plan: Map A fans out into Map B and Map C,
        // and Map C feeds two sinks
        FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE);
        MapOperator mapA = MapOperator.builder(IdentityMap.class).input(sourceA).name("Map A").build();
        MapOperator mapB = MapOperator.builder(IdentityMap.class).input(mapA).name("Map B").build();
        MapOperator mapC = MapOperator.builder(IdentityMap.class).input(mapA).name("Map C").build();
        List<FileDataSink> sinks = new ArrayList<FileDataSink>();
        sinks.add(new FileDataSink(DummyOutputFormat.class, out1Path, mapB, "Sink A"));
        sinks.add(new FileDataSink(DummyOutputFormat.class, out2Path, mapC, "Sink B"));
        sinks.add(new FileDataSink(DummyOutputFormat.class, out3Path, mapC, "Sink C"));
        Plan plan = new Plan(sinks, "Plans With Multiple Data Sinks");
        OptimizedPlan oPlan = compileNoStats(plan);
        // the optimized plan must contain exactly one sink per requested path
        Assert.assertEquals("Wrong number of data sinks.", 3, oPlan.getDataSinks().size());
        Set<String> expectedPaths = new HashSet<String>();
        expectedPaths.add(out1Path);
        expectedPaths.add(out2Path);
        expectedPaths.add(out3Path);
        for (SinkPlanNode node : oPlan.getDataSinks()) {
            String sinkPath = ((FileDataSink) node.getSinkNode().getPactContract()).getFilePath();
            Assert.assertTrue("Invalid data sink.", expectedPaths.remove(sinkPath));
        }
        // translating to a nephele job graph must not throw
        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * <pre>
 *                              SINK
 *                               |
 *                            COGROUP
 *                        +---/     \----+
 *                       /               \
 *                      /             MATCH10
 *                     /               |    \
 *                    /                |  MATCH9
 *                MATCH5               |  |   \
 *                |   \                |  | MATCH8
 *                | MATCH4             |  |  |   \
 *                |  |   \             |  |  | MATCH7
 *                |  | MATCH3          |  |  |  |   \
 *                |  |  |   \          |  |  |  | MATCH6
 *                |  |  | MATCH2       |  |  |  |  |  |
 *                |  |  |  |   \       +--+--+--+--+--+
 *                |  |  |  | MATCH1            MAP
 *                \  |  |  |  |  | /-----------/
 *                (DATA SOURCE ONE)
 * </pre>
 */
// Exercises the optimizer on a plan where one source branches into many
// self-joins (directly and through a map), ensuring the repeated re-use of the
// same operator as an input does not break plan compilation.
@Test
public void testBranchingSourceMultipleTimes() {
    try {
        // construct the plan: mat1..mat5 chain joins of the raw source with
        // the previous join; mat6..mat10 do the same on the mapped source.
        FileDataSource sourceA = new FileDataSource(new DummyInputFormat(), IN_FILE);
        JoinOperator mat1 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceA)
                .input2(sourceA)
                .build();
        JoinOperator mat2 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceA)
                .input2(mat1)
                .build();
        JoinOperator mat3 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceA)
                .input2(mat2)
                .build();
        JoinOperator mat4 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceA)
                .input2(mat3)
                .build();
        JoinOperator mat5 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceA)
                .input2(mat4)
                .build();
        MapOperator ma = MapOperator.builder(new IdentityMap()).input(sourceA).build();
        JoinOperator mat6 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(ma)
                .build();
        JoinOperator mat7 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(mat6)
                .build();
        JoinOperator mat8 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(mat7)
                .build();
        JoinOperator mat9 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(mat8)
                .build();
        JoinOperator mat10 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(mat9)
                .build();
        // Both join chains are merged by a final co-group feeding the sink.
        CoGroupOperator co = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0, 0)
                .input1(mat5)
                .input2(mat10)
                .build();
        FileDataSink sink = new FileDataSink(new DummyOutputFormat(), OUT_FILE, co);
        // return the PACT plan
        Plan plan = new Plan(sink, "Branching Source Multiple Times");
        OptimizedPlan oPlan = compileNoStats(plan);
        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();
        //Compile plan to verify that no error is thrown
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 *
 * <pre>
 *
 *              (SINK A)
 *                  |    (SINK B)    (SINK C)
 *                CROSS    /          /
 *               /     \   |  +------+
 *              /       \  | /
 *          REDUCE      MATCH2
 *             |    +---/    \
 *              \  /          |
 *               MAP          |
 *                |           |
 *             COGROUP      MATCH1
 *             /     \     /     \
 *        (SRC A)    (SRC B)    (SRC C)
 * </pre>
 */
// Exercises a plan where intermediate results (the map and the second match)
// are consumed by multiple downstream operators and sinks.
@Test
public void testBranchingWithMultipleDataSinks() {
    try {
        // construct the plan
        final String out1Path = "file:///test/1";
        final String out2Path = "file:///test/2";
        final String out3Path = "file:///test/3";
        FileDataSource sourceA = new FileDataSource(new DummyInputFormat(), IN_FILE);
        FileDataSource sourceB = new FileDataSource(new DummyInputFormat(), IN_FILE);
        FileDataSource sourceC = new FileDataSource(new DummyInputFormat(), IN_FILE);
        CoGroupOperator co = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
                .input1(sourceA)
                .input2(sourceB)
                .build();
        MapOperator ma = MapOperator.builder(new IdentityMap()).input(co).build();
        JoinOperator mat1 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(sourceB)
                .input2(sourceC)
                .build();
        // mat2 branches: it feeds the cross as well as two sinks directly.
        JoinOperator mat2 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
                .input1(ma)
                .input2(mat1)
                .build();
        ReduceOperator r = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
                .input(ma)
                .build();
        CrossOperator c = CrossOperator.builder(new DummyCrossStub())
                .input1(r)
                .input2(mat2)
                .build();
        FileDataSink sinkA = new FileDataSink(new DummyOutputFormat(), out1Path, c);
        FileDataSink sinkB = new FileDataSink(new DummyOutputFormat(), out2Path, mat2);
        FileDataSink sinkC = new FileDataSink(new DummyOutputFormat(), out3Path, mat2);
        List<FileDataSink> sinks = new ArrayList<FileDataSink>();
        sinks.add(sinkA);
        sinks.add(sinkB);
        sinks.add(sinkC);
        // return the PACT plan
        Plan plan = new Plan(sinks, "Branching Plans With Multiple Data Sinks");
        OptimizedPlan oPlan = compileNoStats(plan);
        // ---------- check the optimizer plan ----------
        // number of sinks
        Assert.assertEquals("Wrong number of data sinks.", 3, oPlan.getDataSinks().size());
        // sinks contain all sink paths
        Set<String> allSinks = new HashSet<String>();
        allSinks.add(out1Path);
        allSinks.add(out2Path);
        allSinks.add(out3Path);
        for (SinkPlanNode n : oPlan.getDataSinks()) {
            String path = ((FileDataSink) n.getSinkNode().getPactContract()).getFilePath();
            Assert.assertTrue("Invalid data sink.", allSinks.remove(path));
        }
        // ---------- compile plan to nephele job graph to verify that no error is thrown ----------
        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Builds a plan in which every contract type (map, reduce, join, co-group, cross)
 * has at least one branching output, and verifies that the plan both optimizes and
 * compiles to a job graph without error.
 *
 * Fix: removed a stray empty statement ({@code ;}) that followed the
 * {@code match1} builder chain.
 */
@SuppressWarnings("unchecked")
@Test
public void testBranchEachContractType() {
    try {
        // construct the plan
        FileDataSource sourceA = new FileDataSource(new DummyInputFormat(), "file:///test/file1", "Source A");
        FileDataSource sourceB = new FileDataSource(new DummyInputFormat(), "file:///test/file2", "Source B");
        FileDataSource sourceC = new FileDataSource(new DummyInputFormat(), "file:///test/file3", "Source C");

        MapOperator map1 = MapOperator.builder(new IdentityMap()).input(sourceA).name("Map 1").build();

        ReduceOperator reduce1 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
            .input(map1)
            .name("Reduce 1")
            .build();

        // first input is a union of three sources (hence the unchecked varargs warning)
        JoinOperator match1 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
            .input1(sourceB, sourceB, sourceC)
            .input2(sourceC)
            .name("Match 1")
            .build();

        CoGroupOperator cogroup1 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(sourceA)
            .input2(sourceB)
            .name("CoGroup 1")
            .build();

        CrossOperator cross1 = CrossOperator.builder(new DummyCrossStub())
            .input1(reduce1)
            .input2(cogroup1)
            .name("Cross 1")
            .build();

        // cross1 branches into both inputs of this co-group
        CoGroupOperator cogroup2 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(cross1)
            .input2(cross1)
            .name("CoGroup 2")
            .build();

        CoGroupOperator cogroup3 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(map1)
            .input2(match1)
            .name("CoGroup 3")
            .build();

        MapOperator map2 = MapOperator.builder(new IdentityMap()).input(cogroup3).name("Map 2").build();

        CoGroupOperator cogroup4 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(map2)
            .input2(match1)
            .name("CoGroup 4")
            .build();

        CoGroupOperator cogroup5 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(cogroup2)
            .input2(cogroup1)
            .name("CoGroup 5")
            .build();

        CoGroupOperator cogroup6 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(reduce1)
            .input2(cogroup4)
            .name("CoGroup 6")
            .build();

        CoGroupOperator cogroup7 = CoGroupOperator.builder(new DummyCoGroupStub(), IntValue.class, 0,0)
            .input1(cogroup5)
            .input2(cogroup6)
            .name("CoGroup 7")
            .build();

        // the sink unions cogroup7 with several already-branched intermediate results
        FileDataSink sink = new FileDataSink(new DummyOutputFormat(), OUT_FILE, cogroup7);
        sink.addInput(sourceA);
        sink.addInput(cogroup3);
        sink.addInput(cogroup4);
        sink.addInput(cogroup1);

        // return the PACT plan
        Plan plan = new Plan(sink, "Branching of each contract type");

        OptimizedPlan oPlan = compileNoStats(plan);

        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();

        //Compile plan to verify that no error is thrown
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests a plan where the result of one join (mat1) fans out into three branches
 * (map, reduce, map-chain) that are later unioned back into the first input of a
 * second join; the second join is hinted to use a sort-merge local strategy.
 */
@Test
public void testBranchingUnion() {
    try {
        // construct the plan
        FileDataSource source1 = new FileDataSource(new DummyInputFormat(), IN_FILE);
        FileDataSource source2 = new FileDataSource(new DummyInputFormat(), IN_FILE);

        JoinOperator mat1 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
            .input1(source1)
            .input2(source2)
            .name("Match 1")
            .build();

        // mat1 branches into ma1, r2 and ma2
        MapOperator ma1 = MapOperator.builder(new IdentityMap()).input(mat1).name("Map1").build();

        ReduceOperator r1 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
            .input(ma1)
            .name("Reduce 1")
            .build();

        ReduceOperator r2 = ReduceOperator.builder(new IdentityReduce(), IntValue.class, 0)
            .input(mat1)
            .name("Reduce 2")
            .build();

        MapOperator ma2 = MapOperator.builder(new IdentityMap()).input(mat1).name("Map 2").build();

        MapOperator ma3 = MapOperator.builder(new IdentityMap()).input(ma2).name("Map 3").build();

        // input1 is a union of all branches; ma2 also feeds input2 directly
        @SuppressWarnings("unchecked")
        JoinOperator mat2 = JoinOperator.builder(new DummyMatchStub(), IntValue.class, 0, 0)
            .input1(r1, r2, ma2, ma3)
            .input2(ma2)
            .name("Match 2")
            .build();
        mat2.setParameter(PactCompiler.HINT_LOCAL_STRATEGY, PactCompiler.HINT_LOCAL_STRATEGY_MERGE);

        FileDataSink sink = new FileDataSink(new DummyOutputFormat(), OUT_FILE, mat2);

        // return the PACT plan
        Plan plan = new Plan(sink, "Branching Union");

        OptimizedPlan oPlan = compileNoStats(plan);

        NepheleJobGraphGenerator jobGen = new NepheleJobGraphGenerator();

        //Compile plan to verify that no error is thrown
        jobGen.compileJobGraph(oPlan);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Minimal branching case: a single source consumed directly by two sinks. Verifies
 * that both sinks survive optimization with their original paths and that the plan
 * translates to a job graph.
 *
 * <pre>
 *           (SRC A)
 *           /     \
 *     (SINK A)   (SINK B)
 * </pre>
 */
@Test
public void testBranchingWithMultipleDataSinksSmall() {
    try {
        // build a plan with one source feeding two sinks
        final String firstOutput = "file:///test/1";
        final String secondOutput = "file:///test/2";

        FileDataSource sharedSource = new FileDataSource(DummyInputFormat.class, IN_FILE);

        FileDataSink firstSink = new FileDataSink(DummyOutputFormat.class, firstOutput, sharedSource);
        FileDataSink secondSink = new FileDataSink(DummyOutputFormat.class, secondOutput, sharedSource);

        List<FileDataSink> sinkList = new ArrayList<FileDataSink>();
        sinkList.add(firstSink);
        sinkList.add(secondSink);

        Plan candidatePlan = new Plan(sinkList, "Plans With Multiple Data Sinks");

        OptimizedPlan optimized = compileNoStats(candidatePlan);

        // ---------- check the optimizer plan ----------

        // both sinks must still be present
        Assert.assertEquals("Wrong number of data sinks.", 2, optimized.getDataSinks().size());

        // every sink path must appear exactly once
        Set<String> remainingPaths = new HashSet<String>();
        remainingPaths.add(firstOutput);
        remainingPaths.add(secondOutput);

        for (SinkPlanNode sinkNode : optimized.getDataSinks()) {
            String sinkPath = ((FileDataSink) sinkNode.getSinkNode().getPactContract()).getFilePath();
            Assert.assertTrue("Invalid data sink.", remainingPaths.remove(sinkPath));
        }

        // ---------- compile plan to nephele job graph to verify that no error is thrown ----------
        NepheleJobGraphGenerator graphGenerator = new NepheleJobGraphGenerator();
        graphGenerator.compileJobGraph(optimized);
    } catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Two completely independent source-to-sink pipelines in one plan. The compiler is
 * expected to reject such disjoint sub-plans, so a successful compile fails the test.
 *
 * <pre>
 *     (SINK A)    (SINK B)
 *        |           |
 *     (SRC A)     (SRC B)
 * </pre>
 */
@Test
public void testSimpleDisjointPlan() {
    // assemble two pipelines that share no operators
    final String firstOutput = "file:///test/1";
    final String secondOutput = "file:///test/2";

    FileDataSource leftSource = new FileDataSource(DummyInputFormat.class, IN_FILE);
    FileDataSource rightSource = new FileDataSource(DummyInputFormat.class, IN_FILE);

    FileDataSink leftSink = new FileDataSink(DummyOutputFormat.class, firstOutput, leftSource);
    FileDataSink rightSink = new FileDataSink(DummyOutputFormat.class, secondOutput, rightSource);

    List<FileDataSink> sinkList = new ArrayList<FileDataSink>();
    sinkList.add(leftSink);
    sinkList.add(rightSink);

    Plan disjointPlan = new Plan(sinkList, "Disjoint plan with multiple data sinks");

    try {
        compileNoStats(disjointPlan);
        Assert.fail("Plan must not be compilable, it contains disjoint sub-plans.");
    }
    catch (Exception expected) {
        // rejection is the expected outcome
    }
}
/**
 * Two disjoint sub-plans where each source additionally branches into two sinks.
 * The compiler should reject disjoint plans, so compilation succeeding fails the test.
 *
 * <pre>
 *     (SINK 3) (SINK 1)   (SINK 2) (SINK 4)
 *         \     /             \     /
 *         (SRC A)             (SRC B)
 * </pre>
 *
 * NOTE: this case is currently not caught by the compiler. we should enable the test once it is caught.
 */
// @Test (Deactivated for now because of unsupported feature)
public void testBranchingDisjointPlan() {
    // construct the plan
    final String out1Path = "file:///test/1";
    final String out2Path = "file:///test/2";
    final String out3Path = "file:///test/3";
    final String out4Path = "file:///test/4";

    FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE);
    FileDataSource sourceB = new FileDataSource(DummyInputFormat.class, IN_FILE);

    // sinks 1/3 hang off sourceA, sinks 2/4 off sourceB — no connection between the halves
    FileDataSink sink1 = new FileDataSink(DummyOutputFormat.class, out1Path, sourceA, "1");
    FileDataSink sink2 = new FileDataSink(DummyOutputFormat.class, out2Path, sourceB, "2");
    FileDataSink sink3 = new FileDataSink(DummyOutputFormat.class, out3Path, sourceA, "3");
    FileDataSink sink4 = new FileDataSink(DummyOutputFormat.class, out4Path, sourceB, "4");

    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink1);
    sinks.add(sink2);
    sinks.add(sink3);
    sinks.add(sink4);

    // return the PACT plan
    Plan plan = new Plan(sinks, "Disjoint plan with multiple data sinks and branches");

    try {
        compileNoStats(plan);
        Assert.fail("Plan must not be compilable, it contains disjoint sub-plans.");
    }
    catch (Exception ex) {
        // as expected
    }
}
/**
 * Tests that an iteration result may branch: the bulk iteration's output feeds one
 * sink directly and a post-iteration mapper feeding a second sink.
 */
@Test
public void testBranchAfterIteration() {
    FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 2");

    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(sourceA);
    iteration.setMaximumNumberOfIterations(10);

    // step function: identity map over the partial solution
    MapOperator mapper = MapOperator.builder(IdentityMap.class).name("Mapper").input(iteration.getPartialSolution()).build();
    iteration.setNextPartialSolution(mapper);

    // branch 1: iteration result straight to a sink
    FileDataSink sink1 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Sink 1");

    // branch 2: iteration result through another map into a second sink
    MapOperator postMap = MapOperator.builder(IdentityMap.class).name("Post Iteration Mapper")
            .input(iteration).build();

    FileDataSink sink2 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, postMap, "Sink 2");

    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink1);
    sinks.add(sink2);

    Plan plan = new Plan(sinks);

    try {
        compileNoStats(plan);
    }
    catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests branching of a source that is consumed both inside an iteration (as a
 * broadcast-carrying map's input) and after it: source1 feeds the in-iteration map
 * and the post-iteration map.
 */
@Test
public void testBranchBeforeIteration() {
    FileDataSource source1 = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 1");
    FileDataSource source2 = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 2");

    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(source2);
    iteration.setMaximumNumberOfIterations(10);

    // inside the loop: maps source1, receives the partial solution as broadcast set "BC"
    MapOperator inMap = MapOperator.builder(new IdentityMap())
                                   .input(source1)
                                   .name("In Iteration Map")
                                   .setBroadcastVariable("BC", iteration.getPartialSolution())
                                   .build();

    iteration.setNextPartialSolution(inMap);

    // after the loop: maps source1 again, with the iteration result as broadcast set
    MapOperator postMap = MapOperator.builder(new IdentityMap())
                                     .input(source1)
                                     .name("Post Iteration Map")
                                     .setBroadcastVariable("BC", iteration)
                                     .build();

    FileDataSink sink = new FileDataSink(DummyOutputFormat.class, OUT_FILE, postMap, "Sink");

    Plan plan = new Plan(sink);

    try {
        compileNoStats(plan);
    }
    catch (Exception e) {
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Test to ensure that sourceA is inside as well as outside of the iteration the same
 * node: both sources feed a sink directly and also participate in the bulk
 * iteration's step function.
 *
 * <pre>
 *       (SRC A)               (SRC B)
 *      /       \             /       \
 * (SINK 1)   (ITERATION)    |     (SINK 2)
 *             /        \   /
 *       (SINK 3)   (CROSS => NEXT PARTIAL SOLUTION)
 * </pre>
 */
@Test
public void testClosure() {
    FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 1");
    FileDataSource sourceB = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 2");

    // direct sinks make each source a branching node
    FileDataSink sink1 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceA, "Sink 1");
    FileDataSink sink2 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceB, "Sink 2");

    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(sourceA);
    iteration.setMaximumNumberOfIterations(10);

    // sourceB is read from inside the loop ("closure" over a node outside the iteration)
    CrossOperator stepFunction = CrossOperator.builder(DummyCrossStub.class).name("StepFunction").
            input1(iteration.getPartialSolution()).
            input2(sourceB).
            build();

    iteration.setNextPartialSolution(stepFunction);

    FileDataSink sink3 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Sink 3");

    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink1);
    sinks.add(sink2);
    sinks.add(sink3);

    Plan plan = new Plan(sinks);

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Same closure scenario as {@link #testClosure()} but for a delta iteration:
 * sources branch into sinks and into the iteration, and sourceC is referenced
 * from inside the loop.
 *
 * <pre>
 *       (SRC A)         (SRC B)          (SRC C)
 *      /       \       /                /       \
 * (SINK 1) (DELTA ITERATION)           |     (SINK 2)
 *            /    |   \                /
 *      (SINK 3)   |  (CROSS => NEXT WORKSET)
 *                 |       |
 *           (JOIN => SOLUTION SET DELTA)
 * </pre>
 */
@Test
public void testClosureDeltaIteration() {
    FileDataSource sourceA = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 1");
    FileDataSource sourceB = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 2");
    FileDataSource sourceC = new FileDataSource(DummyInputFormat.class, IN_FILE, "Source 3");

    FileDataSink sink1 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceA, "Sink 1");
    FileDataSink sink2 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, sourceC, "Sink 2");

    DeltaIteration iteration = new DeltaIteration(0, "Loop");
    iteration.setInitialSolutionSet(sourceA);
    iteration.setInitialWorkset(sourceB);
    iteration.setMaximumNumberOfIterations(10);

    // sourceC is consumed inside the loop as well as by sink2 outside it
    CrossOperator nextWorkset = CrossOperator.builder(DummyCrossStub.class).name("Next workset").
            input1(iteration.getWorkset()).
            input2(sourceC).
            build();

    JoinOperator solutionSetDelta = JoinOperator.builder(DummyMatchStub.class, LongValue.class,0,0).
            name("Next solution set.").
            input1(nextWorkset).
            input2(iteration.getSolutionSet()).
            build();

    iteration.setNextWorkset(nextWorkset);
    iteration.setSolutionSetDelta(solutionSetDelta);

    FileDataSink sink3 = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Sink 3");

    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink1);
    sinks.add(sink2);
    sinks.add(sink3);

    Plan plan = new Plan(sinks);

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests a delta iteration whose loop body joins the workset/solution set against
 * inputs that are static across iterations (the mapped and reduced source).
 *
 * <pre>
 *                  +----Iteration-------+
 *                  |                    |
 *        /---------< >---------join-----< >---sink
 *       /  (Solution)|       /          |
 *      /             |      /           |
 *     /--map-------< >----\/        /---|
 *    /     (Workset)|     \        /    |
 * src-map           |     join----/     |
 *    \              |      /            |
 *     \             +-----/-------------+
 *      \                 /
 *       \--reduce-------/
 * </pre>
 */
@Test
public void testDeltaIterationWithStaticInput() {
    FileDataSource source = new FileDataSource(DummyInputFormat.class, IN_FILE, "source");

    // source branches three ways: solution set, (via map) workset, (via reduce) loop body
    MapOperator mappedSource = MapOperator.builder(IdentityMap.class).
            input(source).
            name("Identity mapped source").
            build();

    ReduceOperator reducedSource = ReduceOperator.builder(IdentityReduce.class).
            input(source).
            name("Identity reduce source").
            build();

    DeltaIteration iteration = new DeltaIteration(0,"Loop");
    iteration.setMaximumNumberOfIterations(10);
    iteration.setInitialSolutionSet(source);
    iteration.setInitialWorkset(mappedSource);

    // reducedSource is loop-invariant ("static") input to the workset join
    JoinOperator nextWorkset = JoinOperator.builder(DummyNonPreservingMatchStub.class, IntValue.class, 0,0).
            input1(iteration.getWorkset()).
            input2(reducedSource).
            name("Next work set").
            build();

    JoinOperator solutionSetDelta = JoinOperator.builder(DummyNonPreservingMatchStub.class, IntValue.class, 0,
            0).
            input1(iteration.getSolutionSet()).
            input2(nextWorkset).
            name("Solution set delta").
            build();

    iteration.setNextWorkset(nextWorkset);
    iteration.setSolutionSetDelta(solutionSetDelta);

    FileDataSink sink = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Iteration sink");
    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink);

    Plan plan = new Plan(sinks);

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests a bulk iteration whose step function joins the partial solution against a
 * loop-invariant (static) input, while the underlying source also branches outside
 * the loop.
 *
 * <pre>
 *             +---------Iteration-------+
 *             |                         |
 *    /--map--< >----\                   |
 *   /         |      \         /-------< >---sink
 * src-map     |     join------/         |
 *   \         |      /                  |
 *    \        +-----/-------------------+
 *     \            /
 *      \--reduce--/
 * </pre>
 */
@Test
public void testIterationWithStaticInput() {
    FileDataSource source = new FileDataSource(DummyInputFormat.class, IN_FILE, "source");

    // source branches into the iteration input (via map) and a static join input (via reduce)
    MapOperator mappedSource = MapOperator.builder(IdentityMap.class).
            input(source).
            name("Identity mapped source").
            build();

    ReduceOperator reducedSource = ReduceOperator.builder(IdentityReduce.class).
            input(source).
            name("Identity reduce source").
            build();

    BulkIteration iteration = new BulkIteration("Loop");
    iteration.setInput(mappedSource);
    iteration.setMaximumNumberOfIterations(10);

    JoinOperator nextPartialSolution = JoinOperator.builder(DummyMatchStub.class, IntValue.class, 0,0).
            input1(iteration.getPartialSolution()).
            input2(reducedSource).
            name("Next partial solution").
            build();

    iteration.setNextPartialSolution(nextPartialSolution);

    FileDataSink sink = new FileDataSink(DummyOutputFormat.class, OUT_FILE, iteration, "Iteration sink");
    List<FileDataSink> sinks = new ArrayList<FileDataSink>();
    sinks.add(sink);

    Plan plan = new Plan(sinks);

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests branching through broadcast sets in the new (DataSet) API: input3 is
 * broadcast to three different operators, input1 is both a pipeline input and a
 * broadcast set, and result1 is both a join input and a broadcast set.
 */
@Test
public void testBranchingBroadcastVariable() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<String> input1 = env.readTextFile(IN_FILE).name("source1");
    DataSet<String> input2 = env.readTextFile(IN_FILE).name("source2");
    DataSet<String> input3 = env.readTextFile(IN_FILE).name("source3");

    DataSet<String> result1 = input1
            .map(new IdentityMapper<String>())
            .reduceGroup(new Top1GroupReducer<String>())
                .withBroadcastSet(input3, "bc");

    DataSet<String> result2 = input2
            .map(new IdentityMapper<String>())
            .reduceGroup(new Top1GroupReducer<String>())
                .withBroadcastSet(input3, "bc");

    // result1 appears twice: as join input and as broadcast set "bc3"
    result1.join(result2)
            .where(new IdentityKeyExtractor<String>())
            .equalTo(new IdentityKeyExtractor<String>())
            .with(new RichJoinFunction<String, String, String>() {
                @Override
                public String join(String first, String second) {
                    return null;
                }
            })
            .withBroadcastSet(input3, "bc1")
            .withBroadcastSet(input1, "bc2")
            .withBroadcastSet(result1, "bc3")
            .print();

    Plan plan = env.createProgramPlan();

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests a broadcast set used both before an iteration (for the initial solution)
 * and inside the iteration body — i.e. the loop "closes over" the broadcast data.
 */
@Test
public void testBCVariableClosure() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<String> input = env.readTextFile(IN_FILE).name("source1");

    DataSet<String> reduced = input
            .map(new IdentityMapper<String>())
            .reduceGroup(new Top1GroupReducer<String>());

    DataSet<String> initialSolution = input.map(new IdentityMapper<String>()).withBroadcastSet(reduced, "bc");

    IterativeDataSet<String> iteration = initialSolution.iterate(100);

    // "reduced" is broadcast into the loop body as well
    iteration.closeWith(iteration.map(new IdentityMapper<String>()).withBroadcastSet(reduced, "red"))
            .print();

    Plan plan = env.createProgramPlan();

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests three independent iterations over the same input, each broadcasting the
 * same pre-computed data set into its loop body.
 */
@Test
public void testMultipleIterations() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<String> input = env.readTextFile(IN_FILE).name("source1");

    // shared (branched) dataset broadcast into all three loop bodies
    DataSet<String> reduced = input
            .map(new IdentityMapper<String>())
            .reduceGroup(new Top1GroupReducer<String>());

    IterativeDataSet<String> iteration1 = input.iterate(100);
    IterativeDataSet<String> iteration2 = input.iterate(20);
    IterativeDataSet<String> iteration3 = input.iterate(17);

    iteration1.closeWith(iteration1.map(new IdentityMapper<String>()).withBroadcastSet(reduced, "bc1")).print();
    iteration2.closeWith(iteration2.reduceGroup(new Top1GroupReducer<String>()).withBroadcastSet(reduced, "bc2")).print();
    iteration3.closeWith(iteration3.reduceGroup(new IdentityGroupReducer<String>()).withBroadcastSet(reduced, "bc3")).print();

    Plan plan = env.createProgramPlan();

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
/**
 * Tests three independent iterations branching off the same input, without any
 * broadcast sets.
 *
 * NOTE(review): "Closue" in the method name is a typo for "Closure"; left as-is
 * since the method name is the test's public identifier.
 */
@Test
public void testMultipleIterationsWithClosueBCVars() {
    ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

    DataSet<String> input = env.readTextFile(IN_FILE).name("source1");

    IterativeDataSet<String> iteration1 = input.iterate(100);
    IterativeDataSet<String> iteration2 = input.iterate(20);
    IterativeDataSet<String> iteration3 = input.iterate(17);

    iteration1.closeWith(iteration1.map(new IdentityMapper<String>())).print();
    iteration2.closeWith(iteration2.reduceGroup(new Top1GroupReducer<String>())).print();
    iteration3.closeWith(iteration3.reduceGroup(new IdentityGroupReducer<String>())).print();

    Plan plan = env.createProgramPlan();

    try{
        compileNoStats(plan);
    }catch(Exception e){
        e.printStackTrace();
        Assert.fail(e.getMessage());
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.gobblin.runtime.spec_catalog;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.net.URI;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import org.apache.commons.lang3.reflect.ConstructorUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.google.common.base.Optional;
import com.google.common.base.Preconditions;
import com.google.common.util.concurrent.AbstractIdleService;
import com.typesafe.config.Config;
import com.typesafe.config.ConfigException;
import javax.annotation.Nonnull;
import javax.inject.Inject;
import javax.inject.Singleton;
import lombok.Getter;
import org.apache.gobblin.instrumented.Instrumented;
import org.apache.gobblin.metrics.MetricContext;
import org.apache.gobblin.metrics.Tag;
import org.apache.gobblin.runtime.api.FlowSpec;
import org.apache.gobblin.runtime.api.GobblinInstanceEnvironment;
import org.apache.gobblin.runtime.api.MutableSpecCatalog;
import org.apache.gobblin.runtime.api.Spec;
import org.apache.gobblin.runtime.api.SpecCatalog;
import org.apache.gobblin.runtime.api.SpecCatalogListener;
import org.apache.gobblin.runtime.api.SpecNotFoundException;
import org.apache.gobblin.runtime.api.SpecSearchObject;
import org.apache.gobblin.runtime.api.SpecSerDe;
import org.apache.gobblin.runtime.api.SpecStore;
import org.apache.gobblin.runtime.spec_serde.JavaSpecSerDe;
import org.apache.gobblin.runtime.spec_store.FSSpecStore;
import org.apache.gobblin.service.ServiceConfigKeys;
import org.apache.gobblin.util.ClassAliasResolver;
import org.apache.gobblin.util.ConfigUtils;
import org.apache.gobblin.util.callbacks.CallbackResult;
import org.apache.gobblin.util.callbacks.CallbacksDispatcher;
/**
 * A service that interacts with FlowSpec storage.
 * The FlowSpec storage, a.k.a. {@link SpecStore}, should be pluggable with different implementations.
 */
@Singleton
public class FlowCatalog extends AbstractIdleService implements SpecCatalog, MutableSpecCatalog {
/***
 * Configuration properties related to FlowSpec Store
 */
// Config key selecting the SpecStore implementation (class name or alias).
public static final String FLOWSPEC_STORE_CLASS_KEY = "flowSpec.store.class";
// Config key for the FS-backed store directory; copied onto FSSpecStore's own key.
public static final String FLOWSPEC_STORE_DIR_KEY = "flowSpec.store.dir";
public static final String DEFAULT_FLOWSPEC_STORE_CLASS = FSSpecStore.class.getCanonicalName();
// Config key selecting the SpecSerDe implementation (class name or alias).
public static final String FLOWSPEC_SERDE_CLASS_KEY = "flowSpec.serde.class";
public static final String DEFAULT_FLOWSPEC_SERDE_CLASS = JavaSpecSerDe.class.getCanonicalName();

// Registered listeners notified of spec additions/removals.
protected final SpecCatalogListenersList listeners;
protected final Logger log;
// Both are null when instrumentation is disabled (see isInstrumentationEnabled()).
protected final MetricContext metricContext;
protected final MutableStandardMetrics metrics;
@Getter
protected final SpecStore specStore;

// a map which keeps a handle of condition variables for each spec being added to the flow catalog
// to provide synchronization needed for flow specs
private final Map<String, Object> specSyncObjects = new HashMap<>();

// Resolves SpecStore class names/aliases for reflective instantiation.
private final ClassAliasResolver<SpecStore> aliasResolver;
/** Creates a FlowCatalog with a default logger, metric context, and instrumentation enabled. */
public FlowCatalog(Config config) {
  this(config, Optional.<Logger>absent());
}

/** Creates a FlowCatalog with the given (optional) logger and default instrumentation settings. */
public FlowCatalog(Config config, Optional<Logger> log) {
  this(config, log, Optional.<MetricContext>absent(), true);
}

/** Injection constructor: takes logger, metrics, and instrumentation flag from the instance environment. */
@Inject
public FlowCatalog(Config config, GobblinInstanceEnvironment env) {
  this(config, Optional.of(env.getLog()), Optional.of(env.getMetricContext()),
      env.isInstrumentationEnabled());
}
/**
 * Primary constructor: wires logging and (optionally) metrics, then reflectively
 * instantiates the configured {@link SpecSerDe} and {@link SpecStore}.
 *
 * @param config                 catalog configuration (store/serde class keys, store dir)
 * @param log                    logger to use; falls back to a class-scoped logger
 * @param parentMetricContext    parent context for metrics; a fresh one is created if absent
 * @param instrumentationEnabled when false, metricContext and metrics stay null
 */
public FlowCatalog(Config config, Optional<Logger> log, Optional<MetricContext> parentMetricContext,
    boolean instrumentationEnabled) {
  this.log = log.isPresent() ? log.get() : LoggerFactory.getLogger(getClass());
  this.listeners = new SpecCatalogListenersList(log);
  if (instrumentationEnabled) {
    MetricContext realParentCtx =
        parentMetricContext.or(Instrumented.getMetricContext(new org.apache.gobblin.configuration.State(), getClass()));
    this.metricContext = realParentCtx.childBuilder(FlowCatalog.class.getSimpleName()).build();
    this.metrics = new MutableStandardMetrics(this, Optional.of(config));
    // metrics object listens to this catalog so it can track add/remove events
    this.addListener(this.metrics);
  } else {
    this.metricContext = null;
    this.metrics = null;
  }

  this.aliasResolver = new ClassAliasResolver<>(SpecStore.class);
  try {
    Config newConfig = config;
    // translate the catalog-level store-dir key onto FSSpecStore's own config key
    if (config.hasPath(FLOWSPEC_STORE_DIR_KEY)) {
      newConfig = config.withValue(FSSpecStore.SPECSTORE_FS_DIR_KEY,
          config.getValue(FLOWSPEC_STORE_DIR_KEY));
    }
    String specStoreClassName = ConfigUtils.getString(config, FLOWSPEC_STORE_CLASS_KEY, DEFAULT_FLOWSPEC_STORE_CLASS);
    this.log.info(String.format("Using class name/alias [%s] for specstore", specStoreClassName));
    String specSerDeClassName = ConfigUtils.getString(config, FLOWSPEC_SERDE_CLASS_KEY, DEFAULT_FLOWSPEC_SERDE_CLASS);
    this.log.info(String.format("Using class name/alias [%s] for spec serde", specSerDeClassName));

    // serde takes a no-arg constructor; the store takes (Config, SpecSerDe)
    SpecSerDe specSerDe = (SpecSerDe) ConstructorUtils.invokeConstructor(Class.forName(
        new ClassAliasResolver<>(SpecSerDe.class).resolve(specSerDeClassName)));
    this.specStore = (SpecStore) ConstructorUtils.invokeConstructor(Class.forName(this.aliasResolver.resolve(
        specStoreClassName)), newConfig, specSerDe);
  } catch (NoSuchMethodException | IllegalAccessException | InvocationTargetException | InstantiationException
      | ClassNotFoundException e) {
    throw new RuntimeException(e);
  }
}
/***************************************************
 /* Catalog init and shutdown handlers             *
 /**************************************************/

/** No startup work needed; the spec store is created eagerly in the constructor. */
@Override
protected void startUp() throws Exception {
  //Do nothing
}

/** Releases listener resources on service shutdown. */
@Override
protected void shutDown() throws Exception {
  this.listeners.close();
}
/***************************************************
 /* Catalog listeners                              *
 /**************************************************/

/**
 * Replays every spec currently in the store to all registered listeners as an
 * add-spec event. Failures to enumerate the store are logged, not propagated.
 */
protected void notifyAllListeners() {
  try {
    Iterator<URI> uriIterator = getSpecURIs();
    while (uriIterator.hasNext()) {
      // getSpecWrapper returns null for vanished specs — NOTE(review): a null spec
      // would be passed straight to onAddSpec here; verify listeners tolerate it
      this.listeners.onAddSpec(getSpecWrapper(uriIterator.next()));
    }
  } catch (IOException e) {
    log.error("Cannot retrieve specs from catalog:", e);
  }
}
/**
 * Registers a listener. If the catalog service is already running, the listener is
 * immediately caught up by replaying every existing spec to it as an add event.
 */
@Override
public void addListener(SpecCatalogListener specListener) {
  Preconditions.checkNotNull(specListener);
  this.listeners.addListener(specListener);

  if (state() == State.RUNNING) {
    try {
      Iterator<URI> uriIterator = getSpecURIs();
      while (uriIterator.hasNext()) {
        SpecCatalogListener.AddSpecCallback addJobCallback =
            new SpecCatalogListener.AddSpecCallback(getSpecWrapper(uriIterator.next()));
        // replay only to the newly added listener, not to everyone
        this.listeners.callbackOneListener(addJobCallback, specListener);
      }
    } catch (IOException e) {
      log.error("Cannot retrieve specs from catalog:", e);
    }
  }
}
/** Unregisters a previously added listener. */
@Override
public void removeListener(SpecCatalogListener specCatalogListener) {
  this.listeners.removeListener(specCatalogListener);
}

/** Registers a listener held only weakly, so it can be garbage-collected. */
@Override
public void registerWeakSpecCatalogListener(SpecCatalogListener specCatalogListener) {
  this.listeners.registerWeakSpecCatalogListener(specCatalogListener);
}
/***************************************************
 /* Catalog metrics                                *
 /**************************************************/

// NOTE(review): annotated @Nonnull but returns the field, which is null when
// instrumentation is disabled — callers should check isInstrumentationEnabled().
@Nonnull
@Override
public MetricContext getMetricContext() {
  return this.metricContext;
}

/** Instrumentation is considered enabled iff a metric context was created. */
@Override
public boolean isInstrumentationEnabled() {
  return null != this.metricContext;
}

/** This catalog contributes no extra metric tags. */
@Override
public List<Tag<?>> generateTags(org.apache.gobblin.configuration.State state) {
  return Collections.emptyList();
}

/** Metric context switching is not supported by this catalog. */
@Override
public void switchMetricContext(List<Tag<?>> tags) {
  throw new UnsupportedOperationException();
}

/** Metric context switching is not supported by this catalog. */
@Override
public void switchMetricContext(MetricContext context) {
  throw new UnsupportedOperationException();
}

/** Returns the catalog's standard metrics; null when instrumentation is disabled. */
@Override
public SpecCatalog.StandardMetrics getMetrics() {
  return this.metrics;
}
/**************************************************
 /* Catalog core functionality                    *
 /*************************************************/

/** Returns the URIs of all specs in the underlying store. */
public Iterator<URI> getSpecURIs() throws IOException {
  return specStore.getSpecURIs();
}

/** Returns the URIs of all specs in the store that carry the given tag. */
public Iterator<URI> getSpecURISWithTag(String tag) throws IOException {
  return specStore.getSpecURIsWithTag(tag);
}
/**
 * Get all specs from {@link SpecStore}
 * Not suggested for {@link FlowCatalog} where the total amount of space that all {@link FlowSpec}s occupied
 * would be large and loading process is slow.
 *
 * @throws RuntimeException wrapping any {@link IOException} from the store
 */
@Deprecated
@Override
public Collection<Spec> getSpecs() {
  try {
    return specStore.getSpecs();
    // TODO: Have kind of metrics keeping track of specs that failed to be deserialized.
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve Specs from Spec store", e);
  }
}
/**
 * Get number of specs from {@link SpecStore}
 *
 * @throws RuntimeException wrapping any {@link IOException} from the store
 */
@Override
public int getSize() {
  try {
    return specStore.getSize();
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve number of specs from Spec store", e);
  }
}
/**
 * Returns whether a spec with the given URI exists in the store.
 *
 * @throws RuntimeException wrapping any {@link IOException} from the store
 */
public boolean exists(URI uri) {
  try {
    return specStore.exists(uri);
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve Spec from Spec store for URI: " + uri, e);
  }
}
/**
 * Fetches the single spec identified by the given URI.
 *
 * @throws SpecNotFoundException if no spec exists for the URI
 * @throws RuntimeException wrapping any {@link IOException} from the store
 */
@Override
public Spec getSpecs(URI uri) throws SpecNotFoundException {
  try {
    return specStore.getSpec(uri);
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve Spec from Spec store for URI: " + uri, e);
  }
}
/**
 * Fetches all specs matching the given search object.
 *
 * @throws RuntimeException wrapping any {@link IOException} from the store
 */
@Override
public Collection<Spec> getSpecs(SpecSearchObject specSearchObject) {
  try {
    return specStore.getSpecs(specSearchObject);
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve Spec from Spec store for URI: " + specSearchObject, e);
  }
}
/**
 * Fetches every spec in the store. Same cost caveat as the deprecated
 * {@link #getSpecs()}: may be slow and memory-heavy for large catalogs.
 */
public Collection<Spec> getAllSpecs() {
  try {
    return specStore.getSpecs();
  } catch (IOException e) {
    throw new RuntimeException("Cannot retrieve all specs from Spec stores", e);
  }
}
/**
 * A wrapper of getSpecs that handles {@link SpecNotFoundException} properly:
 * the exception is logged and {@code null} is returned instead of propagating.
 * This is the most common way to fetch a {@link Spec}; callers needing custom
 * handling should catch {@link SpecNotFoundException} themselves.
 *
 * @return the spec for the URI, or {@code null} if it has vanished from the store
 */
public Spec getSpecWrapper(URI uri) {
  try {
    return getSpecs(uri);
  } catch (SpecNotFoundException snfe) {
    log.error(String.format("The URI %s discovered in SpecStore is missing in FlowCatalog"
        + ", suspecting current modification on SpecStore", uri), snfe);
    return null;
  }
}
/**
 * Persist {@link Spec} into {@link SpecStore} and notify {@link SpecCatalogListener} if triggerListener
 * is set to true.
 * If the {@link Spec} is a {@link FlowSpec} it is persisted if it can be compiled at the time this method received
 * the spec. `explain` specs are not persisted. The logic of this method is tightly coupled with the logic of
 * {@link GobblinServiceJobScheduler#onAddSpec()}, which is one of the listener of {@link FlowCatalog}.
 * We use condition variables {@link #specSyncObjects} to achieve synchronization between
 * {@link GobblinServiceJobScheduler#NonScheduledJobRunner} thread and this thread to ensure deletion of
 * {@link FlowSpec} happens after the corresponding run once flow is submitted to the orchestrator.
 *
 * @param spec The Spec to be added
 * @param triggerListener True if listeners should be notified.
 * @return a map of listeners and their {@link AddSpecResponse}s
 */
public Map<String, AddSpecResponse> put(Spec spec, boolean triggerListener) {
  Map<String, AddSpecResponse> responseMap = new HashMap<>();
  FlowSpec flowSpec = (FlowSpec) spec;
  Preconditions.checkState(state() == State.RUNNING, String.format("%s is not running.", this.getClass().getName()));
  Preconditions.checkNotNull(flowSpec);

  log.info(String.format("Adding FlowSpec with URI: %s and Config: %s", flowSpec.getUri(), flowSpec.getConfigAsProperties()));
  // condition variable other threads wait on until this spec is fully persisted
  Object syncObject = new Object();
  specSyncObjects.put(flowSpec.getUri().toString(), syncObject);

  if (triggerListener) {
    AddSpecResponse<CallbacksDispatcher.CallbackResults<SpecCatalogListener, AddSpecResponse>> response = this.listeners.onAddSpec(flowSpec);
    // If flow fails compilation, the result will have a non-empty string with the error
    for (Map.Entry<SpecCatalogListener, CallbackResult<AddSpecResponse>> entry : response.getValue().getSuccesses().entrySet()) {
      responseMap.put(entry.getKey().getName(), entry.getValue().getResult());
    }
  }

  if (isCompileSuccessful(responseMap)) {
    synchronized (syncObject) {
      try {
        // `explain` specs are dry runs and are never written to the store
        if (!flowSpec.isExplain()) {
          long startTime = System.currentTimeMillis();
          specStore.addSpec(spec);
          metrics.updatePutSpecTime(startTime);
        }
        responseMap.put(ServiceConfigKeys.COMPILATION_SUCCESSFUL, new AddSpecResponse<>("true"));
      } catch (IOException e) {
        throw new RuntimeException("Cannot add Spec to Spec store: " + flowSpec, e);
      } finally {
        // wake any waiters even on failure, then drop the condition variable
        syncObject.notifyAll();
        this.specSyncObjects.remove(flowSpec.getUri().toString());
      }
    }
  } else {
    responseMap.put(ServiceConfigKeys.COMPILATION_SUCCESSFUL, new AddSpecResponse<>("false"));
  }
  return responseMap;
}
public static boolean isCompileSuccessful(Map<String, AddSpecResponse> responseMap) {
// If we cannot get the response from the scheduler, assume that the flow failed compilation
AddSpecResponse<String> addSpecResponse = responseMap.getOrDefault(
ServiceConfigKeys.GOBBLIN_SERVICE_JOB_SCHEDULER_LISTENER_CLASS, new AddSpecResponse<>(null));
return isCompileSuccessful(addSpecResponse.getValue());
}
public static boolean isCompileSuccessful(String dag) {
return dag != null && !dag.contains(ConfigException.class.getSimpleName());
}
  /** Adds the spec, always notifying listeners; see {@link #put(Spec, boolean)}. */
  @Override
  public Map<String, AddSpecResponse> put(Spec spec) {
    return put(spec, true);
  }
  /** Removes the spec at {@code uri} with no extra headers, notifying listeners. */
  public void remove(URI uri) {
    remove(uri, new Properties());
  }

  /** Removes the spec at {@code uri} with the given headers, notifying listeners. */
  @Override
  public void remove(URI uri, Properties headers) {
    this.remove(uri, headers, true);
  }
  /**
   * Deletes the spec at {@code uri} from the store, records the removal latency, and
   * optionally notifies {@link SpecCatalogListener}s.
   *
   * @param uri             URI of the spec to delete; must not be null
   * @param headers         headers forwarded to listeners on delete
   * @param triggerListener when true, listeners are notified after the store delete succeeds
   * @throws RuntimeException wrapping any {@link IOException} from the store
   */
  public void remove(URI uri, Properties headers, boolean triggerListener) {
    try {
      Preconditions.checkState(state() == State.RUNNING, String.format("%s is not running.", this.getClass().getName()));
      Preconditions.checkNotNull(uri);
      long startTime = System.currentTimeMillis();
      log.info(String.format("Removing FlowSpec with URI: %s", uri));
      specStore.deleteSpec(uri);
      this.metrics.updateRemoveSpecTime(startTime);
      // Listeners fire only after the store delete succeeded; an IOException above skips them.
      if (triggerListener) {
        this.listeners.onDeleteSpec(uri, FlowSpec.Builder.DEFAULT_VERSION, headers);
      }
    } catch (IOException e) {
      throw new RuntimeException("Cannot delete Spec from Spec store for URI: " + uri, e);
    }
  }
public Object getSyncObject(String specUri) {
return this.specSyncObjects.getOrDefault(specUri, null);
}
}
| |
package com.anthropicandroid.gzt.activity;
/*
* Created by Andrew Brin on 5/10/2016.
*/
import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.AnimatorSet;
import android.animation.ObjectAnimator;
import android.graphics.Point;
import android.graphics.Rect;
import android.util.Log;
import android.util.SparseArray;
import android.view.View;
import android.view.ViewGroup;
import android.view.animation.AccelerateDecelerateInterpolator;
import android.view.animation.DecelerateInterpolator;
import androidx.annotation.NonNull;
import java.util.Stack;
/**
 * Drives zoom-in/zoom-out transitions: a target view is attached over the root layout,
 * then translated and scaled from a small starting rectangle up to its full-size bounds
 * (and back again on undo). Only one {@link AnimatorSet} runs at a time; starting a new
 * animation cancels the one in flight.
 */
public class ZoomAnimator {
    public static final String TAG = ZoomAnimator.class.getSimpleName();
    // Animation durations, in milliseconds.
    public static final int UNZOOM_SPEED = 500;
    public static final int ZOOM_SPEED = 700;
    // The animation currently in flight; cleared by the listener callbacks on end/cancel.
    private AnimatorSet currentAnimatorSet;
    // Views whose zoom can still be undone, most recent on top.
    private Stack<View> reversableViews = new Stack<>();
    // Saved start/end geometry for each target view, keyed by the target view's id.
    private final SparseArray<AnimationPrecursor> animationPrecursors = new SparseArray<>();

    public ZoomAnimator() {
    }

    /**
     * Captures the geometry needed to later zoom {@code targetView} from
     * {@code beginningView}'s on-screen position to {@code viewToMatch}'s bounds,
     * hides {@code beginningView}, and attaches {@code targetView} to the root layout.
     */
    public void addViewAndPrepareToZoom(View targetView, View beginningView, View viewToMatch) {
        // get stand and end bounds as well as global offset; build an animation precursor with
        // those params and add to map
        Log.d(TAG, "initializing animation set");
        View rootView = beginningView.getRootView();
        // index by id of targetView
        animationPrecursors.put(targetView.getId(), new AnimationPrecursor(
                targetView,
                beginningView,
                viewToMatch));
        // disappear old view and add new view
        beginningView.setAlpha(0f);
        ((ViewGroup) rootView).addView(targetView); // this view starts the animation
        targetView.bringToFront(); // bring to front for systems without elevation
    }

    /**
     * Starts the zoom prepared by {@link #addViewAndPrepareToZoom}. Cancels any running
     * animation first. Logs an error and does nothing if no precursor was registered
     * for this view's id.
     */
    public void zoomToView(final View viewToZoomTo) {
        if (currentAnimatorSet != null) currentAnimatorSet.cancel();
        AnimationPrecursor precursor = animationPrecursors.get(viewToZoomTo.getId());
        if (precursor == null)
            Log.e(TAG, "referenced null animator precursor while zooming ");
        else {
            // Scale around the top-left corner so the X/Y translations line up with the bounds.
            viewToZoomTo.setPivotX(0f);
            viewToZoomTo.setPivotY(0f);
            AnimatorSet animatorSet = getZoomAnimatorSet(precursor);
            animatorSet.start();
            currentAnimatorSet = animatorSet;
            reversableViews.add(viewToZoomTo);
        }
    }

    /**
     * Reverses the most recent zoom, if any.
     *
     * @return true if an undo animation was started; false when there is nothing to
     *         undo or the precursor is missing.
     */
    public boolean undoLastAnimation() {
        return !reversableViews.isEmpty() && undoAnimation(reversableViews.pop());
    }

    private boolean undoAnimation(View view) { // should be general unanimate
        int id = view.getId();
        AnimationPrecursor animationPrecursor = animationPrecursors.get(id);
        // Precursors are single-use: remove from the map regardless of the outcome below.
        animationPrecursors.delete(id);
        // when other animations are added, add a switch for a animation-type enum in precursor
        return unZoomPrecursor(animationPrecursor);
    }

    /** Runs the reverse (un-zoom) animation for the precursor, cancelling any running set. */
    private boolean unZoomPrecursor(final AnimationPrecursor precursor) {
        if (precursor == null) {
            Log.e(TAG, "precursor null when unZooming ");
            return false;
        } else {
            if (currentAnimatorSet != null) currentAnimatorSet.cancel();
            AnimatorSet animatorSet = getUnZoomAnimatorSet(precursor);
            animatorSet.start();
            currentAnimatorSet = animatorSet;
            return true;
        }
    }

    /** Builds the forward animation: translate/scale from startBounds up to finalBounds (scale 1). */
    @NonNull
    private AnimatorSet getZoomAnimatorSet(final AnimationPrecursor precursor) {
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet // assign location and scaling values with precursor as beginning
                .play(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.Y,
                        precursor.startBounds.top,
                        precursor.finalBounds.top))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.X,
                        precursor.startBounds.left,
                        precursor.finalBounds.left))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.SCALE_X,
                        precursor.startScale,
                        1f))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.SCALE_Y,
                        precursor.startScale,
                        1f));
        animatorSet.setDuration(ZOOM_SPEED);
        animatorSet.setInterpolator(new AccelerateDecelerateInterpolator());
        animatorSet.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationCancel(Animator animation) {
                super.onAnimationCancel(animation);
                currentAnimatorSet = null;
            }
            @Override
            public void onAnimationEnd(Animator animation) {
                super.onAnimationEnd(animation);
                currentAnimatorSet = null;
            }
        });
        return animatorSet;
    }

    /**
     * Builds the reverse animation back to startBounds/startScale. On end OR cancel the
     * target view is removed from the layout and the original view is made visible again.
     */
    @NonNull
    private AnimatorSet getUnZoomAnimatorSet(final AnimationPrecursor precursor) {
        AnimatorSet animatorSet = new AnimatorSet();
        animatorSet // assign location and scaling values with precursor as target
                .play(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.X,
                        precursor.startBounds.left))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.Y,
                        precursor.startBounds.top))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.SCALE_X,
                        precursor.startScale))
                .with(ObjectAnimator.ofFloat(
                        precursor.targetView,
                        View.SCALE_Y,
                        precursor.startScale));
        animatorSet
                .setDuration(UNZOOM_SPEED)
                .setInterpolator(new DecelerateInterpolator());
        animatorSet.addListener(new AnimatorListenerAdapter() {
            @Override
            public void onAnimationCancel(Animator animation) {
                ((ViewGroup) precursor.rootView).removeView(precursor.targetView); // remove
                // zooming view from layout
                precursor.beginningView.setAlpha(1f); // restore old view's opacity
                currentAnimatorSet = null; // remove animation
            }
            @Override
            public void onAnimationEnd(Animator animation) {
                ((ViewGroup) precursor.rootView).removeView(precursor.targetView); // remove
                // zooming view from layout
                precursor.beginningView.setAlpha(1f); // restore old view's opacity
                currentAnimatorSet = null; // remove animation
            }
        });
        return animatorSet;
    }

    /**
     * Snapshot of the geometry for one zoom: the on-screen rect of the view being replaced
     * (startBounds), the rect to grow into (finalBounds), and the initial scale factor.
     * Start bounds are widened or heightened so their aspect ratio matches the final bounds,
     * keeping the scale uniform on both axes.
     */
    private static class AnimationPrecursor {
        private View targetView;
        private View beginningView;
        private View rootView;
        final Rect startBounds = new Rect();
        final Rect finalBounds = new Rect();
        final Point globalOffset = new Point();
        final private float startScale;

        public AnimationPrecursor(View targetView, View beginningView, View viewToMatch) {
            this.targetView = targetView;
            this.beginningView = beginningView;
            this.rootView = beginningView.getRootView();
            // calculate starting and ending bounds for the zoomed-in view
            beginningView.getGlobalVisibleRect(startBounds);
            viewToMatch.getGlobalVisibleRect(finalBounds, globalOffset);
            // startBounds.offset(-globalOffset.x, -globalOffset.y);
            // NOTE(review): only finalBounds is shifted into the root view's coordinate
            // space; startBounds offsetting is commented out above — confirm intended.
            finalBounds.offset(-globalOffset.x, -globalOffset.y);
            // set starting bounds to same aspect ratio as final bounds
            if ((float) finalBounds.width() / finalBounds.height() > (float) startBounds.width()
                    / startBounds.height()) {
                // extend start bounds horizontally
                startScale = (float) startBounds.height() / finalBounds.height();
                float startWidth = startScale * finalBounds.width();
                float deltaWidth = (startWidth - startBounds.width()) / 2;
                startBounds.left -= deltaWidth;
                startBounds.right += deltaWidth;
            } else {
                // extend start bounds vertically
                Log.d(TAG, "extending vertically");
                startScale = (float) startBounds.width() / finalBounds.width();
                float startHeight = startScale * finalBounds.height();
                float deltaHeight = (startHeight - startBounds.height()) / 2;
                startBounds.top -= deltaHeight;
                startBounds.bottom += deltaHeight;
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.io.sstable;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import java.util.regex.Pattern;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.CharMatcher;
import com.google.common.base.Objects;
import com.google.common.base.Splitter;
import org.apache.cassandra.db.Directories;
import org.apache.cassandra.io.sstable.format.SSTableFormat;
import org.apache.cassandra.io.sstable.format.Version;
import org.apache.cassandra.io.sstable.metadata.IMetadataSerializer;
import org.apache.cassandra.io.sstable.metadata.MetadataSerializer;
import org.apache.cassandra.utils.Pair;
import static org.apache.cassandra.io.sstable.Component.separator;
/**
 * A SSTable is described by the keyspace and column family it contains data
 * for, a generation (where higher generations contain more recent data) and
 * an alphabetic version string.
 *
 * A descriptor can be marked as temporary, which influences generated filenames.
 */
public class Descriptor
{
    private final static String LEGACY_TMP_REGEX_STR = "^((.*)\\-(.*)\\-)?tmp(link)?\\-((?:l|k).)\\-(\\d)*\\-(.*)$";
    private final static Pattern LEGACY_TMP_REGEX = Pattern.compile(LEGACY_TMP_REGEX_STR);

    public static String TMP_EXT = ".tmp";

    private static final Splitter filenameSplitter = Splitter.on('-');

    /** canonicalized path to the directory where SSTable resides */
    public final File directory;
    /** version has the following format: <code>[a-z]+</code> */
    public final Version version;
    public final String ksname;
    public final String cfname;
    public final int generation;
    public final SSTableFormat.Type formatType;

    /** Precomputed in the constructor; all contributing fields are final. */
    private final int hashCode;

    /**
     * A descriptor that assumes CURRENT_VERSION.
     */
    @VisibleForTesting
    public Descriptor(File directory, String ksname, String cfname, int generation)
    {
        this(SSTableFormat.Type.current().info.getLatestVersion(), directory, ksname, cfname, generation, SSTableFormat.Type.current());
    }

    /**
     * Constructor for sstable writers only.
     */
    public Descriptor(File directory, String ksname, String cfname, int generation, SSTableFormat.Type formatType)
    {
        this(formatType.info.getLatestVersion(), directory, ksname, cfname, generation, formatType);
    }

    public Descriptor(Version version, File directory, String ksname, String cfname, int generation, SSTableFormat.Type formatType)
    {
        assert version != null && directory != null && ksname != null && cfname != null && formatType.info.getLatestVersion().getClass().equals(version.getClass());
        this.version = version;
        try
        {
            // Canonicalize so two descriptors for the same on-disk file compare equal
            // regardless of how the path was spelled.
            this.directory = directory.getCanonicalFile();
        }
        catch (IOException e)
        {
            throw new IOError(e);
        }
        this.ksname = ksname;
        this.cfname = cfname;
        this.generation = generation;
        this.formatType = formatType;
        hashCode = Objects.hashCode(version, this.directory, generation, ksname, cfname, formatType);
    }

    /** Returns a copy of this descriptor with only the generation replaced. */
    public Descriptor withGeneration(int newGeneration)
    {
        return new Descriptor(version, directory, ksname, cfname, newGeneration, formatType);
    }

    /** Returns a copy of this descriptor using the latest version of {@code newType}. */
    public Descriptor withFormatType(SSTableFormat.Type newType)
    {
        return new Descriptor(newType.info.getLatestVersion(), directory, ksname, cfname, generation, newType);
    }

    /** Absolute filename for the given component with the temporary suffix appended. */
    public String tmpFilenameFor(Component component)
    {
        return filenameFor(component) + TMP_EXT;
    }

    /** Absolute filename for the given component of this sstable. */
    public String filenameFor(Component component)
    {
        return baseFilename() + separator + component.name();
    }

    /** Absolute path prefix shared by all components: {@code <dir>/<version>-<gen>-<format>}. */
    public String baseFilename()
    {
        StringBuilder buff = new StringBuilder();
        buff.append(directory).append(File.separatorChar);
        appendFileName(buff);
        return buff.toString();
    }

    /** Appends {@code <version>-<gen>-<format>} to the builder. */
    private void appendFileName(StringBuilder buff)
    {
        buff.append(version).append(separator);
        buff.append(generation);
        buff.append(separator).append(formatType.name);
    }

    /** Filename (no directory) for the given component. */
    public String relativeFilenameFor(Component component)
    {
        final StringBuilder buff = new StringBuilder();
        appendFileName(buff);
        buff.append(separator).append(component.name());
        return buff.toString();
    }

    public SSTableFormat getFormat()
    {
        return formatType.info;
    }

    /** Return any temporary files found in the directory */
    public List<File> getTemporaryFiles()
    {
        File[] tmpFiles = directory.listFiles((dir, name) ->
                                              name.endsWith(Descriptor.TMP_EXT));
        // listFiles() returns null when the directory does not exist or cannot be read;
        // the previous code dereferenced that null. Treat it as "no temporary files".
        if (tmpFiles == null)
            return Collections.emptyList();
        List<File> ret = new ArrayList<>(tmpFiles.length);
        Collections.addAll(ret, tmpFiles);
        return ret;
    }

    public static boolean isValidFile(File file)
    {
        String filename = file.getName();
        return filename.endsWith(".db") && !LEGACY_TMP_REGEX.matcher(filename).matches();
    }

    /**
     * Parse a sstable filename into a Descriptor.
     * <p>
     * This is a shortcut for {@code fromFilename(new File(filename))}.
     *
     * @param filename the filename to a sstable component.
     * @return the descriptor for the parsed file.
     *
     * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could
     * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported
     * versions.
     */
    public static Descriptor fromFilename(String filename)
    {
        return fromFilename(new File(filename));
    }

    /**
     * Parse a sstable filename into a Descriptor.
     * <p>
     * SSTables files are all located within subdirectories of the form {@code <keyspace>/<table>/}. Normal sstables are
     * are directly within that subdirectory structure while 2ndary index, backups and snapshot are each inside an
     * additional subdirectory. The file themselves have the form:
     * {@code <version>-<gen>-<format>-<component>}.
     * <p>
     * Note that this method will only sucessfully parse sstable files of supported versions.
     *
     * @param file the {@code File} object for the filename to parse.
     * @return the descriptor for the parsed file.
     *
     * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could
     * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported
     * versions.
     */
    public static Descriptor fromFilename(File file)
    {
        return fromFilenameWithComponent(file).left;
    }

    /**
     * Parse a sstable filename, extracting both the {@code Descriptor} and {@code Component} part.
     *
     * @param file the {@code File} object for the filename to parse.
     * @return a pair of the descriptor and component corresponding to the provided {@code file}.
     *
     * @throws IllegalArgumentException if the provided {@code file} does point to a valid sstable filename. This could
     * mean either that the filename doesn't look like a sstable file, or that it is for an old and unsupported
     * versions.
     */
    public static Pair<Descriptor, Component> fromFilenameWithComponent(File file)
    {
        // We need to extract the keyspace and table names from the parent directories, so make sure we deal with the
        // absolute path.
        if (!file.isAbsolute())
            file = file.getAbsoluteFile();

        String name = file.getName();
        List<String> tokens = filenameSplitter.splitToList(name);
        int size = tokens.size();

        if (size != 4)
        {
            // This is an invalid sstable file for this version. But to provide a more helpful error message, we detect
            // old format sstable, which had the format:
            //   <keyspace>-<table>-(tmp-)?<version>-<gen>-<component>
            // Note that we assume it's an old format sstable if it has the right number of tokens: this is not perfect
            // but we're just trying to be helpful, not perfect.
            if (size == 5 || size == 6)
                throw new IllegalArgumentException(String.format("%s is of version %s which is now unsupported and cannot be read.",
                                                                 name,
                                                                 tokens.get(size - 3)));
            throw new IllegalArgumentException(String.format("Invalid sstable file %s: the name doesn't look like a supported sstable file name", name));
        }

        String versionString = tokens.get(0);
        if (!Version.validate(versionString))
            throw invalidSSTable(name, "invalid version %s", versionString);

        int generation;
        try
        {
            generation = Integer.parseInt(tokens.get(1));
        }
        catch (NumberFormatException e)
        {
            throw invalidSSTable(name, "the 'generation' part of the name doesn't parse as a number");
        }

        String formatString = tokens.get(2);
        SSTableFormat.Type format;
        try
        {
            format = SSTableFormat.Type.validate(formatString);
        }
        catch (IllegalArgumentException e)
        {
            throw invalidSSTable(name, "unknown 'format' part (%s)", formatString);
        }

        Component component = Component.parse(tokens.get(3));

        Version version = format.info.getVersion(versionString);
        if (!version.isCompatible())
            throw invalidSSTable(name, "incompatible sstable version (%s); you should have run upgradesstables before upgrading", versionString);

        File directory = parentOf(name, file);
        File tableDir = directory;

        // Check if it's a 2ndary index directory (not that it doesn't exclude it to be also a backup or snapshot)
        String indexName = "";
        if (tableDir.getName().startsWith(Directories.SECONDARY_INDEX_NAME_SEPARATOR))
        {
            indexName = tableDir.getName();
            tableDir = parentOf(name, tableDir);
        }

        // Then it can be a backup or a snapshot
        if (tableDir.getName().equals(Directories.BACKUPS_SUBDIR))
            tableDir = tableDir.getParentFile();
        else if (parentOf(name, tableDir).getName().equals(Directories.SNAPSHOT_SUBDIR))
            tableDir = parentOf(name, parentOf(name, tableDir));

        String table = tableDir.getName().split("-")[0] + indexName;
        String keyspace = parentOf(name, tableDir).getName();

        return Pair.create(new Descriptor(version, directory, keyspace, table, generation, format), component);
    }

    private static File parentOf(String name, File file)
    {
        File parent = file.getParentFile();
        if (parent == null)
            throw invalidSSTable(name, "cannot extract keyspace and table name; make sure the sstable is in the proper sub-directories");
        return parent;
    }

    private static IllegalArgumentException invalidSSTable(String name, String msgFormat, Object... parameters)
    {
        // Return (rather than throw) so that call sites' "throw invalidSSTable(...)" is the
        // actual throw point and the compiler can see their control flow correctly.
        return new IllegalArgumentException(String.format("Invalid sstable file " + name + ": " + msgFormat, parameters));
    }

    public IMetadataSerializer getMetadataSerializer()
    {
        return new MetadataSerializer();
    }

    /**
     * @return true if the current Cassandra version can read the given sstable version
     */
    public boolean isCompatible()
    {
        return version.isCompatible();
    }

    @Override
    public String toString()
    {
        return baseFilename();
    }

    @Override
    public boolean equals(Object o)
    {
        if (o == this)
            return true;
        if (!(o instanceof Descriptor))
            return false;
        Descriptor that = (Descriptor)o;
        // version participates in hashCode (see constructor), so it must participate here
        // as well or the equals/hashCode contract is violated for descriptors that differ
        // only in version.
        return that.directory.equals(this.directory)
               && that.generation == this.generation
               && that.ksname.equals(this.ksname)
               && that.cfname.equals(this.cfname)
               && that.version.equals(this.version)
               && that.formatType == this.formatType;
    }

    @Override
    public int hashCode()
    {
        return hashCode;
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.codebuild.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
 * Result of a <code>ListReportsForReportGroup</code> call: one page of report ARNs plus the
 * pagination token for retrieving the next page.
 *
 * @see <a href="http://docs.aws.amazon.com/goto/WebAPI/codebuild-2016-10-06/ListReportsForReportGroup"
 *      target="_top">AWS API Documentation</a>
 */
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListReportsForReportGroupResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /**
     * Pagination token. When more items exist than the <code>maxResults</code> of a previous call,
     * the service returns a <i>nextToken</i>; pass it to a subsequent call to get the next batch,
     * and keep calling with each returned token until no more tokens are returned.
     */
    private String nextToken;

    /** The list of report ARNs. */
    private java.util.List<String> reports;

    /**
     * Sets the pagination token returned by a previous call.
     *
     * @param nextToken
     *        the token identifying the next batch of items.
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token returned by a previous call.
     *
     * @return the token identifying the next batch of items.
     */
    public String getNextToken() {
        return nextToken;
    }

    /**
     * Fluent variant of {@link #setNextToken(String)}.
     *
     * @param nextToken
     *        the token identifying the next batch of items.
     * @return this object, so that method calls can be chained together.
     */
    public ListReportsForReportGroupResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * @return the list of report ARNs.
     */
    public java.util.List<String> getReports() {
        return reports;
    }

    /**
     * Replaces the list of report ARNs with a copy of the given collection.
     *
     * @param reports
     *        the report ARNs, or {@code null} to clear the list.
     */
    public void setReports(java.util.Collection<String> reports) {
        this.reports = (reports == null) ? null : new java.util.ArrayList<String>(reports);
    }

    /**
     * Appends the given values to the existing list of report ARNs (creating it if absent).
     * Use {@link #setReports(java.util.Collection)} or {@link #withReports(java.util.Collection)}
     * to override the existing values instead.
     *
     * @param reports
     *        the report ARNs to append.
     * @return this object, so that method calls can be chained together.
     */
    public ListReportsForReportGroupResult withReports(String... reports) {
        if (this.reports == null) {
            this.reports = new java.util.ArrayList<String>(reports.length);
        }
        java.util.Collections.addAll(this.reports, reports);
        return this;
    }

    /**
     * Replaces the list of report ARNs with a copy of the given collection.
     *
     * @param reports
     *        the report ARNs.
     * @return this object, so that method calls can be chained together.
     */
    public ListReportsForReportGroupResult withReports(java.util.Collection<String> reports) {
        setReports(reports);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
     * redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     *
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken()).append(",");
        }
        if (getReports() != null) {
            sb.append("Reports: ").append(getReports());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        // instanceof is false for null, covering the explicit null check as well.
        if (!(obj instanceof ListReportsForReportGroupResult)) {
            return false;
        }
        ListReportsForReportGroupResult other = (ListReportsForReportGroupResult) obj;
        return java.util.Objects.equals(other.getNextToken(), this.getNextToken())
                && java.util.Objects.equals(other.getReports(), this.getReports());
    }

    @Override
    public int hashCode() {
        // Objects.hash produces exactly the generator's 31-based rolling formula
        // (null members hash to 0), so the values are unchanged.
        return java.util.Objects.hash(getNextToken(), getReports());
    }

    @Override
    public ListReportsForReportGroupResult clone() {
        try {
            return (ListReportsForReportGroupResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
        }
    }
}
| |
package com.robrua.orianna.type.dto.staticdata;
import java.util.List;
import java.util.Map;
import javax.persistence.CascadeType;
import javax.persistence.CollectionTable;
import javax.persistence.Column;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.Lob;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import com.robrua.orianna.type.dto.OriannaDto;
@Entity
@Table(name = "item")
public class Item extends OriannaDto {
    private static final long serialVersionUID = -1354683419193009743L;

    // Basic item metadata (presumably mirroring Riot's static-data item fields — confirm
    // against the API reference).
    private String colloq, name, requiredChampion;
    private Boolean consumeOnFull, consumed, hideFromAll, inStore;
    private Integer depth, specialRecipe, stacks;

    // Long-form text stored as LOBs (descriptions can exceed default column sizes).
    @Lob
    private String description, plaintext, sanitizedDescription;

    // Map of effect keys to values, stored in its own join table.
    @ElementCollection
    @CollectionTable(name = "item_effect", joinColumns = @JoinColumn(name = "item_id"))
    private Map<String, String> effect;

    // Items this item builds from. Column renamed "frm" — presumably to avoid the SQL
    // reserved word FROM; confirm.
    @ElementCollection
    @CollectionTable(name = "item_from", joinColumns = @JoinColumn(name = "item_id"))
    @Column(name = "frm")
    private List<String> from;

    @OneToOne(cascade = CascadeType.ALL)
    private Gold gold;

    // Column renamed "grp" — presumably to avoid the SQL reserved word GROUP; confirm.
    @Column(name = "grp")
    private String group;

    @Id
    private Integer id;

    @OneToOne(cascade = CascadeType.ALL)
    private Image image;

    // Items this item builds into. Column renamed "nto" — presumably to avoid the SQL
    // reserved word INTO; confirm.
    @ElementCollection
    @CollectionTable(name = "item_into", joinColumns = @JoinColumn(name = "item_id"))
    @Column(name = "nto")
    private List<String> into;

    @ElementCollection
    @CollectionTable(name = "item_map", joinColumns = @JoinColumn(name = "item_id"))
    private Map<String, Boolean> maps;

    @OneToOne(cascade = CascadeType.ALL)
    private MetaData rune;

    @OneToOne(cascade = CascadeType.ALL)
    private BasicDataStats stats;

    @ElementCollection
    @CollectionTable(name = "item_tag", joinColumns = @JoinColumn(name = "item_id"))
    private List<String> tags;
/*
* (non-Javadoc)
* @see java.lang.Object#equals(java.lang.Object)
*/
@Override
public boolean equals(final Object obj) {
if(this == obj) {
return true;
}
if(obj == null) {
return false;
}
if(!(obj instanceof Item)) {
return false;
}
final Item other = (Item)obj;
if(id == null) {
if(other.id != null) {
return false;
}
}
else if(!id.equals(other.id)) {
return false;
}
return true;
}
    /**
     * @return the colloq (alternate search keywords for the item; exact format per the
     *         Riot static-data API — unverified)
     */
    public String getColloq() {
        return colloq;
    }

    /**
     * @return the consumed flag
     */
    public Boolean getConsumed() {
        return consumed;
    }

    /**
     * @return the consumeOnFull flag
     */
    public Boolean getConsumeOnFull() {
        return consumeOnFull;
    }

    /**
     * Returns the entity field used as a datastore index for the given key type:
     * "id" for Integer keys, "name" for String keys, otherwise null.
     */
    @Override
    public String getDataStoreIndexField(final Class<?> keyType) {
        if(keyType.equals(Integer.class)) {
            return "id";
        }
        if(keyType.equals(String.class)) {
            return "name";
        }
        return null;
    }

    /**
     * @return the depth
     */
    public Integer getDepth() {
        return depth;
    }

    /**
     * @return the description (raw, possibly containing markup; see
     *         {@link #getSanitizedDescription()})
     */
    public String getDescription() {
        return description;
    }

    /**
     * @return the effect map
     */
    public Map<String, String> getEffect() {
        return effect;
    }

    /**
     * @return the list of items this item builds from
     */
    public List<String> getFrom() {
        return from;
    }

    /**
     * @return the gold (cost information)
     */
    public Gold getGold() {
        return gold;
    }

    /**
     * @return the group
     */
    public String getGroup() {
        return group;
    }

    /**
     * @return the hideFromAll flag
     */
    public Boolean getHideFromAll() {
        return hideFromAll;
    }

    /**
     * @return the id (also the entity's primary key)
     */
    public Integer getId() {
        return id;
    }

    /**
     * @return the image
     */
    public Image getImage() {
        return image;
    }

    /**
     * @return the inStore flag
     */
    public Boolean getInStore() {
        return inStore;
    }

    /**
     * @return the list of items this item builds into
     */
    public List<String> getInto() {
        return into;
    }

    /**
     * @return the maps (map-name to availability — presumably; confirm against the API)
     */
    public Map<String, Boolean> getMaps() {
        return maps;
    }

    /**
     * @return the name
     */
    public String getName() {
        return name;
    }

    /**
     * @return the plaintext (short plain-text summary)
     */
    public String getPlaintext() {
        return plaintext;
    }

    /**
     * @return the requiredChampion
     */
    public String getRequiredChampion() {
        return requiredChampion;
    }

    /**
     * @return the rune metadata
     */
    public MetaData getRune() {
        return rune;
    }

    /**
     * @return the sanitizedDescription (markup-stripped variant of the description)
     */
    public String getSanitizedDescription() {
        return sanitizedDescription;
    }

    /**
     * @return the specialRecipe
     */
    public Integer getSpecialRecipe() {
        return specialRecipe;
    }

    /**
     * @return the stacks
     */
    public Integer getStacks() {
        return stacks;
    }

    /**
     * @return the stats
     */
    public BasicDataStats getStats() {
        return stats;
    }

    /**
     * @return the tags
     */
    public List<String> getTags() {
        return tags;
    }
/**
 * Hash derived solely from {@code id}, keeping the equals/hashCode contract
 * with {@link #equals(Object)}.
 */
@Override
public int hashCode() {
    // Equivalent to 31 * 1 + (id == null ? 0 : id.hashCode()).
    return 31 + (id == null ? 0 : id.hashCode());
}
/** @param colloq the colloquial-name string to set. */
public void setColloq(final String colloq) {
    this.colloq = colloq;
}

/** @param consumed the consumed flag to set. */
public void setConsumed(final Boolean consumed) {
    this.consumed = consumed;
}

/** @param consumeOnFull the consume-on-full flag to set. */
public void setConsumeOnFull(final Boolean consumeOnFull) {
    this.consumeOnFull = consumeOnFull;
}

/** @param depth the build depth to set. */
public void setDepth(final Integer depth) {
    this.depth = depth;
}

/** @param description the raw description text to set. */
public void setDescription(final String description) {
    this.description = description;
}

/** @param effect the effect key/value map to set. */
public void setEffect(final Map<String, String> effect) {
    this.effect = effect;
}

/** @param from the component-item list to set. */
public void setFrom(final List<String> from) {
    this.from = from;
}

/** @param gold the gold cost info to set. */
public void setGold(final Gold gold) {
    this.gold = gold;
}

/** @param group the item group to set. */
public void setGroup(final String group) {
    this.group = group;
}

/** @param hideFromAll the hide-from-all flag to set. */
public void setHideFromAll(final Boolean hideFromAll) {
    this.hideFromAll = hideFromAll;
}

/** @param id the primary key to set. */
public void setId(final Integer id) {
    this.id = id;
}

/** @param image the image descriptor to set. */
public void setImage(final Image image) {
    this.image = image;
}

/** @param inStore the in-store flag to set. */
public void setInStore(final Boolean inStore) {
    this.inStore = inStore;
}

/** @param into the builds-into list to set. */
public void setInto(final List<String> into) {
    this.into = into;
}

/** @param maps the per-map availability flags to set. */
public void setMaps(final Map<String, Boolean> maps) {
    this.maps = maps;
}

/** @param name the item name to set. */
public void setName(final String name) {
    this.name = name;
}

/** @param plaintext the plain-text summary to set. */
public void setPlaintext(final String plaintext) {
    this.plaintext = plaintext;
}

/** @param requiredChampion the required champion to set. */
public void setRequiredChampion(final String requiredChampion) {
    this.requiredChampion = requiredChampion;
}

/** @param rune the rune metadata to set. */
public void setRune(final MetaData rune) {
    this.rune = rune;
}

/** @param sanitizedDescription the sanitized description text to set. */
public void setSanitizedDescription(final String sanitizedDescription) {
    this.sanitizedDescription = sanitizedDescription;
}

/** @param specialRecipe the special-recipe id to set. */
public void setSpecialRecipe(final Integer specialRecipe) {
    this.specialRecipe = specialRecipe;
}

/** @param stacks the maximum stack size to set. */
public void setStacks(final Integer stacks) {
    this.stacks = stacks;
}

/** @param stats the basic stats block to set. */
public void setStats(final BasicDataStats stats) {
    this.stats = stats;
}

/** @param tags the tag list to set. */
public void setTags(final List<String> tags) {
    this.tags = tags;
}
/**
 * Dumps every field in a fixed order; the output string is identical to the
 * previous concatenation-based form.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("Item [colloq=");
    sb.append(colloq).append(", description=").append(description);
    sb.append(", group=").append(group).append(", name=").append(name);
    sb.append(", plaintext=").append(plaintext);
    sb.append(", requiredChampion=").append(requiredChampion);
    sb.append(", sanitizedDescription=").append(sanitizedDescription);
    sb.append(", consumeOnFull=").append(consumeOnFull);
    sb.append(", consumed=").append(consumed);
    sb.append(", hideFromAll=").append(hideFromAll);
    sb.append(", inStore=").append(inStore);
    sb.append(", depth=").append(depth);
    sb.append(", id=").append(id);
    sb.append(", specialRecipe=").append(specialRecipe);
    sb.append(", stacks=").append(stacks);
    sb.append(", effect=").append(effect);
    sb.append(", from=").append(from);
    sb.append(", into=").append(into);
    sb.append(", tags=").append(tags);
    sb.append(", gold=").append(gold);
    sb.append(", image=").append(image);
    sb.append(", maps=").append(maps);
    sb.append(", rune=").append(rune);
    sb.append(", stats=").append(stats);
    return sb.append("]").toString();
}
}
| |
/**
* Copyright 2011 multibit.org
*
* Licensed under the MIT license (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.multibit.viewsystem.swing.view.panels;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Component;
import java.awt.ComponentOrientation;
import java.awt.Container;
import java.awt.Cursor;
import java.awt.Dimension;
import java.awt.FlowLayout;
import java.awt.Font;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.nio.CharBuffer;
import java.text.DateFormat;
import java.util.Collection;
import java.util.Date;
import java.util.Locale;
import javax.swing.Action;
import javax.swing.BorderFactory;
import javax.swing.Icon;
import javax.swing.JFileChooser;
import javax.swing.JLabel;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import javax.swing.JScrollPane;
import javax.swing.SwingConstants;
import javax.swing.filechooser.FileFilter;
import org.multibit.controller.Controller;
import org.multibit.controller.bitcoin.BitcoinController;
import org.multibit.crypto.KeyCrypterOpenSSL;
import org.multibit.file.PrivateKeyAndDate;
import org.multibit.file.PrivateKeysHandler;
import org.multibit.file.PrivateKeysHandlerException;
import org.multibit.model.bitcoin.BitcoinModel;
import org.multibit.model.bitcoin.WalletBusyListener;
import org.multibit.model.core.CoreModel;
import org.multibit.utils.ImageLoader;
import org.multibit.viewsystem.DisplayHint;
import org.multibit.viewsystem.View;
import org.multibit.viewsystem.Viewable;
import org.multibit.viewsystem.swing.ColorAndFontConstants;
import org.multibit.viewsystem.swing.MultiBitFrame;
import org.multibit.viewsystem.swing.action.HelpContextAction;
import org.multibit.viewsystem.swing.action.ImportPrivateKeysSubmitAction;
import org.multibit.viewsystem.swing.view.PrivateKeyFileFilter;
import org.multibit.viewsystem.swing.view.components.FontSizer;
import org.multibit.viewsystem.swing.view.components.HelpButton;
import org.multibit.viewsystem.swing.view.components.MultiBitButton;
import org.multibit.viewsystem.swing.view.components.MultiBitLabel;
import org.multibit.viewsystem.swing.view.components.MultiBitTitledPanel;
import com.google.leafcoin.crypto.KeyCrypterException;
import org.bitcoinj.wallet.Protos.Wallet.EncryptionType;
import com.piuk.blockchain.MyWallet;
import com.piuk.blockchain.MyWalletEncryptedKeyFileFilter;
import com.piuk.blockchain.MyWalletPlainKeyFileFilter;
/**
* The import private keys view.
*/
public class ImportPrivateKeysPanel extends JPanel implements Viewable, WalletBusyListener {
private static final long serialVersionUID = 444992294329957705L;
// Core and Bitcoin-specific controllers; controller is the same object as
// bitcoinController (see constructor), widened to the core interface.
private final Controller controller;
private final BitcoinController bitcoinController;
private MultiBitFrame mainFrame;
// Labels showing the currently active wallet's filename and description.
private MultiBitLabel walletFilenameLabel;
private MultiBitLabel walletDescriptionLabel;
// Button + chooser used to pick the private-key file to import.
private MultiBitButton chooseFilenameButton;
private String chooseFilenameButtonText;
private JFileChooser fileChooser;
// Label echoing the chosen import-file path (field name is historical).
private MultiBitLabel outputFilenameLabel;
// Two status lines displayed under the button row.
private MultiBitLabel messageLabel1;
private MultiBitLabel messageLabel2;
private String outputFilename;
// Import-file password widgets (passwordField1/2) and the unlock button.
private MultiBitLabel passwordInfoLabel;
private JPasswordField passwordField1;
private MultiBitLabel passwordPromptLabel1;
private MultiBitButton unlockButton;
private JPasswordField passwordField2;
private MultiBitLabel passwordPromptLabel2;
// Wallet password widgets, enabled only when the active wallet is encrypted.
private JPasswordField walletPasswordField;
private MultiBitLabel walletPasswordPromptLabel;
private JLabel numberOfKeysLabel;
private JLabel replayDateLabel;
private ImportPrivateKeysSubmitAction importPrivateKeysSubmitAction;
private KeyCrypterOpenSSL encrypterDecrypter;
// File filters for the supported key-file formats.
// NOTE(review): public mutable fields — consider private with accessors.
public FileFilter multiBitFileChooser;
public FileFilter myWalletPlainFileChooser;
public FileFilter myWalletEncryptedFileChooser;
private Font adjustedFont;
/**
 * Creates a new {@link ImportPrivateKeysPanel}.
 * Wires the controllers, builds the UI, registers as a wallet-busy listener
 * and prepares the password fields and file filters.
 * Note: statement order matters — {@link #initUI()} must run before the
 * password fields are touched, since it creates them.
 *
 * @param bitcoinController the Bitcoin controller (also used as the core controller)
 * @param mainFrame the application main frame
 */
public ImportPrivateKeysPanel(BitcoinController bitcoinController, MultiBitFrame mainFrame) {
this.bitcoinController = bitcoinController;
this.controller = this.bitcoinController;
this.mainFrame = mainFrame;
setBackground(ColorAndFontConstants.VERY_LIGHT_BACKGROUND_COLOR);
applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
outputFilename = "";
initUI();
// Reflect the current busy state immediately, then listen for changes.
walletBusyChange(this.bitcoinController.getModel().getActivePerWalletModelData().isBusy());
this.bitcoinController.registerWalletBusyListener(this);
enableImportFilePasswordPanel(false);
passwordField1.setText("");
passwordField2.setText("");
// The wallet password row is only useful for scrypt/AES-encrypted wallets.
boolean walletPasswordRequired = false;
if (this.bitcoinController.getModel().getActiveWallet() != null && this.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES) {
walletPasswordRequired = true;
}
enableWalletPassword(walletPasswordRequired);
encrypterDecrypter = new KeyCrypterOpenSSL();
// One filter per supported key-file format.
multiBitFileChooser = new PrivateKeyFileFilter(controller);
myWalletPlainFileChooser = new MyWalletPlainKeyFileFilter();
myWalletEncryptedFileChooser = new MyWalletEncryptedKeyFileFilter();
}
/**
 * Builds the panel: a GridBagLayout stack of wallet panel (row 0), filename
 * panel (row 1), password panel (row 2), button row (row 4), two message
 * labels (rows 5-6) and a help button (row 7), separated by filler rows and
 * wrapped in a scroll pane.
 */
private void initUI() {
setLayout(new BorderLayout());
JPanel mainPanel = new JPanel();
mainPanel.setMinimumSize(new Dimension(550, 160));
mainPanel.setLayout(new GridBagLayout());
mainPanel.setOpaque(false);
mainPanel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
// Localisation keys used to compute a common label width ("stent") so the
// sub-panels line up.
String[] keys = new String[] { "resetTransactionsPanel.walletDescriptionLabel",
"resetTransactionsPanel.walletFilenameLabel", "showExportPrivateKeysPanel.passwordPrompt",
"showExportPrivateKeysPanel.repeatPasswordPrompt", "showImportPrivateKeysPanel.numberOfKeys.text",
"showImportPrivateKeysPanel.replayDate.text" };
int stentWidth = MultiBitTitledPanel.calculateStentWidthForKeys(controller.getLocaliser(), keys, this)
+ ExportPrivateKeysPanel.STENT_DELTA;
GridBagConstraints constraints = new GridBagConstraints();
// Row 0: wallet information.
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 0;
constraints.gridwidth = 2;
constraints.weightx = 1;
constraints.weighty = 1;
constraints.anchor = GridBagConstraints.LINE_START;
JPanel walletPanel = createWalletPanel(stentWidth);
mainPanel.add(walletPanel, constraints);
// Row 1: import-file chooser.
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 1;
constraints.gridwidth = 2;
constraints.weightx = 1;
constraints.weighty = 1;
constraints.anchor = GridBagConstraints.LINE_START;
JPanel filenamePanel = createFilenamePanel(stentWidth);
mainPanel.add(filenamePanel, constraints);
// Row 2: import-file password.
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 2;
constraints.gridwidth = 2;
constraints.weightx = 1;
constraints.weighty = 0.2;
constraints.anchor = GridBagConstraints.LINE_START;
JPanel passwordPanel = createPasswordPanel(stentWidth);
mainPanel.add(passwordPanel, constraints);
// Row 3: spacer.
JLabel filler1 = new JLabel();
filler1.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = 3;
constraints.gridwidth = 1;
constraints.gridheight = 1;
constraints.weightx = 1;
constraints.weighty = 0.1;
constraints.anchor = GridBagConstraints.CENTER;
mainPanel.add(filler1, constraints);
// Row 4: submit-button row.
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 4;
constraints.gridwidth = 1;
constraints.weightx = 0.4;
constraints.weighty = 0.06;
constraints.anchor = GridBagConstraints.LINE_START;
JPanel buttonPanel = createButtonPanel();
mainPanel.add(buttonPanel, constraints);
// Rows 5-6: status message lines, initially empty.
messageLabel1 = new MultiBitLabel("");
messageLabel1.setOpaque(false);
messageLabel1.setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 0));
messageLabel1.setHorizontalAlignment(JLabel.LEADING);
messageLabel1.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 5;
constraints.gridwidth = 3;
constraints.weightx = 1;
constraints.weighty = 0.06;
constraints.anchor = GridBagConstraints.LINE_START;
mainPanel.add(messageLabel1, constraints);
messageLabel2 = new MultiBitLabel("");
messageLabel2.setOpaque(false);
messageLabel2.setBorder(BorderFactory.createEmptyBorder(0, 30, 0, 0));
messageLabel2.setHorizontalAlignment(JLabel.LEADING);
messageLabel2.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 0;
constraints.gridy = 6;
constraints.gridwidth = 3;
constraints.weightx = 1;
constraints.weighty = 0.06;
constraints.anchor = GridBagConstraints.LINE_START;
mainPanel.add(messageLabel2, constraints);
// Row 7: context help button; icon choice follows text direction (LTR/RTL).
Action helpAction;
if (ComponentOrientation.LEFT_TO_RIGHT == ComponentOrientation.getOrientation(controller.getLocaliser().getLocale())) {
helpAction = new HelpContextAction(controller, ImageLoader.HELP_CONTENTS_BIG_ICON_FILE,
"multiBitFrame.helpMenuText", "multiBitFrame.helpMenuTooltip", "multiBitFrame.helpMenuText",
HelpContentsPanel.HELP_IMPORTING_PRIVATE_KEYS_URL);
} else {
helpAction = new HelpContextAction(controller, ImageLoader.HELP_CONTENTS_BIG_RTL_ICON_FILE,
"multiBitFrame.helpMenuText", "multiBitFrame.helpMenuTooltip", "multiBitFrame.helpMenuText",
HelpContentsPanel.HELP_IMPORTING_PRIVATE_KEYS_URL);
}
HelpButton helpButton = new HelpButton(helpAction, controller);
helpButton.setText("");
String tooltipText = HelpContentsPanel.createMultilineTooltipText(new String[] { controller.getLocaliser().getString(
"multiBitFrame.helpMenuTooltip") });
helpButton.setToolTipText(tooltipText);
helpButton.setHorizontalAlignment(SwingConstants.LEADING);
helpButton.setBorder(BorderFactory.createEmptyBorder(0, AbstractTradePanel.HELP_BUTTON_INDENT,
AbstractTradePanel.HELP_BUTTON_INDENT, AbstractTradePanel.HELP_BUTTON_INDENT));
helpButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 0;
constraints.gridy = 7;
constraints.weightx = 1;
constraints.weighty = 0.1;
constraints.gridwidth = 1;
constraints.gridheight = 1;
constraints.anchor = GridBagConstraints.BASELINE_LEADING;
mainPanel.add(helpButton, constraints);
// Row 8: large-weight filler that soaks up remaining vertical space.
JLabel filler2 = new JLabel();
filler2.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = 8;
constraints.gridwidth = 1;
constraints.weightx = 1;
constraints.weighty = 100;
constraints.anchor = GridBagConstraints.CENTER;
mainPanel.add(filler2, constraints);
// Wrap everything in a scroll pane so the panel degrades on small windows.
JScrollPane mainScrollPane = new JScrollPane(mainPanel, JScrollPane.VERTICAL_SCROLLBAR_AS_NEEDED,
JScrollPane.HORIZONTAL_SCROLLBAR_AS_NEEDED);
mainScrollPane.setBorder(BorderFactory.createEmptyBorder());
mainScrollPane.getViewport().setBackground(ColorAndFontConstants.VERY_LIGHT_BACKGROUND_COLOR);
mainScrollPane.getViewport().setOpaque(true);
mainScrollPane.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
mainScrollPane.getHorizontalScrollBar().setUnitIncrement(CoreModel.SCROLL_INCREMENT);
mainScrollPane.getVerticalScrollBar().setUnitIncrement(CoreModel.SCROLL_INCREMENT);
add(mainScrollPane, BorderLayout.CENTER);
}
/**
 * Builds the titled "wallet" sub-panel showing the active wallet's
 * description and filename plus the wallet-password field.
 *
 * @param stentWidth common label-column width shared with the sibling panels
 * @return the populated panel
 */
private JPanel createWalletPanel(int stentWidth) {
MultiBitTitledPanel inputWalletPanel = new MultiBitTitledPanel(controller.getLocaliser().getString(
"showImportPrivateKeysPanel.wallet.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
GridBagConstraints constraints = new GridBagConstraints();
MultiBitTitledPanel.addLeftJustifiedTextAtIndent(
controller.getLocaliser().getString("showImportPrivateKeysPanel.wallet.text"), 3, inputWalletPanel);
// Invisible stents keep the label column aligned with the other panels.
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 4;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(MultiBitTitledPanel.createStent(stentWidth, ExportPrivateKeysPanel.STENT_HEIGHT), constraints);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2;
constraints.gridy = 5;
constraints.weightx = 0.05;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.CENTER;
inputWalletPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS), constraints);
// Row 5: wallet description name/value pair.
MultiBitLabel walletDescriptionLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
"resetTransactionsPanel.walletDescriptionLabel"));
walletDescriptionLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1;
constraints.gridy = 5;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
inputWalletPanel.add(walletDescriptionLabelLabel, constraints);
walletDescriptionLabel = new MultiBitLabel(this.bitcoinController.getModel().getActivePerWalletModelData().getWalletDescription());
walletDescriptionLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3;
constraints.gridy = 5;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(walletDescriptionLabel, constraints);
// Row 6: wallet filename name/value pair.
MultiBitLabel walletFilenameLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
"resetTransactionsPanel.walletFilenameLabel"));
walletFilenameLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1;
constraints.gridy = 6;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
inputWalletPanel.add(walletFilenameLabelLabel, constraints);
walletFilenameLabel = new MultiBitLabel(this.bitcoinController.getModel().getActiveWalletFilename());
walletFilenameLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3;
constraints.gridy = 6;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(walletFilenameLabel, constraints);
// Row 7: spacer.
JPanel filler3 = new JPanel();
filler3.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 7;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(filler3, constraints);
// Row 8: wallet password prompt + field (enabled elsewhere only when the
// active wallet is encrypted).
walletPasswordPromptLabel = new MultiBitLabel(controller.getLocaliser().getString("showExportPrivateKeysPanel.walletPasswordPrompt"));
walletPasswordPromptLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1;
constraints.gridy = 8;
constraints.weightx = 0.3;
constraints.weighty = 0.1;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
inputWalletPanel.add(walletPasswordPromptLabel, constraints);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2;
constraints.gridy = 8;
constraints.weightx = 0.05;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.CENTER;
inputWalletPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS),
constraints);
walletPasswordField = new JPasswordField(24);
walletPasswordField.setMinimumSize(new Dimension(200, 20));
walletPasswordField.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3;
constraints.gridy = 8;
constraints.weightx = 0.3;
constraints.weighty = 0.6;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(walletPasswordField, constraints);
// Row 9: trailing spacer.
JPanel filler4 = new JPanel();
filler4.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 9;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
inputWalletPanel.add(filler4, constraints);
return inputWalletPanel;
}
/**
 * Builds the titled "filename" sub-panel: the chosen import-file path, the
 * number-of-keys and replay-date readouts, and the file-chooser button.
 *
 * @param stentWidth common label-column width shared with the sibling panels
 * @return the populated panel
 */
private JPanel createFilenamePanel(int stentWidth) {
MultiBitTitledPanel outputFilenamePanel = new MultiBitTitledPanel(controller.getLocaliser().getString(
"showImportPrivateKeysPanel.filename.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
MultiBitTitledPanel.addLeftJustifiedTextAtIndent(" ", 1, outputFilenamePanel);
GridBagConstraints constraints = new GridBagConstraints();
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 0;
constraints.gridy = 2;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(MultiBitTitledPanel.getIndentPanel(1), constraints);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 3;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(MultiBitTitledPanel.createStent(stentWidth, ExportPrivateKeysPanel.STENT_HEIGHT), constraints);
chooseFilenameButtonText = "";
String chooseFilenameButtonText1 = controller.getLocaliser().getString("showImportPrivateKeysPanel.filename.text");
String chooseFilenameButtonText2 = controller.getLocaliser().getString("showImportPrivateKeysPanel.filename.text.2");
// If the second term is localised, use that, otherwise the first.
// (An untranslated bundle returns the English default "Import from ...",
// which is how "not localised" is detected here — NOTE(review): fragile if
// the default string ever changes; verify against the resource bundles.)
if (controller.getLocaliser().getLocale().equals(Locale.ENGLISH)) {
chooseFilenameButtonText = chooseFilenameButtonText2;
} else {
if (!"Import from ...".equals(chooseFilenameButtonText2)) {
chooseFilenameButtonText = chooseFilenameButtonText2;
} else {
chooseFilenameButtonText = chooseFilenameButtonText1;
}
}
chooseFilenameButton = new MultiBitButton(chooseFilenameButtonText);
chooseFilenameButton.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("showImportPrivateKeysPanel.filename.tooltip")));
chooseFilenameButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
// Effectively-final copy so the anonymous listener can reference the button.
final MultiBitButton finalChooseFilenameButton = chooseFilenameButton;
chooseFilenameButton.addActionListener(new ActionListener() {
@Override
public void actionPerformed(ActionEvent arg0) {
chooseFile(finalChooseFilenameButton);
}
});
// Row 4: "wallet filename" label + the chosen import-file path.
MultiBitLabel walletFilenameLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
"resetTransactionsPanel.walletFilenameLabel"));
walletFilenameLabelLabel.setHorizontalAlignment(JLabel.TRAILING);
walletFilenameLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 1;
constraints.gridy = 4;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(walletFilenameLabelLabel, constraints);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2;
constraints.gridy = 5;
constraints.weightx = 0.05;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.CENTER;
outputFilenamePanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS),
constraints);
outputFilenameLabel = new MultiBitLabel(outputFilename);
outputFilenameLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 3;
constraints.gridy = 4;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 2;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(outputFilenameLabel, constraints);
// High-weight horizontal filler pushes the name/value pairs leftwards.
JPanel filler0 = new JPanel();
filler0.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 5;
constraints.gridy = 4;
constraints.weightx = 100;
constraints.weighty = 1;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(filler0, constraints);
JPanel filler2 = new JPanel();
filler2.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 5;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler2, constraints);
// Row 6: number-of-keys readout (value filled in after a file is parsed).
MultiBitLabel numberOfKeysLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
"showImportPrivateKeysPanel.numberOfKeys.text"));
numberOfKeysLabelLabel.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser()
.getString("showImportPrivateKeysPanel.numberOfKeys.tooltip")));
numberOfKeysLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1;
constraints.gridy = 6;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(numberOfKeysLabelLabel, constraints);
JPanel filler3 = new JPanel();
filler3.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 2;
constraints.gridy = 6;
constraints.weightx = 0.1;
constraints.weighty = 0.1;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler3, constraints);
numberOfKeysLabel = new MultiBitLabel(" ");
numberOfKeysLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 3;
constraints.gridy = 6;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(numberOfKeysLabel, constraints);
// Row 7: replay-date readout.
MultiBitLabel replayDateLabelLabel = new MultiBitLabel(controller.getLocaliser().getString(
"showImportPrivateKeysPanel.replayDate.text"));
replayDateLabelLabel.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("showImportPrivateKeysPanel.replayDate.tooltip")));
replayDateLabelLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 1;
constraints.gridy = 7;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_END;
outputFilenamePanel.add(replayDateLabelLabel, constraints);
replayDateLabel = new MultiBitLabel(" ");
replayDateLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
constraints.fill = GridBagConstraints.HORIZONTAL;
constraints.gridx = 3;
constraints.gridy = 7;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(replayDateLabel, constraints);
JPanel filler4 = new JPanel();
filler4.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 8;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler4, constraints);
// Row 9: the choose-file button itself.
constraints.fill = GridBagConstraints.NONE;
constraints.gridx = 3;
constraints.gridy = 9;
constraints.weightx = 0.5;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(chooseFilenameButton, constraints);
JPanel filler5 = new JPanel();
filler5.setOpaque(false);
constraints.fill = GridBagConstraints.BOTH;
constraints.gridx = 1;
constraints.gridy = 10;
constraints.weightx = 0.3;
constraints.weighty = 0.3;
constraints.gridwidth = 1;
constraints.anchor = GridBagConstraints.LINE_START;
outputFilenamePanel.add(filler5, constraints);
return outputFilenamePanel;
}
/**
 * Creates the titled "password" panel holding the import-file password controls:
 * an information label, a primary password field, a second password field
 * (hidden until a double-encrypted MyWallet import requires it - see
 * enableSecondPasswordPanel) and the "Unlock" button that decrypts and
 * previews the selected import file.
 *
 * @param stentWidth width of the invisible stent used to align the label column
 * @return the populated password panel
 */
private JPanel createPasswordPanel(int stentWidth) {
    // Do/do not password protect radios.
    MultiBitTitledPanel passwordProtectPanel = new MultiBitTitledPanel(controller.getLocaliser().getString(
            "showImportPrivateKeysPanel.password.title"), ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    GridBagConstraints constraints = new GridBagConstraints();
    passwordInfoLabel = MultiBitTitledPanel.addLeftJustifiedTextAtIndent(
            controller.getLocaliser().getString("showImportPrivateKeysPanel.enterPassword"), 3, passwordProtectPanel);
    passwordInfoLabel.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    // Invisible stent fixing the width of the first (label) column.
    constraints.fill = GridBagConstraints.BOTH;
    constraints.gridx = 1;
    constraints.gridy = 4;
    constraints.weightx = 0.3;
    constraints.weighty = 0.3;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_START;
    passwordProtectPanel.add(MultiBitTitledPanel.createStent(stentWidth, ExportPrivateKeysPanel.STENT_HEIGHT), constraints);
    // First password prompt and field (row 5).
    passwordPromptLabel1 = new MultiBitLabel(controller.getLocaliser().getString("showExportPrivateKeysPanel.passwordPrompt"));
    passwordPromptLabel1.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 1;
    constraints.gridy = 5;
    constraints.weightx = 0.3;
    constraints.weighty = 0.1;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_END;
    passwordProtectPanel.add(passwordPromptLabel1, constraints);
    // Separator stent between the prompt and the field.
    constraints.fill = GridBagConstraints.BOTH;
    constraints.gridx = 2;
    constraints.gridy = 5;
    constraints.weightx = 0.05;
    constraints.weighty = 0.3;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.CENTER;
    passwordProtectPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS),
            constraints);
    passwordField1 = new JPasswordField(24);
    passwordField1.setMinimumSize(new Dimension(200, 20));
    passwordField1.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 3;
    constraints.gridy = 5;
    constraints.weightx = 0.3;
    constraints.weighty = 0.6;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_START;
    passwordProtectPanel.add(passwordField1, constraints);
    // Second password prompt and field (row 6) - initially hidden; shown only
    // for double-encrypted MyWallet imports.
    passwordPromptLabel2 = new MultiBitLabel(controller.getLocaliser().getString("showImportPrivateKeysPanel.secondPassword"));
    passwordPromptLabel2.setVisible(false);
    passwordPromptLabel2.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 1;
    constraints.gridy = 6;
    constraints.weightx = 0.3;
    constraints.weighty = 0.1;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_END;
    passwordProtectPanel.add(passwordPromptLabel2, constraints);
    constraints.fill = GridBagConstraints.BOTH;
    constraints.gridx = 2;
    constraints.gridy = 6;
    constraints.weightx = 0.05;
    constraints.weighty = 0.3;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.CENTER;
    passwordProtectPanel.add(MultiBitTitledPanel.createStent(MultiBitTitledPanel.SEPARATION_BETWEEN_NAME_VALUE_PAIRS),
            constraints);
    passwordField2 = new JPasswordField(24);
    passwordField2.setMinimumSize(new Dimension(200, 20));
    passwordField2.setVisible(false);
    passwordField2.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 3;
    constraints.gridy = 6;
    constraints.weightx = 0.3;
    constraints.weighty = 0.6;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_START;
    passwordProtectPanel.add(passwordField2, constraints);
    // Small fixed-size filler (row 7).
    JLabel filler3 = new JLabel();
    filler3.setMinimumSize(new Dimension(3, 3));
    filler3.setMaximumSize(new Dimension(3, 3));
    filler3.setPreferredSize(new Dimension(3, 3));
    filler3.setOpaque(false);
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 1;
    constraints.gridy = 7;
    constraints.weightx = 0.1;
    constraints.weighty = 0.1;
    constraints.gridwidth = 1;
    constraints.gridheight = 1;
    constraints.anchor = GridBagConstraints.CENTER;
    passwordProtectPanel.add(filler3, constraints);
    // "Unlock" button: decrypts the import file with the entered password and
    // shows its details (number of keys, replay date).
    unlockButton = new MultiBitButton(controller.getLocaliser().getString("showImportPrivateKeysPanel.unlock.text"));
    unlockButton.addActionListener(new ActionListener() {
        @Override
        public void actionPerformed(ActionEvent arg0) {
            setMessageText1(" ");
            try {
                readInImportFileAndUpdateDetails();
            } catch (KeyCrypterException ede) {
                setMessageText1(controller.getLocaliser().getString("importPrivateKeysSubmitAction.privateKeysUnlockFailure",
                        new Object[] { ede.getMessage() }));
            }
        }
    });
    unlockButton.setToolTipText(HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("showImportPrivateKeysPanel.unlock.tooltip")));
    unlockButton.setEnabled(false);
    unlockButton.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    constraints.fill = GridBagConstraints.NONE;
    constraints.gridx = 3;
    constraints.gridy = 8;
    constraints.weightx = 0.3;
    constraints.weighty = 0.6;
    constraints.gridwidth = 3;
    constraints.anchor = GridBagConstraints.LINE_START;
    passwordProtectPanel.add(unlockButton, constraints);
    // Final filler row (row 9).
    JPanel filler5 = new JPanel();
    filler5.setOpaque(false);
    constraints.fill = GridBagConstraints.BOTH;
    constraints.gridx = 1;
    constraints.gridy = 9;
    constraints.weightx = 0.3;
    constraints.weighty = 0.3;
    constraints.gridwidth = 1;
    constraints.anchor = GridBagConstraints.LINE_START;
    // Bug fix: previously filler3 was added here a second time, which reparented
    // it away from row 7 and left filler5 unused. Add filler5 as intended.
    passwordProtectPanel.add(filler5, constraints);
    return passwordProtectPanel;
}
/**
 * Builds the button panel containing the single "import private keys" submit
 * button, laid out leading-edge first so it honours RTL locales.
 *
 * @return the populated button panel
 */
private JPanel createButtonPanel() {
    JPanel panel = new JPanel();
    panel.setOpaque(false);
    FlowLayout layout = new FlowLayout();
    layout.setAlignment(FlowLayout.LEADING);
    panel.setLayout(layout);
    ComponentOrientation orientation = ComponentOrientation.getOrientation(controller.getLocaliser().getLocale());
    panel.applyComponentOrientation(orientation);
    // The action is kept in a field so walletBusyChange() can toggle it later.
    importPrivateKeysSubmitAction = new ImportPrivateKeysSubmitAction(this.bitcoinController, mainFrame, this,
            ImageLoader.createImageIcon(ImageLoader.IMPORT_PRIVATE_KEYS_ICON_FILE), walletPasswordField, passwordField1, passwordField2);
    MultiBitButton importButton = new MultiBitButton(importPrivateKeysSubmitAction, controller);
    importButton.applyComponentOrientation(orientation);
    panel.add(importButton);
    return panel;
}
@Override
public void displayView(DisplayHint displayHint) {
    // Transaction-only changes do not affect anything shown on this screen.
    if (DisplayHint.WALLET_TRANSACTIONS_HAVE_CHANGED == displayHint) {
        return;
    }
    walletFilenameLabel.setText(this.bitcoinController.getModel().getActiveWalletFilename());
    walletDescriptionLabel.setText(this.bitcoinController.getModel().getActivePerWalletModelData().getWalletDescription());
    // The wallet password entry is only relevant for scrypt/AES encrypted wallets.
    boolean requireWalletPassword = this.bitcoinController.getModel().getActiveWallet() != null
            && this.bitcoinController.getModel().getActiveWallet().getEncryptionType() == EncryptionType.ENCRYPTED_SCRYPT_AES;
    enableWalletPassword(requireWalletPassword);
    walletBusyChange(this.bitcoinController.getModel().getActivePerWalletModelData().isBusy());
    // Show the placeholder text while no import file has been chosen.
    if (outputFilename == null || "".equals(outputFilename)) {
        outputFilenameLabel.setText(controller.getLocaliser().getString("showImportPrivateKeysPanel.noFileSelected"));
    }
    // Clear any stale status messages.
    messageLabel1.setText(" ");
    messageLabel2.setText(" ");
}
@Override
public void navigateAwayFromView() {
    // No state to persist or clear when the user leaves this screen.
}
/**
 * Shows a file chooser for the import file and, on selection, records the
 * chosen filename and pre-reads the file to decide whether an import
 * password is required. The calling button is disabled while the chooser is
 * open and re-enabled afterwards.
 *
 * @param callingButton the button that triggered the chooser; disabled for the duration
 */
private void chooseFile(MultiBitButton callingButton) {
    JFileChooser.setDefaultLocale(controller.getLocaliser().getLocale());
    fileChooser = new JFileChooser();
    fileChooser.setLocale(controller.getLocaliser().getLocale());
    fileChooser.setDialogTitle(chooseFilenameButtonText);
    adjustedFont = FontSizer.INSTANCE.getAdjustedDefaultFont();
    if (adjustedFont != null) {
        // Restyle the look-and-feel-created chooser children with the adjusted font.
        setFileChooserFont(new Container[] {fileChooser});
    }
    fileChooser.applyComponentOrientation(ComponentOrientation.getOrientation(controller.getLocaliser().getLocale()));
    fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
    fileChooser.addChoosableFileFilter(multiBitFileChooser);
    // fileChooser.addChoosableFileFilter(myWalletPlainFileChooser);
    // fileChooser.addChoosableFileFilter(myWalletEncryptedFileChooser);
    fileChooser.setAcceptAllFileFilterUsed(false);
    fileChooser.setFileFilter(multiBitFileChooser);
    if (outputFilename != null && !"".equals(outputFilename)) {
        // Reopen at the previously chosen file.
        fileChooser.setCurrentDirectory(new File(outputFilename));
        fileChooser.setSelectedFile(new File(outputFilename));
    } else {
        // Default to the active wallet's directory with an "untitled" key filename.
        if (this.bitcoinController.getModel().getActiveWalletFilename() != null) {
            fileChooser.setCurrentDirectory(new File(this.bitcoinController.getModel().getActiveWalletFilename()));
        }
        String defaultFileName = fileChooser.getCurrentDirectory().getAbsoluteFile() + File.separator
                + controller.getLocaliser().getString("saveWalletAsView.untitled") + "."
                + BitcoinModel.PRIVATE_KEY_FILE_EXTENSION;
        fileChooser.setSelectedFile(new File(defaultFileName));
    }
    try {
        callingButton.setEnabled(false);
        mainFrame.setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
        fileChooser.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        int returnVal = fileChooser.showOpenDialog(mainFrame);
        if (returnVal == JFileChooser.APPROVE_OPTION) {
            // Clear details shown for any previously selected file.
            numberOfKeysLabel.setText(" ");
            replayDateLabel.setText(" ");
            passwordField1.setText("");
            File file = fileChooser.getSelectedFile();
            if (file != null) {
                outputFilename = file.getAbsolutePath();
                outputFilenameLabel.setText(outputFilename);
                if (multiBitFileChooser.accept(file)) {
                    try {
                        // Sniff the first line for the OpenSSL magic text to detect encryption.
                        String firstLine = readFirstLineInFile(file);
                        if (firstLine != null && firstLine.startsWith(encrypterDecrypter.getOpenSSLMagicText())) {
                            // File is encrypted.
                            enableImportFilePasswordPanel(true);
                            passwordField1.requestFocusInWindow();
                        } else {
                            // File is not encrypted.
                            enableImportFilePasswordPanel(false);
                            readInImportFileAndUpdateDetails();
                        }
                    } catch (IOException e) {
                        setMessageText1(controller.getLocaliser().getString(
                                "importPrivateKeysSubmitAction.privateKeysImportFailure",
                                new Object[] { e.getClass().getName() + " " + e.getMessage() }));
                    } catch (KeyCrypterException e) {
                        // TODO User may not have entered a password yet so
                        // password incorrect is ok at this stage.
                        // Other errors indicate a more general problem with
                        // the
                        // import.
                        setMessageText1(controller.getLocaliser().getString(
                                "importPrivateKeysSubmitAction.privateKeysImportFailure",
                                new Object[] { e.getClass().getName() + " " + e.getMessage() }));
                    }
                } else if (myWalletEncryptedFileChooser.accept(file)) {
                    // Encrypted MyWallet backup - always needs a password.
                    enableImportFilePasswordPanel(true);
                    passwordField1.requestFocusInWindow();
                } else if (myWalletPlainFileChooser.accept(file)) {
                    // File is not encrypted.
                    enableImportFilePasswordPanel(false);
                    try {
                        readInImportFileAndUpdateDetails();
                    } catch (KeyCrypterException e) {
                        setMessageText1(controller.getLocaliser().getString(
                                "importPrivateKeysSubmitAction.privateKeysImportFailure",
                                new Object[] { e.getClass().getName() + " " + e.getMessage() }));
                    }
                }
            }
        }
    } finally {
        // Always restore the cursor and re-enable the button, even on error.
        mainFrame.setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        callingButton.setEnabled(true);
    }
}
/**
 * Shows and enables (or hides and disables) the second-password prompt and
 * field together, used for double-encrypted MyWallet imports.
 *
 * @param enablePanel true to show and enable the controls
 */
private void enableSecondPasswordPanel(boolean enablePanel) {
    passwordPromptLabel2.setVisible(enablePanel);
    passwordPromptLabel2.setEnabled(enablePanel);
    passwordField2.setVisible(enablePanel);
    passwordField2.setEnabled(enablePanel);
}
/**
 * Enables or disables the import-file password controls (prompt, field and
 * unlock button) as a group. The info label is dimmed to grey when disabled.
 *
 * @param enableImportFilePanel true to enable the password controls
 */
private void enableImportFilePasswordPanel(boolean enableImportFilePanel) {
    // Both branches of the original if/else only differed in the boolean, so
    // pass the flag through directly.
    passwordPromptLabel1.setEnabled(enableImportFilePanel);
    passwordField1.setEnabled(enableImportFilePanel);
    unlockButton.setEnabled(enableImportFilePanel);
    passwordInfoLabel.setForeground(enableImportFilePanel ? Color.BLACK : Color.GRAY);
}
/**
 * Enables or disables the wallet password field and its prompt label.
 *
 * @param enableWalletPassword true when the active wallet is encrypted and needs a password
 */
private void enableWalletPassword(boolean enableWalletPassword) {
    // Pass the flag straight through instead of duplicating both branches.
    walletPasswordField.setEnabled(enableWalletPassword);
    walletPasswordPromptLabel.setEnabled(enableWalletPassword);
}
/**
 * Reads the currently selected import file and updates the "number of keys"
 * and "replay date" details shown on the panel. Handles three input formats:
 * MultiBit .key exports, encrypted MyWallet backups and plain MyWallet
 * backups.
 *
 * @throws PrivateKeysHandlerException if the key file cannot be parsed
 * @throws KeyCrypterException if the file cannot be decrypted (e.g. wrong password)
 */
private void readInImportFileAndUpdateDetails() throws PrivateKeysHandlerException, KeyCrypterException {
    // Update number of keys and earliest date.
    try {
        File file = new File(outputFilename);
        if (multiBitFileChooser.accept(file)) {
            // Read in contents of file.
            setCursor(Cursor.getPredefinedCursor(Cursor.WAIT_CURSOR));
            PrivateKeysHandler privateKeysHandler = new PrivateKeysHandler(this.bitcoinController.getModel().getNetworkParameters());
            Collection<PrivateKeyAndDate> privateKeyAndDates = privateKeysHandler.readInPrivateKeys(new File(outputFilename),
                    CharBuffer.wrap(passwordField1.getPassword()));
            numberOfKeysLabel.setText("" + privateKeyAndDates.size());
            Date replayDate = privateKeysHandler.calculateReplayDate(privateKeyAndDates, this.bitcoinController.getModel()
                    .getActiveWallet());
            if (replayDate == null) {
                replayDateLabel.setText(controller.getLocaliser().getString(
                        "showImportPrivateKeysPanel.thereWereMissingKeyDates"));
            } else {
                replayDateLabel.setText(DateFormat.getDateInstance(DateFormat.MEDIUM, controller.getLocaliser().getLocale())
                        .format(replayDate));
            }
            setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
        } else if (myWalletEncryptedFileChooser.accept(file)) {
            try {
                String importFileContents = PrivateKeysHandler.readFile(file);
                String mainPassword = new String(passwordField1.getPassword());
                String secondPassword = new String(passwordField2.getPassword());
                MyWallet wallet = new MyWallet(importFileContents, mainPassword);
                boolean needSecondPassword = false;
                if (wallet.isDoubleEncrypted()) {
                    if ("".equals(secondPassword)) {
                        // A double-encrypted wallet needs a second password before
                        // the keys can be read - prompt for it and bail out.
                        needSecondPassword = true;
                        requestSecondPassword();
                    }
                }
                if (!needSecondPassword) {
                    wallet.setTemporySecondPassword(secondPassword);
                    int numberOfKeys = 0;
                    if (wallet.getBitcoinJWallet() != null && wallet.getBitcoinJWallet().getKeychain() != null) {
                        numberOfKeys = wallet.getBitcoinJWallet().getKeychainSize();
                    }
                    numberOfKeysLabel.setText("" + numberOfKeys);
                    replayDateLabel.setText(controller.getLocaliser().getString(
                            "showImportPrivateKeysPanel.thereWereMissingKeyDates"));
                }
            } catch (Exception e) {
                // Bug fix: preserve the underlying cause so the real decryption
                // failure is not lost.
                throw new KeyCrypterException("Error Decrypting Wallet", e);
            }
        } else if (myWalletPlainFileChooser.accept(file)) {
            try {
                String importFileContents = PrivateKeysHandler.readFile(file);
                MyWallet wallet = new MyWallet(importFileContents);
                int numberOfKeys = 0;
                if (wallet.getBitcoinJWallet() != null && wallet.getBitcoinJWallet().getKeychain() != null) {
                    numberOfKeys = wallet.getBitcoinJWallet().getKeychainSize();
                }
                numberOfKeysLabel.setText("" + numberOfKeys);
                replayDateLabel.setText(controller.getLocaliser().getString(
                        "showImportPrivateKeysPanel.thereWereMissingKeyDates"));
            } catch (Exception e) {
                // Bug fix: preserve the underlying cause so the real parse
                // failure is not lost.
                throw new KeyCrypterException("Error Opening Wallet", e);
            }
        }
    } finally {
        // Always restore the default cursor, even when an exception is thrown.
        setCursor(Cursor.getPredefinedCursor(Cursor.DEFAULT_CURSOR));
    }
}
/**
 * Reveals the second-password controls and prompts the user to enter the
 * second password (used for double-encrypted MyWallet imports).
 */
public void requestSecondPassword() {
    enableSecondPasswordPanel(true);
    setMessageText1(controller.getLocaliser().getString("importPrivateKeysSubmitAction.enterTheSecondPassword"));
}
/** @return the absolute path of the selected import file, or null/"" if none chosen yet. */
public String getOutputFilename() {
    return outputFilename;
}
/**
 * Blanks all password fields (the wallet password and both import-file
 * passwords). All fields are null-guarded consistently - the original only
 * guarded passwordField2, so a call before UI construction could NPE.
 */
public void clearPasswords() {
    if (walletPasswordField != null) {
        walletPasswordField.setText("");
    }
    if (passwordField1 != null) {
        passwordField1.setText("");
    }
    if (passwordField2 != null) {
        passwordField2.setText("");
    }
}
/** Sets the first status message shown on the panel (no-op before the label exists). */
public void setMessageText1(String message1) {
    if (messageLabel1 != null) {
        messageLabel1.setText(message1);
    }
}
/** @return the first status message, or "" if the label has not been created yet. */
public String getMessageText1() {
    if (messageLabel1 != null) {
        return messageLabel1.getText();
    } else {
        return "";
    }
}
/** Sets the second status message shown on the panel (no-op before the label exists). */
public void setMessageText2(String message2) {
    if (messageLabel2 != null) {
        messageLabel2.setText(message2);
    }
}
/** @return the second status message, or "" if the label has not been created yet. */
public String getMessageText2() {
    if (messageLabel2 != null) {
        return messageLabel2.getText();
    } else {
        return "";
    }
}
/**
 * Reads the first line of the given file, used to sniff for the OpenSSL
 * magic text that marks an encrypted private key export.
 *
 * @param file the file to inspect
 * @return the first line of the file, or null if the file is empty
 * @throws IOException if the file cannot be read
 */
private String readFirstLineInFile(File file) throws IOException {
    BufferedReader reader = new BufferedReader(new FileReader(file));
    try {
        return reader.readLine();
    } finally {
        // Bug fix: the reader was previously never closed, leaking a file handle
        // on every call.
        reader.close();
    }
}
@Override
public Icon getViewIcon() {
    // Icon shown on the tab/menu entry for this view.
    return ImageLoader.createImageIcon(ImageLoader.IMPORT_PRIVATE_KEYS_ICON_FILE);
}
@Override
public String getViewTitle() {
    // Localised title of this view.
    return controller.getLocaliser().getString("showImportPrivateKeysAction.text");
}
@Override
public String getViewTooltip() {
    // Localised tooltip of this view.
    return controller.getLocaliser().getString("showImportPrivateKeysAction.tooltip");
}
@Override
public View getViewId() {
    return View.SHOW_IMPORT_PRIVATE_KEYS_VIEW;
}
// Used in testing.
/** @return the "Unlock" button, exposed for tests only. */
public MultiBitButton getUnlockButton() {
    return unlockButton;
}
/** @return the submit action driving the import, exposed for tests only. */
public ImportPrivateKeysSubmitAction getImportPrivateKeysSubmitAction() {
    return importPrivateKeysSubmitAction;
}
/** Sets the import filename directly (used in testing; does not update the UI label). */
public void setOutputFilename(String outputFilename) {
    this.outputFilename = outputFilename;
}
/** Fills the import-file password field (used in testing). */
public void setImportFilePassword(CharSequence password) {
    passwordField1.setText(password.toString());
}
/** Fills the wallet password field (used in testing). */
public void setWalletPassword(CharSequence password) {
    walletPasswordField.setText(password.toString());
}
/** @return whether the wallet password field is currently enabled (used in testing). */
public boolean isWalletPasswordFieldEnabled() {
    return walletPasswordField.isEnabled();
}
@Override
public void walletBusyChange(boolean newWalletIsBusy) {
    // Update the enable status of the action to match the wallet busy status.
    // Note: the passed-in flag is ignored; the model is re-queried directly.
    if (this.bitcoinController.getModel().getActivePerWalletModelData().isBusy()) {
        // Wallet is busy with another operation that may change the private keys - Action is disabled.
        importPrivateKeysSubmitAction.putValue(Action.SHORT_DESCRIPTION, HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("multiBitSubmitAction.walletIsBusy",
                new Object[]{controller.getLocaliser().getString(this.bitcoinController.getModel().getActivePerWalletModelData().getBusyTaskKey())})));
        importPrivateKeysSubmitAction.setEnabled(false);
    } else {
        // Enable unless wallet has been modified by another process.
        // (If files HAVE been changed externally, the action's current state is
        // deliberately left untouched.)
        if (!this.bitcoinController.getModel().getActivePerWalletModelData().isFilesHaveBeenChangedByAnotherProcess()) {
            importPrivateKeysSubmitAction.putValue(Action.SHORT_DESCRIPTION, HelpContentsPanel.createTooltipText(controller.getLocaliser().getString("importPrivateKeysSubmitAction.tooltip")));
            importPrivateKeysSubmitAction.setEnabled(true);
        }
    }
}
/**
 * Recursively applies the adjusted font to every component in the given
 * component tree. Used to restyle the JFileChooser, whose children are
 * created by the look-and-feel.
 *
 * @param comp the components to restyle; containers are descended into
 */
private void setFileChooserFont(Component[] comp) {
    for (Component component : comp) {
        if (component instanceof Container) {
            setFileChooserFont(((Container) component).getComponents());
        }
        try {
            component.setFont(adjustedFont);
        } catch (Exception ignored) {
            // Best effort: some look-and-feel components reject font changes.
        }
    }
}
}
| |
package com.sap.cloud.lm.sl.cf.core.dao;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.text.MessageFormat;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

import javax.persistence.EntityManagerFactory;
import javax.persistence.Persistence;

import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.experimental.runners.Enclosed;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

import com.fasterxml.jackson.core.type.TypeReference;
import com.sap.cloud.lm.sl.cf.core.message.Messages;
import com.sap.cloud.lm.sl.cf.core.model.CloudTarget;
import com.sap.cloud.lm.sl.cf.core.model.ConfigurationEntry;
import com.sap.cloud.lm.sl.common.NotFoundException;
import com.sap.cloud.lm.sl.common.SLException;
import com.sap.cloud.lm.sl.common.util.JsonUtil;
import com.sap.cloud.lm.sl.common.util.MapUtil;
import com.sap.cloud.lm.sl.common.util.TestCase;
import com.sap.cloud.lm.sl.common.util.TestInput;
import com.sap.cloud.lm.sl.common.util.TestUtil;
import com.sap.cloud.lm.sl.common.util.Tester;
import com.sap.cloud.lm.sl.common.util.Tester.Expectation;
import com.sap.cloud.lm.sl.mta.model.Version;
@RunWith(Enclosed.class)
public class ConfigurationEntryDaoTest {
private static final EntityManagerFactory EMF = Persistence.createEntityManagerFactory("TestDefault");
private static final Tester TESTER = Tester.forClass(ConfigurationEntryDaoTest.class);
@RunWith(Parameterized.class)
public static class ConfigurationEntryDaoParameterizedTest {
private static final String DATABASE_CONTENT_LOCATION = "configuration-registry-content.json";
private ConfigurationEntryDao dao = createDao();
private TestCase<TestInput> test;
public ConfigurationEntryDaoParameterizedTest(TestCase<TestInput> test) {
this.test = test;
}
@Parameters
public static List<Object[]> getParameters() throws Exception {
return Arrays.asList(new Object[][] {
// @formatter:off
// (0)
{
new AddTest(new AddTestInput("configuration-entry-07.json"),
new Expectation(Expectation.Type.JSON, "configuration-entry-07.json")),
},
// (1)
{
new AddTest(new AddTestInput("configuration-entry-01.json"),
new Expectation(Expectation.Type.EXCEPTION, "Configuration entry with namespace ID \"n-1\", ID \"id-1\", version \"1.0.0\", target org \"org-1\" and target space \"space-1\", already exists")),
},
// (2)
{
new FindTest(new FindTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-01.json")),
},
// (3)
{
new RemoveTest(new RemoveTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), null, null, ""),
new Expectation(null)),
},
// (4)
{
new FindAllTest(new FindTestInput(null, "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-04.json")),
},
// (5)
{
new FindAllTest(new FindTestInput("n-3", "id-3", "3.0.0", new CloudTarget("org-3", "space-3"), null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-05.json")),
},
// (6)
{
new FindAllTest(new FindTestInput("n-1", null, null, new CloudTarget("org-1", "space-1"), null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-06.json")),
},
// (7)
{
new AddTest(new AddTestInput("configuration-entry-08.json"),
new Expectation(Expectation.Type.JSON, "configuration-entry-08.json")),
},
// (8)
{
new FindAllTest(new FindTestInput(null, null, null, null, null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-registry-content.json")),
},
// (9)
{
new FindAllTest(new FindTestInput(null, null, "> 1.0.0", null, null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-09.json")),
},
// (10)
{
new FindAllTest(new FindTestInput(null, null, "= 1.0.0", null, null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-10.json")),
},
// (11)
{
new FindAllTest(new FindTestInput(null, null, ">=1.0.0", null, null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-11.json")),
},
// (12)
{
new FindAllTest(new FindTestInput(null, null, ">=1.0.0", null, MapUtil.asMap("type", "test"), null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-12.json")),
},
// (13)
{
new RemoveTest(new RemoveTestInput(null, "id-2", null, new CloudTarget("org-2", "space-2"), null, null, ""),
new Expectation(null)),
},
// (14)
{
new FindTest(new FindTestInput(null, "id-6", null, new CloudTarget("org-6", "space-6"), null, null, null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-06.json")),
},
// (15)
{
new UpdateTest(new UpdateTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), "configuration-entry-dao-test-input-18.json", null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-15.json")),
},
// (16)
{
new UpdateTest(new UpdateTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), "configuration-entry-dao-test-input-19.json", null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-16.json")),
},
// (17)
{
new UpdateTest(new UpdateTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), "configuration-entry-dao-test-input-20.json", null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-17.json")),
},
// (18)
{
new UpdateTest(new UpdateTestInput("n-1", "id-1", "1.0.0", new CloudTarget("org-1", "space-1"), "configuration-entry-dao-test-input-21.json", null, ""),
new Expectation(Expectation.Type.EXCEPTION, "Configuration entry with namespace ID \"n-2\", ID \"id-2\", version \"2.0.0\", target org \"org-2\" and target space \"space-2\", already exists")),
},
// (19)
{
new FindAllTest(new FindTestInput(null, null, null, null, null, "com.sap.example.mta", null, ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-19.json")),
},
// (20)
{
new FindAllTest(new FindTestInput(null, "id-2", null, null, null, null, Arrays.asList(new CloudTarget("org-2", "space-2")), ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-20.json")),
},
// (21)
{
new FindAllTest(new FindTestInput(null, null, "0.1.0", null, null, null, Arrays.asList(new CloudTarget("myorg", "*")), ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-21.json")),
},
// (22)
{
new FindAllTest(new FindTestInput(null, null, null, null, MapUtil.asMap("type", "test"), null, Arrays.asList(new CloudTarget("org-3", "space-3")), ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-22.json")),
},
// (23)
{
new FindAllTest(new FindTestInput("n-2", null, null, null, null, null, Arrays.asList(new CloudTarget("*", "")), ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-23.json")),
},
// (24)
{
new FindAllTest(new FindTestInput(null, null, null, null, MapUtil.asMap("type", "test"), null, Arrays.asList(new CloudTarget("org-3", "space-3"), new CloudTarget("org-4", "space-4")), ""),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-24.json")),
},
// (25)
{
new FindAllTest(new FindTestInput(null, "s-2", null, null, null, null, Arrays.asList(new CloudTarget("myorg1", "myspace1")), ""),
new Expectation("[]")),
},
// (26)
{
new FindAllGuidTest(new FindTestInput(null, null, null, null, null, null, null, "fbd3dc79-1a54-4a70-8022-ab716643809b"),
new Expectation(Expectation.Type.JSON, "configuration-entry-dao-test-output-26.json")),
}
// @formatter:on
});
}
@Before
public void prepare() throws Exception {
List<ConfigurationEntry> entries = JsonUtil.convertJsonToList(
TestUtil.getResourceAsString(DATABASE_CONTENT_LOCATION, getClass()), new TypeReference<List<ConfigurationEntry>>() {
});
for (ConfigurationEntry entry : entries) {
dao.add(entry);
}
}
@After
public void clearDatabase() throws Exception {
for (ConfigurationEntry entry : dao.findAll()) {
dao.remove(entry.getId());
}
}
@Test
public void test() throws Exception {
test.run();
}
private static class RemoveTestInput extends FindTestInput {
public RemoveTestInput(String nid, String id, String version, CloudTarget target, Map<String, Object> requiredProperties,
List<CloudTarget> cloudTargets, String spaceId) {
super(nid, id, version, target, requiredProperties, null, cloudTargets, spaceId);
}
}
private static class UpdateTestInput extends FindTestInput {
public ConfigurationEntry configurationEntry;
public UpdateTestInput(String nid, String id, String version, CloudTarget target, String configurationEntryLocation,
List<CloudTarget> cloudTarget, String spaceId) throws Exception {
super(nid, id, version, target, Collections.emptyMap(), null, cloudTarget, spaceId);
configurationEntry = TestInput.loadJsonInput(configurationEntryLocation, ConfigurationEntry.class, getClass());
}
}
private static class FindTestInput extends TestInput {
public String nid, id, version, mtaId, spaceId;
public CloudTarget target;
public Map<String, Object> requiredProperties;
public List<CloudTarget> cloudTargets;
public FindTestInput(String nid, String id, String version, CloudTarget target, Map<String, Object> requiredProperties,
String mtaId, List<CloudTarget> cloudTargets, String spaceId) {
this.version = version;
this.nid = nid;
this.target = target;
this.id = id;
this.requiredProperties = requiredProperties;
this.mtaId = mtaId;
this.cloudTargets = cloudTargets;
this.spaceId = spaceId;
}
}
private static class AddTestInput extends TestInput {
public ConfigurationEntry configurationEntry;
public AddTestInput(String configurationEntryLocation) throws Exception {
configurationEntry = TestInput.loadJsonInput(configurationEntryLocation, ConfigurationEntry.class, getClass());
}
}
private static class RemoveTest extends TestCase<RemoveTestInput> {
ConfigurationEntryDao dao = createDao();
public RemoveTest(RemoveTestInput input, Expectation expectation) {
super(input, expectation);
}
@Override
public void test() {
TESTER.test(() -> {
ConfigurationEntry entry = findConfigurationEntries(input, dao).get(0);
dao.remove(entry.getId());
assertTrue(!dao.exists(entry.getId()));
}, expectation);
}
}
private static class UpdateTest extends TestCase<UpdateTestInput> {
ConfigurationEntryDao dao = createDao();
public UpdateTest(UpdateTestInput input, Expectation expectation) {
super(input, expectation);
}
@Override
protected void test() throws Exception {
TESTER.test(() -> {
return removeId(dao.update(findConfigurationEntries(input, dao).get(0)
.getId(), input.configurationEntry));
}, expectation);
}
}
private static class AddTest extends TestCase<AddTestInput> {
public AddTest(AddTestInput input, Expectation expectation) {
super(input, expectation);
}
ConfigurationEntryDao dao = createDao();
@Override
public void test() {
TESTER.test(() -> {
return removeId(dao.add(input.configurationEntry));
}, expectation);
}
}
private static class FindTest extends TestCase<FindTestInput> {
public FindTest(FindTestInput input, Expectation expectation) {
super(input, expectation);
}
ConfigurationEntryDao dao = createDao();
@Override
public void test() {
TESTER.test(() -> {
return removeId(dao.find(findConfigurationEntries(input, dao).get(0)
.getId()));
}, expectation);
}
}
private static class FindAllTest extends TestCase<FindTestInput> {
public FindAllTest(FindTestInput input, Expectation expectation) {
super(input, expectation);
}
@Override
public void test() {
TESTER.test(() -> {
return removeIds(findConfigurationEntries(input, createDao()));
}, expectation);
}
}
private static class FindAllGuidTest extends TestCase<FindTestInput> {
public FindAllGuidTest(FindTestInput input, Expectation expectation) {
super(input, expectation);
}
@Override
public void test() {
TESTER.test(() -> {
return removeIds(findConfigurationEntriesGuid(input, createDao()));
}, expectation);
}
}
private static List<ConfigurationEntry> findConfigurationEntries(FindTestInput input, ConfigurationEntryDao dao) {
return dao.find(input.nid, input.id, input.version, input.target, input.requiredProperties, input.mtaId, input.cloudTargets);
}
private static List<ConfigurationEntry> findConfigurationEntriesGuid(FindTestInput input, ConfigurationEntryDao dao) {
return dao.find(input.spaceId);
}
}
/**
 * Standard negative-path tests: find/update/remove on an id that is guaranteed
 * not to exist must fail with the CONFIGURATION_ENTRY_NOT_FOUND message.
 */
public static class ConfigurationEntryDaoStandardTest {

    private final ConfigurationEntryDao dao = createDao();

    /** find(id) on an unused id must throw NotFoundException. */
    @Test
    public void testFind() {
        long id = getUnusedId(dao);
        try {
            dao.find(id);
            fail();
        } catch (NotFoundException e) {
            assertEquals(MessageFormat.format(Messages.CONFIGURATION_ENTRY_NOT_FOUND, id), e.getMessage());
        }
    }

    /** update(id, ...) on an unused id must throw an SLException. */
    @Test
    public void testUpdate() {
        long id = getUnusedId(dao);
        try {
            dao.update(id, new ConfigurationEntry("", "", Version.parseVersion("1.0.0"), new CloudTarget("", ""), "", null, ""));
            fail();
        } catch (SLException e) {
            assertEquals(MessageFormat.format(Messages.CONFIGURATION_ENTRY_NOT_FOUND, id), e.getMessage());
        }
    }

    /** remove(id) on an unused id must throw NotFoundException. */
    @Test
    public void testRemove() {
        long id = getUnusedId(dao);
        try {
            dao.remove(id);
            fail();
        } catch (NotFoundException e) {
            assertEquals(MessageFormat.format(Messages.CONFIGURATION_ENTRY_NOT_FOUND, id), e.getMessage());
        }
    }

    /**
     * Returns an id no entry currently uses.
     *
     * Bug fix: the previous condition {@code id <= Long.MAX_VALUE} is true for
     * every long, so on overflow the counter wrapped negative and the loop never
     * terminated; {@code id >= 0} exits cleanly once the positive range is exhausted.
     */
    private long getUnusedId(ConfigurationEntryDao dao) {
        for (long id = 0; id >= 0; id++) {
            if (!dao.exists(id)) {
                return id;
            }
        }
        // Unreachable in practice: would require every non-negative long to be taken.
        throw new UnsupportedOperationException();
    }
}
/** Normalizes every entry in the list by zeroing its DB-generated id. */
private static List<ConfigurationEntry> removeIds(List<ConfigurationEntry> entries) {
    return entries.stream()
        .map(entry -> removeId(entry))
        .collect(Collectors.toList());
}
/**
 * Zeroes the DB-generated id so entries can be compared by content.
 * Note: mutates the given entry in place and returns the same instance.
 */
private static ConfigurationEntry removeId(ConfigurationEntry entry) {
entry.setId(0);
return entry;
}
/**
 * Builds a ConfigurationEntryDao wired to the shared test EntityManagerFactory,
 * with its DTO-level delegate injected manually (no DI container in tests).
 */
private static ConfigurationEntryDao createDao() {
ConfigurationEntryDtoDao dtoDao = new ConfigurationEntryDtoDao();
dtoDao.entityManagerFactory = EMF;
ConfigurationEntryDao dao = new ConfigurationEntryDao();
dao.dao = dtoDao;
return dao;
}
}
| |
/*
* Copyright 2012 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.i18ndirectives;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.template.soy.base.SoySyntaxException;
import com.google.template.soy.data.SoyValue;
import com.google.template.soy.data.restricted.NumberData;
import com.google.template.soy.data.restricted.StringData;
import com.google.template.soy.internal.targetexpr.TargetExpr;
import com.google.template.soy.jssrc.restricted.JsExpr;
import com.google.template.soy.jssrc.restricted.SoyLibraryAssistedJsSrcPrintDirective;
import com.google.template.soy.phpsrc.restricted.PhpExpr;
import com.google.template.soy.phpsrc.restricted.PhpExprUtils;
import com.google.template.soy.phpsrc.restricted.PhpFunctionExprBuilder;
import com.google.template.soy.phpsrc.restricted.SoyPhpSrcPrintDirective;
import com.google.template.soy.pysrc.restricted.PyExpr;
import com.google.template.soy.pysrc.restricted.PyExprUtils;
import com.google.template.soy.pysrc.restricted.PyFunctionExprBuilder;
import com.google.template.soy.pysrc.restricted.SoyPySrcPrintDirective;
import com.google.template.soy.shared.restricted.ApiCallScopeBindingAnnotations.LocaleString;
import com.google.template.soy.shared.restricted.SoyJavaPrintDirective;
import com.ibm.icu.text.CompactDecimalFormat;
import com.ibm.icu.text.CompactDecimalFormat.CompactStyle;
import com.ibm.icu.text.NumberFormat;
import com.ibm.icu.util.ULocale;
import java.util.List;
import java.util.Set;
import javax.inject.Inject;
import javax.inject.Provider;
/**
* A directive that formats an input number based on Locale of the current SoyMsgBundle.
* It may take two optional arguments. The first is a lower-case string describing the type of
* format to apply, which can be one of 'decimal', 'currency', 'percent', 'scientific',
* 'compact_short', or 'compact_long'. If this argument is not provided, the default 'decimal'
* will be used. The second argument is the "numbers" keyword passed to the ICU4J's locale. For
* instance, it can be "native" so that we show native characters in languages like arabic (this
* argument is ignored for templates running in JavaScript).
*
* <p>Usage examples:
* {@code
{$value|formatNum}
{$value|formatNum:'decimal'}
{$value|formatNum:'decimal','native'}
}
*
*/
class FormatNumDirective implements SoyJavaPrintDirective, SoyLibraryAssistedJsSrcPrintDirective,
    SoyPySrcPrintDirective, SoyPhpSrcPrintDirective {

    // Map of format arguments (quoted Soy string literals) to the Closure Format enum.
    private static final ImmutableMap<String, String> JS_ARGS_TO_ENUM =
        ImmutableMap.<String, String>builder()
            .put("'decimal'", "goog.i18n.NumberFormat.Format.DECIMAL")
            .put("'currency'", "goog.i18n.NumberFormat.Format.CURRENCY")
            .put("'percent'", "goog.i18n.NumberFormat.Format.PERCENT")
            .put("'scientific'", "goog.i18n.NumberFormat.Format.SCIENTIFIC")
            .put("'compact_short'", "goog.i18n.NumberFormat.Format.COMPACT_SHORT")
            .put("'compact_long'", "goog.i18n.NumberFormat.Format.COMPACT_LONG")
            .build();

    // This directive can be called with no arguments, with one argument setting the format type,
    // or with two arguments setting the format type and the 'numbers' keyword for the ICU4J
    // formatter.
    private static final ImmutableSet<Integer> VALID_ARGS_SIZES = ImmutableSet.of(0, 1, 2);

    private static final ImmutableSet<String> REQUIRED_JS_LIBS =
        ImmutableSet.of("goog.i18n.NumberFormat");

    private static final String DEFAULT_FORMAT = "decimal";

    /**
     * Provide the current Locale string.
     *
     * <p>Note that this Locale value is only used in the Java environment. Closure does not provide a
     * clear mechanism to override the NumberFormat defined when the NumberFormat module loads. This
     * is probably not a significant loss of functionality, since the primary reason to inject the
     * LocaleString is because the Java VM's default Locale may not be the same as the desired Locale
     * for the page, while in the JavaScript environment, the value of goog.LOCALE should reliably
     * indicate which Locale Soy should use. Similarly, the Python backend relies on implementation
     * specific runtime locale support.
     */
    private final Provider<String> localeStringProvider;

    @Inject
    FormatNumDirective(@LocaleString Provider<String> localeStringProvider) {
        this.localeStringProvider = localeStringProvider;
    }

    @Override public String getName() {
        return "|formatNum";
    }

    @Override public Set<Integer> getValidArgsSizes() {
        return VALID_ARGS_SIZES;
    }

    @Override public boolean shouldCancelAutoescape() {
        return false;
    }

    /**
     * Formats the value with an ICU4J NumberFormat chosen by the first argument
     * ('decimal' by default); the optional second argument overrides the locale's
     * "numbers" keyword (e.g. 'native' for native digits).
     */
    @Override public SoyValue applyForJava(SoyValue value, List<SoyValue> args) {
        // NOTE(review): the default keyword value here is "local" — presumably
        // intentional (ICU keyword), but confirm it was not meant to be "native".
        ULocale uLocale = I18nUtils.parseULocale(localeStringProvider.get())
            .setKeywordValue("numbers", "local");
        if (args.size() > 1) {
            // A keyword for ULocale was passed (like 'native', for instance, to use native characters).
            uLocale = uLocale.setKeywordValue("numbers", args.get(1).stringValue());
        }
        NumberFormat numberFormat;
        String formatType = args.isEmpty() ? DEFAULT_FORMAT : args.get(0).stringValue();
        if ("decimal".equals(formatType)) {
            numberFormat = NumberFormat.getInstance(uLocale);
        } else if ("percent".equals(formatType)) {
            numberFormat = NumberFormat.getPercentInstance(uLocale);
        } else if ("currency".equals(formatType)) {
            numberFormat = NumberFormat.getCurrencyInstance(uLocale);
        } else if ("scientific".equals(formatType)) {
            numberFormat = NumberFormat.getScientificInstance(uLocale);
        } else if ("compact_short".equals(formatType)) {
            CompactDecimalFormat compactNumberFormat =
                CompactDecimalFormat.getInstance(uLocale, CompactStyle.SHORT);
            compactNumberFormat.setMaximumSignificantDigits(3);
            numberFormat = compactNumberFormat;
        } else if ("compact_long".equals(formatType)) {
            CompactDecimalFormat compactNumberFormat =
                CompactDecimalFormat.getInstance(uLocale, CompactStyle.LONG);
            compactNumberFormat.setMaximumSignificantDigits(3);
            numberFormat = compactNumberFormat;
        } else {
            throw SoySyntaxException.createWithoutMetaInfo("First argument to formatNum must be "
                + "constant, and one of: 'decimal', 'currency', 'percent', 'scientific', "
                + "'compact_short', or 'compact_long'.");
        }
        return StringData.forValue(numberFormat.format(((NumberData) value).toFloat()));
    }

    /** Emits a goog.i18n.NumberFormat call; compact styles get 3 significant digits. */
    @Override public JsExpr applyForJsSrc(JsExpr value, List<JsExpr> args) {
        String numberFormatType = parseFormat(args);
        StringBuilder expr = new StringBuilder();
        expr.append("(new goog.i18n.NumberFormat(" + JS_ARGS_TO_ENUM.get(numberFormatType) + "))");
        if ("'compact_short'".equals(numberFormatType) || "'compact_long'".equals(numberFormatType)) {
            expr.append(".setSignificantDigits(3)");
        }
        expr.append(".format(" + value.getText() + ")");
        return new JsExpr(expr.toString(), Integer.MAX_VALUE);
    }

    /** Delegates formatting to the Python runtime's translator.format_num. */
    @Override public PyExpr applyForPySrc(PyExpr value, List<PyExpr> args) {
        String numberFormatType = parseFormat(args);
        PyFunctionExprBuilder builder =
            new PyFunctionExprBuilder(PyExprUtils.TRANSLATOR_NAME + ".format_num")
                .addArg(value)
                .addArg(new PyExpr(numberFormatType, Integer.MAX_VALUE));
        return builder.asPyStringExpr();
    }

    /** Delegates formatting to the PHP runtime's translator::formatNum. */
    @Override public PhpExpr applyForPhpSrc(PhpExpr value, List<PhpExpr> args) {
        String numberFormatType = parseFormat(args);
        PhpFunctionExprBuilder builder =
            new PhpFunctionExprBuilder(PhpExprUtils.TRANSLATOR_NAME + "::formatNum")
                .addArg(value)
                .addArg(new PhpExpr(numberFormatType, Integer.MAX_VALUE));
        return builder.asPhpStringExpr();
    }

    @Override public ImmutableSet<String> getRequiredJsLibNames() {
        return REQUIRED_JS_LIBS;
    }

    /**
     * Validates that the provided format matches a supported format, and returns the value, if not,
     * this throws an exception.
     * @param args The list of provided arguments.
     * @return String The number format type.
     */
    private static String parseFormat(List<? extends TargetExpr> args) {
        String numberFormatType = !args.isEmpty() ? args.get(0).getText() : "'" + DEFAULT_FORMAT + "'";
        if (!JS_ARGS_TO_ENUM.containsKey(numberFormatType)) {
            // Bug fix: the map keys already carry surrounding single quotes, so
            // joining with "', '" and re-wrapping doubled the quotes in the message
            // (e.g. ''decimal'', ''currency''). Join plainly instead.
            String validKeys = Joiner.on(", ").join(JS_ARGS_TO_ENUM.keySet());
            throw SoySyntaxException.createWithoutMetaInfo("First argument to formatNum must be "
                + "constant, and one of: " + validKeys + ".");
        }
        return numberFormatType;
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package oit.gl3.dp;
import static com.jogamp.opengl.GL2GL3.*;
import com.jogamp.opengl.GL3;
import com.jogamp.opengl.GLContext;
import com.jogamp.opengl.util.GLBuffers;
import com.jogamp.opengl.util.glsl.ShaderCode;
import com.jogamp.opengl.util.glsl.ShaderProgram;
import java.nio.IntBuffer;
import oit.framework.Resources;
import oit.gl3.OIT;
import oit.framework.Scene;
import oit.gl3.Viewer;
import oit.gl3.Semantic;
/**
*
* @author gbarbieri
*/
/**
 * Order-independent transparency via classic front-to-back depth peeling:
 * each geometry pass "peels" the next-nearest transparent layer into a
 * ping-ponged depth/color target pair, then under-blends it into an
 * accumulation ("color blender") buffer; a final full-screen pass composites
 * the result over the opaque scene.
 *
 * NOTE(review): drawBuffers, queryName, samplesCount and bindRectTex are not
 * declared here — presumably inherited from OIT; confirm in the base class.
 *
 * @author gbarbieri
 */
public class DepthPeeling extends OIT {

    private static final String SHADERS_ROOT = "/oit/gl3/dp/shaders/";
    // Shader base names, indexed by the Program constants below.
    private static final String[] SHADERS_SRC = new String[]{"init", "peel", "blend", "final"};

    // Indices into programName.
    private class Program {
        public final static int INIT = 0;
        public final static int PEEL = 1;
        public final static int BLEND = 2;
        public final static int FINAL = 3;
        public final static int MAX = 4;
    }

    // Indices into textureName: two depth/color pairs for ping-ponging between
    // peel passes, plus the accumulation target.
    private class Texture {
        public final static int DEPTH0 = 0;
        public final static int DEPTH1 = 1;
        public final static int COLOR0 = 2;
        public final static int COLOR1 = 3;
        public final static int COLOR_BLENDER = 4;
        public final static int MAX = 5;
    }

    // Indices into framebufferName: the two ping-pong FBOs and the blender FBO.
    private class Framebuffer {
        public final static int _0 = 0;
        public final static int _1 = 1;
        public final static int COLOR_BLENDER = 2;
        public final static int MAX = 3;
    }

    private int[] programName = new int[Program.MAX];
    private IntBuffer textureName = GLBuffers.newDirectIntBuffer(Texture.MAX),
            framebufferName = GLBuffers.newDirectIntBuffer(Framebuffer.MAX);

    @Override
    public void init(GL3 gl3) {
        initPrograms(gl3);
        initTargets(gl3);
    }

    /**
     * Compiles and links the four programs, then wires uniform blocks and
     * sampler units to the shared Semantic binding points.
     */
    private void initPrograms(GL3 gl3) {
        // init & peel: geometry programs sharing the "shade" shader module.
        for (int program = Program.INIT; program <= Program.PEEL; program++) {
            ShaderCode vertShader = ShaderCode.create(gl3, GL_VERTEX_SHADER, 2, this.getClass(), SHADERS_ROOT,
                    new String[]{SHADERS_SRC[program], "shade"}, "vert", null, null, null, true);
            ShaderCode fragShader = ShaderCode.create(gl3, GL_FRAGMENT_SHADER, 2, this.getClass(), SHADERS_ROOT,
                    new String[]{SHADERS_SRC[program], "shade"}, "frag", null, null, null, true);
            ShaderProgram shaderProgram = new ShaderProgram();
            shaderProgram.add(vertShader);
            shaderProgram.add(fragShader);
            shaderProgram.link(gl3, System.out);
            programName[program] = shaderProgram.program();
            // Bind the uniform blocks to the engine-wide binding points.
            gl3.glUniformBlockBinding(
                    programName[program],
                    gl3.glGetUniformBlockIndex(programName[program], "Transform0"),
                    Semantic.Uniform.TRANSFORM0);
            gl3.glUniformBlockBinding(
                    programName[program],
                    gl3.glGetUniformBlockIndex(programName[program], "Transform1"),
                    Semantic.Uniform.TRANSFORM1);
            gl3.glUniformBlockBinding(
                    programName[program],
                    gl3.glGetUniformBlockIndex(programName[program], "Parameters"),
                    Semantic.Uniform.PARAMETERS);
            // Sampler uniforms are plain ints, so the program must be current.
            gl3.glUseProgram(programName[program]);
            gl3.glUniform1i(
                    gl3.glGetUniformLocation(programName[program], "opaqueDepthTex"),
                    Semantic.Sampler.OPAQUE_DEPTH);
        }
        // Only the peel program reads the previous layer's depth.
        gl3.glUseProgram(programName[Program.PEEL]);
        gl3.glUniform1i(
                gl3.glGetUniformLocation(programName[Program.PEEL], "depthTex"),
                Semantic.Sampler.DEPTH);
        // blend & final: full-screen programs (single shader source each).
        for (int program = Program.BLEND; program <= Program.FINAL; program++) {
            ShaderCode vertShader = ShaderCode.create(gl3, GL_VERTEX_SHADER, this.getClass(), SHADERS_ROOT, null,
                    SHADERS_SRC[program], "vert", null, true);
            ShaderCode fragShader = ShaderCode.create(gl3, GL_FRAGMENT_SHADER, this.getClass(), SHADERS_ROOT, null,
                    SHADERS_SRC[program], "frag", null, true);
            ShaderProgram shaderProgram = new ShaderProgram();
            shaderProgram.add(vertShader);
            shaderProgram.add(fragShader);
            shaderProgram.link(gl3, System.out);
            programName[program] = shaderProgram.program();
            gl3.glUniformBlockBinding(
                    programName[program],
                    gl3.glGetUniformBlockIndex(programName[program], "Transform2"),
                    Semantic.Uniform.TRANSFORM2);
        }
        gl3.glUseProgram(programName[Program.BLEND]);
        gl3.glUniform1i(
                gl3.glGetUniformLocation(programName[Program.BLEND], "tempTex"),
                Semantic.Sampler.TEMP);
        gl3.glUseProgram(programName[Program.FINAL]);
        gl3.glUniform1i(
                gl3.glGetUniformLocation(programName[Program.FINAL], "colorTex"),
                Semantic.Sampler.COLOR);
        gl3.glUniform1i(
                gl3.glGetUniformLocation(programName[Program.FINAL], "opaqueColorTex"),
                Semantic.Sampler.OPAQUE_COLOR);
    }

    @Override
    public void render(GL3 gl3, Scene scene) {
        /**
         * (1) Initialize Min Depth Buffer.
         */
        gl3.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName.get(Framebuffer.COLOR_BLENDER));
        gl3.glDrawBuffer(drawBuffers.get(0));
        gl3.glClearBufferfv(GL_COLOR, 0, Resources.clearColor.put(0, 0).put(1, 0).put(2, 0).put(3, 1));
        gl3.glClearBufferfv(GL_DEPTH, 0, Resources.clearDepth.put(0, 1));
        gl3.glEnable(GL_DEPTH_TEST);
        // Optional occlusion query: counts samples written by the first pass.
        if (Resources.useOQ) {
            gl3.glBeginQuery(GL_SAMPLES_PASSED, queryName.get(0));
        }
        gl3.glUseProgram(programName[Program.INIT]);
        bindRectTex(gl3, Viewer.textureName.get(Viewer.Texture.DEPTH), Semantic.Sampler.OPAQUE_DEPTH);
        scene.renderTransparent(gl3);
        boolean occluded = false;
        if (Resources.useOQ) {
            gl3.glEndQuery(GL_SAMPLES_PASSED);
            gl3.glGetQueryObjectuiv(queryName.get(0), GL_QUERY_RESULT, samplesCount);
            occluded = samplesCount.get(0) == 0;
        }
        /**
         * (2) Depth Peeling + Blending
         *
         * numLayers is useful if occlusion queries are disabled; in this case,
         * increasing/decreasing numPasses lets you see the intermediate results
         * and compare the intermediate results of front-to-back peeling vs dual
         * depth peeling for a given budget of geometry passes (numPasses);
         *
         * careful, && means you wont go deeper of the numLayers, you might be
         * done earlier but for sure not further than that
         *
         * || means you will peel until you render something or you didnt reach
         * yet the max numLayers.
         */
        if (!occluded) {
            for (int layer = 1; Resources.useOQ || layer < Resources.numLayers; layer++) {
                // Ping-pong: read last pass's depth, write into the other pair.
                int currId = layer % 2;
                int prevId = 1 - currId;
                gl3.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName.get(Framebuffer._0 + currId));
                gl3.glDrawBuffer(drawBuffers.get(0));
                gl3.glClearBufferfv(GL_COLOR, 0, Resources.clearColor.put(3, 0));
                gl3.glClearBufferfv(GL_DEPTH, 0, Resources.clearDepth);
                gl3.glDisable(GL_BLEND);
                gl3.glEnable(GL_DEPTH_TEST);
                if (Resources.useOQ) {
                    gl3.glBeginQuery(GL_SAMPLES_PASSED, queryName.get(0));
                }
                // Peel: discard fragments at or in front of the previous layer's depth.
                gl3.glUseProgram(programName[Program.PEEL]);
                bindRectTex(gl3, textureName.get(Texture.DEPTH0 + prevId), Semantic.Sampler.DEPTH);
                bindRectTex(gl3, Viewer.textureName.get(Viewer.Texture.DEPTH), Semantic.Sampler.OPAQUE_DEPTH);
                scene.renderTransparent(gl3);
                if (Resources.useOQ) {
                    gl3.glEndQuery(GL_SAMPLES_PASSED);
                }
                // Under-blend this layer into the accumulation buffer.
                gl3.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName.get(Framebuffer.COLOR_BLENDER));
                gl3.glDrawBuffer(drawBuffers.get(0));
                gl3.glDisable(GL_DEPTH_TEST);
                gl3.glEnable(GL_BLEND);
                gl3.glBlendEquation(GL_FUNC_ADD);
                gl3.glBlendFuncSeparate(GL_DST_ALPHA, GL_ONE, GL_ZERO, GL_ONE_MINUS_SRC_ALPHA);
                gl3.glUseProgram(programName[Program.BLEND]);
                bindRectTex(gl3, textureName.get(Texture.COLOR0 + currId), Semantic.Sampler.TEMP);
                Viewer.fullscreenQuad.render(gl3);
                gl3.glDisable(GL_BLEND);
                // Stop once a peel pass produced no samples: no layers remain.
                if (Resources.useOQ) {
                    gl3.glGetQueryObjectuiv(queryName.get(0), GL_QUERY_RESULT, samplesCount);
                    if (samplesCount.get(0) == 0) {
                        break;
                    }
                }
            }
        }
        /**
         * (3) Final Pass.
         */
        gl3.glBindFramebuffer(GL_FRAMEBUFFER, 0);
        gl3.glDrawBuffer(GL_BACK);
        gl3.glDisable(GL_DEPTH_TEST);
        gl3.glUseProgram(programName[Program.FINAL]);
        bindRectTex(gl3, textureName.get(Texture.COLOR_BLENDER), Semantic.Sampler.COLOR);
        bindRectTex(gl3, Viewer.textureName.get(Viewer.Texture.COLOR), Semantic.Sampler.OPAQUE_COLOR);
        Viewer.fullscreenQuad.render(gl3);
    }

    @Override
    public void reshape(GL3 gl3) {
        // Render targets are sized to the viewport, so rebuild them on resize.
        deleteTargets(gl3);
        initTargets(gl3);
    }

    @Override
    public void dispose(GL3 gl3) {
        deleteTargets(gl3);
    }

    /**
     * Allocates the rectangle textures and FBOs: two depth/color ping-pong
     * pairs plus the accumulation target (which shares DEPTH0 as depth attachment).
     */
    private void initTargets(GL3 gl3) {
        gl3.glGenTextures(Texture.MAX, textureName);
        gl3.glGenFramebuffers(Framebuffer.MAX, framebufferName);
        for (int i = 0; i < 2; i++) {
            gl3.glBindTexture(GL_TEXTURE_RECTANGLE, textureName.get(Texture.DEPTH0 + i));
            gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_BASE_LEVEL, 0);
            gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAX_LEVEL, 0);
            gl3.glTexImage2D(GL_TEXTURE_RECTANGLE, 0, GL_DEPTH_COMPONENT32F, Resources.imageSize.x, Resources.imageSize.y,
                    0, GL_DEPTH_COMPONENT, GL_FLOAT, null);
            gl3.glBindTexture(GL_TEXTURE_RECTANGLE, textureName.get(Texture.COLOR0 + i));
            gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_BASE_LEVEL, 0);
            gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAX_LEVEL, 0);
            gl3.glTexImage2D(GL_TEXTURE_RECTANGLE, 0, GL_RGBA8, Resources.imageSize.x, Resources.imageSize.y, 0, GL_RGBA,
                    GL_FLOAT, null);
            gl3.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName.get(Framebuffer._0 + i));
            gl3.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_RECTANGLE,
                    textureName.get(Texture.DEPTH0 + i), 0);
            gl3.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_RECTANGLE,
                    textureName.get(Texture.COLOR0 + i), 0);
        }
        gl3.glBindTexture(GL_TEXTURE_RECTANGLE, textureName.get(Texture.COLOR_BLENDER));
        gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_BASE_LEVEL, 0);
        gl3.glTexParameteri(GL_TEXTURE_RECTANGLE, GL_TEXTURE_MAX_LEVEL, 0);
        gl3.glTexImage2D(GL_TEXTURE_RECTANGLE, 0, GL_RGBA8, Resources.imageSize.x, Resources.imageSize.y, 0, GL_RGBA,
                GL_FLOAT, null);
        gl3.glBindFramebuffer(GL_FRAMEBUFFER, framebufferName.get(Framebuffer.COLOR_BLENDER));
        // The blender FBO reuses DEPTH0 so the init pass fills the first depth layer.
        gl3.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT, GL_TEXTURE_RECTANGLE,
                textureName.get(Texture.DEPTH0), 0);
        gl3.glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_RECTANGLE,
                textureName.get(Texture.COLOR_BLENDER), 0);
    }

    /** Releases all FBOs and textures created by initTargets. */
    private void deleteTargets(GL3 gl3) {
        gl3.glDeleteFramebuffers(Framebuffer.MAX, framebufferName);
        gl3.glDeleteTextures(Texture.MAX, textureName);
    }
}
| |
/*
* Copyright 2014 Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.net.flow;
import org.onosproject.core.ApplicationId;
import org.onosproject.core.DefaultGroupId;
import org.onosproject.core.GroupId;
import org.onosproject.net.DeviceId;
import java.util.Objects;
import static com.google.common.base.MoreObjects.toStringHelper;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Default FlowRule implementation. Identity (id, equals, hashCode) is derived
 * from deviceId, selector, tableId and payLoad only — priority and statistics
 * may change over the rule's lifetime and are deliberately excluded.
 */
public class DefaultFlowRule implements FlowRule {

    private final DeviceId deviceId;
    private final int priority;
    private final TrafficSelector selector;
    private final TrafficTreatment treatment;
    private final long created;     // creation time, ms since epoch

    private final FlowId id;
    private final Short appId;      // upper 16 bits of the flow id
    private final int timeout;
    private final boolean permanent;
    private final GroupId groupId;
    private final Integer tableId;
    private final FlowRuleExtPayLoad payLoad;

    /**
     * Copy constructor; the creation timestamp is reset to "now".
     *
     * @param rule the rule to copy
     */
    public DefaultFlowRule(FlowRule rule) {
        this.deviceId = rule.deviceId();
        this.priority = rule.priority();
        this.selector = rule.selector();
        this.treatment = rule.treatment();
        this.appId = rule.appId();
        this.groupId = rule.groupId();
        this.id = rule.id();
        this.timeout = rule.timeout();
        this.permanent = rule.isPermanent();
        this.created = System.currentTimeMillis();
        this.tableId = rule.tableId();
        this.payLoad = rule.payLoad();
    }

    /** Builder-only constructor: the app id is recovered from the flow id's top 16 bits. */
    private DefaultFlowRule(DeviceId deviceId, TrafficSelector selector,
                            TrafficTreatment treatment, Integer priority,
                            FlowId flowId, Boolean permanent, Integer timeout,
                            Integer tableId) {
        this.deviceId = deviceId;
        this.selector = selector;
        this.treatment = treatment;
        this.priority = priority;
        this.appId = (short) (flowId.value() >>> 48);
        this.id = flowId;
        this.permanent = permanent;
        this.timeout = timeout;
        this.tableId = tableId;
        this.created = System.currentTimeMillis();

        //FIXME: fields below will be removed.
        this.groupId = new DefaultGroupId(0);
        this.payLoad = null;
    }

    /**
     * Support for the third party flow rule. Creates a flow rule of flow table.
     *
     * @param deviceId the identity of the device where this rule applies
     * @param selector the traffic selector that identifies what traffic this
     *            rule
     * @param treatment the traffic treatment that applies to selected traffic
     * @param priority the flow rule priority given in natural order
     * @param appId the application id of this flow
     * @param timeout the timeout for this flow requested by an application
     * @param permanent whether the flow is permanent i.e. does not time out
     * @param payLoad 3rd-party origin private flow
     */
    public DefaultFlowRule(DeviceId deviceId, TrafficSelector selector,
                           TrafficTreatment treatment, int priority,
                           ApplicationId appId, int timeout, boolean permanent,
                           FlowRuleExtPayLoad payLoad) {
        if (priority < FlowRule.MIN_PRIORITY) {
            throw new IllegalArgumentException("Priority cannot be less than "
                    + MIN_PRIORITY);
        }
        this.deviceId = deviceId;
        this.priority = priority;
        this.selector = selector;
        this.treatment = treatment;
        this.appId = appId.id();
        this.groupId = new DefaultGroupId(0);
        this.timeout = timeout;
        this.permanent = permanent;
        this.tableId = 0;
        this.created = System.currentTimeMillis();
        this.payLoad = payLoad;

        /*
         * id consists of the following. | appId (16 bits) | groupId (16 bits) |
         * flowId (32 bits) |
         */
        this.id = FlowId.valueOf((((long) this.appId) << 48)
                | (((long) this.groupId.id()) << 32)
                | (this.hash() & 0xffffffffL));
    }

    /**
     * Support for the third party flow rule. Creates a flow rule of group
     * table.
     *
     * @param deviceId the identity of the device where this rule applies
     * @param selector the traffic selector that identifies what traffic this
     *            rule
     * @param treatment the traffic treatment that applies to selected traffic
     * @param priority the flow rule priority given in natural order
     * @param appId the application id of this flow
     * @param groupId the group id of this flow
     * @param timeout the timeout for this flow requested by an application
     * @param permanent whether the flow is permanent i.e. does not time out
     * @param payLoad 3rd-party origin private flow
     */
    public DefaultFlowRule(DeviceId deviceId, TrafficSelector selector,
                           TrafficTreatment treatment, int priority,
                           ApplicationId appId, GroupId groupId, int timeout,
                           boolean permanent, FlowRuleExtPayLoad payLoad) {
        if (priority < FlowRule.MIN_PRIORITY) {
            throw new IllegalArgumentException("Priority cannot be less than "
                    + MIN_PRIORITY);
        }
        this.deviceId = deviceId;
        this.priority = priority;
        this.selector = selector;
        this.treatment = treatment;
        this.appId = appId.id();
        this.groupId = groupId;
        this.timeout = timeout;
        this.permanent = permanent;
        this.created = System.currentTimeMillis();
        this.tableId = 0;
        this.payLoad = payLoad;

        /*
         * id consists of the following. | appId (16 bits) | groupId (16 bits) |
         * flowId (32 bits) |
         */
        this.id = FlowId.valueOf((((long) this.appId) << 48)
                | (((long) this.groupId.id()) << 32)
                | (this.hash() & 0xffffffffL));
    }

    @Override
    public FlowId id() {
        return id;
    }

    @Override
    public short appId() {
        return appId;
    }

    @Override
    public GroupId groupId() {
        return groupId;
    }

    @Override
    public int priority() {
        return priority;
    }

    @Override
    public DeviceId deviceId() {
        return deviceId;
    }

    @Override
    public TrafficSelector selector() {
        return selector;
    }

    @Override
    public TrafficTreatment treatment() {
        return treatment;
    }

    /*
     * The priority and statistics can change on a given treatment and selector,
     * so they are intentionally excluded from the hash (consistent with equals).
     */
    @Override
    public int hashCode() {
        return Objects.hash(deviceId, selector, tableId, payLoad);
    }

    //FIXME do we need this method in addition to hashCode()?
    private int hash() {
        return Objects.hash(deviceId, selector, tableId, payLoad);
    }

    /*
     * The priority and statistics can change on a given treatment and selector
     *
     * (non-Javadoc)
     *
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj instanceof DefaultFlowRule) {
            DefaultFlowRule that = (DefaultFlowRule) obj;
            return Objects.equals(deviceId, that.deviceId) &&
                    Objects.equals(priority, that.priority) &&
                    Objects.equals(selector, that.selector) &&
                    Objects.equals(tableId, that.tableId)
                    && Objects.equals(payLoad, that.payLoad);
        }
        return false;
    }

    @Override
    public String toString() {
        return toStringHelper(this)
                .add("id", Long.toHexString(id.value()))
                .add("deviceId", deviceId)
                .add("priority", priority)
                .add("selector", selector.criteria())
                .add("treatment", treatment == null ? "N/A" : treatment.allInstructions())
                .add("tableId", tableId)
                .add("created", created)
                .add("payLoad", payLoad)
                .toString();
    }

    @Override
    public int timeout() {
        return timeout;
    }

    @Override
    public boolean isPermanent() {
        return permanent;
    }

    @Override
    public int tableId() {
        return tableId;
    }

    public static Builder builder() {
        return new Builder();
    }

    public static final class Builder implements FlowRule.Builder {

        private FlowId flowId;
        private Integer priority;
        private DeviceId deviceId;
        private Integer tableId = 0;
        private TrafficSelector selector;
        private TrafficTreatment treatment;
        private Integer timeout;
        private Boolean permanent;

        @Override
        public FlowRule.Builder withCookie(long cookie) {
            this.flowId = FlowId.valueOf(cookie);
            return this;
        }

        @Override
        public FlowRule.Builder fromApp(ApplicationId appId) {
            this.flowId = computeFlowId(appId);
            return this;
        }

        @Override
        public FlowRule.Builder withPriority(int priority) {
            this.priority = priority;
            return this;
        }

        @Override
        public FlowRule.Builder forDevice(DeviceId deviceId) {
            this.deviceId = deviceId;
            return this;
        }

        @Override
        public FlowRule.Builder forTable(int tableId) {
            this.tableId = tableId;
            return this;
        }

        @Override
        public FlowRule.Builder withSelector(TrafficSelector selector) {
            this.selector = selector;
            return this;
        }

        @Override
        public FlowRule.Builder withTreatment(TrafficTreatment treatment) {
            this.treatment = treatment;
            return this;
        }

        @Override
        public FlowRule.Builder makePermanent() {
            this.timeout = 0;
            this.permanent = true;
            return this;
        }

        @Override
        public FlowRule.Builder makeTemporary(int timeout) {
            this.permanent = false;
            this.timeout = timeout;
            return this;
        }

        @Override
        public FlowRule build() {
            // Bug fix: the previous code called checkNotNull(x != null, msg),
            // passing a never-null boxed Boolean, so none of these preconditions
            // could ever fire. Check the actual references/conditions instead.
            checkNotNull(flowId, "Either an application" +
                    " id or a cookie must be supplied");
            checkNotNull(selector, "Traffic selector cannot be null");
            checkArgument(timeout != null || permanent != null, "Must either have " +
                    "a timeout or be permanent");
            checkNotNull(deviceId, "Must refer to a device");
            checkNotNull(priority, "Priority cannot be null");
            checkArgument(priority >= MIN_PRIORITY, "Priority cannot be less than " +
                    MIN_PRIORITY);
            return new DefaultFlowRule(deviceId, selector, treatment, priority,
                                       flowId, permanent, timeout, tableId);
        }

        /** Derives a flow id from the app id (top 16 bits) and a content hash (low 32 bits). */
        private FlowId computeFlowId(ApplicationId appId) {
            return FlowId.valueOf((((long) appId.id()) << 48)
                    | (hash() & 0xffffffffL));
        }

        private int hash() {
            return Objects.hash(deviceId, selector, treatment, tableId);
        }
    }

    @Override
    public FlowRuleExtPayLoad payLoad() {
        return payLoad;
    }
}
| |
package com.lcsc.cs.lurkserver.game;
import com.lcsc.cs.lurkserver.Protocol.CommandType;
import org.eclipse.jetty.util.ajax.JSON;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* Created by Jake on 4/19/2015.
* This holds a logged in player's data.
*/
public class Player implements Being{
    private static final Logger _logger = LoggerFactory.getLogger(Player.class);

    // NOTE(review): MAX_STAT_POINTS is not referenced in the code visible here —
    // presumably a cap enforced elsewhere (e.g. when allocating stats); confirm.
    private final int MAX_STAT_POINTS = 100;
    private final int MAX_HEALTH = 100;

    // On-disk save file for this player: <playerDataDir>/<name>.pldat (JSON).
    private final File _playerFile;

    //This is the player's client!
    private Client _client;

    public String name;

    private String _description;
    private int _gold;
    private int _attack;
    private int _defense;
    // Health regenerated per 10 seconds (see regenHealth).
    private int _regen;
    private BeingStatus _status;
    // Name of the room the player is currently in.
    private String _location;
    private int _health;
    private boolean _started;
    // Names of door keys the player has picked up.
    private List<String> _keys;
    /**
     * The constructor for the player. This will automatically load the player's data from its existing data file
     * or will load in default data and save the file.
     * @param playerName A unique player name.
     * @param playerDataDir The absolute path for the players' data files directory.
     * @param startingRoom This is the room that the player starts out in.
     */
    public Player(String playerName, String playerDataDir, String startingRoom) {
        name = playerName;
        _playerFile = new File(playerDataDir, playerName+".pldat");
        // Returning players resume from their save file; new players get defaults.
        if (_playerFile.exists()) {
            loadDataFromFile(startingRoom);
        }
        else {
            loadDefaultData(startingRoom);
        }
    }
/**
* This will regenerate the player's health.
* @param secondsPassed This is how much time has passed.
*/
public synchronized void regenHealth(int secondsPassed) {
int regeneratedHealth = ((Double)((secondsPassed/10.)*(double)_regen)).intValue();
if (regeneratedHealth + _health > MAX_HEALTH)
_health = MAX_HEALTH;
else
_health += regeneratedHealth;
}
/**
* This tells the pool if the player exists or not.
* @return A boolean specifying if the player's data file exists already.
*/
public static boolean playerExists(String playerName, String playerDataDir) {
return new File(playerDataDir, playerName+".pldat").exists();
}
    /** @return true if the player's status is DEAD. */
    public synchronized boolean isDead() {
        return _status == BeingStatus.DEAD;
    }
/**
* This gets the attack of the player so damage can be done to an enemy.
* @return The value of the player's attack or 0 if the player is dead.
*/
@Override
public synchronized int getAttack() {
return isDead() ? 0 : _attack;
}
/**
* This is called when an enemy is attacking the player.
* @param damage This is the attack of the enemy in addition to the d20 roll that was obtained.
* If a 1 was rolled, this method will not be called and damage will be done to the user
* instead.
* @return The amount of gold dropped is returned. Gold is only dropped if the player has died.
*/
@Override
public synchronized int doDamage(int damage) {
if (damage > _defense)
_health -= damage-_defense;
int gold = 0;
if (_health <= 0) {
_status = BeingStatus.DEAD;
gold = _gold;
_gold = 0;
}
return gold;
}
    /**
     * This is called each time this Being does damage to another Being.
     * @param gold This is the gold that is picked up after damage is done to another Being. If the other
     *             Being isn't dead, then zero gold will be passed to this method.
     */
    @Override
    public synchronized void pickedUpGold(int gold) {
        _gold += gold;
        // NOTE(review): this reports the gold delta, not the new _gold total —
        // confirm that Client.sendStatus expects the increment rather than the balance.
        _client.sendStatus(_health, gold);
    }
/**
 * Checks whether the player is carrying the key that belongs to some door.
 *
 * @param keyName the name of the key.
 * @return true if the player has the key, false otherwise.
 */
public synchronized boolean hasKey(String keyName) {
    boolean carrying = _keys.contains(keyName);
    return carrying;
}
/**
 * Handles the PCKUP extension: adds the named key to the player's key ring
 * unless it is already held.
 *
 * @param keyName the name of the key being picked up.
 * @return true if the key was newly added, false if already held.
 */
public synchronized boolean pickUpKey(String keyName) {
    if (_keys.contains(keyName)) {
        return false;
    }
    _keys.add(keyName);
    return true;
}
/**
 * Wires up the client object used to contact the user through this Player.
 *
 * @param client the client that can communicate with the user.
 */
public void setClient(Client client) {
    this._client = client;
}
/**
 * Forwards the current room's contents to the client, which relays them to
 * the user.
 *
 * @param infoList a list of the things currently in the room.
 */
public synchronized void sendRoomInfo(List<String> infoList) {
    this._client.sendRoomInfo(infoList);
}
/**
 * Initializes a brand-new player: no description, no gold, no stat points
 * spent, full health, alive, and an empty key ring.
 *
 * @param startingRoom the room the player starts out in.
 */
private void loadDefaultData(String startingRoom) {
    _description = null;
    _location = startingRoom;
    _status = BeingStatus.ALIVE;
    _health = MAX_HEALTH;
    _gold = 0;
    _attack = 0;
    _defense = 0;
    _regen = 0;
    _started = false;
    _keys = new ArrayList<String>();
}
/**
 * Restores the player's saved state from the JSON ".pldat" file.
 *
 * <p>The saved location is ignored: the player always rejoins in
 * {@code startingRoom}. Keys are not persisted (saveData() does not write
 * them), so the key ring always starts empty.
 *
 * @param startingRoom the room the player starts out in.
 */
private void loadDataFromFile(String startingRoom) {
    FileReader reader = null;
    try {
        reader = new FileReader(_playerFile);
        @SuppressWarnings("unchecked")
        Map<String, Object> data = (Map<String, Object>) JSON.parse(reader);
        _description = (String) data.get("description");
        _gold = ((Long) data.get("gold")).intValue();
        _attack = ((Long) data.get("attack")).intValue();
        _defense = ((Long) data.get("defense")).intValue();
        _regen = ((Long) data.get("regen")).intValue();
        _status = BeingStatus.fromString((String) data.get("status"));
        _location = startingRoom;
        _health = ((Long) data.get("health")).intValue();
        _started = ((Boolean) data.get("started")).booleanValue();
        _keys = new ArrayList<String>();
    } catch (IOException e) {
        // FileNotFoundException is a subclass of IOException, so one
        // handler replaces the original's two identical catch blocks.
        _logger.error("Problem loading the player data file", e);
    } finally {
        // BUG FIX: the original called reader.close() unconditionally,
        // which threw a NullPointerException whenever the FileReader
        // constructor itself failed (reader was still null).
        if (reader != null) {
            try {
                reader.close();
            } catch (IOException ignored) {
                // best-effort close; the load outcome was already logged
            }
        }
    }
}
/**
 * Saves the player's current data to the ".pldat" file as JSON so it can be
 * loaded the next time this player joins the game. Does nothing until the
 * player has actually started.
 *
 * <p>Note: location and keys are deliberately not written — the loader
 * always uses the starting room and an empty key ring.
 */
public synchronized void saveData() {
    if (!_started) {
        return; // nothing worth persisting before the player has started
    }
    Map<String, Object> data = new HashMap<String, Object>();
    data.put("description", _description);
    data.put("gold", _gold);
    data.put("attack", _attack);
    data.put("defense", _defense);
    data.put("regen", _regen);
    data.put("status", _status.getStatus());
    data.put("health", _health);
    data.put("started", _started);
    String jsonData = JSON.toString(data);
    FileOutputStream out = null;
    try {
        out = new FileOutputStream(_playerFile);
        out.write(jsonData.getBytes());
    } catch (IOException e) {
        // FileNotFoundException is an IOException; one handler covers both
        // of the original's identical catch blocks.
        _logger.error("Player data file failed to save", e);
    } finally {
        // BUG FIX: the original called out.close() unconditionally, which
        // threw a NullPointerException whenever the FileOutputStream
        // constructor failed (out was still null).
        if (out != null) {
            try {
                out.close();
            } catch (IOException ignored) {
                // best-effort close; the failure was already logged above
            }
        }
    }
}
/**
 * Responds to the Start command. The player is ready once a description has
 * been set and at least one stat point has been spent; only then is the
 * player marked as started.
 *
 * @return whether this player is ready to be started.
 */
public synchronized boolean start() {
    // synchronized added for consistency: _started, _description and the
    // stat fields are read and written under the lock everywhere else
    // (saveData(), getInfo(), the stat setters).
    boolean ready = _description != null
            && (_attack != 0 || _defense != 0 || _regen != 0);
    if (ready) {
        _started = true;
    }
    return ready;
}
/**
 * Returns the location of the player.
 *
 * @return the name of the player's current room.
 */
public synchronized String currentRoom() {
    return this._location;
}
/**
 * Moves the player to a new room.
 *
 * @param newRoom the new location of the player.
 */
public synchronized void changeRoom(String newRoom) {
    this._location = newRoom;
}
/**
 * Sets one of the player's pre-start stats (description, attack, defense or
 * regen). Attack, defense and regen share a single pool of MAX_STAT_POINTS;
 * a numeric stat is rejected when it would push the combined total over the
 * pool, or when it is negative.
 *
 * @param commandType specifies which stat is being changed.
 * @param stat the new value for that stat (a number for the point stats).
 * @return a response message that will be sent to the client: FINE on
 *         success, STATS_TOO_HIGH when over budget or negative, and
 *         INCORRECT_STATE when the value fails to parse.
 */
public synchronized ResponseMessage setStat(CommandType commandType, String stat) {
    // synchronized added for consistency: every other mutator of these
    // fields holds the lock, and getInfo() reads them under it.
    if (commandType == CommandType.SET_PLAYER_DESC) {
        _description = stat;
        return ResponseMessage.FINE;
    }
    if (commandType == CommandType.SET_ATTACK_STAT) {
        return applyPointStat(commandType, stat, MAX_STAT_POINTS - _defense - _regen);
    }
    if (commandType == CommandType.SET_DEFENSE_STAT) {
        return applyPointStat(commandType, stat, MAX_STAT_POINTS - _attack - _regen);
    }
    if (commandType == CommandType.SET_REGEN_STAT) {
        return applyPointStat(commandType, stat, MAX_STAT_POINTS - _attack - _defense);
    }
    // Any other command type fell through to FINE in the original as well.
    return ResponseMessage.FINE;
}
/**
 * Parses, validates and stores one point stat. This is the block the
 * original repeated verbatim for attack, defense and regen.
 *
 * @param commandType        which point stat to assign on success.
 * @param stat               the raw value from the client.
 * @param remainingStatPoints the pool left over after the other two stats.
 * @return FINE on success, STATS_TOO_HIGH when out of range, and
 *         INCORRECT_STATE when parsing fails.
 */
private ResponseMessage applyPointStat(CommandType commandType, String stat, int remainingStatPoints) {
    try {
        int value = Integer.parseInt(stat);
        if (value < 0 || value > remainingStatPoints) {
            return ResponseMessage.STATS_TOO_HIGH;
        }
        if (commandType == CommandType.SET_ATTACK_STAT) {
            _attack = value;
        } else if (commandType == CommandType.SET_DEFENSE_STAT) {
            _defense = value;
        } else {
            _regen = value;
        }
        return ResponseMessage.FINE;
    } catch (Exception e) {
        // Deliberately broad, matching the original: any parse failure
        // (including a null stat) maps to INCORRECT_STATE.
        return ResponseMessage.INCORRECT_STATE;
    }
}
/**
 * Builds a human-readable, multi-line summary of this player, for example:
 *
 * <pre>
 * Name: Trudy
 * Description: Black Hat Security Expert
 * Gold: 10
 * Attack: 60
 * Defense: 30
 * Regen: 10
 * Status: ALIVE
 * Location: Broom Closet
 * Health: 100
 * Started: YES
 * </pre>
 *
 * @return the formatted player summary (no trailing newline).
 */
public synchronized String getInfo() {
    StringBuilder info = new StringBuilder();
    info.append(String.format("Name: %s\n", name));
    info.append(String.format("Description: %s\n", _description));
    info.append(String.format("Gold: %d\n", _gold));
    info.append(String.format("Attack: %d\n", _attack));
    info.append(String.format("Defense: %d\n", _defense));
    info.append(String.format("Regen: %d\n", _regen));
    info.append(String.format("Status: %s\n", _status.getStatus()));
    info.append(String.format("Location: %s\n", _location));
    info.append(String.format("Health: %d\n", _health));
    info.append(_started ? "Started: YES" : "Started: NO");
    return info.toString();
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.client;
import org.elasticsearch.client.http.Header;
import org.elasticsearch.client.http.HttpHost;
import org.elasticsearch.client.http.client.config.RequestConfig;
import org.elasticsearch.client.http.impl.client.CloseableHttpClient;
import org.elasticsearch.client.http.impl.client.HttpClientBuilder;
import org.elasticsearch.client.http.impl.nio.client.CloseableHttpAsyncClient;
import org.elasticsearch.client.http.impl.nio.client.HttpAsyncClientBuilder;
import org.elasticsearch.client.http.nio.conn.SchemeIOSessionStrategy;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.Objects;
/**
 * Helps creating a new {@link RestClient}. Allows to set the most common http client configuration options when internally
 * creating the underlying {@link org.elasticsearch.client.http.nio.client.HttpAsyncClient}. Also allows to provide an externally created
 * {@link org.elasticsearch.client.http.nio.client.HttpAsyncClient} in case additional customization is needed.
 */
public final class RestClientBuilder {
    public static final int DEFAULT_CONNECT_TIMEOUT_MILLIS = 1000;
    public static final int DEFAULT_SOCKET_TIMEOUT_MILLIS = 30000;
    public static final int DEFAULT_MAX_RETRY_TIMEOUT_MILLIS = DEFAULT_SOCKET_TIMEOUT_MILLIS;
    public static final int DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS = 500;
    public static final int DEFAULT_MAX_CONN_PER_ROUTE = 10;
    public static final int DEFAULT_MAX_CONN_TOTAL = 30;

    // Shared sentinel so a builder with no default headers never holds null.
    private static final Header[] EMPTY_HEADERS = new Header[0];

    private final HttpHost[] hosts;
    private int maxRetryTimeout = DEFAULT_MAX_RETRY_TIMEOUT_MILLIS;
    private Header[] defaultHeaders = EMPTY_HEADERS;
    private RestClient.FailureListener failureListener;
    private HttpClientConfigCallback httpClientConfigCallback;
    private RequestConfigCallback requestConfigCallback;
    private String pathPrefix;

    /**
     * Creates a new builder instance and sets the hosts that the client will send requests to.
     *
     * @throws NullPointerException if {@code hosts} or any host is {@code null}.
     * @throws IllegalArgumentException if {@code hosts} is empty.
     */
    RestClientBuilder(HttpHost... hosts) {
        Objects.requireNonNull(hosts, "hosts must not be null");
        if (hosts.length == 0) {
            throw new IllegalArgumentException("no hosts provided");
        }
        for (HttpHost host : hosts) {
            Objects.requireNonNull(host, "host cannot be null");
        }
        this.hosts = hosts;
    }

    /**
     * Sets the default request headers, which will be sent along with each request.
     * <p>
     * Request-time headers will always overwrite any default headers.
     *
     * @throws NullPointerException if {@code defaultHeaders} or any header is {@code null}.
     */
    public RestClientBuilder setDefaultHeaders(Header[] defaultHeaders) {
        Objects.requireNonNull(defaultHeaders, "defaultHeaders must not be null");
        for (Header defaultHeader : defaultHeaders) {
            Objects.requireNonNull(defaultHeader, "default header must not be null");
        }
        this.defaultHeaders = defaultHeaders;
        return this;
    }

    /**
     * Sets the {@link RestClient.FailureListener} to be notified for each request failure.
     *
     * @throws NullPointerException if {@code failureListener} is {@code null}.
     */
    public RestClientBuilder setFailureListener(RestClient.FailureListener failureListener) {
        Objects.requireNonNull(failureListener, "failureListener must not be null");
        this.failureListener = failureListener;
        return this;
    }

    /**
     * Sets the maximum timeout (in milliseconds) to honour in case of multiple retries of the same request.
     * {@link #DEFAULT_MAX_RETRY_TIMEOUT_MILLIS} if not specified.
     *
     * @throws IllegalArgumentException if {@code maxRetryTimeoutMillis} is not greater than 0
     */
    public RestClientBuilder setMaxRetryTimeoutMillis(int maxRetryTimeoutMillis) {
        if (maxRetryTimeoutMillis <= 0) {
            throw new IllegalArgumentException("maxRetryTimeoutMillis must be greater than 0");
        }
        this.maxRetryTimeout = maxRetryTimeoutMillis;
        return this;
    }

    /**
     * Sets the {@link HttpClientConfigCallback} to be used to customize http client configuration.
     *
     * @throws NullPointerException if {@code httpClientConfigCallback} is {@code null}.
     */
    public RestClientBuilder setHttpClientConfigCallback(HttpClientConfigCallback httpClientConfigCallback) {
        Objects.requireNonNull(httpClientConfigCallback, "httpClientConfigCallback must not be null");
        this.httpClientConfigCallback = httpClientConfigCallback;
        return this;
    }

    /**
     * Sets the {@link RequestConfigCallback} to be used to customize http client configuration.
     *
     * @throws NullPointerException if {@code requestConfigCallback} is {@code null}.
     */
    public RestClientBuilder setRequestConfigCallback(RequestConfigCallback requestConfigCallback) {
        Objects.requireNonNull(requestConfigCallback, "requestConfigCallback must not be null");
        this.requestConfigCallback = requestConfigCallback;
        return this;
    }

    /**
     * Sets the path's prefix for every request used by the http client.
     * <p>
     * For example, if this is set to "/my/path", then any client request will become <code>"/my/path/" + endpoint</code>.
     * <p>
     * In essence, every request's {@code endpoint} is prefixed by this {@code pathPrefix}. The path prefix is useful for when
     * Elasticsearch is behind a proxy that provides a base path; it is not intended for other purposes and it should not be supplied in
     * other scenarios.
     *
     * @throws NullPointerException if {@code pathPrefix} is {@code null}.
     * @throws IllegalArgumentException if {@code pathPrefix} is empty, only '/', or ends with more than one '/'.
     */
    public RestClientBuilder setPathPrefix(String pathPrefix) {
        Objects.requireNonNull(pathPrefix, "pathPrefix must not be null");
        String cleanPathPrefix = pathPrefix;
        // Normalize to a leading slash so the stored form is always "/...".
        if (cleanPathPrefix.startsWith("/") == false) {
            cleanPathPrefix = "/" + cleanPathPrefix;
        }
        // best effort to ensure that it looks like "/base/path" rather than "/base/path/"
        if (cleanPathPrefix.endsWith("/")) {
            cleanPathPrefix = cleanPathPrefix.substring(0, cleanPathPrefix.length() - 1);
            // A second trailing slash after stripping one means the input had "//" at the end.
            if (cleanPathPrefix.endsWith("/")) {
                throw new IllegalArgumentException("pathPrefix is malformed. too many trailing slashes: [" + pathPrefix + "]");
            }
        }
        if (cleanPathPrefix.isEmpty() || "/".equals(cleanPathPrefix)) {
            throw new IllegalArgumentException("pathPrefix must not be empty or '/': [" + pathPrefix + "]");
        }
        this.pathPrefix = cleanPathPrefix;
        return this;
    }

    /**
     * Creates a new {@link RestClient} based on the provided configuration.
     */
    public RestClient build() {
        if (failureListener == null) {
            // Fall back to the base (no-op) listener so RestClient never has to null-check it.
            failureListener = new RestClient.FailureListener();
        }
        // Create the underlying async client inside a privileged block so client
        // creation succeeds even when the calling code has fewer privileges.
        CloseableHttpAsyncClient httpClient = AccessController.doPrivileged(new PrivilegedAction<CloseableHttpAsyncClient>() {
            @Override
            public CloseableHttpAsyncClient run() {
                return createHttpClient();
            }
        });
        RestClient restClient = new RestClient(httpClient, maxRetryTimeout, defaultHeaders, hosts, pathPrefix, failureListener);
        // Start the async client only after the RestClient wrapper is fully wired up.
        httpClient.start();
        return restClient;
    }

    /**
     * Builds the internal {@link CloseableHttpAsyncClient}: applies the default timeouts
     * and connection-pool limits, then lets the user-supplied callbacks customize the
     * request config and the client builder before the client is built.
     */
    private CloseableHttpAsyncClient createHttpClient() {
        //default timeouts are all infinite
        RequestConfig.Builder requestConfigBuilder = RequestConfig.custom()
                .setConnectTimeout(DEFAULT_CONNECT_TIMEOUT_MILLIS)
                .setSocketTimeout(DEFAULT_SOCKET_TIMEOUT_MILLIS)
                .setConnectionRequestTimeout(DEFAULT_CONNECTION_REQUEST_TIMEOUT_MILLIS);
        if (requestConfigCallback != null) {
            requestConfigBuilder = requestConfigCallback.customizeRequestConfig(requestConfigBuilder);
        }
        HttpAsyncClientBuilder httpClientBuilder = HttpAsyncClientBuilder.create().setDefaultRequestConfig(requestConfigBuilder.build())
                //default settings for connection pooling may be too constraining
                .setMaxConnPerRoute(DEFAULT_MAX_CONN_PER_ROUTE).setMaxConnTotal(DEFAULT_MAX_CONN_TOTAL).useSystemProperties();
        if (httpClientConfigCallback != null) {
            httpClientBuilder = httpClientConfigCallback.customizeHttpClient(httpClientBuilder);
        }
        // Effectively-final copy so the anonymous PrivilegedAction can capture it.
        final HttpAsyncClientBuilder finalBuilder = httpClientBuilder;
        return AccessController.doPrivileged(new PrivilegedAction<CloseableHttpAsyncClient>() {
            @Override
            public CloseableHttpAsyncClient run() {
                return finalBuilder.build();
            }
        });
    }

    /**
     * Callback used to customize the default {@link RequestConfig} being set to the {@link CloseableHttpClient}.
     * @see HttpClientBuilder#setDefaultRequestConfig
     */
    public interface RequestConfigCallback {
        /**
         * Allows to customize the {@link RequestConfig} that will be used with each request.
         * It is common to customize the different timeout values through this method without losing any other useful default
         * value that the {@link RestClientBuilder} internally sets.
         */
        RequestConfig.Builder customizeRequestConfig(RequestConfig.Builder requestConfigBuilder);
    }

    /**
     * Callback used to customize the {@link CloseableHttpClient} instance used by a {@link RestClient} instance.
     * Allows to customize the default {@link RequestConfig} being set to the client and any parameter that
     * can be set through {@link HttpClientBuilder}.
     */
    public interface HttpClientConfigCallback {
        /**
         * Allows to customize the {@link CloseableHttpAsyncClient} being created and used by the {@link RestClient}.
         * Commonly used to customize the default {@link org.elasticsearch.client.http.client.CredentialsProvider} for authentication
         * or the {@link SchemeIOSessionStrategy} for communication through ssl without losing any other useful default
         * value that the {@link RestClientBuilder} internally sets, like connection pooling.
         */
        HttpAsyncClientBuilder customizeHttpClient(HttpAsyncClientBuilder httpClientBuilder);
    }
}
| |
/*
* Copyright 2004-2009 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.compass.core;
import java.io.Reader;
import java.io.Serializable;
import org.compass.core.CompassQuery.CompassSpanQuery;
/**
* <p>The query builder is used to construct {@link org.compass.core.CompassQuery} programmatically.
* Simple queries, like {@link #le(String,Object)}, will generate a {@link org.compass.core.CompassQuery}.
* More complex ones will return their respective builder to continue and build them (like
* {@link #multiPhrase(String)}). Combining {@link org.compass.core.CompassQuery}s can be done using
* the {@link #bool()} operation.
*
* <p>An example of building a query using the query builder:
* <pre>
* CompassQueryBuilder queryBuilder = session.queryBuilder();
* queryBuilder.bool().addMust(queryBuilder.term("name", "jack")).addMust(queryBuilder.lt("birthdate", "19500101"))
* .toQuery().hits();
* </pre>
*
* @author kimchy
*/
public interface CompassQueryBuilder {
/**
 * A general interface for internal builders that will create a
 * {@link org.compass.core.CompassQuery}.
 *
 * @author kimchy
 */
public static interface ToCompassQuery {
    /**
     * Creates the query this builder has been configured to produce.
     */
    CompassQuery toQuery();
}
/**
 * A boolean query builder. Used to construct a query that will return hits
 * that are the matching boolean combinations of other queries.
 *
 * @author kimchy
 */
public static interface CompassBooleanQueryBuilder extends ToCompassQuery {
    /**
     * Hits must match the given query.
     *
     * @param query The query to add
     * @return The current builder
     */
    CompassBooleanQueryBuilder addMust(CompassQuery query);
    /**
     * Hits must not match the given query. Note that it is not possible to
     * build a boolean query that only consists of must-not queries.
     *
     * @param query The query to add
     * @return The current builder
     */
    CompassBooleanQueryBuilder addMustNot(CompassQuery query);
    /**
     * Hits should match the given query. For a boolean query built with two
     * <code>should</code> subqueries, at least one of the queries must
     * appear in the matching hits.
     *
     * @param query The query to add
     * @return The current builder
     */
    CompassBooleanQueryBuilder addShould(CompassQuery query);
    /**
     * Specifies a minimum number of the optional BooleanClauses
     * which must be satisfied.
     *
     * <p>By default no optional clauses are necessary for a match
     * (unless there are no required clauses). If this method is used,
     * then the specified number of clauses is required.
     *
     * <p>Use of this method is totally independent of specifying that
     * any specific clauses are required (or prohibited). This number will
     * only be compared against the number of matching optional clauses.
     *
     * <p>EXPERT NOTE: Using this method may force collecting docs in order,
     * regardless of whether setAllowDocsOutOfOrder(true) has been called.
     */
    CompassBooleanQueryBuilder setMinimumNumberShouldMatch(int min);
}
/**
 * A query builder that constructs a phrase query. A phrase query is used to
 * locate hits with terms within a certain distance from one another. The
 * distance is also called slop. For example, you can use it to search for
 * the values: java and london, which are near one another. "Near" is
 * measured using the slop, and a value of 1 means that they will be at a
 * distance of 1 other value from one another.
 * <p/>
 * The slop defaults to 0.
 *
 * @author kimchy
 */
public static interface CompassMultiPhraseQueryBuilder extends ToCompassQuery {
    /**
     * Sets the slop (allowed term distance) for the phrase query.
     */
    CompassMultiPhraseQueryBuilder setSlop(int slop);
    /**
     * Adds a single value to the next position in the phrase.
     */
    CompassMultiPhraseQueryBuilder add(Object value);
    /**
     * Adds a single value to the given position in the phrase.
     */
    CompassMultiPhraseQueryBuilder add(Object value, int position);
    /**
     * Adds several values to the next position in the phrase.
     */
    CompassMultiPhraseQueryBuilder add(Object[] values);
    /**
     * Adds several values to the given position in the phrase.
     */
    CompassMultiPhraseQueryBuilder add(Object[] values, int position);
}
/**
 * A query builder used to construct a query from a query string (i.e. +jack
 * +fang). The analyzer that will be used to analyze the query string and
 * the default search property (for search terms not prefixed with a
 * property name) can be set before calling <code>toQuery</code>.
 *
 * @author kimchy
 */
public static interface CompassQueryStringBuilder extends ToCompassQuery {
    /**
     * Sets the analyzer that will be used to analyze the query string. Can
     * be <code>null</code>. It is used when parsing a query string and
     * has no effect when using a built-in query (using the {@link org.compass.core.CompassQuery}).
     */
    CompassQueryStringBuilder setAnalyzer(String analyzer) throws CompassException;
    /**
     * Sets the query parser lookup name that will be used to parse the query string.
     */
    CompassQueryStringBuilder setQueryParser(String queryParser) throws CompassException;
    /**
     * Uses the spell check for suggesting a query based on the query string.
     */
    CompassQueryStringBuilder useSpellCheck() throws CompassException;
    /**
     * Sets the analyzer that will be used to analyze the query string. The
     * analyzer will be built based on the analyzer settings for the mapping definitions
     * that define the alias. It means that if a certain property is associated with
     * a specific analyzer, a per-property analyzer will be built.
     */
    CompassQueryStringBuilder setAnalyzerByAlias(String alias) throws CompassException;
    /**
     * Sets the default search property for non-prefixed terms in the query
     * string. Can be <code>null</code>. It is used when parsing a query
     * string and has no effect when using a built-in query (using the
     * {@link org.compass.core.CompassQuery}).
     */
    CompassQueryStringBuilder setDefaultSearchProperty(String defaultSearchProperty);
    /**
     * Uses the AND operator as the default operator instead of the OR operator.
     */
    CompassQueryStringBuilder useAndDefaultOperator();
    /**
     * Uses the OR operator as the default operator instead of the AND operator.
     */
    CompassQueryStringBuilder useOrDefaultOperator();
    /**
     * Forces the query string to only use the analyzer specified (or configured)
     * and not take into account any analyzers that might be specified within the mappings.
     */
    CompassQueryStringBuilder forceAnalyzer();
}
/**
 * Parses the query string into terms, all of which are used against the given
 * resource property name / meta-data.
 * <p/>
 * If the query string breaks into two terms (term1 and term2), and we use {@link #add(String)}
 * to add two resource property names: title and body, the query will be expanded to:
 * <code>(title:term1 body:term1) (title:term2 body:term2)</code>. If {@link #useAndDefaultOperator()}
 * is called, the query will be: <code>+(title:term1 body:term1) +(title:term2 body:term2)</code>.
 *
 * @author kimchy
 */
public static interface CompassMultiPropertyQueryStringBuilder extends ToCompassQuery {
    /**
     * Sets the analyzer that will be used to analyze the query string. Can
     * be <code>null</code>. It is used when parsing a query string and
     * has no effect when using a built-in query (using the {@link org.compass.core.CompassQuery}).
     */
    CompassMultiPropertyQueryStringBuilder setAnalyzer(String analyzer);
    /**
     * Sets the analyzer that will be used to analyze the query string. The
     * analyzer will be built based on the analyzer settings for the mapping definitions
     * that define the alias. It means that if a certain property is associated with
     * a specific analyzer, a per-property analyzer will be built.
     */
    CompassMultiPropertyQueryStringBuilder setAnalyzerByAlias(String alias) throws CompassException;
    /**
     * Sets the query parser lookup name that will be used to parse the query string.
     */
    CompassMultiPropertyQueryStringBuilder setQueryParser(String queryParser) throws CompassException;
    /**
     * Uses the spell check for suggesting a query based on the query string.
     */
    CompassMultiPropertyQueryStringBuilder useSpellCheck();
    /**
     * Adds another resource property name / meta-data that the query string will be executed against.
     * <p/>
     * The name can either be the actual resource property or meta-data value,
     * or the path to the given resource property (alias.rProperty), or the
     * class property (alias.cProperty) or the path to the meta-data
     * (alias.cProperty.metaData)
     *
     * @param name The name of the resource property / meta-data.
     */
    CompassMultiPropertyQueryStringBuilder add(String name);
    /**
     * Adds another resource property name / meta-data that the query string will be executed against.
     * <p/>
     * The name can either be the actual resource property or meta-data value,
     * or the path to the given resource property (alias.rProperty), or the
     * class property (alias.cProperty) or the path to the meta-data
     * (alias.cProperty.metaData)
     *
     * @param name The name of the resource property / meta-data.
     * @param boost The boosting factor of this resource property / meta-data.
     */
    CompassMultiPropertyQueryStringBuilder add(String name, float boost);
    /**
     * If called, the query will be expanded to: <code>+(title:term1 body:term1) +(title:term2 body:term2)</code>
     * (Instead of <code>(title:term1 body:term1) (title:term2 body:term2)</code>).
     */
    CompassMultiPropertyQueryStringBuilder useAndDefaultOperator();
    /**
     * Uses the OR operator as the default operator instead of the AND operator.
     */
    CompassMultiPropertyQueryStringBuilder useOrDefaultOperator();
    /**
     * Forces the query parser to use the analyzer specified or configured and not
     * analyzers that might be defined on different mappings.
     */
    CompassMultiPropertyQueryStringBuilder forceAnalyzer();
}
/**
 * A span near query builder. Matches spans which are near one another. One
 * can specify <i>slop</i>, the maximum number of intervening unmatched
 * positions, as well as whether matches are required to be in-order.
 * <p/>
 * <code>slop</code> defaults to <code>0</code> and <code>inOrder</code>
 * defaults to <code>true</code>.
 *
 * @author kimchy
 */
public static interface CompassQuerySpanNearBuilder {
    /**
     * Sets the slop, which is the distance allowed between spans.
     */
    CompassQuerySpanNearBuilder setSlop(int slop);
    /**
     * Sets whether the spans need to be in order.
     */
    CompassQuerySpanNearBuilder setInOrder(boolean inOrder);
    /**
     * Adds a single value to the next span match.
     */
    CompassQuerySpanNearBuilder add(Object value);
    /**
     * Adds a single span query to the next span match.
     */
    CompassQuerySpanNearBuilder add(CompassSpanQuery query);
    /**
     * Returns the generated span near query.
     */
    CompassSpanQuery toQuery();
}
/**
 * Creates a span "or" query builder.
 *
 * @author kimchy
 */
public static interface CompassQuerySpanOrBuilder {
    /**
     * Adds a span query which is OR'ed with the rest of the added span
     * queries.
     */
    CompassQuerySpanOrBuilder add(CompassSpanQuery query);
    /**
     * Returns the generated span "or" query.
     */
    CompassSpanQuery toQuery();
}
/**
* A more like this query builder (maps to Lucene <code>MoreLikeThis</code> feature within
* the contrib queries package).
*/
public static interface CompassMoreLikeThisQuery extends ToCompassQuery {
/**
* Sets the sub indexes that "more liket this" hits will be searched on
*/
CompassMoreLikeThisQuery setSubIndexes(String[] subIndexes);
/**
* Sets the aliases that "more liket this" hits will be searched on
*/
CompassMoreLikeThisQuery setAliases(String[] aliases);
/**
* Sets properties to the more like this query will be performed on.
*/
CompassMoreLikeThisQuery setProperties(String[] properties);
/**
* Adds a property to the more like this query will be performed on.
*/
CompassMoreLikeThisQuery addProperty(String property);
/**
* Sets the analyzer that will be used to analyze a more like this string (used when
* using {@link org.compass.core.CompassQueryBuilder#moreLikeThis(java.io.Reader)}.
*/
CompassMoreLikeThisQuery setAnalyzer(String analyzer);
/**
* Sets whether to boost terms in query based on "score" or not.
*/
CompassMoreLikeThisQuery setBoost(boolean boost);
/**
* The maximum number of tokens to parse in each example doc field that is not stored with TermVector support
*/
CompassMoreLikeThisQuery setMaxNumTokensParsed(int maxNumTokensParsed);
/**
* Sets the maximum number of query terms that will be included in any generated query.
*/
CompassMoreLikeThisQuery setMaxQueryTerms(int maxQueryTerms);
/**
* Sets the maximum word length above which words will be ignored. Set this to 0 for no
* maximum word length. The default is <code>0</code>.
*/
CompassMoreLikeThisQuery setMaxWordLen(int maxWordLen);
/**
* Sets the minimum word length below which words will be ignored. Set this to 0 for no
* minimum word length. The default is <code>0</code>.
*/
CompassMoreLikeThisQuery setMinWordLen(int minWordLen);
/**
* Sets the frequency at which words will be ignored which do not occur in at least this
* many resources. Defaults to 5.
*/
CompassMoreLikeThisQuery setMinResourceFreq(int minDocFreq);
/**
* Sets the frequency below which terms will be ignored in the source doc. Defaults to 2.
*/
CompassMoreLikeThisQuery setMinTermFreq(int minTermFreq);
/**
* Set the set of stopwords.
* Any word in this set is considered "uninteresting" and ignored.
* Even if your Analyzer allows stopwords, you might want to tell the MoreLikeThis code to ignore them, as
* for the purposes of document similarity it seems reasonable to assume that "a stop word is never interesting".
*/
CompassMoreLikeThisQuery setStopWords(String[] stopWords);
}
/**
* Should Compass use a converter for value passed even if there is no specific direct dot
* path notation to it. It will try and derive the best converter to use. Defaults to <code>false</code>.
*
* @see org.compass.core.mapping.ResourcePropertyLookup#setConvertOnlyWithDotPath(boolean)
*/
CompassQueryBuilder convertOnlyWithDotPath(boolean convertOnlyWithDotPath);
/**
* Should the query builder wrap automatically any query that has dot path notation (such as
* <code>alias.property</code>) with specific narrowing to match the given alias. Defaults to
* <code>true</code>.
*/
CompassQueryBuilder addAliasQueryIfNeeded(boolean addAliasQueryIfNeeded);
/**
* Constructs a boolean query builder.
*/
CompassBooleanQueryBuilder bool();
/**
* Constructs a boolean query builder, with coord disabled.
*/
CompassBooleanQueryBuilder bool(boolean disableCoord);
/**
* Constructs a multi phrase query builder for the given resource property /
* meta-data name.
* <p/>
* The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name of the resource property / meta-data.
* @return The multi phrase query builder.
*/
CompassMultiPhraseQueryBuilder multiPhrase(String name);
/**
* Constructs a query string query builder.
*
* @param queryString The query string (i.e. +jack +london).
* @return The query string query builder.
*/
CompassQueryStringBuilder queryString(String queryString);
/**
* Constructs a multi property query string builder, allowing to execute query strings
* against several resource property names.
*
* @param queryString The query string (i.e. +jack +london)
* @return The multi property string query builder.
*/
CompassMultiPropertyQueryStringBuilder multiPropertyQueryString(String queryString);
/**
* Returns a query that <b>exactly</b> match the given alias.
*
* <p>Note, this will <b>not</b> narrow down the search to specific sub indexes.
* In order to do that, please use {@link CompassQuery#setAliases(String[])}.
*
* @param aliasValue The alias value to match to.
* @return The generated query.
*/
CompassQuery alias(String aliasValue);
/**
* Returns a query that match the given alias or any extending aliases.
*
* <p>Note, this will <b>not</b> narrow down the search to specific sub indexes.
* In order to do that, please use {@link CompassQuery#setAliases(String[])}.
*
* @param aliasValue The alias value to match to or any extending aliases.
* @return The generated query.
*/
CompassQuery polyAlias(String aliasValue);
/**
* <p>Creates a query where the resource property must have the given value.
* Note, that the value itself will not be analyzed, but the text that was
* indexed might have been (if <code>indexed</code>). The search is case
* sensitive.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param value The value that must match
* @return The generated query
*/
CompassQuery term(String name, Object value);
/**
* Creates a query that match all documents.
*
* @return The generated query
*/
CompassQuery matchAll();
/**
* <p>Creates a query where the resource property is between the given values.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param low The low value limit
* @param high The high value limit
* @param inclusive If the values are inclusive or exclusive.
* @param constantScore If the query will affect the score of the results. With all other range queries
* it will default to <code>true</code>.
* @return The generated query
*/
CompassQuery between(String name, Object low, Object high, boolean inclusive, boolean constantScore);
/**
* <p>Creates a query where the resource property is between the given values.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param low The low value limit
* @param high The high value limit
* @param inclusive If the values are inclusive or exclusive.
* @return The generated query
*/
CompassQuery between(String name, Object low, Object high, boolean inclusive);
/**
* <p>Creates a query where the resource property is less than (<) the given
* value.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param value The high limit value
* @return The generated query
*/
CompassQuery lt(String name, Object value);
/**
* <p>Creates a query where the resource property is less or equal (<=) to the
* given value.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param value The high limit value
* @return The generated query
*/
CompassQuery le(String name, Object value);
/**
* <p>Creates a query where the resource property is greater than (>) to the
* given value.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param value The low limit value
* @return The generated query
*/
CompassQuery gt(String name, Object value);
/**
* <p>Creates a query where the resource property is greater or equal (>=) to
* the given value.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The resource property name
* @param value The low limit value
* @return The generated query
*/
CompassQuery ge(String name, Object value);
/**
* <p>Creates a query where the resource property values starts with the given
* prefix.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name the resource property name
* @param prefix The prefix value
* @return The generated query
*/
CompassQuery prefix(String name, String prefix);
/**
* Creates a query where the resource property values match the given
* wildcard. Supported wildcards are <code>*</code>, which matches any
* character sequence (including the empty one), and <code>?</code>,
* which matches any single character. Note this query can be slow, as it
* needs to iterate over many terms. In order to prevent extremely slow
* WildcardQueries, a Wildcard term should not start with one of the
* wildcards <code>*</code> or <code>?</code>.
*
* @param name The name
* @param wildcard The wildcard expression
* @return The generated query
*/
CompassQuery wildcard(String name, String wildcard);
/**
* <p>Creates a fuzzy query for the given resource property and the value. The
* similarity measurement is based on the Levenshtein (edit distance)
* algorithm. The minimumSimilarity defaults to 0.5 and prefixLength
* defaults to 0.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @param value The value
* @return The generated query
*/
CompassQuery fuzzy(String name, String value);
/**
* <p>Creates a fuzzy query for the given resource property and the value. The
* similarity measurement is based on the Levenshtein (edit distance)
* algorithm. The prefixLength defaults to 0.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @param value The value
* @param minimumSimilarity The minimum similarity, a value between 0.0 and 1.0
* @return The generated query
*/
CompassQuery fuzzy(String name, String value, float minimumSimilarity);
/**
* <p>Creates a fuzzy query for the given resource property and the value. The
* similarity measurement is based on the Levenshtein (edit distance)
* algorithm.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @param value The value
* @param minimumSimilarity The minimum similarity, a value between 0.0 and 1.0
* @param prefixLength The length of common (non-fuzzy) prefix
* @return The generated query
*/
CompassQuery fuzzy(String name, String value, float minimumSimilarity, int prefixLength);
/**
* <p>Creates a span query where the resource property must match the given
* value.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @param value The value
* @return The span query
*/
CompassSpanQuery spanEq(String name, Object value);
/**
* <p>Creates a span query where the span occur within the first
* <code>end</code> positions.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @param value The value
* @param end The limit on the position from the start.
* @return The span query
*/
CompassSpanQuery spanFirst(String name, Object value, int end);
/**
* <p>Creates a span query.
*
* @param end The limit on the position from the start.
* @return The span query
*/
CompassSpanQuery spanFirst(CompassSpanQuery spanQuery, int end);
/**
* <p>Constructs a span near query builder.
*
* <p>The name can either be the actual resource property or meta-data value,
* or the path to the given resource property (alias.rProperty), or the
* class property (alias.cProperty) or the path to the meta-data
* (alias.cProperty.metaData)
*
* @param name The name
* @return The span near query builder
*/
CompassQuerySpanNearBuilder spanNear(String name);
/**
* <p>Creates a span query that excludes matches where one
* {@link org.compass.core.CompassQuery.CompassSpanQuery} overlaps
* with another.
*
* <p>Construct a span query matching spans from <code>include</code> which
* have no overlap with spans from <code>exclude</code>.
*
* @param include The span query to include.
* @param exclude The span query to exclude.
* @return The span query
*/
CompassSpanQuery spanNot(CompassSpanQuery include, CompassSpanQuery exclude);
/**
* Constructs a span or query builder.
*
* @return The span query builder
*/
CompassQuerySpanOrBuilder spanOr();
/**
* Constructs a more like this query. The id can be an object of
* the class (with the id attributes set), an array of id objects, or the
* actual id object. Throws an exception if the resource is not found.
*/
CompassMoreLikeThisQuery moreLikeThis(String alias, Serializable id);
/**
* Constructs a more like this query to find hits that are similar to
* the give text represented by the reader.
*/
CompassMoreLikeThisQuery moreLikeThis(Reader reader);
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapreduce.lib.db;
import java.io.IOException;
import java.sql.Connection;
import java.sql.DatabaseMetaData;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.RecordWriter;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.StringUtils;
/**
 * An {@link OutputFormat} that sends the reduce output to a SQL table.
 * <p>
 * {@link DBOutputFormat} accepts &lt;key,value&gt; pairs, where
 * key has a type extending DBWritable. Returned {@link RecordWriter}
 * writes <b>only the key</b> to the database with a batch SQL query.
 */
@InterfaceAudience.Public
@InterfaceStability.Stable
public class DBOutputFormat<K extends DBWritable, V>
extends OutputFormat<K,V> {
  private static final Log LOG = LogFactory.getLog(DBOutputFormat.class);
  /** Upper-cased database product name; used to adapt the generated SQL. */
  public String dbProductName = "DEFAULT";

  /** No output specification to verify for a database sink. */
  public void checkOutputSpecs(JobContext context)
      throws IOException, InterruptedException {}

  public OutputCommitter getOutputCommitter(TaskAttemptContext context)
      throws IOException, InterruptedException {
    return new FileOutputCommitter(FileOutputFormat.getOutputPath(context),
                                   context);
  }

  /**
   * A RecordWriter that writes the reduce output to a SQL table.
   * Rows are accumulated via JDBC batching and flushed on {@link #close}.
   */
  @InterfaceStability.Evolving
  public class DBRecordWriter
      extends RecordWriter<K, V> {

    private Connection connection;
    private PreparedStatement statement;

    public DBRecordWriter() throws SQLException {
    }

    public DBRecordWriter(Connection connection
        , PreparedStatement statement) throws SQLException {
      this.connection = connection;
      this.statement = statement;
      // Batch everything into a single transaction committed in close().
      this.connection.setAutoCommit(false);
    }

    public Connection getConnection() {
      return connection;
    }

    public PreparedStatement getStatement() {
      return statement;
    }

    /** {@inheritDoc} */
    public void close(TaskAttemptContext context) throws IOException {
      try {
        statement.executeBatch();
        connection.commit();
      } catch (SQLException e) {
        try {
          connection.rollback();
        } catch (SQLException ex) {
          // Rollback failure is secondary; log it and surface the original error.
          LOG.warn(StringUtils.stringifyException(ex));
        }
        // Preserve the cause chain (previously only e.getMessage() survived).
        throw new IOException(e);
      } finally {
        try {
          statement.close();
          connection.close();
        } catch (SQLException ex) {
          throw new IOException(ex);
        }
      }
    }

    /** {@inheritDoc} */
    public void write(K key, V value) throws IOException {
      try {
        key.write(statement);
        statement.addBatch();
      } catch (SQLException e) {
        // Propagate instead of swallowing (was e.printStackTrace()): a failed
        // addBatch would otherwise silently drop output records.
        throw new IOException(e);
      }
    }
  }

  /**
   * Constructs the query used as the prepared statement to insert data.
   *
   * @param table
   *          the table to insert into
   * @param fieldNames
   *          the fields to insert into. If field names are unknown, supply an
   *          array of nulls.
   */
  public String constructQuery(String table, String[] fieldNames) {
    if (fieldNames == null) {
      throw new IllegalArgumentException("Field names may not be null");
    }
    StringBuilder query = new StringBuilder();
    query.append("INSERT INTO ").append(table);
    // Only emit an explicit column list when actual names were supplied.
    if (fieldNames.length > 0 && fieldNames[0] != null) {
      query.append(" (");
      for (int i = 0; i < fieldNames.length; i++) {
        query.append(fieldNames[i]);
        if (i != fieldNames.length - 1) {
          query.append(",");
        }
      }
      query.append(")");
    }
    query.append(" VALUES (");
    for (int i = 0; i < fieldNames.length; i++) {
      query.append("?");
      if (i != fieldNames.length - 1) {
        query.append(",");
      }
    }
    // DB2 and Oracle reject a trailing semicolon in prepared statements.
    if (dbProductName.startsWith("DB2") || dbProductName.startsWith("ORACLE")) {
      query.append(")");
    } else {
      query.append(");");
    }
    return query.toString();
  }

  /** {@inheritDoc} */
  public RecordWriter<K, V> getRecordWriter(TaskAttemptContext context)
      throws IOException {
    DBConfiguration dbConf = new DBConfiguration(context.getConfiguration());
    String tableName = dbConf.getOutputTableName();
    String[] fieldNames = dbConf.getOutputFieldNames();
    if (fieldNames == null) {
      // Unknown names: fall back to positional placeholders only.
      fieldNames = new String[dbConf.getOutputFieldCount()];
    }
    try {
      Connection connection = dbConf.getConnection();
      PreparedStatement statement = null;
      DatabaseMetaData dbMeta = connection.getMetaData();
      this.dbProductName = dbMeta.getDatabaseProductName().toUpperCase();
      statement = connection.prepareStatement(
          constructQuery(tableName, fieldNames));
      return new DBRecordWriter(connection, statement);
    } catch (Exception ex) {
      // Preserve the cause chain (previously only ex.getMessage() survived).
      throw new IOException(ex);
    }
  }

  /**
   * Initializes the reduce-part of the job with
   * the appropriate output settings
   *
   * @param job The job
   * @param tableName The table to insert data into
   * @param fieldNames The field names in the table.
   */
  public static void setOutput(Job job, String tableName,
      String... fieldNames) throws IOException {
    if (fieldNames.length > 0 && fieldNames[0] != null) {
      DBConfiguration dbConf = setOutput(job, tableName);
      dbConf.setOutputFieldNames(fieldNames);
    } else {
      if (fieldNames.length > 0) {
        // Names unknown but the count is meaningful: record only the count.
        setOutput(job, tableName, fieldNames.length);
      } else {
        throw new IllegalArgumentException(
            "Field names must be greater than 0");
      }
    }
  }

  /**
   * Initializes the reduce-part of the job
   * with the appropriate output settings
   *
   * @param job The job
   * @param tableName The table to insert data into
   * @param fieldCount the number of fields in the table.
   */
  public static void setOutput(Job job, String tableName,
      int fieldCount) throws IOException {
    DBConfiguration dbConf = setOutput(job, tableName);
    dbConf.setOutputFieldCount(fieldCount);
  }

  private static DBConfiguration setOutput(Job job,
      String tableName) throws IOException {
    job.setOutputFormatClass(DBOutputFormat.class);
    // Speculative reducers would insert duplicate rows; disable them.
    job.setReduceSpeculativeExecution(false);
    DBConfiguration dbConf = new DBConfiguration(job.getConfiguration());
    dbConf.setOutputTableName(tableName);
    return dbConf;
  }
}
| |
package org.ringingmaster.engine.composition;
import com.google.common.collect.ImmutableSet;
import org.pcollections.HashTreePSet;
import org.pcollections.PSet;
import org.ringingmaster.engine.NumberOfBells;
import org.ringingmaster.engine.arraytable.ImmutableArrayTable;
import org.ringingmaster.engine.composition.tableaccess.DefaultCompositionTableAccess;
import org.ringingmaster.engine.method.Bell;
import org.ringingmaster.engine.method.Row;
import org.ringingmaster.engine.method.Stroke;
import org.ringingmaster.engine.notation.Notation;
import org.ringingmaster.engine.notation.NotationBuilderHelper;
import org.ringingmaster.engine.composition.cell.Cell;
import org.ringingmaster.engine.composition.compositiontype.CompositionType;
import org.ringingmaster.engine.composition.tableaccess.DefaultDefinitionTableAccess;
import org.ringingmaster.engine.composition.tableaccess.DefinitionTableAccess;
import org.ringingmaster.engine.composition.tableaccess.CompositionTableAccess;
import javax.annotation.concurrent.Immutable;
import java.util.Objects;
import java.util.Optional;
import java.util.Set;
import static com.google.common.base.Preconditions.checkNotNull;
/**
 * Raw immutable POJO for a composition.
 *
 * <p>Equality deliberately ignores {@code sequenceNumber} and {@code actionName}
 * (edit-history metadata); {@link #hashCode()} mirrors that.
 *
 * @author Steve Lake
 */
@Immutable
public class Composition implements CompositionTableAccess<Cell>, DefinitionTableAccess<Cell> {

    private final int sequenceNumber;
    private final String actionName;
    private final String title;
    private final String author;
    private final NumberOfBells numberOfBells;
    private final CompositionType compositionType;
    private final Bell callFromBell;
    private final PSet<Notation> allNotations;
    private final Optional<Notation> nonSplicedActiveNotation;
    private final String plainLeadToken;
    private final DefinitionTableAccess<Cell> definitionTableCellsDelegate;
    private final Row startChange;
    private final int startAtRow;
    private final Stroke startStroke;
    private final Optional<Notation> startNotation;
    private final int terminationMaxRows;
    private final Optional<Integer> terminationMaxLeads;
    private final Optional<Integer> terminationMaxParts;
    private final int terminationMaxPartCircularity;
    private final Optional<TerminationChange> terminationChange;
    private final CompositionTableAccess<Cell> compositionTableAccessDelegate;

    public Composition(int sequenceNumber,
                       String actionName,
                       String title,
                       String author,
                       NumberOfBells numberOfBells,
                       CompositionType compositionType,
                       Bell callFromBell,
                       PSet<Notation> allNotations,
                       Optional<Notation> nonSplicedActiveNotation,
                       String plainLeadToken,
                       ImmutableArrayTable<Cell> definitionCells,
                       Row startChange, int startAtRow,
                       Stroke startStroke,
                       Optional<Notation> startNotation,
                       int terminationMaxRows,
                       Optional<Integer> terminationMaxLeads,
                       Optional<Integer> terminationMaxParts,
                       int terminationMaxPartCircularity,
                       Optional<TerminationChange> terminationChange,
                       ImmutableArrayTable<Cell> cells) {
        this.sequenceNumber = sequenceNumber;
        this.actionName = actionName;
        this.title = title;
        this.author = author;
        this.numberOfBells = numberOfBells;
        this.compositionType = compositionType;
        this.callFromBell = callFromBell;
        this.allNotations = allNotations;
        this.nonSplicedActiveNotation = nonSplicedActiveNotation;
        this.plainLeadToken = plainLeadToken;
        this.definitionTableCellsDelegate = new DefaultDefinitionTableAccess<>(definitionCells);
        this.startChange = startChange;
        this.startAtRow = startAtRow;
        this.startStroke = startStroke;
        this.startNotation = startNotation;
        this.terminationMaxRows = terminationMaxRows;
        this.terminationMaxLeads = terminationMaxLeads;
        this.terminationMaxParts = terminationMaxParts;
        this.terminationMaxPartCircularity = terminationMaxPartCircularity;
        this.terminationChange = terminationChange;
        // isSpliced() depends only on fields assigned above, so this call is safe here.
        this.compositionTableAccessDelegate = new DefaultCompositionTableAccess<>(cells, compositionType, isSpliced());
    }

    public int getSequenceNumber() {
        return sequenceNumber;
    }

    public String getActionName() {
        return actionName;
    }

    /** Short human-readable tag for log messages. */
    public String getLoggingTag() {
        return title + "<" + sequenceNumber + ">";
    }

    public String getTitle() {
        return title;
    }

    public String getAuthor() {
        return author;
    }

    public NumberOfBells getNumberOfBells() {
        return numberOfBells;
    }

    public CompositionType getCompositionType() {
        return compositionType;
    }

    public Bell getCallFromBell() {
        return callFromBell;
    }

    public PSet<Notation> getAllNotations() {
        return allNotations;
    }

    /** Notations usable at the current number of bells. */
    public PSet<Notation> getValidNotations() {
        //TODO precalculate
        return NotationBuilderHelper.filterNotationsUptoNumberOfBells(allNotations, numberOfBells);
    }

    /** Notations available for use: all valid ones when spliced, else just the active one. */
    public PSet<Notation> getAvailableNotations() {
        //TODO precalculate
        if (isSpliced()) {
            return getValidNotations();
        }
        else {
            // Not Spliced
            return nonSplicedActiveNotation.map(HashTreePSet::singleton).orElseGet(HashTreePSet::empty);
        }
    }

    public Optional<Notation> getNonSplicedActiveNotation() {
        return nonSplicedActiveNotation;
    }

    /** Spliced when notations exist but none is singled out as active. */
    public boolean isSpliced() {
        return !allNotations.isEmpty() && !nonSplicedActiveNotation.isPresent();
    }

    public String getPlainLeadToken() {
        return plainLeadToken;
    }

    @Override
    public ImmutableArrayTable<Cell> allDefinitionCells() {
        return definitionTableCellsDelegate.allDefinitionCells();
    }

    @Override
    public ImmutableArrayTable<Cell> definitionShorthandCells() {
        return definitionTableCellsDelegate.definitionShorthandCells();
    }

    @Override
    public Optional<ImmutableArrayTable<Cell>> findDefinitionByShorthand(String shorthand) {
        checkNotNull(shorthand);
        return definitionTableCellsDelegate.findDefinitionByShorthand(shorthand);
    }

    @Override
    public Set<ImmutableArrayTable<Cell>> getDefinitionAsTables() {
        return definitionTableCellsDelegate.getDefinitionAsTables();
    }

    @Override
    public ImmutableArrayTable<Cell> definitionDefinitionCells() {
        return definitionTableCellsDelegate.definitionDefinitionCells();
    }

    public ImmutableSet<String> getAllDefinitionShorthands() {
        return definitionTableCellsDelegate.getAllDefinitionShorthands();
    }

    public Row getStartChange() {
        return startChange;
    }

    public int getStartAtRow() {
        return startAtRow;
    }

    public Stroke getStartStroke() {
        return startStroke;
    }

    public Optional<Notation> getStartNotation() {
        return startNotation;
    }

    public int getTerminationMaxRows() {
        return terminationMaxRows;
    }

    public Optional<Integer> getTerminationMaxLeads() {
        return terminationMaxLeads;
    }

    public Optional<Integer> getTerminationMaxParts() {
        return terminationMaxParts;
    }

    public int getTerminationMaxPartCircularity() {
        return terminationMaxPartCircularity;
    }

    public Optional<TerminationChange> getTerminationChange() {
        return terminationChange;
    }

    @Override
    public ImmutableArrayTable<Cell> allCompositionCells() {
        return compositionTableAccessDelegate.allCompositionCells();
    }

    @Override
    public ImmutableArrayTable<Cell> mainBodyCells() {
        return compositionTableAccessDelegate.mainBodyCells();
    }

    @Override
    public ImmutableArrayTable<Cell> callingPositionCells() {
        return compositionTableAccessDelegate.callingPositionCells();
    }

    @Override
    public ImmutableArrayTable<Cell> splicedCells() {
        return compositionTableAccessDelegate.splicedCells();
    }

    @Override
    public ImmutableArrayTable<Cell> nullAreaCells() {
        return compositionTableAccessDelegate.nullAreaCells();
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (!(o instanceof Composition)) return false;
        Composition that = (Composition) o;
        return getStartAtRow() == that.getStartAtRow() &&
                getTerminationMaxRows() == that.getTerminationMaxRows() &&
                getTerminationMaxPartCircularity() == that.getTerminationMaxPartCircularity() &&
                Objects.equals(getTitle(), that.getTitle()) &&
                Objects.equals(getAuthor(), that.getAuthor()) &&
                getNumberOfBells() == that.getNumberOfBells() &&
                getCompositionType() == that.getCompositionType() &&
                getCallFromBell() == that.getCallFromBell() &&
                Objects.equals(getAllNotations(), that.getAllNotations()) &&
                Objects.equals(getNonSplicedActiveNotation(), that.getNonSplicedActiveNotation()) &&
                Objects.equals(getPlainLeadToken(), that.getPlainLeadToken()) &&
                Objects.equals(definitionTableCellsDelegate, that.definitionTableCellsDelegate) &&
                Objects.equals(getStartChange(), that.getStartChange()) &&
                getStartStroke() == that.getStartStroke() &&
                Objects.equals(getStartNotation(), that.getStartNotation()) &&
                Objects.equals(getTerminationMaxLeads(), that.getTerminationMaxLeads()) &&
                Objects.equals(getTerminationMaxParts(), that.getTerminationMaxParts()) &&
                Objects.equals(getTerminationChange(), that.getTerminationChange()) &&
                Objects.equals(compositionTableAccessDelegate, that.compositionTableAccessDelegate);
    }

    @Override
    public int hashCode() {
        // BUG FIX: sequenceNumber and actionName were previously included here
        // even though equals() ignores them, breaking the equals/hashCode
        // contract (equal objects could produce different hash codes). The hash
        // now covers exactly the fields compared by equals().
        return Objects.hash(getTitle(), getAuthor(), getNumberOfBells(), getCompositionType(),
                getCallFromBell(), getAllNotations(), getNonSplicedActiveNotation(), getPlainLeadToken(),
                definitionTableCellsDelegate, getStartChange(), getStartAtRow(), getStartStroke(),
                getStartNotation(), getTerminationMaxRows(), getTerminationMaxLeads(),
                getTerminationMaxParts(), getTerminationMaxPartCircularity(), getTerminationChange(),
                compositionTableAccessDelegate);
    }

    @Override
    public String toString() {
        return "Composition{" +
                "<" + sequenceNumber + "> " +
                "<" + actionName + "> " +
                "title='" + title + '\'' +
                ", author=" + author +
                ", numberOfBells='" + numberOfBells + '\'' +
                ", compositionType=" + compositionType +
                ", callFromBell='" + callFromBell + '\'' +
                ", allNotations=" + allNotations +
                ", nonSplicedActiveNotation=" + nonSplicedActiveNotation +
                ", plainLeadToken='" + plainLeadToken + '\'' +
                ", definitions=" + definitionTableCellsDelegate.allDefinitionCells() +
                ", startChange=" + startChange +
                ", startAtRow=" + startAtRow +
                ", startStroke=" + startStroke +
                ", startNotation=" + startNotation +
                ", terminationMaxRows=" + terminationMaxRows +
                ", terminationMaxLeads=" + terminationMaxLeads +
                ", terminationMaxParts=" + terminationMaxParts +
                ", terminationMaxPartCircularity=" + terminationMaxPartCircularity +
                ", terminationChange=" + terminationChange +
                ", cells=" + compositionTableAccessDelegate.allCompositionCells() +
                '}';
    }
}
| |
/*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.nuklear;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* <h3>Layout</h3>
*
* <pre><code>
* struct nk_style_selectable {
* {@link NkStyleItem struct nk_style_item} normal;
* {@link NkStyleItem struct nk_style_item} hover;
* {@link NkStyleItem struct nk_style_item} pressed;
* {@link NkStyleItem struct nk_style_item} normal_active;
* {@link NkStyleItem struct nk_style_item} hover_active;
* {@link NkStyleItem struct nk_style_item} pressed_active;
* {@link NkColor struct nk_color} text_normal;
* {@link NkColor struct nk_color} text_hover;
* {@link NkColor struct nk_color} text_pressed;
* {@link NkColor struct nk_color} text_normal_active;
* {@link NkColor struct nk_color} text_hover_active;
* {@link NkColor struct nk_color} text_pressed_active;
* {@link NkColor struct nk_color} text_background;
* nk_flags text_alignment;
* float rounding;
* {@link NkVec2 struct nk_vec2} padding;
* {@link NkVec2 struct nk_vec2} touch_padding;
* {@link NkVec2 struct nk_vec2} image_padding;
* {@link NkHandle nk_handle} userdata;
* {@link NkDrawBeginCallbackI nk_draw_begin} draw_begin;
* {@link NkDrawEndCallbackI nk_draw_end} draw_end;
* }</code></pre>
*/
@NativeType("struct nk_style_selectable")
public class NkStyleSelectable extends Struct implements NativeResource {
/** The struct size in bytes. */
public static final int SIZEOF;
/** The struct alignment in bytes. */
public static final int ALIGNOF;
/** The struct member offsets. */
public static final int
NORMAL,
HOVER,
PRESSED,
NORMAL_ACTIVE,
HOVER_ACTIVE,
PRESSED_ACTIVE,
TEXT_NORMAL,
TEXT_HOVER,
TEXT_PRESSED,
TEXT_NORMAL_ACTIVE,
TEXT_HOVER_ACTIVE,
TEXT_PRESSED_ACTIVE,
TEXT_BACKGROUND,
TEXT_ALIGNMENT,
ROUNDING,
PADDING,
TOUCH_PADDING,
IMAGE_PADDING,
USERDATA,
DRAW_BEGIN,
DRAW_END;
static {
// Member order must mirror the native struct nk_style_selectable declaration
// (see the class javadoc); __struct derives the platform-correct size,
// alignment, and per-member offsets (including padding) from these entries.
Layout layout = __struct(
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // normal
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // hover
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // pressed
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // normal_active
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // hover_active
__member(NkStyleItem.SIZEOF, NkStyleItem.ALIGNOF), // pressed_active
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_normal
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_hover
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_pressed
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_normal_active
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_hover_active
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_pressed_active
__member(NkColor.SIZEOF, NkColor.ALIGNOF), // text_background
__member(4), // text_alignment (nk_flags)
__member(4), // rounding (float)
__member(NkVec2.SIZEOF, NkVec2.ALIGNOF), // padding
__member(NkVec2.SIZEOF, NkVec2.ALIGNOF), // touch_padding
__member(NkVec2.SIZEOF, NkVec2.ALIGNOF), // image_padding
__member(NkHandle.SIZEOF, NkHandle.ALIGNOF), // userdata
__member(POINTER_SIZE), // draw_begin (function pointer)
__member(POINTER_SIZE) // draw_end (function pointer)
);
SIZEOF = layout.getSize();
ALIGNOF = layout.getAlignment();
// Offset constants are resolved by member index, matching the order above.
NORMAL = layout.offsetof(0);
HOVER = layout.offsetof(1);
PRESSED = layout.offsetof(2);
NORMAL_ACTIVE = layout.offsetof(3);
HOVER_ACTIVE = layout.offsetof(4);
PRESSED_ACTIVE = layout.offsetof(5);
TEXT_NORMAL = layout.offsetof(6);
TEXT_HOVER = layout.offsetof(7);
TEXT_PRESSED = layout.offsetof(8);
TEXT_NORMAL_ACTIVE = layout.offsetof(9);
TEXT_HOVER_ACTIVE = layout.offsetof(10);
TEXT_PRESSED_ACTIVE = layout.offsetof(11);
TEXT_BACKGROUND = layout.offsetof(12);
TEXT_ALIGNMENT = layout.offsetof(13);
ROUNDING = layout.offsetof(14);
PADDING = layout.offsetof(15);
TOUCH_PADDING = layout.offsetof(16);
IMAGE_PADDING = layout.offsetof(17);
USERDATA = layout.offsetof(18);
DRAW_BEGIN = layout.offsetof(19);
DRAW_END = layout.offsetof(20);
}
/**
 * Creates a {@code NkStyleSelectable} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
 * visible to the struct instance and vice versa.
 *
 * <p>The created instance holds a strong reference to the container object.</p>
 */
public NkStyleSelectable(ByteBuffer container) {
// __checkContainer presumably validates that the buffer has at least SIZEOF
// bytes remaining — TODO confirm against the Struct base class.
super(memAddress(container), __checkContainer(container, SIZEOF));
}
@Override
public int sizeof() { return SIZEOF; } // size in bytes of a single struct instance
/** @return a {@link NkStyleItem} view of the {@code normal} field. */
@NativeType("struct nk_style_item")
public NkStyleItem normal() { return nnormal(address()); }
/** @return a {@link NkStyleItem} view of the {@code hover} field. */
@NativeType("struct nk_style_item")
public NkStyleItem hover() { return nhover(address()); }
/** @return a {@link NkStyleItem} view of the {@code pressed} field. */
@NativeType("struct nk_style_item")
public NkStyleItem pressed() { return npressed(address()); }
/** @return a {@link NkStyleItem} view of the {@code normal_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem normal_active() { return nnormal_active(address()); }
/** @return a {@link NkStyleItem} view of the {@code hover_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem hover_active() { return nhover_active(address()); }
/** @return a {@link NkStyleItem} view of the {@code pressed_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem pressed_active() { return npressed_active(address()); }
/** @return a {@link NkColor} view of the {@code text_normal} field. */
@NativeType("struct nk_color")
public NkColor text_normal() { return ntext_normal(address()); }
/** @return a {@link NkColor} view of the {@code text_hover} field. */
@NativeType("struct nk_color")
public NkColor text_hover() { return ntext_hover(address()); }
/** @return a {@link NkColor} view of the {@code text_pressed} field. */
@NativeType("struct nk_color")
public NkColor text_pressed() { return ntext_pressed(address()); }
/** @return a {@link NkColor} view of the {@code text_normal_active} field. */
@NativeType("struct nk_color")
public NkColor text_normal_active() { return ntext_normal_active(address()); }
/** @return a {@link NkColor} view of the {@code text_hover_active} field. */
@NativeType("struct nk_color")
public NkColor text_hover_active() { return ntext_hover_active(address()); }
/** @return a {@link NkColor} view of the {@code text_pressed_active} field. */
@NativeType("struct nk_color")
public NkColor text_pressed_active() { return ntext_pressed_active(address()); }
/** @return a {@link NkColor} view of the {@code text_background} field. */
@NativeType("struct nk_color")
public NkColor text_background() { return ntext_background(address()); }
/** @return the value of the {@code text_alignment} field. */
@NativeType("nk_flags")
public int text_alignment() { return ntext_alignment(address()); }
/** @return the value of the {@code rounding} field. */
public float rounding() { return nrounding(address()); }
/** @return a {@link NkVec2} view of the {@code padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 padding() { return npadding(address()); }
/** @return a {@link NkVec2} view of the {@code touch_padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 touch_padding() { return ntouch_padding(address()); }
/** @return a {@link NkVec2} view of the {@code image_padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 image_padding() { return nimage_padding(address()); }
/** @return a {@link NkHandle} view of the {@code userdata} field. */
@NativeType("nk_handle")
public NkHandle userdata() { return nuserdata(address()); }
/** @return the value of the {@code draw_begin} field. */
@Nullable
@NativeType("nk_draw_begin")
public NkDrawBeginCallback draw_begin() { return ndraw_begin(address()); }
/** @return the value of the {@code draw_end} field. */
@Nullable
@NativeType("nk_draw_end")
public NkDrawEndCallback draw_end() { return ndraw_end(address()); }
/** Copies the specified {@link NkStyleItem} to the {@code normal} field. */
public NkStyleSelectable normal(@NativeType("struct nk_style_item") NkStyleItem value) { nnormal(address(), value); return this; }
/** Passes the {@code normal} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable normal(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(normal()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code hover} field. */
public NkStyleSelectable hover(@NativeType("struct nk_style_item") NkStyleItem value) { nhover(address(), value); return this; }
/** Passes the {@code hover} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable hover(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(hover()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code pressed} field. */
public NkStyleSelectable pressed(@NativeType("struct nk_style_item") NkStyleItem value) { npressed(address(), value); return this; }
/** Passes the {@code pressed} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable pressed(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(pressed()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code normal_active} field. */
public NkStyleSelectable normal_active(@NativeType("struct nk_style_item") NkStyleItem value) { nnormal_active(address(), value); return this; }
/** Passes the {@code normal_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable normal_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(normal_active()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code hover_active} field. */
public NkStyleSelectable hover_active(@NativeType("struct nk_style_item") NkStyleItem value) { nhover_active(address(), value); return this; }
/** Passes the {@code hover_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable hover_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(hover_active()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code pressed_active} field. */
public NkStyleSelectable pressed_active(@NativeType("struct nk_style_item") NkStyleItem value) { npressed_active(address(), value); return this; }
/** Passes the {@code pressed_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable pressed_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(pressed_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_normal} field. */
public NkStyleSelectable text_normal(@NativeType("struct nk_color") NkColor value) { ntext_normal(address(), value); return this; }
/** Passes the {@code text_normal} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_normal(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_normal()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_hover} field. */
public NkStyleSelectable text_hover(@NativeType("struct nk_color") NkColor value) { ntext_hover(address(), value); return this; }
/** Passes the {@code text_hover} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_hover(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_hover()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_pressed} field. */
public NkStyleSelectable text_pressed(@NativeType("struct nk_color") NkColor value) { ntext_pressed(address(), value); return this; }
/** Passes the {@code text_pressed} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_pressed(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_pressed()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_normal_active} field. */
public NkStyleSelectable text_normal_active(@NativeType("struct nk_color") NkColor value) { ntext_normal_active(address(), value); return this; }
/** Passes the {@code text_normal_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_normal_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_normal_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_hover_active} field. */
public NkStyleSelectable text_hover_active(@NativeType("struct nk_color") NkColor value) { ntext_hover_active(address(), value); return this; }
/** Passes the {@code text_hover_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_hover_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_hover_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_pressed_active} field. */
public NkStyleSelectable text_pressed_active(@NativeType("struct nk_color") NkColor value) { ntext_pressed_active(address(), value); return this; }
/** Passes the {@code text_pressed_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_pressed_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_pressed_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_background} field. */
public NkStyleSelectable text_background(@NativeType("struct nk_color") NkColor value) { ntext_background(address(), value); return this; }
/** Passes the {@code text_background} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable text_background(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_background()); return this; }
/** Sets the specified value to the {@code text_alignment} field. */
public NkStyleSelectable text_alignment(@NativeType("nk_flags") int value) { ntext_alignment(address(), value); return this; }
/** Sets the specified value to the {@code rounding} field. */
public NkStyleSelectable rounding(float value) { nrounding(address(), value); return this; }
/** Copies the specified {@link NkVec2} to the {@code padding} field. */
public NkStyleSelectable padding(@NativeType("struct nk_vec2") NkVec2 value) { npadding(address(), value); return this; }
/** Passes the {@code padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(padding()); return this; }
/** Copies the specified {@link NkVec2} to the {@code touch_padding} field. */
public NkStyleSelectable touch_padding(@NativeType("struct nk_vec2") NkVec2 value) { ntouch_padding(address(), value); return this; }
/** Passes the {@code touch_padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable touch_padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(touch_padding()); return this; }
/** Copies the specified {@link NkVec2} to the {@code image_padding} field. */
public NkStyleSelectable image_padding(@NativeType("struct nk_vec2") NkVec2 value) { nimage_padding(address(), value); return this; }
/** Passes the {@code image_padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable image_padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(image_padding()); return this; }
/** Copies the specified {@link NkHandle} to the {@code userdata} field. */
public NkStyleSelectable userdata(@NativeType("nk_handle") NkHandle value) { nuserdata(address(), value); return this; }
/** Passes the {@code userdata} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable userdata(java.util.function.Consumer<NkHandle> consumer) { consumer.accept(userdata()); return this; }
/** Sets the specified value to the {@code draw_begin} field. */
public NkStyleSelectable draw_begin(@Nullable @NativeType("nk_draw_begin") NkDrawBeginCallbackI value) { ndraw_begin(address(), value); return this; }
/** Sets the specified value to the {@code draw_end} field. */
public NkStyleSelectable draw_end(@Nullable @NativeType("nk_draw_end") NkDrawEndCallbackI value) { ndraw_end(address(), value); return this; }
/**
 * Initializes this struct with the specified values.
 *
 * <p>Every field setter returns {@code this}, so the initialization is
 * expressed as a single fluent chain; the field order matches the native
 * struct declaration.</p>
 */
public NkStyleSelectable set(
    NkStyleItem normal,
    NkStyleItem hover,
    NkStyleItem pressed,
    NkStyleItem normal_active,
    NkStyleItem hover_active,
    NkStyleItem pressed_active,
    NkColor text_normal,
    NkColor text_hover,
    NkColor text_pressed,
    NkColor text_normal_active,
    NkColor text_hover_active,
    NkColor text_pressed_active,
    NkColor text_background,
    int text_alignment,
    float rounding,
    NkVec2 padding,
    NkVec2 touch_padding,
    NkVec2 image_padding,
    NkHandle userdata,
    NkDrawBeginCallbackI draw_begin,
    NkDrawEndCallbackI draw_end
) {
    return this
        .normal(normal)
        .hover(hover)
        .pressed(pressed)
        .normal_active(normal_active)
        .hover_active(hover_active)
        .pressed_active(pressed_active)
        .text_normal(text_normal)
        .text_hover(text_hover)
        .text_pressed(text_pressed)
        .text_normal_active(text_normal_active)
        .text_hover_active(text_hover_active)
        .text_pressed_active(text_pressed_active)
        .text_background(text_background)
        .text_alignment(text_alignment)
        .rounding(rounding)
        .padding(padding)
        .touch_padding(touch_padding)
        .image_padding(image_padding)
        .userdata(userdata)
        .draw_begin(draw_begin)
        .draw_end(draw_end);
}
/**
 * Copies the specified struct data to this struct.
 *
 * @param src the source struct
 *
 * @return this struct
 */
public NkStyleSelectable set(NkStyleSelectable src) {
// Raw byte-for-byte copy of the whole struct, including both callback pointers.
memCopy(src.address(), address(), SIZEOF);
return this;
}
// -----------------------------------
/** Returns a new {@code NkStyleSelectable} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
public static NkStyleSelectable malloc() {
// Memory is uninitialized; caller owns it and must free explicitly.
return wrap(NkStyleSelectable.class, nmemAllocChecked(SIZEOF));
}
/** Returns a new {@code NkStyleSelectable} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
public static NkStyleSelectable calloc() {
// Memory is zero-initialized; caller owns it and must free explicitly.
return wrap(NkStyleSelectable.class, nmemCallocChecked(1, SIZEOF));
}
/** Returns a new {@code NkStyleSelectable} instance allocated with {@link BufferUtils}. */
public static NkStyleSelectable create() {
// Backed by a heap NIO buffer: reclaimed by GC, no explicit free required.
ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
return wrap(NkStyleSelectable.class, memAddress(container), container);
}
/** Returns a new {@code NkStyleSelectable} instance for the specified memory address. */
public static NkStyleSelectable create(long address) {
// No validation is performed; the caller is responsible for the address
// pointing at a live struct of the right size.
return wrap(NkStyleSelectable.class, address);
}
/** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static NkStyleSelectable createSafe(long address) {
return address == NULL ? null : wrap(NkStyleSelectable.class, address);
}
/**
* Returns a new {@link NkStyleSelectable.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static NkStyleSelectable.Buffer malloc(int capacity) {
return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
}
/**
* Returns a new {@link NkStyleSelectable.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
*
* @param capacity the buffer capacity
*/
public static NkStyleSelectable.Buffer calloc(int capacity) {
return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
}
/**
* Returns a new {@link NkStyleSelectable.Buffer} instance allocated with {@link BufferUtils}.
*
* @param capacity the buffer capacity
*/
public static NkStyleSelectable.Buffer create(int capacity) {
ByteBuffer container = __create(capacity, SIZEOF);
return wrap(Buffer.class, memAddress(container), capacity, container);
}
/**
* Create a {@link NkStyleSelectable.Buffer} instance at the specified memory.
*
* @param address the memory address
* @param capacity the buffer capacity
*/
public static NkStyleSelectable.Buffer create(long address, int capacity) {
return wrap(Buffer.class, address, capacity);
}
/** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
@Nullable
public static NkStyleSelectable.Buffer createSafe(long address, int capacity) {
return address == NULL ? null : wrap(Buffer.class, address, capacity);
}
// -----------------------------------
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable mallocStack() { return malloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable callocStack() { return calloc(stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable mallocStack(MemoryStack stack) { return malloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable callocStack(MemoryStack stack) { return calloc(stack); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
/** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
/** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
@Deprecated public static NkStyleSelectable.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }
/**
* Returns a new {@code NkStyleSelectable} instance allocated on the specified {@link MemoryStack}.
*
* @param stack the stack from which to allocate
*/
public static NkStyleSelectable malloc(MemoryStack stack) {
return wrap(NkStyleSelectable.class, stack.nmalloc(ALIGNOF, SIZEOF));
}
/**
* Returns a new {@code NkStyleSelectable} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
*/
public static NkStyleSelectable calloc(MemoryStack stack) {
return wrap(NkStyleSelectable.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
}
/**
 * Returns a new {@link NkStyleSelectable.Buffer} instance allocated on the specified {@link MemoryStack}.
 *
 * @param stack the stack from which to allocate
 * @param capacity the buffer capacity
 */
public static NkStyleSelectable.Buffer malloc(int capacity, MemoryStack stack) {
    // Route the byte-count computation through __checkMalloc, as the heap
    // malloc(int) overload does, so a negative capacity or an int overflow in
    // capacity * SIZEOF is rejected instead of silently allocating a
    // wrong-sized block.
    return wrap(Buffer.class, stack.nmalloc(ALIGNOF, __checkMalloc(capacity, SIZEOF)), capacity);
}
/**
* Returns a new {@link NkStyleSelectable.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
*
* @param stack the stack from which to allocate
* @param capacity the buffer capacity
*/
public static NkStyleSelectable.Buffer calloc(int capacity, MemoryStack stack) {
return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
}
// -----------------------------------
/** Unsafe version of {@link #normal}. */
public static NkStyleItem nnormal(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.NORMAL); }
/** Unsafe version of {@link #hover}. */
public static NkStyleItem nhover(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.HOVER); }
/** Unsafe version of {@link #pressed}. */
public static NkStyleItem npressed(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.PRESSED); }
/** Unsafe version of {@link #normal_active}. */
public static NkStyleItem nnormal_active(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.NORMAL_ACTIVE); }
/** Unsafe version of {@link #hover_active}. */
public static NkStyleItem nhover_active(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.HOVER_ACTIVE); }
/** Unsafe version of {@link #pressed_active}. */
public static NkStyleItem npressed_active(long struct) { return NkStyleItem.create(struct + NkStyleSelectable.PRESSED_ACTIVE); }
/** Unsafe version of {@link #text_normal}. */
public static NkColor ntext_normal(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_NORMAL); }
/** Unsafe version of {@link #text_hover}. */
public static NkColor ntext_hover(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_HOVER); }
/** Unsafe version of {@link #text_pressed}. */
public static NkColor ntext_pressed(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_PRESSED); }
/** Unsafe version of {@link #text_normal_active}. */
public static NkColor ntext_normal_active(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_NORMAL_ACTIVE); }
/** Unsafe version of {@link #text_hover_active}. */
public static NkColor ntext_hover_active(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_HOVER_ACTIVE); }
/** Unsafe version of {@link #text_pressed_active}. */
public static NkColor ntext_pressed_active(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_PRESSED_ACTIVE); }
/** Unsafe version of {@link #text_background}. */
public static NkColor ntext_background(long struct) { return NkColor.create(struct + NkStyleSelectable.TEXT_BACKGROUND); }
/** Unsafe version of {@link #text_alignment}. */
public static int ntext_alignment(long struct) { return UNSAFE.getInt(null, struct + NkStyleSelectable.TEXT_ALIGNMENT); }
/** Unsafe version of {@link #rounding}. */
public static float nrounding(long struct) { return UNSAFE.getFloat(null, struct + NkStyleSelectable.ROUNDING); }
/** Unsafe version of {@link #padding}. */
public static NkVec2 npadding(long struct) { return NkVec2.create(struct + NkStyleSelectable.PADDING); }
/** Unsafe version of {@link #touch_padding}. */
public static NkVec2 ntouch_padding(long struct) { return NkVec2.create(struct + NkStyleSelectable.TOUCH_PADDING); }
/** Unsafe version of {@link #image_padding}. */
public static NkVec2 nimage_padding(long struct) { return NkVec2.create(struct + NkStyleSelectable.IMAGE_PADDING); }
/** Unsafe version of {@link #userdata}. */
public static NkHandle nuserdata(long struct) { return NkHandle.create(struct + NkStyleSelectable.USERDATA); }
/** Unsafe version of {@link #draw_begin}. */
@Nullable public static NkDrawBeginCallback ndraw_begin(long struct) { return NkDrawBeginCallback.createSafe(memGetAddress(struct + NkStyleSelectable.DRAW_BEGIN)); }
/** Unsafe version of {@link #draw_end}. */
@Nullable public static NkDrawEndCallback ndraw_end(long struct) { return NkDrawEndCallback.createSafe(memGetAddress(struct + NkStyleSelectable.DRAW_END)); }
/** Unsafe version of {@link #normal(NkStyleItem) normal}. */
public static void nnormal(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.NORMAL, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #hover(NkStyleItem) hover}. */
public static void nhover(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.HOVER, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #pressed(NkStyleItem) pressed}. */
public static void npressed(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.PRESSED, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #normal_active(NkStyleItem) normal_active}. */
public static void nnormal_active(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.NORMAL_ACTIVE, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #hover_active(NkStyleItem) hover_active}. */
public static void nhover_active(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.HOVER_ACTIVE, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #pressed_active(NkStyleItem) pressed_active}. */
public static void npressed_active(long struct, NkStyleItem value) { memCopy(value.address(), struct + NkStyleSelectable.PRESSED_ACTIVE, NkStyleItem.SIZEOF); }
/** Unsafe version of {@link #text_normal(NkColor) text_normal}. */
public static void ntext_normal(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_NORMAL, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_hover(NkColor) text_hover}. */
public static void ntext_hover(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_HOVER, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_pressed(NkColor) text_pressed}. */
public static void ntext_pressed(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_PRESSED, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_normal_active(NkColor) text_normal_active}. */
public static void ntext_normal_active(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_NORMAL_ACTIVE, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_hover_active(NkColor) text_hover_active}. */
public static void ntext_hover_active(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_HOVER_ACTIVE, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_pressed_active(NkColor) text_pressed_active}. */
public static void ntext_pressed_active(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_PRESSED_ACTIVE, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_background(NkColor) text_background}. */
public static void ntext_background(long struct, NkColor value) { memCopy(value.address(), struct + NkStyleSelectable.TEXT_BACKGROUND, NkColor.SIZEOF); }
/** Unsafe version of {@link #text_alignment(int) text_alignment}. */
public static void ntext_alignment(long struct, int value) { UNSAFE.putInt(null, struct + NkStyleSelectable.TEXT_ALIGNMENT, value); }
/** Unsafe version of {@link #rounding(float) rounding}. */
public static void nrounding(long struct, float value) { UNSAFE.putFloat(null, struct + NkStyleSelectable.ROUNDING, value); }
/** Unsafe version of {@link #padding(NkVec2) padding}. */
public static void npadding(long struct, NkVec2 value) { memCopy(value.address(), struct + NkStyleSelectable.PADDING, NkVec2.SIZEOF); }
/** Unsafe version of {@link #touch_padding(NkVec2) touch_padding}. */
public static void ntouch_padding(long struct, NkVec2 value) { memCopy(value.address(), struct + NkStyleSelectable.TOUCH_PADDING, NkVec2.SIZEOF); }
/** Unsafe version of {@link #image_padding(NkVec2) image_padding}. */
public static void nimage_padding(long struct, NkVec2 value) { memCopy(value.address(), struct + NkStyleSelectable.IMAGE_PADDING, NkVec2.SIZEOF); }
/** Unsafe version of {@link #userdata(NkHandle) userdata}. */
public static void nuserdata(long struct, NkHandle value) { memCopy(value.address(), struct + NkStyleSelectable.USERDATA, NkHandle.SIZEOF); }
/** Unsafe version of {@link #draw_begin(NkDrawBeginCallbackI) draw_begin}. */
public static void ndraw_begin(long struct, @Nullable NkDrawBeginCallbackI value) { memPutAddress(struct + NkStyleSelectable.DRAW_BEGIN, memAddressSafe(value)); }
/** Unsafe version of {@link #draw_end(NkDrawEndCallbackI) draw_end}. */
public static void ndraw_end(long struct, @Nullable NkDrawEndCallbackI value) { memPutAddress(struct + NkStyleSelectable.DRAW_END, memAddressSafe(value)); }
// -----------------------------------
/** An array of {@link NkStyleSelectable} structs. */
public static class Buffer extends StructBuffer<NkStyleSelectable, Buffer> implements NativeResource {
/** Shared flyweight handed out by {@link #getElementFactory()}; created over the invalid address {@code -1}. NOTE(review): assumed to be repositioned by {@code StructBuffer} before any element access — confirm against StructBuffer. */
private static final NkStyleSelectable ELEMENT_FACTORY = NkStyleSelectable.create(-1L);
/**
 * Creates a new {@code NkStyleSelectable.Buffer} instance backed by the specified container.
 *
 * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
 * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
 * by {@link NkStyleSelectable#SIZEOF}, and its mark will be undefined.
 *
 * <p>The created buffer instance holds a strong reference to the container object.</p>
 */
public Buffer(ByteBuffer container) {
super(container, container.remaining() / SIZEOF);
}
/** Creates a new {@code NkStyleSelectable.Buffer} over {@code cap} structs starting at the given native {@code address}, without a backing Java container. */
public Buffer(long address, int cap) {
super(address, null, -1, 0, cap, cap);
}
/** Full-control constructor: the caller supplies the address, an optional backing container, and the buffer's mark, position, limit and capacity. */
Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
super(address, container, mark, pos, lim, cap);
}
// self-type hook: lets the fluent methods inherited from StructBuffer return this concrete Buffer type
@Override
protected Buffer self() {
return this;
}
// supplies the shared flyweight instance that StructBuffer uses for element views
@Override
protected NkStyleSelectable getElementFactory() {
return ELEMENT_FACTORY;
}
/** @return a {@link NkStyleItem} view of the {@code normal} field. */
@NativeType("struct nk_style_item")
public NkStyleItem normal() { return NkStyleSelectable.nnormal(address()); }
/** @return a {@link NkStyleItem} view of the {@code hover} field. */
@NativeType("struct nk_style_item")
public NkStyleItem hover() { return NkStyleSelectable.nhover(address()); }
/** @return a {@link NkStyleItem} view of the {@code pressed} field. */
@NativeType("struct nk_style_item")
public NkStyleItem pressed() { return NkStyleSelectable.npressed(address()); }
/** @return a {@link NkStyleItem} view of the {@code normal_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem normal_active() { return NkStyleSelectable.nnormal_active(address()); }
/** @return a {@link NkStyleItem} view of the {@code hover_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem hover_active() { return NkStyleSelectable.nhover_active(address()); }
/** @return a {@link NkStyleItem} view of the {@code pressed_active} field. */
@NativeType("struct nk_style_item")
public NkStyleItem pressed_active() { return NkStyleSelectable.npressed_active(address()); }
/** @return a {@link NkColor} view of the {@code text_normal} field. */
@NativeType("struct nk_color")
public NkColor text_normal() { return NkStyleSelectable.ntext_normal(address()); }
/** @return a {@link NkColor} view of the {@code text_hover} field. */
@NativeType("struct nk_color")
public NkColor text_hover() { return NkStyleSelectable.ntext_hover(address()); }
/** @return a {@link NkColor} view of the {@code text_pressed} field. */
@NativeType("struct nk_color")
public NkColor text_pressed() { return NkStyleSelectable.ntext_pressed(address()); }
/** @return a {@link NkColor} view of the {@code text_normal_active} field. */
@NativeType("struct nk_color")
public NkColor text_normal_active() { return NkStyleSelectable.ntext_normal_active(address()); }
/** @return a {@link NkColor} view of the {@code text_hover_active} field. */
@NativeType("struct nk_color")
public NkColor text_hover_active() { return NkStyleSelectable.ntext_hover_active(address()); }
/** @return a {@link NkColor} view of the {@code text_pressed_active} field. */
@NativeType("struct nk_color")
public NkColor text_pressed_active() { return NkStyleSelectable.ntext_pressed_active(address()); }
/** @return a {@link NkColor} view of the {@code text_background} field. */
@NativeType("struct nk_color")
public NkColor text_background() { return NkStyleSelectable.ntext_background(address()); }
/** @return the value of the {@code text_alignment} field. */
@NativeType("nk_flags")
public int text_alignment() { return NkStyleSelectable.ntext_alignment(address()); }
/** @return the value of the {@code rounding} field. */
public float rounding() { return NkStyleSelectable.nrounding(address()); }
/** @return a {@link NkVec2} view of the {@code padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 padding() { return NkStyleSelectable.npadding(address()); }
/** @return a {@link NkVec2} view of the {@code touch_padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 touch_padding() { return NkStyleSelectable.ntouch_padding(address()); }
/** @return a {@link NkVec2} view of the {@code image_padding} field. */
@NativeType("struct nk_vec2")
public NkVec2 image_padding() { return NkStyleSelectable.nimage_padding(address()); }
/** @return a {@link NkHandle} view of the {@code userdata} field. */
@NativeType("nk_handle")
public NkHandle userdata() { return NkStyleSelectable.nuserdata(address()); }
/** @return the value of the {@code draw_begin} field. */
@Nullable
@NativeType("nk_draw_begin")
public NkDrawBeginCallback draw_begin() { return NkStyleSelectable.ndraw_begin(address()); }
/** @return the value of the {@code draw_end} field. */
@Nullable
@NativeType("nk_draw_end")
public NkDrawEndCallback draw_end() { return NkStyleSelectable.ndraw_end(address()); }
/** Copies the specified {@link NkStyleItem} to the {@code normal} field. */
public NkStyleSelectable.Buffer normal(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.nnormal(address(), value); return this; }
/** Passes the {@code normal} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer normal(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(normal()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code hover} field. */
public NkStyleSelectable.Buffer hover(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.nhover(address(), value); return this; }
/** Passes the {@code hover} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer hover(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(hover()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code pressed} field. */
public NkStyleSelectable.Buffer pressed(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.npressed(address(), value); return this; }
/** Passes the {@code pressed} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer pressed(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(pressed()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code normal_active} field. */
public NkStyleSelectable.Buffer normal_active(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.nnormal_active(address(), value); return this; }
/** Passes the {@code normal_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer normal_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(normal_active()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code hover_active} field. */
public NkStyleSelectable.Buffer hover_active(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.nhover_active(address(), value); return this; }
/** Passes the {@code hover_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer hover_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(hover_active()); return this; }
/** Copies the specified {@link NkStyleItem} to the {@code pressed_active} field. */
public NkStyleSelectable.Buffer pressed_active(@NativeType("struct nk_style_item") NkStyleItem value) { NkStyleSelectable.npressed_active(address(), value); return this; }
/** Passes the {@code pressed_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer pressed_active(java.util.function.Consumer<NkStyleItem> consumer) { consumer.accept(pressed_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_normal} field. */
public NkStyleSelectable.Buffer text_normal(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_normal(address(), value); return this; }
/** Passes the {@code text_normal} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_normal(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_normal()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_hover} field. */
public NkStyleSelectable.Buffer text_hover(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_hover(address(), value); return this; }
/** Passes the {@code text_hover} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_hover(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_hover()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_pressed} field. */
public NkStyleSelectable.Buffer text_pressed(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_pressed(address(), value); return this; }
/** Passes the {@code text_pressed} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_pressed(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_pressed()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_normal_active} field. */
public NkStyleSelectable.Buffer text_normal_active(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_normal_active(address(), value); return this; }
/** Passes the {@code text_normal_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_normal_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_normal_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_hover_active} field. */
public NkStyleSelectable.Buffer text_hover_active(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_hover_active(address(), value); return this; }
/** Passes the {@code text_hover_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_hover_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_hover_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_pressed_active} field. */
public NkStyleSelectable.Buffer text_pressed_active(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_pressed_active(address(), value); return this; }
/** Passes the {@code text_pressed_active} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_pressed_active(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_pressed_active()); return this; }
/** Copies the specified {@link NkColor} to the {@code text_background} field. */
public NkStyleSelectable.Buffer text_background(@NativeType("struct nk_color") NkColor value) { NkStyleSelectable.ntext_background(address(), value); return this; }
/** Passes the {@code text_background} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer text_background(java.util.function.Consumer<NkColor> consumer) { consumer.accept(text_background()); return this; }
/** Sets the specified value to the {@code text_alignment} field. */
public NkStyleSelectable.Buffer text_alignment(@NativeType("nk_flags") int value) { NkStyleSelectable.ntext_alignment(address(), value); return this; }
/** Sets the specified value to the {@code rounding} field. */
public NkStyleSelectable.Buffer rounding(float value) { NkStyleSelectable.nrounding(address(), value); return this; }
/** Copies the specified {@link NkVec2} to the {@code padding} field. */
public NkStyleSelectable.Buffer padding(@NativeType("struct nk_vec2") NkVec2 value) { NkStyleSelectable.npadding(address(), value); return this; }
/** Passes the {@code padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(padding()); return this; }
/** Copies the specified {@link NkVec2} to the {@code touch_padding} field. */
public NkStyleSelectable.Buffer touch_padding(@NativeType("struct nk_vec2") NkVec2 value) { NkStyleSelectable.ntouch_padding(address(), value); return this; }
/** Passes the {@code touch_padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer touch_padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(touch_padding()); return this; }
/** Copies the specified {@link NkVec2} to the {@code image_padding} field. */
public NkStyleSelectable.Buffer image_padding(@NativeType("struct nk_vec2") NkVec2 value) { NkStyleSelectable.nimage_padding(address(), value); return this; }
/** Passes the {@code image_padding} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer image_padding(java.util.function.Consumer<NkVec2> consumer) { consumer.accept(image_padding()); return this; }
/** Copies the specified {@link NkHandle} to the {@code userdata} field. */
public NkStyleSelectable.Buffer userdata(@NativeType("nk_handle") NkHandle value) { NkStyleSelectable.nuserdata(address(), value); return this; }
/** Passes the {@code userdata} field to the specified {@link java.util.function.Consumer Consumer}. */
public NkStyleSelectable.Buffer userdata(java.util.function.Consumer<NkHandle> consumer) { consumer.accept(userdata()); return this; }
/** Sets the specified value to the {@code draw_begin} field. */
public NkStyleSelectable.Buffer draw_begin(@Nullable @NativeType("nk_draw_begin") NkDrawBeginCallbackI value) { NkStyleSelectable.ndraw_begin(address(), value); return this; }
/** Sets the specified value to the {@code draw_end} field. */
public NkStyleSelectable.Buffer draw_end(@Nullable @NativeType("nk_draw_end") NkDrawEndCallbackI value) { NkStyleSelectable.ndraw_end(address(), value); return this; }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.leveldb;
import java.io.IOException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Set;
import java.util.concurrent.TimeUnit;

import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.spi.RecoverableAggregationRepository;
import org.apache.camel.support.ServiceSupport;
import org.apache.camel.util.IOHelper;
import org.apache.camel.util.ObjectHelper;
import org.apache.camel.util.ServiceHelper;
import org.fusesource.hawtbuf.Buffer;
import org.iq80.leveldb.DBIterator;
import org.iq80.leveldb.WriteBatch;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import static org.fusesource.leveldbjni.JniDBFactory.asString;
/**
* An instance of {@link org.apache.camel.spi.AggregationRepository} which is backed by a {@link LevelDBFile}.
*/
public class LevelDBAggregationRepository extends ServiceSupport implements RecoverableAggregationRepository {

    private static final Logger LOG = LoggerFactory.getLogger(LevelDBAggregationRepository.class);

    private LevelDBFile levelDBFile;
    private String persistentFileName;
    private String repositoryName;
    private boolean sync;
    private boolean returnOldExchange;
    private LevelDBCamelCodec codec = new LevelDBCamelCodec();
    private long recoveryInterval = 5000;
    private boolean useRecovery = true;
    private int maximumRedeliveries;
    private String deadLetterUri;

    /**
     * Creates an aggregation repository
     */
    public LevelDBAggregationRepository() {
    }

    /**
     * Creates an aggregation repository
     *
     * @param repositoryName the repository name
     */
    public LevelDBAggregationRepository(String repositoryName) {
        ObjectHelper.notEmpty(repositoryName, "repositoryName");
        this.repositoryName = repositoryName;
    }

    /**
     * Creates an aggregation repository using a new {@link LevelDBFile}
     * that persists using the provided file.
     *
     * @param repositoryName the repository name
     * @param persistentFileName the persistent store filename
     */
    public LevelDBAggregationRepository(String repositoryName, String persistentFileName) {
        ObjectHelper.notEmpty(repositoryName, "repositoryName");
        ObjectHelper.notEmpty(persistentFileName, "persistentFileName");
        this.repositoryName = repositoryName;
        this.persistentFileName = persistentFileName;
    }

    /**
     * Creates an aggregation repository using the provided {@link LevelDBFile}.
     *
     * @param repositoryName the repository name
     * @param levelDBFile the leveldb file to use as persistent store
     */
    public LevelDBAggregationRepository(String repositoryName, LevelDBFile levelDBFile) {
        ObjectHelper.notEmpty(repositoryName, "repositoryName");
        ObjectHelper.notNull(levelDBFile, "levelDBFile");
        this.levelDBFile = levelDBFile;
        this.repositoryName = repositoryName;
    }

    /**
     * Stores the marshalled exchange under the given correlation key.
     *
     * @return the previously stored exchange for the key when {@code returnOldExchange} is
     *         enabled and one existed, otherwise <tt>null</tt>
     * @throws RuntimeException wrapping any {@link IOException} from the codec
     */
    public Exchange add(final CamelContext camelContext, final String key, final Exchange exchange) {
        LOG.debug("Adding key [{}] -> {}", key, exchange);
        try {
            byte[] lDbKey = keyBuilder(repositoryName, key);
            final Buffer exchangeBuffer = codec.marshallExchange(camelContext, exchange);
            // only pay for the extra read of the previous value if the caller asked for it
            byte[] rc = null;
            if (isReturnOldExchange()) {
                rc = levelDBFile.getDb().get(lDbKey);
            }
            LOG.trace("Adding key index {} for repository {}", key, repositoryName);
            levelDBFile.getDb().put(lDbKey, exchangeBuffer.toByteArray(), levelDBFile.getWriteOptions());
            LOG.trace("Added key index {}", key);
            // rc can only be non-null when returnOldExchange was enabled above
            if (rc != null) {
                return codec.unmarshallExchange(camelContext, new Buffer(rc));
            }
        } catch (IOException e) {
            throw new RuntimeException("Error adding to repository " + repositoryName + " with key " + key, e);
        }
        return null;
    }

    /**
     * Looks up the exchange stored under the given correlation key.
     *
     * @return the unmarshalled exchange, or <tt>null</tt> if the key is not present
     */
    public Exchange get(final CamelContext camelContext, final String key) {
        Exchange answer = null;
        try {
            byte[] lDbKey = keyBuilder(repositoryName, key);
            LOG.trace("Getting key index {}", key);
            byte[] rc = levelDBFile.getDb().get(lDbKey);
            if (rc != null) {
                answer = codec.unmarshallExchange(camelContext, new Buffer(rc));
            }
        } catch (IOException e) {
            throw new RuntimeException("Error getting key " + key + " from repository " + repositoryName, e);
        }
        LOG.debug("Getting key [{}] -> {}", key, answer);
        return answer;
    }

    /**
     * Removes the exchange for the given key and moves it to the completed
     * ("-completed") index so it can be confirmed or recovered later.
     */
    public void remove(final CamelContext camelContext, final String key, final Exchange exchange) {
        LOG.debug("Removing key [{}]", key);
        try {
            byte[] lDbKey = keyBuilder(repositoryName, key);
            final String exchangeId = exchange.getExchangeId();
            final Buffer exchangeBuffer = codec.marshallExchange(camelContext, exchange);
            byte[] rc = levelDBFile.getDb().get(lDbKey);
            if (rc != null) {
                // delete the pending entry and add the exchange to the confirmed index in a
                // single atomic write batch, so the exchange is never lost in between
                WriteBatch batch = levelDBFile.getDb().createWriteBatch();
                try {
                    batch.delete(lDbKey);
                    LOG.trace("Removed key index {} -> {}", key, new Buffer(rc));
                    byte[] confirmedLDBKey = keyBuilder(getRepositoryNameCompleted(), exchangeId);
                    batch.put(confirmedLDBKey, exchangeBuffer.toByteArray());
                    LOG.trace("Added confirm index {} for repository {}", exchangeId, getRepositoryNameCompleted());
                    levelDBFile.getDb().write(batch, levelDBFile.getWriteOptions());
                } finally {
                    batch.close();
                }
            } else {
                LOG.warn("Unable to remove key {} from repository {}: Not Found", key, repositoryName);
            }
        } catch (IOException e) {
            throw new RuntimeException("Error removing key " + key + " from repository " + repositoryName, e);
        }
    }

    /**
     * Confirms the exchange as fully processed by deleting it from the completed index.
     */
    public void confirm(final CamelContext camelContext, final String exchangeId) {
        LOG.debug("Confirming exchangeId [{}]", exchangeId);
        byte[] confirmedLDBKey = keyBuilder(getRepositoryNameCompleted(), exchangeId);
        byte[] rc = levelDBFile.getDb().get(confirmedLDBKey);
        if (rc != null) {
            levelDBFile.getDb().delete(confirmedLDBKey);
            LOG.trace("Removed confirm index {} -> {}", exchangeId, new Buffer(rc));
        } else {
            // fixed: the repository name was previously concatenated into the single {}
            // argument, which produced a garbled log message
            LOG.warn("Unable to confirm exchangeId [{}] from repository {}: Not Found", exchangeId, repositoryName);
        }
    }

    /**
     * Scans the pending index and returns all correlation keys currently stored.
     *
     * @return an unmodifiable set of keys, or <tt>null</tt> if the service is shutting down
     *         (kept for backwards compatibility with existing callers)
     */
    public Set<String> getKeys() {
        // interval task could potentially be running while we are shutting down so check for that
        if (!isRunAllowed()) {
            return null;
        }
        final Set<String> keys = new LinkedHashSet<>();
        DBIterator it = levelDBFile.getDb().iterator();
        try {
            String prefix = repositoryName + '\0';
            for (it.seek(keyBuilder(repositoryName, "")); it.hasNext(); it.next()) {
                if (!isRunAllowed()) {
                    break;
                }
                String keyBuffer = asString(it.peekNext().getKey());
                if (!keyBuffer.startsWith(prefix)) {
                    // iterated past this repository's key range
                    break;
                }
                // substring never returns null, so no null-check is needed here
                String key = keyBuffer.substring(prefix.length());
                LOG.trace("getKey [{}]", key);
                keys.add(key);
            }
        } finally {
            // Make sure you close the iterator to avoid resource leaks.
            IOHelper.close(it);
        }
        return Collections.unmodifiableSet(keys);
    }

    /**
     * Scans the completed index and returns the exchange ids that need to be recovered.
     *
     * @return an unmodifiable set of exchange ids, or <tt>null</tt> if the service is
     *         shutting down (kept for backwards compatibility with existing callers)
     */
    public Set<String> scan(CamelContext camelContext) {
        if (!isRunAllowed()) {
            return null;
        }
        final Set<String> answer = new LinkedHashSet<>();
        DBIterator it = levelDBFile.getDb().iterator();
        try {
            String prefix = getRepositoryNameCompleted() + '\0';
            for (it.seek(keyBuilder(getRepositoryNameCompleted(), "")); it.hasNext(); it.next()) {
                String keyBuffer = asString(it.peekNext().getKey());
                if (!keyBuffer.startsWith(prefix)) {
                    // iterated past the completed repository's key range
                    break;
                }
                String exchangeId = keyBuffer.substring(prefix.length());
                LOG.trace("Scan exchangeId [{}]", exchangeId);
                answer.add(exchangeId);
            }
        } finally {
            // Make sure you close the iterator to avoid resource leaks.
            IOHelper.close(it);
        }
        if (answer.isEmpty()) {
            LOG.trace("Scanned and found no exchange to recover.");
        } else {
            LOG.debug("Scanned and found {} exchange(s) to recover (note some of them may already be in progress).", answer.size());
        }
        return Collections.unmodifiableSet(answer);
    }

    /**
     * Recovers (unmarshalls) the completed exchange with the given id, if present.
     */
    public Exchange recover(CamelContext camelContext, final String exchangeId) {
        Exchange answer = null;
        try {
            byte[] completedLDBKey = keyBuilder(getRepositoryNameCompleted(), exchangeId);
            byte[] rc = levelDBFile.getDb().get(completedLDBKey);
            if (rc != null) {
                answer = codec.unmarshallExchange(camelContext, new Buffer(rc));
            }
        } catch (IOException e) {
            throw new RuntimeException("Error recovering exchangeId " + exchangeId + " from repository " + repositoryName, e);
        }
        LOG.debug("Recovering exchangeId [{}] -> {}", exchangeId, answer);
        return answer;
    }

    /**
     * Counts the entries stored under the given repository name prefix.
     */
    private int size(final String repositoryName) {
        DBIterator it = levelDBFile.getDb().iterator();
        String prefix = repositoryName + '\0';
        int count = 0;
        try {
            for (it.seek(keyBuilder(repositoryName, "")); it.hasNext(); it.next()) {
                if (!asString(it.peekNext().getKey()).startsWith(prefix)) {
                    break;
                }
                count++;
            }
        } finally {
            // Make sure you close the iterator to avoid resource leaks.
            IOHelper.close(it);
        }
        LOG.debug("Size of repository [{}] -> {}", repositoryName, count);
        return count;
    }

    public LevelDBFile getLevelDBFile() {
        return levelDBFile;
    }

    public void setLevelDBFile(LevelDBFile levelDBFile) {
        this.levelDBFile = levelDBFile;
    }

    public String getRepositoryName() {
        return repositoryName;
    }

    /** Name of the companion index holding completed (removed but unconfirmed) exchanges. */
    private String getRepositoryNameCompleted() {
        return repositoryName + "-completed";
    }

    public void setRepositoryName(String repositoryName) {
        this.repositoryName = repositoryName;
    }

    public boolean isSync() {
        return sync;
    }

    public void setSync(boolean sync) {
        this.sync = sync;
    }

    public boolean isReturnOldExchange() {
        return returnOldExchange;
    }

    public void setReturnOldExchange(boolean returnOldExchange) {
        this.returnOldExchange = returnOldExchange;
    }

    public void setRecoveryInterval(long interval, TimeUnit timeUnit) {
        this.recoveryInterval = timeUnit.toMillis(interval);
    }

    public void setRecoveryInterval(long interval) {
        this.recoveryInterval = interval;
    }

    public long getRecoveryIntervalInMillis() {
        return recoveryInterval;
    }

    public boolean isUseRecovery() {
        return useRecovery;
    }

    public void setUseRecovery(boolean useRecovery) {
        this.useRecovery = useRecovery;
    }

    public int getMaximumRedeliveries() {
        return maximumRedeliveries;
    }

    public void setMaximumRedeliveries(int maximumRedeliveries) {
        this.maximumRedeliveries = maximumRedeliveries;
    }

    public String getDeadLetterUri() {
        return deadLetterUri;
    }

    public void setDeadLetterUri(String deadLetterUri) {
        this.deadLetterUri = deadLetterUri;
    }

    public String getPersistentFileName() {
        return persistentFileName;
    }

    public void setPersistentFileName(String persistentFileName) {
        this.persistentFileName = persistentFileName;
    }

    @Override
    protected void doStart() throws Exception {
        // either we have a LevelDB configured or we use a provided fileName
        if (levelDBFile == null && persistentFileName != null) {
            levelDBFile = new LevelDBFile();
            levelDBFile.setSync(isSync());
            levelDBFile.setFileName(persistentFileName);
        }
        ObjectHelper.notNull(levelDBFile, "Either set a persistentFileName or a levelDBFile");
        ObjectHelper.notNull(repositoryName, "repositoryName");
        ServiceHelper.startService(levelDBFile);
        // log number of existing exchanges
        int current = size(getRepositoryName());
        int completed = size(getRepositoryNameCompleted());
        if (current > 0) {
            LOG.info("On startup there are {} aggregate exchanges (not completed) in repository: {}", current, getRepositoryName());
        } else {
            LOG.info("On startup there are no existing aggregate exchanges (not completed) in repository: {}", getRepositoryName());
        }
        if (completed > 0) {
            LOG.warn("On startup there are {} completed exchanges to be recovered in repository: {}", completed, getRepositoryNameCompleted());
        } else {
            LOG.info("On startup there are no completed exchanges to be recovered in repository: {}", getRepositoryNameCompleted());
        }
    }

    @Override
    protected void doStop() throws Exception {
        ServiceHelper.stopService(levelDBFile);
    }

    /**
     * Builds the full LevelDB key as {@code repo + '\0' + key}, UTF-8 encoded.
     * Uses {@link StandardCharsets#UTF_8} so no checked encoding exception can occur.
     */
    public static byte[] keyBuilder(String repo, String key) {
        return (repo + '\0' + key).getBytes(StandardCharsets.UTF_8);
    }
}
| |
/*
* 3D City Database - The Open Source CityGML Database
* https://www.3dcitydb.org/
*
* Copyright 2013 - 2021
* Chair of Geoinformatics
* Technical University of Munich, Germany
* https://www.lrg.tum.de/gis/
*
* The 3D City Database is jointly developed with the following
* cooperation partners:
*
* Virtual City Systems, Berlin <https://vc.systems/>
* M.O.S.S. Computer Grafik Systeme GmbH, Taufkirchen <http://www.moss.de/>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.citydb.core.operation.exporter.database.content;
import org.citydb.core.database.schema.TableEnum;
import org.citydb.core.database.schema.mapping.FeatureType;
import org.citydb.core.operation.exporter.CityGMLExportException;
import org.citydb.core.operation.exporter.util.AttributeValueSplitter;
import org.citydb.core.operation.exporter.util.DefaultGeometrySetterHandler;
import org.citydb.core.operation.exporter.util.GeometrySetterHandler;
import org.citydb.core.operation.exporter.util.SplitValue;
import org.citydb.core.query.filter.lod.LodFilter;
import org.citydb.core.query.filter.projection.CombinedProjectionFilter;
import org.citydb.core.query.filter.projection.ProjectionFilter;
import org.citydb.sqlbuilder.schema.Table;
import org.citydb.sqlbuilder.select.FetchToken;
import org.citydb.sqlbuilder.select.Select;
import org.citydb.sqlbuilder.select.join.JoinFactory;
import org.citydb.sqlbuilder.select.operator.comparison.ComparisonFactory;
import org.citydb.sqlbuilder.select.operator.comparison.ComparisonName;
import org.citydb.sqlbuilder.select.projection.ColumnExpression;
import org.citygml4j.model.citygml.building.*;
import org.citygml4j.model.citygml.core.AbstractCityObject;
import org.citygml4j.model.citygml.core.AddressProperty;
import org.citygml4j.model.gml.basicTypes.Code;
import org.citygml4j.model.module.citygml.CityGMLModuleType;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.*;
public class DBRoom extends AbstractFeatureExporter<Room> {
// ids queued for batched export — presumably flushed once batchSize is reached; TODO confirm against the executor
private final Set<Long> batches;
// delegate exporters for the room itself and its nested features (installations, surfaces, openings, furniture, addresses)
private final DBSurfaceGeometry geometryExporter;
private final DBCityObject cityObjectExporter;
private final DBBuildingInstallation buildingInstallationExporter;
private final DBThematicSurface thematicSurfaceExporter;
private final DBOpening openingExporter;
private final DBBuildingFurniture buildingFurnitureExporter;
private final DBAddress addressExporter;
// export configuration captured once from the export manager in the constructor
private final int batchSize;
private final String buildingModule;
private final LodFilter lodFilter;
private final AttributeValueSplitter valueSplitter;
// true when the connected 3DCityDB is version >= 4.0.0 (objectclass_id column available)
private final boolean hasObjectClassIdColumn;
private final boolean useXLink;
// ADE (Application Domain Extension) hook tables; NOTE(review): the non-final ones appear to be initialized lazily — confirm
private final List<Table> roomADEHookTables;
private List<Table> surfaceADEHookTables;
private List<Table> openingADEHookTables;
private List<Table> addressADEHookTables;
public DBRoom(Connection connection, CityGMLExportManager exporter) throws CityGMLExportException, SQLException {
super(Room.class, connection, exporter);
batches = new HashSet<>();
batchSize = exporter.getFeatureBatchSize();
cityObjectExporter = exporter.getExporter(DBCityObject.class);
buildingInstallationExporter = exporter.getExporter(DBBuildingInstallation.class);
thematicSurfaceExporter = exporter.getExporter(DBThematicSurface.class);
openingExporter = exporter.getExporter(DBOpening.class);
buildingFurnitureExporter = exporter.getExporter(DBBuildingFurniture.class);
addressExporter = exporter.getExporter(DBAddress.class);
geometryExporter = exporter.getExporter(DBSurfaceGeometry.class);
valueSplitter = exporter.getAttributeValueSplitter();
CombinedProjectionFilter projectionFilter = exporter.getCombinedProjectionFilter(TableEnum.ROOM.getName());
buildingModule = exporter.getTargetCityGMLVersion().getCityGMLModule(CityGMLModuleType.BUILDING).getNamespaceURI();
lodFilter = exporter.getLodFilter();
hasObjectClassIdColumn = exporter.getDatabaseAdapter().getConnectionMetaData().getCityDBVersion().compareTo(4, 0, 0) >= 0;
useXLink = exporter.getInternalConfig().isExportFeatureReferences();
String schema = exporter.getDatabaseAdapter().getConnectionDetails().getSchema();
table = new Table(TableEnum.ROOM.getName(), schema);
select = new Select().addProjection(table.getColumn("id"), table.getColumn("building_id"));
if (hasObjectClassIdColumn) select.addProjection(table.getColumn("objectclass_id"));
if (projectionFilter.containsProperty("class", buildingModule)) select.addProjection(table.getColumn("class"), table.getColumn("class_codespace"));
if (projectionFilter.containsProperty("function", buildingModule)) select.addProjection(table.getColumn("function"), table.getColumn("function_codespace"));
if (projectionFilter.containsProperty("usage", buildingModule)) select.addProjection(table.getColumn("usage"), table.getColumn("usage_codespace"));
if (lodFilter.isEnabled(4)) {
if (projectionFilter.containsProperty("lod4MultiSurface", buildingModule)) select.addProjection(table.getColumn("lod4_multi_surface_id"));
if (projectionFilter.containsProperty("lod4Solid", buildingModule)) select.addProjection(table.getColumn("lod4_solid_id"));
if (projectionFilter.containsProperty("boundedBy", buildingModule)) {
CombinedProjectionFilter boundarySurfaceProjectionFilter = exporter.getCombinedProjectionFilter(TableEnum.THEMATIC_SURFACE.getName());
Table thematicSurface = new Table(TableEnum.THEMATIC_SURFACE.getName(), schema);
thematicSurfaceExporter.addProjection(select, thematicSurface, boundarySurfaceProjectionFilter, "ts")
.addJoin(JoinFactory.left(thematicSurface, "room_id", ComparisonName.EQUAL_TO, table.getColumn("id")));
if (boundarySurfaceProjectionFilter.containsProperty("opening", buildingModule)) {
CombinedProjectionFilter openingProjectionFilter = exporter.getCombinedProjectionFilter(TableEnum.OPENING.getName());
Table opening = new Table(TableEnum.OPENING.getName(), schema);
Table openingToThemSurface = new Table(TableEnum.OPENING_TO_THEM_SURFACE.getName(), schema);
Table cityObject = new Table(TableEnum.CITYOBJECT.getName(), schema);
openingExporter.addProjection(select, opening, openingProjectionFilter, "op")
.addProjection(cityObject.getColumn("gmlid", "opgmlid"))
.addJoin(JoinFactory.left(openingToThemSurface, "thematic_surface_id", ComparisonName.EQUAL_TO, thematicSurface.getColumn("id")))
.addJoin(JoinFactory.left(opening, "id", ComparisonName.EQUAL_TO, openingToThemSurface.getColumn("opening_id")))
.addJoin(JoinFactory.left(cityObject, "id", ComparisonName.EQUAL_TO, opening.getColumn("id")));
if (openingProjectionFilter.containsProperty("address", buildingModule)) {
Table address = new Table(TableEnum.ADDRESS.getName(), schema);
addressExporter.addProjection(select, address, "oa")
.addJoin(JoinFactory.left(address, "id", ComparisonName.EQUAL_TO, opening.getColumn("address_id")));
addressADEHookTables = addJoinsToADEHookTables(TableEnum.ADDRESS, address);
}
openingADEHookTables = addJoinsToADEHookTables(TableEnum.OPENING, opening);
}
surfaceADEHookTables = addJoinsToADEHookTables(TableEnum.THEMATIC_SURFACE, thematicSurface);
}
if (projectionFilter.containsProperty("roomInstallation", buildingModule)) {
Table installation = new Table(TableEnum.BUILDING_INSTALLATION.getName(), schema);
select.addProjection(new ColumnExpression(new Select()
.addProjection(installation.getColumn("id"))
.addSelection(ComparisonFactory.equalTo(installation.getColumn("room_id"), table.getColumn("id")))
.withFetch(new FetchToken(1)), "inid"));
}
if (projectionFilter.containsProperty("interiorFurniture", buildingModule)) {
Table buildingFurniture = new Table(TableEnum.BUILDING_FURNITURE.getName(), schema);
select.addProjection(new ColumnExpression(new Select()
.addProjection(buildingFurniture.getColumn("id"))
.addSelection(ComparisonFactory.equalTo(buildingFurniture.getColumn("room_id"), table.getColumn("id")))
.withFetch(new FetchToken(1)), "bfid"));
}
}
roomADEHookTables = addJoinsToADEHookTables(TableEnum.ROOM, table);
}
private void addBatch(long id, Map<Long, Collection<Room>> rooms) throws CityGMLExportException, SQLException {
batches.add(id);
if (batches.size() == batchSize)
executeBatch(rooms);
}
private void executeBatch(Map<Long, Collection<Room>> rooms) throws CityGMLExportException, SQLException {
if (batches.isEmpty())
return;
try {
PreparedStatement ps;
if (batches.size() == 1) {
ps = getOrCreateStatement("building_id");
ps.setLong(1, batches.iterator().next());
} else {
ps = getOrCreateBulkStatement("building_id", batchSize);
prepareBulkStatement(ps, batches.toArray(new Long[0]), batchSize);
}
try (ResultSet rs = ps.executeQuery()) {
for (Map.Entry<Long, Room> entry : doExport(0, null, null, rs).entrySet()) {
Long buildingId = (Long) entry.getValue().getLocalProperty("building_id");
if (buildingId == null) {
exporter.logOrThrowErrorMessage("Failed to assign room with id " + entry.getKey() + " to a building.");
continue;
}
rooms.computeIfAbsent(buildingId, v -> new ArrayList<>()).add(entry.getValue());
}
}
} finally {
batches.clear();
}
}
protected Collection<Room> doExport(long buildingId) throws CityGMLExportException, SQLException {
return doExport(buildingId, null, null, getOrCreateStatement("building_id"));
}
protected Map<Long, Collection<Room>> doExport(Set<Long> buildingIds) throws CityGMLExportException, SQLException {
if (buildingIds.isEmpty()) {
return Collections.emptyMap();
}
Map<Long, Collection<Room>> rooms = new HashMap<>();
for (Long buildingId : buildingIds) {
addBatch(buildingId, rooms);
}
executeBatch(rooms);
return rooms;
}
@Override
protected Collection<Room> doExport(long id, Room root, FeatureType rootType, PreparedStatement ps) throws CityGMLExportException, SQLException {
ps.setLong(1, id);
try (ResultSet rs = ps.executeQuery()) {
return doExport(id, root, rootType, rs).values();
}
}
private Map<Long, Room> doExport(long id, Room root, FeatureType rootType, ResultSet rs) throws CityGMLExportException, SQLException {
long currentRoomId = 0;
Room room = null;
ProjectionFilter projectionFilter = null;
Map<Long, Room> rooms = new HashMap<>();
Map<Long, GeometrySetterHandler> geometries = new LinkedHashMap<>();
Map<Long, List<String>> adeHookTables = roomADEHookTables != null ? new HashMap<>() : null;
long currentBoundarySurfaceId = 0;
AbstractBoundarySurface boundarySurface = null;
ProjectionFilter boundarySurfaceProjectionFilter = null;
Map<Long, AbstractBoundarySurface> boundarySurfaces = new HashMap<>();
long currentOpeningId = 0;
OpeningProperty openingProperty = null;
ProjectionFilter openingProjectionFilter = null;
Map<String, OpeningProperty> openingProperties = new HashMap<>();
Set<Long> installations = new HashSet<>();
Set<Long> buildingFurnitures = new HashSet<>();
Set<String> addresses = new HashSet<>();
while (rs.next()) {
long roomId = rs.getLong("id");
if (roomId != currentRoomId || room == null) {
currentRoomId = roomId;
room = rooms.get(roomId);
if (room == null) {
FeatureType featureType;
if (roomId == id && root != null) {
room = root;
featureType = rootType;
} else {
if (hasObjectClassIdColumn) {
// create room object
int objectClassId = rs.getInt("objectclass_id");
room = exporter.createObject(objectClassId, Room.class);
if (room == null) {
exporter.logOrThrowErrorMessage("Failed to instantiate " + exporter.getObjectSignature(objectClassId, roomId) + " as room object.");
continue;
}
featureType = exporter.getFeatureType(objectClassId);
} else {
room = new Room();
featureType = exporter.getFeatureType(room);
}
}
// get projection filter
projectionFilter = exporter.getProjectionFilter(featureType);
// export city object information
cityObjectExporter.addBatch(room, roomId, featureType, projectionFilter);
if (projectionFilter.containsProperty("class", buildingModule)) {
String clazz = rs.getString("class");
if (!rs.wasNull()) {
Code code = new Code(clazz);
code.setCodeSpace(rs.getString("class_codespace"));
room.setClazz(code);
}
}
if (projectionFilter.containsProperty("function", buildingModule)) {
for (SplitValue splitValue : valueSplitter.split(rs.getString("function"), rs.getString("function_codespace"))) {
Code function = new Code(splitValue.result(0));
function.setCodeSpace(splitValue.result(1));
room.addFunction(function);
}
}
if (projectionFilter.containsProperty("usage", buildingModule)) {
for (SplitValue splitValue : valueSplitter.split(rs.getString("usage"), rs.getString("usage_codespace"))) {
Code usage = new Code(splitValue.result(0));
usage.setCodeSpace(splitValue.result(1));
room.addUsage(usage);
}
}
if (lodFilter.isEnabled(4)) {
// bldg:lod4MultiSurface
if (projectionFilter.containsProperty("lod4MultiSurface", buildingModule)) {
long geometryId = rs.getLong("lod4_multi_surface_id");
if (!rs.wasNull())
geometries.put(geometryId, new DefaultGeometrySetterHandler(room::setLod4MultiSurface));
}
// bldg:lod4Solid
if (projectionFilter.containsProperty("lod4Solid", buildingModule)) {
long geometryId = rs.getLong("lod4_solid_id");
if (!rs.wasNull())
geometries.put(geometryId, new DefaultGeometrySetterHandler(room::setLod4Solid));
}
}
// bldg:roomInstallation
if (lodFilter.isEnabled(4)
&& projectionFilter.containsProperty("roomInstallation", buildingModule)) {
if (rs.getLong("inid") != 0) {
installations.add(roomId);
}
}
// bldg:interiorFurniture
if (lodFilter.isEnabled(4)
&& projectionFilter.containsProperty("interiorFurniture", buildingModule)) {
if (rs.getLong("bfid") != 0) {
buildingFurnitures.add(roomId);
}
}
// get tables of ADE hook properties
if (roomADEHookTables != null) {
List<String> tables = retrieveADEHookTables(roomADEHookTables, rs);
if (tables != null) {
adeHookTables.put(roomId, tables);
room.setLocalProperty("type", featureType);
}
}
room.setLocalProperty("building_id", rs.getLong("building_id"));
room.setLocalProperty("projection", projectionFilter);
rooms.put(roomId, room);
} else
projectionFilter = (ProjectionFilter) room.getLocalProperty("projection");
}
if (!lodFilter.isEnabled(4)
|| !projectionFilter.containsProperty("boundedBy", buildingModule))
continue;
// bldg:boundedBy
long boundarySurfaceId = rs.getLong("tsid");
if (rs.wasNull())
continue;
if (boundarySurfaceId != currentBoundarySurfaceId || boundarySurface == null) {
currentBoundarySurfaceId = boundarySurfaceId;
currentOpeningId = 0;
boundarySurface = boundarySurfaces.get(boundarySurfaceId);
if (boundarySurface == null) {
int objectClassId = rs.getInt("tsobjectclass_id");
FeatureType featureType = exporter.getFeatureType(objectClassId);
boundarySurface = thematicSurfaceExporter.doExport(boundarySurfaceId, featureType, "ts", surfaceADEHookTables, rs);
if (boundarySurface == null) {
exporter.logOrThrowErrorMessage("Failed to instantiate " + exporter.getObjectSignature(objectClassId, boundarySurfaceId) + " as boundary surface object.");
continue;
}
// get projection filter
boundarySurfaceProjectionFilter = exporter.getProjectionFilter(featureType);
boundarySurface.setLocalProperty("projection", boundarySurfaceProjectionFilter);
room.getBoundedBySurface().add(new BoundarySurfaceProperty(boundarySurface));
boundarySurfaces.put(boundarySurfaceId, boundarySurface);
} else
boundarySurfaceProjectionFilter = (ProjectionFilter) boundarySurface.getLocalProperty("projection");
}
// continue if openings shall not be exported
if (!boundarySurfaceProjectionFilter.containsProperty("opening", buildingModule))
continue;
long openingId = rs.getLong("opid");
if (rs.wasNull())
continue;
if (openingId != currentOpeningId || openingProperty == null) {
currentOpeningId = openingId;
String key = currentBoundarySurfaceId + "_" + openingId;
openingProperty = openingProperties.get(key);
if (openingProperty == null) {
int objectClassId = rs.getInt("opobjectclass_id");
// check whether we need an XLink
String gmlId = rs.getString("opgmlid");
boolean generateNewGmlId = false;
if (!rs.wasNull()) {
if (exporter.lookupAndPutObjectId(gmlId, openingId, objectClassId)) {
if (useXLink) {
openingProperty = new OpeningProperty();
openingProperty.setHref("#" + gmlId);
boundarySurface.addOpening(openingProperty);
openingProperties.put(key, openingProperty);
continue;
} else
generateNewGmlId = true;
}
}
// create new opening object
FeatureType featureType = exporter.getFeatureType(objectClassId);
AbstractOpening opening = openingExporter.doExport(openingId, featureType, "op", openingADEHookTables, rs);
if (opening == null) {
exporter.logOrThrowErrorMessage("Failed to instantiate " + exporter.getObjectSignature(objectClassId, openingId) + " as opening object.");
continue;
}
if (generateNewGmlId)
opening.setId(exporter.generateFeatureGmlId(opening, gmlId));
// get projection filter
openingProjectionFilter = exporter.getProjectionFilter(featureType);
opening.setLocalProperty("projection", openingProjectionFilter);
openingProperty = new OpeningProperty(opening);
boundarySurface.getOpening().add(openingProperty);
openingProperties.put(key, openingProperty);
} else if (openingProperty.isSetOpening())
openingProjectionFilter = (ProjectionFilter) openingProperty.getOpening().getLocalProperty("projection");
}
if (openingProperty.getOpening() instanceof Door
&& (openingProjectionFilter == null
|| openingProjectionFilter.containsProperty("address", buildingModule))) {
long addressId = rs.getLong("oaid");
if (!rs.wasNull() && addresses.add(currentOpeningId + "_" + addressId)) {
AddressProperty addressProperty = addressExporter.doExport(addressId, "oa", addressADEHookTables, rs);
if (addressProperty != null) {
Door door = (Door) openingProperty.getOpening();
door.addAddress(addressProperty);
}
}
}
}
// export installations
for (Map.Entry<Long, Collection<AbstractCityObject>> entry : buildingInstallationExporter.doExportForRooms(installations).entrySet()) {
room = rooms.get(entry.getKey());
if (room != null) {
for (AbstractCityObject installation : entry.getValue()) {
if (installation instanceof IntBuildingInstallation) {
room.addRoomInstallation(new IntBuildingInstallationProperty((IntBuildingInstallation) installation));
}
}
}
}
// export furniture
for (Map.Entry<Long, Collection<BuildingFurniture>> entry : buildingFurnitureExporter.doExport(buildingFurnitures).entrySet()) {
room = rooms.get(entry.getKey());
if (room != null) {
for (BuildingFurniture buildingFurniture : entry.getValue()) {
room.addInteriorFurniture(new InteriorFurnitureProperty(buildingFurniture));
}
}
}
// export postponed geometries
for (Map.Entry<Long, GeometrySetterHandler> entry : geometries.entrySet())
geometryExporter.addBatch(entry.getKey(), entry.getValue());
// delegate export of generic ADE properties
if (adeHookTables != null) {
for (Map.Entry<Long, List<String>> entry : adeHookTables.entrySet()) {
long roomId = entry.getKey();
room = rooms.get(roomId);
exporter.delegateToADEExporter(entry.getValue(), room, roomId,
(FeatureType) room.getLocalProperty("type"),
(ProjectionFilter) room.getLocalProperty("projection"));
}
}
return rooms;
}
}
| |
/*
* Copyright (C) 2013 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.android.printspooler.ui;
import android.annotation.NonNull;
import android.annotation.Nullable;
import android.app.Activity;
import android.app.LoaderManager;
import android.content.ComponentName;
import android.content.Context;
import android.content.Loader;
import android.content.pm.ServiceInfo;
import android.location.Criteria;
import android.location.Location;
import android.location.LocationListener;
import android.location.LocationManager;
import android.location.LocationRequest;
import android.os.AsyncTask;
import android.os.Bundle;
import android.os.Handler;
import android.os.Looper;
import android.os.SystemClock;
import android.print.PrintManager;
import android.print.PrintServicesLoader;
import android.print.PrinterDiscoverySession;
import android.print.PrinterDiscoverySession.OnPrintersChangeListener;
import android.print.PrinterId;
import android.print.PrinterInfo;
import android.printservice.PrintServiceInfo;
import android.util.ArrayMap;
import android.util.ArraySet;
import android.util.AtomicFile;
import android.util.Log;
import android.util.Pair;
import android.util.Slog;
import android.util.Xml;
import com.android.internal.util.FastXmlSerializer;
import libcore.io.IoUtils;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import org.xmlpull.v1.XmlSerializer;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
/**
* This class is responsible for loading printers by doing discovery
* and merging the discovered printers with the previously used ones.
*/
public final class FusedPrintersProvider extends Loader<List<PrinterInfo>>
implements LocationListener {
private static final String LOG_TAG = "FusedPrintersProvider";
private static final boolean DEBUG = false;
// NOTE(review): used by the printer-history ranking; not referenced in this excerpt — confirm in PersistenceManager
private static final double WEIGHT_DECAY_COEFFICIENT = 0.95f;
// NOTE(review): cap on persisted history entries; not referenced in this excerpt — confirm in PersistenceManager
private static final int MAX_HISTORY_LENGTH = 50;
// maximum number of favorite printers placed at the head of the delivered list
private static final int MAX_FAVORITE_PRINTER_COUNT = 4;
/** Interval of location updates in ms */
private static final int LOCATION_UPDATE_MS = 30 * 1000;
/** Maximum acceptable age of the location in ms */
private static final int MAX_LOCATION_AGE_MS = 10 * 60 * 1000;
/** The worst accuracy that is considered usable in m */
private static final int MIN_LOCATION_ACCURACY = 50;
/** Maximum distance where a printer is still considered "near" */
private static final int MAX_PRINTER_DISTANCE = MIN_LOCATION_ACCURACY * 2;
// printers most recently delivered to clients
private final List<PrinterInfo> mPrinters =
        new ArrayList<>();
// favorite printers paired with the location they were last used at (may be null)
private final List<Pair<PrinterInfo, Location>> mFavoritePrinters =
        new ArrayList<>();
private final PersistenceManager mPersistenceManager;
// active discovery session; created lazily in loadInternal()
private PrinterDiscoverySession mDiscoverySession;
// printer currently tracked for detailed state updates, if any
private PrinterId mTrackedPrinter;
private boolean mPrintersUpdatedBefore;
/** Last known location, can be null or out of date; guarded by mLocationLock */
private final Object mLocationLock;
private Location mLocation;
/** Location used when the printers were updated the last time */
private Location mLocationOfLastPrinterUpdate;
/** Reference to the system's location manager */
private final LocationManager mLocationManager;
/**
 * Get a reference to the current location.
 *
 * @return the last accepted location, or null if none was received yet
 */
private Location getCurrentLocation() {
    synchronized (mLocationLock) {
        return mLocation;
    }
}
/**
 * Creates the loader.
 *
 * @param activity         the activity hosting the loader; used for system services
 *                         and as context for the persistence manager
 * @param internalLoaderId loader id used internally by the persistence manager
 */
public FusedPrintersProvider(Activity activity, int internalLoaderId) {
    super(activity);
    mLocationLock = new Object();
    mPersistenceManager = new PersistenceManager(activity, internalLoaderId);
    mLocationManager = (LocationManager) activity.getSystemService(Context.LOCATION_SERVICE);
}
/**
 * Adds a printer to the usage history and persists the updated history.
 *
 * @param printer the printer that was just used
 */
public void addHistoricalPrinter(PrinterInfo printer) {
    mPersistenceManager.addPrinterAndWritePrinterHistory(printer);
}
/**
 * Adds a printer to {@code dest}, preferring the freshly discovered version of the
 * printer if one exists in {@code updatedPrinters}. The used entry is removed from
 * {@code updatedPrinters} so it is not added again later.
 *
 * @param dest            the list the printer should be added to
 * @param printer         the (possibly stale) printer to add
 * @param updatedPrinters freshly discovered printers, keyed by printer id
 */
private void updateAndAddPrinter(List<PrinterInfo> dest, PrinterInfo printer,
        Map<PrinterId, PrinterInfo> updatedPrinters) {
    PrinterInfo freshPrinter = updatedPrinters.remove(printer.getId());
    dest.add(freshPrinter != null ? freshPrinter : printer);
}
/**
 * Compute the printers, order them appropriately and deliver the printers to the clients. We
 * prefer printers that have been previously used (favorites) and printers that have been used
 * previously close to the current location (near printers).
 * <p>
 * Resulting order: near favorites, other favorites (both capped at
 * {@link #MAX_FAVORITE_PRINTER_COUNT} total), previously delivered printers that were
 * rediscovered, then the remaining newly discovered printers.
 *
 * @param discoveredPrinters All printers currently discovered by the print discovery session.
 * @param favoritePrinters The ordered list of printers. The earlier in the list, the more
 *            preferred.
 */
private void computeAndDeliverResult(Map<PrinterId, PrinterInfo> discoveredPrinters,
        List<Pair<PrinterInfo, Location>> favoritePrinters) {
    List<PrinterInfo> printers = new ArrayList<>();
    // Store the printerIds that have already been added. We cannot compare the printerInfos in
    // "printers" as they might have been taken from discoveredPrinters and the printerInfo does
    // not equals() anymore
    HashSet<PrinterId> alreadyAddedPrinter = new HashSet<>(MAX_FAVORITE_PRINTER_COUNT);
    Location location = getCurrentLocation();
    // Add the favorite printers that have last been used close to the current location
    final int favoritePrinterCount = favoritePrinters.size();
    if (location != null) {
        for (int i = 0; i < favoritePrinterCount; i++) {
            // Only add a certain amount of favorite printers
            if (printers.size() == MAX_FAVORITE_PRINTER_COUNT) {
                break;
            }
            PrinterInfo favoritePrinter = favoritePrinters.get(i).first;
            Location printerLocation = favoritePrinters.get(i).second;
            if (printerLocation != null
                    && !alreadyAddedPrinter.contains(favoritePrinter.getId())) {
                if (printerLocation.distanceTo(location) <= MAX_PRINTER_DISTANCE) {
                    updateAndAddPrinter(printers, favoritePrinter, discoveredPrinters);
                    alreadyAddedPrinter.add(favoritePrinter.getId());
                }
            }
        }
    }
    // Add the other favorite printers
    for (int i = 0; i < favoritePrinterCount; i++) {
        // Only add a certain amount of favorite printers
        if (printers.size() == MAX_FAVORITE_PRINTER_COUNT) {
            break;
        }
        PrinterInfo favoritePrinter = favoritePrinters.get(i).first;
        if (!alreadyAddedPrinter.contains(favoritePrinter.getId())) {
            updateAndAddPrinter(printers, favoritePrinter, discoveredPrinters);
            alreadyAddedPrinter.add(favoritePrinter.getId());
        }
    }
    // Add other updated printers. Printers that have already been added have been removed from
    // discoveredPrinters in the calls to updateAndAddPrinter
    final int printerCount = mPrinters.size();
    for (int i = 0; i < printerCount; i++) {
        PrinterInfo printer = mPrinters.get(i);
        PrinterInfo updatedPrinter = discoveredPrinters.remove(
                printer.getId());
        if (updatedPrinter != null) {
            printers.add(updatedPrinter);
        }
    }
    // Add the new printers, i.e. what is left.
    printers.addAll(discoveredPrinters.values());
    // Update the list of printers.
    mPrinters.clear();
    mPrinters.addAll(printers);
    if (isStarted()) {
        // If started deliver the new printers.
        deliverResult(printers);
    } else {
        // Otherwise, take a note for the change.
        onContentChanged();
    }
}
/**
 * Starts loading: subscribes to low-power periodic location updates (plus one
 * fine-accuracy one-shot fix), immediately redelivers the cached printers, and
 * forces a load to (re)start printer discovery.
 */
@Override
protected void onStartLoading() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onStartLoading() " + FusedPrintersProvider.this.hashCode());
    }
    mLocationManager.requestLocationUpdates(LocationRequest.create()
            .setQuality(LocationRequest.POWER_LOW).setInterval(LOCATION_UPDATE_MS), this,
            Looper.getMainLooper());
    Location lastLocation = mLocationManager.getLastLocation();
    if (lastLocation != null) {
        onLocationChanged(lastLocation);
    }
    // Jumpstart location with a single forced update
    Criteria oneTimeCriteria = new Criteria();
    oneTimeCriteria.setAccuracy(Criteria.ACCURACY_FINE);
    mLocationManager.requestSingleUpdate(oneTimeCriteria, this, Looper.getMainLooper());
    // The contract is that if we already have a valid,
    // result the we have to deliver it immediately.
    (new Handler(Looper.getMainLooper())).post(new Runnable() {
        @Override public void run() {
            deliverResult(new ArrayList<>(mPrinters));
        }
    });
    // Always load the data to ensure discovery period is
    // started and to make sure obsolete printers are updated.
    onForceLoad();
}
/**
 * Stops loading: cancels any in-flight load and unsubscribes from location updates.
 */
@Override
protected void onStopLoading() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onStopLoading() " + FusedPrintersProvider.this.hashCode());
    }
    onCancelLoad();
    mLocationManager.removeUpdates(this);
}
/**
 * Forces a (re)load of the printers; see {@link #loadInternal()}.
 */
@Override
protected void onForceLoad() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onForceLoad() " + FusedPrintersProvider.this.hashCode());
    }
    loadInternal();
}
/**
 * Performs the actual load. On first call this creates the printer discovery
 * session and kicks off reading the printer history; on later calls it re-reads
 * the history if it changed. Once the history read has completed and discovery
 * is not yet running, discovery is started with the favorite printer ids so
 * those are resolved first, and the current printers are delivered.
 */
private void loadInternal() {
    if (mDiscoverySession == null) {
        PrintManager printManager = (PrintManager) getContext()
                .getSystemService(Context.PRINT_SERVICE);
        mDiscoverySession = printManager.createPrinterDiscoverySession();
        mPersistenceManager.readPrinterHistory();
    } else if (mPersistenceManager.isHistoryChanged()) {
        mPersistenceManager.readPrinterHistory();
    }
    if (mPersistenceManager.isReadHistoryCompleted()
            && !mDiscoverySession.isPrinterDiscoveryStarted()) {
        mDiscoverySession.setOnPrintersChangeListener(new OnPrintersChangeListener() {
            @Override
            public void onPrintersChanged() {
                if (DEBUG) {
                    Log.i(LOG_TAG, "onPrintersChanged() count:"
                            + mDiscoverySession.getPrinters().size()
                            + " " + FusedPrintersProvider.this.hashCode());
                }
                updatePrinters(mDiscoverySession.getPrinters(), mFavoritePrinters,
                        getCurrentLocation());
            }
        });
        // seed discovery with the favorites so they are found/verified first
        final int favoriteCount = mFavoritePrinters.size();
        List<PrinterId> printerIds = new ArrayList<>(favoriteCount);
        for (int i = 0; i < favoriteCount; i++) {
            printerIds.add(mFavoritePrinters.get(i).first.getId());
        }
        mDiscoverySession.startPrinterDiscovery(printerIds);
        List<PrinterInfo> printers = mDiscoverySession.getPrinters();
        updatePrinters(printers, mFavoritePrinters, getCurrentLocation());
    }
}
/**
 * Recomputes and delivers the printer list from the given discovered printers,
 * favorites, and location. Short-circuits when nothing changed since the last
 * update to avoid redundant deliveries.
 *
 * @param printers         printers currently reported by the discovery session
 * @param favoritePrinters ordered favorites with their last-use location
 * @param location         current location, may be null
 */
private void updatePrinters(List<PrinterInfo> printers,
        List<Pair<PrinterInfo, Location>> favoritePrinters,
        Location location) {
    if (mPrintersUpdatedBefore && mPrinters.equals(printers)
            && mFavoritePrinters.equals(favoritePrinters)
            && Objects.equals(mLocationOfLastPrinterUpdate, location)) {
        return;
    }
    mLocationOfLastPrinterUpdate = location;
    mPrintersUpdatedBefore = true;
    // Some of the found printers may have be a printer that is in the
    // history but with its properties changed. Hence, we try to update the
    // printer to use its current properties instead of the historical one.
    mPersistenceManager.updateHistoricalPrintersIfNeeded(printers);
    Map<PrinterId, PrinterInfo> printersMap = new LinkedHashMap<>();
    final int printerCount = printers.size();
    for (int i = 0; i < printerCount; i++) {
        PrinterInfo printer = printers.get(i);
        printersMap.put(printer.getId(), printer);
    }
    computeAndDeliverResult(printersMap, favoritePrinters);
}
/**
 * Cancels the current load; see {@link #cancelInternal()}.
 *
 * @return true if an in-flight operation was actually cancelled
 */
@Override
protected boolean onCancelLoad() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onCancelLoad() " + FusedPrintersProvider.this.hashCode());
    }
    return cancelInternal();
}
/**
 * Stops whichever load phase is active: printer discovery (including any
 * per-printer state tracking) if it is running, otherwise an in-progress
 * history read.
 *
 * @return true if something was cancelled
 */
private boolean cancelInternal() {
    if (mDiscoverySession != null
            && mDiscoverySession.isPrinterDiscoveryStarted()) {
        if (mTrackedPrinter != null) {
            mDiscoverySession.stopPrinterStateTracking(mTrackedPrinter);
            mTrackedPrinter = null;
        }
        mDiscoverySession.stopPrinterDiscovery();
        return true;
    } else if (mPersistenceManager.isReadHistoryInProgress()) {
        return mPersistenceManager.stopReadPrinterHistory();
    }
    return false;
}
/**
 * Resets the loader: stops loading, drops the cached printers, and destroys
 * the printer discovery session so all resources are released.
 */
@Override
protected void onReset() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onReset() " + FusedPrintersProvider.this.hashCode());
    }
    onStopLoading();
    mPrinters.clear();
    if (mDiscoverySession != null) {
        mDiscoverySession.destroy();
        // Drop the reference so a later loadInternal() creates a fresh session
        // instead of trying to reuse the destroyed one.
        mDiscoverySession = null;
    }
}
/**
 * Called when the loader is abandoned; stop loading but keep cached data.
 */
@Override
protected void onAbandon() {
    if (DEBUG) {
        Log.i(LOG_TAG, "onAbandon() " + FusedPrintersProvider.this.hashCode());
    }
    onStopLoading();
}
/**
 * Check if the location is acceptable. This is to filter out excessively old or inaccurate
 * location updates.
 *
 * @param location the location to check
 * @return true iff the location is usable: non-null, at most MAX_LOCATION_AGE_MS old,
 *         and with a reported accuracy better than MIN_LOCATION_ACCURACY meters
 */
private boolean isLocationAcceptable(Location location) {
    return location != null
            // elapsedRealtimeNanos is compared so wall-clock changes do not matter
            && location.getElapsedRealtimeNanos() > SystemClock.elapsedRealtimeNanos()
                    - MAX_LOCATION_AGE_MS * 1000_000L
            && location.hasAccuracy()
            && location.getAccuracy() < MIN_LOCATION_ACCURACY;
}
/**
 * Accepts a new location fix and, if the historical printers are already loaded,
 * triggers a printer update with it. A fix is taken only when it is acceptable
 * (see {@link #isLocationAcceptable}), differs from the cached one, and is
 * either clearly newer or more accurate than the cached one.
 */
@Override
public void onLocationChanged(Location location) {
    synchronized(mLocationLock) {
        // We expect the user to not move too fast while printing. Hence prefer more accurate
        // updates over more recent ones for LOCATION_UPDATE_MS. We add a 10% fudge factor here
        // as the location provider might send an update slightly too early.
        if (isLocationAcceptable(location)
                && !location.equals(mLocation)
                && (mLocation == null
                        || location
                                .getElapsedRealtimeNanos() > mLocation.getElapsedRealtimeNanos()
                                        + LOCATION_UPDATE_MS * 0.9 * 1000_000L
                        || (!mLocation.hasAccuracy()
                                || location.getAccuracy() < mLocation.getAccuracy()))) {
            // Other callers of updatePrinters might want to know the location, hence cache it
            mLocation = location;
            if (areHistoricalPrintersLoaded()) {
                updatePrinters(mDiscoverySession.getPrinters(), mFavoritePrinters, mLocation);
            }
        }
    }
}
// Remaining LocationListener callbacks: only location fixes are of interest here,
// so provider status changes are deliberately ignored.
@Override
public void onStatusChanged(String provider, int status, Bundle extras) {
    // nothing to do
}
@Override
public void onProviderEnabled(String provider) {
    // nothing to do
}
@Override
public void onProviderDisabled(String provider) {
    // nothing to do
}
/**
 * @return true once the persisted printer history has been fully read
 */
public boolean areHistoricalPrintersLoaded() {
    return mPersistenceManager.mReadHistoryCompleted;
}
/**
 * Switches detailed state tracking to the given printer. Tracking of the previously
 * tracked printer is stopped first; passing null stops tracking entirely. No-op
 * unless the loader is started and discovery is running.
 *
 * @param printerId the printer to track, or null to track none
 */
public void setTrackedPrinter(@Nullable PrinterId printerId) {
    if (isStarted() && mDiscoverySession != null
            && mDiscoverySession.isPrinterDiscoveryStarted()) {
        if (mTrackedPrinter != null) {
            if (mTrackedPrinter.equals(printerId)) {
                return;
            }
            mDiscoverySession.stopPrinterStateTracking(mTrackedPrinter);
        }
        mTrackedPrinter = printerId;
        if (printerId != null) {
            mDiscoverySession.startPrinterStateTracking(printerId);
        }
    }
}
/**
 * Checks whether the given printer is one of the favorite printers.
 *
 * @param printerId the id of the printer to look up
 * @return true if a favorite printer with this id exists
 */
public boolean isFavoritePrinter(PrinterId printerId) {
    for (Pair<PrinterInfo, Location> favorite : mFavoritePrinters) {
        if (favorite.first.getId().equals(printerId)) {
            return true;
        }
    }
    return false;
}
/**
 * Removes a printer from the favorites and from the persisted history, then
 * recomputes and redelivers the printer list.
 *
 * @param printerId the id of the printer to forget
 */
public void forgetFavoritePrinter(PrinterId printerId) {
    final int favoritePrinterCount = mFavoritePrinters.size();
    // Guard the initial capacity: with no favorites, favoritePrinterCount - 1
    // would be -1 and new ArrayList<>(-1) throws IllegalArgumentException.
    List<Pair<PrinterInfo, Location>> newFavoritePrinters = new ArrayList<>(
            Math.max(favoritePrinterCount - 1, 0));
    // Remove the printer from the favorites.
    for (int i = 0; i < favoritePrinterCount; i++) {
        if (!mFavoritePrinters.get(i).first.getId().equals(printerId)) {
            newFavoritePrinters.add(mFavoritePrinters.get(i));
        }
    }
    // Remove the printer from history and persist the latter.
    mPersistenceManager.removeHistoricalPrinterAndWritePrinterHistory(printerId);
    // Recompute and deliver the printers.
    // NOTE(review): assumes mDiscoverySession is non-null here — callers appear to
    // invoke this only while discovery is active; confirm against the UI call sites.
    updatePrinters(mDiscoverySession.getPrinters(), newFavoritePrinters, getCurrentLocation());
}
private final class PersistenceManager implements
LoaderManager.LoaderCallbacks<List<PrintServiceInfo>> {
private static final String PERSIST_FILE_NAME = "printer_history.xml";
// XML tag and attribute names of the persisted printer history file
private static final String TAG_PRINTERS = "printers";
private static final String TAG_PRINTER = "printer";
private static final String TAG_LOCATION = "location";
private static final String TAG_PRINTER_ID = "printerId";
private static final String ATTR_LOCAL_ID = "localId";
private static final String ATTR_SERVICE_NAME = "serviceName";
private static final String ATTR_LONGITUDE = "longitude";
private static final String ATTR_LATITUDE = "latitude";
private static final String ATTR_ACCURACY = "accuracy";
private static final String ATTR_NAME = "name";
private static final String ATTR_DESCRIPTION = "description";
// atomically written backing file for the printer history
private final AtomicFile mStatePersistFile;
/**
 * Whether the enabled print services have been updated since last time the history was
 * read.
 */
private boolean mAreEnabledServicesUpdated;
/** The enabled services read when they were last updated */
private @NonNull List<PrintServiceInfo> mEnabledServices;
// printers read from the history file with the location they were used at
// NOTE(review): presumably ordered by preference; confirm in the read task (not in this excerpt)
private List<Pair<PrinterInfo, Location>> mHistoricalPrinters = new ArrayList<>();
private boolean mReadHistoryCompleted;
private ReadTask mReadTask;
private volatile long mLastReadHistoryTimestamp;
private PersistenceManager(final Activity activity, final int internalLoaderId) {
mStatePersistFile = new AtomicFile(new File(activity.getFilesDir(),
PERSIST_FILE_NAME));
// Initialize enabled services to make sure they are set are the read task might be done
// before the loader updated the services the first time.
mEnabledServices = ((PrintManager) activity
.getSystemService(Context.PRINT_SERVICE))
.getPrintServices(PrintManager.ENABLED_SERVICES);
mAreEnabledServicesUpdated = true;
// Cannot start a loader while starting another, hence delay this loader
(new Handler(activity.getMainLooper())).post(new Runnable() {
@Override
public void run() {
activity.getLoaderManager().initLoader(internalLoaderId, null,
PersistenceManager.this);
}
});
}
@Override
public Loader<List<PrintServiceInfo>> onCreateLoader(int id, Bundle args) {
return new PrintServicesLoader(
(PrintManager) getContext().getSystemService(Context.PRINT_SERVICE),
getContext(), PrintManager.ENABLED_SERVICES);
}
@Override
public void onLoadFinished(Loader<List<PrintServiceInfo>> loader,
List<PrintServiceInfo> services) {
mAreEnabledServicesUpdated = true;
mEnabledServices = services;
// Ask the fused printer provider to reload which will cause the persistence manager to
// reload the history and reconsider the enabled services.
if (isStarted()) {
forceLoad();
}
}
@Override
public void onLoaderReset(Loader<List<PrintServiceInfo>> loader) {
// no data is cached
}
public boolean isReadHistoryInProgress() {
return mReadTask != null;
}
public boolean isReadHistoryCompleted() {
return mReadHistoryCompleted;
}
public boolean stopReadPrinterHistory() {
return mReadTask.cancel(true);
}
public void readPrinterHistory() {
if (DEBUG) {
Log.i(LOG_TAG, "read history started "
+ FusedPrintersProvider.this.hashCode());
}
mReadTask = new ReadTask();
mReadTask.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR, (Void[]) null);
}
public void updateHistoricalPrintersIfNeeded(List<PrinterInfo> printers) {
boolean writeHistory = false;
final int printerCount = printers.size();
for (int i = 0; i < printerCount; i++) {
PrinterInfo printer = printers.get(i);
writeHistory |= updateHistoricalPrinterIfNeeded(printer);
}
if (writeHistory) {
writePrinterHistory();
}
}
/**
* Updates the historical printer state with the given printer.
*
* @param printer the printer to update
*
* @return true iff the historical printer list needs to be updated
*/
public boolean updateHistoricalPrinterIfNeeded(PrinterInfo printer) {
boolean writeHistory = false;
final int printerCount = mHistoricalPrinters.size();
for (int i = 0; i < printerCount; i++) {
PrinterInfo historicalPrinter = mHistoricalPrinters.get(i).first;
if (!historicalPrinter.getId().equals(printer.getId())) {
continue;
}
// Overwrite the historical printer with the updated printer as some properties
// changed. We ignore the status as this is a volatile state.
if (historicalPrinter.equalsIgnoringStatus(printer)) {
continue;
}
mHistoricalPrinters.set(i, new Pair<PrinterInfo, Location>(printer,
mHistoricalPrinters.get(i).second));
// We only persist limited information in the printer history, hence check if
// we need to persist the update.
// @see PersistenceManager.WriteTask#doWritePrinterHistory
if (!historicalPrinter.getName().equals(printer.getName())) {
if (Objects.equals(historicalPrinter.getDescription(),
printer.getDescription())) {
writeHistory = true;
}
}
}
return writeHistory;
}
public void addPrinterAndWritePrinterHistory(PrinterInfo printer) {
if (mHistoricalPrinters.size() >= MAX_HISTORY_LENGTH) {
mHistoricalPrinters.remove(0);
}
Location location = getCurrentLocation();
if (!isLocationAcceptable(location)) {
location = null;
}
mHistoricalPrinters.add(new Pair<PrinterInfo, Location>(printer, location));
writePrinterHistory();
}
public void removeHistoricalPrinterAndWritePrinterHistory(PrinterId printerId) {
boolean writeHistory = false;
final int printerCount = mHistoricalPrinters.size();
for (int i = printerCount - 1; i >= 0; i--) {
PrinterInfo historicalPrinter = mHistoricalPrinters.get(i).first;
if (historicalPrinter.getId().equals(printerId)) {
mHistoricalPrinters.remove(i);
writeHistory = true;
}
}
if (writeHistory) {
writePrinterHistory();
}
}
@SuppressWarnings("unchecked")
private void writePrinterHistory() {
new WriteTask().executeOnExecutor(AsyncTask.SERIAL_EXECUTOR,
new ArrayList<>(mHistoricalPrinters));
}
public boolean isHistoryChanged() {
return mAreEnabledServicesUpdated ||
mLastReadHistoryTimestamp != mStatePersistFile.getBaseFile().lastModified();
}
/**
* Sort the favorite printers by weight. If a printer is in the list multiple times for
* different locations, all instances are considered to have the accumulative weight. The
* actual favorite printers to display are computed in {@link #computeAndDeliverResult} as
* only at this time we know the location to use to determine if a printer is close enough
* to be preferred.
*
* @param printers The printers to sort.
* @return The sorted printers.
*/
private List<Pair<PrinterInfo, Location>> sortFavoritePrinters(
List<Pair<PrinterInfo, Location>> printers) {
Map<PrinterId, PrinterRecord> recordMap = new ArrayMap<>();
// Compute the weights.
float currentWeight = 1.0f;
final int printerCount = printers.size();
for (int i = printerCount - 1; i >= 0; i--) {
PrinterId printerId = printers.get(i).first.getId();
PrinterRecord record = recordMap.get(printerId);
if (record == null) {
record = new PrinterRecord();
recordMap.put(printerId, record);
}
record.printers.add(printers.get(i));
// Aggregate weight for the same printer
record.weight += currentWeight;
currentWeight *= WEIGHT_DECAY_COEFFICIENT;
}
// Sort the favorite printers.
List<PrinterRecord> favoriteRecords = new ArrayList<>(
recordMap.values());
Collections.sort(favoriteRecords);
// Write the favorites to the output.
final int recordCount = favoriteRecords.size();
List<Pair<PrinterInfo, Location>> favoritePrinters = new ArrayList<>(printerCount);
for (int i = 0; i < recordCount; i++) {
favoritePrinters.addAll(favoriteRecords.get(i).printers);
}
return favoritePrinters;
}
/**
* A set of printers with the same ID and the weight associated with them during
* {@link #sortFavoritePrinters}.
*/
private final class PrinterRecord implements Comparable<PrinterRecord> {
/**
* The printers, all with the same ID, but potentially different properties or locations
*/
public final List<Pair<PrinterInfo, Location>> printers;
/** The weight associated with the printers */
public float weight;
/**
* Create a new record.
*/
public PrinterRecord() {
printers = new ArrayList<>();
}
/**
* Compare two records by weight.
*/
@Override
public int compareTo(PrinterRecord another) {
return Float.floatToIntBits(another.weight) - Float.floatToIntBits(weight);
}
}
private final class ReadTask
extends AsyncTask<Void, Void, List<Pair<PrinterInfo, Location>>> {
@Override
protected List<Pair<PrinterInfo, Location>> doInBackground(Void... args) {
return doReadPrinterHistory();
}
@Override
protected void onPostExecute(List<Pair<PrinterInfo, Location>> printers) {
if (DEBUG) {
Log.i(LOG_TAG, "read history completed "
+ FusedPrintersProvider.this.hashCode());
}
// Ignore printer records whose target services are not enabled.
Set<ComponentName> enabledComponents = new ArraySet<>();
final int installedServiceCount = mEnabledServices.size();
for (int i = 0; i < installedServiceCount; i++) {
ServiceInfo serviceInfo = mEnabledServices.get(i).getResolveInfo().serviceInfo;
ComponentName componentName = new ComponentName(
serviceInfo.packageName, serviceInfo.name);
enabledComponents.add(componentName);
}
mAreEnabledServicesUpdated = false;
final int printerCount = printers.size();
for (int i = printerCount - 1; i >= 0; i--) {
ComponentName printerServiceName = printers.get(i).first.getId()
.getServiceName();
if (!enabledComponents.contains(printerServiceName)) {
printers.remove(i);
}
}
// Store the filtered list.
mHistoricalPrinters = printers;
// Compute the favorite printers.
mFavoritePrinters.clear();
mFavoritePrinters.addAll(sortFavoritePrinters(mHistoricalPrinters));
mReadHistoryCompleted = true;
// Deliver the printers.
updatePrinters(mDiscoverySession.getPrinters(), mFavoritePrinters,
getCurrentLocation());
// We are done.
mReadTask = null;
// Loading the available printers if needed.
loadInternal();
}
@Override
protected void onCancelled(List<Pair<PrinterInfo, Location>> printerInfos) {
// We are done.
mReadTask = null;
}
private List<Pair<PrinterInfo, Location>> doReadPrinterHistory() {
final FileInputStream in;
try {
in = mStatePersistFile.openRead();
} catch (FileNotFoundException fnfe) {
if (DEBUG) {
Log.i(LOG_TAG, "No existing printer history "
+ FusedPrintersProvider.this.hashCode());
}
return new ArrayList<>();
}
try {
List<Pair<PrinterInfo, Location>> printers = new ArrayList<>();
XmlPullParser parser = Xml.newPullParser();
parser.setInput(in, StandardCharsets.UTF_8.name());
parseState(parser, printers);
// Take a note which version of the history was read.
mLastReadHistoryTimestamp = mStatePersistFile.getBaseFile().lastModified();
return printers;
} catch (IllegalStateException
| NullPointerException
| NumberFormatException
| XmlPullParserException
| IOException
| IndexOutOfBoundsException e) {
Slog.w(LOG_TAG, "Failed parsing ", e);
} finally {
IoUtils.closeQuietly(in);
}
return Collections.emptyList();
}
private void parseState(XmlPullParser parser,
List<Pair<PrinterInfo, Location>> outPrinters)
throws IOException, XmlPullParserException {
parser.next();
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.START_TAG, TAG_PRINTERS);
parser.next();
while (parsePrinter(parser, outPrinters)) {
// Be nice and respond to cancellation
if (isCancelled()) {
return;
}
parser.next();
}
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.END_TAG, TAG_PRINTERS);
}
private boolean parsePrinter(XmlPullParser parser,
List<Pair<PrinterInfo, Location>> outPrinters)
throws IOException, XmlPullParserException {
skipEmptyTextTags(parser);
if (!accept(parser, XmlPullParser.START_TAG, TAG_PRINTER)) {
return false;
}
String name = parser.getAttributeValue(null, ATTR_NAME);
String description = parser.getAttributeValue(null, ATTR_DESCRIPTION);
parser.next();
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.START_TAG, TAG_PRINTER_ID);
String localId = parser.getAttributeValue(null, ATTR_LOCAL_ID);
ComponentName service = ComponentName.unflattenFromString(parser.getAttributeValue(
null, ATTR_SERVICE_NAME));
PrinterId printerId = new PrinterId(service, localId);
parser.next();
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.END_TAG, TAG_PRINTER_ID);
parser.next();
skipEmptyTextTags(parser);
Location location;
if (accept(parser, XmlPullParser.START_TAG, TAG_LOCATION)) {
location = new Location("");
location.setLongitude(
Double.parseDouble(parser.getAttributeValue(null, ATTR_LONGITUDE)));
location.setLatitude(
Double.parseDouble(parser.getAttributeValue(null, ATTR_LATITUDE)));
location.setAccuracy(
Float.parseFloat(parser.getAttributeValue(null, ATTR_ACCURACY)));
parser.next();
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.END_TAG, TAG_LOCATION);
parser.next();
} else {
location = null;
}
// If the printer is available the printer will be replaced by the one read from the
// discovery session, hence the only time when this object is used is when the
// printer is unavailable.
PrinterInfo.Builder builder = new PrinterInfo.Builder(printerId, name,
PrinterInfo.STATUS_UNAVAILABLE);
builder.setDescription(description);
PrinterInfo printer = builder.build();
outPrinters.add(new Pair<PrinterInfo, Location>(printer, location));
if (DEBUG) {
Log.i(LOG_TAG, "[RESTORED] " + printer);
}
skipEmptyTextTags(parser);
expect(parser, XmlPullParser.END_TAG, TAG_PRINTER);
return true;
}
private void expect(XmlPullParser parser, int type, String tag)
throws XmlPullParserException {
if (!accept(parser, type, tag)) {
throw new XmlPullParserException("Exepected event: " + type
+ " and tag: " + tag + " but got event: " + parser.getEventType()
+ " and tag:" + parser.getName());
}
}
private void skipEmptyTextTags(XmlPullParser parser)
throws IOException, XmlPullParserException {
while (accept(parser, XmlPullParser.TEXT, null)
&& "\n".equals(parser.getText())) {
parser.next();
}
}
private boolean accept(XmlPullParser parser, int type, String tag)
throws XmlPullParserException {
if (parser.getEventType() != type) {
return false;
}
if (tag != null) {
if (!tag.equals(parser.getName())) {
return false;
}
} else if (parser.getName() != null) {
return false;
}
return true;
}
}
private final class WriteTask
extends AsyncTask<List<Pair<PrinterInfo, Location>>, Void, Void> {
@Override
protected Void doInBackground(
@SuppressWarnings("unchecked") List<Pair<PrinterInfo, Location>>... printers) {
doWritePrinterHistory(printers[0]);
return null;
}
private void doWritePrinterHistory(List<Pair<PrinterInfo, Location>> printers) {
FileOutputStream out = null;
try {
out = mStatePersistFile.startWrite();
XmlSerializer serializer = new FastXmlSerializer();
serializer.setOutput(out, StandardCharsets.UTF_8.name());
serializer.startDocument(null, true);
serializer.startTag(null, TAG_PRINTERS);
final int printerCount = printers.size();
for (int i = 0; i < printerCount; i++) {
PrinterInfo printer = printers.get(i).first;
serializer.startTag(null, TAG_PRINTER);
serializer.attribute(null, ATTR_NAME, printer.getName());
String description = printer.getDescription();
if (description != null) {
serializer.attribute(null, ATTR_DESCRIPTION, description);
}
PrinterId printerId = printer.getId();
serializer.startTag(null, TAG_PRINTER_ID);
serializer.attribute(null, ATTR_LOCAL_ID, printerId.getLocalId());
serializer.attribute(null, ATTR_SERVICE_NAME, printerId.getServiceName()
.flattenToString());
serializer.endTag(null, TAG_PRINTER_ID);
Location location = printers.get(i).second;
if (location != null) {
serializer.startTag(null, TAG_LOCATION);
serializer.attribute(null, ATTR_LONGITUDE,
String.valueOf(location.getLongitude()));
serializer.attribute(null, ATTR_LATITUDE,
String.valueOf(location.getLatitude()));
serializer.attribute(null, ATTR_ACCURACY,
String.valueOf(location.getAccuracy()));
serializer.endTag(null, TAG_LOCATION);
}
serializer.endTag(null, TAG_PRINTER);
if (DEBUG) {
Log.i(LOG_TAG, "[PERSISTED] " + printer);
}
}
serializer.endTag(null, TAG_PRINTERS);
serializer.endDocument();
mStatePersistFile.finishWrite(out);
if (DEBUG) {
Log.i(LOG_TAG, "[PERSIST END]");
}
} catch (IOException ioe) {
Slog.w(LOG_TAG, "Failed to write printer history, restoring backup.", ioe);
mStatePersistFile.failWrite(out);
} finally {
IoUtils.closeQuietly(out);
}
}
}
}
}
| |
/**
* Licensed to DigitalPebble Ltd under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* DigitalPebble licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.digitalpebble.stormcrawler.bolt;
import static com.digitalpebble.stormcrawler.Constants.StatusStreamName;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.net.URL;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.storm.metric.api.MultiCountMetric;
import org.apache.storm.task.OutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.tuple.Fields;
import org.apache.storm.tuple.Tuple;
import org.apache.storm.tuple.Values;
import org.apache.tika.config.TikaConfig;
import org.apache.tika.detect.Detector;
import org.apache.tika.io.TikaInputStream;
import org.apache.tika.mime.MediaType;
import org.jsoup.nodes.Element;
import org.jsoup.parser.Parser;
import org.jsoup.select.Elements;
import org.slf4j.LoggerFactory;
import org.w3c.dom.DocumentFragment;
import com.digitalpebble.stormcrawler.Constants;
import com.digitalpebble.stormcrawler.Metadata;
import com.digitalpebble.stormcrawler.parse.JSoupDOMBuilder;
import com.digitalpebble.stormcrawler.parse.Outlink;
import com.digitalpebble.stormcrawler.parse.ParseData;
import com.digitalpebble.stormcrawler.parse.ParseFilter;
import com.digitalpebble.stormcrawler.parse.ParseFilters;
import com.digitalpebble.stormcrawler.parse.ParseResult;
import com.digitalpebble.stormcrawler.persistence.Status;
import com.digitalpebble.stormcrawler.protocol.HttpHeaders;
import com.digitalpebble.stormcrawler.util.CharsetIdentification;
import com.digitalpebble.stormcrawler.util.ConfUtils;
import com.digitalpebble.stormcrawler.util.RefreshTag;
import com.digitalpebble.stormcrawler.util.RobotsTags;
/**
* Parser for HTML documents only which uses ICU4J to detect the charset
* encoding. Kindly donated to storm-crawler by shopstyle.com.
*/
@SuppressWarnings("serial")
public class JSoupParserBolt extends StatusEmitterBolt {

    /** Metadata key name for tracking the anchors */
    public static final String ANCHORS_KEY_NAME = "anchors";

    private static final org.slf4j.Logger LOG = LoggerFactory
            .getLogger(JSoupParserBolt.class);

    /** Counts successes and the various error categories, reported via Storm metrics. */
    private MultiCountMetric eventCounter;

    /** Chain of parse filters built from the topology configuration. */
    private ParseFilter parseFilters = null;

    /** Tika detector used to sniff the MIME type from content bytes + clues. */
    private Detector detector = TikaConfig.getDefaultConfig().getDetector();

    /** Whether to run Tika MIME detection instead of trusting the HTTP header alone. */
    private boolean detectMimeType = true;

    /** Whether to copy link anchor texts into the outlinks' metadata. */
    private boolean trackAnchors = true;

    /** Whether discovered outlinks are emitted on the status stream. */
    private boolean emitOutlinks = true;

    /** In strict mode, rel=nofollow links are dropped entirely (not just de-anchored). */
    private boolean robots_noFollow_strict = true;

    /**
     * If a Tuple is not HTML whether to send it to the status stream as an
     * error or pass it on the default stream
     **/
    private boolean treat_non_html_as_error = true;

    /**
     * Length of content to use for detecting the charset. Set to -1 to use the
     * full content (will make the parser slow), 0 to deactivate the detection
     * altogether, or any other value (at least a few hundred bytes).
     **/
    private int maxLengthCharsetDetection = -1;

    /**
     * Reads the configuration keys (parser.emitOutlinks, track.anchors,
     * jsoup.treat.non.html.as.error, detect.mimetype, detect.charset.maxlength,
     * robots no-follow strictness) and registers the event counter metric.
     */
    @SuppressWarnings({ "rawtypes", "unchecked" })
    @Override
    public void prepare(Map conf, TopologyContext context,
            OutputCollector collector) {
        super.prepare(conf, context, collector);

        eventCounter = context.registerMetric(this.getClass().getSimpleName(),
                new MultiCountMetric(), 10);

        parseFilters = ParseFilters.fromConf(conf);

        emitOutlinks = ConfUtils.getBoolean(conf, "parser.emitOutlinks", true);

        trackAnchors = ConfUtils.getBoolean(conf, "track.anchors", true);

        robots_noFollow_strict = ConfUtils.getBoolean(conf,
                RobotsTags.ROBOTS_NO_FOLLOW_STRICT, true);

        treat_non_html_as_error = ConfUtils.getBoolean(conf,
                "jsoup.treat.non.html.as.error", true);

        detectMimeType = ConfUtils.getBoolean(conf, "detect.mimetype", true);

        maxLengthCharsetDetection = ConfUtils.getInt(conf,
                "detect.charset.maxlength", -1);
    }

    /**
     * Parses an HTML page: checks/detects the content type, decodes the bytes
     * with the identified charset, extracts outlinks, anchors and text with
     * JSoup, honours robots meta directives, applies the parse filters, and
     * emits the resulting document(s). Non-HTML tuples are either failed to
     * the status stream or passed through, depending on configuration.
     */
    @Override
    public void execute(Tuple tuple) {
        byte[] content = tuple.getBinaryByField("content");
        String url = tuple.getStringByField("url");
        Metadata metadata = (Metadata) tuple.getValueByField("metadata");

        LOG.info("Parsing : starting {}", url);

        // check that its content type is HTML
        // look at value found in HTTP headers
        boolean CT_OK = false;

        String mimeType = metadata.getFirstValue(HttpHeaders.CONTENT_TYPE);

        if (detectMimeType) {
            try {
                mimeType = guessMimeType(url, mimeType, content);
            } catch (Exception e) {
                String errorMessage = "Exception while guessing mimetype on "
                        + url + ": " + e;
                handleException(url, e, metadata, tuple, "mimetype guessing",
                        errorMessage);
                return;
            }
            // store identified type in md
            metadata.setValue("parse.Content-Type", mimeType);
        }

        if (StringUtils.isNotBlank(mimeType)) {
            if (mimeType.toLowerCase().contains("html")) {
                CT_OK = true;
            }
        }
        // go ahead even if no mimetype is available
        else {
            CT_OK = true;
        }

        if (!CT_OK) {
            if (this.treat_non_html_as_error) {
                String errorMessage = "Exception content-type " + mimeType
                        + " for " + url;
                RuntimeException e = new RuntimeException(errorMessage);
                handleException(url, e, metadata, tuple,
                        "content-type checking", errorMessage);
            } else {
                // pass the tuple on unparsed (empty text field)
                LOG.info("Incorrect mimetype - passing on : {}", url);
                collector.emit(tuple, new Values(url, content, metadata, ""));
                collector.ack(tuple);
            }
            return;
        }

        long start = System.currentTimeMillis();

        String charset = CharsetIdentification.getCharset(metadata, content,
                maxLengthCharsetDetection);

        // get the robots tags from the fetch metadata
        RobotsTags robotsTags = new RobotsTags(metadata);

        Map<String, List<String>> slinks;
        String text = "";
        DocumentFragment fragment;
        try {
            String html = Charset.forName(charset)
                    .decode(ByteBuffer.wrap(content)).toString();

            org.jsoup.nodes.Document jsoupDoc = Parser.htmlParser().parseInput(
                    html, url);

            fragment = JSoupDOMBuilder.jsoup2HTML(jsoupDoc);

            // extracts the robots directives from the meta tags
            robotsTags.extractMetaTags(fragment);

            // store a normalised representation in metadata
            // so that the indexer is aware of it
            robotsTags.normaliseToMetadata(metadata);

            // do not extract the links if no follow has been set
            // and we are in strict mode
            if (robotsTags.isNoFollow() && robots_noFollow_strict) {
                slinks = new HashMap<>(0);
            } else {
                Elements links = jsoupDoc.select("a[href]");
                slinks = new HashMap<>(links.size());
                for (Element link : links) {
                    // abs:href tells jsoup to return fully qualified domains
                    // for
                    // relative urls.
                    // e.g.: /foo will resolve to http://shopstyle.com/foo
                    String targetURL = link.attr("abs:href");

                    // nofollow
                    boolean noFollow = "nofollow".equalsIgnoreCase(link
                            .attr("rel"));
                    // remove altogether
                    if (noFollow && robots_noFollow_strict) {
                        continue;
                    }

                    // link not specifically marked as no follow
                    // but whole page is
                    if (!noFollow && robotsTags.isNoFollow()) {
                        noFollow = true;
                    }

                    String anchor = link.text();
                    if (StringUtils.isNotBlank(targetURL)) {
                        // any existing anchors for the same target?
                        List<String> anchors = slinks.get(targetURL);
                        if (anchors == null) {
                            anchors = new LinkedList<>();
                            slinks.put(targetURL, anchors);
                        }
                        // track the anchors only if no follow is false
                        if (!noFollow && StringUtils.isNotBlank(anchor)) {
                            anchors.add(anchor);
                        }
                    }
                }
            }

            Element body = jsoupDoc.body();
            if (body != null) {
                text = body.text();
            }

        } catch (Throwable e) {
            String errorMessage = "Exception while parsing " + url + ": " + e;
            handleException(url, e, metadata, tuple, "content parsing",
                    errorMessage);
            return;
        }

        // store identified charset in md
        metadata.setValue("parse.Content-Encoding", charset);

        long duration = System.currentTimeMillis() - start;

        LOG.info("Parsed {} in {} msec", url, duration);

        // redirection? (handled via the meta refresh tag)
        try {
            String redirection = RefreshTag.extractRefreshURL(fragment);
            if (StringUtils.isNotBlank(redirection)) {
                // stores the URL it redirects to
                // used for debugging mainly - do not resolve the target
                // URL
                LOG.info("Found redir in {} to {}", url, redirection);
                metadata.setValue("_redirTo", redirection);

                if (allowRedirs() && StringUtils.isNotBlank(redirection)) {
                    emitOutlink(tuple, new URL(url), redirection, metadata);
                }

                // Mark URL as redirected
                collector
                        .emit(com.digitalpebble.stormcrawler.Constants.StatusStreamName,
                                tuple, new Values(url, metadata,
                                        Status.REDIRECTION));
                collector.ack(tuple);
                eventCounter.scope("tuple_success").incr();
                return;
            }
        } catch (MalformedURLException e) {
            LOG.error("MalformedURLException on {}", url);
        }

        List<Outlink> outlinks = toOutlinks(url, metadata, slinks);

        ParseResult parse = new ParseResult(outlinks);

        // parse data of the parent URL
        ParseData parseData = parse.get(url);
        parseData.setMetadata(metadata);
        parseData.setText(text);
        parseData.setContent(content);

        // apply the parse filters if any
        try {
            parseFilters.filter(url, content, fragment, parse);
        } catch (RuntimeException e) {
            String errorMessage = "Exception while running parse filters on "
                    + url + ": " + e;
            handleException(url, e, metadata, tuple, "content filtering",
                    errorMessage);
            return;
        }

        if (emitOutlinks) {
            for (Outlink outlink : parse.getOutlinks()) {
                collector.emit(
                        StatusStreamName,
                        tuple,
                        new Values(outlink.getTargetURL(), outlink
                                .getMetadata(), Status.DISCOVERED));
            }
        }

        // emit each document/subdocument in the ParseResult object
        // there should be at least one ParseData item for the "parent" URL
        for (Map.Entry<String, ParseData> doc : parse) {
            ParseData parseDoc = doc.getValue();

            collector.emit(
                    tuple,
                    new Values(doc.getKey(), parseDoc.getContent(), parseDoc
                            .getMetadata(), parseDoc.getText()));
        }

        collector.ack(tuple);
        eventCounter.scope("tuple_success").incr();
    }

    /**
     * Logs the error, routes the tuple to the status stream with Status.ERROR
     * (recording the error source and message in the metadata), acks the tuple
     * and bumps the per-source and overall error counters.
     */
    private void handleException(String url, Throwable e, Metadata metadata,
            Tuple tuple, String errorSource, String errorMessage) {
        LOG.error(errorMessage);
        // send to status stream in case another component wants to update
        // its status
        metadata.setValue(Constants.STATUS_ERROR_SOURCE, errorSource);
        metadata.setValue(Constants.STATUS_ERROR_MESSAGE, errorMessage);
        collector.emit(StatusStreamName, tuple, new Values(url, metadata,
                Status.ERROR));
        collector.ack(tuple);
        // Increment metric that is context specific
        String s = "error_" + errorSource.replaceAll(" ", "_") + "_";
        eventCounter.scope(s + e.getClass().getSimpleName()).incrBy(1);
        // Increment general metric
        eventCounter.scope("parse exception").incrBy(1);
    }

    @Override
    public void declareOutputFields(OutputFieldsDeclarer declarer) {
        super.declareOutputFields(declarer);
        // output of this module is the list of fields to index
        // with at least the URL, text content
        declarer.declare(new Fields("url", "content", "metadata", "text"));
    }

    /**
     * Detects the MIME type of the content with Tika, using the HTTP
     * Content-Type header and the URL's file name as additional clues.
     *
     * @param URL the URL of the document (used as a file-name clue)
     * @param httpCT the Content-Type reported by the server, may be blank
     * @param content the raw fetched bytes
     * @return the detected MIME type as a string
     */
    public String guessMimeType(String URL, String httpCT, byte[] content) {

        org.apache.tika.metadata.Metadata metadata = new org.apache.tika.metadata.Metadata();

        if (StringUtils.isNotBlank(httpCT)) {
            // pass content type from server as a clue
            metadata.set(org.apache.tika.metadata.Metadata.CONTENT_TYPE, httpCT);
        }

        // use filename as a clue
        try {
            URL _url = new URL(URL);
            metadata.set(org.apache.tika.metadata.Metadata.RESOURCE_NAME_KEY,
                    _url.getFile());
        } catch (MalformedURLException e1) {
            throw new IllegalStateException("Malformed URL", e1);
        }

        try {
            try (InputStream stream = TikaInputStream.get(content)) {
                MediaType mt = detector.detect(stream, metadata);
                return mt.toString();
            }
        } catch (IOException e) {
            throw new IllegalStateException("Unexpected IOException", e);
        }
    }

    /**
     * Converts the raw link/anchor map into de-duplicated, filtered Outlink
     * objects, optionally attaching the anchor texts to each outlink's
     * metadata.
     *
     * @param url the source URL the links were found on
     * @param metadata the source document's metadata (passed to the filters)
     * @param slinks map of target URL to the list of anchor texts seen for it
     * @return the kept outlinks; empty list if the source URL is malformed
     */
    private List<Outlink> toOutlinks(String url, Metadata metadata,
            Map<String, List<String>> slinks) {

        Map<String, Outlink> outlinks = new HashMap<>();

        URL sourceUrl;
        try {
            sourceUrl = new URL(url);
        } catch (MalformedURLException e) {
            // we would have known by now as previous components check whether
            // the URL is valid
            LOG.error("MalformedURLException on {}", url);
            eventCounter.scope("error_invalid_source_url").incrBy(1);
            return new LinkedList<Outlink>();
        }

        for (Map.Entry<String, List<String>> linkEntry : slinks.entrySet()) {
            String targetURL = linkEntry.getKey();

            Outlink ol = filterOutlink(sourceUrl, targetURL, metadata);
            if (ol == null) {
                eventCounter.scope("outlink_filtered").incr();
                continue;
            }

            // the same link could already be there post-normalisation
            Outlink old = outlinks.get(ol.getTargetURL());
            if (old != null) {
                ol = old;
            }

            List<String> anchors = linkEntry.getValue();
            if (trackAnchors && anchors.size() > 0) {
                ol.getMetadata().addValues(ANCHORS_KEY_NAME, anchors);
                // sets the first anchor
                ol.setAnchor(anchors.get(0));
            }

            if (old == null) {
                outlinks.put(ol.getTargetURL(), ol);
                eventCounter.scope("outlink_kept").incr();
            }
        }
        return new LinkedList<Outlink>(outlinks.values());
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.datastructures;
import org.apache.ignite.*;
import org.apache.ignite.configuration.*;
import org.apache.ignite.internal.*;
import org.apache.ignite.internal.util.*;
import org.apache.ignite.internal.util.typedef.*;
import org.apache.ignite.lang.*;
import org.apache.ignite.resources.*;
import org.apache.ignite.testframework.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.*;
import static org.apache.ignite.cache.CacheAtomicityMode.*;
/**
* Failover tests for cache data structures.
*/
public abstract class GridCacheAbstractDataStructuresFailoverSelfTest extends IgniteCollectionAbstractTest {
/** Maximum time a single test in this class is allowed to run (2 minutes). */
private static final long TEST_TIMEOUT = 2 * 60 * 1000;

/** Name used for the grid started and stopped during topology changes. */
private static final String NEW_GRID_NAME = "newGrid";

/** Name of the data structure under test. */
private static final String STRUCTURE_NAME = "structure";

/** Name of the transactional cache configured in {@code getConfiguration}. */
private static final String TRANSACTIONAL_CACHE_NAME = "tx_cache";

/** Number of topology-change iterations each thread performs. */
private static final int TOP_CHANGE_CNT = 5;

/** Number of concurrent topology-change threads. */
private static final int TOP_CHANGE_THREAD_CNT = 3;
/** {@inheritDoc} */
@Override protected long getTestTimeout() {
    // Topology changes are slow; allow up to TEST_TIMEOUT per test.
    return TEST_TIMEOUT;
}
/**
 * @return Grids count to start.
 */
@Override public int gridCount() {
    return 3;
}
/** {@inheritDoc} */
@Override protected void beforeTestsStarted() throws Exception {
    // No-op: grids are started per-test in beforeTest() instead of once per class.
}
/** {@inheritDoc} */
@Override protected void afterTestsStopped() throws Exception {
    // No-op: grids are stopped per-test in afterTest() instead of once per class.
}
/** {@inheritDoc} */
@Override protected void beforeTest() throws Exception {
    // Start a fresh topology before each test so failover state does not leak.
    startGridsMultiThreaded(gridCount());

    super.beforeTest();
}
/** {@inheritDoc} */
@Override protected void afterTest() throws Exception {
    // Tear down the whole topology, including grids started inside the test.
    stopAllGrids();
}
/** {@inheritDoc} */
@Override protected IgniteConfiguration getConfiguration(String gridName) throws Exception {
    IgniteConfiguration cfg = super.getConfiguration(gridName);

    // Configure the atomic data structures cache to mirror the collection cache.
    AtomicConfiguration atomics = new AtomicConfiguration();

    atomics.setCacheMode(collectionCacheMode());
    atomics.setBackups(collectionConfiguration().getBackups());

    cfg.setAtomicConfiguration(atomics);

    // Additional transactional cache used by the tests.
    CacheConfiguration txCacheCfg = new CacheConfiguration();

    txCacheCfg.setName(TRANSACTIONAL_CACHE_NAME);
    txCacheCfg.setAtomicityMode(TRANSACTIONAL);

    cfg.setCacheConfiguration(txCacheCfg);

    return cfg;
}
/**
 * Verifies an atomic long keeps its value across a node join and leave.
 *
 * @throws Exception If failed.
 */
public void testAtomicLongTopologyChange() throws Exception {
    try (IgniteAtomicLong atomic = grid(0).atomicLong(STRUCTURE_NAME, 10, true)) {
        // Join a new node and access the structure from it.
        Ignite joined = startGrid(NEW_GRID_NAME);

        IgniteAtomicLong fromJoined = joined.atomicLong(STRUCTURE_NAME, 10, true);

        assert fromJoined.get() == 10;
        assert fromJoined.addAndGet(10) == 20;

        // The update must survive the node leaving.
        stopGrid(NEW_GRID_NAME);

        assert grid(0).atomicLong(STRUCTURE_NAME, 10, true).get() == 20;
    }
}
/**
* @throws Exception If failed.
*/
public void testAtomicLongConstantTopologyChange() throws Exception {
try (IgniteAtomicLong s = grid(0).atomicLong(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override
public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
String name = UUID.randomUUID().toString();
try {
Ignite g = startGrid(name);
assert g.atomicLong(STRUCTURE_NAME, 1, true).get() > 0;
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
long val = s.get();
while (!fut.isDone()) {
assert s.get() == val;
assert s.incrementAndGet() == val + 1;
val++;
}
fut.get();
for (Ignite g : G.allGrids())
assertEquals(val, g.atomicLong(STRUCTURE_NAME, 1, true).get());
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicLongConstantMultipleTopologyChange() throws Exception {
try (IgniteAtomicLong s = grid(0).atomicLong(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
assert g.atomicLong(STRUCTURE_NAME, 1, true).get() > 0;
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
long val = s.get();
while (!fut.isDone()) {
assert s.get() == val;
assert s.incrementAndGet() == val + 1;
val++;
}
fut.get();
for (Ignite g : G.allGrids())
assertEquals(val, g.atomicLong(STRUCTURE_NAME, 1, true).get());
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicReferenceTopologyChange() throws Exception {
try (IgniteAtomicReference atomic = grid(0).atomicReference(STRUCTURE_NAME, 10, true)) {
Ignite g = startGrid(NEW_GRID_NAME);
assert g.atomicReference(STRUCTURE_NAME, 10, true).get() == 10;
g.atomicReference(STRUCTURE_NAME, 10, true).set(20);
stopGrid(NEW_GRID_NAME);
assertEquals(20, (int) grid(0).atomicReference(STRUCTURE_NAME, 10, true).get());
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicReferenceConstantTopologyChange() throws Exception {
try (IgniteAtomicReference<Integer> s = grid(0).atomicReference(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override
public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
String name = UUID.randomUUID().toString();
try {
Ignite g = startGrid(name);
assert g.atomicReference(STRUCTURE_NAME, 1, true).get() > 0;
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.get();
while (!fut.isDone()) {
assert s.get() == val;
s.set(++val);
}
fut.get();
for (Ignite g : G.allGrids())
assertEquals(val, (int)g.atomicReference(STRUCTURE_NAME, 1, true).get());
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicReferenceConstantMultipleTopologyChange() throws Exception {
try (IgniteAtomicReference<Integer> s = grid(0).atomicReference(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
assert g.atomicReference(STRUCTURE_NAME, 1, true).get() > 0;
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.get();
while (!fut.isDone()) {
assert s.get() == val;
s.set(++val);
}
fut.get();
for (Ignite g : G.allGrids())
assert g.atomicReference(STRUCTURE_NAME, 1, true).get() == val;
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicStampedTopologyChange() throws Exception {
try (IgniteAtomicStamped atomic = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, true)) {
Ignite g = startGrid(NEW_GRID_NAME);
IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 10, 10, true).get();
assert t.get1() == 10;
assert t.get2() == 10;
g.atomicStamped(STRUCTURE_NAME, 10, 10, true).set(20, 20);
stopGrid(NEW_GRID_NAME);
t = grid(0).atomicStamped(STRUCTURE_NAME, 10, 10, true).get();
assert t.get1() == 20;
assert t.get2() == 20;
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicStampedConstantTopologyChange() throws Exception {
try (IgniteAtomicStamped<Integer, Integer> s = grid(0).atomicStamped(STRUCTURE_NAME, 1, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override
public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
String name = UUID.randomUUID().toString();
try {
Ignite g = startGrid(name);
IgniteBiTuple<Integer, Integer> t =
g.atomicStamped(STRUCTURE_NAME, 1, 1, true).get();
assert t.get1() > 0;
assert t.get2() > 0;
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.value();
while (!fut.isDone()) {
IgniteBiTuple<Integer, Integer> t = s.get();
assert t.get1() == val;
assert t.get2() == val;
val++;
s.set(val, val);
}
fut.get();
for (Ignite g : G.allGrids()) {
IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 1, 1, true).get();
assert t.get1() == val;
assert t.get2() == val;
}
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicStampedConstantMultipleTopologyChange() throws Exception {
try (IgniteAtomicStamped<Integer, Integer> s = grid(0).atomicStamped(STRUCTURE_NAME, 1, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
IgniteBiTuple<Integer, Integer> t =
g.atomicStamped(STRUCTURE_NAME, 1, 1, true).get();
assert t.get1() > 0;
assert t.get2() > 0;
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.value();
while (!fut.isDone()) {
IgniteBiTuple<Integer, Integer> t = s.get();
assert t.get1() == val;
assert t.get2() == val;
val++;
s.set(val, val);
}
fut.get();
for (Ignite g : G.allGrids()) {
IgniteBiTuple<Integer, Integer> t = g.atomicStamped(STRUCTURE_NAME, 1, 1, true).get();
assert t.get1() == val;
assert t.get2() == val;
}
}
}
/**
* @throws Exception If failed.
*/
public void testCountDownLatchTopologyChange() throws Exception {
try (IgniteCountDownLatch latch = grid(0).countDownLatch(STRUCTURE_NAME, 20, true, true)) {
try {
Ignite g = startGrid(NEW_GRID_NAME);
assert g.countDownLatch(STRUCTURE_NAME, 20, true, true).count() == 20;
g.countDownLatch(STRUCTURE_NAME, 20, true, true).countDown(10);
stopGrid(NEW_GRID_NAME);
assert grid(0).countDownLatch(STRUCTURE_NAME, 20, true, true).count() == 10;
}
finally {
grid(0).countDownLatch(STRUCTURE_NAME, 20, true, true).countDownAll();
}
}
}
/**
* @throws Exception If failed.
*/
public void testCountDownLatchConstantTopologyChange() throws Exception {
try (IgniteCountDownLatch s = grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true)) {
try {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
String name = UUID.randomUUID().toString();
try {
Ignite g = startGrid(name);
assert g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false) != null;
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.count();
while (!fut.isDone()) {
assert s.count() == val;
assert s.countDown() == val - 1;
val--;
}
fut.get();
for (Ignite g : G.allGrids())
assert g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true).count() == val;
}
finally {
grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true).countDownAll();
}
}
}
/**
* @throws Exception If failed.
*/
public void testCountDownLatchConstantMultipleTopologyChange() throws Exception {
try (IgniteCountDownLatch s = grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, true)) {
try {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
assert g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false) != null;
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.count();
while (!fut.isDone()) {
assert s.count() == val;
assert s.countDown() == val - 1;
val--;
}
fut.get();
for (Ignite g : G.allGrids())
assertEquals(val, g.countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).count());
}
finally {
grid(0).countDownLatch(STRUCTURE_NAME, Integer.MAX_VALUE, false, false).countDownAll();
}
}
}
/**
* @throws Exception If failed.
*/
public void testFifoQueueTopologyChange() throws Exception {
try {
grid(0).queue(STRUCTURE_NAME, 0, config(false)).put(10);
Ignite g = startGrid(NEW_GRID_NAME);
assert g.<Integer>queue(STRUCTURE_NAME, 0, null).poll() == 10;
g.queue(STRUCTURE_NAME, 0, null).put(20);
stopGrid(NEW_GRID_NAME);
assert grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).peek() == 20;
}
finally {
grid(0).<Integer>queue(STRUCTURE_NAME, 0, null).close();
}
}
/**
* @throws Exception If failed.
*/
public void testQueueConstantTopologyChange() throws Exception {
try (IgniteQueue<Integer> s = grid(0).queue(STRUCTURE_NAME, 0, config(false))) {
s.put(1);
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
String name = UUID.randomUUID().toString();
try {
Ignite g = startGrid(name);
assert g.<Integer>queue(STRUCTURE_NAME, 0, null).peek() > 0;
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.peek();
int origVal = val;
while (!fut.isDone())
s.put(++val);
fut.get();
for (Ignite g : G.allGrids())
assert g.<Integer>queue(STRUCTURE_NAME, 0, null).peek() == origVal;
}
}
/**
* @throws Exception If failed.
*/
public void testQueueConstantMultipleTopologyChange() throws Exception {
try (IgniteQueue<Integer> s = grid(0).queue(STRUCTURE_NAME, 0, config(false))) {
s.put(1);
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
assert g.<Integer>queue(STRUCTURE_NAME, 0, null).peek() > 0;
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
int val = s.peek();
int origVal = val;
while (!fut.isDone())
s.put(++val);
fut.get();
for (Ignite g : G.allGrids())
assert g.<Integer>queue(STRUCTURE_NAME, 0, null).peek() == origVal;
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicSequenceTopologyChange() throws Exception {
try (IgniteAtomicSequence s = grid().atomicSequence(STRUCTURE_NAME, 10, true)) {
Ignite g = startGrid(NEW_GRID_NAME);
assert g.atomicSequence(STRUCTURE_NAME, 10, false).get() == 1010;
assert g.atomicSequence(STRUCTURE_NAME, 10, false).addAndGet(10) == 1020;
stopGrid(NEW_GRID_NAME);
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicSequenceConstantTopologyChange() throws Exception {
try (IgniteAtomicSequence s = grid(0).atomicSequence(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
String name = UUID.randomUUID().toString();
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
try {
Ignite g = startGrid(name);
assertTrue(g.atomicSequence(STRUCTURE_NAME, 1, false).get() > 0);
}
finally {
if (i != TOP_CHANGE_CNT - 1)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
long old = s.get();
while (!fut.isDone()) {
assertEquals(old, s.get());
long val = s.incrementAndGet();
assertTrue(val > old);
old = val;
}
fut.get();
}
}
/**
* @throws Exception If failed.
*/
public void testAtomicSequenceInitialization() throws Exception {
int threadCnt = 3;
final AtomicInteger idx = new AtomicInteger(gridCount());
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
int id = idx.getAndIncrement();
try {
startGrid(id);
Thread.sleep(1000);
}
catch (Exception e) {
throw F.wrap(e);
}
finally {
stopGrid(id);
info("Thread finished.");
}
}
}, threadCnt, "test-thread");
while (!fut.isDone()) {
grid(0).compute().call(new IgniteCallable<Object>() {
/** */
@IgniteInstanceResource
private Ignite g;
@Override public Object call() throws Exception {
IgniteAtomicSequence seq = g.atomicSequence(STRUCTURE_NAME, 1, true);
assert seq != null;
for (int i = 0; i < 1000; i++)
seq.getAndIncrement();
return null;
}
});
}
fut.get();
}
/**
* @throws Exception If failed.
*/
public void testAtomicSequenceConstantMultipleTopologyChange() throws Exception {
try (IgniteAtomicSequence s = grid(0).atomicSequence(STRUCTURE_NAME, 1, true)) {
IgniteInternalFuture<?> fut = GridTestUtils.runMultiThreadedAsync(new CA() {
@Override public void apply() {
try {
for (int i = 0; i < TOP_CHANGE_CNT; i++) {
Collection<String> names = new GridLeanSet<>(3);
try {
for (int j = 0; j < 3; j++) {
String name = UUID.randomUUID().toString();
names.add(name);
Ignite g = startGrid(name);
assertTrue(g.atomicSequence(STRUCTURE_NAME, 1, false).get() > 0);
}
}
finally {
if (i != TOP_CHANGE_CNT - 1)
for (String name : names)
stopGrid(name);
}
}
}
catch (Exception e) {
throw F.wrap(e);
}
}
}, TOP_CHANGE_THREAD_CNT, "topology-change-thread");
long old = s.get();
while (!fut.isDone()) {
assertEquals(old, s.get());
long val = s.incrementAndGet();
assertTrue(val > old);
old = val;
}
fut.get();
}
}
/**
* @throws Exception If failed.
*/
public void testUncommitedTxLeave() throws Exception {
final int val = 10;
grid(0).atomicLong(STRUCTURE_NAME, val, true);
GridTestUtils.runAsync(new Callable<Object>() {
@Override public Object call() throws Exception {
Ignite g = startGrid(NEW_GRID_NAME);
try {
g.transactions().txStart();
g.jcache(TRANSACTIONAL_CACHE_NAME).put(1, 1);
assert g.atomicLong(STRUCTURE_NAME, val, false).incrementAndGet() == val + 1;
}
finally {
stopGrid(NEW_GRID_NAME);
}
return null;
}
}).get();
waitForDiscovery(G.allGrids().toArray(new Ignite[gridCount()]));
assert grid(0).atomicLong(STRUCTURE_NAME, val, false).get() == val + 1;
}
}
| |
/*
* Copyright (C) 2011 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.common.net;
import com.google.common.annotations.Beta;
import com.google.common.annotations.GwtCompatible;
/**
 * Contains constant definitions for the HTTP header field names. See:
 *
 * <ul>
 *   <li><a href="http://www.ietf.org/rfc/rfc2109.txt">RFC 2109</a>
 *   <li><a href="http://www.ietf.org/rfc/rfc2183.txt">RFC 2183</a>
 *   <li><a href="http://www.ietf.org/rfc/rfc2616.txt">RFC 2616</a>
 *   <li><a href="http://www.ietf.org/rfc/rfc2965.txt">RFC 2965</a>
 *   <li><a href="http://www.ietf.org/rfc/rfc5988.txt">RFC 5988</a>
 * </ul>
 *
 * @author Kurt Alfred Kluever
 * @since 11.0
 */
@GwtCompatible
public final class HttpHeaders {
  /** Non-instantiable: this class only holds header-name constants. */
  private HttpHeaders() {}

  // HTTP Request and Response header fields

  /** The HTTP {@code Cache-Control} header field name. */
  public static final String CACHE_CONTROL = "Cache-Control";
  /** The HTTP {@code Content-Length} header field name. */
  public static final String CONTENT_LENGTH = "Content-Length";
  /** The HTTP {@code Content-Type} header field name. */
  public static final String CONTENT_TYPE = "Content-Type";
  /** The HTTP {@code Date} header field name. */
  public static final String DATE = "Date";
  /** The HTTP {@code Pragma} header field name. */
  public static final String PRAGMA = "Pragma";
  /** The HTTP {@code Via} header field name. */
  public static final String VIA = "Via";
  /** The HTTP {@code Warning} header field name. */
  public static final String WARNING = "Warning";

  // HTTP Request header fields

  /** The HTTP {@code Accept} header field name. */
  public static final String ACCEPT = "Accept";
  /** The HTTP {@code Accept-Charset} header field name. */
  public static final String ACCEPT_CHARSET = "Accept-Charset";
  /** The HTTP {@code Accept-Encoding} header field name. */
  public static final String ACCEPT_ENCODING = "Accept-Encoding";
  /** The HTTP {@code Accept-Language} header field name. */
  public static final String ACCEPT_LANGUAGE = "Accept-Language";
  /** The HTTP {@code Access-Control-Request-Headers} header field name. */
  public static final String ACCESS_CONTROL_REQUEST_HEADERS = "Access-Control-Request-Headers";
  /** The HTTP {@code Access-Control-Request-Method} header field name. */
  public static final String ACCESS_CONTROL_REQUEST_METHOD = "Access-Control-Request-Method";
  /** The HTTP {@code Authorization} header field name. */
  public static final String AUTHORIZATION = "Authorization";
  /** The HTTP {@code Connection} header field name. */
  public static final String CONNECTION = "Connection";
  /** The HTTP {@code Cookie} header field name. */
  public static final String COOKIE = "Cookie";
  /** The HTTP {@code Expect} header field name. */
  public static final String EXPECT = "Expect";
  /** The HTTP {@code From} header field name. */
  public static final String FROM = "From";

  /**
   * The HTTP <a href="https://tools.ietf.org/html/rfc7239">{@code Forwarded}</a> header field name.
   *
   * @since 20.0
   */
  public static final String FORWARDED = "Forwarded";

  /**
   * The HTTP {@code Follow-Only-When-Prerender-Shown} header field name.
   *
   * @since 17.0
   */
  @Beta
  public static final String FOLLOW_ONLY_WHEN_PRERENDER_SHOWN = "Follow-Only-When-Prerender-Shown";

  /** The HTTP {@code Host} header field name. */
  public static final String HOST = "Host";

  /**
   * The HTTP <a href="https://tools.ietf.org/html/rfc7540#section-3.2.1">{@code HTTP2-Settings}
   * </a> header field name.
   *
   * @since 24.0
   */
  public static final String HTTP2_SETTINGS = "HTTP2-Settings";

  /** The HTTP {@code If-Match} header field name. */
  public static final String IF_MATCH = "If-Match";
  /** The HTTP {@code If-Modified-Since} header field name. */
  public static final String IF_MODIFIED_SINCE = "If-Modified-Since";
  /** The HTTP {@code If-None-Match} header field name. */
  public static final String IF_NONE_MATCH = "If-None-Match";
  /** The HTTP {@code If-Range} header field name. */
  public static final String IF_RANGE = "If-Range";
  /** The HTTP {@code If-Unmodified-Since} header field name. */
  public static final String IF_UNMODIFIED_SINCE = "If-Unmodified-Since";
  /** The HTTP {@code Last-Event-ID} header field name. */
  public static final String LAST_EVENT_ID = "Last-Event-ID";
  /** The HTTP {@code Max-Forwards} header field name. */
  public static final String MAX_FORWARDS = "Max-Forwards";
  /** The HTTP {@code Origin} header field name. */
  public static final String ORIGIN = "Origin";
  /** The HTTP {@code Proxy-Authorization} header field name. */
  public static final String PROXY_AUTHORIZATION = "Proxy-Authorization";
  /** The HTTP {@code Range} header field name. */
  public static final String RANGE = "Range";
  /** The HTTP {@code Referer} header field name. */
  public static final String REFERER = "Referer";

  /**
   * The HTTP <a href="https://www.w3.org/TR/referrer-policy/">{@code Referrer-Policy}</a> header
   * field name.
   *
   * @since 23.4
   */
  public static final String REFERRER_POLICY = "Referrer-Policy";

  /**
   * Values for the <a href="https://www.w3.org/TR/referrer-policy/">{@code Referrer-Policy}</a>
   * header.
   *
   * @since 23.4
   */
  public static final class ReferrerPolicyValues {
    /** Non-instantiable: this class only holds policy-value constants. */
    private ReferrerPolicyValues() {}

    public static final String NO_REFERRER = "no-referrer";

    /**
     * @deprecated The constant name is misspelled ("REFFERER"); it is retained for backward
     *     compatibility. Use {@link #NO_REFERRER_WHEN_DOWNGRADE} instead.
     */
    @Deprecated
    public static final String NO_REFFERER_WHEN_DOWNGRADE = "no-referrer-when-downgrade";

    /** Correctly spelled replacement for {@link #NO_REFFERER_WHEN_DOWNGRADE}. */
    public static final String NO_REFERRER_WHEN_DOWNGRADE = "no-referrer-when-downgrade";

    public static final String SAME_ORIGIN = "same-origin";
    public static final String ORIGIN = "origin";
    public static final String STRICT_ORIGIN = "strict-origin";
    public static final String ORIGIN_WHEN_CROSS_ORIGIN = "origin-when-cross-origin";
    public static final String STRICT_ORIGIN_WHEN_CROSS_ORIGIN = "strict-origin-when-cross-origin";
    public static final String UNSAFE_URL = "unsafe-url";
  }

  /**
   * The HTTP <a href="https://www.w3.org/TR/service-workers/#update-algorithm">{@code
   * Service-Worker}</a> header field name.
   */
  public static final String SERVICE_WORKER = "Service-Worker";

  /** The HTTP {@code TE} header field name. */
  public static final String TE = "TE";
  /** The HTTP {@code Upgrade} header field name. */
  public static final String UPGRADE = "Upgrade";
  /** The HTTP {@code User-Agent} header field name. */
  public static final String USER_AGENT = "User-Agent";

  // HTTP Response header fields

  /** The HTTP {@code Accept-Ranges} header field name. */
  public static final String ACCEPT_RANGES = "Accept-Ranges";
  /** The HTTP {@code Access-Control-Allow-Headers} header field name. */
  public static final String ACCESS_CONTROL_ALLOW_HEADERS = "Access-Control-Allow-Headers";
  /** The HTTP {@code Access-Control-Allow-Methods} header field name. */
  public static final String ACCESS_CONTROL_ALLOW_METHODS = "Access-Control-Allow-Methods";
  /** The HTTP {@code Access-Control-Allow-Origin} header field name. */
  public static final String ACCESS_CONTROL_ALLOW_ORIGIN = "Access-Control-Allow-Origin";
  /** The HTTP {@code Access-Control-Allow-Credentials} header field name. */
  public static final String ACCESS_CONTROL_ALLOW_CREDENTIALS = "Access-Control-Allow-Credentials";
  /** The HTTP {@code Access-Control-Expose-Headers} header field name. */
  public static final String ACCESS_CONTROL_EXPOSE_HEADERS = "Access-Control-Expose-Headers";
  /** The HTTP {@code Access-Control-Max-Age} header field name. */
  public static final String ACCESS_CONTROL_MAX_AGE = "Access-Control-Max-Age";
  /** The HTTP {@code Age} header field name. */
  public static final String AGE = "Age";
  /** The HTTP {@code Allow} header field name. */
  public static final String ALLOW = "Allow";
  /** The HTTP {@code Content-Disposition} header field name. */
  public static final String CONTENT_DISPOSITION = "Content-Disposition";
  /** The HTTP {@code Content-Encoding} header field name. */
  public static final String CONTENT_ENCODING = "Content-Encoding";
  /** The HTTP {@code Content-Language} header field name. */
  public static final String CONTENT_LANGUAGE = "Content-Language";
  /** The HTTP {@code Content-Location} header field name. */
  public static final String CONTENT_LOCATION = "Content-Location";
  /** The HTTP {@code Content-MD5} header field name. */
  public static final String CONTENT_MD5 = "Content-MD5";
  /** The HTTP {@code Content-Range} header field name. */
  public static final String CONTENT_RANGE = "Content-Range";

  /**
   * The HTTP <a href="http://w3.org/TR/CSP/#content-security-policy-header-field">{@code
   * Content-Security-Policy}</a> header field name.
   *
   * @since 15.0
   */
  public static final String CONTENT_SECURITY_POLICY = "Content-Security-Policy";

  /**
   * The HTTP <a href="http://w3.org/TR/CSP/#content-security-policy-report-only-header-field">
   * {@code Content-Security-Policy-Report-Only}</a> header field name.
   *
   * @since 15.0
   */
  public static final String CONTENT_SECURITY_POLICY_REPORT_ONLY =
      "Content-Security-Policy-Report-Only";

  /**
   * The HTTP nonstandard {@code X-Content-Security-Policy} header field name. It was introduced in
   * <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Firefox until
   * version 23 and Internet Explorer version 10. Please use {@link #CONTENT_SECURITY_POLICY}
   * to pass the CSP.
   *
   * @since 20.0
   */
  public static final String X_CONTENT_SECURITY_POLICY = "X-Content-Security-Policy";

  /**
   * The HTTP nonstandard {@code X-Content-Security-Policy-Report-Only} header field name. It was
   * introduced in <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by
   * Firefox until version 23 and Internet Explorer version 10. Please use {@link
   * #CONTENT_SECURITY_POLICY_REPORT_ONLY} to pass the CSP.
   *
   * @since 20.0
   */
  public static final String X_CONTENT_SECURITY_POLICY_REPORT_ONLY =
      "X-Content-Security-Policy-Report-Only";

  /**
   * The HTTP nonstandard {@code X-WebKit-CSP} header field name. It was introduced in <a
   * href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Chrome until
   * version 25. Please use {@link #CONTENT_SECURITY_POLICY} to pass the CSP.
   *
   * @since 20.0
   */
  public static final String X_WEBKIT_CSP = "X-WebKit-CSP";

  /**
   * The HTTP nonstandard {@code X-WebKit-CSP-Report-Only} header field name. It was introduced in
   * <a href="https://www.w3.org/TR/2011/WD-CSP-20111129/">CSP v.1</a> and used by Chrome until
   * version 25. Please use {@link #CONTENT_SECURITY_POLICY_REPORT_ONLY} to pass the CSP.
   *
   * @since 20.0
   */
  public static final String X_WEBKIT_CSP_REPORT_ONLY = "X-WebKit-CSP-Report-Only";

  /** The HTTP {@code ETag} header field name. */
  public static final String ETAG = "ETag";
  /** The HTTP {@code Expires} header field name. */
  public static final String EXPIRES = "Expires";
  /** The HTTP {@code Last-Modified} header field name. */
  public static final String LAST_MODIFIED = "Last-Modified";
  /** The HTTP {@code Link} header field name. */
  public static final String LINK = "Link";
  /** The HTTP {@code Location} header field name. */
  public static final String LOCATION = "Location";
  /** The HTTP {@code P3P} header field name. Limited browser support. */
  public static final String P3P = "P3P";
  /** The HTTP {@code Proxy-Authenticate} header field name. */
  public static final String PROXY_AUTHENTICATE = "Proxy-Authenticate";
  /** The HTTP {@code Refresh} header field name. Non-standard header supported by most browsers. */
  public static final String REFRESH = "Refresh";
  /** The HTTP {@code Retry-After} header field name. */
  public static final String RETRY_AFTER = "Retry-After";
  /** The HTTP {@code Server} header field name. */
  public static final String SERVER = "Server";

  /**
   * The HTTP <a href="https://www.w3.org/TR/server-timing/">{@code Server-Timing}</a> header field
   * name.
   *
   * @since 23.6
   */
  public static final String SERVER_TIMING = "Server-Timing";

  /**
   * The HTTP <a href="https://www.w3.org/TR/service-workers/#update-algorithm">{@code
   * Service-Worker-Allowed}</a> header field name.
   *
   * @since 20.0
   */
  public static final String SERVICE_WORKER_ALLOWED = "Service-Worker-Allowed";

  /** The HTTP {@code Set-Cookie} header field name. */
  public static final String SET_COOKIE = "Set-Cookie";
  /** The HTTP {@code Set-Cookie2} header field name. */
  public static final String SET_COOKIE2 = "Set-Cookie2";

  /**
   * The HTTP <a href="http://tools.ietf.org/html/rfc6797#section-6.1">{@code
   * Strict-Transport-Security}</a> header field name.
   *
   * @since 15.0
   */
  public static final String STRICT_TRANSPORT_SECURITY = "Strict-Transport-Security";

  /**
   * The HTTP <a href="http://www.w3.org/TR/resource-timing/#cross-origin-resources">{@code
   * Timing-Allow-Origin}</a> header field name.
   *
   * @since 15.0
   */
  public static final String TIMING_ALLOW_ORIGIN = "Timing-Allow-Origin";

  /** The HTTP {@code Trailer} header field name. */
  public static final String TRAILER = "Trailer";
  /** The HTTP {@code Transfer-Encoding} header field name. */
  public static final String TRANSFER_ENCODING = "Transfer-Encoding";
  /** The HTTP {@code Vary} header field name. */
  public static final String VARY = "Vary";
  /** The HTTP {@code WWW-Authenticate} header field name. */
  public static final String WWW_AUTHENTICATE = "WWW-Authenticate";

  // Common, non-standard HTTP header fields

  /** The HTTP {@code DNT} header field name. */
  public static final String DNT = "DNT";
  /** The HTTP {@code X-Content-Type-Options} header field name. */
  public static final String X_CONTENT_TYPE_OPTIONS = "X-Content-Type-Options";
  /** The HTTP {@code X-Do-Not-Track} header field name. */
  public static final String X_DO_NOT_TRACK = "X-Do-Not-Track";
  /** The HTTP {@code X-Forwarded-For} header field name (superseded by {@code Forwarded}). */
  public static final String X_FORWARDED_FOR = "X-Forwarded-For";
  /** The HTTP {@code X-Forwarded-Proto} header field name. */
  public static final String X_FORWARDED_PROTO = "X-Forwarded-Proto";

  /**
   * The HTTP <a href="http://goo.gl/lQirAH">{@code X-Forwarded-Host}</a> header field name.
   *
   * @since 20.0
   */
  public static final String X_FORWARDED_HOST = "X-Forwarded-Host";

  /**
   * The HTTP <a href="http://goo.gl/YtV2at">{@code X-Forwarded-Port}</a> header field name.
   *
   * @since 20.0
   */
  public static final String X_FORWARDED_PORT = "X-Forwarded-Port";

  /** The HTTP {@code X-Frame-Options} header field name. */
  public static final String X_FRAME_OPTIONS = "X-Frame-Options";
  /** The HTTP {@code X-Powered-By} header field name. */
  public static final String X_POWERED_BY = "X-Powered-By";

  /**
   * The HTTP <a href="http://tools.ietf.org/html/draft-evans-palmer-key-pinning">{@code
   * Public-Key-Pins}</a> header field name.
   *
   * @since 15.0
   */
  @Beta public static final String PUBLIC_KEY_PINS = "Public-Key-Pins";

  /**
   * The HTTP <a href="http://tools.ietf.org/html/draft-evans-palmer-key-pinning">{@code
   * Public-Key-Pins-Report-Only}</a> header field name.
   *
   * @since 15.0
   */
  @Beta public static final String PUBLIC_KEY_PINS_REPORT_ONLY = "Public-Key-Pins-Report-Only";

  /** The HTTP {@code X-Requested-With} header field name. */
  public static final String X_REQUESTED_WITH = "X-Requested-With";
  /** The HTTP {@code X-User-IP} header field name. */
  public static final String X_USER_IP = "X-User-IP";

  /**
   * The HTTP <a href="https://goo.gl/VKpXxa">{@code X-Download-Options}</a> header field name.
   *
   * <p>When the {@code X-Download-Options} header is present with the value {@code noopen}, the
   * user is prevented from opening a file download directly; instead, they must first save the
   * file locally.
   *
   * @since 24.1
   */
  @Beta
  public static final String X_DOWNLOAD_OPTIONS = "X-Download-Options";

  /** The HTTP {@code X-XSS-Protection} header field name. */
  public static final String X_XSS_PROTECTION = "X-XSS-Protection";

  /**
   * The HTTP <a
   * href="https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-DNS-Prefetch-Control">{@code
   * X-DNS-Prefetch-Control}</a> header controls DNS prefetch behavior. Value can be "on" or "off".
   * By default, DNS prefetching is "on" for HTTP pages and "off" for HTTPS pages.
   */
  public static final String X_DNS_PREFETCH_CONTROL = "X-DNS-Prefetch-Control";

  /**
   * The HTTP <a href="http://html.spec.whatwg.org/multipage/semantics.html#hyperlink-auditing">
   * {@code Ping-From}</a> header field name.
   *
   * @since 19.0
   */
  public static final String PING_FROM = "Ping-From";

  /**
   * The HTTP <a href="http://html.spec.whatwg.org/multipage/semantics.html#hyperlink-auditing">
   * {@code Ping-To}</a> header field name.
   *
   * @since 19.0
   */
  public static final String PING_TO = "Ping-To";

  /**
   * The HTTP <a href="https://github.com/mikewest/sec-metadata">{@code Sec-Metadata}</a> header
   * field name.
   *
   * @since 26.0
   */
  public static final String SEC_METADATA = "Sec-Metadata";

  /**
   * The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-https">{@code
   * Sec-Token-Binding}</a> header field name.
   *
   * @since 25.1
   */
  public static final String SEC_TOKEN_BINDING = "Sec-Token-Binding";

  /**
   * The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-ttrp">{@code
   * Sec-Provided-Token-Binding-ID}</a> header field name.
   *
   * @since 25.1
   */
  public static final String SEC_PROVIDED_TOKEN_BINDING_ID = "Sec-Provided-Token-Binding-ID";

  /**
   * The HTTP <a href="https://tools.ietf.org/html/draft-ietf-tokbind-ttrp">{@code
   * Sec-Referred-Token-Binding-ID}</a> header field name.
   *
   * @since 25.1
   */
  public static final String SEC_REFERRED_TOKEN_BINDING_ID = "Sec-Referred-Token-Binding-ID";
}
| |
/*
* Licensed to ElasticSearch and Shay Banon under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. ElasticSearch licenses this
* file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.env;
import com.google.common.collect.Sets;
import com.google.common.primitives.Ints;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.NativeFSLockFactory;
import org.elasticsearch.ElasticSearchIllegalStateException;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.ByteSizeValue;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.shard.ShardId;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Set;
/**
*
*/
public class NodeEnvironment extends AbstractComponent {
    // One data directory per configured data path, or null when this node keeps no local storage.
    private final File[] nodeFiles;
    // The "indices" subdirectory of each entry in nodeFiles, pre-computed at construction time.
    private final File[] nodeIndicesLocations;
    // File locks held on each data directory for the lifetime of this node; released in close().
    private final Lock[] locks;
    // The "nodes/<id>" ordinal this process locked, or -1 when local storage is disabled.
    private final int localNodeId;

    /**
     * Locates and locks this node's data directories. Node ordinals 0 up to
     * "node.max_local_storage_nodes" are probed under each data path's "nodes" directory
     * until a complete set of locks (one per data path) is obtained.
     *
     * @throws ElasticSearchIllegalStateException if no node ordinal could be fully locked
     */
    @Inject
    public NodeEnvironment(Settings settings, Environment environment) {
        super(settings);
        // Nodes that do not require local storage (e.g. pure client nodes) skip locking entirely.
        if (!DiscoveryNode.nodeRequiresLocalStorage(settings)) {
            nodeFiles = null;
            nodeIndicesLocations = null;
            locks = null;
            localNodeId = -1;
            return;
        }
        File[] nodesFiles = new File[environment.dataWithClusterFiles().length];
        Lock[] locks = new Lock[environment.dataWithClusterFiles().length];
        int localNodeId = -1;
        IOException lastException = null;
        int maxLocalStorageNodes = settings.getAsInt("node.max_local_storage_nodes", 50);
        for (int possibleLockId = 0; possibleLockId < maxLocalStorageNodes; possibleLockId++) {
            // An ordinal is usable only if the lock can be taken in EVERY data path; on any
            // failure, all locks taken so far are rolled back and the next ordinal is tried.
            for (int dirIndex = 0; dirIndex < environment.dataWithClusterFiles().length; dirIndex++) {
                File dir = new File(new File(environment.dataWithClusterFiles()[dirIndex], "nodes"), Integer.toString(possibleLockId));
                if (!dir.exists()) {
                    FileSystemUtils.mkdirs(dir);
                }
                logger.trace("obtaining node lock on {} ...", dir.getAbsolutePath());
                try {
                    NativeFSLockFactory lockFactory = new NativeFSLockFactory(dir);
                    Lock tmpLock = lockFactory.makeLock("node.lock");
                    boolean obtained = tmpLock.obtain();
                    if (obtained) {
                        locks[dirIndex] = tmpLock;
                        nodesFiles[dirIndex] = dir;
                        localNodeId = possibleLockId;
                    } else {
                        logger.trace("failed to obtain node lock on {}", dir.getAbsolutePath());
                        // release all the ones that were obtained up until now
                        for (int i = 0; i < locks.length; i++) {
                            if (locks[i] != null) {
                                try {
                                    locks[i].release();
                                } catch (Exception e1) {
                                    // ignore
                                }
                            }
                            locks[i] = null;
                        }
                        break;
                    }
                } catch (IOException e) {
                    logger.trace("failed to obtain node lock on {}", e, dir.getAbsolutePath());
                    lastException = new IOException("failed to obtain lock on " + dir.getAbsolutePath(), e);
                    // release all the ones that were obtained up until now
                    for (int i = 0; i < locks.length; i++) {
                        if (locks[i] != null) {
                            try {
                                locks[i].release();
                            } catch (Exception e1) {
                                // ignore
                            }
                        }
                        locks[i] = null;
                    }
                    break;
                }
            }
            // locks[0] non-null implies the inner loop completed without rolling back.
            if (locks[0] != null) {
                // we found a lock, break
                break;
            }
        }
        if (locks[0] == null) {
            throw new ElasticSearchIllegalStateException("Failed to obtain node lock, is the following location writable?: " + Arrays.toString(environment.dataWithClusterFiles()), lastException);
        }
        this.localNodeId = localNodeId;
        this.locks = locks;
        this.nodeFiles = nodesFiles;
        if (logger.isDebugEnabled()) {
            logger.debug("using node location [{}], local_node_id [{}]", nodesFiles, localNodeId);
        }
        if (logger.isTraceEnabled()) {
            StringBuilder sb = new StringBuilder("node data locations details:\n");
            for (File file : nodesFiles) {
                sb.append(" -> ").append(file.getAbsolutePath()).append(", free_space [").append(new ByteSizeValue(file.getFreeSpace())).append("], usable_space [").append(new ByteSizeValue(file.getUsableSpace())).append("]\n");
            }
            logger.trace(sb.toString());
        }
        // Pre-compute the per-path "indices" directories used by the lookup methods below.
        this.nodeIndicesLocations = new File[nodeFiles.length];
        for (int i = 0; i < nodeFiles.length; i++) {
            nodeIndicesLocations[i] = new File(nodeFiles[i], "indices");
        }
    }

    /** Returns the node ordinal locked by this process, or -1 when local storage is disabled. */
    public int localNodeId() {
        return this.localNodeId;
    }

    /** Returns true when this node holds locked local data directories. */
    public boolean hasNodeFile() {
        return nodeFiles != null && locks != null;
    }

    /**
     * Returns the locked data directories of this node.
     *
     * @throws ElasticSearchIllegalStateException when the node stores nothing locally
     */
    public File[] nodeDataLocations() {
        if (nodeFiles == null || locks == null) {
            throw new ElasticSearchIllegalStateException("node is not configured to store local location");
        }
        return nodeFiles;
    }

    /** Returns the "indices" directory under each data location. */
    public File[] indicesLocations() {
        return nodeIndicesLocations;
    }

    /** Returns the per-data-path directories for the given index ("<path>/indices/<index>"). */
    public File[] indexLocations(Index index) {
        File[] indexLocations = new File[nodeFiles.length];
        for (int i = 0; i < nodeFiles.length; i++) {
            indexLocations[i] = new File(new File(nodeFiles[i], "indices"), index.name());
        }
        return indexLocations;
    }

    /** Returns the per-data-path directories for the given shard ("<path>/indices/<index>/<shard>"). */
    public File[] shardLocations(ShardId shardId) {
        File[] shardLocations = new File[nodeFiles.length];
        for (int i = 0; i < nodeFiles.length; i++) {
            shardLocations[i] = new File(new File(new File(nodeFiles[i], "indices"), shardId.index().name()), Integer.toString(shardId.id()));
        }
        return shardLocations;
    }

    /**
     * Scans all data paths and returns the names of every index directory found on disk.
     *
     * @throws ElasticSearchIllegalStateException when the node stores nothing locally
     */
    public Set<String> findAllIndices() throws Exception {
        if (nodeFiles == null || locks == null) {
            throw new ElasticSearchIllegalStateException("node is not configured to store local location");
        }
        Set<String> indices = Sets.newHashSet();
        for (File indicesLocation : nodeIndicesLocations) {
            File[] indicesList = indicesLocation.listFiles();
            if (indicesList == null) {
                continue;
            }
            for (File indexLocation : indicesList) {
                if (indexLocation.isDirectory()) {
                    indices.add(indexLocation.getName());
                }
            }
        }
        return indices;
    }

    /**
     * Scans all data paths and returns every shard id found on disk. Only directories whose
     * names parse as integers are treated as shards.
     *
     * @throws ElasticSearchIllegalStateException when the node stores nothing locally
     */
    public Set<ShardId> findAllShardIds() throws Exception {
        if (nodeFiles == null || locks == null) {
            throw new ElasticSearchIllegalStateException("node is not configured to store local location");
        }
        Set<ShardId> shardIds = Sets.newHashSet();
        for (File indicesLocation : nodeIndicesLocations) {
            File[] indicesList = indicesLocation.listFiles();
            if (indicesList == null) {
                continue;
            }
            for (File indexLocation : indicesList) {
                if (!indexLocation.isDirectory()) {
                    continue;
                }
                String indexName = indexLocation.getName();
                File[] shardsList = indexLocation.listFiles();
                if (shardsList == null) {
                    continue;
                }
                for (File shardLocation : shardsList) {
                    if (!shardLocation.isDirectory()) {
                        continue;
                    }
                    Integer shardId = Ints.tryParse(shardLocation.getName());
                    if (shardId != null) {
                        shardIds.add(new ShardId(indexName, shardId));
                    }
                }
            }
        }
        return shardIds;
    }

    /** Releases every directory lock held by this node; release failures are ignored. */
    public void close() {
        if (locks != null) {
            for (Lock lock : locks) {
                try {
                    lock.release();
                } catch (IOException e) {
                    // ignore
                }
            }
        }
    }
}
| |
/**
* Copyright (C) 2009-2013 Enstratius, Inc.
*
* ====================================================================
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* ====================================================================
*/
package org.dasein.cloud.aws.compute;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.*;
import org.apache.log4j.Logger;
import org.dasein.cloud.CloudException;
import org.dasein.cloud.InternalException;
import org.dasein.cloud.OperationNotSupportedException;
import org.dasein.cloud.ProviderContext;
import org.dasein.cloud.Requirement;
import org.dasein.cloud.ResourceStatus;
import org.dasein.cloud.Tag;
import org.dasein.cloud.aws.AWSCloud;
import org.dasein.cloud.compute.AbstractVolumeSupport;
import org.dasein.cloud.compute.Platform;
import org.dasein.cloud.compute.Volume;
import org.dasein.cloud.compute.VolumeCreateOptions;
import org.dasein.cloud.compute.VolumeFilterOptions;
import org.dasein.cloud.compute.VolumeFormat;
import org.dasein.cloud.compute.VolumeProduct;
import org.dasein.cloud.compute.VolumeState;
import org.dasein.cloud.compute.VolumeSupport;
import org.dasein.cloud.compute.VolumeType;
import org.dasein.cloud.dc.DataCenter;
import org.dasein.cloud.identity.ServiceAction;
import org.dasein.cloud.util.APITrace;
import org.dasein.cloud.util.Cache;
import org.dasein.cloud.util.CacheLevel;
import org.dasein.util.uom.storage.Gigabyte;
import org.dasein.util.uom.storage.Storage;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
public class EBSVolume extends AbstractVolumeSupport {
    static private final Logger logger = Logger.getLogger(EBSVolume.class);

    // Cloud connection used to build, sign and send EC2 requests; also held by the
    // superclass, retained here for direct access.
    private AWSCloud provider = null;

    EBSVolume(AWSCloud provider) {
        super(provider);
        this.provider = provider;
    }
/**
 * Attaches an EBS volume to an EC2 instance under the given device name.
 *
 * @param volumeId the EC2 volume to attach
 * @param toServer the EC2 instance receiving the volume
 * @param device   the device identifier to expose the volume as
 * @throws CloudException the AttachVolume call was rejected by EC2
 * @throws InternalException an error occurred inside the Dasein implementation
 */
@Override
public void attach(@Nonnull String volumeId, @Nonnull String toServer, @Nonnull String device) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.attach");
    try {
        final Map<String,String> request = provider.getStandardParameters(provider.getContext(), EC2Method.ATTACH_VOLUME);

        request.put("VolumeId", volumeId);
        request.put("InstanceId", toServer);
        request.put("Device", device);
        try {
            new EC2Method(provider, provider.getEc2Url(), request).invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
    }
    finally {
        APITrace.end();
    }
}
/**
 * Creates a new EBS volume, optionally from a snapshot, tags it with its name,
 * description and any caller metadata, and returns the new volume's provider ID.
 */
@Override
public @Nonnull String createVolume(@Nonnull VolumeCreateOptions options) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.createVolume");
    try {
        // EBS only provides raw block devices; network file systems are rejected up front.
        if( !options.getFormat().equals(VolumeFormat.BLOCK)) {
            throw new OperationNotSupportedException("NFS volumes are not currently supported");
        }
        ProviderContext ctx = provider.getContext();
        if( ctx == null ) {
            throw new InternalException("No context was specified for this request");
        }
        Map<String,String> parameters = provider.getStandardParameters(ctx, EC2Method.CREATE_VOLUME);
        EC2Method method;
        NodeList blocks;
        Document doc;
        if( options.getSnapshotId() != null ) {
            parameters.put("SnapshotId", options.getSnapshotId());
        }
        parameters.put("Size", String.valueOf(options.getVolumeSize().getQuantity().intValue()));
        // When the caller picked no data center, fall back to the last one listed for the region.
        String az = options.getDataCenterId();
        if( az == null ) {
            for( DataCenter dc : provider.getDataCenterServices().listDataCenters(ctx.getRegionId()) ) {
                az = dc.getProviderDataCenterId();
            }
            if( az == null ) {
                throw new CloudException("Unable to identify a launch data center");
            }
        }
        parameters.put("AvailabilityZone", az);
        // Volume products (and provisioned IOPS) are only passed through on AWS proper
        // and enStratus; other EC2-compatible clouds ignore them.
        if( provider.getEC2Provider().isAWS() || provider.getEC2Provider().isEnStratus() ) {
            if( options.getVolumeProductId() != null ) {
                VolumeProduct prd = null;
                for( VolumeProduct p : listVolumeProducts() ) {
                    if( p.getProviderProductId().equals(options.getVolumeProductId()) ) {
                        prd = p;
                        break;
                    }
                }
                if( prd != null ) {
                    parameters.put("VolumeType", prd.getProviderProductId());
                    // Use the requested IOPS when the product supports IOPS and the caller
                    // asked for some; otherwise fall back to the product's minimum, if any.
                    if( prd.getMaxIops() > 0 && options.getIops() > 0 ) {
                        parameters.put("Iops", String.valueOf(options.getIops()));
                    }
                    else if( prd.getMinIops() > 0 ) {
                        parameters.put("Iops", String.valueOf(prd.getMinIops()));
                    }
                }
            }
        }
        method = new EC2Method(provider, provider.getEc2Url(), parameters);
        try {
            doc = method.invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        blocks = doc.getElementsByTagName("volumeId");
        if( blocks.getLength() > 0 ) {
            String id = blocks.item(0).getFirstChild().getNodeValue().trim();
            // Name and description are persisted as EC2 tags alongside any caller metadata.
            Map<String,Object> meta = options.getMetaData();
            meta.put("Name", options.getName());
            meta.put("Description", options.getDescription());
            ArrayList<Tag> tags = new ArrayList<Tag>();
            for( Map.Entry<String,Object> entry : meta.entrySet() ) {
                Object value = entry.getValue();
                if( value != null ) {
                    tags.add(new Tag(entry.getKey(), value.toString()));
                }
            }
            if( !tags.isEmpty() ) {
                provider.createTags(id, tags.toArray(new Tag[tags.size()]));
            }
            return id;
        }
        throw new CloudException("Successful POST, but no volume information was provided");
    }
    finally {
        APITrace.end();
    }
}
/**
 * Detaches an EBS volume from whatever instance it is attached to.
 *
 * @param volumeId the volume to detach
 * @param force    when true, asks EC2 to force the detachment
 * @throws CloudException EC2 refused or failed the detach request
 * @throws InternalException an error occurred inside the Dasein implementation
 */
@Override
public void detach(@Nonnull String volumeId, boolean force) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.detach");
    try {
        final Map<String,String> request = provider.getStandardParameters(provider.getContext(), EC2Method.DETACH_VOLUME);

        request.put("VolumeId", volumeId);
        if( force ) {
            request.put("Force", "true");
        }
        Document response;

        try {
            response = new EC2Method(provider, provider.getEc2Url(), request).invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        // EC2 answers with a boolean <return> element; anything but "true" means refusal.
        NodeList results = response.getElementsByTagName("return");

        if( results.getLength() > 0 && !results.item(0).getFirstChild().getNodeValue().equalsIgnoreCase("true") ) {
            throw new CloudException("Detach of volume denied.");
        }
    }
    finally {
        APITrace.end();
    }
}
@Override
public int getMaximumVolumeCount() throws InternalException, CloudException {
    // Negative sentinel: no concrete per-account volume limit is reported here
    // (presumably the Dasein convention for "unknown" -- confirm against VolumeSupport docs).
    return -2;
}
@Override
public @Nullable Storage<Gigabyte> getMaximumVolumeSize() throws InternalException, CloudException {
    // Largest volume this implementation advertises: 1024 GB (1 TB).
    return new Storage<Gigabyte>(1024, Storage.GIGABYTE);
}
@Override
public @Nonnull Storage<Gigabyte> getMinimumVolumeSize() throws InternalException, CloudException {
    // Smallest volume this implementation advertises: 10 GB.
    return new Storage<Gigabyte>(10, Storage.GIGABYTE);
}
@Override
public @Nonnull String getProviderTermForVolume(@Nonnull Locale locale) {
    // No localized terminology is provided; the locale argument is ignored.
    return "volume";
}
@Override
public boolean isSubscribed() throws CloudException, InternalException {
    // No separate subscription check is performed; volume support is always reported available.
    return true;
}
/**
 * Suggests device identifiers under which a volume may be attached: xvdf through xvdj on
 * Windows, /dev/sdf through /dev/sdj on all other platforms.
 */
@Override
public @Nonnull Iterable<String> listPossibleDeviceIds(@Nonnull Platform platform) throws InternalException, CloudException {
    final ArrayList<String> deviceIds = new ArrayList<String>();

    if( platform.isWindows() ) {
        Collections.addAll(deviceIds, "xvdf", "xvdg", "xvdh", "xvdi", "xvdj");
    }
    else {
        Collections.addAll(deviceIds, "/dev/sdf", "/dev/sdg", "/dev/sdh", "/dev/sdi", "/dev/sdj");
    }
    return deviceIds;
}
@Override
public @Nonnull Iterable<VolumeFormat> listSupportedFormats() throws InternalException, CloudException {
    // EBS exposes raw block devices only; see the BLOCK-format check in createVolume().
    return Collections.singletonList(VolumeFormat.BLOCK);
}
/**
 * Returns the volume products offered ("standard" magnetic volumes and "io1"
 * provisioned-IOPS volumes), cached per region.
 */
@Override
public @Nonnull Iterable<VolumeProduct> listVolumeProducts() throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.listVolumeProducts");
    try {
        // Products are static per region, so they are cached at REGION scope.
        Cache<VolumeProduct> cache = Cache.getInstance(getProvider(), "volumeProducts", VolumeProduct.class, CacheLevel.REGION);
        Iterable<VolumeProduct> products = cache.get(getContext());
        if( products == null ) {
            ArrayList<VolumeProduct> prds = new ArrayList<VolumeProduct>();
            ProviderContext ctx = provider.getContext();
            // Hard-coded per-region price for standard volumes, defaulting to 0.11.
            float rawPrice = 0.11f;
            if( ctx != null ) {
                String regionId = ctx.getRegionId();
                if( regionId != null ) {
                    if( regionId.equals("us-east-1") || regionId.equals("us-west-2") ) {
                        rawPrice = 0.10f;
                    }
                    else if( regionId.equals("ap-northeast-1") ) {
                        rawPrice = 0.12f;
                    }
                    else if( regionId.equals("sa-east-1") ) {
                        rawPrice = 0.19f;
                    }
                }
            }
            prds.add(VolumeProduct.getInstance("standard", "Standard", "Standard EBS with no IOPS Guarantees", VolumeType.HDD, getMinimumVolumeSize(), "USD", 0, 0, rawPrice, 0f));
            prds.add(VolumeProduct.getInstance("io1", "IOPS EBS", "EBS Volume with IOPS guarantees", VolumeType.HDD, getMinimumVolumeSize(), "USD", 100, 1000, 0.125f, 0.1f));
            cache.put(getContext(), prds);
            products = prds;
        }
        return products;
    }
    finally {
        APITrace.end();
    }
}
/**
 * Looks up a single volume by provider ID via DescribeVolumes.
 *
 * @param volumeId the EC2 volume ID to look up
 * @return the matching volume, or null when EC2 reports it as unknown
 */
@Override
public @Nullable Volume getVolume(@Nonnull String volumeId) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.getVolume");
    try {
        final ProviderContext ctx = provider.getContext();

        if( ctx == null ) {
            throw new CloudException("No context exists for this request.");
        }
        final Map<String,String> request = provider.getStandardParameters(provider.getContext(), EC2Method.DESCRIBE_VOLUMES);

        request.put("VolumeId.1", volumeId);
        Document response;

        try {
            response = new EC2Method(provider, provider.getEc2Url(), request).invoke();
        }
        catch( EC2Exception e ) {
            // A missing or malformed volume ID means "not found" rather than a hard failure.
            String code = e.getCode();

            if( code != null && (code.startsWith("InvalidVolume.NotFound") || code.equals("InvalidParameterValue")) ) {
                return null;
            }
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        NodeList volumeSets = response.getElementsByTagName("volumeSet");

        for( int i=0; i<volumeSets.getLength(); i++ ) {
            NodeList entries = volumeSets.item(i).getChildNodes();

            for( int j=0; j<entries.getLength(); j++ ) {
                Node entry = entries.item(j);

                if( entry.getNodeName().equals("item") ) {
                    Volume v = toVolume(ctx, entry);

                    if( v != null && v.getProviderVolumeId().equals(volumeId) ) {
                        return v;
                    }
                }
            }
        }
        return null;
    }
    finally {
        APITrace.end();
    }
}
/** Volume products do not apply on Eucalyptus/OpenStack clouds; elsewhere they are optional. */
@Override
public @Nonnull Requirement getVolumeProductRequirement() throws InternalException, CloudException {
    if( provider.getEC2Provider().isEucalyptus() || provider.getEC2Provider().isOpenStack() ) {
        return Requirement.NONE;
    }
    return Requirement.OPTIONAL;
}
@Override
public boolean isVolumeSizeDeterminedByProduct() throws InternalException, CloudException {
    // Size is chosen freely by the caller at creation time, independent of the product.
    return false;
}
/**
 * Lists the ID and coarse state of every volume in the region via DescribeVolumes.
 */
@Override
public @Nonnull Iterable<ResourceStatus> listVolumeStatus() throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.listVolumeStatus");
    try {
        final ProviderContext ctx = provider.getContext();

        if( ctx == null ) {
            throw new CloudException("No context exists for this request.");
        }
        final Map<String,String> request = provider.getStandardParameters(provider.getContext(), EC2Method.DESCRIBE_VOLUMES);
        final ArrayList<ResourceStatus> statuses = new ArrayList<ResourceStatus>();
        Document response;

        try {
            response = new EC2Method(provider, provider.getEc2Url(), request).invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        NodeList volumeSets = response.getElementsByTagName("volumeSet");

        for( int i=0; i<volumeSets.getLength(); i++ ) {
            NodeList entries = volumeSets.item(i).getChildNodes();

            for( int j=0; j<entries.getLength(); j++ ) {
                Node entry = entries.item(j);

                if( entry.getNodeName().equals("item") ) {
                    ResourceStatus status = toStatus(entry);

                    if( status != null ) {
                        statuses.add(status);
                    }
                }
            }
        }
        return statuses;
    }
    finally {
        APITrace.end();
    }
}
@Override
public @Nonnull Iterable<Volume> listVolumes() throws InternalException, CloudException {
    // Unfiltered listing; delegates to the filtered variant with no criteria.
    return listVolumes( null );
}
/**
 * Lists all volumes in the region, optionally restricted by filter options. Tag filters
 * are pushed down to EC2; the full filter is re-applied client-side on each result.
 */
@Override
public @Nonnull Iterable<Volume> listVolumes(@Nullable VolumeFilterOptions options) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.listVolumes");
    try {
        ProviderContext ctx = provider.getContext();
        if( ctx == null ) {
            throw new CloudException("No context exists for this request.");
        }
        Map<String,String> parameters = provider.getStandardParameters(provider.getContext(), EC2Method.DESCRIBE_VOLUMES);
        ArrayList<Volume> list = new ArrayList<Volume>();
        EC2Method method;
        NodeList blocks;
        Document doc;
        // Server-side pre-filtering by tags reduces the response size.
        if ( options != null ) {
            provider.putExtraParameters( parameters, provider.getTagFilterParams( options.getTags() ) );
        }
        method = new EC2Method( provider, provider.getEc2Url(), parameters );
        try {
            doc = method.invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        blocks = doc.getElementsByTagName("volumeSet");
        for( int i=0; i<blocks.getLength(); i++ ) {
            NodeList items = blocks.item(i).getChildNodes();
            for( int j=0; j<items.getLength(); j++ ) {
                Node item = items.item(j);
                if( item.getNodeName().equals("item") ) {
                    Volume volume = toVolume( ctx, item );
                    // Re-check the whole filter since tag params cover only part of it.
                    if( volume != null && (options == null || options.matches(volume)) ) {
                        list.add(volume);
                    }
                }
            }
        }
        return list;
    }
    finally {
        APITrace.end();
    }
}
/**
 * Translates a Dasein volume action into the EC2/IAM action name(s) it requires.
 * Unknown actions map to an empty array.
 */
@Override
public @Nonnull String[] mapServiceAction(@Nonnull ServiceAction action) {
    if( action.equals(VolumeSupport.ANY) ) {
        return new String[] { EC2Method.EC2_PREFIX + "*" };
    }
    if( action.equals(VolumeSupport.ATTACH) ) {
        return new String[] { EC2Method.EC2_PREFIX + EC2Method.ATTACH_VOLUME };
    }
    if( action.equals(VolumeSupport.CREATE_VOLUME) ) {
        return new String[] { EC2Method.EC2_PREFIX + EC2Method.CREATE_VOLUME };
    }
    if( action.equals(VolumeSupport.DETACH) ) {
        return new String[] { EC2Method.EC2_PREFIX + EC2Method.DETACH_VOLUME };
    }
    if( action.equals(VolumeSupport.GET_VOLUME) || action.equals(VolumeSupport.LIST_VOLUME) ) {
        return new String[] { EC2Method.EC2_PREFIX + EC2Method.DESCRIBE_VOLUMES };
    }
    if( action.equals(VolumeSupport.REMOVE_VOLUME) ) {
        return new String[] { EC2Method.EC2_PREFIX + EC2Method.DELETE_VOLUME };
    }
    return new String[0];
}
/** Deletes an EBS volume; throws when EC2 denies the deletion. */
@Override
public void remove(@Nonnull String volumeId) throws InternalException, CloudException {
    APITrace.begin(getProvider(), "Volume.remove");
    try {
        Map<String,String> parameters = provider.getStandardParameters(provider.getContext(), EC2Method.DELETE_VOLUME);
        EC2Method method;
        NodeList blocks;
        Document doc;
        parameters.put("VolumeId", volumeId);
        method = new EC2Method(provider, provider.getEc2Url(), parameters);
        try {
            doc = method.invoke();
        }
        catch( EC2Exception e ) {
            logger.error(e.getSummary());
            throw new CloudException(e);
        }
        // EC2 answers with a boolean <return> element; anything but "true" means refusal.
        blocks = doc.getElementsByTagName("return");
        if( blocks.getLength() > 0 ) {
            if( !blocks.item(0).getFirstChild().getNodeValue().equalsIgnoreCase("true") ) {
                throw new CloudException("Deletion of volume denied.");
            }
        }
    }
    finally {
        APITrace.end();
    }
}
@Override
public void updateTags(@Nonnull String volumeId, @Nonnull Tag ... tags) throws CloudException, InternalException {
    // Single-volume convenience overload; delegates to the bulk form.
    updateTags(new String[] { volumeId }, tags);
}
/** Applies the given tags to every listed volume via the provider's bulk tagging call. */
@Override
public void updateTags(@Nonnull String[] volumeIds, @Nonnull Tag... tags) throws CloudException, InternalException {
    APITrace.begin(getProvider(), "Volume.updateTags");
    try {
        provider.createTags( volumeIds, tags );
    }
    finally {
        APITrace.end();
    }
}
@Override
public void removeTags(@Nonnull String volumeId, @Nonnull Tag ... tags) throws CloudException, InternalException {
    // Single-volume convenience overload; delegates to the bulk form.
    removeTags(new String[] { volumeId }, tags);
}
/** Removes the given tags from every listed volume via the provider's bulk tagging call. */
@Override
public void removeTags(@Nonnull String[] volumeIds, @Nonnull Tag... tags) throws CloudException, InternalException {
    APITrace.begin(getProvider(), "Volume.removeTags");
    try {
        provider.removeTags( volumeIds, tags );
    }
    finally {
        APITrace.end();
    }
}
/**
 * Extracts a volume's ID and coarse state from a DescribeVolumes {@code <item>} node.
 *
 * @param node the DOM node for a single volume entry (may be null)
 * @return the status pair, or null when the node is null or carries no volume ID
 */
private @Nullable ResourceStatus toStatus(@Nullable Node node) throws CloudException {
    if( node == null ) {
        return null;
    }
    String volumeId = null;
    VolumeState state = VolumeState.PENDING;
    NodeList children = node.getChildNodes();

    for( int i=0; i<children.getLength(); i++ ) {
        Node child = children.item(i);
        String tag = child.getNodeName();

        if( tag.equals("volumeId") ) {
            volumeId = child.getFirstChild().getNodeValue().trim();
        }
        else if( tag.equals("status") ) {
            String s = child.getFirstChild().getNodeValue().trim();

            // Transitional states map to PENDING, usable states to AVAILABLE,
            // and everything else to DELETED.
            if( s.equals("creating") || s.equals("attaching") || s.equals("attached") || s.equals("detaching") || s.equals("detached") ) {
                state = VolumeState.PENDING;
            }
            else if( s.equals("available") || s.equals("in-use") ) {
                state = VolumeState.AVAILABLE;
            }
            else {
                state = VolumeState.DELETED;
            }
        }
    }
    return (volumeId == null) ? null : new ResourceStatus(volumeId, state);
}
/**
 * Converts a single {@code <item>} element of a DescribeVolumes response into a Dasein
 * {@link Volume}.
 *
 * @param ctx  the provider context supplying the region assigned to the volume
 * @param node the DOM node for the volume's {@code <item>} element (may be null)
 * @return the populated volume, or null when the node is null or carries no volume ID
 * @throws CloudException if the createTime timestamp cannot be parsed
 */
private @Nullable Volume toVolume(@Nonnull ProviderContext ctx, @Nullable Node node) throws CloudException {
    if( node == null ) {
        return null;
    }
    NodeList attrs = node.getChildNodes();
    Volume volume = new Volume();

    // Defaults for responses that omit volumeType/iops (pre-provisioned-IOPS volumes).
    volume.setProviderProductId("standard");
    volume.setType( VolumeType.HDD );
    volume.setFormat(VolumeFormat.BLOCK);
    for( int i=0; i<attrs.getLength(); i++ ) {
        Node attr = attrs.item(i);
        String name;
        name = attr.getNodeName();
        if( name.equals("volumeId") ) {
            volume.setProviderVolumeId(attr.getFirstChild().getNodeValue().trim());
        }
        else if( name.equalsIgnoreCase("name") && attr.hasChildNodes() ) {
            // BUG FIX: was getNodeName(), which returns the child node's name (e.g. "#text"),
            // never the element's text content.
            volume.setName(attr.getFirstChild().getNodeValue().trim());
        }
        else if( name.equalsIgnoreCase("description") && attr.hasChildNodes() ) {
            // BUG FIX: same getNodeName() -> getNodeValue() correction as for "name".
            volume.setDescription(attr.getFirstChild().getNodeValue().trim());
        }
        else if( name.equals("size") ) {
            int size = Integer.parseInt(attr.getFirstChild().getNodeValue().trim());
            volume.setSize(new Storage<Gigabyte>(size, Storage.GIGABYTE));
        }
        else if( name.equals("snapshotId") ) {
            NodeList values = attr.getChildNodes();
            if( values != null && values.getLength() > 0 ) {
                volume.setProviderSnapshotId(values.item(0).getNodeValue().trim());
            }
        }
        else if( name.equals("availabilityZone") ) {
            String zoneId = attr.getFirstChild().getNodeValue().trim();
            volume.setProviderDataCenterId(zoneId);
        }
        else if( name.equalsIgnoreCase("volumeType") && attr.hasChildNodes() ) {
            volume.setProviderProductId(attr.getFirstChild().getNodeValue().trim());
        }
        else if( name.equalsIgnoreCase("iops") && attr.hasChildNodes() ) {
            volume.setIops(Integer.parseInt(attr.getFirstChild().getNodeValue().trim()));
        }
        else if( name.equals("createTime") ) {
            SimpleDateFormat fmt = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");

            // EC2 timestamps are UTC and the trailing 'Z' is quoted as a literal in the
            // pattern, so the parser must be pinned to UTC or the resulting epoch value
            // is shifted by the JVM's local offset.
            fmt.setTimeZone(TimeZone.getTimeZone("UTC"));
            String value = attr.getFirstChild().getNodeValue().trim();
            try {
                volume.setCreationTimestamp(fmt.parse(value).getTime());
            }
            catch( ParseException e ) {
                // Log with context and rethrow with the cause preserved (printStackTrace removed).
                logger.error("Unparsable createTime [" + value + "]", e);
                throw new CloudException(e);
            }
        }
        else if( name.equals("status") ) {
            String s = attr.getFirstChild().getNodeValue().trim();
            VolumeState state;

            // Transitional states map to PENDING, usable states to AVAILABLE,
            // and everything else to DELETED.
            if( s.equals("creating") || s.equals("attaching") || s.equals("attached") || s.equals("detaching") || s.equals("detached") ) {
                state = VolumeState.PENDING;
            }
            else if( s.equals("available") || s.equals("in-use") ) {
                state = VolumeState.AVAILABLE;
            }
            else {
                state = VolumeState.DELETED;
            }
            volume.setCurrentState(state);
        }
        else if( name.equals("tagSet") ) {
            provider.setTags(attr, volume);
            // Prefer explicit Name/Description tags when the fields are still unset.
            String s = volume.getTag("Name");
            if( s != null && volume.getName() == null ) {
                volume.setName(s);
            }
            s = volume.getTag("Description");
            if( s != null && volume.getDescription() == null ) {
                volume.setDescription(s);
            }
        }
        else if( name.equals("attachmentSet") ) {
            NodeList attachments = attr.getChildNodes();
            for( int j=0; j<attachments.getLength(); j++ ) {
                Node item = attachments.item(j);
                if( item.getNodeName().equals("item") ) {
                    NodeList infoList = item.getChildNodes();
                    for( int k=0; k<infoList.getLength(); k++ ) {
                        Node info = infoList.item(k);
                        name = info.getNodeName();
                        if( name.equals("instanceId") ) {
                            volume.setProviderVirtualMachineId(info.getFirstChild().getNodeValue().trim());
                        }
                        else if( name.equals("device") ) {
                            String deviceId = info.getFirstChild().getNodeValue().trim();
                            // Strip the "unknown,requested:" prefix some responses carry.
                            if( deviceId.startsWith("unknown,requested:") ) {
                                deviceId = deviceId.substring(18);
                            }
                            volume.setDeviceId(deviceId);
                        }
                    }
                }
            }
        }
    }
    if( volume.getProviderVolumeId() == null ) {
        return null;
    }
    // Fall back to the volume ID for the name, and the name for the description.
    if( volume.getName() == null ) {
        volume.setName(volume.getProviderVolumeId());
    }
    if( volume.getDescription() == null ) {
        volume.setDescription(volume.getName());
    }
    volume.setProviderRegionId(ctx.getRegionId());
    return volume;
}
}
| |
/*
* Copyright 2005-2014 The Kuali Foundation.
*
* Licensed under the Educational Community License, Version 1.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ecl1.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kuali.coeus.s2sgen.impl.generate;
import gov.grants.apply.system.globalV10.HashValueDocument;
import gov.grants.apply.system.headerV10.GrantSubmissionHeaderDocument;
import gov.grants.apply.system.metaGrantApplication.GrantApplicationDocument;
import gov.grants.apply.system.metaGrantApplication.GrantApplicationDocument.GrantApplication.Forms;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.xmlbeans.XmlCursor;
import org.apache.xmlbeans.XmlObject;
import org.kuali.coeus.propdev.api.core.DevelopmentProposalContract;
import org.kuali.coeus.propdev.api.s2s.*;
import org.kuali.coeus.propdev.api.core.ProposalDevelopmentDocumentContract;
import org.kuali.coeus.s2sgen.api.core.InfastructureConstants;
import org.kuali.coeus.s2sgen.api.core.S2SException;
import org.kuali.coeus.propdev.api.attachment.NarrativeService;
import org.kuali.coeus.s2sgen.api.generate.*;
import org.kuali.coeus.s2sgen.api.hash.GrantApplicationHashService;
import org.kuali.coeus.s2sgen.impl.datetime.S2SDateTimeService;
import org.kuali.coeus.s2sgen.impl.validate.S2SValidatorService;
import org.kuali.coeus.s2sgen.api.core.AuditError;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import javax.xml.namespace.QName;
import java.util.*;
@Component("formGeneratorService")
public class FormGeneratorServiceImpl implements FormGeneratorService {
private static final Log LOG = LogFactory.getLog(FormGeneratorServiceImpl.class);

// Resolves the generator bean responsible for a given form namespace.
@Autowired
@Qualifier("s2SFormGeneratorRetrievalService")
private S2SFormGeneratorRetrievalService s2SFormGeneratorService;

// Schema-validates generated form XML, accumulating audit errors.
@Autowired
@Qualifier("s2SValidatorService")
private S2SValidatorService s2SValidatorService;

// Manages proposal narratives; system-generated narratives are purged before generation.
@Autowired
@Qualifier("narrativeService")
private NarrativeService narrativeService;

// Maps opportunity form namespaces to form metadata (FormMappingInfo).
@Autowired
@Qualifier("formMappingService")
private FormMappingService formMappingService;

// Date/time helper for S2S generation (usage not visible in this excerpt).
@Autowired
@Qualifier("s2SDateTimeService")
private S2SDateTimeService s2SDateTimeService;

// Computes the grant application hash (usage not visible in this excerpt).
@Autowired
@Qualifier("grantApplicationHashService")
private GrantApplicationHashService grantApplicationHashService;
/**
 * Validates the S2S forms of a proposal without assembling the final
 * application XML.
 *
 * @param pdDoc the Proposal Development Document; must be a
 *        {@link ProposalDevelopmentDocumentContract} and must not be null.
 * @return the validation outcome with any collected audit errors.
 * @throws S2SException if a form cannot be generated.
 */
@Override
public FormValidationResult validateForms(Object pdDoc) throws S2SException {
    if (pdDoc == null) {
        throw new IllegalArgumentException("pdDoc is null");
    }
    // Null forms/attachment list means "validate only": generated form
    // objects are discarded instead of being spliced into a Forms container.
    return generateAndValidateForms(null, null,
            (ProposalDevelopmentDocumentContract) pdDoc);
}
/**
 * Generates and validates every included S2S opportunity form for the
 * proposal and, when a {@code forms} container and attachment list are
 * supplied, attaches each validated form's XML and its attachments.
 *
 * @param forms
 * Forms container receiving validated form XML; null to validate only.
 * @param attList
 * List of attachments to append to; a fresh list is used when null.
 * @param pdDoc
 * Proposal Development Document.
 * @return result carrying the overall validity flag, collected audit
 * errors and the attachment list.
 * @throws S2SException if any form generator fails.
 */
protected FormGenerationResult generateAndValidateForms(Forms forms,
List<AttachmentData> attList, ProposalDevelopmentDocumentContract pdDoc) throws S2SException {
boolean validationSucceeded = true;
DevelopmentProposalContract developmentProposal = pdDoc.getDevelopmentProposal();
List<? extends S2sOppFormsContract> opportunityForms = developmentProposal.getS2sOppForms();
if (attList == null) {
attList = new ArrayList<AttachmentData>();
}
List<AuditError> auditErrors = new ArrayList<AuditError>();
// Remove previously system-generated narratives so attachments are
// rebuilt from scratch for this generation pass.
getNarrativeService().deleteSystemGeneratedNarratives(pdDoc.getDevelopmentProposal().getNarratives());
for (S2sOppFormsContract opportunityForm : opportunityForms) {
// Only forms the user marked for inclusion are generated.
if (!opportunityForm.getInclude()) {
continue;
}
List<AttachmentData> formAttList = new ArrayList<AttachmentData>();
S2SBaseFormGenerator s2sFormGenerator = null;
FormMappingInfo info = formMappingService.getFormInfo(developmentProposal.getProposalNumber(),opportunityForm.getOppNameSpace());
// Skip namespaces that have no registered generator mapping.
if(info==null) continue;
String namespace = info.getNameSpace();
s2sFormGenerator = (S2SBaseFormGenerator)s2SFormGeneratorService.getS2SGenerator(developmentProposal.getProposalNumber(),namespace);
// The generator reports problems and attachments through these
// shared collectors.
s2sFormGenerator.setAuditErrors(auditErrors);
s2sFormGenerator.setAttachments(formAttList);
s2sFormGenerator.setNamespace(info.getNameSpace());
try {
XmlObject formObject = s2sFormGenerator.getFormObject(pdDoc);
if (s2SValidatorService.validate(formObject, auditErrors)) {
// Attach only valid forms, and only when a container was supplied.
if (forms != null && attList != null) {
setFormObject(forms, formObject);
}
} else {
validationSucceeded = false;
}
// Per-form attachments are collected even when validation failed.
attList.addAll(formAttList);
} catch (Exception ex) {
LOG.error(
"Unknown error from " + opportunityForm.getFormName(),
ex);
throw new S2SException("Could not generate form for "
+ opportunityForm.getFormName(), ex);
}
}
FormGenerationResult result = new FormGenerationResult();
result.setValid(validationSucceeded);
result.setErrors(auditErrors);
result.setAttachments(attList);
return result;
}
/**
 * Generates and validates all forms for the proposal and, when every form
 * validates, serializes the complete grant application XML into the result.
 *
 * @param pdDoc the Proposal Development Document; must be a
 * {@link ProposalDevelopmentDocumentContract} and must not be null.
 * @return result with errors, attachments and (when valid) the application XML.
 * @throws S2SException if form generation fails.
 */
@Override
public FormGenerationResult generateAndValidateForms(Object pdDoc) throws S2SException {
if (pdDoc == null) {
throw new IllegalArgumentException("pdDoc is null");
}
ProposalDevelopmentDocumentContract pdDocContract = (ProposalDevelopmentDocumentContract) pdDoc;
GrantApplicationDocument.GrantApplication.Forms forms = GrantApplicationDocument.GrantApplication.Forms.Factory.newInstance();
List<AttachmentData> attList = new ArrayList<AttachmentData>();
final FormGenerationResult result = generateAndValidateForms(forms, attList, pdDocContract);
// Only build the full application document when all forms passed validation.
if (result.isValid()) {
String applicationXml = getGrantApplicationDocument(pdDocContract,forms);
result.setApplicationXml(applicationXml);
}
return result;
}
/**
 * Assembles the complete grants.gov {@link GrantApplicationDocument} XML for
 * a proposal: attaches the generated forms, builds the submission header
 * (titles, agency, CFDA / competition / opportunity ids, open and close
 * dates), computes the forms hash, and stamps the xsi:schemaLocation
 * attribute on the document element.
 *
 * @param pdDoc
 * {@link ProposalDevelopmentDocumentContract} source proposal
 * @param forms
 * generated XML forms to embed in the application
 * @return the serialized grant application XML
 * @throws S2SException if serialization fails
 */
protected String getGrantApplicationDocument(
        ProposalDevelopmentDocumentContract pdDoc, GrantApplicationDocument.GrantApplication.Forms forms) throws S2SException {
    GrantApplicationDocument grantApplicationDocument = GrantApplicationDocument.Factory.newInstance();
    GrantApplicationDocument.GrantApplication grantApplication = GrantApplicationDocument.GrantApplication.Factory.newInstance();
    grantApplication.setForms(forms);
    GrantSubmissionHeaderDocument.GrantSubmissionHeader grantSubmissionHeader = GrantSubmissionHeaderDocument.GrantSubmissionHeader.Factory.newInstance();
    grantSubmissionHeader.setActivityTitle(pdDoc.getDevelopmentProposal().getProgramAnnouncementTitle());
    grantSubmissionHeader.setOpportunityTitle(pdDoc.getDevelopmentProposal().getProgramAnnouncementTitle());
    grantSubmissionHeader.setAgencyName(pdDoc.getDevelopmentProposal().getSponsor().getSponsorName());
    if (pdDoc.getDevelopmentProposal().getCfdaNumber() != null) {
        grantSubmissionHeader.setCFDANumber(pdDoc.getDevelopmentProposal().getCfdaNumber());
    }
    S2sOpportunityContract s2sOpportunity = pdDoc.getDevelopmentProposal().getS2sOpportunity();
    if (s2sOpportunity.getCompetetionId() != null) {
        grantSubmissionHeader.setCompetitionID(s2sOpportunity.getCompetetionId());
    }
    grantSubmissionHeader.setOpportunityID(s2sOpportunity.getOpportunityId());
    grantSubmissionHeader.setSchemaVersion(FormVersion.v1_0.getVersion());
    grantSubmissionHeader.setSubmissionTitle(s2sOpportunity.getProposalNumber());
    // Opening/closing dates are optional on the opportunity; set only when present.
    Calendar closingDate = s2sOpportunity.getClosingDate();
    if (closingDate != null) {
        grantSubmissionHeader.setClosingDate(closingDate);
    }
    Calendar openingDate = s2sOpportunity.getOpeningDate();
    if (openingDate != null) {
        grantSubmissionHeader.setOpeningDate(openingDate);
    }
    // The hash is computed over the GrantApplication fragment serialized
    // BEFORE the header is attached, so the header itself does not
    // influence the hash value.
    String applicationXml = getXmlFromDocument(grantApplication);
    String hashVal = grantApplicationHashService.computeGrantFormsHash(applicationXml);
    HashValueDocument.HashValue hashValue = HashValueDocument.HashValue.Factory.newInstance();
    hashValue.setHashAlgorithm(InfastructureConstants.HASH_ALGORITHM);
    hashValue.setStringValue(hashVal);
    grantSubmissionHeader.setHashValue(hashValue);
    grantApplication.setGrantSubmissionHeader(grantSubmissionHeader);
    grantApplicationDocument.setGrantApplication(grantApplication);
    String schemaUrl = s2sOpportunity.getSchemaUrl();
    // XMLBeans cursors hold native state; fix: dispose in a finally block
    // instead of leaking the cursor as the original code did.
    XmlCursor cursor = grantApplicationDocument.newCursor();
    try {
        cursor.toStartDoc();
        if (cursor.toFirstChild()) {
            String defaultNameSpace = cursor.getName().getNamespaceURI();
            String schemaLocation = defaultNameSpace + " " + schemaUrl;
            cursor.setAttributeText(new QName("http://www.w3.org/2001/XMLSchema-instance", "schemaLocation"), schemaLocation);
        }
    } finally {
        cursor.dispose();
    }
    return getXmlFromDocument(grantApplicationDocument);
}
/**
 * Serializes an XMLBeans object using the service's namespace-prefix
 * options and strips the timezone factor from the resulting text.
 *
 * @param grantApplicationDocument document (or fragment) to serialize
 * @return the normalized XML text
 */
private String getXmlFromDocument(XmlObject grantApplicationDocument) {
    String rawXml = grantApplicationDocument.xmlText(s2SFormGeneratorService.getXmlOptionsPrefixes());
    return s2SDateTimeService.removeTimezoneFactor(rawXml);
}
/**
 * Splices a generated grants.gov form into the MetaGrants Forms object. The
 * xmlbeans schema compiled with xsd:any offers no direct method to add
 * individual forms, so a cursor is created over the Forms object and
 * {@code moveXml} transplants the form element into it.
 *
 * @param forms
 * Forms object to which the grants.gov form is added.
 * @param formObject
 * xml object representing the grants.gov form.
 */
protected void setFormObject(Forms forms, XmlObject formObject) {
    // Cursor positioned at the root element of the grants.gov form.
    XmlCursor formCursor = formObject.newCursor();
    try {
        formCursor.toStartDoc();
        formCursor.toNextToken();
        // Cursor positioned inside the Forms container.
        XmlCursor metaGrantCursor = forms.newCursor();
        try {
            metaGrantCursor.toNextToken();
            // Move the form element into the Forms object.
            formCursor.moveXml(metaGrantCursor);
        } finally {
            // Fix: cursors hold native XMLBeans state; the original code
            // never disposed either cursor.
            metaGrantCursor.dispose();
        }
    } finally {
        formCursor.dispose();
    }
}
/**
*
* Setter for {@link S2SFormGeneratorRetrievalService}
*
* @param s2SFormGeneratorService
*/
public void setS2SFormGeneratorService(
S2SFormGeneratorRetrievalService s2SFormGeneratorService) {
this.s2SFormGeneratorService = s2SFormGeneratorService;
}
/**
* Gets the s2SFormGeneratorService attribute.
*
* @return Returns the s2SFormGeneratorService.
*/
public S2SFormGeneratorRetrievalService getS2SFormGeneratorService() {
return s2SFormGeneratorService;
}
/**
* Gets the s2SValidatorService attribute.
*
* @return Returns the s2SValidatorService.
*/
public S2SValidatorService getS2SValidatorService() {
return s2SValidatorService;
}
/**
* Sets the s2SValidatorService attribute value.
*
* @param validatorService
* The s2SValidatorService to set.
*/
public void setS2SValidatorService(S2SValidatorService validatorService) {
s2SValidatorService = validatorService;
}
public NarrativeService getNarrativeService() {
return narrativeService;
}
public void setNarrativeService(NarrativeService narrativeService) {
this.narrativeService = narrativeService;
}
public S2SDateTimeService getS2SDateTimeService() {
return s2SDateTimeService;
}
/**
 * Sets the {@link S2SDateTimeService}.
 *
 * @param s2SDateTimeService the date/time service to set.
 */
public void setS2SDateTimeService(S2SDateTimeService s2SDateTimeService) {
    // Bug fix: the original assigned the field to itself
    // (this.s2SDateTimeService = s2SDateTimeService with the parameter named
    // s2SUtilService), silently discarding the injected service.
    this.s2SDateTimeService = s2SDateTimeService;
}
public FormMappingService getFormMappingService() {
return formMappingService;
}
public void setFormMappingService(FormMappingService formMappingService) {
this.formMappingService = formMappingService;
}
public GrantApplicationHashService getGrantApplicationHashService() {
return grantApplicationHashService;
}
public void setGrantApplicationHashService(GrantApplicationHashService grantApplicationHashService) {
this.grantApplicationHashService = grantApplicationHashService;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.catalina.tribes.membership;
import java.io.IOException;
import java.net.DatagramPacket;
import java.util.Properties;
import org.apache.catalina.tribes.Channel;
import org.apache.catalina.tribes.ChannelException;
import org.apache.catalina.tribes.ChannelMessage;
import org.apache.catalina.tribes.Member;
import org.apache.catalina.tribes.MembershipListener;
import org.apache.catalina.tribes.MembershipService;
import org.apache.catalina.tribes.MessageListener;
import org.apache.catalina.tribes.io.ChannelData;
import org.apache.catalina.tribes.io.XByteBuffer;
import org.apache.catalina.tribes.util.Arrays;
import org.apache.catalina.tribes.util.StringManager;
import org.apache.catalina.tribes.util.UUIDGenerator;
/**
* A <b>membership</b> implementation using simple multicast.
* This is the representation of a multicast membership service.
* This class is responsible for maintaining a list of active cluster nodes in the cluster.
* If a node fails to send out a heartbeat, the node will be dismissed.
*
* @author Filip Hanik
*/
public class McastService implements MembershipService,MembershipListener,MessageListener {
private static final org.apache.juli.logging.Log log =
org.apache.juli.logging.LogFactory.getLog( McastService.class );
/**
* The string manager for this package.
*/
protected static final StringManager sm = StringManager.getManager(Constants.Package);
/**
* The descriptive information about this implementation.
*/
private static final String info = "McastService/2.1";
/**
* The implementation specific properties
*/
protected Properties properties = new Properties();
/**
* A handle to the actual low level implementation
*/
protected McastServiceImpl impl;
/**
* A membership listener delegate (should be the cluster :)
*/
protected MembershipListener listener;
/**
* A message listener delegate for broadcasts
*/
protected MessageListener msglistener;
/**
* The local member
*/
protected MemberImpl localMember ;
private int mcastSoTimeout;
private int mcastTTL;
protected byte[] payload;
protected byte[] domain;
private Channel channel;
/**
* Create a membership service.
*/
public McastService() {
//default values
properties.setProperty("mcastPort","45564");
properties.setProperty("mcastAddress","228.0.0.4");
properties.setProperty("memberDropTime","3000");
properties.setProperty("mcastFrequency","500");
}
/**
 * Return descriptive information about this implementation and the
 * corresponding version number, in the format
 * <code>&lt;description&gt;/&lt;version&gt;</code>.
 *
 * @return the info string, e.g. {@code McastService/2.1}
 */
public String getInfo() {
return (info);
}
/**
 * Configures the membership service. The following keys are required
 * (mcastBindAddress is the only optional address setting):
 * <ul>
 * <li>mcastPort - the multicast port to listen on</li>
 * <li>mcastAddress - the multicast group address</li>
 * <li>memberDropTime - how long a silent member is kept before being dropped</li>
 * <li>mcastFrequency - how often heartbeat messages are sent</li>
 * <li>tcpListenPort - the port this member listens to</li>
 * <li>tcpListenHost - the bind address of this member</li>
 * </ul>
 *
 * @param properties the configuration to apply
 * @exception java.lang.IllegalArgumentException if a required property is missing.
 */
@Override
public void setProperties(Properties properties) {
    // Fail fast on the first missing required key.
    String[] required = { "mcastPort", "mcastAddress", "memberDropTime",
            "mcastFrequency", "tcpListenPort", "tcpListenHost" };
    for (String name : required) {
        hasProperty(properties, name);
    }
    this.properties = properties;
}
/**
* Return the properties, see setProperties
*/
@Override
public Properties getProperties() {
return properties;
}
/**
* Return the local member name
*/
public String getLocalMemberName() {
return localMember.toString() ;
}
/**
* Return the local member
*/
@Override
public Member getLocalMember(boolean alive) {
if ( alive && localMember != null && impl != null) localMember.setMemberAliveTime(System.currentTimeMillis()-impl.getServiceStartTime());
return localMember;
}
/**
* Sets the local member properties for broadcasting
*/
@Override
public void setLocalMemberProperties(String listenHost, int listenPort, int securePort, int udpPort) {
properties.setProperty("tcpListenHost",listenHost);
properties.setProperty("tcpListenPort",String.valueOf(listenPort));
properties.setProperty("udpListenPort",String.valueOf(udpPort));
properties.setProperty("tcpSecurePort",String.valueOf(securePort));
try {
if (localMember != null) {
localMember.setHostname(listenHost);
localMember.setPort(listenPort);
} else {
localMember = new MemberImpl(listenHost, listenPort, 0);
localMember.setUniqueId(UUIDGenerator.randomUUID(true));
localMember.setPayload(getPayload());
localMember.setDomain(getDomain());
localMember.setLocal(true);
}
localMember.setSecurePort(securePort);
localMember.setUdpPort(udpPort);
localMember.getData(true, true);
}catch ( IOException x ) {
throw new IllegalArgumentException(x);
}
}
public void setAddress(String addr) {
properties.setProperty("mcastAddress", addr);
}
/**
* @deprecated use setAddress
* @param addr String
*/
@Deprecated
public void setMcastAddr(String addr) {
setAddress(addr);
}
public String getAddress() {
return properties.getProperty("mcastAddress");
}
/**
* @deprecated use getAddress
* @return String
*/
@Deprecated
public String getMcastAddr() {
return getAddress();
}
public void setMcastBindAddress(String bindaddr) {
setBind(bindaddr);
}
public void setBind(String bindaddr) {
properties.setProperty("mcastBindAddress", bindaddr);
}
/**
* @deprecated use getBind
* @return String
*/
@Deprecated
public String getMcastBindAddress() {
return getBind();
}
public String getBind() {
return properties.getProperty("mcastBindAddress");
}
/**
* @deprecated use setPort
* @param port int
*/
@Deprecated
public void setMcastPort(int port) {
setPort(port);
}
public void setPort(int port) {
properties.setProperty("mcastPort", String.valueOf(port));
}
public void setRecoveryCounter(int recoveryCounter) {
properties.setProperty("recoveryCounter", String.valueOf(recoveryCounter));
}
/**
 * Returns the configured recovery counter.
 *
 * @return the parsed "recoveryCounter" property, or -1 when it is unset
 */
public int getRecoveryCounter(){
    String value = properties.getProperty("recoveryCounter");
    return value == null ? -1 : Integer.parseInt(value);
}
public void setRecoveryEnabled(boolean recoveryEnabled) {
properties.setProperty("recoveryEnabled", String.valueOf(recoveryEnabled));
}
public boolean getRecoveryEnabled() {
String p = properties.getProperty("recoveryEnabled");
if(p != null){
return Boolean.parseBoolean(p);
}
return false;
}
public void setRecoverySleepTime(long recoverySleepTime) {
properties.setProperty("recoverySleepTime", String.valueOf(recoverySleepTime));
}
public long getRecoverySleepTime(){
String p = properties.getProperty("recoverySleepTime");
if(p != null){
return Long.parseLong(p);
}
return -1;
}
public void setLocalLoopbackDisabled(boolean localLoopbackDisabled) {
properties.setProperty("localLoopbackDisabled",String.valueOf(localLoopbackDisabled));
}
// NOTE(review): the localLoopbackDisabled parameter is never read — this is
// a getter that only consults the stored property. The parameter is kept
// here to preserve the public signature; consider deprecating it in favor
// of a no-arg accessor.
public boolean getLocalLoopbackDisabled(boolean localLoopbackDisabled) {
String p = properties.getProperty("localLoopbackDisabled");
if(p != null){
return Boolean.parseBoolean(p);
}
// Loopback suppression defaults to off when the property is unset.
return false;
}
/**
* @deprecated use getPort()
* @return int
*/
@Deprecated
public int getMcastPort() {
return getPort();
}
public int getPort() {
String p = properties.getProperty("mcastPort");
return Integer.parseInt(p);
}
/**
* @deprecated use setFrequency
* @param time long
*/
@Deprecated
public void setMcastFrequency(long time) {
setFrequency(time);
}
public void setFrequency(long time) {
properties.setProperty("mcastFrequency", String.valueOf(time));
}
/**
* @deprecated use getFrequency
* @return long
*/
@Deprecated
public long getMcastFrequency() {
return getFrequency();
}
public long getFrequency() {
String p = properties.getProperty("mcastFrequency");
return Long.parseLong(p);
}
public void setMcastDropTime(long time) {
setDropTime(time);
}
public void setDropTime(long time) {
properties.setProperty("memberDropTime", String.valueOf(time));
}
/**
* @deprecated use getDropTime
* @return long
*/
@Deprecated
public long getMcastDropTime() {
return getDropTime();
}
public long getDropTime() {
String p = properties.getProperty("memberDropTime");
return Long.parseLong(p);
}
/**
 * Verifies that a required configuration key is present.
 *
 * @param properties The set of properties to inspect
 * @param name The required property key
 * @throws IllegalArgumentException if the key is absent
 */
protected void hasProperty(Properties properties, String name){
    if (properties.getProperty(name) == null) {
        throw new IllegalArgumentException("McastService:Required property \"" + name + "\" is missing.");
    }
}
/**
* Start broadcasting and listening to membership pings
* @throws java.lang.Exception if a IO error occurs
*/
@Override
public void start() throws java.lang.Exception {
start(MembershipService.MBR_RX);
start(MembershipService.MBR_TX);
}
/**
 * Starts the requested service level (receive and/or transmit). On first
 * start this validates configuration, builds the local member advertised in
 * heartbeats, and creates the low level {@link McastServiceImpl}; later
 * calls simply delegate to the existing implementation.
 *
 * @param level the service level to start (MBR_RX / MBR_TX)
 * @throws java.lang.Exception if a required property is missing or the
 * low level service fails to start
 */
@Override
public void start(int level) throws java.lang.Exception {
hasProperty(properties,"mcastPort");
hasProperty(properties,"mcastAddress");
hasProperty(properties,"memberDropTime");
hasProperty(properties,"mcastFrequency");
hasProperty(properties,"tcpListenPort");
hasProperty(properties,"tcpListenHost");
hasProperty(properties,"tcpSecurePort");
hasProperty(properties,"udpListenPort");
// Already initialized: just (re)start the requested level.
if ( impl != null ) {
impl.start(level);
return;
}
String host = getProperties().getProperty("tcpListenHost");
int port = Integer.parseInt(getProperties().getProperty("tcpListenPort"));
int securePort = Integer.parseInt(getProperties().getProperty("tcpSecurePort"));
int udpPort = Integer.parseInt(getProperties().getProperty("udpListenPort"));
// Build or refresh the local member record.
if ( localMember == null ) {
localMember = new MemberImpl(host, port, 100);
localMember.setUniqueId(UUIDGenerator.randomUUID(true));
localMember.setLocal(true);
} else {
localMember.setHostname(host);
localMember.setPort(port);
localMember.setMemberAliveTime(100);
}
localMember.setSecurePort(securePort);
localMember.setUdpPort(udpPort);
if ( this.payload != null ) localMember.setPayload(payload);
if ( this.domain != null ) localMember.setDomain(domain);
localMember.setServiceStartTime(System.currentTimeMillis());
java.net.InetAddress bind = null;
if ( properties.getProperty("mcastBindAddress")!= null ) {
bind = java.net.InetAddress.getByName(properties.getProperty("mcastBindAddress"));
}
// Optional tuning knobs; unparsable values are logged and left at -1.
int ttl = -1;
int soTimeout = -1;
if ( properties.getProperty("mcastTTL") != null ) {
try {
ttl = Integer.parseInt(properties.getProperty("mcastTTL"));
} catch ( Exception x ) {
log.error("Unable to parse mcastTTL="+properties.getProperty("mcastTTL"),x);
}
}
if ( properties.getProperty("mcastSoTimeout") != null ) {
try {
soTimeout = Integer.parseInt(properties.getProperty("mcastSoTimeout"));
} catch ( Exception x ) {
log.error("Unable to parse mcastSoTimeout="+properties.getProperty("mcastSoTimeout"),x);
}
}
// Create the low level multicast implementation with all settings; this
// service registers itself as both membership and message listener.
impl = new McastServiceImpl(localMember,Long.parseLong(properties.getProperty("mcastFrequency")),
Long.parseLong(properties.getProperty("memberDropTime")),
Integer.parseInt(properties.getProperty("mcastPort")),
bind,
java.net.InetAddress.getByName(properties.getProperty("mcastAddress")),
ttl,
soTimeout,
this,
this,
Boolean.parseBoolean(properties.getProperty("localLoopbackDisabled","false")));
// Recovery settings default to enabled / 10 attempts / 5s sleep.
String value = properties.getProperty("recoveryEnabled","true");
boolean recEnabled = Boolean.parseBoolean(value);
impl.setRecoveryEnabled(recEnabled);
int recCnt = Integer.parseInt(properties.getProperty("recoveryCounter","10"));
impl.setRecoveryCounter(recCnt);
long recSlpTime = Long.parseLong(properties.getProperty("recoverySleepTime","5000"));
impl.setRecoverySleepTime(recSlpTime);
impl.setChannel(channel);
impl.start(level);
}
/**
 * Stop broadcasting and listening to membership pings for the given
 * service level(s). Failures are logged, not propagated.
 *
 * @param svc the service level(s) to stop (MBR_RX and/or MBR_TX)
 */
@Override
public void stop(int svc) {
try {
// impl.stop returns true once every level has stopped; only then is the
// implementation torn down and the channel reference released.
if ( impl != null && impl.stop(svc) ) {
impl.setChannel(null);
impl = null;
channel = null;
}
} catch ( Exception x) {
log.error("Unable to stop the mcast service, level:"+svc+".",x);
}
}
/**
 * Return the string form of every current member.
 *
 * @return the member names; an empty array when there are no members
 */
@Override
public String[] getMembersByName() {
    Member[] current = getMembers();
    if (current == null) {
        return new String[0];
    }
    String[] names = new String[current.length];
    for (int i = 0; i < current.length; i++) {
        names[i] = current[i].toString();
    }
    return names;
}
/**
 * Look up a member whose string form equals the given name.
 *
 * @param name the member name to match
 * @return the matching member, or null when no member matches
 */
@Override
public Member findMemberByName(String name) {
    for (Member candidate : getMembers()) {
        if (name.equals(candidate.toString())) {
            return candidate;
        }
    }
    return null;
}
/**
* has members?
*/
@Override
public boolean hasMembers() {
if ( impl == null || impl.membership == null ) return false;
return impl.membership.hasMembers();
}
@Override
public Member getMember(Member mbr) {
if ( impl == null || impl.membership == null ) return null;
return impl.membership.getMember(mbr);
}
/**
* Return all the members
*/
protected static final Member[]EMPTY_MEMBERS = new Member[0];
@Override
public Member[] getMembers() {
if ( impl == null || impl.membership == null ) return EMPTY_MEMBERS;
return impl.membership.getMembers();
}
/**
* Add a membership listener, this version only supports one listener per service,
* so calling this method twice will result in only the second listener being active.
* @param listener The listener
*/
@Override
public void setMembershipListener(MembershipListener listener) {
this.listener = listener;
}
public void setMessageListener(MessageListener listener) {
this.msglistener = listener;
}
public void removeMessageListener() {
this.msglistener = null;
}
/**
* Remove the membership listener
*/
@Override
public void removeMembershipListener(){
listener = null;
}
@Override
public void memberAdded(Member member) {
if ( listener!=null ) listener.memberAdded(member);
}
/**
* Callback from the impl when a new member has been received
* @param member The member
*/
@Override
public void memberDisappeared(Member member)
{
if ( listener!=null ) listener.memberDisappeared(member);
}
@Override
public void messageReceived(ChannelMessage msg) {
if (msglistener!=null && msglistener.accept(msg)) msglistener.messageReceived(msg);
}
@Override
public boolean accept(ChannelMessage msg) {
return true;
}
/**
 * Broadcasts a message over the multicast channel.
 *
 * @param message the channel message to send
 * @throws ChannelException if multicast transmit is not started, the
 * serialized packet exceeds the maximum datagram size, or the send fails
 */
@Override
public void broadcast(ChannelMessage message) throws ChannelException {
// Transmit must have been started via start(MBR_TX).
if (impl==null || (impl.startLevel & Channel.MBR_TX_SEQ)!=Channel.MBR_TX_SEQ )
throw new ChannelException("Multicast send is not started or enabled.");
byte[] data = XByteBuffer.createDataPackage((ChannelData)message);
// The whole message is sent as a single UDP datagram, so it must fit.
if (data.length>McastServiceImpl.MAX_PACKET_SIZE) {
throw new ChannelException("Packet length["+data.length+"] exceeds max packet size of "+McastServiceImpl.MAX_PACKET_SIZE+" bytes.");
}
DatagramPacket packet = new DatagramPacket(data,0,data.length);
try {
impl.send(false, packet);
} catch (Exception x) {
throw new ChannelException(x);
}
}
/**
* @deprecated use getSoTimeout
* @return int
*/
@Deprecated
public int getMcastSoTimeout() {
return getSoTimeout();
}
public int getSoTimeout() {
return mcastSoTimeout;
}
/**
* @deprecated use setSoTimeout
* @param mcastSoTimeout int
*/
@Deprecated
public void setMcastSoTimeout(int mcastSoTimeout) {
setSoTimeout(mcastSoTimeout);
}
public void setSoTimeout(int mcastSoTimeout) {
this.mcastSoTimeout = mcastSoTimeout;
properties.setProperty("mcastSoTimeout", String.valueOf(mcastSoTimeout));
}
/**
* @deprecated use getTtl
* @return int
*/
@Deprecated
public int getMcastTTL() {
return getTtl();
}
public int getTtl() {
return mcastTTL;
}
public byte[] getPayload() {
return payload;
}
public byte[] getDomain() {
return domain;
}
/**
* @deprecated use setTtl
* @param mcastTTL int
*/
@Deprecated
public void setMcastTTL(int mcastTTL) {
setTtl(mcastTTL);
}
public void setTtl(int mcastTTL) {
this.mcastTTL = mcastTTL;
properties.setProperty("mcastTTL", String.valueOf(mcastTTL));
}
@Override
public void setPayload(byte[] payload) {
this.payload = payload;
if ( localMember != null ) {
localMember.setPayload(payload);
localMember.getData(true,true);
try {
if (impl != null) impl.send(false);
}catch ( Exception x ) {
log.error("Unable to send payload update.",x);
}
}
}
@Override
public void setDomain(byte[] domain) {
this.domain = domain;
if ( localMember != null ) {
localMember.setDomain(domain);
localMember.getData(true,true);
try {
if (impl != null) impl.send(false);
}catch ( Exception x ) {
log.error("Unable to send domain update.",x);
}
}
}
/**
 * Sets the member domain from its string form. A value starting with
 * "{" is parsed as a byte-array literal, otherwise the characters are
 * converted directly to bytes. Null input is ignored.
 *
 * @param domain the domain string, may be null
 */
public void setDomain(String domain) {
    if (domain == null) {
        return;
    }
    byte[] bytes = domain.startsWith("{")
            ? Arrays.fromString(domain)
            : Arrays.convert(domain);
    setDomain(bytes);
}
public Channel getChannel() {
return channel;
}
public void setChannel(Channel channel) {
this.channel = channel;
}
/**
* Simple test program
* @param args Command-line arguments
* @throws Exception If an error occurs
*/
public static void main(String args[]) throws Exception {
if(log.isInfoEnabled())
log.info("Usage McastService hostname tcpport");
McastService service = new McastService();
java.util.Properties p = new java.util.Properties();
p.setProperty("mcastPort","5555");
p.setProperty("mcastAddress","224.10.10.10");
p.setProperty("mcastClusterDomain","catalina");
p.setProperty("bindAddress","localhost");
p.setProperty("memberDropTime","3000");
p.setProperty("mcastFrequency","500");
p.setProperty("tcpListenPort","4000");
p.setProperty("tcpListenHost","127.0.0.1");
p.setProperty("tcpSecurePort","4100");
p.setProperty("udpListenPort","4200");
service.setProperties(p);
service.start();
Thread.sleep(60*1000*60);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.fsdataset.impl;
import java.io.IOException;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.StorageType;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.datanode.FinalizedReplica;
import org.apache.hadoop.hdfs.server.datanode.ReplicaAlreadyExistsException;
import org.apache.hadoop.hdfs.server.datanode.ReplicaBeingWritten;
import org.apache.hadoop.hdfs.server.datanode.ReplicaInPipeline;
import org.apache.hadoop.hdfs.server.datanode.ReplicaInfo;
import org.apache.hadoop.hdfs.server.datanode.ReplicaNotFoundException;
import org.apache.hadoop.hdfs.server.datanode.ReplicaUnderRecovery;
import org.apache.hadoop.hdfs.server.datanode.ReplicaWaitingToBeRecovered;
import org.apache.hadoop.util.DiskChecker.DiskOutOfSpaceException;
import org.junit.Assert;
import org.junit.Test;
/** Test if FSDataset#append, writeToRbw, and writeToTmp */
public class TestWriteToReplica {
final private static int FINALIZED = 0;
final private static int TEMPORARY = 1;
final private static int RBW = 2;
final private static int RWR = 3;
final private static int RUR = 4;
final private static int NON_EXISTENT = 5;
// Exercises FsDatasetImpl close handling against replicas in every state
// registered by setup().
@Test
public void testClose() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(new HdfsConfiguration()).build();
try {
cluster.waitActive();
DataNode dn = cluster.getDataNodes().get(0);
FsDatasetImpl dataSet = (FsDatasetImpl)DataNodeTestUtils.getFSDataset(dn);
// set up replicasMap: one replica per replica state (see setup()).
String bpid = cluster.getNamesystem().getBlockPoolId();
ExtendedBlock[] blocks = setup(bpid, dataSet);
// test close
testClose(dataSet, blocks);
} finally {
// Always tear the mini cluster down, even when the test body throws.
cluster.shutdown();
}
}
// test append
@Test
public void testAppend() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(new HdfsConfiguration()).build();
try {
cluster.waitActive();
DataNode dn = cluster.getDataNodes().get(0);
FsDatasetImpl dataSet = (FsDatasetImpl)DataNodeTestUtils.getFSDataset(dn);
// set up replicasMap
String bpid = cluster.getNamesystem().getBlockPoolId();
ExtendedBlock[] blocks = setup(bpid, dataSet);
// test append
testAppend(bpid, dataSet, blocks);
} finally {
cluster.shutdown();
}
}
// test writeToRbw
@Test
public void testWriteToRbw() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(new HdfsConfiguration()).build();
try {
cluster.waitActive();
DataNode dn = cluster.getDataNodes().get(0);
FsDatasetImpl dataSet = (FsDatasetImpl)DataNodeTestUtils.getFSDataset(dn);
// set up replicasMap
String bpid = cluster.getNamesystem().getBlockPoolId();
ExtendedBlock[] blocks = setup(bpid, dataSet);
// test writeToRbw
testWriteToRbw(dataSet, blocks);
} finally {
cluster.shutdown();
}
}
// test writeToTemporary
@Test
public void testWriteToTemporary() throws Exception {
MiniDFSCluster cluster = new MiniDFSCluster.Builder(new HdfsConfiguration()).build();
try {
cluster.waitActive();
DataNode dn = cluster.getDataNodes().get(0);
FsDatasetImpl dataSet = (FsDatasetImpl)DataNodeTestUtils.getFSDataset(dn);
// set up replicasMap
String bpid = cluster.getNamesystem().getBlockPoolId();
ExtendedBlock[] blocks = setup(bpid, dataSet);
// test writeToTemporary
testWriteToTemporary(dataSet, blocks);
} finally {
cluster.shutdown();
}
}
/**
 * Generate testing environment and return a collection of blocks
 * on which to run the tests.
 *
 * One replica is registered per replica state, indexed by the
 * FINALIZED/TEMPORARY/RBW/RWR/RUR constants; no replica is created for
 * NON_EXISTENT, so that index refers to an unknown block.
 *
 * @param bpid Block pool ID to generate blocks for
 * @param dataSet Namespace in which to insert blocks
 * @return Contrived blocks for further testing.
 * @throws IOException
 */
private ExtendedBlock[] setup(String bpid, FsDatasetImpl dataSet) throws IOException {
// setup replicas map
ExtendedBlock[] blocks = new ExtendedBlock[] {
new ExtendedBlock(bpid, 1, 1, 2001), new ExtendedBlock(bpid, 2, 1, 2002),
new ExtendedBlock(bpid, 3, 1, 2003), new ExtendedBlock(bpid, 4, 1, 2004),
new ExtendedBlock(bpid, 5, 1, 2005), new ExtendedBlock(bpid, 6, 1, 2006)
};
ReplicaMap replicasMap = dataSet.volumeMap;
FsVolumeImpl vol = dataSet.volumes.getNextVolume(StorageType.DEFAULT, 0);
// FINALIZED replica: block and meta files are created on disk.
ReplicaInfo replicaInfo = new FinalizedReplica(
blocks[FINALIZED].getLocalBlock(), vol, vol.getCurrentDir().getParentFile());
replicasMap.add(bpid, replicaInfo);
replicaInfo.getBlockFile().createNewFile();
replicaInfo.getMetaFile().createNewFile();
// TEMPORARY replica lives in the volume's tmp directory.
replicasMap.add(bpid, new ReplicaInPipeline(
blocks[TEMPORARY].getBlockId(),
blocks[TEMPORARY].getGenerationStamp(), vol,
vol.createTmpFile(bpid, blocks[TEMPORARY].getLocalBlock()).getParentFile(), 0));
// RBW (replica being written) lives in the rbw directory with real files.
replicaInfo = new ReplicaBeingWritten(blocks[RBW].getLocalBlock(), vol,
vol.createRbwFile(bpid, blocks[RBW].getLocalBlock()).getParentFile(), null);
replicasMap.add(bpid, replicaInfo);
replicaInfo.getBlockFile().createNewFile();
replicaInfo.getMetaFile().createNewFile();
// RWR (replica waiting to be recovered).
replicasMap.add(bpid, new ReplicaWaitingToBeRecovered(
blocks[RWR].getLocalBlock(), vol, vol.createRbwFile(bpid,
blocks[RWR].getLocalBlock()).getParentFile()));
// RUR (replica under recovery) wraps a finalized replica; 2007 is the
// recovery id passed to the constructor.
replicasMap.add(bpid, new ReplicaUnderRecovery(new FinalizedReplica(blocks[RUR]
.getLocalBlock(), vol, vol.getCurrentDir().getParentFile()), 2007));
return blocks;
}
/**
 * Exercises append() and recoverAppend() against a replica in every state.
 * Only a FINALIZED replica may be appended to; recoverAppend() additionally
 * accepts an RBW replica. Every other state must be rejected with a
 * ReplicaNotFoundException, and an append on a full volume must be rejected
 * with DiskOutOfSpaceException.
 *
 * @param bpid block pool id the fixture replicas belong to
 * @param dataSet the dataset under test
 * @param blocks one block per replica state, as produced by setup()
 * @throws IOException on unexpected dataset failure
 */
private void testAppend(String bpid, FsDatasetImpl dataSet, ExtendedBlock[] blocks) throws IOException {
  long newGS = blocks[FINALIZED].getGenerationStamp()+1;
  final FsVolumeImpl v = (FsVolumeImpl)dataSet.volumeMap.get(
      bpid, blocks[FINALIZED].getLocalBlock()).getVolume();
  long available = v.getCapacity()-v.getDfsUsed();
  long expectedLen = blocks[FINALIZED].getNumBytes();
  try {
    // Fill the volume (a negative delta to decDfsUsed() raises dfsUsed),
    // so the append below must fail for lack of space.
    v.decDfsUsed(bpid, -available);
    blocks[FINALIZED].setNumBytes(expectedLen+100);
    dataSet.append(blocks[FINALIZED], newGS, expectedLen);
    // Fixed message: the replica being appended here is the finalized one,
    // not blocks[RWR] as the old copy/pasted message claimed.
    Assert.fail("Should not have space to append to a finalized replica "
        + blocks[FINALIZED]);
  } catch (DiskOutOfSpaceException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        "Insufficient space for appending to "));
  }
  // Undo the artificial space exhaustion and the length change.
  v.decDfsUsed(bpid, available);
  blocks[FINALIZED].setNumBytes(expectedLen);

  // Fixed: derive the new genstamp from the block actually being appended.
  // It was previously taken from blocks[RBW], which only passed because of
  // the fixture's genstamp ordering (2003+1 > 2001).
  newGS = blocks[FINALIZED].getGenerationStamp()+1;
  dataSet.append(blocks[FINALIZED], newGS,
      blocks[FINALIZED].getNumBytes()); // successful
  blocks[FINALIZED].setGenerationStamp(newGS);

  try {
    dataSet.append(blocks[TEMPORARY], blocks[TEMPORARY].getGenerationStamp()+1,
        blocks[TEMPORARY].getNumBytes());
    Assert.fail("Should not have appended to a temporary replica "
        + blocks[TEMPORARY]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertEquals(ReplicaNotFoundException.UNFINALIZED_REPLICA +
        blocks[TEMPORARY], e.getMessage());
  }
  try {
    dataSet.append(blocks[RBW], blocks[RBW].getGenerationStamp()+1,
        blocks[RBW].getNumBytes());
    Assert.fail("Should not have appended to an RBW replica "
        + blocks[RBW]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertEquals(ReplicaNotFoundException.UNFINALIZED_REPLICA +
        blocks[RBW], e.getMessage());
  }
  try {
    // Fixed: pass the RWR block's own length (was blocks[RBW]'s length;
    // harmless because the call must fail before the length is used, but
    // misleading).
    dataSet.append(blocks[RWR], blocks[RWR].getGenerationStamp()+1,
        blocks[RWR].getNumBytes());
    Assert.fail("Should not have appended to an RWR replica "
        + blocks[RWR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertEquals(ReplicaNotFoundException.UNFINALIZED_REPLICA +
        blocks[RWR], e.getMessage());
  }
  try {
    dataSet.append(blocks[RUR], blocks[RUR].getGenerationStamp()+1,
        blocks[RUR].getNumBytes());
    Assert.fail("Should not have appended to an RUR replica "
        + blocks[RUR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertEquals(ReplicaNotFoundException.UNFINALIZED_REPLICA +
        blocks[RUR], e.getMessage());
  }
  try {
    dataSet.append(blocks[NON_EXISTENT],
        blocks[NON_EXISTENT].getGenerationStamp(),
        blocks[NON_EXISTENT].getNumBytes());
    Assert.fail("Should not have appended to a non-existent replica " +
        blocks[NON_EXISTENT]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertEquals(ReplicaNotFoundException.NON_EXISTENT_REPLICA +
        blocks[NON_EXISTENT], e.getMessage());
  }

  // recoverAppend() accepts a FINALIZED replica...
  newGS = blocks[FINALIZED].getGenerationStamp()+1;
  dataSet.recoverAppend(blocks[FINALIZED], newGS,
      blocks[FINALIZED].getNumBytes()); // successful
  blocks[FINALIZED].setGenerationStamp(newGS);
  try {
    dataSet.recoverAppend(blocks[TEMPORARY], blocks[TEMPORARY].getGenerationStamp()+1,
        blocks[TEMPORARY].getNumBytes());
    Assert.fail("Should not have appended to a temporary replica "
        + blocks[TEMPORARY]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  // ...and an RBW replica.
  newGS = blocks[RBW].getGenerationStamp()+1;
  dataSet.recoverAppend(blocks[RBW], newGS, blocks[RBW].getNumBytes());
  blocks[RBW].setGenerationStamp(newGS);
  try {
    // Fixed: pass the RWR block's own length (was blocks[RBW]'s).
    dataSet.recoverAppend(blocks[RWR], blocks[RWR].getGenerationStamp()+1,
        blocks[RWR].getNumBytes());
    Assert.fail("Should not have appended to an RWR replica "
        + blocks[RWR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  try {
    dataSet.recoverAppend(blocks[RUR], blocks[RUR].getGenerationStamp()+1,
        blocks[RUR].getNumBytes());
    Assert.fail("Should not have appended to an RUR replica "
        + blocks[RUR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  try {
    dataSet.recoverAppend(blocks[NON_EXISTENT],
        blocks[NON_EXISTENT].getGenerationStamp(),
        blocks[NON_EXISTENT].getNumBytes());
    Assert.fail("Should not have appended to a non-existent replica " +
        blocks[NON_EXISTENT]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.NON_EXISTENT_REPLICA));
  }
}
/**
 * Exercises recoverClose() against a replica in every state: only FINALIZED
 * and RBW replicas may be recover-closed; every other state must be rejected
 * with a ReplicaNotFoundException.
 *
 * @param dataSet the dataset under test
 * @param blocks one block per replica state, as produced by setup()
 * @throws IOException on unexpected dataset failure
 */
private void testClose(FsDatasetImpl dataSet, ExtendedBlock [] blocks) throws IOException {
  // recoverClose() accepts a FINALIZED replica...
  long newGS = blocks[FINALIZED].getGenerationStamp()+1;
  dataSet.recoverClose(blocks[FINALIZED], newGS,
      blocks[FINALIZED].getNumBytes()); // successful
  blocks[FINALIZED].setGenerationStamp(newGS);
  try {
    dataSet.recoverClose(blocks[TEMPORARY], blocks[TEMPORARY].getGenerationStamp()+1,
        blocks[TEMPORARY].getNumBytes());
    Assert.fail("Should not have recovered close a temporary replica "
        + blocks[TEMPORARY]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  // ...and an RBW replica.
  newGS = blocks[RBW].getGenerationStamp()+1;
  dataSet.recoverClose(blocks[RBW], newGS, blocks[RBW].getNumBytes());
  blocks[RBW].setGenerationStamp(newGS);
  try {
    // Fixed: pass the RWR block's own length (was blocks[RBW]'s length;
    // harmless because the call must fail first, but misleading).
    dataSet.recoverClose(blocks[RWR], blocks[RWR].getGenerationStamp()+1,
        blocks[RWR].getNumBytes());
    Assert.fail("Should not have recovered close an RWR replica "
        + blocks[RWR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  try {
    dataSet.recoverClose(blocks[RUR], blocks[RUR].getGenerationStamp()+1,
        blocks[RUR].getNumBytes());
    Assert.fail("Should not have recovered close an RUR replica "
        + blocks[RUR]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.UNFINALIZED_AND_NONRBW_REPLICA));
  }
  try {
    dataSet.recoverClose(blocks[NON_EXISTENT],
        blocks[NON_EXISTENT].getGenerationStamp(),
        blocks[NON_EXISTENT].getNumBytes());
    Assert.fail("Should not have recovered close a non-existent replica " +
        blocks[NON_EXISTENT]);
  } catch (ReplicaNotFoundException e) {
    Assert.assertTrue(e.getMessage().startsWith(
        ReplicaNotFoundException.NON_EXISTENT_REPLICA));
  }
}
/**
 * Exercises recoverRbw() and createRbw() against a replica in every state:
 * recoverRbw() must succeed only for an RBW replica, and createRbw() must
 * succeed only for a block with no existing replica.
 *
 * @param dataSet the dataset under test
 * @param blocks one block per replica state, as produced by setup()
 * @throws IOException on unexpected dataset failure
 */
private void testWriteToRbw(FsDatasetImpl dataSet, ExtendedBlock[] blocks) throws IOException {
// A finalized replica can be neither RBW-recovered nor re-created.
try {
dataSet.recoverRbw(blocks[FINALIZED],
blocks[FINALIZED].getGenerationStamp()+1,
0L, blocks[FINALIZED].getNumBytes());
Assert.fail("Should not have recovered a finalized replica " +
blocks[FINALIZED]);
} catch (ReplicaNotFoundException e) {
Assert.assertTrue(e.getMessage().startsWith(
ReplicaNotFoundException.NON_RBW_REPLICA));
}
try {
dataSet.createRbw(StorageType.DEFAULT, blocks[FINALIZED], false);
Assert.fail("Should not have created a replica that's already " +
"finalized " + blocks[FINALIZED]);
} catch (ReplicaAlreadyExistsException e) {
// expected; nothing to assert beyond the exception type
}
// A temporary replica can be neither RBW-recovered nor re-created.
try {
dataSet.recoverRbw(blocks[TEMPORARY],
blocks[TEMPORARY].getGenerationStamp()+1,
0L, blocks[TEMPORARY].getNumBytes());
Assert.fail("Should not have recovered a temporary replica " +
blocks[TEMPORARY]);
} catch (ReplicaNotFoundException e) {
Assert.assertTrue(e.getMessage().startsWith(
ReplicaNotFoundException.NON_RBW_REPLICA));
}
try {
dataSet.createRbw(StorageType.DEFAULT, blocks[TEMPORARY], false);
Assert.fail("Should not have created a replica that had created as " +
"temporary " + blocks[TEMPORARY]);
} catch (ReplicaAlreadyExistsException e) {
// expected
}
// An RBW replica is the one state recoverRbw() accepts; re-creating it
// must still fail.
dataSet.recoverRbw(blocks[RBW], blocks[RBW].getGenerationStamp()+1,
0L, blocks[RBW].getNumBytes()); // expect to be successful
try {
dataSet.createRbw(StorageType.DEFAULT, blocks[RBW], false);
Assert.fail("Should not have created a replica that had created as RBW " +
blocks[RBW]);
} catch (ReplicaAlreadyExistsException e) {
// expected
}
// An RWR replica can be neither RBW-recovered nor re-created.
try {
dataSet.recoverRbw(blocks[RWR], blocks[RWR].getGenerationStamp()+1,
0L, blocks[RWR].getNumBytes());
Assert.fail("Should not have recovered a RWR replica " + blocks[RWR]);
} catch (ReplicaNotFoundException e) {
Assert.assertTrue(e.getMessage().startsWith(
ReplicaNotFoundException.NON_RBW_REPLICA));
}
try {
dataSet.createRbw(StorageType.DEFAULT, blocks[RWR], false);
Assert.fail("Should not have created a replica that was waiting to be " +
"recovered " + blocks[RWR]);
} catch (ReplicaAlreadyExistsException e) {
// expected
}
// An RUR replica can be neither RBW-recovered nor re-created.
try {
dataSet.recoverRbw(blocks[RUR], blocks[RUR].getGenerationStamp()+1,
0L, blocks[RUR].getNumBytes());
Assert.fail("Should not have recovered a RUR replica " + blocks[RUR]);
} catch (ReplicaNotFoundException e) {
Assert.assertTrue(e.getMessage().startsWith(
ReplicaNotFoundException.NON_RBW_REPLICA));
}
try {
dataSet.createRbw(StorageType.DEFAULT, blocks[RUR], false);
Assert.fail("Should not have created a replica that was under recovery " +
blocks[RUR]);
} catch (ReplicaAlreadyExistsException e) {
// expected
}
// A block with no replica cannot be recovered, but can be freshly created.
try {
dataSet.recoverRbw(blocks[NON_EXISTENT],
blocks[NON_EXISTENT].getGenerationStamp()+1,
0L, blocks[NON_EXISTENT].getNumBytes());
Assert.fail("Cannot recover a non-existent replica " +
blocks[NON_EXISTENT]);
} catch (ReplicaNotFoundException e) {
Assert.assertTrue(
e.getMessage().contains(ReplicaNotFoundException.NON_EXISTENT_REPLICA));
}
dataSet.createRbw(StorageType.DEFAULT, blocks[NON_EXISTENT], false);
}
/**
 * Exercises createTemporary(): creation must fail for any block that already
 * has a replica (in any state), succeed for an unknown block, fail for a
 * duplicate creation with the same generation stamp, and replace the stale
 * replica when the same block is re-created with a newer generation stamp.
 *
 * @param dataSet the dataset under test
 * @param blocks one block per replica state, as produced by setup()
 * @throws IOException on unexpected dataset failure
 */
private void testWriteToTemporary(FsDatasetImpl dataSet, ExtendedBlock[] blocks) throws IOException {
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[FINALIZED]);
    Assert.fail("Should not have created a temporary replica that was " +
        "finalized " + blocks[FINALIZED]);
  } catch (ReplicaAlreadyExistsException e) {
    // expected
  }
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[TEMPORARY]);
    // Fixed message: missing space between "as" and "temporary".
    Assert.fail("Should not have created a replica that had created as " +
        "temporary " + blocks[TEMPORARY]);
  } catch (ReplicaAlreadyExistsException e) {
    // expected
  }
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[RBW]);
    Assert.fail("Should not have created a replica that had created as RBW " +
        blocks[RBW]);
  } catch (ReplicaAlreadyExistsException e) {
    // expected
  }
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[RWR]);
    Assert.fail("Should not have created a replica that was waiting to be " +
        "recovered " + blocks[RWR]);
  } catch (ReplicaAlreadyExistsException e) {
    // expected
  }
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[RUR]);
    Assert.fail("Should not have created a replica that was under recovery " +
        blocks[RUR]);
  } catch (ReplicaAlreadyExistsException e) {
    // expected
  }
  // A block with no replica can be created once...
  dataSet.createTemporary(StorageType.DEFAULT, blocks[NON_EXISTENT]);
  // ...but a second creation with the same genstamp must fail.
  try {
    dataSet.createTemporary(StorageType.DEFAULT, blocks[NON_EXISTENT]);
    Assert.fail("Should not have created a replica that had already been "
        + "created " + blocks[NON_EXISTENT]);
  } catch (Exception e) {
    // Fixed: assert the exception type first, so an unexpected exception
    // (whose message might not mention the block, or might be null) fails
    // on the type check rather than on the message inspection.
    Assert.assertTrue(e instanceof ReplicaAlreadyExistsException);
    Assert.assertTrue(
        e.getMessage().contains(blocks[NON_EXISTENT].getBlockName()));
  }
  // Re-creating the same block with a newer genstamp must replace the
  // stale temporary replica instead of failing.
  long newGenStamp = blocks[NON_EXISTENT].getGenerationStamp() * 10;
  blocks[NON_EXISTENT].setGenerationStamp(newGenStamp);
  try {
    ReplicaInPipeline replicaInfo =
        dataSet.createTemporary(StorageType.DEFAULT, blocks[NON_EXISTENT]);
    // Fixed: assertEquals gives a useful failure message, unlike
    // assertTrue on an == comparison.
    Assert.assertEquals(newGenStamp, replicaInfo.getGenerationStamp());
    Assert.assertEquals(
        blocks[NON_EXISTENT].getBlockId(), replicaInfo.getBlockId());
  } catch (ReplicaAlreadyExistsException e) {
    // Fixed message: the call under test is createTemporary(), not createRbw().
    Assert.fail("createTemporary() should have removed the block with the older "
        + "genstamp and replaced it with the newer one: " + blocks[NON_EXISTENT]);
  }
}
}
| |
/* Copyright 2013 The jeo project. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.jeo.vector;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import io.jeo.geom.Geom;
import io.jeo.proj.Proj;
import io.jeo.util.Function;
import io.jeo.util.Optional;
import io.jeo.util.Util;
import org.osgeo.proj4j.CoordinateReferenceSystem;
import com.vividsolutions.jts.geom.Envelope;
import com.vividsolutions.jts.geom.Geometry;
import com.vividsolutions.jts.geom.GeometryFactory;
import com.vividsolutions.jts.geom.MultiLineString;
import com.vividsolutions.jts.geom.MultiPoint;
import com.vividsolutions.jts.geom.MultiPolygon;
import static io.jeo.vector.VectorQuery.all;
/**
* Feature utility class.
*
* @author Justin Deoliveira, OpenGeo
*/
public class Features {

    /**
     * Shared geometry factory.
     * <p>
     * NOTE(review): not referenced within this class, but package-visible, so other
     * classes in the package may rely on it; kept for compatibility.
     * </p>
     */
    static GeometryFactory gfac = new GeometryFactory();

    /**
     * Returns the bounds of the feature object.
     * <p>
     * The bounds is computed by aggregating the bounds of all geometries of the feature
     * object. Projections are not taken into account. To handle geometries in different
     * projections use the {@link #boundsReprojected(Feature)} method.
     * </p>
     * @param f The feature.
     *
     * @return The bounds, or a bounds object in which {@link Envelope#isNull()} returns true.
     */
    public static Envelope bounds(Feature f) {
        Envelope e = new Envelope();
        for (Object obj : f.map().values()) {
            if (obj instanceof Geometry) {
                e.expandToInclude(((Geometry) obj).getEnvelopeInternal());
            }
        }
        return e;
    }

    /**
     * Returns the bounds of the feature object, reprojecting geometries of the feature if required.
     * <p>
     * The bounds is computed by aggregating the bounds of all geometries of the feature
     * object. All geometries are reprojected to the crs of <tt>f.geometry()</tt>, so this
     * method requires that the feature's default geometry has a crs object.
     * </p>
     * @param f The feature.
     *
     * @return The bounds, or a bounds object in which {@link Envelope#isNull()} returns true.
     *
     * @throws IllegalArgumentException If the default geometry has no crs.
     */
    public static Envelope boundsReprojected(Feature f) {
        Geometry geom = f.geometry();
        CoordinateReferenceSystem crs = Proj.crs(geom);
        if (crs == null) {
            throw new IllegalArgumentException("Feature default geometry has no crs");
        }
        return boundsReprojected(f, crs);
    }

    /**
     * Returns the bounds of the feature object, reprojecting geometries of the feature if required.
     * <p>
     * The bounds is computed by aggregating the bounds of all geometries of the feature
     * object in the specified crs.
     * </p>
     * @param f The feature.
     * @param crs The target projection.
     *
     * @return The bounds, or a bounds object in which {@link Envelope#isNull()} returns true.
     *
     * @throws IllegalArgumentException If <tt>crs</tt> is null.
     */
    public static Envelope boundsReprojected(Feature f, CoordinateReferenceSystem crs) {
        if (crs == null) {
            throw new IllegalArgumentException("crs must not be null");
        }
        Envelope e = new Envelope();
        for (Object val : f.map().values()) {
            if (val instanceof Geometry) {
                // instanceof already excludes null, so the previous explicit
                // null check on the cast value was dead code and is removed
                Geometry g = (Geometry) val;
                CoordinateReferenceSystem c = Proj.crs(g);
                if (c != null) {
                    g = Proj.reproject(g, c, crs);
                }
                // geometries with no crs are assumed to already be in the target crs
                e.expandToInclude(g.getEnvelopeInternal());
            }
        }
        return e;
    }

    /**
     * Retypes a feature object to a new schema.
     * <p>
     * This method works by "pulling" the attributes defined by the fields of {@link Schema} from
     * the feature object.
     * </p>
     * @param feature The original feature.
     * @param schema The schema to retype to.
     *
     * @return The retyped feature.
     */
    public static Feature retype(Feature feature, Schema schema) {
        List<Object> values = new ArrayList<Object>();
        for (Field f : schema) {
            // attributes absent from the feature come back as null
            values.add(feature.get(f.name()));
        }
        return new ListFeature(feature.id(), schema, values);
    }

    /**
     * Copies values from one feature to another.
     *
     * @param from The source feature.
     * @param to The target feature.
     *
     * @return The target feature.
     */
    public static Feature copy(Feature from, Feature to) {
        for (Map.Entry<String, Object> kv : from.map().entrySet()) {
            String key = kv.getKey();
            Object val = kv.getValue();
            to.put(key, val);
        }
        return to;
    }

    /**
     * Converts non collection geometry types in the schema to the appropriate collection type.
     *
     * @param schema The original schema.
     *
     * @return The transformed schema.
     */
    public static Schema multify(Schema schema) {
        SchemaBuilder b = Schema.build(schema.name());
        for (Field fld : schema) {
            if (Geometry.class.isAssignableFrom(fld.type())) {
                // safe: guarded by the isAssignableFrom check above
                @SuppressWarnings("unchecked")
                Class<? extends Geometry> t = (Class<? extends Geometry>) fld.type();
                switch(Geom.Type.from(t)) {
                    case POINT:
                        t = MultiPoint.class;
                        break;
                    case LINESTRING:
                        t = MultiLineString.class;
                        break;
                    case POLYGON:
                        t = MultiPolygon.class;
                        break;
                    default:
                        // already a collection (or other) geometry type, left as is
                        break;
                }
                b.field(fld.name(), t, fld.crs());
            }
            else {
                b.field(fld);
            }
        }
        return b.schema();
    }

    /**
     * Converts non collection geometry objects to the associated collection type.
     *
     * @param feature The original feature.
     *
     * @return The transformed feature.
     */
    public static Feature multify(Feature feature) {
        return new GeometryTransformFeature(feature) {
            @Override
            protected Geometry wrap(Geometry g) {
                return Geom.multi(g);
            }
        };
    }

    /**
     * Returns the crs of a feature.
     * <p>
     * The crs of a feature is the crs associated with its default geometry.
     * </p>
     *
     * @see Feature#geometry()
     * @see Proj#crs(Geometry)
     */
    public static CoordinateReferenceSystem crs(Feature f) {
        return Proj.crs(f.geometry());
    }

    /**
     * Returns a new feature id if the specified id is null.
     */
    public static String id(String id) {
        return id != null ? id : Util.uuid();
    }

    /**
     * Derives a schema for a vector dataset.
     * <p>
     * This method computes the schema by inspecting the first feature.
     * </p>
     * @param data The dataset.
     *
     * @return The optional schema, absent when the dataset has no features.
     */
    public static Optional<Schema> schema(final VectorDataset data) throws IOException {
        return data.read(all().limit(1)).first().map(new Function<Feature, Schema>() {
            @Override
            public Schema apply(Feature f) {
                return schema(data.name(), f);
            }
        });
    }

    /**
     * Creates a schema from a feature object.
     *
     * @param name Name of the schema.
     * @param f The feature.
     *
     * @return The new schema.
     *
     * @see {@link SchemaBuilder#fields(Feature)}
     */
    public static Schema schema(String name, Feature f) {
        return Schema.build(name).fields(f).schema();
    }

    /**
     * Compares two feature objects for equality.
     * <p>
     * Equality is based on {@link Feature#id()} and contents obtained from {@link Feature#map()}.
     * </p>
     * @param f1 The first feature.
     * @param f2 The second feature.
     *
     * @return True if the two features are equal.
     */
    public static boolean equals(Feature f1, Feature f2) {
        if (!Objects.equals(f1.id(), f2.id())) {
            return false;
        }
        return Objects.equals(f1.map(), f2.map());
    }

    /**
     * Computes the hashcode for a feature.
     * <p>
     * To remain consistent with {@link #equals(Feature, Feature)} the hash code is computed based on
     * {@link Feature#id()} and {@link Feature#map()}
     * </p>
     * @param f The feature.
     *
     * @return A hashcode.
     */
    public static int hashCode(Feature f) {
        return Objects.hash(f.id(), f.map());
    }

    /**
     * Returns a string representation of a feature, of the form
     * <tt>id{key=value, ...}</tt>.
     * <p>
     * Implementations of {@link Feature} are encouraged to use this method to implement {@link Feature#toString()}.
     * </p>
     */
    public static String toString(Feature f) {
        // NOTE(review): assumes f.id() is non-null (StringBuilder(String) would NPE) — confirm
        StringBuilder sb = new StringBuilder(f.id()).append("{");
        Map<String,Object> map = f.map();
        if (!map.isEmpty()) {
            for (Map.Entry<String,Object> e : map.entrySet()) {
                sb.append(e.getKey()).append("=").append(e.getValue()).append(", ");
            }
            // drop the trailing ", "
            sb.setLength(sb.length()-2);
        }
        return sb.append("}").toString();
    }
}
| |
/**
*/
package es.um.nosql.s13e.NoSQLSchema.impl;
import es.um.nosql.s13e.NoSQLSchema.*;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.EObject;
import org.eclipse.emf.ecore.EPackage;
import org.eclipse.emf.ecore.impl.EFactoryImpl;
import org.eclipse.emf.ecore.plugin.EcorePlugin;
/**
* <!-- begin-user-doc -->
* An implementation of the model <b>Factory</b>.
* <!-- end-user-doc -->
* @generated
*/
public class NoSQLSchemaFactoryImpl extends EFactoryImpl implements NoSQLSchemaFactory {
// NOTE(review): EMF-generated code. Methods tagged @generated are rewritten when the
// model is regenerated; hand edits survive only if the tag is changed to "@generated NOT".
/**
 * Creates the default factory implementation.
 * <p>
 * Prefers a factory already registered for the package nsURI; falls back to a
 * fresh instance if none is registered or the registry lookup fails.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public static NoSQLSchemaFactory init() {
try {
NoSQLSchemaFactory theNoSQLSchemaFactory = (NoSQLSchemaFactory)EPackage.Registry.INSTANCE.getEFactory(NoSQLSchemaPackage.eNS_URI);
if (theNoSQLSchemaFactory != null) {
return theNoSQLSchemaFactory;
}
}
catch (Exception exception) {
// Log and fall through to creating a new factory instance.
EcorePlugin.INSTANCE.log(exception);
}
return new NoSQLSchemaFactoryImpl();
}
/**
 * Creates an instance of the factory.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
public NoSQLSchemaFactoryImpl() {
super();
}
/**
 * Dispatches to the per-class create method for the given classifier id.
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EObject create(EClass eClass) {
switch (eClass.getClassifierID()) {
case NoSQLSchemaPackage.NO_SQL_SCHEMA: return createNoSQLSchema();
case NoSQLSchemaPackage.ENTITY_TYPE: return createEntityType();
case NoSQLSchemaPackage.STRUCTURAL_VARIATION: return createStructuralVariation();
case NoSQLSchemaPackage.ATTRIBUTE: return createAttribute();
case NoSQLSchemaPackage.PLIST: return createPList();
case NoSQLSchemaPackage.REFERENCE: return createReference();
case NoSQLSchemaPackage.AGGREGATE: return createAggregate();
case NoSQLSchemaPackage.PRIMITIVE_TYPE: return createPrimitiveType();
case NoSQLSchemaPackage.NULL: return createNull();
case NoSQLSchemaPackage.RELATIONSHIP_TYPE: return createRelationshipType();
case NoSQLSchemaPackage.PMAP: return createPMap();
case NoSQLSchemaPackage.PSET: return createPSet();
case NoSQLSchemaPackage.PTUPLE: return createPTuple();
default:
throw new IllegalArgumentException("The class '" + eClass.getName() + "' is not a valid classifier");
}
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NoSQLSchema createNoSQLSchema() {
NoSQLSchemaImpl noSQLSchema = new NoSQLSchemaImpl();
return noSQLSchema;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public EntityType createEntityType() {
EntityTypeImpl entityType = new EntityTypeImpl();
return entityType;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public StructuralVariation createStructuralVariation() {
StructuralVariationImpl structuralVariation = new StructuralVariationImpl();
return structuralVariation;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Attribute createAttribute() {
AttributeImpl attribute = new AttributeImpl();
return attribute;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public PList createPList() {
PListImpl pList = new PListImpl();
return pList;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Reference createReference() {
ReferenceImpl reference = new ReferenceImpl();
return reference;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Aggregate createAggregate() {
AggregateImpl aggregate = new AggregateImpl();
return aggregate;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public PrimitiveType createPrimitiveType() {
PrimitiveTypeImpl primitiveType = new PrimitiveTypeImpl();
return primitiveType;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public Null createNull() {
NullImpl null_ = new NullImpl();
return null_;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public RelationshipType createRelationshipType() {
RelationshipTypeImpl relationshipType = new RelationshipTypeImpl();
return relationshipType;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public PMap createPMap() {
PMapImpl pMap = new PMapImpl();
return pMap;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public PSet createPSet() {
PSetImpl pSet = new PSetImpl();
return pSet;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public PTuple createPTuple() {
PTupleImpl pTuple = new PTupleImpl();
return pTuple;
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @generated
 */
@Override
public NoSQLSchemaPackage getNoSQLSchemaPackage() {
return (NoSQLSchemaPackage)getEPackage();
}
/**
 * <!-- begin-user-doc -->
 * <!-- end-user-doc -->
 * @deprecated use {@link NoSQLSchemaPackage#eINSTANCE} directly
 * @generated
 */
@Deprecated
public static NoSQLSchemaPackage getPackage() {
return NoSQLSchemaPackage.eINSTANCE;
}
} //NoSQLSchemaFactoryImpl
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.storm.kafka.spout;
import static org.apache.storm.kafka.spout.FirstPollOffsetStrategy.EARLIEST;
import static org.apache.storm.kafka.spout.FirstPollOffsetStrategy.LATEST;
import static org.apache.storm.kafka.spout.FirstPollOffsetStrategy.UNCOMMITTED_EARLIEST;
import static org.apache.storm.kafka.spout.FirstPollOffsetStrategy.UNCOMMITTED_LATEST;
import com.google.common.annotations.VisibleForTesting;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import org.apache.commons.lang.Validate;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRebalanceListener;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.consumer.OffsetAndMetadata;
import org.apache.kafka.common.TopicPartition;
import org.apache.kafka.common.errors.InterruptException;
import org.apache.kafka.common.errors.RetriableException;
import org.apache.storm.kafka.spout.KafkaSpoutConfig.ProcessingGuarantee;
import org.apache.storm.kafka.spout.internal.CommitMetadataManager;
import org.apache.storm.kafka.spout.internal.ConsumerFactory;
import org.apache.storm.kafka.spout.internal.ConsumerFactoryDefault;
import org.apache.storm.kafka.spout.internal.OffsetManager;
import org.apache.storm.kafka.spout.internal.Timer;
import org.apache.storm.kafka.spout.metrics.KafkaOffsetMetric;
import org.apache.storm.kafka.spout.subscription.TopicAssigner;
import org.apache.storm.spout.SpoutOutputCollector;
import org.apache.storm.task.TopologyContext;
import org.apache.storm.topology.OutputFieldsDeclarer;
import org.apache.storm.topology.base.BaseRichSpout;
import org.apache.storm.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class KafkaSpout<K, V> extends BaseRichSpout {
private static final long serialVersionUID = 4151921085047987154L;
// Initial delay for the commit and assignment refresh timers
public static final long TIMER_DELAY_MS = 500;
private static final Logger LOG = LoggerFactory.getLogger(KafkaSpout.class);
// Storm
protected SpoutOutputCollector collector;
// Kafka
private final KafkaSpoutConfig<K, V> kafkaSpoutConfig;
private final ConsumerFactory<K, V> kafkaConsumerFactory;
private final TopicAssigner topicAssigner;
private transient Consumer<K, V> consumer;
// Bookkeeping
// Strategy to determine the first fetch offset used by the spout upon activation
private transient FirstPollOffsetStrategy firstPollOffsetStrategy;
// Class that has the logic to handle tuple failure.
private transient KafkaSpoutRetryService retryService;
// Handles tuple events (emit, ack etc.)
private transient KafkaTupleListener tupleListener;
// timer == null only if the processing guarantee is at-most-once
private transient Timer commitTimer;
// Initialization is only complete after the first call to KafkaSpoutConsumerRebalanceListener.onPartitionsAssigned()
// Tuples that were successfully acked/emitted. These tuples will be committed periodically when the commit timer expires,
// or after a consumer rebalance, or during close/deactivate. Always empty if processing guarantee is none or at-most-once.
private transient Map<TopicPartition, OffsetManager> offsetManagers;
// Tuples that have been emitted but that are "on the wire", i.e. pending being acked or failed.
// Always empty if processing guarantee is none or at-most-once
private transient Set<KafkaSpoutMessageId> emitted;
// Records that have been polled and are queued to be emitted in the nextTuple() call. One record is emitted per nextTuple()
private transient Map<TopicPartition, List<ConsumerRecord<K, V>>> waitingToEmit;
// Triggers when an assignment should be refreshed
private transient Timer refreshAssignmentTimer;
// Topology context, captured in open() and used for metric registration
private transient TopologyContext context;
// Writes/validates commit metadata identifying this spout instance
private transient CommitMetadataManager commitMetadataManager;
// Offset/lag metric registered with Storm's metrics system (when supported)
private transient KafkaOffsetMetric<K, V> kafkaOffsetMetric;
// Listener that finishes initialization when partitions are assigned
private transient KafkaSpoutConsumerRebalanceListener rebalanceListener;
/**
 * Creates a spout with the given configuration, using the default consumer
 * factory and topic assigner.
 *
 * @param kafkaSpoutConfig spout configuration
 */
public KafkaSpout(KafkaSpoutConfig<K, V> kafkaSpoutConfig) {
this(kafkaSpoutConfig, new ConsumerFactoryDefault<>(), new TopicAssigner());
}
/**
 * Testing constructor that allows the consumer factory and topic assigner
 * to be injected.
 *
 * @param kafkaSpoutConfig spout configuration
 * @param kafkaConsumerFactory factory used to create the Kafka consumer in open()
 * @param topicAssigner assigns topic partitions to the consumer
 */
@VisibleForTesting
KafkaSpout(KafkaSpoutConfig<K, V> kafkaSpoutConfig, ConsumerFactory<K, V> kafkaConsumerFactory, TopicAssigner topicAssigner) {
    this.kafkaSpoutConfig = kafkaSpoutConfig;
    this.kafkaConsumerFactory = kafkaConsumerFactory;
    this.topicAssigner = topicAssigner;
}
/**
 * Initializes the spout's transient state from the spout configuration.
 * Called by Storm before the spout starts emitting; the consumer is created
 * here but partitions are only assigned later via the rebalance listener.
 */
@Override
public void open(Map<String, Object> conf, TopologyContext context, SpoutOutputCollector collector) {
this.context = context;
// Spout internals
this.collector = collector;
// Offset management
firstPollOffsetStrategy = kafkaSpoutConfig.getFirstPollOffsetStrategy();
// Retries management
retryService = kafkaSpoutConfig.getRetryService();
tupleListener = kafkaSpoutConfig.getTupleListener();
if (kafkaSpoutConfig.getProcessingGuarantee() != KafkaSpoutConfig.ProcessingGuarantee.AT_MOST_ONCE) {
// No commit timer in at-most-once mode: there, offsets are committed right
// after every poll rather than periodically when the timer expires.
commitTimer = new Timer(TIMER_DELAY_MS, kafkaSpoutConfig.getOffsetsCommitPeriodMs(), TimeUnit.MILLISECONDS);
}
refreshAssignmentTimer = new Timer(TIMER_DELAY_MS, kafkaSpoutConfig.getPartitionRefreshPeriodMs(), TimeUnit.MILLISECONDS);
offsetManagers = new HashMap<>();
emitted = new HashSet<>();
waitingToEmit = new HashMap<>();
commitMetadataManager = new CommitMetadataManager(context, kafkaSpoutConfig.getProcessingGuarantee());
rebalanceListener = new KafkaSpoutConsumerRebalanceListener();
consumer = kafkaConsumerFactory.createConsumer(kafkaSpoutConfig.getKafkaProps());
tupleListener.open(conf, context);
// Metrics need a kafka-clients API that may be missing on old classpaths.
if (canRegisterMetrics()) {
registerMetric();
}
LOG.info("Kafka Spout opened with the following configuration: {}", kafkaSpoutConfig);
}
/**
 * Registers the "kafkaOffset" metric with the topology context. The metric reads live
 * state through suppliers so it always reflects the current offset managers and consumer.
 */
private void registerMetric() {
    LOG.info("Registering Spout Metrics");
    kafkaOffsetMetric = new KafkaOffsetMetric<>(() -> Collections.unmodifiableMap(offsetManagers), () -> consumer);
    context.registerMetric("kafkaOffset", kafkaOffsetMetric, kafkaSpoutConfig.getMetricsTimeBucketSizeInSecs());
}
/**
 * Checks whether the kafka-clients library on the classpath is recent enough
 * (>= 0.10.1.0) to support the consumer APIs the offset metric needs.
 *
 * @return true if the metric can be registered, false otherwise
 */
private boolean canRegisterMetrics() {
    try {
        // KafkaConsumer.beginningOffsets(Collection) was introduced in kafka-clients 0.10.1.0
        KafkaConsumer.class.getDeclaredMethod("beginningOffsets", Collection.class);
        return true;
    } catch (NoSuchMethodException e) {
        LOG.warn("Minimum required kafka-clients library version to enable metrics is 0.10.1.0. Disabling spout metrics.");
        return false;
    }
}
/**
 * @return true when the spout is configured with at-least-once processing semantics
 */
private boolean isAtLeastOnceProcessing() {
    final KafkaSpoutConfig.ProcessingGuarantee guarantee = kafkaSpoutConfig.getProcessingGuarantee();
    return KafkaSpoutConfig.ProcessingGuarantee.AT_LEAST_ONCE.equals(guarantee);
}
// =========== Consumer Rebalance Listener - On the same thread as the caller ===========
/**
 * Reacts to partition assignment changes. On revocation, acked offsets are committed (for
 * at-least-once) so progress is not lost. On assignment, state tracking newly-lost
 * partitions is discarded, and the consumer position of newly-gained partitions is set
 * according to the FirstPollOffsetStrategy and any pre-existing commits.
 * Runs on the same thread as the caller of the consumer.
 */
private class KafkaSpoutConsumerRebalanceListener implements ConsumerRebalanceListener {
    // Assignment seen by the previous rebalance; partitions kept across rebalances resume where they left off
    private Collection<TopicPartition> previousAssignment = new HashSet<>();
    @Override
    public void onPartitionsRevoked(Collection<TopicPartition> partitions) {
        previousAssignment = partitions;
        LOG.info("Partitions revoked. [consumer-group={}, consumer={}, topic-partitions={}]",
            kafkaSpoutConfig.getConsumerGroupId(), consumer, partitions);
        if (isAtLeastOnceProcessing()) {
            // Commit what was acked before losing ownership of the partitions
            commitOffsetsForAckedTuples();
        }
    }
    @Override
    public void onPartitionsAssigned(Collection<TopicPartition> partitions) {
        LOG.info("Partitions reassignment. [task-ID={}, consumer-group={}, consumer={}, topic-partitions={}]",
            context.getThisTaskId(), kafkaSpoutConfig.getConsumerGroupId(), consumer, partitions);
        initialize(partitions);
        tupleListener.onPartitionsReassigned(partitions);
    }
    /**
     * Drops tracking state for partitions no longer owned, and seeks newly-gained
     * partitions to their starting position.
     */
    private void initialize(Collection<TopicPartition> partitions) {
        if (isAtLeastOnceProcessing()) {
            // remove offsetManagers for all partitions that are no longer assigned to this spout
            offsetManagers.keySet().retainAll(partitions);
            retryService.retainAll(partitions);
            /*
             * Emitted messages for partitions that are no longer assigned to this spout can't be acked and should not be retried, hence
             * remove them from emitted collection.
             */
            emitted.removeIf(msgId -> !partitions.contains(msgId.getTopicPartition()));
        }
        waitingToEmit.keySet().retainAll(partitions);
        Set<TopicPartition> newPartitions = new HashSet<>(partitions);
        // If this partition was previously assigned to this spout,
        // leave the acked offsets and consumer position as they were to resume where it left off
        newPartitions.removeAll(previousAssignment);
        for (TopicPartition newTp : newPartitions) {
            final OffsetAndMetadata committedOffset = consumer.committed(newTp);
            final long fetchOffset = doSeek(newTp, committedOffset);
            LOG.debug("Set consumer position to [{}] for topic-partition [{}] with [{}] and committed offset [{}]",
                fetchOffset, newTp, firstPollOffsetStrategy, committedOffset);
            // OffsetManager is created from the fetch offset; commits will start from there
            if (isAtLeastOnceProcessing() && !offsetManagers.containsKey(newTp)) {
                offsetManagers.put(newTp, new OffsetManager(newTp, fetchOffset));
            }
        }
        LOG.info("Initialization complete");
    }
    /**
     * Sets the cursor to the location dictated by the first poll strategy and returns the fetch offset.
     */
    private long doSeek(TopicPartition newTp, OffsetAndMetadata committedOffset) {
        LOG.trace("Seeking offset for topic-partition [{}] with [{}] and committed offset [{}]",
            newTp, firstPollOffsetStrategy, committedOffset);
        if (committedOffset != null) {
            // offset was previously committed for this consumer group and topic-partition, either by this or another topology.
            if (commitMetadataManager.isOffsetCommittedByThisTopology(newTp,
                committedOffset,
                Collections.unmodifiableMap(offsetManagers))) {
                // Another KafkaSpout instance (of this topology) already committed, therefore FirstPollOffsetStrategy does not apply.
                consumer.seek(newTp, committedOffset.offset());
            } else {
                // offset was not committed by this topology, therefore FirstPollOffsetStrategy applies
                // (only when the topology is first deployed).
                if (firstPollOffsetStrategy.equals(EARLIEST)) {
                    consumer.seekToBeginning(Collections.singleton(newTp));
                } else if (firstPollOffsetStrategy.equals(LATEST)) {
                    consumer.seekToEnd(Collections.singleton(newTp));
                } else {
                    // Resume polling at the last committed offset, i.e. the first offset that is not marked as processed.
                    consumer.seek(newTp, committedOffset.offset());
                }
            }
        } else {
            // no offset commits have ever been done for this consumer group and topic-partition,
            // so start at the beginning or end depending on FirstPollOffsetStrategy
            if (firstPollOffsetStrategy.equals(EARLIEST) || firstPollOffsetStrategy.equals(UNCOMMITTED_EARLIEST)) {
                consumer.seekToBeginning(Collections.singleton(newTp));
            } else if (firstPollOffsetStrategy.equals(LATEST) || firstPollOffsetStrategy.equals(UNCOMMITTED_LATEST)) {
                consumer.seekToEnd(Collections.singleton(newTp));
            }
        }
        // Wherever the strategy left the cursor is the fetch offset
        return consumer.position(newTp);
    }
}
// ======== Next Tuple =======
/**
 * Storm's poll-loop entry point: refreshes the partition assignment when due, commits
 * offsets when the commit timer expires, polls Kafka if any partition is pollable, and
 * emits at most one queued tuple.
 */
@Override
public void nextTuple() {
    try {
        if (refreshAssignmentTimer.isExpiredResetOnTrue()) {
            refreshAssignment();
        }
        // commitTimer is null in at-most-once mode; there, commits happen on every poll instead
        if (commitTimer != null && commitTimer.isExpiredResetOnTrue()) {
            if (isAtLeastOnceProcessing()) {
                commitOffsetsForAckedTuples();
            } else if (kafkaSpoutConfig.getProcessingGuarantee() == ProcessingGuarantee.NO_GUARANTEE) {
                // Periodically commit the current fetch positions (async; no delivery guarantee implied)
                Map<TopicPartition, OffsetAndMetadata> offsetsToCommit =
                    createFetchedOffsetsMetadata(consumer.assignment());
                consumer.commitAsync(offsetsToCommit, null);
                LOG.debug("Committed offsets {} to Kafka", offsetsToCommit);
            }
        }
        PollablePartitionsInfo pollablePartitionsInfo = getPollablePartitionsInfo();
        if (pollablePartitionsInfo.shouldPoll()) {
            try {
                setWaitingToEmit(pollKafkaBroker(pollablePartitionsInfo));
            } catch (RetriableException e) {
                // Transient broker problem; the next nextTuple() call will retry the poll
                LOG.error("Failed to poll from kafka.", e);
            }
        }
        emitIfWaitingNotEmitted();
    } catch (InterruptException e) {
        throwKafkaConsumerInterruptedException();
    }
}
/**
 * Translates Kafka's unchecked InterruptException into a RuntimeException wrapping a
 * java.lang.InterruptedException, which Storm recognizes as a reaction to an interrupt.
 */
private void throwKafkaConsumerInterruptedException() {
    //Kafka throws their own type of exception when interrupted.
    //Throw a new Java InterruptedException to ensure Storm can recognize the exception as a reaction to an interrupt.
    throw new RuntimeException(new InterruptedException("Kafka consumer was interrupted"));
}
/**
 * Computes which assigned partitions may be polled right now. Polling is skipped entirely
 * while previously polled records are still queued for emission. For at-least-once
 * processing, a partition is pollable only while it is below the maxUncommittedOffsets
 * limit, or when it has retriable tuples within that limit.
 */
private PollablePartitionsInfo getPollablePartitionsInfo() {
    if (isWaitingToEmit()) {
        LOG.debug("Not polling. Tuples waiting to be emitted.");
        return new PollablePartitionsInfo(Collections.emptySet(), Collections.emptyMap());
    }
    Set<TopicPartition> assignment = consumer.assignment();
    if (!isAtLeastOnceProcessing()) {
        // Without acking there is no uncommitted-offset limit to enforce; everything is pollable
        return new PollablePartitionsInfo(assignment, Collections.emptyMap());
    }
    Map<TopicPartition, Long> earliestRetriableOffsets = retryService.earliestRetriableOffsets();
    Set<TopicPartition> pollablePartitions = new HashSet<>();
    final int maxUncommittedOffsets = kafkaSpoutConfig.getMaxUncommittedOffsets();
    for (TopicPartition tp : assignment) {
        OffsetManager offsetManager = offsetManagers.get(tp);
        int numUncommittedOffsets = offsetManager.getNumUncommittedOffsets();
        if (numUncommittedOffsets < maxUncommittedOffsets) {
            //Allow poll if the partition is not at the maxUncommittedOffsets limit
            pollablePartitions.add(tp);
        } else {
            long offsetAtLimit = offsetManager.getNthUncommittedOffsetAfterCommittedOffset(maxUncommittedOffsets);
            Long earliestRetriableOffset = earliestRetriableOffsets.get(tp);
            if (earliestRetriableOffset != null && earliestRetriableOffset <= offsetAtLimit) {
                //Allow poll if there are retriable tuples within the maxUncommittedOffsets limit
                pollablePartitions.add(tp);
            } else {
                LOG.debug("Not polling on partition [{}]. It has [{}] uncommitted offsets, which exceeds the limit of [{}]. ", tp,
                    numUncommittedOffsets, maxUncommittedOffsets);
            }
        }
    }
    return new PollablePartitionsInfo(pollablePartitions, earliestRetriableOffsets);
}
/**
 * @return true if at least one polled record is still queued for emission
 */
private boolean isWaitingToEmit() {
    for (List<ConsumerRecord<K, V>> recordsForTp : waitingToEmit.values()) {
        if (!recordsForTp.isEmpty()) {
            return true;
        }
    }
    return false;
}
/**
 * Queues freshly polled records for emission, one mutable queue per partition.
 * LinkedList allows cheap removal from the front as records are emitted.
 */
private void setWaitingToEmit(ConsumerRecords<K, V> consumerRecords) {
    consumerRecords.partitions().forEach(tp ->
        waitingToEmit.put(tp, new LinkedList<>(consumerRecords.records(tp))));
}
// ======== poll =========
/**
 * Polls the broker for new records. Non-pollable partitions are paused for the duration of
 * the poll (and always resumed afterwards, even on failure), and retriable partitions are
 * first seeked back to their earliest retriable offset. In at-most-once mode the fetched
 * positions are committed synchronously before the records are handed out.
 */
private ConsumerRecords<K, V> pollKafkaBroker(PollablePartitionsInfo pollablePartitionsInfo) {
    doSeekRetriableTopicPartitions(pollablePartitionsInfo.pollableEarliestRetriableOffsets);
    // Pause everything that is assigned but not currently pollable
    Set<TopicPartition> pausedPartitions = new HashSet<>(consumer.assignment());
    pausedPartitions.removeIf(pollablePartitionsInfo.pollablePartitions::contains);
    try {
        consumer.pause(pausedPartitions);
        final ConsumerRecords<K, V> consumerRecords = consumer.poll(kafkaSpoutConfig.getPollTimeoutMs());
        ackRetriableOffsetsIfCompactedAway(pollablePartitionsInfo.pollableEarliestRetriableOffsets, consumerRecords);
        final int numPolledRecords = consumerRecords.count();
        LOG.debug("Polled [{}] records from Kafka",
            numPolledRecords);
        if (kafkaSpoutConfig.getProcessingGuarantee() == KafkaSpoutConfig.ProcessingGuarantee.AT_MOST_ONCE) {
            //Commit polled records immediately to ensure delivery is at-most-once.
            Map<TopicPartition, OffsetAndMetadata> offsetsToCommit =
                createFetchedOffsetsMetadata(consumer.assignment());
            consumer.commitSync(offsetsToCommit);
            LOG.debug("Committed offsets {} to Kafka", offsetsToCommit);
        }
        return consumerRecords;
    } finally {
        // Always resume, so a failed poll can never leave partitions permanently paused
        consumer.resume(pausedPartitions);
    }
}
/**
 * Seeks the consumer directly to the earliest retriable offset of each pollable retriable
 * partition, so the next poll re-fetches the failed records.
 */
private void doSeekRetriableTopicPartitions(Map<TopicPartition, Long> pollableEarliestRetriableOffsets) {
    pollableEarliestRetriableOffsets.forEach(consumer::seek);
}
/**
 * Detects retriable offsets that can no longer be fetched (presumably compacted or aged
 * out) and marks them as acked so they cannot block commit progress forever. For each
 * retriable partition: if the first record actually returned by the poll is later than the
 * offset that was seeked to, every missing offset in between is removed from the retry
 * service and acked, unless it is already acked or currently in flight.
 */
private void ackRetriableOffsetsIfCompactedAway(Map<TopicPartition, Long> earliestRetriableOffsets,
    ConsumerRecords<K, V> consumerRecords) {
    for (Entry<TopicPartition, Long> entry : earliestRetriableOffsets.entrySet()) {
        TopicPartition tp = entry.getKey();
        List<ConsumerRecord<K, V>> records = consumerRecords.records(tp);
        if (!records.isEmpty()) {
            ConsumerRecord<K, V> record = records.get(0);
            long seekOffset = entry.getValue();
            long earliestReceivedOffset = record.offset();
            if (seekOffset < earliestReceivedOffset) {
                //Since we asked for tuples starting at seekOffset, some retriable records must have been compacted away.
                //Ack up to the first offset received if the record is not already acked or currently in the topology
                for (long i = seekOffset; i < earliestReceivedOffset; i++) {
                    KafkaSpoutMessageId msgId = retryService.getMessageId(tp, i);
                    if (!offsetManagers.get(tp).contains(msgId) && !emitted.contains(msgId)) {
                        LOG.debug("Record at offset [{}] appears to have been compacted away from topic [{}], marking as acked", i, tp);
                        retryService.remove(msgId);
                        // Add to emitted first so ack() treats this message as a tracked tuple
                        emitted.add(msgId);
                        ack(msgId);
                    }
                }
            }
        }
    }
}
// ======== emit =========
/**
 * Emits at most one tuple from the queued records (one tuple per nextTuple() call).
 * Records that turn out to be already acked or already in flight are skipped and dropped
 * from the queue; per-partition queues that are exhausted without an emit are removed.
 */
private void emitIfWaitingNotEmitted() {
    Iterator<List<ConsumerRecord<K, V>>> waitingToEmitIter = waitingToEmit.values().iterator();
    outerLoop:
    while (waitingToEmitIter.hasNext()) {
        List<ConsumerRecord<K, V>> waitingToEmitForTp = waitingToEmitIter.next();
        while (!waitingToEmitForTp.isEmpty()) {
            final boolean emittedTuple = emitOrRetryTuple(waitingToEmitForTp.remove(0));
            if (emittedTuple) {
                // Stop after the first successful emit; the rest of this partition's queue is kept
                break outerLoop;
            }
        }
        // Queue exhausted without an emit; remove the empty entry via the iterator
        waitingToEmitIter.remove();
    }
}
/**
 * Creates a tuple from the kafka record and emits it if it was never emitted or it is ready to be retried.
 *
 * @param record to be emitted
 * @return true if tuple was emitted. False if tuple has been acked or has been emitted and is pending ack or fail
 */
private boolean emitOrRetryTuple(ConsumerRecord<K, V> record) {
    final TopicPartition tp = new TopicPartition(record.topic(), record.partition());
    final KafkaSpoutMessageId msgId = retryService.getMessageId(tp, record.offset());
    if (offsetManagers.containsKey(tp) && offsetManagers.get(tp).contains(msgId)) { // has been acked
        LOG.trace("Tuple for record [{}] has already been acked. Skipping", record);
    } else if (emitted.contains(msgId)) { // has been emitted and it is pending ack or fail
        LOG.trace("Tuple for record [{}] has already been emitted. Skipping", record);
    } else {
        final List<Object> tuple = kafkaSpoutConfig.getTranslator().apply(record);
        if (isEmitTuple(tuple)) {
            final boolean isScheduled = retryService.isScheduled(msgId);
            // not scheduled <=> never failed (i.e. never emitted), or scheduled and ready to be retried
            if (!isScheduled || retryService.isReady(msgId)) {
                final String stream = tuple instanceof KafkaTuple ? ((KafkaTuple) tuple).getStream() : Utils.DEFAULT_STREAM_ID;
                if (!isAtLeastOnceProcessing()) {
                    // Tuple tracking may be enforced for monitoring even without at-least-once semantics
                    if (kafkaSpoutConfig.isTupleTrackingEnforced()) {
                        collector.emit(stream, tuple, msgId);
                        LOG.trace("Emitted tuple [{}] for record [{}] with msgId [{}]", tuple, record, msgId);
                    } else {
                        collector.emit(stream, tuple);
                        LOG.trace("Emitted tuple [{}] for record [{}]", tuple, record);
                    }
                } else {
                    // At-least-once: register the message as in-flight before emitting
                    emitted.add(msgId);
                    offsetManagers.get(tp).addToEmitMsgs(msgId.offset());
                    if (isScheduled) { // Was scheduled for retry and re-emitted, so remove from schedule.
                        retryService.remove(msgId);
                    }
                    collector.emit(stream, tuple, msgId);
                    tupleListener.onEmit(tuple, msgId);
                    LOG.trace("Emitted tuple [{}] for record [{}] with msgId [{}]", tuple, record, msgId);
                }
                return true;
            }
        } else {
            /*
             * if a null tuple is not configured to be emitted, it should be marked as emitted and acked immediately to allow its offset
             * to be committed to Kafka
             */
            LOG.debug("Not emitting null tuple for record [{}] as defined in configuration.", record);
            if (isAtLeastOnceProcessing()) {
                msgId.setNullTuple(true);
                offsetManagers.get(tp).addToEmitMsgs(msgId.offset());
                ack(msgId);
            }
        }
    }
    return false;
}
/**
 * Decides whether a translated tuple should be emitted.
 *
 * @return true when the tuple is non-null, or when null tuples are configured to be emitted
 */
private boolean isEmitTuple(List<Object> tuple) {
    if (tuple != null) {
        return true;
    }
    return kafkaSpoutConfig.isEmitNullTuples();
}
/**
 * Builds the offsets-plus-metadata map to commit for the given partitions, using the
 * consumer's current fetch position for each partition and this topology's commit metadata.
 */
private Map<TopicPartition, OffsetAndMetadata> createFetchedOffsetsMetadata(Set<TopicPartition> assignedPartitions) {
    return assignedPartitions.stream()
        .collect(Collectors.toMap(
            tp -> tp,
            tp -> new OffsetAndMetadata(consumer.position(tp), commitMetadataManager.getCommitMetadata())));
}
/**
 * Synchronously commits, for every partition, the highest contiguous acked offset (plus
 * this topology's commit metadata). After a successful commit the per-partition
 * OffsetManagers are updated, the consumer is skipped forward if its position fell behind
 * the committed offset, and queued-but-now-committed records are dropped.
 * Only meaningful for at-least-once processing.
 */
private void commitOffsetsForAckedTuples() {
    final Map<TopicPartition, OffsetAndMetadata> nextCommitOffsets = new HashMap<>();
    for (Map.Entry<TopicPartition, OffsetManager> tpOffset : offsetManagers.entrySet()) {
        final OffsetAndMetadata nextCommitOffset = tpOffset.getValue().findNextCommitOffset(commitMetadataManager.getCommitMetadata());
        if (nextCommitOffset != null) {
            nextCommitOffsets.put(tpOffset.getKey(), nextCommitOffset);
        }
    }
    // Commit offsets that are ready to be committed for every topic partition
    if (!nextCommitOffsets.isEmpty()) {
        consumer.commitSync(nextCommitOffsets);
        LOG.debug("Offsets successfully committed to Kafka [{}]", nextCommitOffsets);
        // Instead of iterating again, it would be possible to commit and update the state for each TopicPartition
        // in the prior loop, but the multiple network calls should be more expensive than iterating twice over a small loop
        for (Map.Entry<TopicPartition, OffsetAndMetadata> tpOffset : nextCommitOffsets.entrySet()) {
            //Update the OffsetManager for each committed partition, and update numUncommittedOffsets
            final TopicPartition tp = tpOffset.getKey();
            long position = consumer.position(tp);
            long committedOffset = tpOffset.getValue().offset();
            if (position < committedOffset) {
                /*
                 * The position is behind the committed offset. This can happen in some cases, e.g. if a message failed, lots of (more
                 * than max.poll.records) later messages were acked, and the failed message then gets acked. The consumer may only be
                 * part way through "catching up" to where it was when it went back to retry the failed tuple. Skip the consumer forward
                 * to the committed offset.
                 */
                LOG.debug("Consumer fell behind committed offset. Catching up. Position was [{}], skipping to [{}]",
                    position, committedOffset);
                consumer.seek(tp, committedOffset);
            }
            /*
             * In some cases the waitingToEmit list may contain tuples that have just been committed. Drop these.
             */
            List<ConsumerRecord<K, V>> waitingToEmitForTp = waitingToEmit.get(tp);
            if (waitingToEmitForTp != null) {
                //Discard the pending records that are already committed
                waitingToEmit.put(tp, waitingToEmitForTp.stream()
                    .filter(record -> record.offset() >= committedOffset)
                    .collect(Collectors.toCollection(LinkedList::new)));
            }
            final OffsetManager offsetManager = offsetManagers.get(tp);
            offsetManager.commit(tpOffset.getValue());
            LOG.debug("[{}] uncommitted offsets for partition [{}] after commit", offsetManager.getNumUncommittedOffsets(), tp);
        }
    } else {
        LOG.trace("No offsets to commit. {}", this);
    }
}
// ======== Ack =======
/**
 * Marks a tuple as successfully processed. No-op unless processing is at-least-once.
 * Null tuples are acked directly; tuples whose partition was reassigned away are ignored;
 * otherwise the message moves from the in-flight set to its partition's acked set, making
 * its offset eligible for the next commit.
 */
@Override
public void ack(Object messageId) {
    if (!isAtLeastOnceProcessing()) {
        return;
    }
    // Only need to keep track of acked tuples if commits to Kafka are controlled by
    // tuple acks, which happens only for at-least-once processing semantics
    final KafkaSpoutMessageId msgId = (KafkaSpoutMessageId) messageId;
    if (msgId.isNullTuple()) {
        //a null tuple should be added to the ack list since by definition is a direct ack
        offsetManagers.get(msgId.getTopicPartition()).addToAckMsgs(msgId);
        LOG.debug("Received direct ack for message [{}], associated with null tuple", msgId);
        tupleListener.onAck(msgId);
        return;
    }
    if (!emitted.contains(msgId)) {
        LOG.debug("Received ack for message [{}], associated with tuple emitted for a ConsumerRecord that "
            + "came from a topic-partition that this consumer group instance is no longer tracking "
            + "due to rebalance/partition reassignment. No action taken.", msgId);
    } else {
        // A message scheduled for retry should never be acked at the same time; that indicates a bug
        Validate.isTrue(!retryService.isScheduled(msgId), "The message id " + msgId + " is queued for retry while being acked."
            + " This should never occur barring errors in the RetryService implementation or the spout code.");
        offsetManagers.get(msgId.getTopicPartition()).addToAckMsgs(msgId);
        emitted.remove(msgId);
    }
    tupleListener.onAck(msgId);
}
// ======== Fail =======
/**
 * Marks a tuple as failed. No-op unless processing is at-least-once. The message is
 * scheduled for retry; if the retry service refuses (max retries reached), it is acked
 * instead so the partition's commit progress is not blocked forever.
 */
@Override
public void fail(Object messageId) {
    if (!isAtLeastOnceProcessing()) {
        return;
    }
    // Only need to keep track of failed tuples if commits to Kafka are controlled by
    // tuple acks, which happens only for at-least-once processing semantics
    final KafkaSpoutMessageId msgId = (KafkaSpoutMessageId) messageId;
    if (!emitted.contains(msgId)) {
        LOG.debug("Received fail for tuple this spout is no longer tracking."
            + " Partitions may have been reassigned. Ignoring message [{}]", msgId);
        return;
    }
    // A message scheduled for retry should never be failed at the same time; that indicates a bug
    Validate.isTrue(!retryService.isScheduled(msgId), "The message id " + msgId + " is queued for retry while being failed."
        + " This should never occur barring errors in the RetryService implementation or the spout code.");
    msgId.incrementNumFails();
    if (!retryService.schedule(msgId)) {
        LOG.debug("Reached maximum number of retries. Message [{}] being marked as acked.", msgId);
        // this tuple should be removed from emitted only inside the ack() method. This is to ensure
        // that the OffsetManager for that TopicPartition is updated and allows commit progression
        tupleListener.onMaxRetryReached(msgId);
        ack(msgId);
    } else {
        tupleListener.onRetry(msgId);
        emitted.remove(msgId);
    }
}
// ======== Activate / Deactivate / Close / Declare Outputs =======
/**
 * Called by Storm when the spout is activated; computes and applies the initial partition
 * assignment. Kafka's InterruptException is translated for Storm (see
 * throwKafkaConsumerInterruptedException()).
 */
@Override
public void activate() {
    try {
        refreshAssignment();
    } catch (InterruptException e) {
        throwKafkaConsumerInterruptedException();
    }
}
/**
 * Recomputes the full set of subscribed partitions, picks this task's share and hands it
 * to the topic assigner. The partitions are sorted first so every task sees them in the
 * same order and the per-task split stays consistent across the topology.
 */
private void refreshAssignment() {
    final List<TopicPartition> allPartitionsSorted =
        kafkaSpoutConfig.getTopicFilter().getAllSubscribedPartitions(consumer).stream()
            .sorted(TopicPartitionComparator.INSTANCE)
            .collect(Collectors.toList());
    final Set<TopicPartition> assignedPartitions = kafkaSpoutConfig.getTopicPartitioner()
        .getPartitionsForThisTask(allPartitionsSorted, context);
    topicAssigner.assignPartitions(consumer, assignedPartitions, rebalanceListener);
}
/**
 * Called by Storm when the spout is deactivated; commits acked offsets (at-least-once
 * only) so progress is preserved while inactive.
 */
@Override
public void deactivate() {
    try {
        commitIfNecessary();
    } catch (InterruptException e) {
        throwKafkaConsumerInterruptedException();
    }
}
/**
 * Called by Storm when the spout is shut down; commits acked offsets where applicable and
 * closes the consumer.
 */
@Override
public void close() {
    try {
        shutdown();
    } catch (InterruptException e) {
        throwKafkaConsumerInterruptedException();
    }
}
/**
 * Commits acked offsets, but only when the processing guarantee makes tuple-ack-driven
 * commits meaningful (at-least-once).
 */
private void commitIfNecessary() {
    if (isAtLeastOnceProcessing()) {
        commitOffsetsForAckedTuples();
    }
}
/**
 * Performs a final commit where applicable, then closes the consumer. The close happens in
 * a finally block so resources are released even if the commit throws.
 */
private void shutdown() {
    try {
        commitIfNecessary();
    } finally {
        //remove resources
        consumer.close();
    }
}
/**
 * Declares one output stream per stream produced by the configured record translator,
 * using the translator's field schema for each stream.
 */
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
    final RecordTranslator<K, V> translator = kafkaSpoutConfig.getTranslator();
    translator.streams().forEach(stream -> declarer.declareStream(stream, translator.getFieldsFor(stream)));
}
/**
 * Diagnostic summary of the spout's tracked offset state.
 * Fix: removed the stray space in "offsetManagers =" so the key formatting is consistent
 * with the "emitted=" entry.
 */
@Override
public String toString() {
    return "KafkaSpout{"
        + "offsetManagers=" + offsetManagers
        + ", emitted=" + emitted
        + "}";
}
/**
 * Exposes the spout's topics, consumer group and simple (primitive/wrapper/String) Kafka
 * properties in the component configuration under a "config." key prefix, so they show up
 * in Storm UI. Non-serializable-as-simple-value properties are dropped.
 */
@Override
public Map<String, Object> getComponentConfiguration() {
    Map<String, Object> configuration = super.getComponentConfiguration();
    if (configuration == null) {
        configuration = new HashMap<>();
    }
    String configKeyPrefix = "config.";
    configuration.put(configKeyPrefix + "topics", getTopicsString());
    configuration.put(configKeyPrefix + "groupid", kafkaSpoutConfig.getConsumerGroupId());
    for (Entry<String, Object> conf : kafkaSpoutConfig.getKafkaProps().entrySet()) {
        if (conf.getValue() != null && isPrimitiveOrWrapper(conf.getValue().getClass())) {
            configuration.put(configKeyPrefix + conf.getKey(), conf.getValue());
        } else {
            LOG.debug("Dropping Kafka prop '{}' from component configuration", conf.getKey());
        }
    }
    return configuration;
}
/**
 * Null-safe check for types that can be passed through into the component configuration.
 *
 * @return true when the class is a primitive, a primitive wrapper, or String
 */
private boolean isPrimitiveOrWrapper(Class<?> type) {
    return type != null && (type.isPrimitive() || isWrapper(type));
}
/**
 * @return true for primitive wrapper classes and String (note: String is included here
 *     even though it is not strictly a wrapper, since it is equally safe to display)
 */
private boolean isWrapper(Class<?> type) {
    if (type == Double.class || type == Float.class || type == Long.class) {
        return true;
    }
    if (type == Integer.class || type == Short.class || type == Character.class) {
        return true;
    }
    return type == Byte.class || type == Boolean.class || type == String.class;
}
/**
 * @return a human-readable description of the subscribed topics, as provided by the filter
 */
private String getTopicsString() {
    return kafkaSpoutConfig.getTopicFilter().getTopicsString();
}
/**
 * Immutable snapshot of which partitions may be polled right now, together with the
 * earliest retriable offsets restricted to those pollable partitions.
 */
private static class PollablePartitionsInfo {
    private final Set<TopicPartition> pollablePartitions;
    //The subset of earliest retriable offsets that are on pollable partitions
    private final Map<TopicPartition, Long> pollableEarliestRetriableOffsets;

    PollablePartitionsInfo(Set<TopicPartition> pollablePartitions, Map<TopicPartition, Long> earliestRetriableOffsets) {
        this.pollablePartitions = pollablePartitions;
        // Keep only the retriable offsets whose partition is actually pollable
        final Map<TopicPartition, Long> pollableOffsets = new HashMap<>();
        for (Map.Entry<TopicPartition, Long> entry : earliestRetriableOffsets.entrySet()) {
            if (pollablePartitions.contains(entry.getKey())) {
                pollableOffsets.put(entry.getKey(), entry.getValue());
            }
        }
        this.pollableEarliestRetriableOffsets = pollableOffsets;
    }

    public boolean shouldPoll() {
        return !this.pollablePartitions.isEmpty();
    }
}
/**
 * Exposes the offset metric for tests; may be null when metrics are disabled
 * (see canRegisterMetrics()).
 */
@VisibleForTesting
KafkaOffsetMetric<K, V> getKafkaOffsetMetric() {
    return kafkaOffsetMetric;
}
}
| |
/*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package android.support.v4.content;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.Callable;
import java.util.concurrent.CancellationException;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executor;
import java.util.concurrent.FutureTask;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import android.os.Handler;
import android.os.Looper;
import android.os.Message;
import android.os.Process;
/**
* Copy of the required parts of {@link android.os.AsyncTask} from Android 3.0 that is
* needed to support AsyncTaskLoader. We use this rather than the one from the platform
* because we rely on some subtle behavior of AsyncTask that is not reliable on
* older platforms.
*
* <p>Note that for now this is not publicly available because it is not a
* complete implementation, only sufficient for the needs of
* {@link AsyncTaskLoader}.
*/
abstract class ModernAsyncTask<Params, Progress, Result> {
private static final String LOG_TAG = "AsyncTask";
// Thread-pool sizing for the shared executor
private static final int CORE_POOL_SIZE = 5;
private static final int MAXIMUM_POOL_SIZE = 128;
// Seconds (see TimeUnit.SECONDS below) that idle threads above the core size are kept alive
private static final int KEEP_ALIVE = 1;
// Names worker threads "ModernAsyncTask #N" for debuggability
private static final ThreadFactory sThreadFactory = new ThreadFactory() {
    private final AtomicInteger mCount = new AtomicInteger(1);
    public Thread newThread(Runnable r) {
        return new Thread(r, "ModernAsyncTask #" + mCount.getAndIncrement());
    }
};
// Bounded queue: once full, the executor grows toward MAXIMUM_POOL_SIZE before rejecting work
private static final BlockingQueue<Runnable> sPoolWorkQueue =
    new LinkedBlockingQueue<Runnable>(10);
/**
 * An {@link Executor} that can be used to execute tasks in parallel.
 */
public static final Executor THREAD_POOL_EXECUTOR
    = new ThreadPoolExecutor(CORE_POOL_SIZE, MAXIMUM_POOL_SIZE, KEEP_ALIVE,
        TimeUnit.SECONDS, sPoolWorkQueue, sThreadFactory);
// Handler message codes used to deliver results and progress to the UI thread
private static final int MESSAGE_POST_RESULT = 0x1;
private static final int MESSAGE_POST_PROGRESS = 0x2;
// Lazily created on first use; access is guarded by the class lock (see getHandler())
private static InternalHandler sHandler;
private static volatile Executor sDefaultExecutor = THREAD_POOL_EXECUTOR;
// The Callable run on a worker thread; wraps doInBackground()
private final WorkerRunnable<Params, Result> mWorker;
// Future driving mWorker; also serves as the cancellation handle
private final FutureTask<Result> mFuture;
private volatile Status mStatus = Status.PENDING;
// Set once the worker's call() actually starts; used to detect cancellation-before-run
private final AtomicBoolean mTaskInvoked = new AtomicBoolean();
/**
 * Indicates the current status of the task. Each status will be set only once
 * during the lifetime of a task.
 */
public enum Status {
    /**
     * Indicates that the task has not been executed yet.
     */
    PENDING,
    /**
     * Indicates that the task is running.
     */
    RUNNING,
    /**
     * Indicates that {@link android.os.AsyncTask#onPostExecute(Object)} has finished.
     */
    FINISHED,
}
/**
 * Lazily creates the shared handler on first use. Declared {@code static synchronized},
 * which locks on the ModernAsyncTask class object — the same monitor as the original
 * synchronized block — so concurrent first calls cannot create two handlers.
 */
private static synchronized Handler getHandler() {
    if (sHandler == null) {
        sHandler = new InternalHandler();
    }
    return sHandler;
}
/**
 * Replaces the executor used by default for new tasks. Volatile write; affects tasks
 * executed after this call.
 *
 * @hide
 */
public static void setDefaultExecutor(Executor exec) {
    sDefaultExecutor = exec;
}
/**
 * Creates a new asynchronous task. This constructor must be invoked on the UI thread.
 */
public ModernAsyncTask() {
    mWorker = new WorkerRunnable<Params, Result>() {
        public Result call() throws Exception {
            // Record that the worker actually ran, so done() knows whether a result was already posted
            mTaskInvoked.set(true);
            Process.setThreadPriority(Process.THREAD_PRIORITY_BACKGROUND);
            return postResult(doInBackground(mParams));
        }
    };
    mFuture = new FutureTask<Result>(mWorker) {
        @Override
        protected void done() {
            try {
                final Result result = get();
                // If call() never ran (cancelled before execution), post the result here instead
                postResultIfNotInvoked(result);
            } catch (InterruptedException e) {
                android.util.Log.w(LOG_TAG, e);
            } catch (ExecutionException e) {
                // Exceptions thrown by doInBackground() are rethrown on the UI side
                throw new RuntimeException(
                    "An error occurred while executing doInBackground()", e.getCause());
            } catch (CancellationException e) {
                // Cancelled: deliver a null result so onCancelled() is still dispatched
                postResultIfNotInvoked(null);
            } catch (Throwable t) {
                throw new RuntimeException(
                    "An error occurred while executing doInBackground()", t);
            }
        }
    };
}
/**
 * Posts the result through the handler only when the worker's {@code call()} never ran
 * (i.e. the task was cancelled before execution), so the cancellation callbacks still fire.
 */
private void postResultIfNotInvoked(Result result) {
    if (!mTaskInvoked.get()) {
        postResult(result);
    }
}
/**
 * Posts the result to the UI thread via the shared handler and returns it unchanged.
 */
private Result postResult(Result result) {
    getHandler()
        .obtainMessage(MESSAGE_POST_RESULT, new AsyncTaskResult<Result>(this, result))
        .sendToTarget();
    return result;
}
/**
 * Returns the current status of this task. Safe to call from any thread
 * (the backing field is volatile).
 *
 * @return The current status.
 */
public final Status getStatus() {
    return mStatus;
}
/**
 * Override this method to perform a computation on a background thread. The
 * specified parameters are the parameters passed to {@link #execute}
 * by the caller of this task.
 *
 * This method can call {@link #publishProgress} to publish updates
 * on the UI thread.
 *
 * @param params The parameters of the task.
 *
 * @return A result, defined by the subclass of this task.
 *
 * @see #onPreExecute()
 * @see #onPostExecute
 * @see #publishProgress
 */
protected abstract Result doInBackground(Params... params);
/**
 * Runs on the UI thread before {@link #doInBackground}. Default implementation
 * does nothing.
 *
 * @see #onPostExecute
 * @see #doInBackground
 */
protected void onPreExecute() {
}
/**
 * <p>Runs on the UI thread after {@link #doInBackground}. The
 * specified result is the value returned by {@link #doInBackground}.</p>
 *
 * <p>This method won't be invoked if the task was cancelled.</p>
 *
 * <p>Default implementation does nothing.</p>
 *
 * @param result The result of the operation computed by {@link #doInBackground}.
 *
 * @see #onPreExecute
 * @see #doInBackground
 * @see #onCancelled(Object)
 */
@SuppressWarnings({"UnusedDeclaration"})
protected void onPostExecute(Result result) {
}
/**
 * Runs on the UI thread after {@link #publishProgress} is invoked.
 * The specified values are the values passed to {@link #publishProgress}.
 * Default implementation does nothing.
 *
 * @param values The values indicating progress.
 *
 * @see #publishProgress
 * @see #doInBackground
 */
@SuppressWarnings({"UnusedDeclaration"})
protected void onProgressUpdate(Progress... values) {
}
/**
 * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
 * {@link #doInBackground(Object[])} has finished.</p>
 *
 * <p>The default implementation simply invokes {@link #onCancelled()} and
 * ignores the result. If you write your own implementation, do not call
 * <code>super.onCancelled(result)</code>.</p>
 *
 * @param result The result, if any, computed in
 *     {@link #doInBackground(Object[])}, can be null
 *
 * @see #cancel(boolean)
 * @see #isCancelled()
 */
@SuppressWarnings({"UnusedParameters"})
protected void onCancelled(Result result) {
    onCancelled();
}
/**
 * <p>Applications should preferably override {@link #onCancelled(Object)}.
 * This method is invoked by the default implementation of
 * {@link #onCancelled(Object)}.</p>
 *
 * <p>Runs on the UI thread after {@link #cancel(boolean)} is invoked and
 * {@link #doInBackground(Object[])} has finished. Default implementation
 * does nothing.</p>
 *
 * @see #onCancelled(Object)
 * @see #cancel(boolean)
 * @see #isCancelled()
 */
protected void onCancelled() {
}
/**
 * Returns {@code true} if this task was cancelled before it completed
 * normally. If you are calling {@link #cancel(boolean)} on the task,
 * the value returned by this method should be checked periodically from
 * {@link #doInBackground(Object[])} to end the task as soon as possible.
 *
 * @return {@code true} if task was cancelled before it completed
 *
 * @see #cancel(boolean)
 */
public final boolean isCancelled() {
    return mFuture.isCancelled();
}
/**
 * <p>Attempts to cancel execution of this task. The attempt fails when the
 * task has already completed, has already been cancelled, or cannot be
 * cancelled for some other reason. If the attempt succeeds before the task
 * has started, the task never runs. If the task is already running,
 * <tt>mayInterruptIfRunning</tt> decides whether the worker thread is
 * interrupted in an attempt to stop it.</p>
 *
 * <p>After a successful call, {@link #onCancelled(Object)} — never
 * {@link #onPostExecute(Object)} — is invoked on the UI thread once
 * {@link #doInBackground(Object[])} returns. The background computation
 * should therefore check {@link #isCancelled()} periodically and finish as
 * early as possible.</p>
 *
 * @param mayInterruptIfRunning <tt>true</tt> if the thread executing this
 *        task should be interrupted; otherwise, in-progress tasks are allowed
 *        to complete.
 *
 * @return <tt>false</tt> if the task could not be cancelled,
 *         typically because it has already completed normally;
 *         <tt>true</tt> otherwise
 *
 * @see #isCancelled()
 * @see #onCancelled(Object)
 */
public final boolean cancel(boolean mayInterruptIfRunning) {
    // Cancellation is delegated wholesale to the backing future.
    final boolean cancelRequested = mFuture.cancel(mayInterruptIfRunning);
    return cancelRequested;
}
/**
 * Blocks until the background computation completes, then returns its result.
 *
 * @return The computed result.
 *
 * @throws CancellationException If the computation was cancelled.
 * @throws ExecutionException If the computation threw an exception.
 * @throws InterruptedException If the current thread was interrupted
 *         while waiting.
 */
public final Result get() throws InterruptedException, ExecutionException {
    // Surface the future's value (or rethrow its failure) to the caller.
    final Result computed = mFuture.get();
    return computed;
}
/**
 * Blocks for at most the given time for the background computation to
 * complete, then returns its result.
 *
 * @param timeout Time to wait before cancelling the operation.
 * @param unit The time unit for the timeout.
 *
 * @return The computed result.
 *
 * @throws CancellationException If the computation was cancelled.
 * @throws ExecutionException If the computation threw an exception.
 * @throws InterruptedException If the current thread was interrupted
 *         while waiting.
 * @throws TimeoutException If the wait timed out.
 */
public final Result get(long timeout, TimeUnit unit) throws InterruptedException,
        ExecutionException, TimeoutException {
    // Bounded wait, delegated to the backing future.
    final Result computed = mFuture.get(timeout, unit);
    return computed;
}
/**
 * Executes the task with the specified parameters on the default executor
 * and returns this instance so the caller can keep a reference to it.
 *
 * <p>Note: this function schedules the task on a queue for a single background
 * thread or pool of threads depending on the platform version. When first
 * introduced, AsyncTasks were executed serially on a single background thread.
 * Starting with {@link android.os.Build.VERSION_CODES#DONUT}, this was changed
 * to a pool of threads allowing multiple tasks to operate in parallel. After
 * {@link android.os.Build.VERSION_CODES#HONEYCOMB}, it is planned to change this
 * back to a single thread to avoid common application errors caused by parallel
 * execution. If you truly want parallel execution, use
 * {@link #executeOnExecutor} with {@link #THREAD_POOL_EXECUTOR}; see the
 * commentary there for warnings on its use.
 *
 * <p>This method must be invoked on the UI thread.
 *
 * @param params The parameters of the task.
 *
 * @return This instance of AsyncTask.
 *
 * @throws IllegalStateException If {@link #getStatus()} returns either
 *         {@link android.os.AsyncTask.Status#RUNNING} or
 *         {@link android.os.AsyncTask.Status#FINISHED}.
 */
public final ModernAsyncTask<Params, Progress, Result> execute(Params... params) {
    // All state checks and scheduling happen in executeOnExecutor.
    return executeOnExecutor(sDefaultExecutor, params);
}
/**
 * Executes the task with the specified parameters on the given executor and
 * returns this instance so the caller can keep a reference to it.
 *
 * <p>This method is typically used with {@link #THREAD_POOL_EXECUTOR} to
 * allow multiple tasks to run in parallel on a pool of threads managed by
 * AsyncTask, however you can also use your own {@link Executor} for custom
 * behavior.
 *
 * <p><em>Warning:</em> Allowing multiple tasks to run in parallel from
 * a thread pool is generally <em>not</em> what one wants, because the order
 * of their operation is not defined. For example, if these tasks are used
 * to modify any state in common (such as writing a file due to a button click),
 * there are no guarantees on the order of the modifications.
 * Without careful work it is possible in rare cases for the newer version
 * of the data to be over-written by an older one, leading to obscure data
 * loss and stability issues.
 *
 * <p>This method must be invoked on the UI thread.
 *
 * @param exec The executor to use. {@link #THREAD_POOL_EXECUTOR} is available as a
 *        convenient process-wide thread pool for tasks that are loosely coupled.
 * @param params The parameters of the task.
 *
 * @return This instance of AsyncTask.
 *
 * @throws IllegalStateException If {@link #getStatus()} returns either
 *         {@link android.os.AsyncTask.Status#RUNNING}
 *         or {@link android.os.AsyncTask.Status#FINISHED}.
 */
public final ModernAsyncTask<Params, Progress, Result> executeOnExecutor(Executor exec,
        Params... params) {
    // A task instance is single-shot: reject anything but the pristine PENDING state.
    if (mStatus == Status.RUNNING) {
        throw new IllegalStateException("Cannot execute task:"
                + " the task is already running.");
    }
    if (mStatus == Status.FINISHED) {
        throw new IllegalStateException("Cannot execute task:"
                + " the task has already been executed "
                + "(a task can be executed only once)");
    }
    mStatus = Status.RUNNING;
    // Give subclasses their UI-thread callback before the worker is queued.
    onPreExecute();
    mWorker.mParams = params;
    exec.execute(mFuture);
    return this;
}
/**
 * Convenience version of {@link #execute(Object...)} for use with
 * a simple Runnable object.
 */
public static void execute(Runnable runnable) {
    // Schedules the runnable on the same default executor used by execute(Object...).
    sDefaultExecutor.execute(runnable);
}
/**
 * This method can be invoked from {@link #doInBackground} to
 * publish updates on the UI thread while the background computation is
 * still running. Each call to this method will trigger the execution of
 * {@link #onProgressUpdate} on the UI thread.
 *
 * {@link #onProgressUpdate} will not be called if the task has been
 * canceled.
 *
 * @param values The progress values to update the UI with.
 *
 * @see #onProgressUpdate
 * @see #doInBackground
 */
protected final void publishProgress(Progress... values) {
    // Silently drop progress once cancelled; otherwise marshal the values
    // to the UI thread through the internal handler.
    if (!isCancelled()) {
        getHandler().obtainMessage(MESSAGE_POST_PROGRESS,
                new AsyncTaskResult<Progress>(this, values)).sendToTarget();
    }
}
/**
 * Delivers the final result on the UI thread, routing it to the appropriate
 * completion hook, and marks the task as finished.
 */
private void finish(Result result) {
    final boolean wasCancelled = isCancelled();
    if (wasCancelled) {
        onCancelled(result);
    } else {
        onPostExecute(result);
    }
    mStatus = Status.FINISHED;
}
/** Marshals results and progress updates from the worker onto the main thread. */
private static class InternalHandler extends Handler {
    public InternalHandler() {
        // All task callbacks must run on the main (UI) looper.
        super(Looper.getMainLooper());
    }

    @SuppressWarnings({"unchecked", "RawUseOfParameterizedType"})
    @Override
    public void handleMessage(Message msg) {
        final AsyncTaskResult payload = (AsyncTaskResult) msg.obj;
        if (msg.what == MESSAGE_POST_RESULT) {
            // A result message always carries exactly one value.
            payload.mTask.finish(payload.mData[0]);
        } else if (msg.what == MESSAGE_POST_PROGRESS) {
            payload.mTask.onProgressUpdate(payload.mData);
        }
    }
}
// Callable that carries the task parameters into the background computation.
private static abstract class WorkerRunnable<Params, Result> implements Callable<Result> {
    Params[] mParams;
}
// Immutable message payload pairing a task with its result or progress values.
// InternalHandler reads mTask/mData directly, so the field names are part of
// this class's internal contract.
@SuppressWarnings({"RawUseOfParameterizedType"})
private static class AsyncTaskResult<Data> {
    final ModernAsyncTask mTask;
    final Data[] mData;
    AsyncTaskResult(ModernAsyncTask task, Data... data) {
        mTask = task;
        mData = data;
    }
}
}
| |
/*
* Copyright 2015-2016 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.hal.client.accesscontrol;
import java.util.List;
import java.util.Map;
import java.util.function.Predicate;
import org.jboss.hal.config.Role;
import org.jboss.hal.core.OperationFactory;
import org.jboss.hal.dmr.Composite;
import org.jboss.hal.dmr.ModelNode;
import org.jboss.hal.dmr.Operation;
import org.jboss.hal.dmr.ResourceAddress;
import org.jboss.hal.dmr.ResourceCheck;
import org.jboss.hal.dmr.dispatch.Dispatcher;
import org.jboss.hal.flow.FlowContext;
import org.jboss.hal.flow.Task;
import org.jboss.hal.meta.Metadata;
import rx.Completable;
import static java.util.stream.Collectors.toList;
import static org.jboss.hal.dmr.ModelDescriptionConstants.*;
/** Tasks related to principals, roles and assignments. */
/** Flow tasks around principals, roles and their assignments. */
final class AccessControlTasks {

    /**
     * Probes the role mapping of the given role and pushes an HTTP-like status
     * onto the context stack: {@code 200} when the mapping exists, {@code 404}
     * otherwise.
     */
    static class CheckRoleMapping extends ResourceCheck {

        CheckRoleMapping(Dispatcher dispatcher, Role role) {
            super(dispatcher, AddressTemplates.roleMapping(role));
        }
    }

    /**
     * Creates the role mapping for the given role when the predicate accepts the
     * status code found on top of the context stack; otherwise the flow just
     * continues.
     */
    static class AddRoleMapping implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;
        private final Predicate<Integer> statusPredicate;

        AddRoleMapping(Dispatcher dispatcher, Role role, Predicate<Integer> predicate) {
            this.dispatcher = dispatcher;
            this.role = role;
            this.statusPredicate = predicate;
        }

        @Override
        public Completable call(FlowContext context) {
            // Nothing to decide on when no status was pushed by a previous task.
            if (context.emptyStack()) {
                return Completable.complete();
            }
            Integer statusCode = context.pop();
            if (!statusPredicate.test(statusCode)) {
                return Completable.complete();
            }
            Operation addMapping = new Operation.Builder(AddressTemplates.roleMapping(role), ADD)
                    .build();
            return dispatcher.execute(addMapping).toCompletable();
        }
    }

    /**
     * Updates the {@code include-all} attribute of a role mapping. The mapping
     * must already exist; chain {@link CheckRoleMapping} and
     * {@link AddRoleMapping} beforehand to guarantee that.
     */
    static class ModifyIncludeAll implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;
        private final boolean includeAll;

        ModifyIncludeAll(Dispatcher dispatcher, Role role, boolean includeAll) {
            this.dispatcher = dispatcher;
            this.role = role;
            this.includeAll = includeAll;
        }

        @Override
        public Completable call(FlowContext context) {
            Operation writeAttribute = new Operation.Builder(
                    AddressTemplates.roleMapping(role), WRITE_ATTRIBUTE_OPERATION)
                    .param(NAME, INCLUDE_ALL)
                    .param(VALUE, includeAll)
                    .build();
            return dispatcher.execute(writeAttribute).toCompletable();
        }
    }

    /**
     * Removes the role mapping for the given role when the predicate accepts the
     * status code found on top of the context stack; otherwise the flow just
     * continues.
     */
    static class RemoveRoleMapping implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;
        private final Predicate<Integer> statusPredicate;

        RemoveRoleMapping(Dispatcher dispatcher, Role role, Predicate<Integer> predicate) {
            this.dispatcher = dispatcher;
            this.role = role;
            this.statusPredicate = predicate;
        }

        @Override
        public Completable call(FlowContext context) {
            Completable outcome = Completable.complete();
            if (!context.emptyStack()) {
                Integer statusCode = context.pop();
                if (statusPredicate.test(statusCode)) {
                    Operation removeMapping = new Operation.Builder(
                            AddressTemplates.roleMapping(role), REMOVE).build();
                    outcome = dispatcher.execute(removeMapping).toCompletable();
                }
            }
            return outcome;
        }
    }

    /**
     * Adds an assignment (include or exclude) to an existing role mapping. The
     * mapping must already exist; chain {@link CheckRoleMapping} and
     * {@link AddRoleMapping} beforehand to guarantee that.
     */
    static class AddAssignment implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;
        private final Principal principal;
        private final boolean include;

        AddAssignment(Dispatcher dispatcher, Role role, Principal principal, boolean include) {
            this.dispatcher = dispatcher;
            this.role = role;
            this.principal = principal;
            this.include = include;
        }

        @Override
        public Completable call(FlowContext context) {
            // The assignment lives under either the "include" or the "exclude" child
            // of the role mapping, keyed by the principal's resource name.
            String childType = include ? INCLUDE : EXCLUDE;
            ResourceAddress assignmentAddress = AddressTemplates.roleMapping(role)
                    .add(childType, principal.getResourceName());
            Operation.Builder addAssignment = new Operation.Builder(assignmentAddress, ADD)
                    .param(NAME, principal.getName())
                    .param(TYPE, principal.getType().name());
            if (principal.getRealm() != null) {
                addAssignment.param(REALM, principal.getRealm());
            }
            return dispatcher.execute(addAssignment.build()).toCompletable();
        }
    }

    /**
     * Removes the given assignments from their role mappings, batching multiple
     * removals into one composite operation. The mappings must already exist;
     * chain {@link CheckRoleMapping} and {@link AddRoleMapping} beforehand to
     * guarantee that.
     */
    static class RemoveAssignments implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final List<Assignment> assignments;

        RemoveAssignments(Dispatcher dispatcher, List<Assignment> assignments) {
            this.dispatcher = dispatcher;
            this.assignments = assignments;
        }

        @Override
        public Completable call(FlowContext context) {
            if (assignments.isEmpty()) {
                return Completable.complete();
            }
            if (assignments.size() == 1) {
                ResourceAddress address = AddressTemplates.assignment(assignments.get(0));
                Operation removal = new Operation.Builder(address, REMOVE).build();
                return dispatcher.execute(removal).toCompletable();
            }
            // More than one assignment: remove them all in a single round trip.
            List<Operation> removals = assignments.stream()
                    .map(a -> new Operation.Builder(AddressTemplates.assignment(a), REMOVE).build())
                    .collect(toList());
            return dispatcher.execute(new Composite(removals)).toCompletable();
        }
    }

    /** Adds a scoped role of the given type with the given payload. */
    static class AddScopedRole implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role.Type type;
        private final String name;
        private final ModelNode payload;

        AddScopedRole(Dispatcher dispatcher, Role.Type type, String name, ModelNode payload) {
            this.dispatcher = dispatcher;
            this.type = type;
            this.name = name;
            this.payload = payload;
        }

        @Override
        public Completable call(FlowContext context) {
            // Only name and type matter for computing a scoped role's address.
            Role transientRole = new Role(name, null, type, null);
            ResourceAddress address = AddressTemplates.scopedRole(transientRole);
            Operation addRole = new Operation.Builder(address, ADD)
                    .payload(payload)
                    .build();
            return dispatcher.execute(addRole).toCompletable();
        }
    }

    /** Applies a change set to an existing scoped role. */
    static class ModifyScopedRole implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;
        private final Map<String, Object> changedValues;
        private final Metadata metadata;

        ModifyScopedRole(Dispatcher dispatcher, Role role, Map<String, Object> changedValues,
                Metadata metadata) {
            this.dispatcher = dispatcher;
            this.role = role;
            this.changedValues = changedValues;
            this.metadata = metadata;
        }

        @Override
        public Completable call(FlowContext context) {
            ResourceAddress address = AddressTemplates.scopedRole(role);
            Operation modifyRole = new OperationFactory().fromChangeSet(address, changedValues,
                    metadata);
            return dispatcher.execute(modifyRole).toCompletable();
        }
    }

    /** Removes a scoped role. */
    static class RemoveScopedRole implements Task<FlowContext> {

        private final Dispatcher dispatcher;
        private final Role role;

        RemoveScopedRole(Dispatcher dispatcher, Role role) {
            this.dispatcher = dispatcher;
            this.role = role;
        }

        @Override
        public Completable call(FlowContext context) {
            Operation removeRole = new Operation.Builder(AddressTemplates.scopedRole(role), REMOVE)
                    .build();
            return dispatcher.execute(removeRole).toCompletable();
        }
    }

    private AccessControlTasks() {
        // Namespace class: instantiation is not supported.
    }
}
| |
/*
* Copyright 2013-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.event.listener;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.notNullValue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.artifact_cache.ArtifactCacheConnectEvent;
import com.facebook.buck.artifact_cache.ArtifactCacheMode;
import com.facebook.buck.artifact_cache.CacheResult;
import com.facebook.buck.artifact_cache.HttpArtifactCacheEvent;
import com.facebook.buck.config.FakeBuckConfig;
import com.facebook.buck.event.AbstractBuckEvent;
import com.facebook.buck.event.ArtifactCompressionEvent;
import com.facebook.buck.event.BuckEventBus;
import com.facebook.buck.event.BuckEventBusForTests;
import com.facebook.buck.event.ChromeTraceEvent;
import com.facebook.buck.event.CommandEvent;
import com.facebook.buck.event.CompilerPluginDurationEvent;
import com.facebook.buck.event.DefaultBuckEventBus;
import com.facebook.buck.event.EventKey;
import com.facebook.buck.event.PerfEventId;
import com.facebook.buck.event.SimplePerfEvent;
import com.facebook.buck.io.filesystem.ProjectFilesystem;
import com.facebook.buck.io.filesystem.TestProjectFilesystems;
import com.facebook.buck.jvm.java.AnnotationProcessingEvent;
import com.facebook.buck.jvm.java.tracing.JavacPhaseEvent;
import com.facebook.buck.log.InvocationInfo;
import com.facebook.buck.model.BuildId;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildEvent;
import com.facebook.buck.rules.BuildRuleDurationTracker;
import com.facebook.buck.rules.BuildRuleEvent;
import com.facebook.buck.rules.BuildRuleKeys;
import com.facebook.buck.rules.BuildRuleStatus;
import com.facebook.buck.rules.BuildRuleSuccessType;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.RuleKey;
import com.facebook.buck.step.StepEvent;
import com.facebook.buck.test.external.ExternalTestRunEvent;
import com.facebook.buck.test.external.ExternalTestSpecCalculationEvent;
import com.facebook.buck.test.selectors.TestSelectorList;
import com.facebook.buck.timing.Clock;
import com.facebook.buck.timing.FakeClock;
import com.facebook.buck.timing.IncrementingFakeClock;
import com.facebook.buck.util.HumanReadableException;
import com.facebook.buck.util.MoreCollectors;
import com.facebook.buck.util.ObjectMappers;
import com.facebook.buck.util.perf.PerfStatsTracking;
import com.fasterxml.jackson.core.type.TypeReference;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import java.io.BufferedInputStream;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
import java.util.Optional;
import java.util.TimeZone;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPInputStream;
import org.hamcrest.Matchers;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
public class ChromeTraceBuildListenerTest {
private static final BuildId BUILD_ID = new BuildId("BUILD_ID");
// Epoch millis for 2014-09-02 23:55:51 UTC — presumably matches EXPECTED_DIR below; verify.
private static final long CURRENT_TIME_MILLIS = 1409702151000L;
private static final long NANO_TIME = TimeUnit.SECONDS.toNanos(300);
// Deterministic clock so trace timestamps and log-dir names are stable across runs.
private static final FakeClock FAKE_CLOCK =
    FakeClock.builder().currentTimeMillis(CURRENT_TIME_MILLIS).nanoTime(NANO_TIME).build();
private static final String EXPECTED_DIR =
    "buck-out/log/2014-09-02_23h55m51s_no_sub_command_BUILD_ID/";
// Fresh temp dir per test; everything below is re-initialized in setUp().
@Rule public TemporaryFolder tmpDir = new TemporaryFolder();
private InvocationInfo invocationInfo;
private BuildRuleDurationTracker durationTracker;
private BuckEventBus eventBus;
@Before
public void setUp() throws IOException {
  // Build an InvocationInfo whose timestamp and build id match the class
  // constants, rooted under the JUnit temp folder so each test writes its
  // traces into an isolated buck-out/log directory.
  invocationInfo =
      InvocationInfo.builder()
          .setTimestampMillis(CURRENT_TIME_MILLIS)
          .setBuckLogDir(tmpDir.getRoot().toPath().resolve("buck-out/log"))
          .setBuildId(BUILD_ID)
          .setSubCommand("no_sub_command")
          .setIsDaemon(false)
          .setSuperConsoleEnabled(false)
          .setUnexpandedCommandArgs(ImmutableList.of("@mode/arglist", "--foo", "--bar"))
          .setCommandArgs(ImmutableList.of("--config", "configvalue", "--foo", "--bar"))
          .build();
  durationTracker = new BuildRuleDurationTracker();
  // Event bus driven by the fixed FAKE_CLOCK so posted events get deterministic times.
  eventBus = new DefaultBuckEventBus(FAKE_CLOCK, BUILD_ID);
}
/** Verifies that written trace events carry micro times derived from the clock's nano time. */
@Test
public void testEventsUseNanoTime() throws InterruptedException, IOException {
  ProjectFilesystem filesystem =
      TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
  ChromeTraceBuildListener listener =
      new ChromeTraceBuildListener(
          filesystem, invocationInfo, FAKE_CLOCK, chromeTraceConfig(1, false));

  FakeBuckEvent event = new FakeBuckEvent();
  // Posting stamps the event with the fake clock's time.
  eventBus.post(event);
  listener.writeChromeTraceEvent(
      "test", event.getEventName(), ChromeTraceEvent.Phase.BEGIN, ImmutableMap.of(), event);
  listener.outputTrace(BUILD_ID);

  Path tracePath =
      tmpDir.getRoot().toPath().resolve("buck-out").resolve("log").resolve("build.trace");
  List<ChromeTraceEvent> traceEvents =
      ObjectMappers.readValue(tracePath, new TypeReference<List<ChromeTraceEvent>>() {});

  assertThat(traceEvents, Matchers.hasSize(4));
  // The custom event is written after the three standard metadata entries.
  ChromeTraceEvent written = traceEvents.get(3);
  assertThat(written.getName(), Matchers.equalTo(event.getEventName()));
  assertThat(
      written.getMicroTime(),
      Matchers.equalTo(TimeUnit.NANOSECONDS.toMicros(FAKE_CLOCK.nanoTime())));
  assertThat(
      written.getMicroThreadUserTime(),
      Matchers.equalTo(
          TimeUnit.NANOSECONDS.toMicros(FAKE_CLOCK.threadUserNanoTime(written.getThreadId()))));
}
/** Verifies that metadata trace events also carry micro times derived from nano time. */
@Test
public void testMetadataEventsUseNanoTime() throws InterruptedException, IOException {
  ProjectFilesystem filesystem =
      TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
  ChromeTraceBuildListener listener =
      new ChromeTraceBuildListener(
          filesystem, invocationInfo, FAKE_CLOCK, chromeTraceConfig(1, false));

  listener.writeChromeTraceMetadataEvent("test", ImmutableMap.of());
  listener.outputTrace(BUILD_ID);

  Path tracePath =
      tmpDir.getRoot().toPath().resolve("buck-out").resolve("log").resolve("build.trace");
  List<ChromeTraceEvent> traceEvents =
      ObjectMappers.readValue(tracePath, new TypeReference<List<ChromeTraceEvent>>() {});

  assertThat(traceEvents, Matchers.hasSize(4));
  // The custom metadata entry follows the three standard metadata entries.
  ChromeTraceEvent written = traceEvents.get(3);
  assertThat(written.getName(), Matchers.equalTo("test"));
  assertThat(
      written.getMicroTime(),
      Matchers.equalTo(TimeUnit.NANOSECONDS.toMicros(FAKE_CLOCK.nanoTime())));
  assertThat(
      written.getMicroThreadUserTime(),
      Matchers.equalTo(
          TimeUnit.NANOSECONDS.toMicros(FAKE_CLOCK.threadUserNanoTime(written.getThreadId()))));
}
/** Verifies that old trace files beyond the configured limit are deleted on output. */
@Test
public void testDeleteFiles() throws InterruptedException, IOException {
  ProjectFilesystem filesystem =
      TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());

  // Seed the log directory with a current trace plus ten stale ones whose
  // modification times increase with i (build.1009.trace is the newest).
  String tracePath = invocationInfo.getLogDirectoryPath().resolve("build.trace").toString();
  filesystem.createParentDirs(tracePath);
  File currentTrace = new File(tracePath);
  currentTrace.createNewFile();
  currentTrace.setLastModified(0);
  for (int i = 0; i < 10; ++i) {
    File staleTrace =
        new File(String.format("%s/build.100%d.trace", invocationInfo.getLogDirectoryPath(), i));
    staleTrace.createNewFile();
    staleTrace.setLastModified(TimeUnit.SECONDS.toMillis(i));
  }

  ChromeTraceBuildListener listener =
      new ChromeTraceBuildListener(
          filesystem,
          invocationInfo,
          FAKE_CLOCK,
          Locale.US,
          TimeZone.getTimeZone("America/Los_Angeles"),
          chromeTraceConfig(3, false));
  listener.outputTrace(invocationInfo.getBuildId());

  ImmutableList<String> remainingTraces =
      filesystem
          .getDirectoryContents(invocationInfo.getLogDirectoryPath())
          .stream()
          .filter(p -> p.toString().endsWith(".trace"))
          .map(p -> p.getFileName().toString())
          .collect(MoreCollectors.toImmutableList());

  // Limit of 3 keeps the two newest stale traces plus the freshly written one;
  // "build.trace" itself also survives.
  assertEquals(4, remainingTraces.size());
  assertEquals(
      ImmutableSortedSet.of(
          "build.trace",
          "build.1009.trace",
          "build.1008.trace",
          "build.2014-09-02.16-55-51.BUILD_ID.trace"),
      ImmutableSortedSet.copyOf(remainingTraces));
}
@Test
public void testBuildJson() throws InterruptedException, IOException {
ProjectFilesystem projectFilesystem =
TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
BuildId buildId = new BuildId("ChromeTraceBuildListenerTestBuildId");
ChromeTraceBuildListener listener =
new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
FAKE_CLOCK,
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
chromeTraceConfig(42, false));
BuildTarget target = BuildTargetFactory.newInstance("//fake:rule");
FakeBuildRule rule = new FakeBuildRule(target, ImmutableSortedSet.of());
RuleKey ruleKey = new RuleKey("abc123");
String stepShortName = "fakeStep";
String stepDescription = "I'm a Fake Step!";
UUID stepUuid = UUID.randomUUID();
ImmutableSet<BuildTarget> buildTargets = ImmutableSet.of(target);
Iterable<String> buildArgs = Iterables.transform(buildTargets, Object::toString);
Clock fakeClock = new IncrementingFakeClock(TimeUnit.MILLISECONDS.toNanos(1));
BuckEventBus eventBus = BuckEventBusForTests.newInstance(fakeClock, buildId);
eventBus.register(listener);
CommandEvent.Started commandEventStarted =
CommandEvent.started("party", ImmutableList.of("arg1", "arg2"), true, 23L);
eventBus.post(commandEventStarted);
eventBus.post(
new PerfStatsTracking.MemoryPerfStatsEvent(
/* freeMemoryBytes */ 1024 * 1024L,
/* totalMemoryBytes */ 3 * 1024 * 1024L,
/* maxMemoryBytes */ 4 * 1024 * 1024L,
/* timeSpentInGcMs */ -1,
/* currentMemoryBytesUsageByPool */ ImmutableMap.of("flower", 42L * 1024 * 1024)));
ArtifactCacheConnectEvent.Started artifactCacheConnectEventStarted =
ArtifactCacheConnectEvent.started();
eventBus.post(artifactCacheConnectEventStarted);
eventBus.post(ArtifactCacheConnectEvent.finished(artifactCacheConnectEventStarted));
BuildEvent.Started buildEventStarted = BuildEvent.started(buildArgs);
eventBus.post(buildEventStarted);
HttpArtifactCacheEvent.Started artifactCacheEventStarted =
ArtifactCacheTestUtils.newFetchStartedEvent(ruleKey);
eventBus.post(artifactCacheEventStarted);
eventBus.post(
ArtifactCacheTestUtils.newFetchFinishedEvent(
artifactCacheEventStarted, CacheResult.hit("http", ArtifactCacheMode.http)));
ArtifactCompressionEvent.Started artifactCompressionStartedEvent =
ArtifactCompressionEvent.started(
ArtifactCompressionEvent.Operation.COMPRESS, ImmutableSet.of(ruleKey));
eventBus.post(artifactCompressionStartedEvent);
eventBus.post(ArtifactCompressionEvent.finished(artifactCompressionStartedEvent));
BuildRuleEvent.Started started = BuildRuleEvent.started(rule, durationTracker);
eventBus.post(started);
eventBus.post(StepEvent.started(stepShortName, stepDescription, stepUuid));
JavacPhaseEvent.Started runProcessorsStartedEvent =
JavacPhaseEvent.started(
target, JavacPhaseEvent.Phase.RUN_ANNOTATION_PROCESSORS, ImmutableMap.of());
eventBus.post(runProcessorsStartedEvent);
String annotationProcessorName = "com.facebook.FakeProcessor";
AnnotationProcessingEvent.Operation operation = AnnotationProcessingEvent.Operation.PROCESS;
int annotationRound = 1;
boolean isLastRound = false;
AnnotationProcessingEvent.Started annotationProcessingEventStarted =
AnnotationProcessingEvent.started(
target, annotationProcessorName, operation, annotationRound, isLastRound);
eventBus.post(annotationProcessingEventStarted);
HttpArtifactCacheEvent.Started httpStarted =
ArtifactCacheTestUtils.newUploadStartedEvent(
new BuildId("horse"), Optional.of("TARGET_ONE"), ImmutableSet.of(ruleKey));
eventBus.post(httpStarted);
HttpArtifactCacheEvent.Finished httpFinished =
ArtifactCacheTestUtils.newFinishedEvent(httpStarted, false);
eventBus.post(httpFinished);
final CompilerPluginDurationEvent.Started processingPartOneStarted =
CompilerPluginDurationEvent.started(
target, annotationProcessorName, "processingPartOne", ImmutableMap.of());
eventBus.post(processingPartOneStarted);
eventBus.post(
CompilerPluginDurationEvent.finished(processingPartOneStarted, ImmutableMap.of()));
eventBus.post(AnnotationProcessingEvent.finished(annotationProcessingEventStarted));
eventBus.post(JavacPhaseEvent.finished(runProcessorsStartedEvent, ImmutableMap.of()));
eventBus.post(
StepEvent.finished(StepEvent.started(stepShortName, stepDescription, stepUuid), 0));
eventBus.post(
BuildRuleEvent.finished(
started,
BuildRuleKeys.of(ruleKey),
BuildRuleStatus.SUCCESS,
CacheResult.miss(),
Optional.empty(),
Optional.of(BuildRuleSuccessType.BUILT_LOCALLY),
false,
Optional.empty(),
Optional.empty(),
Optional.empty()));
try (final SimplePerfEvent.Scope scope1 =
SimplePerfEvent.scope(
eventBus,
PerfEventId.of("planning"),
ImmutableMap.<String, Object>of("nefarious", true))) {
try (final SimplePerfEvent.Scope scope2 =
SimplePerfEvent.scope(eventBus, PerfEventId.of("scheming"))) {
scope2.appendFinishedInfo("success", false);
}
scope1.appendFinishedInfo(
"extras",
ImmutableList.<ImmutableMap<String, Object>>of(
ImmutableMap.of("boolean", true),
ImmutableMap.of("string", "ok"),
ImmutableMap.of("int", 42)));
}
eventBus.post(
ExternalTestRunEvent.started(true, TestSelectorList.EMPTY, false, ImmutableSet.of()));
BuildTarget buildTarget = BuildTargetFactory.newInstance("//example:app");
eventBus.post(ExternalTestSpecCalculationEvent.started(buildTarget));
eventBus.post(ExternalTestSpecCalculationEvent.finished(buildTarget));
eventBus.post(ExternalTestRunEvent.finished(ImmutableSet.of(), 0));
eventBus.post(BuildEvent.finished(buildEventStarted, 0));
eventBus.post(CommandEvent.finished(commandEventStarted, /* exitCode */ 0));
listener.outputTrace(new BuildId("BUILD_ID"));
List<ChromeTraceEvent> originalResultList =
ObjectMappers.readValue(
tmpDir.getRoot().toPath().resolve("buck-out").resolve("log").resolve("build.trace"),
new TypeReference<List<ChromeTraceEvent>>() {});
List<ChromeTraceEvent> resultListCopy = new ArrayList<>();
resultListCopy.addAll(originalResultList);
ImmutableMap<String, String> emptyArgs = ImmutableMap.of();
assertNextResult(
resultListCopy,
"process_name",
ChromeTraceEvent.Phase.METADATA,
ImmutableMap.<String, Object>builder().put("name", "BUILD_ID").build());
assertNextResult(
resultListCopy,
"process_labels",
ChromeTraceEvent.Phase.METADATA,
ImmutableMap.<String, Object>builder()
.put(
"labels",
String.format(
"user_args=[@mode/arglist, --foo, --bar], is_daemon=false, timestamp=%d",
invocationInfo.getTimestampMillis()))
.build());
assertNextResult(
resultListCopy,
"ProjectFilesystemDelegate",
ChromeTraceEvent.Phase.METADATA,
ImmutableMap.of(
"filesystem",
"default",
"filesystem.root",
projectFilesystem.getRootPath().toString()));
assertNextResult(
resultListCopy,
"party",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("command_args", "arg1 arg2"));
assertNextResult(
resultListCopy,
"memory",
ChromeTraceEvent.Phase.COUNTER,
ImmutableMap.<String, String>builder()
.put("used_memory_mb", "2")
.put("free_memory_mb", "1")
.put("total_memory_mb", "3")
.put("max_memory_mb", "4")
.put("time_spent_in_gc_sec", "0")
.put("pool_flower_mb", "42")
.build());
assertNextResult(resultListCopy, "artifact_connect", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(resultListCopy, "artifact_connect", ChromeTraceEvent.Phase.END, emptyArgs);
assertNextResult(resultListCopy, "build", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(
resultListCopy,
"http_artifact_fetch",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("rule_key", "abc123"));
assertNextResult(
resultListCopy,
"http_artifact_fetch",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"rule_key", "abc123",
"success", "true",
"cache_result", "HTTP_HIT"));
assertNextResult(
resultListCopy,
"artifact_compress",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("rule_key", "abc123"));
assertNextResult(
resultListCopy,
"artifact_compress",
ChromeTraceEvent.Phase.END,
ImmutableMap.of("rule_key", "abc123"));
// BuildRuleEvent.Started
assertNextResult(
resultListCopy, "//fake:rule", ChromeTraceEvent.Phase.BEGIN, ImmutableMap.of());
assertNextResult(resultListCopy, "fakeStep", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(
resultListCopy, "run annotation processors", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(
resultListCopy,
"com.facebook.FakeProcessor.process",
ChromeTraceEvent.Phase.BEGIN,
emptyArgs);
assertNextResult(
resultListCopy,
"http_artifact_store",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("rule_key", "abc123"));
assertNextResult(
resultListCopy,
"http_artifact_store",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"success", "true",
"rule_key", "abc123"));
assertNextResult(resultListCopy, "processingPartOne", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(resultListCopy, "processingPartOne", ChromeTraceEvent.Phase.END, emptyArgs);
assertNextResult(
resultListCopy,
"com.facebook.FakeProcessor.process",
ChromeTraceEvent.Phase.END,
emptyArgs);
assertNextResult(
resultListCopy, "run annotation processors", ChromeTraceEvent.Phase.END, emptyArgs);
assertNextResult(
resultListCopy,
"fakeStep",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"description", "I'm a Fake Step!",
"exit_code", "0"));
assertNextResult(
resultListCopy,
"//fake:rule",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"cache_result", "miss",
"success_type", "BUILT_LOCALLY"));
assertNextResult(
resultListCopy,
"planning",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("nefarious", true));
assertNextResult(resultListCopy, "scheming", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(
resultListCopy, "scheming", ChromeTraceEvent.Phase.END, ImmutableMap.of("success", false));
assertNextResult(
resultListCopy,
"planning",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"extras",
ImmutableList.<ImmutableMap<String, Object>>of(
ImmutableMap.of("boolean", true),
ImmutableMap.of("string", "ok"),
ImmutableMap.of("int", 42))));
assertNextResult(resultListCopy, "external_test_run", ChromeTraceEvent.Phase.BEGIN, emptyArgs);
assertNextResult(
resultListCopy,
"external_test_spec_calc",
ChromeTraceEvent.Phase.BEGIN,
ImmutableMap.of("target", "//example:app"));
assertNextResult(
resultListCopy,
"external_test_spec_calc",
ChromeTraceEvent.Phase.END,
ImmutableMap.of("target", "//example:app"));
assertNextResult(resultListCopy, "external_test_run", ChromeTraceEvent.Phase.END, emptyArgs);
assertNextResult(resultListCopy, "build", ChromeTraceEvent.Phase.END, emptyArgs);
assertNextResult(
resultListCopy,
"party",
ChromeTraceEvent.Phase.END,
ImmutableMap.of(
"command_args", "arg1 arg2",
"daemon", "true"));
assertEquals(0, resultListCopy.size());
}
/**
 * Asserts that the head event of {@code resultList} has the expected name,
 * phase, and args, then removes it so successive calls consume the recorded
 * trace events in order.
 *
 * @param resultList mutable list of remaining trace events; the head is consumed
 * @param expectedName expected event name
 * @param expectedPhase expected Chrome trace phase
 * @param expectedArgs expected args map, compared via equals
 */
private static void assertNextResult(
    List<ChromeTraceEvent> resultList,
    String expectedName,
    ChromeTraceEvent.Phase expectedPhase,
    ImmutableMap<String, ?> expectedArgs) {
  assertTrue(resultList.size() > 0);
  // Read the head once instead of indexing it three times.
  ChromeTraceEvent next = resultList.get(0);
  assertEquals(expectedName, next.getName());
  assertEquals(expectedPhase, next.getPhase());
  assertEquals(expectedArgs, next.getArgs());
  resultList.remove(0);
}
/**
 * Verifies that a failure to write the trace file is surfaced as a
 * HumanReadableException whose message names the inaccessible buck-out path.
 */
@Test
public void testOutputFailed() throws InterruptedException, IOException {
ProjectFilesystem projectFilesystem =
TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
// Skip the test where the read-only bit cannot actually be applied to the
// temp dir (e.g. when running as root or on certain filesystems).
assumeTrue("Can make the root directory read-only", tmpDir.getRoot().setReadOnly());
try {
ChromeTraceBuildListener listener =
new ChromeTraceBuildListener(
projectFilesystem,
invocationInfo,
FAKE_CLOCK,
Locale.US,
TimeZone.getTimeZone("America/Los_Angeles"),
chromeTraceConfig(3, false));
listener.outputTrace(invocationInfo.getBuildId());
fail("Expected an exception.");
} catch (HumanReadableException e) {
// The message embeds the AccessDeniedException thrown for buck-out.
assertEquals(
"Unable to write trace file: java.nio.file.AccessDeniedException: "
+ projectFilesystem.resolve(projectFilesystem.getBuckPaths().getBuckOut()),
e.getMessage());
} finally {
// Restore writability so the temp-dir rule can clean up after the test.
tmpDir.getRoot().setWritable(true);
}
}
/**
 * Verifies that the generated trace file name embeds the (fake) clock's
 * current timestamp and the build id.
 */
@Test
public void outputFileUsesCurrentTime() throws InterruptedException, IOException {
  ProjectFilesystem projectFilesystem =
      TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
  ChromeTraceBuildListener listener =
      new ChromeTraceBuildListener(
          projectFilesystem,
          invocationInfo,
          FAKE_CLOCK,
          Locale.US,
          TimeZone.getTimeZone("America/Los_Angeles"),
          chromeTraceConfig(1, false));
  listener.outputTrace(invocationInfo.getBuildId());
  // FAKE_CLOCK pins "now" to 2014-09-02 16:55:51, which must appear in the name.
  Path expectedTracePath = Paths.get(EXPECTED_DIR + "build.2014-09-02.16-55-51.BUILD_ID.trace");
  assertTrue(projectFilesystem.exists(expectedTracePath));
}
/**
 * Verifies that with compression enabled the listener writes a .trace.gz file
 * containing a non-empty JSON array of events.
 */
@Test
public void canCompressTraces() throws InterruptedException, IOException {
  ProjectFilesystem projectFilesystem =
      TestProjectFilesystems.createProjectFilesystem(tmpDir.getRoot().toPath());
  ChromeTraceBuildListener listener =
      new ChromeTraceBuildListener(
          projectFilesystem,
          invocationInfo,
          FAKE_CLOCK,
          Locale.US,
          TimeZone.getTimeZone("America/Los_Angeles"),
          chromeTraceConfig(1, true));
  listener.outputTrace(invocationInfo.getBuildId());
  Path tracePath = Paths.get(EXPECTED_DIR + "build.2014-09-02.16-55-51.BUILD_ID.trace.gz");
  assertTrue(projectFilesystem.exists(tracePath));
  // try-with-resources: the original leaked the gzip/file stream.
  List<Object> elements;
  try (BufferedInputStream stream =
      new BufferedInputStream(
          new GZIPInputStream(projectFilesystem.newFileInputStream(tracePath)))) {
    elements = ObjectMappers.createParser(stream).readValueAs(new TypeReference<List<Object>>() {});
  }
  assertThat(elements, notNullValue());
  assertThat(elements, not(empty()));
}
/**
 * Builds a ChromeTraceBuckConfig whose [log] section carries the given
 * max_traces and compress_traces settings.
 */
private static ChromeTraceBuckConfig chromeTraceConfig(int tracesToKeep, boolean compressTraces) {
  ImmutableMap<String, String> logSection =
      ImmutableMap.of(
          "max_traces", Integer.toString(tracesToKeep),
          "compress_traces", Boolean.toString(compressTraces));
  return ChromeTraceBuckConfig.of(
      FakeBuckConfig.builder().setSections(ImmutableMap.of("log", logSection)).build());
}
/**
 * Minimal BuckEvent implementation used to feed the listener an event type it
 * has no dedicated handling for.
 */
private static class FakeBuckEvent extends AbstractBuckEvent {
protected FakeBuckEvent() {
// Fixed key: every instance correlates to the same logical event.
super(EventKey.of(42));
}
@Override
public String getEventName() {
return "fake";
}
@Override
protected String getValueString() {
return "fake";
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.protocol.amqp.proton;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import org.apache.activemq.artemis.api.core.ActiveMQSecurityException;
import org.apache.activemq.artemis.core.server.ActiveMQServer;
import org.apache.activemq.artemis.core.server.ServerProducer;
import org.apache.activemq.artemis.core.server.impl.ServerProducerImpl;
import org.apache.activemq.artemis.protocol.amqp.connect.mirror.AMQPMirrorControllerSource;
import org.apache.activemq.artemis.protocol.amqp.connect.mirror.AMQPMirrorControllerTarget;
import org.apache.activemq.artemis.protocol.amqp.broker.AMQPSessionCallback;
import org.apache.activemq.artemis.protocol.amqp.client.ProtonClientSenderContext;
import org.apache.activemq.artemis.protocol.amqp.exceptions.ActiveMQAMQPException;
import org.apache.activemq.artemis.protocol.amqp.exceptions.ActiveMQAMQPInternalErrorException;
import org.apache.activemq.artemis.protocol.amqp.proton.transaction.ProtonTransactionHandler;
import org.apache.qpid.proton.amqp.Symbol;
import org.apache.qpid.proton.amqp.transaction.Coordinator;
import org.apache.qpid.proton.amqp.transport.ErrorCondition;
import org.apache.qpid.proton.engine.EndpointState;
import org.apache.qpid.proton.engine.Receiver;
import org.apache.qpid.proton.engine.Sender;
import org.apache.qpid.proton.engine.Session;
import org.jboss.logging.Logger;
/**
 * Server-side context for one AMQP session: tracks the receiver (producer) and
 * sender (consumer) links attached to the proton {@link Session} and bridges
 * them to the broker through the {@link AMQPSessionCallback}.
 */
public class AMQPSessionContext extends ProtonInitializable {
private static final Logger log = Logger.getLogger(AMQPSessionContext.class);
protected final AMQPConnectionContext connection;
protected final AMQPSessionCallback sessionSPI;
protected final Session session;
// Proton link -> server-side context. Concurrent maps: links may be
// attached/detached from a different thread than the session close path.
protected Map<Receiver, ProtonAbstractReceiver> receivers = new ConcurrentHashMap<>();
protected Map<Sender, ProtonServerSenderContext> senders = new ConcurrentHashMap<>();
protected boolean closed = false;
// Generates and recycles delivery tags for transfers on this session.
protected final AmqpTransferTagGenerator tagCache = new AmqpTransferTagGenerator();
protected final ActiveMQServer server;
public AMQPSessionContext(AMQPSessionCallback sessionSPI, AMQPConnectionContext connection, Session session, ActiveMQServer server) {
this.connection = connection;
this.sessionSPI = sessionSPI;
this.session = session;
this.server = server;
}
// The same sender contexts keyed by broker consumer, so a sender can also be
// looked up/removed when only the broker-side consumer is known.
protected Map<Object, ProtonServerSenderContext> serverSenders = new ConcurrentHashMap<>();
public AMQPSessionCallback getSessionSPI() {
return sessionSPI;
}
/**
 * One-time initialization: binds the session callback to this context using
 * the connection's SASL result. Security failures propagate unchanged; any
 * other failure is wrapped as an AMQP internal error.
 */
@Override
public void initialize() throws Exception {
if (!isInitialized()) {
super.initialize();
if (sessionSPI != null) {
try {
sessionSPI.init(this, connection.getSASLResult());
} catch (ActiveMQSecurityException e) {
throw e;
} catch (Exception e) {
throw new ActiveMQAMQPInternalErrorException(e.getMessage(), e);
}
}
}
}
/**
 * Detaches and closes the sender registered under the given key, reporting
 * any close failure as an error condition on the proton link.
 *
 * @param consumer key into the senders map — NOTE(review): the map is keyed
 *                 by proton Sender, so callers appear to pass the Sender
 *                 here; confirm against call sites
 * @param queueName not used by this implementation
 */
public void disconnect(Object consumer, String queueName) {
ProtonServerSenderContext protonConsumer = senders.remove(consumer);
if (protonConsumer != null) {
serverSenders.remove(protonConsumer.getBrokerConsumer());
try {
protonConsumer.close(false);
} catch (ActiveMQAMQPException e) {
// Could not close cleanly: surface the AMQP error on the link instead.
protonConsumer.getSender().setTarget(null);
protonConsumer.getSender().setCondition(new ErrorCondition(e.getAmqpError(), e.getMessage()));
}
}
}
/** @return the next delivery tag to use for a transfer on this session. */
public byte[] getTag() {
return tagCache.getNextTag();
}
/** Returns a delivery tag to the cache so it can be reused. */
public void replaceTag(byte[] tag) {
tagCache.returnTag(tag);
}
/**
 * Tears down every receiver and sender on this session, then closes the
 * session callback. Idempotent: subsequent calls are no-ops. Individual
 * link-close failures are logged and do not stop the teardown.
 */
public void close() {
if (closed) {
return;
}
// Making a copy to avoid ConcurrentModificationException during the iteration
Set<ProtonAbstractReceiver> receiversCopy = new HashSet<>();
receiversCopy.addAll(receivers.values());
for (ProtonAbstractReceiver protonProducer : receiversCopy) {
try {
protonProducer.close(false);
} catch (Exception e) {
// Best effort: keep closing the remaining links.
log.warn(e.getMessage(), e);
}
}
receivers.clear();
Set<ProtonServerSenderContext> protonSendersClone = new HashSet<>();
protonSendersClone.addAll(senders.values());
for (ProtonServerSenderContext protonConsumer : protonSendersClone) {
try {
protonConsumer.close(false);
} catch (Exception e) {
log.warn(e.getMessage(), e);
}
}
senders.clear();
serverSenders.clear();
try {
if (sessionSPI != null) {
sessionSPI.close();
}
} catch (Exception e) {
log.warn(e.getMessage(), e);
}
closed = true;
}
/** Detaches a receiver: removes its broker-side producer and forgets the link. */
public void removeReceiver(Receiver receiver) {
sessionSPI.removeProducer(receiver.getName());
receivers.remove(receiver);
}
/**
 * Wires a transaction coordinator link: advertises the supported transaction
 * capabilities, then opens the receiver and grants it credit on the
 * connection thread.
 */
public void addTransactionHandler(Coordinator coordinator, Receiver receiver) {
ProtonTransactionHandler transactionHandler = new ProtonTransactionHandler(sessionSPI, connection);
coordinator.setCapabilities(Symbol.getSymbol("amqp:local-transactions"), Symbol.getSymbol("amqp:multi-txns-per-ssn"), Symbol.getSymbol("amqp:multi-ssns-per-txn"));
receiver.setContext(transactionHandler);
// open/flow/flush must run on the connection thread.
connection.runNow(() -> {
receiver.open();
receiver.flow(connection.getAmqpCredits());
connection.flush();
});
}
/** Attaches a sender with no explicit controller. */
public void addSender(Sender sender) throws Exception {
addSender(sender, (SenderController)null);
}
/**
 * Creates the appropriate sender context — client-side when the link context
 * marks it outgoing, server-side otherwise — and attaches it.
 */
public void addSender(Sender sender, SenderController senderController) throws Exception {
// TODO: Remove this check when we have support for global link names
boolean outgoing = (sender.getContext() != null && sender.getContext().equals(true));
ProtonServerSenderContext protonSender = outgoing ? new ProtonClientSenderContext(connection, sender, this, sessionSPI) : new ProtonServerSenderContext(connection, sender, this, sessionSPI, senderController);
addSender(sender, protonSender);
}
/**
 * Registers and starts an already-created sender context. On an AMQP failure
 * the partial registration is rolled back and the link is closed with the
 * error condition attached.
 */
public void addSender(Sender sender, ProtonServerSenderContext protonSender) throws Exception {
try {
protonSender.initialize();
senders.put(sender, protonSender);
serverSenders.put(protonSender.getBrokerConsumer(), protonSender);
sender.setContext(protonSender);
if (sender.getLocalState() != EndpointState.ACTIVE) {
connection.runNow(() -> {
sender.open();
connection.flush();
});
}
protonSender.start();
} catch (ActiveMQAMQPException e) {
// Roll back both maps and close the link, reporting the AMQP error.
senders.remove(sender);
if (protonSender.getBrokerConsumer() != null) {
serverSenders.remove(protonSender.getBrokerConsumer());
}
sender.setSource(null);
sender.setCondition(new ErrorCondition(e.getAmqpError(), e.getMessage()));
connection.runNow(() -> {
sender.close();
connection.flush();
});
}
}
/** Forgets a sender link, both by proton Sender and by broker consumer key. */
public void removeSender(Sender sender) throws ActiveMQAMQPException {
ProtonServerSenderContext senderRemoved = senders.remove(sender);
if (senderRemoved != null) {
serverSenders.remove(senderRemoved.getBrokerConsumer());
}
}
/**
 * Attaches a mirror-controller target receiver (broker-to-broker
 * replication), announcing this broker's node ID in the link properties.
 * On failure the link is closed with the error condition attached.
 */
public void addReplicaTarget(Receiver receiver) throws Exception {
try {
AMQPMirrorControllerTarget protonReceiver = new AMQPMirrorControllerTarget(sessionSPI, connection, this, receiver, server);
protonReceiver.initialize();
receivers.put(receiver, protonReceiver);
ServerProducer serverProducer = new ServerProducerImpl(receiver.getName(), "AMQP", receiver.getTarget().getAddress());
sessionSPI.addProducer(serverProducer);
receiver.setContext(protonReceiver);
HashMap<Symbol, Object> brokerIDProperties = new HashMap<>();
brokerIDProperties.put(AMQPMirrorControllerSource.BROKER_ID, server.getNodeID().toString());
receiver.setProperties(brokerIDProperties);
connection.runNow(() -> {
receiver.open();
connection.flush();
});
} catch (ActiveMQAMQPException e) {
// Roll back registration and close the link with the AMQP error.
receivers.remove(receiver);
receiver.setTarget(null);
receiver.setCondition(new ErrorCondition(e.getAmqpError(), e.getMessage()));
connection.runNow(() -> {
receiver.close();
connection.flush();
});
}
}
/**
 * Attaches a regular incoming (producer) link and registers a matching
 * broker-side producer for it; on failure the link is closed with the
 * error condition attached.
 */
public void addReceiver(Receiver receiver) throws Exception {
try {
ProtonServerReceiverContext protonReceiver = new ProtonServerReceiverContext(sessionSPI, connection, this, receiver);
protonReceiver.initialize();
receivers.put(receiver, protonReceiver);
ServerProducer serverProducer = new ServerProducerImpl(receiver.getName(), "AMQP", receiver.getTarget().getAddress());
sessionSPI.addProducer(serverProducer);
receiver.setContext(protonReceiver);
connection.runNow(() -> {
receiver.open();
connection.flush();
});
} catch (ActiveMQAMQPException e) {
receivers.remove(receiver);
receiver.setTarget(null);
receiver.setCondition(new ErrorCondition(e.getAmqpError(), e.getMessage()));
connection.runNow(() -> {
receiver.close();
connection.flush();
});
}
}
public int getReceiverCount() {
return receivers.size();
}
public Map<Receiver, ProtonAbstractReceiver> getReceivers() {
return receivers;
}
public int getSenderCount() {
return senders.size();
}
}
| |
// Copyright 1997 Association for Universities for Research in Astronomy, Inc.,
// Observatory Control System, Gemini Telescopes Project.
// See the file LICENSE for complete details.
//
// $Id: CanopusFeature.java 36086 2011-07-13 13:15:28Z abrighton $
//
package jsky.app.ot.gemini.gems;
import edu.gemini.shared.util.immutable.Option;
import edu.gemini.shared.util.immutable.Some;
import edu.gemini.shared.util.immutable.None;
import edu.gemini.spModel.gemini.gems.Canopus;
import edu.gemini.spModel.guide.GuideProbe;
import edu.gemini.spModel.obs.context.ObsContext;
import edu.gemini.spModel.obscomp.SPInstObsComp;
import edu.gemini.spModel.target.SPTarget;
import edu.gemini.spModel.target.WatchablePos;
import edu.gemini.spModel.target.env.GuideProbeTargets;
import edu.gemini.spModel.target.env.TargetEnvironment;
import edu.gemini.spModel.target.env.TargetEnvironmentDiff;
import jsky.app.ot.tpe.*;
import jsky.app.ot.tpe.feat.TpeGuidePosCreateableItem;
import jsky.app.ot.util.BasicPropertyList;
import jsky.app.ot.util.OtColor;
import jsky.app.ot.util.PropertyWatcher;
import javax.swing.*;
import java.awt.*;
import java.awt.geom.AffineTransform;
import java.awt.geom.Area;
import java.awt.geom.Point2D;
import java.awt.geom.Rectangle2D;
import java.awt.image.BufferedImage;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.Collection;
import java.util.Collections;
/**
* Draws the Canopus AO field of view and probe ranges.
*/
public final class CanopusFeature extends TpeImageFeature implements PropertyWatcher, TpeModeSensitive, TpeDragSensitive {
// Transform from arcsec offsets (relative to the base position) to screen
// pixels; rebuilt in reinit().
private AffineTransform trans;
// True when the last draw() found an empty CWFS3 probe range (nothing drawn).
private boolean isEmpty;
// Color for AO WFS limit.
private static final Color AO_FOV_COLOR = Color.RED;
private static final Color PROBE_RANGE_COLOR = OtColor.SALMON;
// Composite used for drawing items that block the view
private static final Composite BLOCKED = AlphaComposite.getInstance(AlphaComposite.SRC_OVER, 0.5F);
// Property used to control drawing of the probe ranges.
private static final BasicPropertyList props = new BasicPropertyList(CanopusFeature.class.getName());
private static final String PROP_SHOW_RANGES = "Show Probe Ranges";
static {
props.registerBooleanProperty(PROP_SHOW_RANGES, true);
}
// Which probe ranges to display; narrowed to a single probe while the user
// creates or drags a guide star for that probe.
private enum RangeDisplayMode {
probe1(true, false), probe2(false, true), both(true, true);
private final boolean show1;
private final boolean show2;
RangeDisplayMode(boolean show1, boolean show2) {
this.show1 = show1;
this.show2 = show2;
}
boolean show1() { return show1; }
boolean show2() { return show2; }
}
private RangeDisplayMode rangeMode = RangeDisplayMode.both;
/**
 * Construct the feature with its name and description.
 */
public CanopusFeature() {
super("Canopus", "Show the field of view of the Canopus WFS probes.");
}
/**
 * A property has changed; repaint so the new setting takes effect.
 *
 * @see PropertyWatcher
 */
public void propertyChange(String propName) {
_iw.repaint();
}
/**
 * Override getProperties to return the properties supported by this
 * feature.
 */
@Override
public BasicPropertyList getProperties() {
return props;
}
/**
 * Turn on/off the drawing of probe ranges.
 */
public void setDrawProbeRanges(boolean draw) {
props.setBoolean(PROP_SHOW_RANGES, draw);
}
/**
 * Gets the drawing of probe ranges.
 */
public boolean getDrawProbeRanges() {
return props.getBoolean(PROP_SHOW_RANGES, true);
}
// Repaints whenever the set of selected offset positions changes.
private PropertyChangeListener selListener = new PropertyChangeListener() {
public void propertyChange(PropertyChangeEvent evt) {
_redraw();
}
};
/**
 * Reinitialize (recalculate the positions and redraw).
 */
public void reinit(TpeImageWidget iw, TpeImageInfo tii) {
_stopMonitorOffsetSelections(selListener);
super.reinit(iw, tii);
props.addWatcher(this);
SPInstObsComp inst = _iw.getInstObsComp();
if (inst == null) return;
// arrange to be notified if telescope positions are added, removed, or selected
_monitorPosList();
// Monitor the selections of offset positions, since that affects the positions drawn
_monitorOffsetSelections(selListener);
Point2D.Double base = tii.getBaseScreenPos();
double ppa = tii.getPixelsPerArcsec();
// Build base-translate -> rotate -> scale; note transforms compose in
// reverse, so points are scaled, rotated, then translated.
trans = new AffineTransform();
trans.translate(base.x, base.y);
// The model already used the position angle, so just rotate by the difference between north and up in the image
trans.rotate(-tii.getTheta());
trans.scale(ppa, ppa);
}
@Override public void unloaded() {
props.deleteWatcher(this);
super.unloaded();
}
/**
 * Implements the TelescopePosWatcher interface.
 * @param tp
 */
public void telescopePosLocationUpdate(WatchablePos tp) {
_redraw();
}
/**
 * Implements the TelescopePosWatcher interface.
 * @param tp
 */
public void telescopePosGenericUpdate(WatchablePos tp) {
_redraw();
}
protected void handleTargetEnvironmentUpdate(TargetEnvironmentDiff diff) {
_redraw();
}
/**
 * Schedule a redraw of the image feature.
 */
private void _redraw() {
if (_iw != null) _iw.repaint();
}
// Orientation of the hatching lines drawn in the probe 1 and 2 ranges
// (vertical for probe 2, horizontal for probe 1; both overlaid in the key).
private enum Orientation {
vertical,
horizontal,
}
// Creates a Paint that is used for filling the probe 1 and 2 ranges.
private static Paint createProbeRangePaint(Graphics2D g2d, Orientation s) {
return createProbeRangePaint(g2d, s, 16, 0.16, 0.4);
}
// Denser/stronger variant used for the legend ("key") icons.
private static Paint createProbeRangeKeyPaint(Graphics2D g2d, Orientation s) {
return createProbeRangePaint(g2d, s, 8, 0.32, 0.8);
}
/**
 * Builds a translucent hatched TexturePaint for the probe ranges.
 *
 * @param s         line orientation (vertical or horizontal)
 * @param skip      pixel spacing between hatch lines
 * @param alphaBg   alpha of the background wash
 * @param alphaLine alpha of the hatch lines
 */
private static Paint createProbeRangePaint(Graphics2D g2d, Orientation s, int skip, double alphaBg, double alphaLine) {
final int size = skip*2;
final Rectangle2D.Double rec = new Rectangle2D.Double(0, 0, size, size);
// Get a buffered image capable of being transparent.
BufferedImage bim = g2d.getDeviceConfiguration().createCompatibleImage(size, size, Transparency.TRANSLUCENT);
Graphics2D bimg = bim.createGraphics();
// Shade it with a light red color almost completely transparent.
bimg.setColor(OtColor.makeTransparent(PROBE_RANGE_COLOR, alphaBg));
bimg.setComposite(AlphaComposite.Src);
bimg.fill(rec);
// Now draw the slanting lines, which are also pretty transparent
// though not quite as much as the background.
bimg.setClip(0, 0, size, size);
bimg.setColor(OtColor.makeTransparent(PROBE_RANGE_COLOR, alphaLine));
if (s == Orientation.vertical) {
for (int x=0; x<size; x += skip) {
bimg.drawLine(x, 0, x, size);
}
} else {
for (int y=0; y<size; y += skip) {
bimg.drawLine(0, y, size, y);
}
}
bimg.dispose();
return new TexturePaint(bim, rec);
}
// If _flipRA is -1, flip the RA axis of the area
private Area flipArea(Area a) {
if (_flipRA == -1) {
a = a.createTransformedArea(AffineTransform.getScaleInstance(_flipRA, 1.0));
}
return a;
}
/**
 * Draw the feature: the CWFS3 AO field of view, optionally the CWFS1/CWFS2
 * probe ranges (per rangeMode), and the CWFS1/CWFS2 probe arms.
 */
public void draw(Graphics g, TpeImageInfo tii) {
if (!isEnabled(_iw.getContext())) return;
if (trans == null) return;
Graphics2D g2d = (Graphics2D) g;
Color c = g2d.getColor();
Option<ObsContext> ctxOpt = _iw.getObsContext();
if (ctxOpt.isEmpty()) return;
ObsContext ctx = ctxOpt.getValue();
// Draw the AO window itself. A circle.
Area a = Canopus.Wfs.cwfs3.probeRange(ctx);
isEmpty = a.isEmpty();
if (isEmpty) return;
Shape s = trans.createTransformedShape(flipArea(a));
g2d.setColor(AO_FOV_COLOR);
g2d.draw(s);
// Draw the probe 1 and probe 2 ranges.
if (getDrawProbeRanges()) {
Area a1 = new Area(flipArea(Canopus.Wfs.cwfs1.probeRange(ctx))).createTransformedArea(trans);
Area a2 = new Area(flipArea(Canopus.Wfs.cwfs2.probeRange(ctx))).createTransformedArea(trans);
g2d.setColor(OtColor.makeTransparent(AO_FOV_COLOR, 0.3));
if (rangeMode.show1()) g2d.draw(a1);
if (rangeMode.show2()) g2d.draw(a2);
// Fill with hatching, restoring the original Paint afterwards.
Paint p = g2d.getPaint();
if (rangeMode.show1()) {
g2d.setPaint(createProbeRangePaint(g2d, Orientation.horizontal));
g2d.fill(a1);
}
if (rangeMode.show2()) {
g2d.setPaint(createProbeRangePaint(g2d, Orientation.vertical));
g2d.fill(a2);
}
g2d.setPaint(p);
}
drawProbeArm(g2d, tii, ctx, Canopus.Wfs.cwfs1);
drawProbeArm(g2d, tii, ctx, Canopus.Wfs.cwfs2);
// cwfs3 probe arm is not displayed
// drawProbeArm(g2d, tii, ctx, Canopus.Wfs.cwfs3);
g2d.setColor(c);
}
// draw the probe arm for the given wfs
private void drawProbeArm(Graphics2D g2d, TpeImageInfo tii, ObsContext ctx, Canopus.Wfs wfs) {
wfs.probeArm(ctx, true).foreach(a -> {
if (a != null) {
Shape s = trans.createTransformedShape(flipArea(a));
g2d.setColor(AO_FOV_COLOR);
g2d.draw(s);
// Fill semi-transparently to show the arm blocks the view beneath it.
Composite c = g2d.getComposite();
g2d.setComposite(BLOCKED);
g2d.fill(s);
g2d.setComposite(c);
}
});
}
@Override public boolean isEnabled(TpeContext ctx) {
if (!super.isEnabled(ctx)) return false;
return ctx.gems().isDefined();
}
// Switches the displayed range mode and repaints only on an actual change.
private void setRangeDisplayMode(RangeDisplayMode mode) {
if (rangeMode == mode) return;
rangeMode = mode;
_redraw();
}
/**
 * When creating a CWFS1/CWFS2 guide star, show only that probe's range;
 * in every other mode show both.
 */
public void handleModeChange(TpeMode mode, Option<Object> arg) {
if ((mode != TpeMode.CREATE) || arg.isEmpty()) {
setRangeDisplayMode(RangeDisplayMode.both);
return;
}
Object value = arg.getValue();
if (!(value instanceof TpeGuidePosCreateableItem)) {
setRangeDisplayMode(RangeDisplayMode.both);
return;
}
TpeGuidePosCreateableItem item = (TpeGuidePosCreateableItem) value;
GuideProbe guider = item.getGuideProbe();
if (guider == Canopus.Wfs.cwfs1) {
setRangeDisplayMode(RangeDisplayMode.probe1);
} else if (guider == Canopus.Wfs.cwfs2) {
setRangeDisplayMode(RangeDisplayMode.probe2);
} else {
setRangeDisplayMode(RangeDisplayMode.both);
}
}
// True when the target belongs to the primary guide group of the given probe.
private static boolean containsTarget(TargetEnvironment env, GuideProbe guider, SPTarget target) {
final Option<GuideProbeTargets> gtOpt = env.getPrimaryGuideProbeTargets(guider);
return gtOpt.exists(gt -> gt.containsTarget(target));
}
/**
 * While dragging a CWFS1/CWFS2 guide star, show only that probe's range.
 */
public void handleDragStarted(Object dragObject, ObsContext context) {
if (!(dragObject instanceof SPTarget)) return;
SPTarget target = (SPTarget) dragObject;
TargetEnvironment env = context.getTargets();
if (env == null) {
setRangeDisplayMode(RangeDisplayMode.both);
return;
}
if (containsTarget(env, Canopus.Wfs.cwfs1, target)) {
setRangeDisplayMode(RangeDisplayMode.probe1);
} else if (containsTarget(env, Canopus.Wfs.cwfs2, target)) {
setRangeDisplayMode(RangeDisplayMode.probe2);
} else {
setRangeDisplayMode(RangeDisplayMode.both);
}
}
public void handleDragStopped(ObsContext context) {
setRangeDisplayMode(RangeDisplayMode.both);
}
// Legend icon: a black square filled with one (or both) hatch orientations.
private static class ProbeRangeIcon implements Icon {
private final Orientation[] slants;
ProbeRangeIcon(Orientation... slants) {
this.slants = slants;
}
public int getIconWidth() { return 18; }
public int getIconHeight() { return 18; }
public void paintIcon(Component c, Graphics g, int x, int y) {
Graphics2D g2d = (Graphics2D) g;
g2d.setColor(Color.black);
g2d.fill(new Rectangle2D.Double(1, 1, 16, 16));
Paint origPaint = g2d.getPaint();
for (Orientation slant : slants) {
Paint p = createProbeRangeKeyPaint(g2d, slant);
g2d.setPaint(p);
g2d.fill(new Rectangle2D.Double(1, 1, 16, 16));
}
g2d.setPaint(origPaint);
}
}
/**
 * Builds the legend panel mapping each hatch pattern to CWFS1, CWFS2, or both.
 */
@Override
public Option<Component> getKey() {
JPanel pan = new JPanel(new GridBagLayout());
pan.add(new JLabel("CWFS1", new ProbeRangeIcon(Orientation.horizontal), JLabel.LEFT) {{ setForeground(Color.black); }},
new GridBagConstraints() {{
gridx=0; gridy=0; anchor=WEST; fill=HORIZONTAL;
}}
);
pan.add(new JLabel("CWFS2", new ProbeRangeIcon(Orientation.vertical), JLabel.LEFT) {{ setForeground(Color.black); }},
new GridBagConstraints() {{
gridx=0; gridy=1; anchor=WEST; fill=HORIZONTAL;
}}
);
pan.add(new JLabel("Both", new ProbeRangeIcon(Orientation.horizontal, Orientation.vertical), JLabel.LEFT) {{ setForeground(Color.black); }},
new GridBagConstraints() {{
gridx=0; gridy=2; anchor=WEST; fill=HORIZONTAL;
}}
);
return new Some<Component>(pan);
}
public TpeImageFeatureCategory getCategory() {
return TpeImageFeatureCategory.fieldOfView;
}
private static final TpeMessage WARNING = TpeMessage.warningMessage(
"No valid region for CWFS stars. Check offset positions.");
// Warn when the last draw() found no valid CWFS region (isEmpty == true).
public Option<Collection<TpeMessage>> getMessages() {
if (!isEmpty) return None.instance();
return new Some<Collection<TpeMessage>>(Collections.singletonList(WARNING));
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.HashMultimap;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Multimap;
import com.google.javascript.jscomp.NodeTraversal.Callback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import java.util.Collection;
import java.util.List;
/**
* Reduces the size of common function expressions.
*
* This pass will rewrite:
*
* C.prototype.getA = function() { return this.a_ };
* C.prototype.setA = function(newValue) { this.a_ = newValue };
*
* as:
*
 * C.prototype.getA = JSCompiler_get("a_");
 * C.prototype.setA = JSCompiler_set("a_");
*
* if by doing so we will save bytes, after the helper functions are
* added and renaming is done.
*
*/
class FunctionRewriter implements CompilerPass {
private final AbstractCompiler compiler;
// Safety margin used to avoid growing simple programs by a few bytes.
// Selected arbitrarily.
private static final int SAVINGS_THRESHOLD = 16;
FunctionRewriter(AbstractCompiler compiler) {
this.compiler = compiler;
}
/**
 * Runs the pass: gathers candidate reductions across the AST, then applies
 * each reducer's reductions only if their aggregate estimated byte savings
 * exceed the cost of injecting that reducer's helper function plus the
 * SAVINGS_THRESHOLD safety margin.
 *
 * @param externs extern tree (not used by this pass)
 * @param root    root of the tree to optimize
 */
@Override
public void process(Node externs, Node root) {
List<Reducer> reducers = ImmutableList.of(new ReturnConstantReducer(),
new GetterReducer(),
new SetterReducer(),
new EmptyFunctionReducer(),
new IdentityReducer());
Multimap<Reducer, Reduction> reductionMap = HashMultimap.create();
// Accumulate possible reductions in the reduction multi-map. They
// will be applied in the loop below.
NodeTraversal.traverseEs6(compiler, root,
new ReductionGatherer(reducers, reductionMap));
// Apply reductions iff they will provide some savings.
for (Reducer reducer : reducers) {
Collection<Reduction> reductions = reductionMap.get(reducer);
if (reductions.isEmpty()) {
continue;
}
// If the helper source fails to parse, skip this reducer entirely.
Node helperCode = parseHelperCode(reducer);
if (helperCode == null) {
continue;
}
int helperCodeCost = InlineCostEstimator.getCost(helperCode);
// Estimate savings
int savings = 0;
for (Reduction reduction : reductions) {
savings += reduction.estimateSavings();
}
// Compare estimated savings against the helper cost. Apply
// reductions if doing so will result in some savings.
if (savings > (helperCodeCost + SAVINGS_THRESHOLD)) {
for (Reduction reduction : reductions) {
reduction.apply();
}
// Insert the helper at the front so it is defined before any
// rewritten call site executes.
Node addingRoot = compiler.getNodeForCodeInsertion(null);
addingRoot.addChildrenToFront(helperCode);
compiler.reportCodeChange();
}
}
}
/**
* Parse helper code needed by a reducer.
*
* @return Helper code root. If parse fails, return null.
*/
public Node parseHelperCode(Reducer reducer) {
  // The synthetic source name records which reducer supplied the helper.
  String sourceName = reducer.getClass() + ":helper";
  Node parsed = compiler.parseSyntheticCode(sourceName, reducer.getHelperSource());
  if (parsed == null) {
    return null;
  }
  return parsed.removeFirstChild();
}
// A node is reduceable when it is a function expression that is not the
// value of an object-literal getter or setter key.
private static boolean isReduceableFunctionExpression(Node n) {
  if (!NodeUtil.isFunctionExpression(n)) {
    return false;
  }
  return !NodeUtil.isGetOrSetKey(n.getParent());
}
/**
* Information needed to apply a reduction.
*/
private class Reduction {
  // Parent under which the swap happens, the subtree being replaced, and the
  // cheaper subtree that replaces it.
  private final Node parent;
  private final Node original;
  private final Node replacement;

  Reduction(Node parent, Node oldChild, Node newChild) {
    this.parent = parent;
    this.original = oldChild;
    this.replacement = newChild;
  }

  /** Performs the substitution and reports the code change to the compiler. */
  void apply() {
    parent.replaceChild(original, replacement);
    compiler.reportCodeChange();
  }

  /** Estimated bytes saved: cost of the old subtree minus cost of the new. */
  int estimateSavings() {
    int costBefore = InlineCostEstimator.getCost(original);
    int costAfter = InlineCostEstimator.getCost(replacement);
    return costBefore - costAfter;
  }
}
/**
* Gathers a list of reductions to apply later by doing an in-order
* AST traversal. If a suitable reduction is found, stop traversal
* in that branch.
*/
private class ReductionGatherer implements Callback {
  private final List<Reducer> reducers;
  private final Multimap<Reducer, Reduction> reductions;

  /**
   * @param reducers List of reducers to apply during traversal.
   * @param reductions Reducer -> Reduction multimap,
   *     populated during traversal.
   */
  ReductionGatherer(List<Reducer> reducers,
      Multimap<Reducer, Reduction> reductions) {
    this.reducers = reducers;
    this.reductions = reductions;
  }

  // Fixed misspelled parameter name "raversal" -> "traversal".
  @Override
  public boolean shouldTraverse(NodeTraversal traversal,
      Node node,
      Node parent) {
    for (Reducer reducer : reducers) {
      Node replacement = reducer.reduce(node);
      if (replacement != node) {
        reductions.put(reducer, new Reduction(parent, node, replacement));
        // A reduction was found; do not descend into the replaced branch.
        return false;
      }
    }
    return true;
  }

  @Override
  public void visit(NodeTraversal traversal, Node node, Node parent) {
    // No per-node work on exit; all gathering happens in shouldTraverse.
  }
}
/**
 * Interface implemented by the strength-reduction optimizers below.
 */
abstract static class Reducer {
  /**
   * @return JS source for helper methods used by this reduction.
   */
  abstract String getHelperSource();

  /**
   * @return root of the reduced subtree if a reduction was applied;
   * otherwise returns the node argument unchanged.
   */
  abstract Node reduce(Node node);

  /**
   * Builds a method call based on the given method name,
   * argument and history.
   *
   * @param methodName Method to call.
   * @param argumentNode Method argument; may be null for a no-argument
   *     call. The argument is cloned, not reparented.
   * @param srcref Node whose source position is copied onto the new call.
   * @return the newly built CALL node, marked as a free call.
   */
  protected final Node buildCallNode(String methodName, Node argumentNode,
      Node srcref) {
    Node call = IR.call(IR.name(methodName)).srcref(srcref);
    // The callee is a bare name, so no "this" should be bound at the call
    // site: mark the call as free.
    call.putBooleanProp(Node.FREE_CALL, true);
    if (argumentNode != null) {
      call.addChildToBack(argumentNode.cloneTree());
    }
    return call;
  }
}
/**
 * Replaces empty function expressions with calls to a factory method
 * that produces an empty function.
 *
 * Example:
 *   a.prototype.b = function() {}
 * is reduced to:
 *   a.prototype.b = JSCompiler_emptyFn();
 */
private static class EmptyFunctionReducer extends Reducer {
  static final String FACTORY_METHOD_NAME = "JSCompiler_emptyFn";
  static final String HELPER_SOURCE =
      "function " + FACTORY_METHOD_NAME + "() {" +
      " return function() {}" +
      "}";

  @Override
  public String getHelperSource() {
    return HELPER_SOURCE;
  }

  @Override
  public Node reduce(Node node) {
    // Only empty function expressions qualify; everything else is untouched.
    if (!NodeUtil.isEmptyFunctionExpression(node)) {
      return node;
    }
    return buildCallNode(FACTORY_METHOD_NAME, null, node);
  }
}
/**
 * Base class for reducers that match functions whose body consists of
 * exactly one return statement.
 */
abstract static class SingleReturnStatementReducer extends Reducer {

  /**
   * @return the returned expression when the function body is a single
   *     return statement; null otherwise.
   */
  protected final Node maybeGetSingleReturnRValue(Node functionNode) {
    Node body = functionNode.getLastChild();
    if (body.hasOneChild()) {
      Node onlyStatement = body.getFirstChild();
      if (onlyStatement.isReturn()) {
        return onlyStatement.getFirstChild();
      }
    }
    return null;
  }
}
/**
 * Replaces identity functions with calls to an identity-function factory.
 *
 * Example:
 *   a.prototype.b = function(a) {return a}
 * is reduced to:
 *   a.prototype.b = JSCompiler_identityFn();
 */
private static class IdentityReducer extends SingleReturnStatementReducer {
  static final String FACTORY_METHOD_NAME = "JSCompiler_identityFn";
  static final String HELPER_SOURCE =
      "function " + FACTORY_METHOD_NAME + "() {" +
      " return function(" + FACTORY_METHOD_NAME + "_value) {" +
      "return " + FACTORY_METHOD_NAME + "_value}" +
      "}";

  @Override
  public String getHelperSource() {
    return HELPER_SOURCE;
  }

  @Override
  public Node reduce(Node node) {
    if (isReduceableFunctionExpression(node) && isIdentityFunction(node)) {
      return buildCallNode(FACTORY_METHOD_NAME, null, node);
    }
    return node;
  }

  /**
   * Checks if the function matches the pattern:
   *   function(<value>, <rest>) {return <value>}
   *
   * @return Whether the function matches the pattern.
   */
  private boolean isIdentityFunction(Node functionNode) {
    Node firstParam = functionNode.getSecondChild().getFirstChild();
    if (firstParam == null) {
      return false;
    }
    Node returned = maybeGetSingleReturnRValue(functionNode);
    if (returned == null || !returned.isName()) {
      return false;
    }
    return returned.getString().equals(firstParam.getString());
  }
}
/**
 * Replaces functions that return an immutable constant literal with
 * calls to a constant-returning function factory.
 *
 * Example:
 *   a.prototype.b = function() {return 10}
 * is reduced to:
 *   a.prototype.b = JSCompiler_returnArg(10);
 */
private static class ReturnConstantReducer
    extends SingleReturnStatementReducer {
  static final String FACTORY_METHOD_NAME = "JSCompiler_returnArg";
  static final String HELPER_SOURCE =
      "function " + FACTORY_METHOD_NAME +
      "(" + FACTORY_METHOD_NAME + "_value) {" +
      " return function() {return " + FACTORY_METHOD_NAME + "_value}" +
      "}";

  @Override
  public String getHelperSource() {
    return HELPER_SOURCE;
  }

  @Override
  public Node reduce(Node node) {
    if (!isReduceableFunctionExpression(node)) {
      return node;
    }
    Node constant = getValueNode(node);
    if (constant == null) {
      return node;
    }
    return buildCallNode(FACTORY_METHOD_NAME, constant, node);
  }

  /**
   * Checks if the function matches the pattern:
   *   function(<args>) {return <immutable value>}
   * and returns <immutable value> if a match is found.
   *
   * @return the immutable value node; or null.
   */
  private Node getValueNode(Node functionNode) {
    Node returned = maybeGetSingleReturnRValue(functionNode);
    if (returned == null) {
      return null;
    }
    return NodeUtil.isImmutableValue(returned) ? returned : null;
  }
}
/**
 * Replaces property getter method declarations with calls to a
 * getter-function factory.
 *
 * Example:
 *   a.prototype.b = function() {return this.b_}
 * is reduced to:
 *   a.prototype.b = JSCompiler_get("b_");
 */
private static class GetterReducer extends SingleReturnStatementReducer {
  static final String FACTORY_METHOD_NAME = "JSCompiler_get";
  static final String HELPER_SOURCE =
      "function " + FACTORY_METHOD_NAME + "(" +
      FACTORY_METHOD_NAME + "_name) {" +
      " return function() {return this[" + FACTORY_METHOD_NAME + "_name]}" +
      "}";

  @Override
  public String getHelperSource() {
    return HELPER_SOURCE;
  }

  @Override
  public Node reduce(Node node) {
    if (!isReduceableFunctionExpression(node)) {
      return node;
    }
    Node propName = getGetPropertyName(node);
    if (propName == null) {
      return node;
    }
    // The property name must already be a STRING node; anything else
    // indicates a broken AST rather than a non-match.
    if (!propName.isString()) {
      throw new IllegalStateException(
          "Expected STRING, got " + propName.getType());
    }
    return buildCallNode(FACTORY_METHOD_NAME, propName, node);
  }

  /**
   * Checks if the function matches the pattern:
   *   function(<args>) {return this.<name>}
   * and returns <name> if a match is found.
   *
   * @return STRING node naming the "this" property that is read; or null.
   */
  private Node getGetPropertyName(Node functionNode) {
    Node returned = maybeGetSingleReturnRValue(functionNode);
    boolean readsThisProperty = returned != null
        && returned.isGetProp()
        && returned.getFirstChild().isThis();
    return readsThisProperty ? returned.getLastChild() : null;
  }
}
/**
 * Replaces property setter method declarations with calls to a
 * setter-function factory.
 *
 * Example:
 *   a.prototype.setB = function(value) {this.b_ = value}
 * is reduced to:
 *   a.prototype.setB = JSCompiler_set("b_");
 */
private static class SetterReducer extends Reducer {
  static final String FACTORY_METHOD_NAME = "JSCompiler_set";
  static final String HELPER_SOURCE =
      "function " + FACTORY_METHOD_NAME + "(" +
      FACTORY_METHOD_NAME + "_name) {" +
      " return function(" + FACTORY_METHOD_NAME + "_value) {" +
      "this[" + FACTORY_METHOD_NAME + "_name] = " +
      FACTORY_METHOD_NAME + "_value}" +
      "}";

  @Override
  public String getHelperSource() {
    return HELPER_SOURCE;
  }

  @Override
  public Node reduce(Node node) {
    if (!isReduceableFunctionExpression(node)) {
      return node;
    }
    Node propName = getSetPropertyName(node);
    if (propName == null) {
      return node;
    }
    // A non-STRING property name means the AST is malformed, not a non-match.
    if (!propName.isString()) {
      throw new IllegalStateException(
          "Expected STRING, got " + propName.getType());
    }
    return buildCallNode(FACTORY_METHOD_NAME, propName, node);
  }

  /**
   * Checks if the function matches the pattern:
   *   function(<value>, <rest>) {this.<name> = <value>}
   * and returns <name> if a match is found.
   *
   * @return STRING node naming the assigned "this" property; or null.
   */
  private Node getSetPropertyName(Node functionNode) {
    Node body = functionNode.getLastChild();
    if (!body.hasOneChild()) {
      return null;
    }
    Node firstParam = functionNode.getSecondChild().getFirstChild();
    if (firstParam == null) {
      return null;
    }
    Node onlyStatement = body.getFirstChild();
    if (!NodeUtil.isExprAssign(onlyStatement)) {
      return null;
    }
    Node assign = onlyStatement.getFirstChild();
    Node target = assign.getFirstChild();
    if (!target.isGetProp() || !target.getFirstChild().isThis()) {
      return null;
    }
    Node assignedValue = assign.getLastChild();
    if (assignedValue.isName()
        && assignedValue.getString().equals(firstParam.getString())) {
      return target.getLastChild();
    }
    return null;
  }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.optimizer.spark;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.Context;
import org.apache.hadoop.hive.ql.exec.JoinOperator;
import org.apache.hadoop.hive.ql.exec.MapJoinOperator;
import org.apache.hadoop.hive.ql.exec.Operator;
import org.apache.hadoop.hive.ql.exec.ReduceSinkOperator;
import org.apache.hadoop.hive.ql.exec.SMBMapJoinOperator;
import org.apache.hadoop.hive.ql.exec.TableScanOperator;
import org.apache.hadoop.hive.ql.exec.Task;
import org.apache.hadoop.hive.ql.exec.TaskFactory;
import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
import org.apache.hadoop.hive.ql.lib.Node;
import org.apache.hadoop.hive.ql.lib.NodeProcessor;
import org.apache.hadoop.hive.ql.lib.NodeProcessorCtx;
import org.apache.hadoop.hive.ql.optimizer.GenMapRedUtils;
import org.apache.hadoop.hive.ql.optimizer.physical.GenSparkSkewJoinProcessor;
import org.apache.hadoop.hive.ql.optimizer.physical.SkewJoinProcFactory;
import org.apache.hadoop.hive.ql.optimizer.physical.SparkMapJoinResolver;
import org.apache.hadoop.hive.ql.parse.ParseContext;
import org.apache.hadoop.hive.ql.parse.SemanticException;
import org.apache.hadoop.hive.ql.parse.spark.GenSparkUtils;
import org.apache.hadoop.hive.ql.plan.BaseWork;
import org.apache.hadoop.hive.ql.plan.MapWork;
import org.apache.hadoop.hive.ql.plan.OperatorDesc;
import org.apache.hadoop.hive.ql.plan.PlanUtils;
import org.apache.hadoop.hive.ql.plan.ReduceWork;
import org.apache.hadoop.hive.ql.plan.SparkEdgeProperty;
import org.apache.hadoop.hive.ql.plan.SparkWork;
import org.apache.hadoop.hive.ql.plan.TableDesc;
import java.io.Serializable;
import java.util.List;
import java.util.Set;
import java.util.Stack;
/**
 * Spark-version of SkewJoinProcFactory.
 */
public class SparkSkewJoinProcFactory {
  private SparkSkewJoinProcFactory() {
    // prevent instantiation
  }

  /** @return the shared default processor from the common skew-join factory. */
  public static NodeProcessor getDefaultProc() {
    return SkewJoinProcFactory.getDefaultProc();
  }

  /** @return a processor that handles join operators for Spark runtime skew join. */
  public static NodeProcessor getJoinProc() {
    return new SparkSkewJoinJoinProcessor();
  }

  /**
   * Handles a JoinOperator node: when runtime skew join is supported for the
   * join, splits the current SparkTask if needed and rewrites the plan to
   * process skewed keys separately.
   */
  public static class SparkSkewJoinJoinProcessor implements NodeProcessor {
    @Override
    public Object process(Node nd, Stack<Node> stack, NodeProcessorCtx procCtx,
        Object... nodeOutputs) throws SemanticException {
      SparkSkewJoinResolver.SparkSkewJoinProcCtx context =
          (SparkSkewJoinResolver.SparkSkewJoinProcCtx) procCtx;
      Task<? extends Serializable> currentTsk = context.getCurrentTask();
      JoinOperator op = (JoinOperator) nd;
      ReduceWork reduceWork = context.getReducerToReduceWork().get(op);
      ParseContext parseContext = context.getParseCtx();
      // Proceed only when: the join is not fixed as sorted, the current task is
      // a SparkTask whose work graph actually contains this join's ReduceWork,
      // and runtime skew join is supported for this join/configuration.
      if (!op.getConf().isFixedAsSorted() && currentTsk instanceof SparkTask
          && reduceWork != null && ((SparkTask) currentTsk).getWork().contains(reduceWork)
          && GenSparkSkewJoinProcessor.supportRuntimeSkewJoin(
          op, currentTsk, parseContext.getConf())) {
        // first we try to split the task
        splitTask((SparkTask) currentTsk, reduceWork, parseContext);
        GenSparkSkewJoinProcessor.processSkewJoin(op, currentTsk, reduceWork, parseContext);
      }
      return null;
    }
  }

  /**
   * If the join is not in a leaf ReduceWork, the spark task has to be split into 2 tasks.
   *
   * <p>The split moves everything downstream of {@code reduceWork} into a new
   * SparkWork/SparkTask, with the two tasks communicating through an
   * intermediate file (FS/TS pair inserted between the ReduceSink and its
   * parent). If the preconditions are not met, the method is a no-op.
   *
   * @param currentTask the task currently containing {@code reduceWork}
   * @param reduceWork the ReduceWork holding the skew join
   * @param parseContext query-level context (conf, temp paths, join op sets)
   */
  private static void splitTask(SparkTask currentTask, ReduceWork reduceWork,
      ParseContext parseContext) throws SemanticException {
    SparkWork currentWork = currentTask.getWork();
    Set<Operator<?>> reduceSinkSet =
        SparkMapJoinResolver.getOp(reduceWork, ReduceSinkOperator.class);
    // Only split when the ReduceWork has exactly one child, the whole graph is
    // splittable (each work has at most one child), and there is exactly one
    // ReduceSink feeding downstream.
    if (currentWork.getChildren(reduceWork).size() == 1 && canSplit(currentWork)
        && reduceSinkSet.size() == 1) {
      ReduceSinkOperator reduceSink = (ReduceSinkOperator) reduceSinkSet.iterator().next();
      BaseWork childWork = currentWork.getChildren(reduceWork).get(0);
      SparkEdgeProperty originEdge = currentWork.getEdgeProperty(reduceWork, childWork);
      // disconnect the reduce work from its child. this should produce two isolated sub graphs
      currentWork.disconnect(reduceWork, childWork);
      // move works following the current reduce work into a new spark work
      SparkWork newWork =
          new SparkWork(parseContext.getConf().getVar(HiveConf.ConfVars.HIVEQUERYID));
      newWork.add(childWork);
      // copy ancestors and descendants of childWork into the new work graph
      copyWorkGraph(currentWork, newWork, childWork, true);
      copyWorkGraph(currentWork, newWork, childWork, false);
      // remove them from current spark work
      for (BaseWork baseWork : newWork.getAllWorkUnsorted()) {
        currentWork.remove(baseWork);
        currentWork.getCloneToWork().remove(baseWork);
      }
      // create TS to read intermediate data
      Context baseCtx = parseContext.getContext();
      Path taskTmpDir = baseCtx.getMRTmpPath();
      Operator<? extends OperatorDesc> rsParent = reduceSink.getParentOperators().get(0);
      TableDesc tableDesc = PlanUtils.getIntermediateFileTableDesc(PlanUtils
          .getFieldSchemasFromRowSchema(rsParent.getSchema(), "temporarycol"));
      // this will insert FS and TS between the RS and its parent
      TableScanOperator tableScanOp = GenMapRedUtils.createTemporaryFile(
          rsParent, reduceSink, taskTmpDir, tableDesc, parseContext);
      // create new MapWork
      MapWork mapWork = PlanUtils.getMapRedWork().getMapWork();
      mapWork.setName("Map " + GenSparkUtils.getUtils().getNextSeqNumber());
      newWork.add(mapWork);
      newWork.connect(mapWork, childWork, originEdge);
      // setup the new map work
      String streamDesc = taskTmpDir.toUri().toString();
      // NOTE(review): childWork is cast to ReduceWork unconditionally below —
      // this assumes the single child of a ReduceWork is always a ReduceWork.
      if (GenMapRedUtils.needsTagging((ReduceWork) childWork)) {
        Operator<? extends OperatorDesc> childReducer = ((ReduceWork) childWork).getReducer();
        String id = null;
        if (childReducer instanceof JoinOperator) {
          if (parseContext.getJoinOps().contains(childReducer)) {
            id = ((JoinOperator)childReducer).getConf().getId();
          }
        } else if (childReducer instanceof MapJoinOperator) {
          if (parseContext.getMapJoinOps().contains(childReducer)) {
            id = ((MapJoinOperator)childReducer).getConf().getId();
          }
        } else if (childReducer instanceof SMBMapJoinOperator) {
          if (parseContext.getSmbMapJoinOps().contains(childReducer)) {
            id = ((SMBMapJoinOperator)childReducer).getConf().getId();
          }
        }
        if (id != null) {
          streamDesc = id + ":$INTNAME";
        } else {
          streamDesc = "$INTNAME";
        }
        // make the alias unique within the map work by appending a counter
        String origStreamDesc = streamDesc;
        int pos = 0;
        while (mapWork.getAliasToWork().get(streamDesc) != null) {
          streamDesc = origStreamDesc.concat(String.valueOf(++pos));
        }
      }
      GenMapRedUtils.setTaskPlan(taskTmpDir.toUri().toString(), streamDesc,
          tableScanOp, mapWork, false, tableDesc);
      // insert the new task between current task and its child
      @SuppressWarnings("unchecked")
      Task<? extends Serializable> newTask = TaskFactory.get(newWork, parseContext.getConf());
      List<Task<? extends Serializable>> childTasks = currentTask.getChildTasks();
      // must have at most one child
      if (childTasks != null && childTasks.size() > 0) {
        Task<? extends Serializable> childTask = childTasks.get(0);
        currentTask.removeDependentTask(childTask);
        newTask.addDependentTask(childTask);
      }
      currentTask.addDependentTask(newTask);
      newTask.setFetchSource(currentTask.isFetchSource());
    }
  }

  /**
   * Whether we can split at reduceWork. For simplicity, let's require each work can
   * have at most one child work. This may be relaxed by checking connectivity of the
   * work graph after disconnect the current reduce work from its child
   */
  private static boolean canSplit(SparkWork sparkWork) {
    for (BaseWork baseWork : sparkWork.getAllWorkUnsorted()) {
      if (sparkWork.getChildren(baseWork).size() > 1) {
        return false;
      }
    }
    return true;
  }

  /**
   * Copy a sub-graph from originWork to newWork.
   *
   * @param upWards true to copy the ancestors of {@code baseWork},
   *                false to copy its descendants
   */
  private static void copyWorkGraph(SparkWork originWork, SparkWork newWork,
      BaseWork baseWork, boolean upWards) {
    if (upWards) {
      for (BaseWork parent : originWork.getParents(baseWork)) {
        newWork.add(parent);
        SparkEdgeProperty edgeProperty = originWork.getEdgeProperty(parent, baseWork);
        newWork.connect(parent, baseWork, edgeProperty);
        copyWorkGraph(originWork, newWork, parent, true);
      }
    } else {
      for (BaseWork child : originWork.getChildren(baseWork)) {
        newWork.add(child);
        SparkEdgeProperty edgeProperty = originWork.getEdgeProperty(baseWork, child);
        newWork.connect(baseWork, child, edgeProperty);
        copyWorkGraph(originWork, newWork, child, false);
      }
    }
  }
}
| |
package fileOperation;
/**
* Created by anjum parvez ali on 5/14/16.
*/
import java.io.*;
import java.nio.file.*;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Properties;
import java.util.regex.Pattern;
/**
 * File-system facade providing hashing, ranged read/write, and basic file
 * management for a single file identified at construction time.
 *
 * Fixes over the previous revision:
 * - getHash and the 4-arg fileMatchDescription now honor their offset/length
 *   parameters (previously ignored; length &lt;= 0 still means "to end of file",
 *   so existing whole-file callers behave as before).
 * - All streams/RandomAccessFiles are closed via try-with-resources (the old
 *   code leaked handles when an IOException was thrown mid-read).
 * - The triplicated SHA-1 loop is consolidated into one private helper.
 * - The 4-arg fileMatchDescription no longer NPEs on a null hashAlgo.
 */
public class FileManager implements IFileFacade {
    /** Only supported digest algorithm; also the default when none is given. */
    private static final String DEFAULT_HASH_ALGO = "SHA-1";

    private final Path path_to_file;

    /**
     * @param path platform-specific path to the target file; the file need
     *             not exist yet.
     */
    public FileManager(String path) {
        String fileSeparator = System.getProperty("file.separator");
        String pattern = Pattern.quote(fileSeparator);
        String[] parts = path.split(pattern);
        // Rebuild the directory portion (everything but the last component).
        StringBuilder directory = new StringBuilder();
        for (int i = 0; i < parts.length - 1; i++) {
            directory.append(parts[i]).append(fileSeparator);
        }
        path_to_file = FileSystems.getDefault().getPath(directory.toString(), parts[parts.length - 1]);
    }

    /**
     * Computes the hash of a region of the file.
     *
     * @param hashAlgo hash algorithm name; null/empty defaults to SHA-1;
     *                 anything other than SHA-1 is unsupported
     * @param offset   byte offset to start hashing from
     * @param length   number of bytes to hash; values &lt;= 0 mean "to EOF"
     * @return the digest bytes, or null if the file is missing/unreadable,
     *         the algorithm is unsupported, or an I/O error occurs
     */
    public byte[] getHash(String hashAlgo, int offset, long length) {
        if (Files.notExists(this.path_to_file) || !Files.isReadable(this.path_to_file)) {
            return null;
        }
        if (hashAlgo == null || hashAlgo.isEmpty()) {
            hashAlgo = DEFAULT_HASH_ALGO;
        }
        if (!DEFAULT_HASH_ALGO.equals(hashAlgo)) {
            return null;
        }
        return computeSha1(offset, length);
    }

    /**
     * Verifies the whole file against the given hash.
     *
     * @return HASH_MATCH / HASH_DOES_NOT_MATCH, or a failure status
     */
    public OpenFileOperationStatus fileMatchDescription(byte[] hash, String hashAlgo) {
        // Whole-file check is the ranged check with offset 0 and "to EOF".
        return fileMatchDescription(hash, hashAlgo, 0, 0);
    }

    /**
     * Verifies a region of the file against the given hash.
     *
     * @param hash     expected digest bytes
     * @param hashAlgo hash algorithm name; null/empty defaults to SHA-1
     * @param offset   byte offset the region starts at
     * @param length   region length in bytes; values &lt;= 0 mean "to EOF"
     */
    public OpenFileOperationStatus fileMatchDescription(byte[] hash, String hashAlgo, int offset, int length) {
        if (Files.notExists(this.path_to_file)) {
            return OpenFileOperationStatus.FILE_DOES_NOT_EXIST;
        }
        if (!Files.isReadable(this.path_to_file)) {
            return OpenFileOperationStatus.ACCESS_RESTRICTED;
        }
        if (hashAlgo == null || hashAlgo.isEmpty()) {
            hashAlgo = DEFAULT_HASH_ALGO;
        }
        if (!DEFAULT_HASH_ALGO.equals(hashAlgo)) {
            return OpenFileOperationStatus.NO_SUCH_HASH_ALGORITHM;
        }
        byte[] actual = computeSha1(offset, length);
        if (actual == null) {
            // Matches the old behavior of reporting I/O failures this way.
            return OpenFileOperationStatus.FILE_DOES_NOT_EXIST;
        }
        return Arrays.equals(actual, hash)
                ? OpenFileOperationStatus.HASH_MATCH
                : OpenFileOperationStatus.HASH_DOES_NOT_MATCH;
    }

    /**
     * SHA-1 over [offset, offset+length) of the file; length &lt;= 0 hashes to EOF.
     *
     * @return digest bytes, or null on any I/O or digest failure
     */
    private byte[] computeSha1(long offset, long length) {
        try (RandomAccessFile raf = new RandomAccessFile(this.path_to_file.toFile(), "r")) {
            MessageDigest sha1 = MessageDigest.getInstance(DEFAULT_HASH_ALGO);
            raf.seek(offset);
            long remaining = (length <= 0) ? Long.MAX_VALUE : length;
            byte[] buffer = new byte[8192];
            while (remaining > 0) {
                int toRead = (int) Math.min(buffer.length, remaining);
                int n = raf.read(buffer, 0, toRead);
                if (n < 0) {
                    break; // EOF before requested length: hash what was read
                }
                sha1.update(buffer, 0, n);
                remaining -= n;
            }
            return sha1.digest();
        } catch (NoSuchAlgorithmException ex) {
            return null; // cannot happen for SHA-1 on a conforming JRE
        } catch (IOException ioex) {
            return null;
        }
    }

    /**
     * Reads {@code length} bytes starting at {@code offset}.
     * If EOF is reached early, the tail of the buffer stays zero-filled.
     *
     * @return the buffer, or null on I/O error
     */
    public byte[] readFromPosition(int offset, int length) {
        /*assuming that file has already been verified with fileMatchDescription*/
        try (RandomAccessFile raf = new RandomAccessFile(this.path_to_file.toFile(), "r")) {
            raf.seek(offset);
            byte[] buffer = new byte[length];
            int filled = 0;
            // Loop: a single read() may legally return fewer bytes than asked.
            while (filled < length) {
                int n = raf.read(buffer, filled, length - filled);
                if (n < 0) {
                    break;
                }
                filled += n;
            }
            return buffer;
        } catch (IOException ioex) {
            ioex.printStackTrace();
            return null;
        }
    }

    /**
     * Writes {@code data} at {@code offset}.
     *
     * @return offset + length on success; the original offset on failure
     */
    public long writeFromPosition(long offset, long length, byte[] data) {
        /*assuming that file has already been verified with fileMatchDescription*/
        try (RandomAccessFile raf = new RandomAccessFile(this.path_to_file.toFile(), "rw")) {
            raf.seek(offset);
            raf.write(data);
            return offset + length;
        } catch (IOException ioex) {
            ioex.printStackTrace();
            return offset;
        }
    }

    /** @return file size in bytes, or 0 if the file is missing/unreadable. */
    public long getSize() {
        if (Files.notExists(this.path_to_file) || !Files.isReadable(this.path_to_file)) {
            return 0;
        }
        return this.path_to_file.toFile().length();
    }

    /** @return true only when the file exists AND is readable. */
    public boolean fileExits() {
        return !Files.notExists(this.path_to_file) && Files.isReadable(this.path_to_file);
    }

    /**
     * Deletes the file if it exists.
     *
     * @return true if deleted or already absent; false if deletion failed
     */
    public boolean deleteFile() {
        if (!fileExits()) {
            return true;
        }
        return this.path_to_file.toFile().delete();
    }

    /** @return the full path of the managed file. */
    public String getFileName() {
        return this.path_to_file.toString();
    }

    /** Extracts the last path component, or null for a null/empty path. */
    private static String getFileNameFromPath(String path) {
        if (null == path || path.isEmpty()) {
            return null;
        }
        String pattern = Pattern.quote(System.getProperty("file.separator"));
        String[] parts = path.split(pattern);
        return parts[parts.length - 1];
    }

    @Override
    public void fileClose() {
        // No persistent handle is kept open; nothing to release.
    }
}
| |
/**
* $Id: EntityActionsManager.java 105077 2012-02-24 22:54:29Z ottenhoff@longsight.com $
* $URL: https://source.sakaiproject.org/svn/entitybroker/tags/sakai-10.6/rest/src/java/org/sakaiproject/entitybroker/rest/EntityActionsManager.java $
* EntityActionsManager.java - entity-broker - Jul 26, 2008 9:58:00 AM - azeckoski
**************************************************************************
* Copyright (c) 2008, 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.entitybroker.rest;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.sakaiproject.entitybroker.EntityReference;
import org.sakaiproject.entitybroker.EntityView;
import org.sakaiproject.entitybroker.entityprovider.EntityProviderMethodStore;
import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutable;
import org.sakaiproject.entitybroker.entityprovider.capabilities.ActionsExecutionControllable;
import org.sakaiproject.entitybroker.entityprovider.extension.ActionReturn;
import org.sakaiproject.entitybroker.entityprovider.extension.CustomAction;
import org.sakaiproject.entitybroker.entityprovider.extension.EntityData;
import org.sakaiproject.entitybroker.entityprovider.extension.Formats;
import org.sakaiproject.entitybroker.entityprovider.search.Search;
import org.sakaiproject.entitybroker.exception.EntityException;
import org.sakaiproject.entitybroker.exception.EntityNotFoundException;
import org.sakaiproject.entitybroker.exception.FormatUnsupportedException;
import org.sakaiproject.entitybroker.util.EntityDataUtils;
import org.sakaiproject.entitybroker.util.http.LazyResponseOutputStream;
import org.sakaiproject.entitybroker.util.request.RequestStorageImpl;
import org.sakaiproject.entitybroker.util.request.RequestUtils;
/**
* Handles everything related to the custom actions registration and execution
*
* @author Aaron Zeckoski (azeckoski @ gmail.com)
*/
public class EntityActionsManager {
protected EntityActionsManager() { }
/**
* Full constructor
* @param entityProviderMethodStore the provider method store service
*/
public EntityActionsManager(EntityProviderMethodStore entityProviderMethodStore) {
this.entityProviderMethodStore = entityProviderMethodStore;
}
private EntityProviderMethodStore entityProviderMethodStore;
public void setEntityProviderMethodStore(EntityProviderMethodStore entityProviderMethodStore) {
this.entityProviderMethodStore = entityProviderMethodStore;
}
    /**
     * Execute a custom action request
     * @param actionProvider the provider handling this entity prefix
     * @param entityView the parsed view (reference + format) of the request
     * @param action the custom action key to execute
     * @param request the incoming servlet request (action params are read from it)
     * @param response the servlet response (status, encoding, body are written to it)
     * @param searchParams request parameters used to build a Search, may be null
     * @return an action return (may be null)
     * @throws IllegalArgumentException if any args are invalid
     * @throws UnsupportedOperationException if the action is not valid for this prefix
     * @throws IllegalStateException if a failure occurs
     */
    public ActionReturn handleCustomActionRequest(ActionsExecutable actionProvider, EntityView entityView, String action,
            HttpServletRequest request, HttpServletResponse response, Map<String, Object> searchParams) {
        if (actionProvider == null || entityView == null || action == null || request == null || response == null) {
            throw new IllegalArgumentException("actionProvider and view and action and request and response must not be null");
        }
        // get the action params out of the request first
        Map<String, Object> actionParams = RequestStorageImpl.getRequestValues(request, true, true, true);
        EntityReference ref = entityView.getEntityReference();
        // lazy stream: the response output stream is only actually opened on first write
        OutputStream outputStream = new LazyResponseOutputStream(response);
        ActionReturn actionReturn = handleCustomActionExecution(actionProvider, ref, action, actionParams, outputStream, entityView, searchParams);
        // now process the return into the request or response as needed
        if (actionReturn != null) {
            if (actionReturn.output != null || actionReturn.outputString != null) {
                if (actionReturn.output == null) {
                    // write the string into the response outputstream
                    // NOTE(review): getBytes() uses the platform default charset, not the
                    // encoding later set on the response — confirm this is intended
                    try {
                        outputStream.write( actionReturn.outputString.getBytes() );
                    } catch (IOException e) {
                        throw new RuntimeException("Failed encoding for outputstring: " + actionReturn.outputString);
                    }
                    actionReturn.output = outputStream;
                }
                // now set the encoding, mimetype into the response
                actionReturn.format = entityView.getExtension();
                if (actionReturn.encoding == null || actionReturn.mimeType == null) {
                    // use default if not set
                    if (actionReturn.format == null) {
                        actionReturn.format = Formats.TXT;
                    }
                    RequestUtils.setResponseEncoding(actionReturn.format, response);
                } else {
                    // the action supplied explicit encoding/mimetype; honor them
                    response.setCharacterEncoding(actionReturn.encoding);
                    response.setContentType(actionReturn.mimeType);
                }
            }
            // also sets the response code when handling the action
            if (actionReturn.responseCode > 0) {
                response.setStatus(actionReturn.responseCode);
            } else {
                response.setStatus(HttpServletResponse.SC_OK);
            }
            // other returns require no extra work here
        } else {
            // no failure so set the status code
            response.setStatus(HttpServletResponse.SC_OK);
        }
        return actionReturn;
    }
/**
* Handles the execution of custom actions based on a request for execution
* @throws IllegalArgumentException if any args are invalid
* @throws UnsupportedOperationException if the action is not valid for this prefix
*/
public ActionReturn handleCustomActionExecution(ActionsExecutable actionProvider, EntityReference ref, String action,
Map<String, Object> actionParams, OutputStream outputStream, EntityView view, Map<String, Object> searchParams) {
if (actionProvider == null || ref == null || action == null || "".equals(action)) {
throw new IllegalArgumentException("actionProvider and ref and action must not be null");
}
if (outputStream == null) {
// create an outputstream to hold the data
outputStream = new ByteArrayOutputStream();
}
String prefix = ref.getPrefix();
CustomAction customAction = entityProviderMethodStore.getCustomAction(prefix, action);
if (customAction == null) {
throw new UnsupportedOperationException("Invalid action ("+action+"), this action is not a supported custom action for prefix ("+prefix+")");
}
ActionReturn actionReturn = null;
Object result = null;
if (ActionsExecutionControllable.class.isAssignableFrom(actionProvider.getClass())) {
// execute the action
result = ((ActionsExecutionControllable)actionProvider).executeActions(new EntityView(ref, null, null), action, actionParams, outputStream);
} else {
if (customAction.methodName == null) {
throw new IllegalStateException("The custom action must have the method name set, null is not allowed: " + customAction);
}
Method method = customAction.getMethod();
if (method == null) {
try {
// Note: this is really expensive, need to cache the Method lookup
method = actionProvider.getClass().getMethod(customAction.methodName, customAction.methodArgTypes);
} catch (SecurityException e1) {
throw new RuntimeException("Fatal error trying to get custom action method: " + customAction, e1);
} catch (NoSuchMethodException e1) {
throw new RuntimeException("Fatal error trying to get custom action method: " + customAction, e1);
}
customAction.setMethod(method); // cache the method
}
Object[] args = new Object[customAction.methodArgTypes.length];
for (int i = 0; i < customAction.methodArgTypes.length; i++) {
Class<?> argType = customAction.methodArgTypes[i];
if (EntityReference.class.equals(argType)) {
args[i] = ref;
} else if (EntityView.class.equals(argType)) {
if (view == null) {
view = new EntityView(ref, customAction.viewKey, null);
}
args[i] = view;
} else if (String.class.equals(argType)) {
args[i] = actionProvider.getEntityPrefix();
} else if (Search.class.equals(argType)) {
Search search = null;
if (searchParams == null || searchParams.isEmpty()) {
search = new Search();
} else {
search = RequestUtils.makeSearchFromRequestParams(searchParams);
}
args[i] = search;
} else if (OutputStream.class.equals(argType)) {
args[i] = outputStream;
} else if (Map.class.equals(argType)) {
args[i] = actionParams;
} else {
throw new IllegalStateException("custom action ("+customAction+") contains an invalid methodArgTypes, " +
"only valid types allowed: EntityReference, EntityView, Search, String, OutputStream, Map");
}
}
try {
result = method.invoke(actionProvider, args);
} catch (IllegalArgumentException e) {
throw new RuntimeException("Fatal error trying to execute custom action method: " + customAction, e);
} catch (IllegalAccessException e) {
throw new RuntimeException("Fatal error trying to execute custom action method: " + customAction, e);
} catch (InvocationTargetException e) {
if (e.getCause() != null) {
if (e.getCause().getClass().isAssignableFrom(IllegalArgumentException.class)) {
throw new IllegalArgumentException(e.getCause().getMessage() + " (rethrown)", e.getCause());
} else if (e.getCause().getClass().isAssignableFrom(EntityNotFoundException.class)) {
throw new EntityNotFoundException(e.getCause().getMessage() + " (rethrown)", ref+"", e.getCause());
} else if (e.getCause().getClass().isAssignableFrom(FormatUnsupportedException.class)) {
String format = ((FormatUnsupportedException)e.getCause()).format;
throw new FormatUnsupportedException(e.getCause().getMessage() + " (rethrown)", e.getCause(), ref+"", format);
} else if (e.getCause().getClass().isAssignableFrom(UnsupportedOperationException.class)) {
throw new UnsupportedOperationException(e.getCause().getMessage() + " (rethrown)", e.getCause());
} else if (e.getCause().getClass().isAssignableFrom(EntityException.class)) {
int code = ((EntityException)e.getCause()).responseCode;
throw new EntityException(e.getCause().getMessage() + " (rethrown)", ref+"", code);
} else if (e.getCause().getClass().isAssignableFrom(IllegalStateException.class)) {
throw new IllegalStateException(e.getCause().getMessage() + " (rethrown)", e.getCause());
} else if (e.getCause().getClass().isAssignableFrom(SecurityException.class)) {
throw new SecurityException(e.getCause().getMessage() + " (rethrown)", e.getCause());
}
}
throw new RuntimeException("Fatal error trying to execute custom action method: " + customAction, e);
}
}
if (result != null) {
Class<?> resultClass = result.getClass();
// package up the result in the ActionResult
if (Boolean.class.isAssignableFrom(resultClass)) {
// handle booleans specially
boolean bool = ((Boolean) result).booleanValue();
if (bool) {
result = null;
} else {
throw new EntityNotFoundException("Could not find data for ref ("+ref+") from custom action ("+action+"), (returned boolean false)", ref+"");
}
} else if (ActionReturn.class.isAssignableFrom(resultClass)) {
actionReturn = (ActionReturn) result;
} else if (OutputStream.class.isAssignableFrom(resultClass)) {
actionReturn = new ActionReturn(outputStream);
} else if (String.class.isAssignableFrom(resultClass)) {
actionReturn = new ActionReturn((String) result);
} else if (List.class.isAssignableFrom(resultClass)) {
// convert the list to a list of EntityData
List<EntityData> data = EntityDataUtils.convertToEntityData((List<?>) result, ref);
actionReturn = new ActionReturn(data, (String) null);
} else if (EntityData.class.isAssignableFrom(resultClass)) {
actionReturn = new ActionReturn( (EntityData) result, (String) null);
} else {
// assume this is an entity object (not ED)
EntityData ed = EntityDataUtils.makeEntityData(ref, result);
actionReturn = new ActionReturn( ed, (String) null);
}
}
return actionReturn;
}
/**
 * Look up the {@link CustomAction} registered for an entity prefix and action key.
 * @param prefix an entity prefix
 * @param action an action key
 * @return the custom action OR null if none found
 */
public CustomAction getCustomAction(String prefix, String action) {
    // Delegation only: the method store owns the prefix/action registry.
    CustomAction found = entityProviderMethodStore.getCustomAction(prefix, action);
    return found;
}
}
| |
/*
* Copyright 2010 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.replication;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.UUID;
import java.util.concurrent.atomic.AtomicBoolean;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Abortable;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.Server;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.replication.regionserver.Replication;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.zookeeper.ClusterId;
import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperNodeTracker;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.zookeeper.KeeperException;
import org.apache.zookeeper.KeeperException.ConnectionLossException;
import org.apache.zookeeper.KeeperException.SessionExpiredException;
/**
* This class serves as a helper for all things related to zookeeper in
* replication.
* <p/>
* The layout looks something like this under zookeeper.znode.parent for the
* master cluster:
* <p/>
*
* <pre>
* replication/
* state {contains true or false}
* clusterId {contains a byte}
* peers/
* 1/ {contains a full cluster address}
* peer-state {contains ENABLED or DISABLED}
* 2/
* ...
* rs/ {lists all RS that replicate}
* startcode1/ {lists all peer clusters}
* 1/ {lists hlogs to process}
* 10.10.1.76%3A53488.123456789 {contains nothing or a position}
* 10.10.1.76%3A53488.123456790
* ...
* 2/
* ...
* startcode2/
* ...
* </pre>
*/
public class ReplicationZookeeper {
private static final Log LOG =
LogFactory.getLog(ReplicationZookeeper.class);
// Name of znode we use to lock when failover
private final static String RS_LOCK_ZNODE = "lock";
// Values of znode which stores state of a peer
public static enum PeerState {
ENABLED, DISABLED
};
// Our handle on zookeeper
private final ZooKeeperWatcher zookeeper;
// Map of peer clusters keyed by their id
private Map<String, ReplicationPeer> peerClusters;
// Path to the root replication znode
private String replicationZNode;
// Path to the peer clusters znode
private String peersZNode;
// Path to the znode that contains all RS that replicate
private String rsZNode;
// Path to this region server's name under rsZNode
private String rsServerNameZnode;
// Name of the replicationState znode
private String replicationStateNodeName;
// Name of zk node which stores peer state
private String peerStateNodeName;
private final Configuration conf;
// Is this cluster replicating at the moment?
private AtomicBoolean replicating;
// The key to our own cluster
private String ourClusterKey;
// Abortable
private Abortable abortable;
private ReplicationStatusTracker statusTracker;
/**
 * Constructor used by clients of replication (like master and HBase clients).
 * Resolves and creates the base replication znodes but does not connect to
 * any peer cluster.
 * @param abortable used to abort on unrecoverable ZK failures
 * @param conf conf to use
 * @param zk zk connection to use
 * @throws KeeperException if the replication znodes cannot be read or created
 */
public ReplicationZookeeper(final Abortable abortable, final Configuration conf,
final ZooKeeperWatcher zk)
throws KeeperException {
this.conf = conf;
this.zookeeper = zk;
// NOTE(review): this.abortable is NOT assigned here (unlike the Server
// constructor); methods that call this.abortable.abort(...) would NPE for
// instances built this way — confirm client callers avoid those paths.
this.replicating = new AtomicBoolean();
setZNodes(abortable);
}
/**
 * Constructor used by region servers, connects to the peer cluster right away.
 *
 * @param server the region server's handle; supplies ZK, conf and server name
 * @param replicating atomic boolean to start/stop replication, shared with the caller
 * @throws IOException
 * @throws KeeperException
 */
public ReplicationZookeeper(final Server server, final AtomicBoolean replicating)
throws IOException, KeeperException {
this.abortable = server;
this.zookeeper = server.getZooKeeper();
this.conf = server.getConfiguration();
this.replicating = replicating;
// Must run before any znode paths below are used: it fills in
// replicationZNode, rsZNode, replicationStateNodeName, etc.
setZNodes(server);
this.peerClusters = new HashMap<String, ReplicationPeer>();
// Ensure the replication state znode exists before we register ourselves
ZKUtil.createWithParents(this.zookeeper,
ZKUtil.joinZNode(this.replicationZNode, this.replicationStateNodeName));
// Register this RS under .../rs/<servername> so its queues are discoverable
this.rsServerNameZnode = ZKUtil.joinZNode(rsZNode, server.getServerName().toString());
ZKUtil.createWithParents(this.zookeeper, this.rsServerNameZnode);
connectExistingPeers();
}
/**
 * Resolves all replication znode paths from configuration, creates the
 * peers and rs parent znodes, and starts the replication-state tracker.
 * Called from both constructors; statement order matters because later
 * paths are built from earlier ones.
 * @param abortable used by the state tracker to abort on ZK failures
 * @throws KeeperException if znode creation fails
 */
private void setZNodes(Abortable abortable) throws KeeperException {
String replicationZNodeName =
conf.get("zookeeper.znode.replication", "replication");
String peersZNodeName =
conf.get("zookeeper.znode.replication.peers", "peers");
this.peerStateNodeName = conf.get(
"zookeeper.znode.replication.peers.state", "peer-state");
this.replicationStateNodeName =
conf.get("zookeeper.znode.replication.state", "state");
String rsZNodeName =
conf.get("zookeeper.znode.replication.rs", "rs");
this.ourClusterKey = ZKUtil.getZooKeeperClusterKey(this.conf);
this.replicationZNode =
ZKUtil.joinZNode(this.zookeeper.baseZNode, replicationZNodeName);
this.peersZNode = ZKUtil.joinZNode(replicationZNode, peersZNodeName);
ZKUtil.createWithParents(this.zookeeper, this.peersZNode);
this.rsZNode = ZKUtil.joinZNode(replicationZNode, rsZNodeName);
ZKUtil.createWithParents(this.zookeeper, this.rsZNode);
// Set a tracker on replicationStateNodeNode so readReplicationStateZnode()
// is re-run whenever the state znode changes
this.statusTracker =
new ReplicationStatusTracker(this.zookeeper, abortable);
statusTracker.start();
readReplicationStateZnode();
}
/**
 * Connect to every peer already registered under the peers znode.
 * @throws IOException if connecting to a peer fails
 * @throws KeeperException if listing the peers znode fails
 */
private void connectExistingPeers() throws IOException, KeeperException {
List<String> registered = ZKUtil.listChildrenNoWatch(this.zookeeper, this.peersZNode);
if (registered == null) {
return;
}
for (String peerId : registered) {
connectToPeer(peerId);
}
}
/**
 * List this cluster's peers' IDs and set a watch on them.
 * @return list of all peers' identifiers, or null on ZK error
 */
public List<String> listPeersIdsAndWatch() {
try {
return ZKUtil.listChildrenAndWatchThem(this.zookeeper, this.peersZNode);
} catch (KeeperException e) {
this.abortable.abort("Cannot get the list of peers ", e);
return null;
}
}
/**
 * Map of this cluster's peers for display.
 * @return A map of peer ids to peer cluster keys; empty if there are no
 * peers or the peers znode does not exist
 */
public Map<String,String> listPeers() {
Map<String,String> peers = new TreeMap<String,String>();
List<String> ids = null;
try {
ids = ZKUtil.listChildrenNoWatch(this.zookeeper, this.peersZNode);
// listChildrenNoWatch returns null when the znode does not exist;
// the original code NPE'd here (an NPE is not a KeeperException, so
// it escaped the catch below)
if (ids == null) {
return peers;
}
for (String id : ids) {
peers.put(id, Bytes.toString(ZKUtil.getData(this.zookeeper,
ZKUtil.joinZNode(this.peersZNode, id))));
}
} catch (KeeperException e) {
this.abortable.abort("Cannot get the list of peers ", e);
}
return peers;
}
/**
 * Returns all region servers from the given peer.
 *
 * @param peerClusterId id of the peer cluster to interrogate
 * @return addresses of all region servers; empty if the peer is unknown
 * or its ZK is unreachable
 */
public List<ServerName> getSlavesAddresses(String peerClusterId) {
if (this.peerClusters.size() == 0) {
return Collections.emptyList();
}
ReplicationPeer peer = this.peerClusters.get(peerClusterId);
if (peer == null) {
return Collections.emptyList();
}
List<ServerName> addresses;
try {
addresses = fetchSlavesAddresses(peer.getZkw());
} catch (KeeperException ke) {
// ZK trouble on the peer side: try to re-open its watcher and fall
// back to an empty list for this call
reconnectPeer(ke, peer);
addresses = Collections.emptyList();
}
// Cache the (possibly empty) result on the peer before returning it
peer.setRegionServers(addresses);
return peer.getRegionServers();
}
/**
 * Get the list of all the region servers from the specified peer.
 * @param zkw zk connection to use
 * @return list of region server addresses, empty if the peer's rs znode
 * is absent
 * @throws KeeperException on ZK error
 */
private List<ServerName> fetchSlavesAddresses(ZooKeeperWatcher zkw)
throws KeeperException {
return listChildrenAndGetAsServerNames(zkw, zkw.rsZNode);
}
/**
 * Lists the children of the specified znode and parses each child name
 * into a server name.
 *
 * Used to list the currently online regionservers and their addresses.
 *
 * Sets no watches at all, this method is best effort.
 *
 * Returns an empty list both when the node has no children and when the
 * parent node itself does not exist.
 *
 * @param zkw zookeeper reference
 * @param znode node to get children of as addresses
 * @return server names parsed from the children of the znode, empty if
 * there are no children or the parent does not exist
 * @throws KeeperException if unexpected zookeeper exception
 */
public static List<ServerName> listChildrenAndGetAsServerNames(
ZooKeeperWatcher zkw, String znode)
throws KeeperException {
final List<String> children = ZKUtil.listChildrenNoWatch(zkw, znode);
if (children == null) {
return Collections.emptyList();
}
final List<ServerName> names = new ArrayList<ServerName>(children.size());
for (String child : children) {
names.add(ServerName.parseServerName(child));
}
return names;
}
/**
 * This method connects this cluster to another one and registers it
 * in this region server's replication znode.
 * @param peerId id of the peer cluster
 * @return true if a new connection was made, false if the peer was already
 * known, is this cluster itself, or could not be reached
 * @throws IOException if building the peer connection fails
 * @throws KeeperException on ZK error
 */
public boolean connectToPeer(String peerId)
throws IOException, KeeperException {
// peerClusters is only initialized by the Server constructor; clients
// (master/HBase client) never connect to peers
if (peerClusters == null) {
return false;
}
if (this.peerClusters.containsKey(peerId)) {
return false;
}
ReplicationPeer peer = getPeer(peerId);
if (peer == null) {
return false;
}
this.peerClusters.put(peerId, peer);
// Create .../rs/<us>/<peerId> where this RS will queue hlogs for the peer
ZKUtil.createWithParents(this.zookeeper, ZKUtil.joinZNode(
this.rsServerNameZnode, peerId));
LOG.info("Added new peer cluster " + peer.getClusterKey());
return true;
}
/**
 * Helper method to connect to a peer.
 * @param peerId peer's identifier
 * @return object representing the peer, or null when the peer is this
 * cluster itself or its cluster key cannot be applied
 * @throws IOException
 * @throws KeeperException
 */
public ReplicationPeer getPeer(String peerId) throws IOException, KeeperException{
String znode = ZKUtil.joinZNode(this.peersZNode, peerId);
byte [] data = ZKUtil.getData(this.zookeeper, znode);
String otherClusterKey = Bytes.toString(data);
// Refuse to replicate to ourselves
if (this.ourClusterKey.equals(otherClusterKey)) {
LOG.debug("Not connecting to " + peerId + " because it's us");
return null;
}
// Construct the connection to the new peer
Configuration otherConf = new Configuration(this.conf);
try {
ZKUtil.applyClusterKeyToConf(otherConf, otherClusterKey);
} catch (IOException e) {
LOG.error("Can't get peer because:", e);
return null;
}
ReplicationPeer peer = new ReplicationPeer(otherConf, peerId,
otherClusterKey);
// Track the peer's ENABLED/DISABLED state so it's visible locally
peer.startStateTracker(this.zookeeper, this.getPeerStateNode(peerId));
return peer;
}
/**
 * Set the new replication state for this cluster. Creates the state znode
 * if it is missing, then writes "true"/"false" as a string.
 * @param newState true to enable replication cluster-wide, false to disable
 * @throws KeeperException on ZK error
 */
public void setReplicating(boolean newState) throws KeeperException {
ZKUtil.createWithParents(this.zookeeper,
ZKUtil.joinZNode(this.replicationZNode, this.replicationStateNodeName));
ZKUtil.setData(this.zookeeper,
ZKUtil.joinZNode(this.replicationZNode, this.replicationStateNodeName),
Bytes.toBytes(Boolean.toString(newState)));
}
/**
 * Remove the peer from zookeeper, which will trigger the watchers on every
 * region server and close their sources.
 * @param id peer's identifier
 * @throws IllegalArgumentException Thrown when the peer doesn't exist
 * @throws IOException when ZK fails
 */
public void removePeer(String id) throws IOException {
try {
if (!peerExists(id)) {
throw new IllegalArgumentException("Cannot remove inexisting peer");
}
ZKUtil.deleteNodeRecursively(this.zookeeper,
ZKUtil.joinZNode(this.peersZNode, id));
} catch (KeeperException e) {
throw new IOException("Unable to remove a peer", e);
}
}
/**
 * Add a new peer to this cluster.
 * @param id peer's identifier
 * @param clusterKey ZK ensemble's addresses, client port and root znode
 * @throws IllegalArgumentException Thrown when a peer with this id already
 * exists (note: the original javadoc had this inverted)
 * @throws IOException when ZK fails
 */
public void addPeer(String id, String clusterKey) throws IOException {
try {
if (peerExists(id)) {
throw new IllegalArgumentException("Cannot add existing peer");
}
ZKUtil.createWithParents(this.zookeeper, this.peersZNode);
ZKUtil.createAndWatch(this.zookeeper,
ZKUtil.joinZNode(this.peersZNode, id), Bytes.toBytes(clusterKey));
ZKUtil.createAndWatch(this.zookeeper, getPeerStateNode(id),
Bytes.toBytes(PeerState.ENABLED.name())); // enabled by default
} catch (KeeperException e) {
throw new IOException("Unable to add peer", e);
}
}
/**
 * @return true when the peer's znode exists (checkExists returns -1 for a
 * missing znode, the version otherwise)
 */
private boolean peerExists(String id) throws KeeperException {
return ZKUtil.checkExists(this.zookeeper,
ZKUtil.joinZNode(this.peersZNode, id)) >= 0;
}
/**
 * Enable replication to the peer.
 *
 * @param id peer's identifier
 * @throws IllegalArgumentException
 * Thrown when the peer doesn't exist
 */
public void enablePeer(String id) throws IOException {
changePeerState(id, PeerState.ENABLED);
LOG.info("peer " + id + " is enabled");
}
/**
 * Disable replication to the peer.
 *
 * @param id peer's identifier
 * @throws IllegalArgumentException
 * Thrown when the peer doesn't exist
 */
public void disablePeer(String id) throws IOException {
changePeerState(id, PeerState.DISABLED);
LOG.info("peer " + id + " is disabled");
}
/**
 * Write the given state into the peer's state znode, creating the znode
 * if it does not exist yet.
 */
private void changePeerState(String id, PeerState state) throws IOException {
try {
if (!peerExists(id)) {
throw new IllegalArgumentException("peer " + id + " is not registered");
}
String peerStateZNode = getPeerStateNode(id);
byte[] stateBytes = Bytes.toBytes(state.name());
if (ZKUtil.checkExists(this.zookeeper, peerStateZNode) == -1) {
// No state znode yet: create it holding the desired state
ZKUtil.createAndWatch(zookeeper, peerStateZNode, stateBytes);
} else {
ZKUtil.setData(this.zookeeper, peerStateZNode, stateBytes);
}
LOG.info("state of the peer " + id + " changed to " + state.name());
} catch (KeeperException e) {
throw new IOException("Unable to change state of the peer " + id, e);
}
}
/**
 * Get state of the peer. This method checks the state by connecting to ZK.
 *
 * @param id peer's identifier
 * @return current state of the peer
 * @throws KeeperException on ZK error
 */
public PeerState getPeerState(String id) throws KeeperException {
// NOTE(review): if the state znode is missing or holds an unexpected
// value, valueOf will throw (NPE/IllegalArgumentException) — confirm
// callers only pass registered peers
byte[] peerStateBytes = ZKUtil
.getData(this.zookeeper, getPeerStateNode(id));
return PeerState.valueOf(Bytes.toString(peerStateBytes));
}
/**
 * Check whether the peer is enabled or not. This method checks the atomic
 * boolean of ReplicationPeer locally.
 *
 * @param id peer identifier
 * @return true if the peer is enabled, otherwise false
 * @throws IllegalArgumentException
 * Thrown when the peer doesn't exist
 */
public boolean getPeerEnabled(String id) {
if (!this.peerClusters.containsKey(id)) {
throw new IllegalArgumentException("peer " + id + " is not registered");
}
return this.peerClusters.get(id).getPeerEnabled().get();
}
/** @return full path of the peer's state znode: peers/<id>/<peer-state> */
private String getPeerStateNode(String id) {
return ZKUtil.joinZNode(this.peersZNode,
ZKUtil.joinZNode(id, this.peerStateNodeName));
}
/**
 * This reads the state znode for replication and sets the atomic boolean.
 * Also invoked by the status tracker whenever the znode changes.
 */
private void readReplicationStateZnode() {
try {
this.replicating.set(getReplication());
LOG.info("Replication is now " + (this.replicating.get()?
"started" : "stopped"));
} catch (KeeperException e) {
this.abortable.abort("Failed getting data on from " + getRepStateNode(), e);
}
}
/**
 * Get the replication status of this cluster. If the state znode doesn't
 * exist or is empty it will also create it and set it true.
 * @return returns true when it's enabled, else false
 * @throws KeeperException
 */
public boolean getReplication() throws KeeperException {
// Read through the tracker's cached copy; no watch is (re)set here
byte [] data = this.statusTracker.getData(false);
if (data == null || data.length == 0) {
setReplicating(true);
return true;
}
return Boolean.parseBoolean(Bytes.toString(data));
}
/** @return full path of the replication state znode */
private String getRepStateNode() {
return ZKUtil.joinZNode(this.replicationZNode, this.replicationStateNodeName);
}
/**
 * Add a new log to the list of hlogs in zookeeper.
 * @param filename name of the hlog's znode
 * @param peerId name of the cluster's znode
 * @throws KeeperException on ZK error
 */
public void addLogToList(String filename, String peerId)
throws KeeperException {
String logZnode =
ZKUtil.joinZNode(ZKUtil.joinZNode(this.rsServerNameZnode, peerId), filename);
ZKUtil.createWithParents(this.zookeeper, logZnode);
}
/**
 * Remove a log from the list of hlogs in zookeeper.
 * @param filename name of the hlog's znode
 * @param clusterId name of the cluster's znode
 */
public void removeLogFromList(String filename, String clusterId) {
String logZnode =
ZKUtil.joinZNode(ZKUtil.joinZNode(rsServerNameZnode, clusterId), filename);
try {
ZKUtil.deleteNode(this.zookeeper, logZnode);
} catch (KeeperException e) {
this.abortable.abort("Failed remove from list", e);
}
}
/**
 * Set the current position of the specified cluster in the current hlog.
 * @param filename name of the hlog's znode
 * @param clusterId name of the cluster's znode
 * @param position the position in the file
 */
public void writeReplicationStatus(String filename, String clusterId,
long position) {
try {
String znode = ZKUtil.joinZNode(this.rsServerNameZnode, clusterId);
znode = ZKUtil.joinZNode(znode, filename);
// Why serialize String of Long and not Long as bytes?
ZKUtil.setData(this.zookeeper, znode,
Bytes.toBytes(Long.toString(position)));
} catch (KeeperException e) {
this.abortable.abort("Writing replication status", e);
}
}
/**
 * Get a list of all the other region servers in this cluster
 * and set a watch.
 * @return a list of server names, or null on ZK error
 */
public List<String> getRegisteredRegionServers() {
try {
return ZKUtil.listChildrenAndWatchThem(
this.zookeeper, this.zookeeper.rsZNode);
} catch (KeeperException e) {
this.abortable.abort("Get list of registered region servers", e);
return null;
}
}
/**
 * Get the list of the replicators that have queues; they can be alive, dead
 * or simply from a previous run.
 * @return a list of server names, or null on ZK error
 */
public List<String> getListOfReplicators() {
try {
return ZKUtil.listChildrenNoWatch(this.zookeeper, rsZNode);
} catch (KeeperException e) {
this.abortable.abort("Get list of replicators", e);
return null;
}
}
/**
 * Get the list of peer clusters for the specified server name.
 * @param rs server name of the rs
 * @return a list of peer cluster ids, or null on ZK error
 */
public List<String> getListPeersForRS(String rs) {
String rsPath = ZKUtil.joinZNode(rsZNode, rs);
try {
return ZKUtil.listChildrenNoWatch(this.zookeeper, rsPath);
} catch (KeeperException e) {
this.abortable.abort("Get list of peers for rs", e);
return null;
}
}
/**
 * Get the list of hlogs for the specified region server and peer cluster.
 * @param rs server name of the rs
 * @param id peer cluster
 * @return a list of hlogs, or null on ZK error
 */
public List<String> getListHLogsForPeerForRS(String rs, String id) {
String queuePath = ZKUtil.joinZNode(ZKUtil.joinZNode(rsZNode, rs), id);
try {
return ZKUtil.listChildrenNoWatch(this.zookeeper, queuePath);
} catch (KeeperException e) {
this.abortable.abort("Get list of hlogs for peer", e);
return null;
}
}
/**
 * Try to set a lock in another server's znode. The lock is an ephemeral-style
 * marker znode; success/failure is communicated through ZK exceptions, so the
 * control flow below is intentionally exception-driven.
 * @param znode the server name of the other server
 * @return true if the lock was acquired, false in every other case
 */
public boolean lockOtherRS(String znode) {
try {
String parent = ZKUtil.joinZNode(this.rsZNode, znode);
if (parent.equals(rsServerNameZnode)) {
LOG.warn("Won't lock because this is us, we're dead!");
return false;
}
String p = ZKUtil.joinZNode(parent, RS_LOCK_ZNODE);
// The lock znode's data records who holds the lock (our own znode name)
ZKUtil.createAndWatch(this.zookeeper, p, Bytes.toBytes(rsServerNameZnode));
} catch (KeeperException e) {
// This exception will pop up if the znode under which we're trying to
// create the lock is already deleted by another region server, meaning
// that the transfer already occurred.
// NoNode => transfer is done and znodes are already deleted
// NodeExists => lock znode already created by another RS
if (e instanceof KeeperException.NoNodeException ||
e instanceof KeeperException.NodeExistsException) {
LOG.info("Won't transfer the queue," +
" another RS took care of it because of: " + e.getMessage());
} else {
LOG.info("Failed lock other rs", e);
}
return false;
}
return true;
}
/**
 * This method copies all the hlog queues from another region server
 * and returns them all sorted per peer cluster (appended with the dead
 * server's znode).
 * @param znode server name to copy from
 * @return all hlogs for all peers of that server; empty on error or when
 * there is nothing to copy
 */
public SortedMap<String, SortedSet<String>> copyQueuesFromRS(String znode) {
// TODO this method isn't atomic enough, we could start copying and then
// TODO fail for some reason and we would end up with znodes we don't want.
SortedMap<String,SortedSet<String>> queues =
new TreeMap<String,SortedSet<String>>();
try {
String nodePath = ZKUtil.joinZNode(rsZNode, znode);
List<String> clusters =
ZKUtil.listChildrenNoWatch(this.zookeeper, nodePath);
// We have a lock znode in there, it will count as one.
if (clusters == null || clusters.size() <= 1) {
return queues;
}
// The lock isn't a peer cluster, remove it
clusters.remove(RS_LOCK_ZNODE);
for (String cluster : clusters) {
// We add the name of the recovered RS to the new znode, we can even
// do that for queues that were recovered 10 times giving a znode like
// number-startcode-number-otherstartcode-number-anotherstartcode-etc
String newCluster = cluster+"-"+znode;
String newClusterZnode = ZKUtil.joinZNode(rsServerNameZnode, newCluster);
ZKUtil.createNodeIfNotExistsAndWatch(this.zookeeper, newClusterZnode,
HConstants.EMPTY_BYTE_ARRAY);
String clusterPath = ZKUtil.joinZNode(nodePath, cluster);
List<String> hlogs = ZKUtil.listChildrenNoWatch(this.zookeeper, clusterPath);
// That region server didn't have anything to replicate for this cluster
if (hlogs == null || hlogs.size() == 0) {
continue;
}
SortedSet<String> logQueue = new TreeSet<String>();
queues.put(newCluster, logQueue);
for (String hlog : hlogs) {
String z = ZKUtil.joinZNode(clusterPath, hlog);
// Copy each hlog znode along with its replication position data
byte [] position = ZKUtil.getData(this.zookeeper, z);
LOG.debug("Creating " + hlog + " with data " + Bytes.toString(position));
String child = ZKUtil.joinZNode(newClusterZnode, hlog);
ZKUtil.createAndWatch(this.zookeeper, child, position);
logQueue.add(hlog);
}
}
} catch (KeeperException e) {
this.abortable.abort("Copy queues from rs", e);
}
return queues;
}
/**
 * Delete a complete queue of hlogs.
 * @param peerZnode znode of the peer cluster queue of hlogs to delete
 * @param closeConnection whether to also close and forget the peer's ZK
 * connection (only meaningful for directly-connected peers)
 */
public void deleteSource(String peerZnode, boolean closeConnection) {
try {
ZKUtil.deleteNodeRecursively(this.zookeeper,
ZKUtil.joinZNode(rsServerNameZnode, peerZnode));
if (closeConnection) {
// Recovered queues (e.g. "1-serverA") are not in peerClusters; the
// original code NPE'd on the unconditional get().getZkw() here
ReplicationPeer peer = this.peerClusters.remove(peerZnode);
if (peer != null) {
peer.getZkw().close();
}
}
} catch (KeeperException e) {
this.abortable.abort("Failed delete of " + peerZnode, e);
}
}
/**
 * Recursive deletion of all znodes in the specified rs' znode.
 * @param znode the server name whose queues should be removed
 */
public void deleteRsQueues(String znode) {
String fullpath = ZKUtil.joinZNode(rsZNode, znode);
try {
List<String> clusters =
ZKUtil.listChildrenNoWatch(this.zookeeper, fullpath);
// null means the znode is already gone; still attempt the final
// delete below so the NoNode special-case handles it uniformly
if (clusters != null) {
for (String cluster : clusters) {
// We'll delete it later
if (cluster.equals(RS_LOCK_ZNODE)) {
continue;
}
String fullClusterPath = ZKUtil.joinZNode(fullpath, cluster);
ZKUtil.deleteNodeRecursively(this.zookeeper, fullClusterPath);
}
}
// Finish cleaning up
ZKUtil.deleteNodeRecursively(this.zookeeper, fullpath);
} catch (KeeperException e) {
if (e instanceof KeeperException.NoNodeException ||
e instanceof KeeperException.NotEmptyException) {
// Testing a special case where another region server was able to
// create a lock just after we deleted it, but then was also able to
// delete the RS znode before us or its lock znode is still there.
if (e.getPath().equals(fullpath)) {
return;
}
}
this.abortable.abort("Failed delete of " + znode, e);
}
}
/**
 * Delete this cluster's queues (this RS' own znode under rs/).
 */
public void deleteOwnRSZNode() {
try {
ZKUtil.deleteNodeRecursively(this.zookeeper,
this.rsServerNameZnode);
} catch (KeeperException e) {
// if the znode is already expired, don't bother going further
if (e instanceof KeeperException.SessionExpiredException) {
return;
}
this.abortable.abort("Failed delete of " + this.rsServerNameZnode, e);
}
}
/**
 * Get the position of the specified hlog in the specified peer znode.
 * @param peerId znode of the peer cluster
 * @param hlog name of the hlog
 * @return the position in that hlog; 0 when no position has been recorded
 * @throws KeeperException
 */
public long getHLogRepPosition(String peerId, String hlog)
throws KeeperException {
String clusterZnode = ZKUtil.joinZNode(rsServerNameZnode, peerId);
String znode = ZKUtil.joinZNode(clusterZnode, hlog);
// Positions are stored as the string form of the long, see
// writeReplicationStatus
String data = Bytes.toString(ZKUtil.getData(this.zookeeper, znode));
return data == null || data.length() == 0 ? 0 : Long.parseLong(data);
}
/**
 * Returns the UUID of the provided peer id. Should a connection loss or session
 * expiration happen, the ZK handler will be reopened once and if it still doesn't
 * work then it will bail and return null.
 * @param peerId the peer's ID that will be converted into a UUID
 * @return a UUID, or null if the peer is unknown or there's a ZK connection issue
 */
public UUID getPeerUUID(String peerId) {
ReplicationPeer peer = getPeerClusters().get(peerId);
// The original code NPE'd on peer.getZkw() when peerId was not a
// connected peer; return null like other failure modes instead
if (peer == null) {
return null;
}
UUID peerUUID = null;
try {
peerUUID = getUUIDForCluster(peer.getZkw());
} catch (KeeperException ke) {
reconnectPeer(ke, peer);
}
return peerUUID;
}
/**
 * Get the UUID for the provided ZK watcher. Doesn't handle any ZK exceptions.
 * @param zkw watcher connected to an ensemble
 * @return the UUID read from zookeeper
 * @throws KeeperException
 */
public UUID getUUIDForCluster(ZooKeeperWatcher zkw) throws KeeperException {
String clusterId = ClusterId.readClusterIdZNode(zkw);
return UUID.fromString(clusterId);
}
/**
 * Re-open a peer's ZK watcher after a connection loss or session expiry;
 * any other KeeperException is ignored here.
 */
private void reconnectPeer(KeeperException ke, ReplicationPeer peer) {
boolean recoverable = ke instanceof ConnectionLossException
|| ke instanceof SessionExpiredException;
if (!recoverable) {
return;
}
LOG.warn(
"Lost the ZooKeeper connection for peer " + peer.getClusterKey(),
ke);
try {
peer.reloadZkWatcher();
} catch(IOException io) {
LOG.warn(
"Creation of ZookeeperWatcher failed for peer "
+ peer.getClusterKey(), io);
}
}
/** Register a listener on this cluster's ZK watcher. */
public void registerRegionServerListener(ZooKeeperListener listener) {
this.zookeeper.registerListener(listener);
}
/**
 * Get a map of all peer clusters.
 * @return map of peer cluster keyed by id
 */
public Map<String, ReplicationPeer> getPeerClusters() {
return this.peerClusters;
}
/**
 * Extracts the znode name of a peer cluster from a ZK path.
 * @param fullPath Path to extract the id from
 * @return the id or an empty string if path is invalid
 */
public static String getZNodeName(String fullPath) {
String[] segments = fullPath.split("/");
if (segments.length == 0) {
return "";
}
return segments[segments.length - 1];
}
/**
 * Get this cluster's zk connection.
 * @return zk connection
 */
public ZooKeeperWatcher getZookeeperWatcher() {
return this.zookeeper;
}
/**
 * Get the full path to the peers' znode.
 * @return path to peers in zk
 */
public String getPeersZNode() {
return peersZNode;
}
/**
 * Tracker for the status of replication: keeps the local {@code replicating}
 * flag in sync with the replication state znode.
 */
public class ReplicationStatusTracker extends ZooKeeperNodeTracker {
public ReplicationStatusTracker(ZooKeeperWatcher watcher,
Abortable abortable) {
super(watcher, getRepStateNode(), abortable);
}
@Override
public synchronized void nodeDataChanged(String path) {
if (path.equals(node)) {
// Refresh the cached data first, then re-read the flag from it
super.nodeDataChanged(path);
readReplicationStateZnode();
}
}
}
}
| |
/*
Copyright 2016 Goldman Sachs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
*/
package com.gs.fw.common.mithra.databasetype;
import com.gs.fw.common.mithra.MithraBusinessException;
import com.gs.fw.common.mithra.MithraObjectPortal;
import com.gs.fw.common.mithra.attribute.Attribute;
import com.gs.fw.common.mithra.bulkloader.BulkLoader;
import com.gs.fw.common.mithra.bulkloader.BulkLoaderException;
import com.gs.fw.common.mithra.finder.SqlQuery;
import com.gs.fw.common.mithra.tempobject.TupleTempContext;
import com.gs.fw.common.mithra.util.CommonDatabaseType;
import com.gs.fw.common.mithra.util.MithraFastList;
import com.gs.fw.common.mithra.util.MutableDouble;
import com.gs.fw.common.mithra.util.TableColumnInfo;
import com.gs.fw.common.mithra.util.Time;
import com.gs.fw.common.mithra.util.WildcardParser;
import javax.sql.DataSource;
import java.sql.*;
import java.util.List;
import java.util.TimeZone;
/**
 * Describes the SQL dialect and JDBC behaviour of one relational database vendor.
 * The Mithra runtime consults an implementation of this interface whenever it must
 * generate vendor specific SQL (select/delete syntax, temp table DDL, date/time
 * functions, aggregate expressions) or interpret vendor specific JDBC errors
 * (dead connections, unique index violations, retriable conditions).
 *
 * All members of a Java interface are implicitly public (and fields implicitly
 * static final), so redundant modifiers have been dropped.
 */
public interface DatabaseType extends CommonDatabaseType
{
    /** Bit flag passed to {@link #loopNestedExceptionForFlagAndDetermineState}: look for a retriable condition. */
    int RETRIABLE_FLAG = 1;
    /** Bit flag passed to {@link #loopNestedExceptionForFlagAndDetermineState}: look for a timeout condition. */
    int TIMED_OUT_FLAG = 2;

    /** Builds a full select statement for the given query; rowCount is a vendor specific row limit. */
    String getSelect(String columns, SqlQuery query, String groupBy, boolean isInTransaction, int rowCount);

    /** Builds a simple select statement, optionally with vendor specific lock hints. */
    String getSelect(String columns, String fromClause, String whereClause, boolean lock);

    String getSelectForAggregatedData(SqlQuery query, List aggregateAttributes, List groupByAttributes);

    String getDelete(SqlQuery query, int rowCount);

    /** Walks the nested exception chain looking for the given flag ({@link #RETRIABLE_FLAG} or {@link #TIMED_OUT_FLAG}). */
    boolean loopNestedExceptionForFlagAndDetermineState(int flag, SQLException e);

    boolean isKilledConnection(SQLException e);

    /** @return true if this vendor supports a "select top n" style row limit in the statement text. */
    boolean hasTopQuery();

    /** @return the maximum number of clauses allowed in a single where clause. */
    int getMaxClauses();

    boolean hasSetRowCount();

    /** @return the SQL that retrieves the last generated identity value for the given table. */
    String getLastIdentitySql(String tableName);

    String getIdentityTableCreationStatement();

    String getAllowInsertIntoIdentityStatementFor(String tableName, String onOff);

    /**
     * @param parametersPerStatement the number of bind parameters each statement uses.
     * @return the maximum number of statements that can be batched in one statement. a value less than zero means infinite.
     */
    int getMaxPreparedStatementBatchCount(int parametersPerStatement);

    void setInfiniteRowCount(Connection con);

    void setRowCount(Connection con, int rowcount) throws SQLException;

    String getFullyQualifiedTableName(String schema, String tableName);

    String getCreateSchema(String schemaName);

    /** @return true if this vendor supports inserting multiple rows with a single insert statement. */
    boolean hasMultiInsert();

    int getMultiInsertBatchSize(int columnsToInsert);

    boolean supportsMultiValueInClause();

    String createMultiInsertParametersStatement(String questionMarksForColumns, int numberOfStatements);

    /**
     * This method is deprecated. Use the similar method without a connection object.
     *
     * <p>Creates a {@link BulkLoader} for this <code>DatabaseType</code>.</p>
     * <p><code>BulkLoader</code>s should be created once per bulk loading operation.</p>
     *
     * @param connection A connection to the database.
     * @param user The user to log into the database (e.g. if the bulk loader has to start an external process).
     * @param password The password for the user.
     * @param hostName The host the database runs on.
     * @param port The port the database listens on.
     * @return A BulkLoader implementation for this database type.
     * @throws com.gs.fw.common.mithra.bulkloader.BulkLoaderException if there was a problem creating the bulk loader.
     */
    @Deprecated
    BulkLoader createBulkLoader(Connection connection, String user, String password, String hostName, int port) throws BulkLoaderException;

    /**
     * <p>Creates a {@link BulkLoader} for this <code>DatabaseType</code>.</p>
     * <p><code>BulkLoader</code>s should be created once per bulk loading operation.</p>
     *
     * @param user The user to log into the database (e.g. if the bulk loader has to start an external process).
     * @param password The password for the user.
     * @param hostName The host the database runs on.
     * @param port The port the database listens on.
     * @return A BulkLoader implementation for this database type.
     * @throws com.gs.fw.common.mithra.bulkloader.BulkLoaderException if there was a problem creating the bulk loader.
     */
    BulkLoader createBulkLoader(String user, String password, String hostName, int port) throws BulkLoaderException;

    /**
     * <p>Gets hold of the {@link TableColumnInfo} for a given table in a given schema.</p>
     *
     * @param connection The connection to use to fetch the table information from.
     * @param schema The name of the schema in which the table resides (may be <code>null</code>).
     * @param table The name of the table to get the metadata about.
     * @return The table metadata for the given table, or <code>null</code> if the table cannot be found.
     * @throws SQLException if there was a problem looking up the table metadata.
     */
    TableColumnInfo getTableColumnInfo(Connection connection, String schema, String table) throws SQLException;

    String getHostnameFromDataSource(DataSource ds);

    int getPortFromDataSource(DataSource ds);

    String getHostnameFromUrl(String url);

    int getPortFromUrl(String url);

    /** Reads a timestamp from the result set, interpreting it in the given time zone. */
    Timestamp getTimestampFromResultSet(ResultSet rs, int pos, TimeZone timeZone) throws SQLException;

    /** Applies vendor specific settings to a freshly obtained connection. */
    void configureConnection(Connection con) throws SQLException;

    String getTempDbSchemaName();

    boolean hasPerTableLock();

    String getPerTableLock(boolean lock);

    String getPerStatementLock(boolean lock);

    boolean hasBulkInsert();

    String getNullableColumnConstraintString();

    boolean isConnectionDead(SQLException e);

    boolean violatesUniqueIndex(SQLException e);

    boolean generateBetweenClauseForLargeInClause();

    //todo: consolidate this with appendNonSharedTempTableCreatePreamble
    String getTableNameForNonSharedTempTable(String nominalName);

    //todo: consolidate this with appendNonSharedTempTableCreatePreamble
    String getSqlPrefixForNonSharedTempTableCreation(String nominalTableName);

    String getSqlPostfixForNonSharedTempTableCreation();

    int getMaxSearchableArguments();

    int getMaxUnionCount();

    /** @return the vendor specific modulo expression for the given left hand expression and divisor. */
    String getModFunction(String fullyQualifiedLeftHandExpression, int divisor);

    String getCurrentSchema(Connection con) throws SQLException;

    void setSchemaOnConnection(Connection con, String schema) throws SQLException;

    String appendNonSharedTempTableCreatePreamble(StringBuilder sb, String tempTableName);

    String appendSharedTempTableCreatePreamble(StringBuilder sb, String nominalTableName);

    String getSqlPostfixForSharedTempTableCreation();

    boolean dropTableAllowedInTransaction();

    boolean createTempTableAllowedInTransaction();

    String getDeleteStatementForTestTables();

    void setTimestamp(PreparedStatement ps, int index, Timestamp timestamp, boolean forceAsString, TimeZone timeZone) throws SQLException;

    void setDate(PreparedStatement ps, int index, java.util.Date date, boolean forceAsString) throws SQLException;

    void setTime(PreparedStatement ps, int index, Time time) throws SQLException;

    void setTimeNull(PreparedStatement ps, int index) throws SQLException;

    int getNullableBooleanJavaSqlType();

    Time getTime(ResultSet rs, int position) throws SQLException;

    String convertDateToString(java.util.Date date);

    String convertDateOnlyToString(java.util.Date date);

    int getDefaultPrecision();

    int getMaxPrecision();

    /**
     * threshold of when updates should become insert into temp table + update original via join
     * @return -1 if updates should never use insert + join
     */
    int getUpdateViaInsertAndJoinThreshold();

    void setUpdateViaInsertAndJoinThreshold(int updateViaInsertAndJoinThreshold);

    /**
     * threshold of when purges/deletes should become insert into temp table + purge/delete original via join
     * @return -1 if purges/deletes should never use insert + join
     */
    int getDeleteViaInsertAndJoinThreshold();

    String createSubstringExpression(String stringExpression, int start, int end);

    int zGetTxLevel();

    int getUseTempTableThreshold();

    boolean indexRequiresSchemaName();

    boolean nonSharedTempTablesAreDroppedAutomatically();

    String createNonSharedIndexSql(String fullTableName, CharSequence indexColumns);

    String createSharedIndexSql(String fullTableName, CharSequence indexColumns);

    String getIndexableSqlDataTypeForBoolean();

    boolean useBigDecimalValuesInRangeOperations();

    String getConversionFunctionIntegerToString(String expression);

    String getConversionFunctionStringToInteger(String expression);

    boolean dropTempTableSyncAfterTransaction();

    double getSysLogPercentFull(Connection connection, String schemaName) throws SQLException;

    String getUpdateTableStatisticsSql(String tableName);

    boolean supportsSharedTempTable();

    boolean supportsAsKeywordForTableAliases();

    boolean truncateBeforeDroppingTempTable();

    /** Escapes the vendor's LIKE meta characters in the given parameter. */
    String escapeLikeMetaChars(String parameter);

    String getSqlLikeExpression(WildcardParser parser);

    String getSqlExpressionForDateYear(String columnName);

    String getSqlExpressionForDateMonth(String columnName);

    String getSqlExpressionForDateDayOfMonth(String columnName);

    String getSqlExpressionForTimestampYear(String columnName, int conversion, TimeZone dbTimeZone) throws MithraBusinessException;

    String getSqlExpressionForTimestampMonth(String columnName, int conversion, TimeZone dbTimeZone) throws MithraBusinessException;

    String getSqlExpressionForTimestampDayOfMonth(String columnName, int conversion, TimeZone dbTimeZone) throws MithraBusinessException;

    String getSqlExpressionForStandardDeviationSample(String columnName);

    String getSqlExpressionForStandardDeviationPop(String columnName);

    /** Post-processes a database computed sample standard deviation, if the vendor's SQL result needs correction. */
    void fixSampleStandardDeviation(MutableDouble obj, int count);

    /** Post-processes a database computed sample variance, if the vendor's SQL result needs correction. */
    void fixSampleVariance(MutableDouble obj, int count);

    String getSqlExpressionForVarianceSample(String columnName);

    String getSqlExpressionForVariancePop(String columnName);

    void appendTestTableCreationPostamble(StringBuilder sb);

    /** Appends the vendor specific SQL for a multi-row update performed via a join against a temp table. */
    void setMultiUpdateViaJoinQuery(
            Object source,
            List updates,
            Attribute[] prototypeArray,
            MithraFastList<Attribute> nullAttributes,
            int pkAttributeCount,
            TupleTempContext tempContext,
            MithraObjectPortal mithraObjectPortal,
            String fullyQualifiedTableNameGenericSource,
            StringBuilder builder);

    /** Appends the vendor specific SQL for a batch update performed via a join against a temp table. */
    void setBatchUpdateViaJoinQuery(
            Object source,
            List updates,
            Attribute[] prototypeArray,
            MithraFastList<Attribute> nullAttributes,
            int pkAttributeCount,
            TupleTempContext tempContext,
            MithraObjectPortal mithraObjectPortal,
            String fullyQualifiedTableNameGenericSource,
            StringBuilder builder);

    boolean canCombineOptimisticWithBatchUpdates();
}
| |
package org.vizzini.example.boardgame.qubic;
import java.beans.PropertyChangeListener;
import java.util.List;
import org.vizzini.core.game.Action;
import org.vizzini.core.game.Agent;
import org.vizzini.core.game.DefaultEnvironment;
import org.vizzini.core.game.Environment;
import org.vizzini.core.game.Position;
import org.vizzini.core.game.Team;
import org.vizzini.core.game.Token;
import org.vizzini.core.game.boardgame.BoardGameEnvironment;
/**
 * Provides an implementation of an environment for qubic. Most behaviour is
 * delegated to a {@link DefaultEnvironment}; this class adds the two playing
 * agents and a textual rendering of the 4x4x4 board.
 */
public final class QubicEnvironment implements BoardGameEnvironment
{
    /** Underlying environment to which most operations are delegated. */
    private final Environment delegate;

    /** The agent who moves first. */
    private Agent firstAgent;

    /** The agent who moves second. */
    private Agent secondAgent;

    /**
     * Construct this object.
     */
    public QubicEnvironment()
    {
        delegate = new DefaultEnvironment("QubicBoard", "A qubic board.");
    }

    @Override
    public void addDoActionListener(final PropertyChangeListener listener)
    {
        delegate.addDoActionListener(listener);
    }

    @Override
    public void addUndoActionListener(final PropertyChangeListener listener)
    {
        delegate.addUndoActionListener(listener);
    }

    @Override
    public void clear()
    {
        firstAgent = null;
        secondAgent = null;

        delegate.clear();
    }

    @Override
    public QubicEnvironment copy()
    {
        final QubicEnvironment result = new QubicEnvironment();

        for (final QubicPosition pos : QubicPosition.values())
        {
            final Token occupant = getTokenAt(pos);

            if (occupant != null)
            {
                result.placeToken(pos, occupant);
            }
        }

        return result;
    }

    @Override
    public void fireDoActionPropertyChange(final Action oldValue, final Action newValue)
    {
        delegate.fireDoActionPropertyChange(oldValue, newValue);
    }

    @Override
    public void fireUndoActionPropertyChange(final Action oldValue, final Action newValue)
    {
        delegate.fireUndoActionPropertyChange(oldValue, newValue);
    }

    @Override
    public String getDescription()
    {
        return delegate.getDescription();
    }

    @Override
    public Agent getFirstAgent()
    {
        return firstAgent;
    }

    @Override
    public String getName()
    {
        return delegate.getName();
    }

    @Override
    public Agent getSecondAgent()
    {
        return secondAgent;
    }

    @Override
    public QubicToken getTokenAt(final Position<?> position)
    {
        return (QubicToken)delegate.getTokenAt(position);
    }

    @Override
    public int getTokenCount()
    {
        return delegate.getTokenCount();
    }

    @Override
    public int getTokenCountFor(final Agent agent)
    {
        return delegate.getTokenCountFor(agent);
    }

    @Override
    public int getTokenCountFor(final Team team)
    {
        return delegate.getTokenCountFor(team);
    }

    /**
     * @param position QubicPosition.
     *
     * @return true if the given position is empty.
     */
    public boolean isEmpty(final QubicPosition position)
    {
        return getTokenAt(position) == null;
    }

    @Override
    public void placeInitialTokens(final List<Agent> agents)
    {
        firstAgent = agents.get(0);
        secondAgent = agents.get(1);
    }

    @Override
    public void placeToken(final Position<?> position, final Token token)
    {
        if (token == null)
        {
            throw new IllegalArgumentException("token is null");
        }

        if (token.getAgent() == null)
        {
            throw new RuntimeException("token has no agent!");
        }

        delegate.placeToken(position, token);
    }

    @Override
    public void removeDoActionListener(final PropertyChangeListener listener)
    {
        delegate.removeDoActionListener(listener);
    }

    @Override
    public void removeToken(final Position<?> position)
    {
        delegate.removeToken(position);
    }

    @Override
    public void removeUndoActionListener(final PropertyChangeListener listener)
    {
        delegate.removeUndoActionListener(listener);
    }

    @Override
    public String toString()
    {
        final StringBuilder sb = new StringBuilder();

        // One textual board section per level, labelled A, B, C, ...
        for (int level = 0; level < QubicPosition.MAX_Z; level++)
        {
            sb.append("Level ").append((char)('A' + level)).append("\n");
            sb.append("  a b c d\n");

            for (int row = 0; row < QubicPosition.MAX_Y; row++)
            {
                sb.append(row + 1).append(" |");

                for (int col = 0; col < QubicPosition.MAX_X; col++)
                {
                    final QubicPosition position = QubicPosition.findByCoordinates(col, row, level);
                    final Token token = getTokenAt(position);

                    if (token == null)
                    {
                        sb.append(" ");
                    }
                    else
                    {
                        sb.append(token.getName());
                    }

                    sb.append("|");
                }

                if (row < (QubicPosition.MAX_Y - 1))
                {
                    sb.append("\n");
                }
            }

            if (level < (QubicPosition.MAX_Z - 1))
            {
                sb.append("\n");
            }
        }

        return sb.toString();
    }
}
| |
/*
* Copyright (c) 2012, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.developerstudio.eclipse.artifact.jaxrs.ui.wizard;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.lang.reflect.InvocationTargetException;
import java.net.URL;
import java.util.Map;
import org.eclipse.core.resources.IFile;
import org.eclipse.core.resources.IFolder;
import org.eclipse.core.resources.IProject;
import org.eclipse.core.resources.IResource;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.FileLocator;
import org.eclipse.core.runtime.IPath;
import org.eclipse.core.runtime.IProgressMonitor;
import org.eclipse.core.runtime.NullProgressMonitor;
import org.eclipse.core.runtime.Path;
import org.osgi.framework.Bundle;
import org.wso2.developerstudio.eclipse.artifact.jaxrs.model.JaxrsProjectModel;
import org.wso2.developerstudio.eclipse.artifact.jaxrs.Activator;
import org.wso2.developerstudio.eclipse.artifact.jaxrs.utils.JaxUtil;
import org.wso2.developerstudio.eclipse.artifact.jaxrs.utils.JaxUtil.CxfServlet;
import org.wso2.developerstudio.eclipse.libraries.utils.LibraryUtils;
import org.wso2.developerstudio.eclipse.logging.core.IDeveloperStudioLog;
import org.wso2.developerstudio.eclipse.logging.core.Logger;
import org.wso2.developerstudio.eclipse.maven.util.MavenUtils;
import org.wso2.developerstudio.eclipse.platform.ui.wizard.AbstractWSO2ProjectCreationWizard;
import org.wso2.developerstudio.eclipse.utils.file.FileUtils;
import org.wso2.developerstudio.eclipse.utils.jdt.JavaUtils;
import org.wso2.developerstudio.eclipse.utils.project.ProjectUtils;
import org.eclipse.jdt.core.ICompilationUnit;
import org.eclipse.jdt.core.IJavaElement;
import org.eclipse.jdt.core.IJavaProject;
import org.eclipse.jdt.core.IPackageFragment;
import org.eclipse.jdt.core.IPackageFragmentRoot;
import org.eclipse.jdt.core.JavaCore;
import org.eclipse.jdt.launching.IVMInstall;
import org.eclipse.jdt.launching.JavaRuntime;
import org.eclipse.jdt.ui.JavaUI;
import org.eclipse.jface.dialogs.ProgressMonitorDialog;
import org.eclipse.jface.operation.IRunnableWithProgress;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.ui.IEditorPart;
import org.eclipse.ui.IWorkbench;
/**
 * Wizard that creates a new JAX-RS service project. Depending on the selected
 * option, the project either contains a freshly generated annotated service
 * class ("new.jaxrs") or server side code generated from an imported WADL file
 * via the external CXF wadl2java tool ("import.jaxrswadl").
 */
public class JaxrsCreationWizard extends AbstractWSO2ProjectCreationWizard{

    private static final String JAXRS_PROJECT_NATURE = "org.wso2.developerstudio.eclipse.jaxrs.project.nature";
    private static final String CXF_CLASSLOADING_DESCRIPTOR = "webapp-classloading.xml";
    private static IDeveloperStudioLog log=Logger.getLog(Activator.PLUGIN_ID);

    private JaxrsProjectModel model;
    IProject project;
    IFolder sourceFolder;
    IFolder webappFolder;
    IFolder webINF;
    IFolder resourceFolder;
    IJavaProject javaProject;
    IPackageFragmentRoot root;

    public JaxrsCreationWizard(){
        setProjectModel(new JaxrsProjectModel());
        setModel(getProjectModel());
        setWindowTitle("Create New JAX-RS Service");
        setDefaultPageImageDescriptor(Activator.getImageDescriptor("icons/JAX-RS-wizard.png"));
    }

    public void init(IWorkbench workbench, IStructuredSelection selection) {
        super.init(workbench, selection);
    }

    /**
     * Creates the project structure, web.xml/cxf-servlet.xml descriptors, POM,
     * natures and (optionally) the service class or WADL-generated code.
     *
     * @return always true; failures are logged rather than aborting the wizard.
     */
    public boolean performFinish() {
        try {
            ICompilationUnit serviceClass=null;
            project = createNewProject();
            sourceFolder =ProjectUtils.getWorkspaceFolder(project, "src", "main", "java");
            webappFolder = ProjectUtils.getWorkspaceFolder(project, "src", "main", "webapp");
            webINF = ProjectUtils.getWorkspaceFolder(project, "src", "main", "webapp","WEB-INF");
            resourceFolder = ProjectUtils.getWorkspaceFolder(project, "src", "main", "resources");
            javaProject = JavaCore.create(project);
            JavaUtils.addJavaSupportAndSourceFolder(project, sourceFolder);
            ProjectUtils.createFolder(webappFolder);
            ProjectUtils.createFolder(webINF);
            ProjectUtils.createFolder(resourceFolder);

            // Copy the CXF classloading descriptor bundled with this plugin into META-INF.
            IFolder metaINF = ProjectUtils.getWorkspaceFolder(project, "src", "main", "webapp","META-INF");
            Bundle bundle = Activator.getDefault().getBundle();
            IPath resourcePath=new Path("src"+File.separator+"main"+File.separator+"resources"+File.separator+CXF_CLASSLOADING_DESCRIPTOR);
            URL[] urls = FileLocator.findEntries(bundle, resourcePath);
            if(urls!=null && urls.length>0){
                File classLoadingFile = new File(FileLocator.toFileURL(urls[0]).getFile());
                FileUtils.copy(classLoadingFile, new File(metaINF.getLocation().toFile(),CXF_CLASSLOADING_DESCRIPTOR));
            }

            // Create the web.xml and cxf-servlet.xml deployment descriptors.
            IFile webXML = webINF.getFile("web.xml");
            IFile cxfServletXML = webINF.getFile("cxf-servlet.xml");
            webXML.create(new ByteArrayInputStream(JaxUtil.getCXFWebConfig().getBytes()), true, null);
            JaxUtil.CxfServlet cxfServlet = new JaxUtil.CxfServlet();
            cxfServletXML.create(new ByteArrayInputStream(cxfServlet.toString().getBytes()), true, null);
            project.refreshLocal(IResource.DEPTH_INFINITE,new NullProgressMonitor());

            // Re-read the descriptor so subsequent additions are serialized back correctly.
            cxfServlet = new JaxUtil.CxfServlet();
            cxfServlet.deserialize(cxfServletXML);

            if(getModel().getSelectedOption().equals("new.jaxrs")){
                // Option 1: generate an empty @Path-annotated service class and register it.
                serviceClass = createServiceClass(project, cxfServlet, model.getServiceClassPackage(),
                        model.getServiceClass());
                String content = cxfServlet.toString().replaceAll("xmlns=\"\"","");
                cxfServletXML.setContents(new ByteArrayInputStream(content.getBytes()), IResource.FORCE, null);
            }else if (getModel().getSelectedOption().equals("import.jaxrswadl")) {
                // Option 2: run CXF wadl2java against the imported WADL file.
                ProgressMonitorDialog progressMonitorDialog = new ProgressMonitorDialog(getShell());
                progressMonitorDialog.create();
                progressMonitorDialog.open();
                progressMonitorDialog.run(false, false, new CXFCodegenJob());
                project.refreshLocal(IResource.DEPTH_INFINITE,new NullProgressMonitor());
            }

            // Generate the POM (war packaging) and wire up project natures and libraries.
            File pomfile = project.getFile("pom.xml").getLocation().toFile();
            getModel().getMavenInfo().setPackageName("war");
            createPOM(pomfile);
            project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
            JavaUtils.addJarLibraryToProject(javaProject,
                    LibraryUtils.getDependencyPath(JaxUtil.getJsr311LibraryName()));
            ProjectUtils.addNatureToProject(project,
                    false,
                    JAXRS_PROJECT_NATURE);
            MavenUtils
                    .updateWithMavenEclipsePlugin(
                            pomfile,
                            new String[] { JDT_BUILD_COMMAND },
                            new String[] {
                                    JAXRS_PROJECT_NATURE,
                                    JDT_PROJECT_NATURE });
            getModel().addToWorkingSet(project);
            project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
            refreshDistProjects();

            // Best effort: open the generated service class in a Java editor.
            if (serviceClass != null) {
                serviceClass.getJavaProject().getProject()
                        .refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
                try {
                    IEditorPart javaEditor = JavaUI.openInEditor(serviceClass);
                    JavaUI.revealInEditor(javaEditor, (IJavaElement) serviceClass);
                } catch (Exception e) { /* ignore - opening the editor is not essential */
                }
            }
        } catch (CoreException e) {
            log.error("CoreException has occurred", e);
        } catch (IOException e) {
            log.error("I/O error has occurred", e);
        } catch (Exception e) {
            log.error("An unexpected error has occurred", e);
        }
        return true;
    }

    /**
     * Creates the initial JAX-RS service class and registers it as a server in the
     * cxf-servlet descriptor. The service address is derived from the class name by
     * splitting CamelCase with underscores and lower-casing (e.g. MyService -> /my_service).
     *
     * @param project the project the class is created in.
     * @param cxfServlet descriptor to which the new server entry is added.
     * @param packageName package for the class; an empty string means the default package.
     * @param className simple name of the service class.
     * @return the created compilation unit.
     * @throws CoreException if the package or compilation unit cannot be created.
     */
    private ICompilationUnit createServiceClass(IProject project, CxfServlet cxfServlet,
            String packageName, String className) throws CoreException {
        IJavaProject javaProject = JavaCore.create(project);
        IPackageFragmentRoot root = javaProject.getPackageFragmentRoot(sourceFolder);
        IPackageFragment sourcePackage = root.createPackageFragment(packageName, false, null);
        StringBuffer buffer = new StringBuffer();
        if (!packageName.equalsIgnoreCase("")) {
            buffer.append("package " + packageName + ";\n");
            buffer.append("\n");
        }
        buffer.append("import javax.ws.rs.*;\n");
        buffer.append("\n");
        buffer.append("@Path(\"/\")\n" + "public class "
                + className + " {\n\n");
        buffer.append("\n}");
        ICompilationUnit cu = sourcePackage.createCompilationUnit(className + ".java",
                buffer.toString(), false, null);
        String address = "/" + cu.getTypes()[0].getElementName();
        address = address.replaceAll("([A-Z])", "_$1"); // split CamelCase
        address = address.replaceAll("^/_", "/");
        address = address.toLowerCase();
        String beanClass = cu.getTypes()[0].getFullyQualifiedName();
        cxfServlet.addServer(cu.getTypes()[0].getElementName(), null, address, beanClass);
        return cu;
    }

    /**
     * Long-running job that invokes the external CXF wadl2java tool to generate
     * server side code from the model's WADL file into the source folder.
     */
    private class CXFCodegenJob implements IRunnableWithProgress {
        public void run(IProgressMonitor monitor)
                throws InvocationTargetException, InterruptedException {
            String operationText="Generating server side code";
            monitor.beginTask(operationText, 100);
            monitor.subTask("Processing configuration...");
            monitor.worked(10);
            try {
                monitor.subTask("Generating code...");
                IVMInstall vmInstall= JavaRuntime.getDefaultVMInstall();
                String s = null;
                String shell =null;
                String wadl2java = null;
                String sourcePkg = model.getSourcePackage();
                String sourceDir = sourceFolder.getLocation().toFile().toString();
                String wadlFile = model.getImportFile().getAbsolutePath();
                String os = System.getProperty("os.name").toLowerCase();
                ProcessBuilder pb=null;
                // Pick the shell and script name per platform; add -p only when a package was given.
                if(os.indexOf("win") >= 0){
                    shell = "cmd.exe";
                    wadl2java = "wadl2java.bat";
                    if(sourcePkg!=null && sourcePkg.trim().length()>0){
                        pb = new ProcessBuilder(shell, "/c", wadl2java, "-d",sourceDir , "-p",sourcePkg, "-impl", "-interface" ,wadlFile);
                    } else {
                        pb = new ProcessBuilder(shell, "/c", wadl2java, "-d",sourceDir ,"-impl", "-interface" ,wadlFile);
                    }
                } else {
                    shell = "sh";
                    wadl2java = "wadl2java";
                    if(sourcePkg!=null && sourcePkg.trim().length()>0){
                        pb = new ProcessBuilder(shell, wadl2java, "-d",sourceDir , "-p",sourcePkg, "-impl", "-interface" ,wadlFile);
                    } else {
                        pb = new ProcessBuilder(shell, wadl2java, "-d",sourceDir ,"-impl", "-interface" ,wadlFile);
                    }
                }
                Map<String, String> env = pb.environment();
                env.put("CXF_HOME", model.getCXFRuntime());
                env.put("JAVA_HOME", vmInstall.getInstallLocation().toString());
                pb.directory(new File(model.getCXFRuntime()+ File.separator + "bin" ));
                Process p = pb.start();

                // Fix: the original closed the streams only on the success path, leaking
                // them whenever an exception was thrown mid-read. Closing the BufferedReaders
                // in a finally block also closes the wrapped readers and process streams.
                BufferedReader stdInput = null;
                BufferedReader stdError = null;
                try {
                    stdInput = new BufferedReader(new InputStreamReader(p.getInputStream()));
                    stdError = new BufferedReader(new InputStreamReader(p.getErrorStream()));
                    while ((s = stdInput.readLine()) != null) {
                        monitor.subTask(s);
                    }
                    while ((s = stdError.readLine()) != null) {
                        log.error(s);
                    }
                } finally {
                    if (stdInput != null) {
                        try { stdInput.close(); } catch (IOException ignored) { /* best effort */ }
                    }
                    if (stdError != null) {
                        try { stdError.close(); } catch (IOException ignored) { /* best effort */ }
                    }
                }

                project.refreshLocal(IResource.DEPTH_INFINITE, new NullProgressMonitor());
                monitor.worked(75);
                monitor.worked(10);
                monitor.subTask("Refreshing project...");
                monitor.worked(5);
                monitor.done();
            } catch (Exception e) {
                throw new InvocationTargetException(e);
            }
        }
    }

    public void setProjectModel(JaxrsProjectModel model) {
        this.model = model;
    }

    public JaxrsProjectModel getProjectModel() {
        return model;
    }

    public IResource getCreatedResource() {
        return null;
    }
}
| |
/*************************GO-LICENSE-START*********************************
* Copyright 2015 ThoughtWorks, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*************************GO-LICENSE-END***********************************/
package com.thoughtworks.cruise;
import com.thoughtworks.cruise.config.ConfigModifier;
import com.thoughtworks.cruise.config.ModifyCommand;
import com.thoughtworks.cruise.config.RetryOnConflictModifier;
import com.thoughtworks.cruise.context.Configuration;
import com.thoughtworks.cruise.page.OnAgentsPage;
import com.thoughtworks.cruise.preconditions.AgentLauncher;
import com.thoughtworks.cruise.state.RepositoryState;
import com.thoughtworks.cruise.state.ScenarioState;
import com.thoughtworks.cruise.util.FileUtil;
import com.thoughtworks.cruise.utils.configfile.CruiseConfigDom;
import org.apache.commons.io.FileUtils;
import org.dom4j.Element;
import java.io.File;
import java.util.Calendar;
import java.util.GregorianCalendar;
import java.util.List;
import java.util.UUID;
public class ConfigureCruiseBy {
private static final String ORIGINAL_LDAP_URI = "original_ldap_uri";
private final ScenarioState scenarioState;
private Configuration configuration;
private final OnAgentsPage agentsPage;
private static final String CONTANT_SPEC = "* * * * * ? *";
private final CruiseAgents createdAgents;
private final RepositoryState repositoryState;
private ConfigModifier modifier;
private String rememberedUrl;
private static String TMP_FILE_SUFFIX = UUID.randomUUID().toString();
public ConfigureCruiseBy(Configuration configuration, ScenarioState scenarioState, OnAgentsPage agentsPage, CruiseAgents createdAgents, RepositoryState repositoryState) {
this.scenarioState = scenarioState;
this.configuration = configuration;
this.agentsPage = agentsPage;
this.createdAgents = createdAgents;
this.repositoryState = repositoryState;
modifier = new RetryOnConflictModifier(configuration);
}
public void changingApprovalTypeForPipelineStageTo(final String pipelineName, final String stageName, final String triggerType) {
final String dynName = scenarioState.pipelineNamed(pipelineName);
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.setApprovalTypeForStage(dynName, stageName, triggerType);
}
});
}
public void removingEnvironments() {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeEnvironments();
}
});
}
public void addingResourceToAllAgents(final String resource) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addResourceToAllAgents(resource);
}
});
}
public void denyFirstAgent() {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.denyOneAgent();
}
});
}
public void undenyFirstAgent() {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.undenyOneAgent();
}
});
}
private CruiseConfigDom getDom() {
return configuration.provideDom();
}
private void writeToConfigFileOnFileSystem(CruiseConfigDom dom) {
configuration.setDomOnFileSystem(dom);
}
@com.thoughtworks.gauge.Step("Assigning <numberOfAgents> agents with resource <resource> to environment <environment>")
public void assigningAgentsWithResourceToEnvironment(final int numberOfAgents, final String resource, final String environment) throws Exception {
agentsPage.open();
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
List<String> idleAgentUuids = agentsPage.idleAgentUuids();
ConfigureCruiseBy.this.scenarioState.addAgentsByEnvironment(
dom.addAgentsToEnvironment(numberOfAgents, resource, environment, idleAgentUuids), environment);
}
});
}
@com.thoughtworks.gauge.Step("Adding resource <resource> to the job <job>")
public void addingResourceToTheJob(final String resource, final String job) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addResourceToJob(resource, job);
}
});
}
public void setRunOnAllAgentsForJobTo(final String pipelineName, final String jobName, final boolean runOnAllAgents) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.setRunOnAllAgentsForJobTo(pipelineName, jobName, runOnAllAgents);
}
});
}
public void setRunInstanceCountForJobTo(final String pipelineName, final String jobName, final int runInstanceCount) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.setRunInstanceCountForJobTo(pipelineName, jobName, runInstanceCount);
}
});
}
@com.thoughtworks.gauge.Step("Using timer with spec <spec>")
public void usingTimerWithSpec(final String spec) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.setTimerWithSpec(currentPipeline(), spec);
}
});
}
@com.thoughtworks.gauge.Step("Using timer with invalid spec <spec>")
public void usingTimerWithInvalidSpec(final String spec) throws Exception {
CruiseConfigDom dom = configuration.provideDom();
dom.setTimerWithSpec(currentPipeline(), spec);
writeToConfigFileOnFileSystem(dom);
}
@com.thoughtworks.gauge.Step("Adding pipeline <pipelineName> to <environmentName> environment")
public void addingPipelineToEnvironment(final String pipelineName, final String environmentName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addPipeLineToEnvironment(scenarioState.pipelineNamed(pipelineName), environmentName);
}
});
}
public void usingConstantlyBuildingTimer() throws Exception {
usingTimerWithSpec(CONTANT_SPEC);
}
public void configureTimerToTriggerPipelineMinutesFromNow(int deltaMinutes) throws Exception {
Calendar calendar = GregorianCalendar.getInstance();
int currentMinutes = calendar.get(Calendar.MINUTE);
int minute = (currentMinutes + deltaMinutes) % 60;
usingTimerWithSpec(String.format("0 %s * * * ?", minute));
}
protected String currentPipeline() {
return scenarioState.currentPipeline();
}
@com.thoughtworks.gauge.Step("Assigning agent <agentIndex> to environment <environment>")
public void assigningAgentToEnvironment(final Integer agentIndex, final String environment) {
final AgentLauncher agent = createdAgents.get(agentIndex);
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addAgentToEnvironment(agent.getUuid(), environment);
}
});
}
@com.thoughtworks.gauge.Step("Assigning <numberOfAgents> agents to environment <environment>")
public void assigningAgentsToEnvironment(final Integer numberOfAgents, final String environment) {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
List<String> idleAgentUuids = agentsPage.idleAgentUuids();
scenarioState.addAgentsByEnvironment(dom.addAgentsToEnvironment(numberOfAgents, environment, idleAgentUuids), environment);
}
});
}
public void removeTimer() {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeTimer(currentPipeline());
}
});
}
@com.thoughtworks.gauge.Step("Setting first stage to auto approval")
public void settingFirstStageToAutoApproval() throws Exception {
settingFirstStageOfToAutoApproval(scenarioState.currentPipeline());
}
@com.thoughtworks.gauge.Step("Add environment variable <name> with value <value> to environment <env>")
public void addEnvironmentVariableWithValueToEnvironment(final String name, final String value, final String env) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addVariableToEnvironment(name, value, env);
}
});
}
@com.thoughtworks.gauge.Step("Add environment variable <name> with value <value> to job <env>")
public void addEnvironmentVariableWithValueToJob(final String name, final String value, final String env) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addVariableToJob(name, value, env);
}
});
}
@com.thoughtworks.gauge.Step("Add environment variable <name> with value <value> to pipeline <pipeline>")
public void addEnvironmentVariableWithValueToPipeline(final String name, final String value, final String pipeline) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addVariableToPipeline(name, value, scenarioState.pipelineNamed(pipeline));
}
});
}
@com.thoughtworks.gauge.Step("Add environment variable <name> with value <value> to stage <stage>")
public void addEnvironmentVariableWithValueToStage(final String name, final String value, final String stage) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addVariableToStage(name, value, stage);
}
});
}
@com.thoughtworks.gauge.Step("Remove pipeline <pipelineName> from environment")
public void removePipelineFromEnvironment(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removePipelineFromEnvironment(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Setting first stage of <pipelineName> to auto approval")
public void settingFirstStageOfToAutoApproval(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.setFirstStageOfPipelineToAutoApproval(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Removing <materialName> material from <pipelineName>")
public void removingMaterialFrom(final String materialName, final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeMaterialFromPipeline(materialName, scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Changing destination of <materialName> material of <pipelineName> to <newDestination>")
public void changingDestinationOfMaterialOfTo(final String materialName, final String pipelineName, final String newDestination)
throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.changeDestinationOfMaterial(materialName, scenarioState.pipelineNamed(pipelineName), newDestination);
}
});
}
public void changingUrlOfMaterialOfTo(final String materialName, final String pipelineName, final String newUrl) throws Exception {
    // Repoints the material's repository URL.
    final ModifyCommand changeUrl = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.changeUrlOfMaterial(materialName, scenarioState.pipelineNamed(pipelineName), newUrl);
        }
    };
    modifier.modifyConfig(changeUrl);
}
public void removingLicense() throws Exception {
    // Deletes the license element and writes the config straight to disk.
    // NOTE(review): unlike the other mutators here, this bypasses modifier.modifyConfig()
    // and uses configuration.provideDom() + a direct file write — presumably so the change
    // lands without whatever bookkeeping the modifier performs; confirm that is intended.
    CruiseConfigDom dom = configuration.provideDom();
    dom.removeLicense();
    writeToConfigFileOnFileSystem(dom);
}
@com.thoughtworks.gauge.Step("Making pipeline <pipelineName> auto update")
public void makingPipelineAutoUpdate(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.pipelineAutoUpdate(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Enable auto update for pipeline <pipelineName>")
public void enableAutoUpdateForPipeline(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.enableAutoUpdate(pipelineName);
}
});
}
@com.thoughtworks.gauge.Step("Removing externals for <materialName> of pipeline <pipelineName>")
public void removingExternalsForOfPipeline(final String materialName, final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeExternalsFor(scenarioState.pipelineNamed(pipelineName), materialName);
}
});
}
@com.thoughtworks.gauge.Step("Adding test artifact with source <source> and destination <dest> to job <job>")
public void addingTestArtifactWithSourceAndDestinationToJob(final String source, final String dest, final String job) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addTestArtifactToJob(job, source, dest);
}
});
}
@com.thoughtworks.gauge.Step("Remove lock from pipeline <pipelineName>")
public void removeLockFromPipeline(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeLockFromPipeline(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Making <user> an admin user")
public void makingAnAdminUser(final String user) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addAdminUser(user);
}
});
}
@com.thoughtworks.gauge.Step("Removing <user> as an admin")
public void removingAsAnAdmin(final String user) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeAdminUser(user);
}
});
}
@com.thoughtworks.gauge.Step("Make pipeline <pipelineName> use non existant material")
public void makePipelineUseNonExistantMaterial(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.makeMaterialNonExistantMaterial(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Make pipeline <pipelineName> use all non existant material")
public void makePipelineUseAllNonExistantMaterial(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.makeAllMaterialsNonExistantMAterials(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Changing the artifacts location to <artifactLocation>")
public void changingTheArtifactsLocationTo(final String artifactLocation) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.changeArtifactLocation(artifactLocation);
}
});
}
@com.thoughtworks.gauge.Step("Removing pipeline <pipelineName>")
public void removingPipeline(final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removePipeline(scenarioState.pipelineNamed(pipelineName));
}
});
}
@com.thoughtworks.gauge.Step("Making pipeline <actualPipeline> not depend on <upstreamDependency>")
public void makingPipelineNotDependOn(final String actualPipeline, final String upstreamDependency) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeDependency(scenarioState.pipelineNamed(actualPipeline), scenarioState.pipelineNamed(upstreamDependency));
}
});
}
@com.thoughtworks.gauge.Step("Make cruise config file invalid")
public void makeCruiseConfigFileInvalid() throws Exception {
String path = "/config/invalid-cruise-config.xml";
String configXmlContents = FileUtil.readToEnd(getClass().getResourceAsStream(path));
System.out.println(configXmlContents);
configuration.setConfigOnFileSystem(configXmlContents);
}
@com.thoughtworks.gauge.Step("Save config by name <name>")
public void saveConfigByName(String name) throws Exception {
scenarioState.storeAsValidConfigNamed(name, getDom());
}
@com.thoughtworks.gauge.Step("Restore config by name <name>")
public void restoreConfigByName(String name) throws Exception {
CruiseConfigDom lastValidDom = scenarioState.getValidConfigNamed(name);
writeToConfigFileOnFileSystem(lastValidDom);
}
public void changeTheScmForPipelineFromTo(final String pipelineName, final String oldScm, final String newScm) throws Exception {
    // Swaps the pipeline's SCM type and registers the new material element with the
    // repository state so the backing repository gets created.
    final ModifyCommand swapScm = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            Element newMaterial = config.changeScm(scenarioState.pipelineNamed(pipelineName), oldScm, newScm);
            repositoryState.createMaterial(newMaterial);
        }
    };
    modifier.modifyConfig(swapScm);
}
// Remembers the current LDAP URI (so restoreLdapUri can put it back) and then switches
// the server to the given URI.
public void changingLdapUriTo(String ldapUri) throws Exception {
    configuration.rememberValueAs("/cruise/server/security/ldap/@uri", ORIGINAL_LDAP_URI);
    setLdapUri(ldapUri);
}
private void setLdapUri(final String ldapUri) {
    // Applies the LDAP URI change through the standard config-modification path.
    final ModifyCommand changeLdap = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.changeLdapUriTo(ldapUri);
        }
    };
    modifier.modifyConfig(changeLdap);
}
public void restoreLdapUri() throws Exception {
    // Puts back the LDAP URI previously remembered by changingLdapUriTo().
    setLdapUri(configuration.getRememberedValue(ORIGINAL_LDAP_URI));
}
@com.thoughtworks.gauge.Step("Moving password file to tmp")
public void movingPasswordFileToTmp() throws Exception {
String path = passwordFilePath();
String tmpPath = tmpPath(path);
FileUtils.deleteQuietly(new File(tmpPath));
FileUtils.moveFile(new File(path), new File(tmpPath));
}
// Reads the password-file path out of the current config DOM.
private String passwordFilePath() {
    return configuration.provideDom().getNode("/cruise/server/security/passwordFile/@path").getText();
}
public void restoreThePasswordFile() throws Exception {
    // Moves the parked password file back to its configured location.
    final String configuredPath = passwordFilePath();
    FileUtils.moveFile(new File(tmpPath(configuredPath)), new File(configuredPath));
}
// Derives the "parked" sibling path for a file by appending the tmp suffix.
private String tmpPath(String path) {
    return path + TMP_FILE_SUFFIX;
}
@com.thoughtworks.gauge.Step("Allow only known users to login")
public void allowOnlyKnownUsersToLogin() throws Exception {
setAllowOnlyKnownUsersToLogin(true);
}
@com.thoughtworks.gauge.Step("Allow unknown users to login")
public void allowUnknownUsersToLogin() throws Exception {
setAllowOnlyKnownUsersToLogin(false);
}
private void setAllowOnlyKnownUsersToLogin(final boolean allow) {
    // Flips the security flag through the standard config-modification path.
    final ModifyCommand setFlag = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.setAllowOnlyKnownUsersToLogin(allow);
        }
    };
    modifier.modifyConfig(setFlag);
}
@com.thoughtworks.gauge.Step("Add site uRL")
public void addSiteURL() throws Exception {
addingAsSiteURL(Urls.urlFor(""));
}
@com.thoughtworks.gauge.Step("Add secure site uRL")
public void addSecureSiteURL() throws Exception {
addingAsSecureSiteURL(Urls.sslUrlFor(""));
}
@com.thoughtworks.gauge.Step("Adding <url> as site uRL")
public void addingAsSiteURL(final String url) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addSiteUrl(url);
}
});
}
@com.thoughtworks.gauge.Step("Adding <url> as secure site uRL")
public void addingAsSecureSiteURL(final String url) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addSecureSiteUrl(url);
}
});
}
public void removingSiteUrl() throws Exception {
    // Clears the server's site URL.
    final ModifyCommand clearSiteUrl = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.removeSiteUrl();
        }
    };
    modifier.modifyConfig(clearSiteUrl);
}
public void removingSecureSiteUrl() throws Exception {
    // Clears the server's secure (HTTPS) site URL.
    final ModifyCommand clearSecureSiteUrl = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.removeSecureSiteUrl();
        }
    };
    modifier.modifyConfig(clearSecureSiteUrl);
}
@com.thoughtworks.gauge.Step("Adding <user> as a <permission> user for group <groupName>")
public void addingAsAUserForGroup(final String user, final String permission, final String groupName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
if ("view".equals(permission)) {
dom.addUserViewAuthorizationForGroup(user, groupName);
} else if ("operate".equals(permission)) {
dom.addUserOperateAuthorizationForGroup(user, groupName);
}
}
});
}
@com.thoughtworks.gauge.Step("Changing <nodeName> attribute <attributeName> to <link> for pipeline <pipelineName>")
public void changingAttributeToForPipeline(final String nodeName, final String attributeName, final String link, final String pipelineName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.modifyPipeline(scenarioState.pipelineNamed(pipelineName), nodeName, attributeName, link);
}
});
}
@com.thoughtworks.gauge.Step("Adding <user> as a <permission> role for group <groupName>")
public void addingAsARoleForGroup(final String user, final String permission, final String groupName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
if ("view".equals(permission)) {
dom.addRoleViewAuthorizationForGroup(user, groupName);
} else if ("operate".equals(permission)) {
dom.addRoleOperateAuthorizationForGroup(user, groupName);
}
}
});
}
@com.thoughtworks.gauge.Step("Adding <username> as a group admin of <groupName>")
public void addingAsAGroupAdminOf(final String username, final String groupName) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addGroupAdmin(groupName, username);
}
});
}
@com.thoughtworks.gauge.Step("Remove job <jobName> from stage <stageName> in pipeline <pipelineName>")
public void removeJobFromStageInPipeline(final String jobName, final String stageName, String pipelineName) throws Exception {
final String runtimeName = scenarioState.pipelineNamed(pipelineName);
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.removeJob(runtimeName, stageName, jobName);
}
});
}
public void removingTemplates() throws Exception {
    // Deletes all pipeline templates from the config.
    final ModifyCommand dropTemplates = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.removeTemplates();
        }
    };
    modifier.modifyConfig(dropTemplates);
}
@com.thoughtworks.gauge.Step("Remember url for material <materialName> of pipeline <pipelineName>")
public void rememberUrlForMaterialOfPipeline(String materialName, String pipelineName) throws Exception {
this.rememberedUrl = configuration.provideDom().urlForMaterial(scenarioState.pipelineNamed(pipelineName), materialName);
}
@com.thoughtworks.gauge.Step("Add parameter <parameterName> to pipeline <pipelineName>")
public void addParameterToPipeline(final String parameterName, String pipelineName) throws Exception {
final String runtimeName = scenarioState.pipelineNamed(pipelineName);
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.addParameter(runtimeName, parameterName, rememberedUrl);
}
});
}
@com.thoughtworks.gauge.Step("Add security with password file and users <users> as admin")
public void addSecurityWithPasswordFileAndUsersAsAdmin(String users) throws Exception {
File passwordFile = configuration.copyPasswordFile(getClass().getResource("/config/password.properties"));
addSecurityTagWithPasswordfileAndAdmin(passwordFile.getAbsolutePath(), users);
}
private void addSecurityTagWithPasswordfileAndAdmin(final String passwordFilePath, final String users) {
    // Writes the security element (password file + admin users) into the config.
    final ModifyCommand addSecurity = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.addSecurityWithPasswordFile(passwordFilePath, users);
        }
    };
    modifier.modifyConfig(addSecurity);
}
@com.thoughtworks.gauge.Step("Add security with password file only")
public void addSecurityWithPasswordFileOnly() throws Exception {
File passwordFile = configuration.copyPasswordFile(getClass().getResource("/config/password.properties"));
addSecurityTagWithPasswordfileOnly(passwordFile.getAbsolutePath());
}
private void addSecurityTagWithPasswordfileOnly(final String passwordFilePath) {
    // Writes a security element referencing only the password file (no admins).
    final ModifyCommand addSecurity = new ModifyCommand() {
        @Override
        public void modify(CruiseConfigDom config) {
            config.addSecurityWithPasswordFileOnly(passwordFilePath);
        }
    };
    modifier.modifyConfig(addSecurity);
}
@com.thoughtworks.gauge.Step("Update value of key <key> to <value> for repo with id <repoId> - Configure Cruise By")
public void updateValueOfKeyToForRepoWithId(final String key, final String value, final String repoId) throws Exception {
modifier.modifyConfig(new ModifyCommand() {
@Override
public void modify(CruiseConfigDom dom) {
dom.updateValueOfKeyToForRepoWithId(key, value, repoId);
}
});
}
}
| |
package mil.dds.anet;
import com.codahale.metrics.MetricRegistry;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.inject.Injector;
import de.ahus1.keycloak.dropwizard.AbstractKeycloakAuthenticator;
import de.ahus1.keycloak.dropwizard.KeycloakBundle;
import de.ahus1.keycloak.dropwizard.KeycloakConfiguration;
import freemarker.template.Configuration;
import freemarker.template.Version;
import io.dropwizard.Application;
import io.dropwizard.auth.AuthValueFactoryProvider;
import io.dropwizard.auth.Authorizer;
import io.dropwizard.bundles.assets.ConfiguredAssetsBundle;
import io.dropwizard.cli.ServerCommand;
import io.dropwizard.configuration.EnvironmentVariableSubstitutor;
import io.dropwizard.configuration.SubstitutingSourceProvider;
import io.dropwizard.db.DataSourceFactory;
import io.dropwizard.migrations.MigrationsBundle;
import io.dropwizard.setup.Bootstrap;
import io.dropwizard.setup.Environment;
import io.dropwizard.views.ViewBundle;
import java.lang.invoke.MethodHandles;
import java.security.Principal;
import java.util.EnumSet;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import javax.servlet.DispatcherType;
import javax.servlet.http.HttpServletRequest;
import mil.dds.anet.beans.Person;
import mil.dds.anet.beans.Person.Role;
import mil.dds.anet.config.AnetConfiguration;
import mil.dds.anet.config.AnetKeycloakConfiguration;
import mil.dds.anet.database.PersonDao;
import mil.dds.anet.database.StatementLogger;
import mil.dds.anet.resources.AdminResource;
import mil.dds.anet.resources.ApprovalStepResource;
import mil.dds.anet.resources.AuthorizationGroupResource;
import mil.dds.anet.resources.GraphQlResource;
import mil.dds.anet.resources.HomeResource;
import mil.dds.anet.resources.LocationResource;
import mil.dds.anet.resources.LoggingResource;
import mil.dds.anet.resources.NoteResource;
import mil.dds.anet.resources.OrganizationResource;
import mil.dds.anet.resources.PersonResource;
import mil.dds.anet.resources.PositionResource;
import mil.dds.anet.resources.ReportResource;
import mil.dds.anet.resources.SavedSearchResource;
import mil.dds.anet.resources.SubscriptionResource;
import mil.dds.anet.resources.SubscriptionUpdateResource;
import mil.dds.anet.resources.TaskResource;
import mil.dds.anet.threads.AccountDeactivationWorker;
import mil.dds.anet.threads.AnetEmailWorker;
import mil.dds.anet.threads.FutureEngagementWorker;
import mil.dds.anet.threads.MaterializedViewRefreshWorker;
import mil.dds.anet.threads.PendingAssessmentsNotificationWorker;
import mil.dds.anet.threads.ReportApprovalWorker;
import mil.dds.anet.threads.ReportPublicationWorker;
import mil.dds.anet.utils.DaoUtils;
import mil.dds.anet.utils.HttpsRedirectFilter;
import mil.dds.anet.utils.Utils;
import mil.dds.anet.views.ViewResponseFilter;
import org.eclipse.jetty.server.session.SessionHandler;
import org.eclipse.jetty.servlet.FilterHolder;
import org.eclipse.jetty.servlet.ServletContextHandler;
import org.keycloak.KeycloakSecurityContext;
import org.keycloak.representations.AccessToken;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import ru.vyarus.dropwizard.guice.GuiceBundle;
import ru.vyarus.dropwizard.guice.injector.lookup.InjectorLookup;
import ru.vyarus.guicey.jdbi3.JdbiBundle;
/**
 * Dropwizard {@link Application} entry point for ANET. Wires up configuration sources,
 * database migrations, static assets, Freemarker views, Keycloak authentication, Guice,
 * the background worker schedule and all HTTP/GraphQL resources.
 */
public class AnetApplication extends Application<AnetConfiguration> {

  private static final Logger logger =
      LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());

  // Freemarker version pinned for all template rendering in the app.
  public static final Version FREEMARKER_VERSION = Configuration.VERSION_2_3_30;

  // Captured from the bootstrap in initialize() so run() can hand it to the engine
  // and the GraphQL resource.
  private MetricRegistry metricRegistry;

  /** Process entry point; delegates straight to the Dropwizard runner. */
  public static void main(String[] args) throws Exception {
    new AnetApplication().run(args);
  }

  @Override
  public String getName() {
    return "anet";
  }

  /** Registers the default server command plus ANET's own check command. */
  @Override
  protected void addDefaultCommands(Bootstrap<AnetConfiguration> bootstrap) {
    bootstrap.addCommand(new ServerCommand<>(this));
    bootstrap.addCommand(new AnetCheckCommand(this));
  }

  /**
   * Registers configuration sources, CLI commands and bundles (migrations, assets,
   * views, Keycloak auth, Guice). Bundle registration order is significant to Dropwizard.
   */
  @Override
  public void initialize(Bootstrap<AnetConfiguration> bootstrap) {
    // Allow the anet.yml configuration to pull from Environment Variables.
    bootstrap.setConfigurationSourceProvider(new SubstitutingSourceProvider(
        bootstrap.getConfigurationSourceProvider(), new EnvironmentVariableSubstitutor(false)));
    // Add the waitForDB command
    bootstrap.addCommand(new WaitForDbCommand());
    // Add the db migration commands
    bootstrap.addBundle(new MigrationsBundle<AnetConfiguration>() {
      @Override
      public DataSourceFactory getDataSourceFactory(AnetConfiguration configuration) {
        logger.info("datasource url: {}", configuration.getDataSourceFactory().getUrl());
        return configuration.getDataSourceFactory();
      }
    });
    // Add the init command
    bootstrap.addCommand(new InitializationCommand(this));
    // Add the database script command
    bootstrap.addCommand(new DatabaseScriptCommand());
    // Add the database maintenance command
    bootstrap.addCommand(new MaintenanceCommand(this));
    // Serve assets on /assets
    bootstrap.addBundle(new ConfiguredAssetsBundle(ImmutableMap.<String, String>builder()
        .put("/assets/", "/assets/").put("/imagery/", "/imagery/").put("/data/", "/data/").build(),
        "index.html"));
    // Use Freemarker to handle rendering TEXT_HTML views.
    bootstrap.addBundle(new ViewBundle<AnetConfiguration>() {
      @Override
      public Map<String, Map<String, String>> getViewConfiguration(
          AnetConfiguration configuration) {
        return configuration.getViews();
      }
    });
    // Add Dropwizard-Keycloak
    bootstrap.addBundle(new KeycloakBundle<AnetConfiguration>() {
      @Override
      protected AnetKeycloakConfiguration getKeycloakConfiguration(
          AnetConfiguration configuration) {
        return configuration.getKeycloakConfiguration();
      }

      @Override
      protected Class<? extends Principal> getUserClass() {
        return Person.class;
      }

      // Maps a validated Keycloak token to an ANET Person, creating one on first login.
      @Override
      protected AbstractKeycloakAuthenticator<Person> createAuthenticator(
          KeycloakConfiguration configuration) {
        return new AbstractKeycloakAuthenticator<Person>(configuration) {
          @Override
          protected Person prepareAuthentication(KeycloakSecurityContext securityContext,
              HttpServletRequest request, KeycloakConfiguration keycloakConfiguration) {
            final PersonDao dao = AnetObjectEngine.getInstance().getPersonDao();
            final AccessToken token = securityContext.getToken();
            // Call non-synchronized method first
            Person person = findUser(dao, token);
            if (person == null) {
              // Call synchronized method
              person = findOrCreateUser(dao, token);
            }
            return person;
          }

          // Non-synchronized method, safe to run multiple times in parallel
          private Person findUser(final PersonDao dao, final AccessToken token) {
            final String openIdSubject = token.getSubject();
            final List<Person> p = dao.findByOpenIdSubject(openIdSubject);
            if (!p.isEmpty()) {
              // If several match, the first one wins.
              final Person existingPerson = p.get(0);
              logger.trace("found existing user={} by openIdSubject={}", existingPerson,
                  openIdSubject);
              return existingPerson;
            }
            return null;
          }

          // Synchronized method, so we create/update at most one user in the face of multiple
          // simultaneous authentication requests
          private synchronized Person findOrCreateUser(final PersonDao dao,
              final AccessToken token) {
            // Re-check under the lock: another request may have created the user meanwhile.
            final Person person = findUser(dao, token);
            if (person != null) {
              return person;
            }
            // Might be user from before Keycloak integration, try username
            final String username = token.getPreferredUsername();
            final String openIdSubject = token.getSubject();
            List<Person> p = dao.findByDomainUsername(username);
            if (!p.isEmpty()) {
              final Person existingPerson = p.get(0);
              logger.trace(
                  "found existing user={} by domainUsername={}; setting openIdSubject={} (was {})",
                  existingPerson, username, openIdSubject, existingPerson.getOpenIdSubject());
              existingPerson.setOpenIdSubject(openIdSubject);
              dao.update(existingPerson);
              return existingPerson;
            }
            // Fall back to email
            final String email = token.getEmail();
            p = dao.findByEmailAddress(email);
            if (!p.isEmpty()) {
              final Person existingPerson = p.get(0);
              logger.trace(
                  "found existing user={} by emailAddress={}; setting openIdSubject={} (was {})",
                  existingPerson, email, openIdSubject, existingPerson.getOpenIdSubject());
              existingPerson.setOpenIdSubject(openIdSubject);
              dao.update(existingPerson);
              return existingPerson;
            }
            // Not found, first time this user has ever logged in
            final Person newPerson = new Person();
            logger.trace("creating new user with domainUsername={}, email={} and openIdSubject={}",
                username, email, openIdSubject);
            newPerson.setRole(Role.ADVISOR);
            newPerson.setPendingVerification(true);
            // Copy some data from the authentication token
            newPerson.setOpenIdSubject(openIdSubject);
            newPerson.setDomainUsername(username);
            newPerson.setEmailAddress(email);
            newPerson.setName(getCombinedName(token));
            /*
             * Note: there's also token.getGender(), but that's not generally available in AD/LDAP,
             * and token.getPhoneNumber(), but that requires scope="openid phone" on the
             * authentication request, which is hard to accomplish with current Keycloak code.
             */
            return dao.insert(newPerson);
          }
        };
      }

      @Override
      protected Authorizer<Person> createAuthorizer() {
        return new Authorizer<Person>() {
          @Override
          public boolean authorize(Person principal, String role) {
            // We don't use @RolesAllowed type authorizations
            return false;
          }
        };
      }

      // Formats a display name as "FAMILYNAME, GivenName MiddleName", falling back to
      // the token's plain name when those parts are absent.
      private String getCombinedName(AccessToken token) {
        final StringBuilder combinedName = new StringBuilder();
        // Try to combine FAMILYNAME, GivenName MiddleName
        final String fn = Utils.trimStringReturnNull(token.getFamilyName());
        if (!Utils.isEmptyOrNull(fn)) {
          combinedName.append(fn.toUpperCase());
          final String gn = Utils.trimStringReturnNull(token.getGivenName());
          if (!Utils.isEmptyOrNull(gn)) {
            combinedName.append(", ");
            combinedName.append(gn);
          }
          final String mn = Utils.trimStringReturnNull(token.getMiddleName());
          if (!Utils.isEmptyOrNull(mn)) {
            combinedName.append(" ");
            combinedName.append(mn);
          }
        }
        // Fall back to just the name
        if (combinedName.length() == 0) {
          combinedName.append(token.getName());
        }
        return combinedName.toString();
      }
    });
    // Add Dropwizard-Guicey
    bootstrap.addBundle(GuiceBundle.builder()
        .bundles(
            JdbiBundle.<AnetConfiguration>forDatabase((conf, env) -> conf.getDataSourceFactory()))
        .build());
    metricRegistry = bootstrap.getMetricRegistry();
  }

  /**
   * Builds the object engine, schedules the background workers (unless in test mode)
   * and registers every HTTP/GraphQL resource with Jersey.
   */
  @Override
  public void run(AnetConfiguration configuration, Environment environment)
      throws IllegalArgumentException {
    // Get the Database connection up and running
    final String dbUrl = configuration.getDataSourceFactory().getUrl();
    logger.info("datasource url: {}", dbUrl);
    // We want to use our own custom DB logger in order to clean up the logs a bit.
    final Injector injector = InjectorLookup.getInjector(this).get();
    injector.getInstance(StatementLogger.class);
    // The Object Engine is the core place where we store all of the Dao's
    // You can always grab the engine from anywhere with AnetObjectEngine.getInstance()
    final AnetObjectEngine engine =
        new AnetObjectEngine(dbUrl, this, configuration, metricRegistry);
    environment.servlets().setSessionHandler(new SessionHandler());
    if (configuration.getRedirectToHttps()) {
      forwardToHttps(environment.getApplicationContext());
    }
    // If you want to use @Auth to inject a custom Principal type into your resource
    environment.jersey().register(new AuthValueFactoryProvider.Binder<>(Person.class));
    // We no longer use @RolesAllowed to do authorization
    // environment.jersey().register(RolesAllowedDynamicFeature.class);
    environment.jersey().register(new WebExceptionMapper());
    if (configuration.isTestMode()) {
      logger.info("AnetApplication is in testMode, skipping scheduled workers");
    } else {
      logger.info("AnetApplication is starting scheduled workers");
      // Schedule any tasks that need to run on an ongoing basis.
      final ScheduledExecutorService scheduler = Executors.newScheduledThreadPool(1);
      // Check for any reports that need to be published every 5 minutes.
      // And run once in 5 seconds from boot-up. (give the server time to boot up).
      final ReportPublicationWorker reportPublicationWorker =
          new ReportPublicationWorker(configuration, engine.getReportDao());
      scheduler.scheduleAtFixedRate(reportPublicationWorker, 5, 5, TimeUnit.MINUTES);
      scheduler.schedule(reportPublicationWorker, 5, TimeUnit.SECONDS);
      // Check for any emails that need to be sent every 5 minutes.
      // And run once in 10 seconds from boot-up. (give the server time to boot up).
      final AnetEmailWorker emailWorker = new AnetEmailWorker(configuration, engine.getEmailDao());
      scheduler.scheduleAtFixedRate(emailWorker, 5, 5, TimeUnit.MINUTES);
      scheduler.schedule(emailWorker, 10, TimeUnit.SECONDS);
      // Check for any future engagements every 3 hours.
      // And run once in 15 seconds from boot-up. (give the server time to boot up).
      // NOTE(review): the initial delay of 0 below means this also fires immediately at
      // boot, in addition to the 15-second one-shot — confirm that is intended.
      final FutureEngagementWorker futureWorker =
          new FutureEngagementWorker(configuration, engine.getReportDao());
      scheduler.scheduleAtFixedRate(futureWorker, 0, 3, TimeUnit.HOURS);
      scheduler.schedule(futureWorker, 15, TimeUnit.SECONDS);
      // Check for any reports that need to be approved every 5 minutes.
      // And run once in 20 seconds from boot-up. (give the server time to boot up).
      // NOTE(review): the one-shot below fires after 5 seconds, not the 20 stated above —
      // confirm which is intended.
      final ReportApprovalWorker reportApprovalWorker =
          new ReportApprovalWorker(configuration, engine.getReportDao());
      scheduler.scheduleAtFixedRate(reportApprovalWorker, 5, 5, TimeUnit.MINUTES);
      scheduler.schedule(reportApprovalWorker, 5, TimeUnit.SECONDS);
      runAccountDeactivationWorker(configuration, scheduler, engine);
      // Check for any missing pending assessments every 6 hours.
      // And run once in 25 seconds from boot-up. (give the server time to boot up).
      final PendingAssessmentsNotificationWorker pendingAssessmentsNotificationWorker =
          new PendingAssessmentsNotificationWorker(configuration);
      scheduler.scheduleAtFixedRate(pendingAssessmentsNotificationWorker, 6, 6, TimeUnit.HOURS);
      scheduler.schedule(pendingAssessmentsNotificationWorker, 25, TimeUnit.SECONDS);
      if (DaoUtils.isPostgresql()) {
        // Wait 60 seconds between updates of PostgreSQL materialized views,
        // starting 30 seconds after boot-up.
        final MaterializedViewRefreshWorker materializedViewRefreshWorker =
            new MaterializedViewRefreshWorker(configuration, engine.getAdminDao());
        scheduler.scheduleWithFixedDelay(materializedViewRefreshWorker, 30, 60, TimeUnit.SECONDS);
      }
    }

    // Create all of the HTTP Resources.
    final LoggingResource loggingResource = new LoggingResource();
    final PersonResource personResource = new PersonResource(engine, configuration);
    final TaskResource taskResource = new TaskResource(engine, configuration);
    final LocationResource locationResource = new LocationResource(engine);
    final OrganizationResource orgResource = new OrganizationResource(engine);
    final PositionResource positionResource = new PositionResource(engine);
    final ReportResource reportResource = new ReportResource(engine, configuration);
    final AdminResource adminResource = new AdminResource(engine, configuration);
    final HomeResource homeResource = new HomeResource(engine, configuration);
    final SavedSearchResource savedSearchResource = new SavedSearchResource(engine);
    final AuthorizationGroupResource authorizationGroupResource =
        new AuthorizationGroupResource(engine);
    final NoteResource noteResource = new NoteResource(engine);
    final ApprovalStepResource approvalStepResource = new ApprovalStepResource(engine);
    final SubscriptionResource subscriptionResource = new SubscriptionResource(engine);
    final SubscriptionUpdateResource subscriptionUpdateResource =
        new SubscriptionUpdateResource(engine);
    // The GraphQL resource is Guice-managed and fans out to the other resources.
    final GraphQlResource graphQlResource = injector.getInstance(GraphQlResource.class);
    graphQlResource.initialise(engine, configuration,
        ImmutableList.of(reportResource, personResource, positionResource, locationResource,
            orgResource, taskResource, adminResource, savedSearchResource,
            authorizationGroupResource, noteResource, approvalStepResource, subscriptionResource,
            subscriptionUpdateResource),
        metricRegistry);
    // Register all of the HTTP Resources
    environment.jersey().register(loggingResource);
    environment.jersey().register(adminResource);
    environment.jersey().register(homeResource);
    environment.jersey().register(new ViewResponseFilter(configuration));
    environment.jersey().register(graphQlResource);
  }

  /**
   * Conditionally schedules the account-deactivation worker, driven by the
   * "automaticallyInactivateUsers" dictionary entry.
   */
  private void runAccountDeactivationWorker(final AnetConfiguration configuration,
      final ScheduledExecutorService scheduler, final AnetObjectEngine engine) {
    // Check whether the application is configured to auto-check for account deactivation.
    // NOTE: if you change this, reloading the dictionary from the admin interface is *not*
    // sufficient, you will have to restart ANET for this change to be reflected
    if (configuration.getDictionaryEntry("automaticallyInactivateUsers") != null) {
      // Check for any accounts which are scheduled to be deactivated as they reach the end-of-tour
      // date.
      final Integer accountDeactivationWarningInterval = (Integer) configuration
          .getDictionaryEntry("automaticallyInactivateUsers.checkIntervalInSecs");
      final AccountDeactivationWorker deactivationWarningWorker = new AccountDeactivationWorker(
          configuration, engine.getPersonDao(), accountDeactivationWarningInterval);
      // Run the account deactivation worker at the set interval.
      scheduler.scheduleAtFixedRate(deactivationWarningWorker, accountDeactivationWarningInterval,
          accountDeactivationWarningInterval, TimeUnit.SECONDS);
      // While in development, run the worker once at the start to see whether it works correctly
      if (configuration.isDevelopmentMode()) {
        scheduler.schedule(deactivationWarningWorker, 20, TimeUnit.SECONDS);
      }
    }
  }

  /*
   * Adds a Request filter that looks for any HTTP requests and redirects them to HTTPS
   */
  public void forwardToHttps(ServletContextHandler handler) {
    handler.addFilter(new FilterHolder(new HttpsRedirectFilter()), "/*",
        EnumSet.of(DispatcherType.REQUEST));
  }
}
| |
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.airavata.xbaya;
import java.net.URI;
import java.net.URISyntaxException;
import org.apache.airavata.workflow.model.exceptions.WorkflowException;
import org.apache.airavata.xbaya.XBayaConfiguration.XBayaExecutionMode;
import org.apache.airavata.xbaya.ui.utils.ErrorMessages;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Entry point of the XBaya workflow composer. Parses command-line arguments into an
 * {@link XBayaConfiguration} and starts the {@link XBayaEngine}.
 */
public class XBaya {

    private static final Logger logger = LoggerFactory.getLogger(XBaya.class);

    private XBayaConfiguration config;

    private XBayaEngine engine;

    public static int preservice = 0;

    /**
     * Constructs an XBayaEngine.
     *
     * @param args command-line arguments; see {@link #printUsage()} for the accepted options
     */
    public XBaya(String[] args) {
        parseArguments(args);
        try {
            this.engine = new XBayaEngine(this.config);
        } catch (RuntimeException e) {
            reportUnexpectedError(e);
        } catch (Error e) {
            reportUnexpectedError(e);
        }
    }

    /**
     * Logs a fatal startup error and, when the GUI is available, also shows it in the error
     * window. When {@code new XBayaEngine(...)} itself threw, {@code this.engine} is still null,
     * so only the log entry is produced (the original code dereferenced the null engine here and
     * relied on the resulting NPE being swallowed).
     */
    private void reportUnexpectedError(Throwable e) {
        logger.error(e.getMessage(), e);
        try {
            if (this.engine != null) {
                this.engine.getGUI().getErrorWindow().error(ErrorMessages.UNEXPECTED_ERROR, e);
            }
        } catch (Throwable t) {
            // Cannot do anything beyond logging.
            logger.error(e.getMessage(), e);
        }
    }

    /**
     * Returns the XBayaEngine.
     *
     * @return The XBayaEngine
     */
    public XBayaEngine getEngine() {
        return this.engine;
    }

    /**
     * Prints the command-line usage to stderr. Kept in sync with the options actually accepted by
     * {@link #parseArguments(String[])} (note: the parser accepts {@code -x}/{@code -y}, not
     * {@code --x}/{@code --y}).
     */
    private void printUsage() {
        System.err.println("Usage: java " + XBaya.class.getName() + " [-help]" + " [-config file]" + " [-title title]"
                + " [-workflow workflow]" + " [-enableLocalRegistry]" + " [-localRegistry dir]"
                + " [-gpelEngineURL url]" + " [-templateID templateID]" + " [-instanceID instanceID]"
                + " [-gfacURL url]" + " [-dscURL url]" + " [-startMonitor {true,false}]" + " [-brokerURL url]"
                + " [-topic topic]" + " [-pullMode {true,false}]" + " [-myProxyServer host]" + " [-karmaURL url]"
                + " [-karmaWorkflowInstanceID]" + " [-myProxyPort port]" + " [-myProxyUsername username]"
                + " [-myProxyLifetime sec]" + " [-loadMyProxy {true,false}]" + " [-messageBoxURL url]"
                + " [-width width]" + " [-height height]" + " [-exitOnClose false/true]" + " [-enableProvenance false/true]"
                + " [-enableProvenanceSmartRun false/true]" + " [-runWithCrossProduct true/false]" + " [-mode ide/monitor]"
                + " [-x x-coordinate of left top corner]" + " [-y y-coordinate of left top corner]");
    }

    /**
     * Parses the command-line arguments into {@code this.config}. Recoverable problems (bad URL,
     * non-numeric size, unknown option) are recorded as {@link WorkflowException}s on the
     * configuration rather than aborting, so the GUI can report them after startup.
     */
    private void parseArguments(String[] args) {
        try {
            this.config = new XBayaConfiguration();
            int index = 0;
            while (index < args.length) {
                String arg = args[index];
                String possibleValue = "";
                if ((index + 1) < args.length) {
                    possibleValue = args[index + 1];
                }
                logger.debug("arg: " + arg + " " + possibleValue);
                if ("-help".equalsIgnoreCase(arg)) {
                    printUsage();
                    System.exit(0);
                } else if ("-config".equalsIgnoreCase(arg)) {
                    index++;
                    String configPath = args[index];
                    try {
                        this.config.loadConfiguration(configPath);
                    } catch (RuntimeException e) {
                        String message = "Error while reading config file, " + configPath;
                        logger.warn(message, e);
                        this.config.addError(new WorkflowException(message, e));
                    }
                } else if ("-title".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setTitle(args[index]);
                } else if ("-workflow".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setWorkflow(args[index]);
                } else if ("-startMonitor".equalsIgnoreCase(arg)) {
                    this.config.setStartMonitor(true);
                } else if ("-brokerURL".equalsIgnoreCase(arg)) {
                    index++;
                    String brokerURL = args[index];
                    try {
                        this.config.setBrokerURL(parseURL(brokerURL));
                    } catch (URISyntaxException e) {
                        String message = "The broker URL is in wrong format: " + brokerURL;
                        logger.warn(message, e);
                        this.config.addError(new WorkflowException(message, e));
                    }
                } else if ("-odeEngine".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setOdeURL(args[index]);
                } else if ("-templateID".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setWorkflow(args[index]);
                } else if ("-topic".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setTopic(args[index]);
                } else if ("-pullMode".equalsIgnoreCase(arg)) {
                    // -pullMode takes an optional true/false value; a following option (or end of
                    // args) means "true".
                    if (index < args.length - 1) {
                        String nextArg = args[index + 1];
                        if (nextArg.startsWith("-")) {
                            this.config.setPullMode(true);
                        } else if ("true".equalsIgnoreCase(nextArg)) {
                            index++;
                            this.config.setPullMode(true);
                        } else if ("false".equalsIgnoreCase(nextArg)) {
                            index++;
                            this.config.setPullMode(false);
                        } else {
                            String message = "-pullMode has to be either true or false, not " + nextArg;
                            logger.warn(message);
                            this.config.addError(new WorkflowException(message));
                        }
                    } else {
                        // This is the last arg
                        this.config.setPullMode(true);
                    }
                } else if ("-messageBoxURL".equalsIgnoreCase(arg) || "-msgBoxURL".equalsIgnoreCase(arg)) {
                    index++;
                    String messageBoxURL = args[index];
                    try {
                        this.config.setMessageBoxURL(parseURL(messageBoxURL));
                    } catch (URISyntaxException e) {
                        String message = "The message box URL is in wrong format: " + messageBoxURL;
                        logger.warn(message, e);
                        this.config.addError(new WorkflowException(message, e));
                    }
                } else if ("-width".equalsIgnoreCase(arg)) {
                    index++;
                    String width = args[index];
                    try {
                        this.config.setWidth(Integer.parseInt(width));
                    } catch (NumberFormatException e) {
                        String message = "The width must be an integer: " + width;
                        logger.warn(message, e);
                        this.config.addError(new WorkflowException(message, e));
                    }
                } else if ("-height".equalsIgnoreCase(arg)) {
                    index++;
                    String height = args[index];
                    try {
                        this.config.setHeight(Integer.parseInt(height));
                    } catch (NumberFormatException e) {
                        String message = "The height must be an integer: " + height;
                        logger.warn(message, e);
                        this.config.addError(new WorkflowException(message, e));
                    }
                } else if ("-exitOnClose".equalsIgnoreCase(arg)) {
                    index++;
                    String exit = args[index];
                    if ("false".equalsIgnoreCase(exit)) {
                        this.config.setCloseOnExit(false);
                    }
                } else if ("-enableProvenance".equalsIgnoreCase(arg)) {
                    index++;
                    String exit = args[index];
                    if ("true".equalsIgnoreCase(exit)) {
                        this.config.setCollectProvenance(true);
                    }
                } else if ("-enableProvenanceSmartRun".equalsIgnoreCase(arg)) {
                    index++;
                    String exit = args[index];
                    if ("true".equalsIgnoreCase(exit)) {
                        this.config.setProvenanceSmartRun(true);
                    }
                } else if ("-runWithCrossProduct".equalsIgnoreCase(arg)) {
                    index++;
                    String exit = args[index];
                    if ("false".equalsIgnoreCase(exit)) {
                        this.config.setRunWithCrossProduct(false);
                    }
                } else if ("-mode".equalsIgnoreCase(arg)) {
                    index++;
                    // An invalid mode throws IllegalArgumentException from valueOf; it is caught
                    // by the Throwable handler below and recorded on the configuration.
                    String modeValue = args[index].toUpperCase();
                    this.config.setXbayaExecutionMode(XBayaExecutionMode.valueOf(modeValue));
                } else if ("-x".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setX(Integer.parseInt(args[index]));
                } else if ("-y".equalsIgnoreCase(arg)) {
                    index++;
                    this.config.setY(Integer.parseInt(args[index]));
                } else {
                    String message = "Unknown option: " + arg;
                    logger.error(message);
                    this.config.addError(new WorkflowException(message));
                }
                index++;
            }
        } catch (ArrayIndexOutOfBoundsException e) {
            // Thrown when an option that expects a value is the last argument.
            String message = "Argument is missing after " + args[args.length - 1];
            logger.error(message, e);
            this.config.addError(new WorkflowException(message));
        } catch (Throwable e) {
            logger.error(e.getMessage(), e);
            String message = "Unknown error while parsing the arguments";
            this.config.addError(new WorkflowException(message, e));
        }
    }

    /**
     * Parses a URL string into a {@link URI}, validating its server authority.
     *
     * @param urlString the raw URL argument
     * @return the parsed URI, or {@code null} for an empty string or the literal "null"
     * @throws URISyntaxException if the string is not a valid URI
     */
    private URI parseURL(String urlString) throws URISyntaxException {
        if (urlString.trim().length() == 0) {
            // This makes it possible to not use some of our default services.
            return null;
        } else if ("null".equalsIgnoreCase(urlString)) {
            // This is a workaround that JNLP doesn't take empty string as an
            // argument.
            return null;
        } else {
            return new URI(urlString).parseServerAuthority();
        }
    }

    /**
     * @param args
     */
    public static void main(String[] args) {
        new XBaya(args);
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.index.mapper;
import org.apache.lucene.index.IndexableField;
import org.elasticsearch.Version;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressedXContent;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.mapper.MapperService.MergeReason;
import org.elasticsearch.index.mapper.ObjectMapper.Dynamic;
import org.elasticsearch.test.VersionUtils;
import java.io.IOException;
import java.io.UncheckedIOException;
import java.util.Collection;
import java.util.HashSet;
import java.util.function.Function;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasSize;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.nullValue;
public class NestedObjectMapperTests extends MapperServiceTestCase {
    /**
     * A null value or an empty array for a nested field must not create any hidden nested Lucene
     * documents; only the single root document is produced.
     */
    public void testEmptyNested() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> b.startObject("nested1").field("type", "nested").endObject()));
        // Explicit null for the mapped nested field: no extra doc is added.
        ParsedDocument doc = docMapper.parse(source(b -> b.field("field", "value").nullField("nested1")));
        assertThat(doc.docs().size(), equalTo(1));
        // NOTE(review): the empty array below is named "nested" while the mapped field is
        // "nested1" -- possibly an intentional unmapped-field case, but confirm it was not
        // meant to be "nested1".
        doc = docMapper.parse(new SourceToParse("test", "1", BytesReference
            .bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested").endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(1));
    }
    /**
     * A single-level nested field produces one hidden Lucene document per nested object, stored
     * before the root document; the root document carries the non-nested fields.
     */
    public void testSingleNested() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> b.startObject("nested1").field("type", "nested").endObject()));
        assertThat(docMapper.mappers().hasNested(), equalTo(true));
        ObjectMapper mapper = docMapper.mappers().objectMappers().get("nested1");
        assertThat(mapper, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper;
        // One nested object: docs = [nested doc, root doc].
        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1", BytesReference
            .bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startObject("nested1").field("field1", "1").field("field2", "2").endObject()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(2));
        // The hidden nested doc is tagged with the nested type path and holds the nested fields.
        assertThat(doc.docs().get(0).get(NestedPathFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePath()));
        assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
        assertThat(doc.docs().get(1).get("field"), equalTo("value"));
        // Two nested objects in an array: docs = [nested doc, nested doc, root doc], in order.
        doc = docMapper.parse(new SourceToParse("test", "1", BytesReference
            .bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1", "1").field("field2", "2").endObject()
                .startObject().field("field1", "3").field("field2", "4").endObject()
                .endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(3));
        assertThat(doc.docs().get(0).get(NestedPathFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePath()));
        assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
        assertThat(doc.docs().get(1).get(NestedPathFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePath()));
        assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
        assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
        assertThat(doc.docs().get(2).get("field"), equalTo("value"));
    }
    /**
     * Two levels of nested fields with no include_in_parent/include_in_root: each level keeps its
     * own fields, and the docs list is ordered grandchildren-first, then children, root last.
     */
    public void testMultiNested() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
            b.startObject("nested1");
            {
                b.field("type", "nested");
                b.startObject("properties");
                {
                    b.startObject("nested2").field("type", "nested").endObject();
                }
                b.endObject();
            }
            b.endObject();
        }));
        assertThat(docMapper.mappers().hasNested(), equalTo(true));
        ObjectMapper mapper1 = docMapper.mappers().objectMappers().get("nested1");
        assertThat(mapper1, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper1;
        // Defaults: neither level is copied into its parent or the root.
        assertThat(nested1Mapper.isIncludeInParent(), equalTo(false));
        assertThat(nested1Mapper.isIncludeInRoot(), equalTo(false));
        ObjectMapper mapper2 = docMapper.mappers().objectMappers().get("nested1.nested2");
        assertThat(mapper2, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested2Mapper = (NestedObjectMapper) mapper2;
        assertThat(nested2Mapper.isIncludeInParent(), equalTo(false));
        assertThat(nested2Mapper.isIncludeInRoot(), equalTo(false));
        // 2 nested1 objects x 2 nested2 objects each => 4 grandchild + 2 child + 1 root = 7 docs.
        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
            BytesReference.bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1", "1").startArray("nested2")
                .startObject().field("field2", "2").endObject()
                .startObject().field("field2", "3").endObject()
                .endArray()
                .endObject()
                .startObject().field("field1", "4")
                .startArray("nested2")
                .startObject().field("field2", "5").endObject()
                .startObject().field("field2", "6").endObject()
                .endArray().endObject()
                .endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(7));
        // docs 0-1: nested2 children of the first nested1; doc 2: the first nested1 itself.
        assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(0).get("field"), nullValue());
        assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
        assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(1).get("field"), nullValue());
        assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
        assertThat(doc.docs().get(2).get("field"), nullValue());
        // docs 3-4: nested2 children of the second nested1; doc 5: the second nested1 itself.
        assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(3).get("field"), nullValue());
        assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
        assertThat(doc.docs().get(4).get("field"), nullValue());
        assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
        assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
        assertThat(doc.docs().get(5).get("field"), nullValue());
        // doc 6: the root document carries only the non-nested field.
        assertThat(doc.docs().get(6).get("field"), equalTo("value"));
        assertThat(doc.docs().get(6).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(6).get("nested1.nested2.field2"), nullValue());
    }
    /**
     * include_in_parent on the inner level only: nested2 values are additionally copied into the
     * enclosing nested1 docs, but nothing is copied into the root document.
     */
    public void testMultiObjectAndNested1() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
            b.startObject("nested1");
            {
                b.field("type", "nested");
                b.startObject("properties");
                {
                    b.startObject("nested2");
                    {
                        b.field("type", "nested");
                        b.field("include_in_parent", true);
                    }
                    b.endObject();
                }
                b.endObject();
            }
            b.endObject();
        }));
        assertThat(docMapper.mappers().hasNested(), equalTo(true));
        ObjectMapper mapper1 = docMapper.mappers().objectMappers().get("nested1");
        assertThat(mapper1, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper1;
        assertThat(nested1Mapper.isIncludeInParent(), equalTo(false));
        assertThat(nested1Mapper.isIncludeInRoot(), equalTo(false));
        ObjectMapper mapper2 = docMapper.mappers().objectMappers().get("nested1.nested2");
        assertThat(mapper2, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested2Mapper = (NestedObjectMapper) mapper2;
        // Only the inner level is included in its parent.
        assertThat(nested2Mapper.isIncludeInParent(), equalTo(true));
        assertThat(nested2Mapper.isIncludeInRoot(), equalTo(false));
        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
            BytesReference.bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1", "1")
                .startArray("nested2")
                .startObject().field("field2", "2").endObject()
                .startObject().field("field2", "3").endObject()
                .endArray().endObject()
                .startObject().field("field1", "4")
                .startArray("nested2")
                .startObject().field("field2", "5").endObject()
                .startObject().field("field2", "6").endObject()
                .endArray().endObject()
                .endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(7));
        assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(0).get("field"), nullValue());
        assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
        assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(1).get("field"), nullValue());
        // The nested1 docs (indices 2 and 5) now also carry their first nested2 value,
        // because of include_in_parent on nested2.
        assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(2).get("field"), nullValue());
        assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(3).get("field"), nullValue());
        assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
        assertThat(doc.docs().get(4).get("field"), nullValue());
        assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
        assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(5).get("field"), nullValue());
        // The root doc stays free of nested values since nested1 is not included anywhere.
        assertThat(doc.docs().get(6).get("field"), equalTo("value"));
        assertThat(doc.docs().get(6).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(6).get("nested1.nested2.field2"), nullValue());
    }
    /**
     * include_in_parent on both levels: nested2 values propagate into nested1 docs, and nested1
     * values (including the propagated nested2 ones) propagate into the root document.
     */
    public void testMultiObjectAndNested2() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
            b.startObject("nested1");
            {
                b.field("type", "nested");
                b.field("include_in_parent", true);
                b.startObject("properties");
                {
                    b.startObject("nested2");
                    {
                        b.field("type", "nested");
                        b.field("include_in_parent", true);
                    }
                    b.endObject();
                }
                b.endObject();
            }
            b.endObject();
        }));
        assertThat(docMapper.mappers().hasNested(), equalTo(true));
        ObjectMapper mapper1 = docMapper.mappers().objectMappers().get("nested1");
        assertThat(mapper1, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper1;
        // Both levels include their values in the respective parent.
        assertThat(nested1Mapper.isIncludeInParent(), equalTo(true));
        assertThat(nested1Mapper.isIncludeInRoot(), equalTo(false));
        ObjectMapper mapper2 = docMapper.mappers().objectMappers().get("nested1.nested2");
        assertThat(mapper2, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested2Mapper = (NestedObjectMapper) mapper2;
        assertThat(nested2Mapper.isIncludeInParent(), equalTo(true));
        assertThat(nested2Mapper.isIncludeInRoot(), equalTo(false));
        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
            BytesReference.bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1", "1")
                .startArray("nested2")
                .startObject().field("field2", "2").endObject()
                .startObject().field("field2", "3").endObject()
                .endArray().endObject()
                .startObject().field("field1", "4")
                .startArray("nested2")
                .startObject().field("field2", "5").endObject()
                .startObject().field("field2", "6").endObject()
                .endArray().endObject()
                .endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(7));
        assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(0).get("field"), nullValue());
        assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
        assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(1).get("field"), nullValue());
        assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(2).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(2).get("field"), nullValue());
        assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(3).get("field"), nullValue());
        assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
        assertThat(doc.docs().get(4).get("field"), nullValue());
        assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
        assertThat(doc.docs().get(5).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(5).get("field"), nullValue());
        // The root doc accumulates every value via the include_in_parent chain:
        // both field1 values and all four field2 values.
        assertThat(doc.docs().get(6).get("field"), equalTo("value"));
        assertThat(doc.docs().get(6).getFields("nested1.field1").length, equalTo(2));
        assertThat(doc.docs().get(6).getFields("nested1.nested2.field2").length, equalTo(4));
    }
    /**
     * include_in_root on the inner level only: nested2 values are copied straight into the root
     * document, skipping the intermediate nested1 docs. Also exercises getNestedParent lookups.
     */
    public void testMultiRootAndNested1() throws Exception {
        DocumentMapper docMapper = createDocumentMapper(mapping(b -> {
            b.startObject("nested1");
            {
                b.field("type", "nested");
                b.startObject("properties");
                {
                    b.startObject("nested2");
                    {
                        b.field("type", "nested");
                        b.field("include_in_root", true);
                    }
                    b.endObject();
                }
                b.endObject();
            }
            b.endObject();
        }));
        // Parent lookups: nested2's nested parent is nested1; non-nested or unknown paths yield null.
        assertEquals("nested1", docMapper.mappers().getNestedParent("nested1.nested2"));
        assertNull(docMapper.mappers().getNestedParent("nonexistent"));
        assertNull(docMapper.mappers().getNestedParent("nested1"));
        assertThat(docMapper.mappers().hasNested(), equalTo(true));
        ObjectMapper mapper1 = docMapper.mappers().objectMappers().get("nested1");
        assertThat(mapper1, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper1;
        assertThat(nested1Mapper.isIncludeInParent(), equalTo(false));
        assertThat(nested1Mapper.isIncludeInRoot(), equalTo(false));
        ObjectMapper mapper2 = docMapper.mappers().objectMappers().get("nested1.nested2");
        assertThat(mapper2, instanceOf(NestedObjectMapper.class));
        NestedObjectMapper nested2Mapper = (NestedObjectMapper) mapper2;
        assertThat(nested2Mapper.isIncludeInParent(), equalTo(false));
        assertThat(nested2Mapper.isIncludeInRoot(), equalTo(true));
        ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
            BytesReference.bytes(XContentFactory.jsonBuilder()
                .startObject()
                .field("field", "value")
                .startArray("nested1")
                .startObject().field("field1", "1")
                .startArray("nested2")
                .startObject().field("field2", "2").endObject()
                .startObject().field("field2", "3").endObject()
                .endArray().endObject()
                .startObject().field("field1", "4")
                .startArray("nested2")
                .startObject().field("field2", "5").endObject()
                .startObject().field("field2", "6").endObject()
                .endArray().endObject()
                .endArray()
                .endObject()),
            XContentType.JSON));
        assertThat(doc.docs().size(), equalTo(7));
        assertThat(doc.docs().get(0).get("nested1.nested2.field2"), equalTo("2"));
        assertThat(doc.docs().get(0).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(0).get("field"), nullValue());
        assertThat(doc.docs().get(1).get("nested1.nested2.field2"), equalTo("3"));
        assertThat(doc.docs().get(1).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(1).get("field"), nullValue());
        // The intermediate nested1 docs (2 and 5) do NOT receive nested2 values.
        assertThat(doc.docs().get(2).get("nested1.field1"), equalTo("1"));
        assertThat(doc.docs().get(2).get("nested1.nested2.field2"), nullValue());
        assertThat(doc.docs().get(2).get("field"), nullValue());
        assertThat(doc.docs().get(3).get("nested1.nested2.field2"), equalTo("5"));
        assertThat(doc.docs().get(3).get("field"), nullValue());
        assertThat(doc.docs().get(4).get("nested1.nested2.field2"), equalTo("6"));
        assertThat(doc.docs().get(4).get("field"), nullValue());
        assertThat(doc.docs().get(5).get("nested1.field1"), equalTo("4"));
        assertThat(doc.docs().get(5).get("nested1.nested2.field2"), nullValue());
        assertThat(doc.docs().get(5).get("field"), nullValue());
        // The root doc receives all four field2 values directly, but no field1 values.
        assertThat(doc.docs().get(6).get("field"), equalTo("value"));
        assertThat(doc.docs().get(6).get("nested1.field1"), nullValue());
        assertThat(doc.docs().get(6).getFields("nested1.nested2.field2").length, equalTo(4));
    }
/**
* Checks that multiple levels of nested includes where a node is both directly and transitively
* included in root by {@code include_in_root} and a chain of {@code include_in_parent} does not
* lead to duplicate fields on the root document.
*/
public void testMultipleLevelsIncludeRoot1() throws Exception {
MapperService mapperService = createMapperService(mapping(b -> {
}));
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject(MapperService.SINGLE_MAPPING_NAME)
.startObject("properties")
.startObject("nested1").field("type", "nested").field("include_in_root", true)
.field("include_in_parent", true).startObject("properties")
.startObject("nested2").field("type", "nested").field("include_in_root", true)
.field("include_in_parent", true)
.endObject().endObject().endObject()
.endObject().endObject().endObject());
MergeReason mergeReason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), mergeReason);
DocumentMapper docMapper = mapperService.documentMapper();
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startArray("nested1")
.startObject().startArray("nested2").startObject().field("foo", "bar")
.endObject().endArray().endObject().endArray()
.endObject()),
XContentType.JSON));
final Collection<IndexableField> fields = doc.rootDoc().getFields();
assertThat(fields.size(), equalTo(new HashSet<>(fields).size()));
}
public void testRecursiveIncludeInParent() throws IOException {
// if we have a nested hierarchy, and all nested mappers have 'include_in_parent'
// set to 'true', then values from the grandchild nodes should be copied all the
// way up the hierarchy and into the root document, even if 'include_in_root' has
// explicitly been set to 'false'.
MapperService mapperService = createMapperService(mapping(b -> {
b.startObject("nested1");
b.field("type", "nested");
b.field("include_in_parent", true);
b.field("include_in_root", false);
b.startObject("properties");
b.startObject("nested1_id").field("type", "keyword").endObject();
b.startObject("nested2");
b.field("type", "nested");
b.field("include_in_parent", true);
b.field("include_in_root", false);
b.startObject("properties");
b.startObject("nested2_id").field("type", "keyword").endObject();
b.endObject();
b.endObject();
b.endObject();
b.endObject();
}));
ParsedDocument doc = mapperService.documentMapper().parse(source(b -> {
b.startObject("nested1");
b.field("nested1_id", "1");
b.startObject("nested2");
b.field("nested2_id", "2");
b.endObject();
b.endObject();
}));
assertNotNull(doc.rootDoc().getField("nested1.nested2.nested2_id"));
}
/**
* Same as {@link NestedObjectMapperTests#testMultipleLevelsIncludeRoot1()} but tests for the
* case where the transitive {@code include_in_parent} and redundant {@code include_in_root}
* happen on a chain of nodes that starts from a parent node that is not directly connected to
* root by a chain of {@code include_in_parent}, i.e. that has {@code include_in_parent} set to
* {@code false} and {@code include_in_root} set to {@code true}.
*/
public void testMultipleLevelsIncludeRoot2() throws Exception {
MapperService mapperService = createMapperService(mapping(b -> {
}));
String mapping = Strings.toString(XContentFactory.jsonBuilder()
.startObject().startObject(MapperService.SINGLE_MAPPING_NAME)
.startObject("properties")
.startObject("nested1").field("type", "nested")
.field("include_in_root", true).field("include_in_parent", true).startObject("properties")
.startObject("nested2").field("type", "nested")
.field("include_in_root", true).field("include_in_parent", false).startObject("properties")
.startObject("nested3").field("type", "nested")
.field("include_in_root", true).field("include_in_parent", true)
.endObject().endObject().endObject().endObject().endObject()
.endObject().endObject().endObject());
MergeReason mergeReason = randomFrom(MergeReason.MAPPING_UPDATE, MergeReason.INDEX_TEMPLATE);
mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(mapping), mergeReason);
DocumentMapper docMapper = mapperService.documentMapper();
ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
BytesReference.bytes(XContentFactory.jsonBuilder()
.startObject().startArray("nested1")
.startObject().startArray("nested2")
.startObject().startArray("nested3").startObject().field("foo", "bar")
.endObject().endArray().endObject().endArray().endObject().endArray()
.endObject()),
XContentType.JSON));
final Collection<IndexableField> fields = doc.rootDoc().getFields();
assertThat(fields.size(), equalTo(new HashSet<>(fields).size()));
}
/**
 * Same as {@link NestedObjectMapperTests#testMultipleLevelsIncludeRoot1()} but tests that
 * the redundant includes are removed even if each individual mapping doesn't contain the
 * redundancy, only the merged mapping does.
 */
public void testMultipleLevelsIncludeRootWithMerge() throws Exception {
    // Start from an empty mapping and assemble the final mapping via two template merges.
    MapperService mapperService = createMapperService(mapping(b -> {
    }));
    // First mapping: nested1 -> root; nested2 -> root and -> parent.
    String firstMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
        .startObject(MapperService.SINGLE_MAPPING_NAME)
        .startObject("properties")
        .startObject("nested1")
        .field("type", "nested")
        .field("include_in_root", true)
        .startObject("properties")
        .startObject("nested2")
        .field("type", "nested")
        .field("include_in_root", true)
        .field("include_in_parent", true)
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject());
    mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(firstMapping), MergeReason.INDEX_TEMPLATE);
    // Second mapping: adds include_in_parent on nested1 but omits it on nested2.
    // Only the *merged* result contains the redundant include combination.
    String secondMapping = Strings.toString(XContentFactory.jsonBuilder().startObject()
        .startObject(MapperService.SINGLE_MAPPING_NAME)
        .startObject("properties")
        .startObject("nested1")
        .field("type", "nested")
        .field("include_in_root", true)
        .field("include_in_parent", true)
        .startObject("properties")
        .startObject("nested2")
        .field("type", "nested")
        .field("include_in_root", true)
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject()
        .endObject());
    mapperService.merge(MapperService.SINGLE_MAPPING_NAME, new CompressedXContent(secondMapping), MergeReason.INDEX_TEMPLATE);
    DocumentMapper docMapper = mapperService.documentMapper();
    ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
        BytesReference.bytes(XContentFactory.jsonBuilder()
            .startObject().startArray("nested1")
            .startObject().startArray("nested2").startObject().field("foo", "bar")
            .endObject().endArray().endObject().endArray()
            .endObject()),
        XContentType.JSON));
    // No field may appear on the root document more than once.
    final Collection<IndexableField> fields = doc.rootDoc().getFields();
    assertThat(fields.size(), equalTo(new HashSet<>(fields).size()));
}
public void testNestedArrayStrict() throws Exception {
    // nested1 is a nested object with dynamic mapping set to "strict" and a single
    // explicitly mapped sub-field, field1.
    DocumentMapper docMapper = createDocumentMapper(mapping(builder -> {
        builder.startObject("nested1");
        builder.field("type", "nested");
        builder.field("dynamic", "strict");
        builder.startObject("properties");
        builder.startObject("field1").field("type", "text").endObject();
        builder.endObject();
        builder.endObject();
    }));

    assertThat(docMapper.mappers().hasNested(), equalTo(true));
    ObjectMapper nested1Mapper = docMapper.mappers().objectMappers().get("nested1");
    assertThat(nested1Mapper, instanceOf(NestedObjectMapper.class));
    assertThat(nested1Mapper.dynamic(), equalTo(Dynamic.STRICT));

    // Index a document with two nested1 entries plus one top-level field.
    ParsedDocument parsed = docMapper.parse(new SourceToParse("test", "1",
        BytesReference.bytes(XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "value")
            .startArray("nested1")
            .startObject().field("field1", "1").endObject()
            .startObject().field("field1", "4").endObject()
            .endArray()
            .endObject()),
        XContentType.JSON));

    // Two nested docs followed by the root doc; top-level fields live only on the root.
    assertThat(parsed.docs().size(), equalTo(3));
    assertThat(parsed.docs().get(0).get("nested1.field1"), equalTo("1"));
    assertThat(parsed.docs().get(0).get("field"), nullValue());
    assertThat(parsed.docs().get(1).get("nested1.field1"), equalTo("4"));
    assertThat(parsed.docs().get(1).get("field"), nullValue());
    assertThat(parsed.docs().get(2).get("field"), equalTo("value"));
}
public void testLimitOfNestedFieldsPerIndex() throws Exception {
    // Builds a mapping containing two nested fields (nested1 and its child nested2)
    // under the given type name.
    Function<String, String> mapping = type -> {
        try {
            return Strings.toString(XContentFactory.jsonBuilder().startObject().startObject(type).startObject("properties")
                .startObject("nested1").field("type", "nested").startObject("properties")
                .startObject("nested2").field("type", "nested")
                .endObject().endObject().endObject()
                .endObject().endObject().endObject());
        } catch (IOException e) {
            throw new UncheckedIOException(e);
        }
    };
    // default limit allows at least two nested fields
    createMapperService(mapping.apply("_doc"));
    // explicitly setting limit to 0 prevents nested fields
    Exception e = expectThrows(IllegalArgumentException.class, () -> {
        Settings settings = Settings.builder()
            .put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0)
            .build();
        createMapperService(settings, mapping.apply("_doc"));
    });
    assertThat(e.getMessage(), containsString("Limit of nested fields [0] has been exceeded"));
    // setting limit to 1 with 2 nested fields fails
    e = expectThrows(IllegalArgumentException.class, () -> {
        Settings settings = Settings.builder()
            .put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 1)
            .build();
        createMapperService(settings, mapping.apply("_doc"));
    });
    assertThat(e.getMessage(), containsString("Limit of nested fields [1] has been exceeded"));
    // do not check nested fields limit if mapping is not updated:
    // MAPPING_RECOVERY replays an already-accepted mapping, so even a limit of 0
    // must not reject it.
    Settings settings = Settings.builder()
        .put(MapperService.INDEX_MAPPING_NESTED_FIELDS_LIMIT_SETTING.getKey(), 0).build();
    MapperService mapperService = createMapperService(settings, mapping(b -> {
    }));
    merge(mapperService, MergeReason.MAPPING_RECOVERY, mapping.apply("_doc"));
}
/**
 * Indexing one more nested document than the default
 * {@code index.mapping.nested_objects.limit} allows must fail with an error message
 * naming both the limit and the setting that controls it.
 */
public void testLimitNestedDocsDefaultSettings() throws Exception {
    // Empty settings -> the setting's declared default value applies.
    Settings settings = Settings.EMPTY;
    DocumentMapper docMapper
        = createDocumentMapper(mapping(b -> b.startObject("nested1").field("type", "nested").endObject()));
    long defaultMaxNoNestedDocs = MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.get(settings);

    // Build a doc containing (default limit + 1) nested objects, which must fail to parse.
    XContentBuilder docBuilder = XContentFactory.jsonBuilder();
    docBuilder.startObject();
    {
        docBuilder.startArray("nested1");
        {
            for (int i = 0; i <= defaultMaxNoNestedDocs; i++) {
                docBuilder.startObject().field("f", i).endObject();
            }
        }
        docBuilder.endArray();
    }
    docBuilder.endObject();

    SourceToParse source1 = new SourceToParse("test1", "1",
        BytesReference.bytes(docBuilder), XContentType.JSON);
    MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source1));
    assertEquals(
        "The number of nested documents has exceeded the allowed limit of [" + defaultMaxNoNestedDocs
            + "]. This limit can be set by changing the [" + MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey()
            + "] index level setting.",
        e.getMessage()
    );
}
public void testLimitNestedDocs() throws Exception {
    // setting limit to allow only two nested objects in the whole doc
    long maxNoNestedDocs = 2L;
    Settings settings = Settings.builder()
        .put(MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey(), maxNoNestedDocs)
        .build();
    DocumentMapper docMapper
        = createMapperService(settings, mapping(b -> b.startObject("nested1").field("type", "nested").endObject())).documentMapper();
    // parsing a doc with 2 nested objects succeeds
    XContentBuilder docBuilder = XContentFactory.jsonBuilder();
    docBuilder.startObject();
    {
        docBuilder.startArray("nested1");
        {
            docBuilder.startObject().field("field1", "11").field("field2", "21").endObject();
            docBuilder.startObject().field("field1", "12").field("field2", "22").endObject();
        }
        docBuilder.endArray();
    }
    docBuilder.endObject();
    SourceToParse source1 = new SourceToParse("test1", "1",
        BytesReference.bytes(docBuilder), XContentType.JSON);
    ParsedDocument doc = docMapper.parse(source1);
    // 2 nested docs plus the root doc
    assertThat(doc.docs().size(), equalTo(3));
    // parsing a doc with 3 nested objects fails
    XContentBuilder docBuilder2 = XContentFactory.jsonBuilder();
    docBuilder2.startObject();
    {
        docBuilder2.startArray("nested1");
        {
            docBuilder2.startObject().field("field1", "11").field("field2", "21").endObject();
            docBuilder2.startObject().field("field1", "12").field("field2", "22").endObject();
            docBuilder2.startObject().field("field1", "13").field("field2", "23").endObject();
        }
        docBuilder2.endArray();
    }
    docBuilder2.endObject();
    SourceToParse source2 = new SourceToParse("test1", "2",
        BytesReference.bytes(docBuilder2), XContentType.JSON);
    MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
    // The error message must name both the limit and the setting that controls it.
    assertEquals(
        "The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs
            + "]. This limit can be set by changing the [" + MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey()
            + "] index level setting.",
        e.getMessage()
    );
}
public void testLimitNestedDocsMultipleNestedFields() throws Exception {
    // setting limit to allow only two nested objects in the whole doc
    long maxNoNestedDocs = 2L;
    Settings settings = Settings.builder()
        .put(MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey(), maxNoNestedDocs).build();
    // Two independent nested fields: the limit counts nested docs across the whole
    // document, not per nested field.
    DocumentMapper docMapper = createMapperService(settings, mapping(b -> {
        b.startObject("nested1").field("type", "nested").endObject();
        b.startObject("nested2").field("type", "nested").endObject();
    })).documentMapper();
    // parsing a doc with 2 nested objects (one per field) succeeds
    XContentBuilder docBuilder = XContentFactory.jsonBuilder();
    docBuilder.startObject();
    {
        docBuilder.startArray("nested1");
        {
            docBuilder.startObject().field("field1", "11").field("field2", "21").endObject();
        }
        docBuilder.endArray();
        docBuilder.startArray("nested2");
        {
            docBuilder.startObject().field("field1", "21").field("field2", "22").endObject();
        }
        docBuilder.endArray();
    }
    docBuilder.endObject();
    SourceToParse source1 = new SourceToParse("test1", "1",
        BytesReference.bytes(docBuilder), XContentType.JSON);
    ParsedDocument doc = docMapper.parse(source1);
    // 2 nested docs plus the root doc
    assertThat(doc.docs().size(), equalTo(3));
    // parsing a doc with 3 nested objects spread over both fields fails
    XContentBuilder docBuilder2 = XContentFactory.jsonBuilder();
    docBuilder2.startObject();
    {
        docBuilder2.startArray("nested1");
        {
            docBuilder2.startObject().field("field1", "11").field("field2", "21").endObject();
        }
        docBuilder2.endArray();
        docBuilder2.startArray("nested2");
        {
            docBuilder2.startObject().field("field1", "12").field("field2", "22").endObject();
            docBuilder2.startObject().field("field1", "13").field("field2", "23").endObject();
        }
        docBuilder2.endArray();
    }
    docBuilder2.endObject();
    SourceToParse source2 = new SourceToParse("test1", "2",
        BytesReference.bytes(docBuilder2), XContentType.JSON);
    MapperParsingException e = expectThrows(MapperParsingException.class, () -> docMapper.parse(source2));
    assertEquals(
        "The number of nested documents has exceeded the allowed limit of [" + maxNoNestedDocs
            + "]. This limit can be set by changing the [" + MapperService.INDEX_MAPPING_NESTED_DOCS_LIMIT_SETTING.getKey()
            + "] index level setting.",
        e.getMessage()
    );
}
public void testReorderParent() throws IOException {
    // Pick any index-compatible version so both the pre- and post-8.0 code paths get exercised.
    Version version = VersionUtils.randomIndexCompatibleVersion(random());
    DocumentMapper docMapper
        = createDocumentMapper(version, mapping(b -> b.startObject("nested1").field("type", "nested").endObject()));
    assertThat(docMapper.mappers().hasNested(), equalTo(true));
    ObjectMapper mapper = docMapper.mappers().objectMappers().get("nested1");
    assertThat(mapper, instanceOf(NestedObjectMapper.class));
    // Two nested1 entries plus a top-level field.
    ParsedDocument doc = docMapper.parse(new SourceToParse("test", "1",
        BytesReference.bytes(XContentFactory.jsonBuilder()
            .startObject()
            .field("field", "value")
            .startArray("nested1")
            .startObject()
            .field("field1", "1")
            .field("field2", "2")
            .endObject()
            .startObject()
            .field("field1", "3")
            .field("field2", "4")
            .endObject()
            .endArray()
            .endObject()),
        XContentType.JSON));
    // Nested docs come first, in source order; the root (parent) doc is last.
    assertThat(doc.docs().size(), equalTo(3));
    NestedObjectMapper nested1Mapper = (NestedObjectMapper) mapper;
    // Before 8.0 the nested path was recorded in _type; from 8.0 on it lives in the
    // dedicated NestedPathFieldMapper field.
    if (version.before(Version.V_8_0_0)) {
        assertThat(doc.docs().get(0).get("_type"), equalTo(nested1Mapper.nestedTypePath()));
    } else {
        assertThat(doc.docs().get(0).get(NestedPathFieldMapper.NAME), equalTo(nested1Mapper.nestedTypePath()));
    }
    assertThat(doc.docs().get(0).get("nested1.field1"), equalTo("1"));
    assertThat(doc.docs().get(0).get("nested1.field2"), equalTo("2"));
    assertThat(doc.docs().get(1).get("nested1.field1"), equalTo("3"));
    assertThat(doc.docs().get(1).get("nested1.field2"), equalTo("4"));
    assertThat(doc.docs().get(2).get("field"), equalTo("value"));
}
public void testMergeChildMappings() throws IOException {
    // Initial mapping: nested1 with children field1, field2 and nested2.
    MapperService service = createMapperService(mapping(mb -> {
        mb.startObject("nested1");
        mb.field("type", "nested");
        mb.startObject("properties");
        mb.startObject("field1").field("type", "keyword").endObject();
        mb.startObject("field2").field("type", "keyword").endObject();
        mb.startObject("nested2").field("type", "nested").field("include_in_root", true).endObject();
        mb.endObject();
        mb.endObject();
    }));

    // Merge in a mapping that re-declares field2/nested2 and introduces field3.
    merge(service, mapping(mb -> {
        mb.startObject("nested1");
        mb.field("type", "nested");
        mb.startObject("properties");
        mb.startObject("field2").field("type", "keyword").endObject();
        mb.startObject("field3").field("type", "keyword").endObject();
        mb.startObject("nested2").field("type", "nested").field("include_in_root", true).endObject();
        mb.endObject();
        mb.endObject();
    }));

    // The merged nested1 holds the union of children: field1, field2, field3, nested2.
    NestedObjectMapper mergedNested1 = (NestedObjectMapper) service.mappingLookup().objectMappers().get("nested1");
    assertThat(mergedNested1.getChildren().values(), hasSize(4));

    // nested2 keeps its include_in_root flag through the merge.
    NestedObjectMapper mergedNested2 = (NestedObjectMapper) mergedNested1.getChildren().get("nested2");
    assertTrue(mergedNested2.isIncludeInRoot());
}
public void testMergeNestedMappings() throws IOException {
    MapperService service = createMapperService(mapping(b -> b.startObject("nested1").field("type", "nested").endObject()));

    // `include_in_parent` cannot be flipped on an existing nested mapping.
    MapperException parentError = expectThrows(MapperException.class, () -> merge(service, mapping(b -> {
        b.startObject("nested1").field("type", "nested").field("include_in_parent", true).endObject();
    })));
    assertEquals("the [include_in_parent] parameter can't be updated on a nested object mapping", parentError.getMessage());

    // Neither can `include_in_root`.
    MapperException rootError = expectThrows(MapperException.class, () -> merge(service, mapping(b -> {
        b.startObject("nested1").field("type", "nested").field("include_in_root", true).endObject();
    })));
    assertEquals("the [include_in_root] parameter can't be updated on a nested object mapping", rootError.getMessage());
}
public void testEnabled() throws IOException {
    // A nested mapper can be disabled via enabled=false.
    MapperService mapperService = createMapperService(mapping(b -> {
        b.startObject("nested");
        b.field("type", "nested");
        b.field("enabled", "false");
        b.endObject();
    }));
    {
        NestedObjectMapper nom = (NestedObjectMapper) mapperService.mappingLookup().objectMappers().get("nested");
        assertFalse(nom.isEnabled());
    }
    // Re-merging the identical value is a no-op and keeps the mapper disabled.
    merge(mapperService, mapping(b -> {
        b.startObject("nested");
        b.field("type", "nested");
        b.field("enabled", "false");
        b.endObject();
    }));
    {
        NestedObjectMapper nom = (NestedObjectMapper) mapperService.mappingLookup().objectMappers().get("nested");
        assertFalse(nom.isEnabled());
    }
    // merging for index templates allows override of 'enabled' param
    merge(mapperService, MergeReason.INDEX_TEMPLATE, mapping(b -> {
        b.startObject("nested");
        b.field("type", "nested");
        b.field("enabled", "true");
        b.endObject();
    }));
    {
        NestedObjectMapper nom = (NestedObjectMapper) mapperService.mappingLookup().objectMappers().get("nested");
        assertTrue(nom.isEnabled());
    }
    // but a normal merge does not permit 'enabled' overrides
    Exception e = expectThrows(MapperException.class, () -> merge(mapperService, mapping(b -> {
        b.startObject("nested");
        b.field("type", "nested");
        b.field("enabled", "false");
        b.endObject();
    })));
    assertThat(e.getMessage(), containsString("the [enabled] parameter can't be updated for the object mapping [nested]"));
}
public void testMergeNestedMappingsFromDynamicUpdate() throws IOException {
    // Check that dynamic mappings have redundant includes removed
    // Dynamic template mapping every object field as nested, with both include flags set.
    MapperService mapperService = createMapperService(topMapping(b -> {
        b.startArray("dynamic_templates");
        b.startObject();
        b.startObject("object_fields");
        b.field("match_mapping_type", "object");
        b.startObject("mapping");
        b.field("type", "nested");
        b.field("include_in_parent", true);
        b.field("include_in_root", true);
        b.endObject();
        b.field("match", "*");
        b.endObject();
        b.endObject();
        b.endArray();
    }));
    // Parsing an unmapped "object" field triggers a dynamic mapping update.
    ParsedDocument doc = mapperService.documentMapper().parse(source(b -> b.startObject("object").endObject()));
    // Apply the same dynamic update twice: the result must be stable (idempotent).
    merge(mapperService, Strings.toString(doc.dynamicMappingsUpdate()));
    merge(mapperService, Strings.toString(doc.dynamicMappingsUpdate()));
    // The stored mapping keeps only include_in_parent: the parent here IS the root,
    // so the redundant include_in_root is stripped.
    assertThat(
        Strings.toString(mapperService.documentMapper().mapping()),
        containsString("\"properties\":{\"object\":{\"type\":\"nested\",\"include_in_parent\":true}}")
    );
}
}
| |
/*
* Copyright 2018-Present The CloudEvents Authors
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package io.cloudevents.core.impl;
import io.cloudevents.CloudEvent;
import io.cloudevents.CloudEventContext;
import io.cloudevents.CloudEventData;
import io.cloudevents.CloudEventExtension;
import io.cloudevents.core.builder.CloudEventBuilder;
import io.cloudevents.core.data.BytesCloudEventData;
import io.cloudevents.rw.CloudEventRWException;
import javax.annotation.Nonnull;
import java.net.URI;
import java.time.OffsetDateTime;
import java.util.HashMap;
import java.util.Map;
import static io.cloudevents.core.v03.CloudEventV03.SPECVERSION;
/**
 * Base class for version-specific {@link CloudEventBuilder} implementations.
 * Holds the data payload and the extension attributes; concrete subclasses
 * supply the spec-version-specific context attributes.
 *
 * @param <SELF> the concrete builder subtype (self-referential, for fluent chaining)
 * @param <T>    the event type produced by {@code build()}
 */
public abstract class BaseCloudEventBuilder<SELF extends BaseCloudEventBuilder<SELF, T>, T extends CloudEvent> implements CloudEventBuilder {

    // This is a little trick for enabling fluency: every with* method returns the
    // concrete subtype instead of the base class.
    private final SELF self;

    protected CloudEventData data;
    protected Map<String, Object> extensions = new HashMap<>();

    @SuppressWarnings("unchecked")
    public BaseCloudEventBuilder() {
        // Safe by the self-referential bound on SELF: a concrete subclass is its own SELF.
        this.self = (SELF) this;
    }

    /** Creates a builder pre-populated with the attributes of an existing context. */
    public BaseCloudEventBuilder(CloudEventContext context) {
        this();
        setAttributes(context);
    }

    /** Creates a builder pre-populated with the attributes and data of an existing event. */
    public BaseCloudEventBuilder(CloudEvent event) {
        this();
        this.setAttributes(event);
        this.data = event.getData();
    }

    /** Copies the context attributes of {@code event} into this builder. */
    protected abstract void setAttributes(CloudEventContext event);

    //TODO builder should accept data as Object and use data codecs (that we need to implement)
    // to encode data

    public SELF withData(byte[] data) {
        this.data = BytesCloudEventData.wrap(data);
        return this.self;
    }

    public SELF withData(String dataContentType, byte[] data) {
        withDataContentType(dataContentType);
        withData(data);
        return this.self;
    }

    public SELF withData(String dataContentType, URI dataSchema, byte[] data) {
        withDataContentType(dataContentType);
        withDataSchema(dataSchema);
        withData(data);
        return this.self;
    }

    public SELF withData(CloudEventData data) {
        this.data = data;
        return this.self;
    }

    public SELF withData(String dataContentType, CloudEventData data) {
        withDataContentType(dataContentType);
        withData(data);
        return this.self;
    }

    public SELF withData(String dataContentType, URI dataSchema, CloudEventData data) {
        withDataContentType(dataContentType);
        withDataSchema(dataSchema);
        withData(data);
        return this.self;
    }

    @Override
    public CloudEventBuilder withoutData() {
        this.data = null;
        return this.self;
    }

    @Override
    public CloudEventBuilder withoutDataSchema() {
        withDataSchema(null);
        return this.self;
    }

    @Override
    public CloudEventBuilder withoutDataContentType() {
        withDataContentType(null);
        return this.self;
    }

    public SELF withExtension(@Nonnull String key, @Nonnull String value) {
        return putValidatedExtension(key, value);
    }

    public SELF withExtension(@Nonnull String key, @Nonnull Number value) {
        return putValidatedExtension(key, value);
    }

    public SELF withExtension(@Nonnull String key, @Nonnull Boolean value) {
        return putValidatedExtension(key, value);
    }

    @Override
    public SELF withExtension(@Nonnull String key, @Nonnull URI value) {
        return putValidatedExtension(key, value);
    }

    @Override
    public SELF withExtension(@Nonnull String key, @Nonnull OffsetDateTime value) {
        return putValidatedExtension(key, value);
    }

    @Override
    public CloudEventBuilder withExtension(@Nonnull String key, @Nonnull byte[] value) {
        return putValidatedExtension(key, value);
    }

    @Override
    public SELF withoutExtension(@Nonnull String key) {
        this.extensions.remove(key);
        return self;
    }

    @Override
    public SELF withoutExtension(@Nonnull CloudEventExtension extension) {
        extension.getKeys().forEach(this::withoutExtension);
        return self;
    }

    // Note: materialized extensions are stored without name validation, matching the
    // previous behaviour for this overload; null values are skipped.
    public SELF withExtension(@Nonnull CloudEventExtension extension) {
        for (String key : extension.getKeys()) {
            Object value = extension.getValue(key);
            if (value != null) {
                this.extensions.put(key, value);
            }
        }
        return self;
    }

    @Override
    public CloudEvent end(CloudEventData value) throws CloudEventRWException {
        this.data = value;
        return build();
    }

    @Override
    public CloudEvent end() {
        try {
            return build();
        } catch (Exception e) {
            throw CloudEventRWException.newOther(e);
        }
    }

    protected static IllegalStateException createMissingAttributeException(String attributeName) {
        return new IllegalStateException("Attribute '" + attributeName + "' cannot be null");
    }

    /**
     * Validates {@code key} and stores the extension value. Shared by all typed
     * {@code withExtension} overloads so the validation lives in exactly one place.
     *
     * @throws CloudEventRWException if the key is not a valid extension name
     */
    private SELF putValidatedExtension(String key, Object value) {
        if (!isValidExtensionName(key)) {
            throw CloudEventRWException.newInvalidExtensionName(key);
        }
        this.extensions.put(key, value);
        return self;
    }

    /**
     * Validates the extension name as defined in CloudEvents spec.
     *
     * @param name the extension name
     * @return true if extension name is valid, false otherwise
     * @see <a href="https://github.com/cloudevents/spec/blob/master/spec.md#attribute-naming-convention">attribute-naming-convention</a>
     */
    private static boolean isValidExtensionName(String name) {
        // An attribute name needs at least one character; the empty string used to be
        // accepted here vacuously, producing spec-invalid events.
        if (name.isEmpty()) {
            return false;
        }
        for (int i = 0; i < name.length(); i++) {
            if (!isValidChar(name.charAt(i))) {
                return false;
            }
        }
        return true;
    }

    // The spec restricts attribute names to lowercase ASCII letters and digits.
    private static boolean isValidChar(char c) {
        return (c >= 'a' && c <= 'z') || (c >= '0' && c <= '9');
    }

    /**
     * Rejects writes of the reserved {@code specversion} attribute through the generic
     * attribute-write path; the spec version is fixed by the concrete builder.
     */
    protected void requireValidAttributeWrite(String name) {
        if (name.equals(SPECVERSION)) {
            throw new IllegalArgumentException("You should not set the specversion attribute through withContextAttribute methods");
        }
    }
}
| |
/*
* Copyright 2018 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.google.bigtable.v2;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableMap;
import com.google.api.pathtemplate.PathTemplate;
import com.google.api.resourcenames.ResourceName;
import com.google.api.resourcenames.ResourceNameType;
import java.util.Map;
import java.util.ArrayList;
import java.util.List;
// AUTO-GENERATED DOCUMENTATION AND CLASS
/**
 * Typed resource name for a Cloud Bigtable table, formatted as
 * {@code projects/{project}/instances/{instance}/tables/{table}}.
 *
 * <p>NOTE: this class is generated by the GAPIC protoc plugin. Do not hand-edit the
 * code; changes would be lost on regeneration.
 */
@javax.annotation.Generated("by GAPIC protoc plugin")
public class TableName implements ResourceName {
    /** Path template every formatted table name must match. */
    private static final PathTemplate PATH_TEMPLATE =
        PathTemplate.createWithoutUrlEncoding("projects/{project}/instances/{instance}/tables/{table}");
    // Built lazily in getFieldValuesMap(); volatile is required by the
    // double-checked locking used there.
    private volatile Map<String, String> fieldValuesMap;
    private final String project;
    private final String instance;
    private final String table;

    public String getProject() {
        return project;
    }

    public String getInstance() {
        return instance;
    }

    public String getTable() {
        return table;
    }

    public static Builder newBuilder() {
        return new Builder();
    }

    public Builder toBuilder() {
        return new Builder(this);
    }

    // All three components are mandatory; a null component fails fast here.
    private TableName(Builder builder) {
        project = Preconditions.checkNotNull(builder.getProject());
        instance = Preconditions.checkNotNull(builder.getInstance());
        table = Preconditions.checkNotNull(builder.getTable());
    }

    /** Creates a TableName from its three components. */
    public static TableName of(String project, String instance, String table) {
        return newBuilder()
            .setProject(project)
            .setInstance(instance)
            .setTable(table)
            .build();
    }

    /** Formats the three components into the full resource-name string. */
    public static String format(String project, String instance, String table) {
        return newBuilder()
            .setProject(project)
            .setInstance(instance)
            .setTable(table)
            .build()
            .toString();
    }

    /**
     * Parses a formatted resource name back into a TableName.
     * Returns null for the empty string; other non-matching input makes the
     * template validation throw.
     */
    public static TableName parse(String formattedString) {
        if (formattedString.isEmpty()) {
            return null;
        }
        Map<String, String> matchMap =
            PATH_TEMPLATE.validatedMatch(formattedString, "TableName.parse: formattedString not in valid format");
        return of(matchMap.get("project"), matchMap.get("instance"), matchMap.get("table"));
    }

    /** Parses each string in order; empty strings become null entries (see parse). */
    public static List<TableName> parseList(List<String> formattedStrings) {
        List<TableName> list = new ArrayList<>(formattedStrings.size());
        for (String formattedString : formattedStrings) {
            list.add(parse(formattedString));
        }
        return list;
    }

    /** Formats each value; null entries become empty strings (inverse of parseList). */
    public static List<String> toStringList(List<TableName> values) {
        List<String> list = new ArrayList<String>(values.size());
        for (TableName value : values) {
            if (value == null) {
                list.add("");
            } else {
                list.add(value.toString());
            }
        }
        return list;
    }

    public static boolean isParsableFrom(String formattedString) {
        return PATH_TEMPLATE.matches(formattedString);
    }

    /** Returns an immutable component map, built once via double-checked locking. */
    public Map<String, String> getFieldValuesMap() {
        if (fieldValuesMap == null) {
            synchronized (this) {
                if (fieldValuesMap == null) {
                    ImmutableMap.Builder<String, String> fieldMapBuilder = ImmutableMap.builder();
                    fieldMapBuilder.put("project", project);
                    fieldMapBuilder.put("instance", instance);
                    fieldMapBuilder.put("table", table);
                    fieldValuesMap = fieldMapBuilder.build();
                }
            }
        }
        return fieldValuesMap;
    }

    public String getFieldValue(String fieldName) {
        return getFieldValuesMap().get(fieldName);
    }

    /**
     * @deprecated This method is only present to satisfy the ResourceName interface.
     */
    @Deprecated
    public ResourceNameType getType() {
        throw new UnsupportedOperationException("TableName.getType() not supported");
    }

    @Override
    public String toString() {
        return PATH_TEMPLATE.instantiate("project", project, "instance", instance, "table", table);
    }

    /** Builder for TableName. */
    public static class Builder {
        private String project;
        private String instance;
        private String table;

        public String getProject() {
            return project;
        }

        public String getInstance() {
            return instance;
        }

        public String getTable() {
            return table;
        }

        public Builder setProject(String project) {
            this.project = project;
            return this;
        }

        public Builder setInstance(String instance) {
            this.instance = instance;
            return this;
        }

        public Builder setTable(String table) {
            this.table = table;
            return this;
        }

        private Builder() {
        }

        private Builder(TableName tableName) {
            project = tableName.project;
            instance = tableName.instance;
            table = tableName.table;
        }

        public TableName build() {
            return new TableName(this);
        }
    }

    @Override
    public boolean equals(Object o) {
        if (o == this) {
            return true;
        }
        if (o instanceof TableName) {
            TableName that = (TableName) o;
            return (this.project.equals(that.project))
                && (this.instance.equals(that.instance))
                && (this.table.equals(that.table));
        }
        return false;
    }

    // AutoValue-style hash combining all three components.
    @Override
    public int hashCode() {
        int h = 1;
        h *= 1000003;
        h ^= project.hashCode();
        h *= 1000003;
        h ^= instance.hashCode();
        h *= 1000003;
        h ^= table.hashCode();
        return h;
    }
}
| |
/*
* Copyright 2012-2014 the original author or authors.
*
* Licensed under the MIT license (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://opensource.org/licenses/mit-license.php
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.schildbach.wallet.util;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Reader;
import java.security.SecureRandom;
import java.util.Arrays;
import javax.annotation.Nonnull;
import org.spongycastle.crypto.BufferedBlockCipher;
import org.spongycastle.crypto.CipherParameters;
import org.spongycastle.crypto.DataLengthException;
import org.spongycastle.crypto.InvalidCipherTextException;
import org.spongycastle.crypto.PBEParametersGenerator;
import org.spongycastle.crypto.engines.AESFastEngine;
import org.spongycastle.crypto.generators.OpenSSLPBEParametersGenerator;
import org.spongycastle.crypto.modes.CBCBlockCipher;
import org.spongycastle.crypto.paddings.PaddedBufferedBlockCipher;
import org.spongycastle.crypto.params.ParametersWithIV;
import com.google.common.base.Charsets;
import com.google.common.io.BaseEncoding;
/**
* This class encrypts and decrypts a string in a manner that is compatible with OpenSSL.
*
* If you encrypt a string with this class you can decrypt it with the OpenSSL command: openssl enc -d -aes-256-cbc -a
* -in cipher.txt -out plain.txt -pass pass:aTestPassword
*
* where: cipher.txt = file containing the cipher text plain.txt - where you want the plaintext to be saved
*
* substitute your password for "aTestPassword" or remove the "-pass" parameter to be prompted.
*
* @author jim
* @author Andreas Schildbach
*/
public class Crypto
{
    private static final BaseEncoding BASE64_ENCRYPT = BaseEncoding.base64().withSeparator("\n", 76);
    // NOTE(review): decoding is configured with CRLF separators, presumably to accept
    // ciphertext that passed through Windows line-ending conversion — verify against callers.
    private static final BaseEncoding BASE64_DECRYPT = BaseEncoding.base64().withSeparator("\r\n", 76);
    /**
     * Number of times the password & salt are hashed during key creation.
     */
    private static final int NUMBER_OF_ITERATIONS = 1024;
    /**
     * Key length, in bits.
     */
    private static final int KEY_LENGTH = 256;
    /**
     * Initialization vector length, in bits.
     */
    private static final int IV_LENGTH = 128;
    /**
     * The length of the salt, in bytes (see encryptRaw).
     */
    private static final int SALT_LENGTH = 8;
    /**
     * OpenSSL salted prefix text.
     */
    private static final String OPENSSL_SALTED_TEXT = "Salted__";
    /**
     * OpenSSL salted prefix bytes - also used as magic number for encrypted key file.
     */
    private static final byte[] OPENSSL_SALTED_BYTES = OPENSSL_SALTED_TEXT.getBytes(Charsets.UTF_8);
    /**
     * Magic text that appears at the beginning of every OpenSSL encrypted file. Used in identifying encrypted key
     * files.
     *
     * NOTE(review): this initializer refers to NUMBER_OF_CHARACTERS_TO_MATCH_IN_OPENSSL_MAGIC_TEXT,
     * which is declared below it. That works only because the field is a compile-time int constant
     * the compiler inlines; declaring it above this field would make the ordering robust.
     */
    private static final String OPENSSL_MAGIC_TEXT = BASE64_ENCRYPT.encode(Crypto.OPENSSL_SALTED_BYTES).substring(0,
        Crypto.NUMBER_OF_CHARACTERS_TO_MATCH_IN_OPENSSL_MAGIC_TEXT);
    private static final int NUMBER_OF_CHARACTERS_TO_MATCH_IN_OPENSSL_MAGIC_TEXT = 10;
    /** Single shared RNG used for salt generation. */
    private static final SecureRandom secureRandom = new SecureRandom();
/**
* Get password and generate key and iv.
*
* @param password
* The password to use in key generation
* @param salt
* The salt to use in key generation
* @return The CipherParameters containing the created key
*/
private static CipherParameters getAESPasswordKey(final char[] password, final byte[] salt)
{
final PBEParametersGenerator generator = new OpenSSLPBEParametersGenerator();
generator.init(PBEParametersGenerator.PKCS5PasswordToBytes(password), salt, NUMBER_OF_ITERATIONS);
final ParametersWithIV key = (ParametersWithIV) generator.generateDerivedParameters(KEY_LENGTH, IV_LENGTH);
return key;
}
/**
* Password based encryption using AES - CBC 256 bits.
*
* @param plainText
* The text to encrypt
* @param password
* The password to use for encryption
* @return The encrypted string
* @throws IOException
*/
public static String encrypt(@Nonnull final String plainText, @Nonnull final char[] password) throws IOException
{
final byte[] plainTextAsBytes = plainText.getBytes(Charsets.UTF_8);
return encrypt(plainTextAsBytes, password);
}
/**
 * Password based encryption using AES - CBC 256 bits.
 *
 * @param plainTextAsBytes
 *            The bytes to encrypt
 * @param password
 *            The password to use for encryption
 * @return The encrypted string
 * @throws IOException
 *             if encryption fails
 */
public static String encrypt(@Nonnull final byte[] plainTextAsBytes, @Nonnull final char[] password) throws IOException
{
    // OpenSSL convention: base64("Salted__" + salt + ciphertext). encryptRaw already
    // returns salt + ciphertext, so only the marker prefix is added here.
    final byte[] saltedCipherText = concat(OPENSSL_SALTED_BYTES, encryptRaw(plainTextAsBytes, password));
    return BASE64_ENCRYPT.encode(saltedCipherText);
}
/**
 * Password based encryption using AES - CBC 256 bits.
 *
 * @param plainTextAsBytes
 *            The bytes to encrypt
 * @param password
 *            The password to use for encryption
 * @return SALT_LENGTH bytes of salt followed by the encrypted bytes.
 * @throws IOException
 *             if the cipher rejects the input
 */
private static byte[] encryptRaw(final byte[] plainTextAsBytes, final char[] password) throws IOException
{
    try
    {
        // Generate salt - each encryption call has a different salt.
        final byte[] salt = new byte[SALT_LENGTH];
        secureRandom.nextBytes(salt);
        // getAESPasswordKey's declared type already fits cipher.init; no downcast needed.
        final CipherParameters key = getAESPasswordKey(password, salt);
        // AES/CBC with padding, keyed by the password-derived key + IV.
        final BufferedBlockCipher cipher = new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESFastEngine()));
        cipher.init(true, key);
        final byte[] encryptedBytes = new byte[cipher.getOutputSize(plainTextAsBytes.length)];
        final int processLen = cipher.processBytes(plainTextAsBytes, 0, plainTextAsBytes.length, encryptedBytes, 0);
        final int doFinalLen = cipher.doFinal(encryptedBytes, processLen);
        // The result bytes are the SALT_LENGTH salt bytes followed by the encrypted bytes.
        return concat(salt, Arrays.copyOf(encryptedBytes, processLen + doFinalLen));
    }
    catch (final InvalidCipherTextException | DataLengthException x)
    {
        // Both failures are wrapped identically; multi-catch removes the duplicated body.
        throw new IOException("Could not encrypt bytes", x);
    }
}
/**
 * Decrypt text previously encrypted with this class.
 *
 * @param textToDecode
 *            The code to decrypt
 * @param password
 *            password to use for decryption
 * @return The decrypted text
 * @throws IOException
 *             if decoding or decryption fails
 */
public static String decrypt(@Nonnull final String textToDecode, @Nonnull final char[] password) throws IOException
{
    // Decode UTF-8 and trim, mirroring the encrypt(String) entry point.
    return new String(decryptBytes(textToDecode, password), Charsets.UTF_8).trim();
}
/**
 * Decrypt bytes previously encrypted with this class.
 *
 * @param textToDecode
 *            The code to decrypt
 * @param password
 *            password to use for decryption
 * @return The decrypted bytes
 * @throws IOException
 *             if the input is not valid base64, is too short, or cannot be decrypted
 */
public static byte[] decryptBytes(@Nonnull final String textToDecode, @Nonnull final char[] password) throws IOException
{
    final byte[] decodeTextAsBytes;
    try
    {
        decodeTextAsBytes = BASE64_DECRYPT.decode(textToDecode);
    }
    catch (final IllegalArgumentException x)
    {
        // Preserve the cause so callers can see why the base64 decode failed.
        throw new IOException("invalid base64 encoding", x);
    }
    if (decodeTextAsBytes.length < OPENSSL_SALTED_BYTES.length)
        throw new IOException("out of salt");
    // Strip the "Salted__" marker; the remainder is salt + ciphertext for decryptRaw.
    final byte[] cipherBytes =
            Arrays.copyOfRange(decodeTextAsBytes, OPENSSL_SALTED_BYTES.length, decodeTextAsBytes.length);
    return decryptRaw(cipherBytes, password);
}
/**
 * Decrypt bytes previously encrypted with this class.
 *
 * @param bytesToDecode
 *            SALT_LENGTH bytes of salt followed by the ciphertext
 * @param password
 *            The password to use for decryption
 * @return The decrypted bytes
 * @throws IOException
 *             if the input is shorter than SALT_LENGTH or cannot be decrypted
 */
private static byte[] decryptRaw(final byte[] bytesToDecode, final char[] password) throws IOException
{
    // Guard: without this a short input would surface as ArrayIndexOutOfBoundsException
    // instead of the IOException this method is documented to throw.
    if (bytesToDecode.length < SALT_LENGTH)
        throw new IOException("out of salt");
    try
    {
        // separate the salt and bytes to decrypt
        final byte[] salt = Arrays.copyOf(bytesToDecode, SALT_LENGTH);
        final byte[] cipherBytes = Arrays.copyOfRange(bytesToDecode, SALT_LENGTH, bytesToDecode.length);
        final CipherParameters key = getAESPasswordKey(password, salt);
        // decrypt the message with the same AES/CBC + padding configuration used to encrypt
        final BufferedBlockCipher cipher = new PaddedBufferedBlockCipher(new CBCBlockCipher(new AESFastEngine()));
        cipher.init(false, key);
        final byte[] decryptedBytes = new byte[cipher.getOutputSize(cipherBytes.length)];
        final int processLen = cipher.processBytes(cipherBytes, 0, cipherBytes.length, decryptedBytes, 0);
        final int doFinalLen = cipher.doFinal(decryptedBytes, processLen);
        return Arrays.copyOf(decryptedBytes, processLen + doFinalLen);
    }
    catch (final InvalidCipherTextException | DataLengthException x)
    {
        throw new IOException("Could not decrypt bytes", x);
    }
}
/**
 * Concatenate two byte arrays into a newly allocated array.
 *
 * @param arrayA
 *            copied to the front of the result
 * @param arrayB
 *            copied immediately after arrayA
 * @return a new array of length arrayA.length + arrayB.length
 */
private static byte[] concat(final byte[] arrayA, final byte[] arrayB)
{
    // Arrays.copyOf allocates the result and copies arrayA in a single step.
    final byte[] result = Arrays.copyOf(arrayA, arrayA.length + arrayB.length);
    System.arraycopy(arrayB, 0, result, arrayA.length, arrayB.length);
    return result;
}
/**
 * Accepts files whose content starts with the OpenSSL base64 magic text; any
 * read problem (missing/unreadable/short file) simply rejects the file.
 */
public final static FileFilter OPENSSL_FILE_FILTER = new FileFilter()
{
    @Override
    public boolean accept(final File file)
    {
        // Buffer is local: the previous shared field made this filter unsafe to
        // call from multiple threads at once.
        final char[] buf = new char[OPENSSL_MAGIC_TEXT.length()];
        Reader in = null;
        try
        {
            in = new InputStreamReader(new FileInputStream(file), Charsets.UTF_8);
            // A single read(buf) may legally return fewer chars than requested,
            // so loop until the buffer is full or the stream ends early.
            int off = 0;
            while (off < buf.length)
            {
                final int n = in.read(buf, off, buf.length - off);
                if (n == -1)
                    return false; // file shorter than the magic text
                off += n;
            }
            return new String(buf).equals(OPENSSL_MAGIC_TEXT);
        }
        catch (final IOException x)
        {
            return false;
        }
        finally
        {
            if (in != null)
            {
                try
                {
                    in.close();
                }
                catch (final IOException ignored)
                {
                    // best-effort close: nothing useful to do while filtering
                }
            }
        }
    }
};
}
| |
import java.util.ArrayList;
/** Base class for all expression AST nodes. */
public abstract class ExpressionNode extends Node {
    // Source position of the expression, used for diagnostics.
    int pos;
}
/** Array literal: compiles its elements in reverse order, then emits "createArr" with the count. */
class ArrayExpressionNode extends ExpressionNode {
    ArrayList<ExpressionNode> elements = new ArrayList<ExpressionNode>();
    void compile(CodeWriter cw) throws SyntaxErrorException {
        // Push the last element first so "createArr" consumes them in declaration order.
        for (int idx = elements.size(); idx-- > 0;) {
            elements.get(idx).compile(cw);
        }
        cw.writeInstruction("createArr", Integer.toString(elements.size()));
    }
}
/** Index read (object[index]): pushes the object, then the index, then emits "loadIdx". */
class IndexExpressionNode extends ExpressionNode {
    ExpressionNode object;
    ExpressionNode index;
    void compile(CodeWriter cw) throws SyntaxErrorException {
        // Operand order matters: "loadIdx" expects the object below the index on the stack.
        this.object.compile(cw);
        this.index.compile(cw);
        cw.writeInstruction("loadIdx");
    }
}
/** Numeric literal; compiles to a single "push" of its float value. */
class NumberNode extends ExpressionNode {
    float value;
    public NumberNode(float value, int pos) {
        this.value = value;
        this.pos = pos;
    }
    void compile(CodeWriter cw) {
        final String literal = Float.toString(this.value);
        cw.writeInstruction("push", literal);
    }
}
/** String literal; compiles to a single "pushstr" carrying its content. */
class StringNode extends ExpressionNode {
    // Literal text emitted verbatim as the instruction operand.
    String content;
    void compile(CodeWriter cw) throws SyntaxErrorException {
        cw.writeInstruction("pushstr", content);
    }
}
/**
 * Binary expression node. For ordinary operators both operands are compiled
 * (left first, then right) and a single opcode is emitted; Assign is handled
 * separately because its left side is a storage target, not a value.
 */
class BinaryNode extends ExpressionNode {
    Operator opt;
    ExpressionNode left, right;
    public enum Operator {
        Add, Sub, Mul, Div,
        CompareEqual, GreaterEqual, LessEqual, NotEqual, Greater, Less,
        BitAnd, BitOr, BitXor, And, Or, Mod, Assign, Bracket
    }
    public BinaryNode() {
        this.opt = null;
        this.left = null;
        this.right = null;
        this.pos = -1;
    }
    public BinaryNode(Operator opt, ExpressionNode left, ExpressionNode right,
            int pos) {
        this.opt = opt;
        this.left = left;
        this.right = right;
        this.pos = pos;
    }
    void compile(CodeWriter cw) throws SyntaxErrorException {
        if (this.opt == Operator.Assign) {
            compileAssignment(cw);
            return;
        }
        String opcode = opcodeFor(this.opt);
        if (opcode == null) {
            // Matches the original switch default: Bracket (and any operator
            // without an opcode) compiles to nothing at all.
            return;
        }
        this.left.compile(cw);
        this.right.compile(cw);
        cw.writeInstruction(opcode);
    }
    /** Emits code for "target = value"; the target must be a variable or an index expression. */
    private void compileAssignment(CodeWriter cw) throws SyntaxErrorException {
        if (this.left instanceof VariableNode) {
            this.right.compile(cw);
            cw.writeInstruction("store", ((VariableNode) this.left).varName);
        } else if (this.left instanceof IndexExpressionNode) {
            IndexExpressionNode idxn = (IndexExpressionNode) this.left;
            // storeIdx expects value, index, object pushed in that order.
            this.right.compile(cw);
            idxn.index.compile(cw);
            idxn.object.compile(cw);
            cw.writeInstruction("storeIdx");
        } else {
            // NOTE(review): position 0 is hard-coded here as in the original;
            // this.pos might be the more useful value — confirm before changing.
            throw new SyntaxErrorException(0);
        }
    }
    /** Maps a simple binary operator to its VM opcode, or null when it emits nothing. */
    private static String opcodeFor(Operator opt) {
        switch (opt) {
        case Add: return "add";
        case Sub: return "sub";
        case Mul: return "mul";
        case Div: return "div";
        case And: return "and";
        case BitAnd: return "band";
        case BitOr: return "bor";
        case BitXor: return "bxor";
        case CompareEqual: return "eql";
        case Greater: return "gt";
        case GreaterEqual: return "ge";
        case Less: return "lt";
        case LessEqual: return "le";
        case NotEqual: return "neq";
        case Or: return "or";
        case Mod: return "mod";
        default: return null;
        }
    }
}
/**
 * Function call with a single argument: compiles the argument, then emits
 * "call" with the function name. (Class name keeps the original's spelling,
 * since renaming would break callers.)
 */
class InovocationNode extends ExpressionNode {
    String functionName;
    ExpressionNode argument;
    public InovocationNode(String fun, ExpressionNode arg) {
        this.functionName = fun;
        this.argument = arg;
    }
    void compile(CodeWriter cw) throws SyntaxErrorException {
        // The argument value must be on the stack before the call instruction.
        argument.compile(cw);
        cw.writeInstruction("call", functionName);
    }
}
/** Unary expression: arithmetic negation ("neg") or logical not ("not") of one operand. */
class UnaryNode extends ExpressionNode {
    Operator opt;
    ExpressionNode node;
    public enum Operator {
        neg, not
    }
    @Override
    void compile(CodeWriter cw) throws SyntaxErrorException {
        this.node.compile(cw);
        cw.writeInstruction(this.opt == Operator.neg ? "neg" : "not");
    }
}
/** Variable reference; compiles to a single "load" of the variable's name. */
class VariableNode extends ExpressionNode {
    // Name resolved by the VM at load/store time; also read by BinaryNode for assignment.
    String varName;
    void compile(CodeWriter cw) {
        cw.writeInstruction("load", varName);
    }
}
| |
package org.apache.maven.plugins.enforcer;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.commons.lang.SystemUtils;
import org.apache.maven.enforcer.rule.api.EnforcerRuleException;
import org.apache.maven.enforcer.rule.api.EnforcerRuleHelper;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.Dependency;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.project.MavenProject;
import org.codehaus.plexus.component.configurator.expression.ExpressionEvaluationException;
import org.codehaus.plexus.util.StringUtils;
/**
 * This rule will check if a multi module build will follow the best practices.
 *
 * @author Karl-Heinz Marbaise
 * @since 1.4
 */
public class ReactorModuleConvergence
    extends AbstractNonCacheableEnforcerRule
{
    /** When true, dependency versions between reactor modules are not checked. */
    private boolean ignoreModuleDependencies = false;

    private Log logger;

    public void execute( EnforcerRuleHelper helper )
        throws EnforcerRuleException
    {
        logger = helper.getLog();

        MavenSession session;
        try
        {
            session = (MavenSession) helper.evaluate( "${session}" );
        }
        catch ( ExpressionEvaluationException eee )
        {
            throw new EnforcerRuleException( "Unable to retrieve the MavenSession: ", eee );
        }

        List<MavenProject> sortedProjects = session.getSortedProjects();
        if ( sortedProjects != null && !sortedProjects.isEmpty() )
        {
            checkReactor( sortedProjects );
            checkParentsInReactor( sortedProjects );
            checkMissingParentsInReactor( sortedProjects );
            checkParentsPartOfTheReactor( sortedProjects );
            if ( !isIgnoreModuleDependencies() )
            {
                checkDependenciesWithinReactor( sortedProjects );
            }
        }
    }

    /**
     * Fails the build when a module's parent can not be found among the reactor projects.
     *
     * @param sortedProjects The list of reactor projects.
     * @throws EnforcerRuleException In case of a violation.
     */
    private void checkParentsPartOfTheReactor( List<MavenProject> sortedProjects )
        throws EnforcerRuleException
    {
        List<MavenProject> parentsWhichAreNotPartOfTheReactor =
            existParentsWhichAreNotPartOfTheReactor( sortedProjects );
        if ( !parentsWhichAreNotPartOfTheReactor.isEmpty() )
        {
            StringBuilder sb = new StringBuilder().append( SystemUtils.LINE_SEPARATOR );
            addMessageIfExist( sb );
            appendProjects( sb, parentsWhichAreNotPartOfTheReactor, " module: " );
            throw new EnforcerRuleException( "Module parents have been found which could not be found in the reactor."
                + sb.toString() );
        }
    }

    /**
     * Fails the build when the reactor contains modules without a parent.
     *
     * @param sortedProjects The list of reactor projects.
     * @throws EnforcerRuleException In case of a violation.
     */
    private void checkMissingParentsInReactor( List<MavenProject> sortedProjects )
        throws EnforcerRuleException
    {
        List<MavenProject> modulesWithoutParentsInReactor = existModulesWithoutParentsInReactor( sortedProjects );
        if ( !modulesWithoutParentsInReactor.isEmpty() )
        {
            StringBuilder sb = new StringBuilder().append( SystemUtils.LINE_SEPARATOR );
            addMessageIfExist( sb );
            appendProjects( sb, modulesWithoutParentsInReactor, " module: " );
            throw new EnforcerRuleException( "Reactor contains modules without parents." + sb.toString() );
        }
    }

    /**
     * Fails the build when a reactor module declares a dependency on another reactor module
     * with a version different from the reactor's version.
     *
     * @param sortedProjects The list of reactor projects.
     * @throws EnforcerRuleException In case of a violation.
     */
    private void checkDependenciesWithinReactor( List<MavenProject> sortedProjects )
        throws EnforcerRuleException
    {
        // After we are sure having consistent version we can simply use the first one?
        String reactorVersion = sortedProjects.get( 0 ).getVersion();
        Map<MavenProject, List<Dependency>> areThereDependenciesWhichAreNotPartOfTheReactor =
            areThereDependenciesWhichAreNotPartOfTheReactor( reactorVersion, sortedProjects );
        if ( !areThereDependenciesWhichAreNotPartOfTheReactor.isEmpty() )
        {
            StringBuilder sb = new StringBuilder().append( SystemUtils.LINE_SEPARATOR );
            addMessageIfExist( sb );
            // CHECKSTYLE_OFF: LineLength
            for ( Entry<MavenProject, List<Dependency>> item : areThereDependenciesWhichAreNotPartOfTheReactor.entrySet() )
            {
                sb.append( " module: " );
                sb.append( item.getKey().getId() );
                sb.append( SystemUtils.LINE_SEPARATOR );
                for ( Dependency dependency : item.getValue() )
                {
                    String id =
                        dependency.getGroupId() + ":" + dependency.getArtifactId() + ":" + dependency.getVersion();
                    sb.append( "    dependency: " );
                    sb.append( id );
                    sb.append( SystemUtils.LINE_SEPARATOR );
                }
            }
            throw new EnforcerRuleException(
                                             "Reactor modules contains dependencies which do not reference the reactor."
                                                 + sb.toString() );
            // CHECKSTYLE_ON: LineLength
        }
    }

    /**
     * Fails the build when a module's parent declares a version different from the reactor version.
     *
     * @param sortedProjects The list of reactor projects.
     * @throws EnforcerRuleException In case of a violation.
     */
    private void checkParentsInReactor( List<MavenProject> sortedProjects )
        throws EnforcerRuleException
    {
        // After we are sure having consistent version we can simply use the first one?
        String reactorVersion = sortedProjects.get( 0 ).getVersion();
        List<MavenProject> areParentsFromTheReactor = areParentsFromTheReactor( reactorVersion, sortedProjects );
        if ( !areParentsFromTheReactor.isEmpty() )
        {
            StringBuilder sb = new StringBuilder().append( SystemUtils.LINE_SEPARATOR );
            addMessageIfExist( sb );
            for ( MavenProject mavenProject : areParentsFromTheReactor )
            {
                sb.append( " --> " );
                sb.append( mavenProject.getId() );
                sb.append( " parent:" );
                sb.append( mavenProject.getParent().getId() );
                sb.append( SystemUtils.LINE_SEPARATOR );
            }
            throw new EnforcerRuleException( "Reactor modules have parents which contain a wrong version."
                + sb.toString() );
        }
    }

    /**
     * Fails the build when the reactor modules declare differing versions.
     *
     * @param sortedProjects The list of reactor projects.
     * @throws EnforcerRuleException In case of a violation.
     */
    private void checkReactor( List<MavenProject> sortedProjects )
        throws EnforcerRuleException
    {
        List<MavenProject> consistenceCheckResult = isReactorVersionConsistent( sortedProjects );
        if ( !consistenceCheckResult.isEmpty() )
        {
            StringBuilder sb = new StringBuilder().append( SystemUtils.LINE_SEPARATOR );
            addMessageIfExist( sb );
            appendProjects( sb, consistenceCheckResult, " --> " );
            throw new EnforcerRuleException( "The reactor contains different versions." + sb.toString() );
        }
    }

    /**
     * Collects modules whose parent's version differs from the reactor version.
     *
     * @param reactorVersion The version every parent is expected to declare.
     * @param sortedProjects The list of reactor projects.
     * @return The offending modules. Never null; empty means no violation.
     */
    private List<MavenProject> areParentsFromTheReactor( String reactorVersion, List<MavenProject> sortedProjects )
    {
        List<MavenProject> result = new ArrayList<MavenProject>();
        for ( MavenProject mavenProject : sortedProjects )
        {
            logger.debug( "Project: " + mavenProject.getId() );
            if ( hasParent( mavenProject ) )
            {
                if ( !mavenProject.isExecutionRoot() )
                {
                    MavenProject parent = mavenProject.getParent();
                    if ( !reactorVersion.equals( parent.getVersion() ) )
                    {
                        logger.debug( "The project: " + mavenProject.getId()
                            + " has a parent which version does not match the other elements in reactor" );
                        result.add( mavenProject );
                    }
                }
            }
            else
            {
                // This situation is currently ignored, cause it's handled by existModulesWithoutParentsInReactor()
            }
        }
        return result;
    }

    /**
     * Collects modules whose parent is not one of the reactor projects.
     *
     * @param sortedProjects The list of reactor projects.
     * @return The offending modules. Never null; empty means no violation.
     */
    private List<MavenProject> existParentsWhichAreNotPartOfTheReactor( List<MavenProject> sortedProjects )
    {
        List<MavenProject> result = new ArrayList<MavenProject>();
        for ( MavenProject mavenProject : sortedProjects )
        {
            logger.debug( "Project: " + mavenProject.getId() );
            if ( hasParent( mavenProject ) && !mavenProject.isExecutionRoot() )
            {
                MavenProject parent = mavenProject.getParent();
                if ( !isProjectPartOfTheReactor( parent, sortedProjects ) )
                {
                    result.add( mavenProject );
                }
            }
        }
        return result;
    }

    /**
     * This will check of the groupId/artifactId can be found in any reactor project. The version will be ignored cause
     * versions are checked before.
     *
     * @param project The project which should be checked if it is contained in the sortedProjects.
     * @param sortedProjects The list of existing projects.
     * @return true if the project has been found within the list false otherwise.
     */
    private boolean isProjectPartOfTheReactor( MavenProject project, List<MavenProject> sortedProjects )
    {
        return isGAPartOfTheReactor( project.getGroupId(), project.getArtifactId(), sortedProjects );
    }

    /**
     * Checks whether a dependency's groupId/artifactId belongs to a reactor project.
     *
     * @param dependency The dependency to check.
     * @param sortedProjects The list of existing projects.
     * @return true if the dependency targets a reactor module.
     */
    private boolean isDependencyPartOfTheReactor( Dependency dependency, List<MavenProject> sortedProjects )
    {
        return isGAPartOfTheReactor( dependency.getGroupId(), dependency.getArtifactId(), sortedProjects );
    }

    /**
     * This will check if the given <code>groupId/artifactId</code> is part of the current reactor.
     *
     * @param groupId The groupId
     * @param artifactId The artifactId
     * @param sortedProjects The list of projects within the reactor.
     * @return true if the groupId/artifactId is part of the reactor false otherwise.
     */
    private boolean isGAPartOfTheReactor( String groupId, String artifactId, List<MavenProject> sortedProjects )
    {
        String parentId = groupId + ":" + artifactId;
        for ( MavenProject mavenProject : sortedProjects )
        {
            String projectId = mavenProject.getGroupId() + ":" + mavenProject.getArtifactId();
            if ( parentId.equals( projectId ) )
            {
                // Early return: no need to scan the remaining projects once found.
                return true;
            }
        }
        return false;
    }

    /**
     * Assume we have a module which is a child of a multi module build but this child does not have a parent. This
     * method will exactly search for such cases.
     *
     * @param sortedProjects The sorted list of the reactor modules.
     * @return The resulting list will contain the modules in the reactor which do not have a parent. The list will
     *         never null. If the list is empty no violation have happened.
     */
    private List<MavenProject> existModulesWithoutParentsInReactor( List<MavenProject> sortedProjects )
    {
        List<MavenProject> result = new ArrayList<MavenProject>();
        for ( MavenProject mavenProject : sortedProjects )
        {
            logger.debug( "Project: " + mavenProject.getId() );
            if ( !hasParent( mavenProject ) )
            {
                // TODO: Should add an option to force having a parent?
                if ( mavenProject.isExecutionRoot() )
                {
                    logger.debug( "The root does not need having a parent." );
                }
                else
                {
                    logger.debug( "The module: " + mavenProject.getId() + " has no parent." );
                    result.add( mavenProject );
                }
            }
        }
        return result;
    }

    /**
     * Convenience method to handle adding a dependency to the Map of List.
     *
     * @param result The result List which should be handled.
     * @param project The MavenProject which will be added.
     * @param dependency The dependency which will be added.
     */
    private void addDep( Map<MavenProject, List<Dependency>> result, MavenProject project, Dependency dependency )
    {
        // Single lookup replaces the previous containsKey/get/put dance; a mapped
        // value is never null here because this method is the only writer.
        List<Dependency> list = result.get( project );
        if ( list == null )
        {
            list = new ArrayList<Dependency>();
            result.put( project, list );
        }
        list.add( dependency );
    }

    /**
     * Go through the list of modules in the builds and check if we have dependencies. If yes we will check every
     * dependency based on groupId/artifactId if it belongs to the multi module build. In such a case it will be checked
     * if the version does fit the version in the rest of build.
     *
     * @param reactorVersion The version of the reactor.
     * @param sortedProjects The list of existing projects within this build.
     * @return List of violations. Never null. If the list is empty than no violation has happened.
     */
    // CHECKSTYLE_OFF: LineLength
    private Map<MavenProject, List<Dependency>> areThereDependenciesWhichAreNotPartOfTheReactor( String reactorVersion,
                                                                                                 List<MavenProject> sortedProjects )
    // CHECKSTYLE_ON: LineLength
    {
        Map<MavenProject, List<Dependency>> result = new HashMap<MavenProject, List<Dependency>>();
        for ( MavenProject mavenProject : sortedProjects )
        {
            logger.debug( "Project: " + mavenProject.getId() );
            @SuppressWarnings( "unchecked" )
            List<Dependency> dependencies = mavenProject.getDependencies();
            if ( hasDependencies( dependencies ) )
            {
                for ( Dependency dependency : dependencies )
                {
                    logger.debug( " -> Dep:" + dependency.getGroupId() + ":" + dependency.getArtifactId() + ":"
                        + dependency.getVersion() );
                    if ( isDependencyPartOfTheReactor( dependency, sortedProjects )
                        && !dependency.getVersion().equals( reactorVersion ) )
                    {
                        addDep( result, mavenProject, dependency );
                    }
                }
            }
        }
        return result;
    }

    /**
     * This method will check the following situation within a multi-module build.
     *
     * <pre>
     *  &lt;parent&gt;
     *    &lt;groupId&gt;...&lt;/groupId&gt;
     *    &lt;artifactId&gt;...&lt;/artifactId&gt;
     *    &lt;version&gt;1.0-SNAPSHOT&lt;/version&gt;
     *  &lt;/parent&gt;
     *
     *  &lt;version&gt;1.1-SNAPSHOT&lt;/version&gt;
     * </pre>
     *
     * @param projectList The sorted list of the reactor modules.
     * @return The resulting list will contain the modules in the reactor which do the thing in the example above. The
     *         list will never null. If the list is empty no violation have happened.
     */
    private List<MavenProject> isReactorVersionConsistent( List<MavenProject> projectList )
    {
        List<MavenProject> result = new ArrayList<MavenProject>();
        if ( projectList != null && !projectList.isEmpty() )
        {
            // The first module's version is taken as the reference for the whole reactor.
            String version = projectList.get( 0 ).getVersion();
            logger.debug( "First version:" + version );
            for ( MavenProject mavenProject : projectList )
            {
                logger.debug( " -> checking " + mavenProject.getId() );
                if ( !version.equals( mavenProject.getVersion() ) )
                {
                    result.add( mavenProject );
                }
            }
        }
        return result;
    }

    private boolean hasDependencies( List<Dependency> dependencies )
    {
        return dependencies != null && !dependencies.isEmpty();
    }

    private boolean hasParent( MavenProject mavenProject )
    {
        return mavenProject.getParent() != null;
    }

    public boolean isIgnoreModuleDependencies()
    {
        return ignoreModuleDependencies;
    }

    public void setIgnoreModuleDependencies( boolean ignoreModuleDependencies )
    {
        this.ignoreModuleDependencies = ignoreModuleDependencies;
    }

    /**
     * This will add the given user message to the output.
     *
     * @param sb The already initialized exception message part.
     */
    private void addMessageIfExist( StringBuilder sb )
    {
        if ( !StringUtils.isEmpty( getMessage() ) )
        {
            sb.append( getMessage() );
            sb.append( SystemUtils.LINE_SEPARATOR );
        }
    }

    /**
     * Appends one line per project, consisting of the given prefix and the project id.
     * Shared by the violation-message builders above.
     *
     * @param sb The message buffer to append to.
     * @param projects The projects to list.
     * @param prefix The text printed before each project id.
     */
    private void appendProjects( StringBuilder sb, List<MavenProject> projects, String prefix )
    {
        for ( MavenProject mavenProject : projects )
        {
            sb.append( prefix );
            sb.append( mavenProject.getId() );
            sb.append( SystemUtils.LINE_SEPARATOR );
        }
    }
}
| |
/*******************************************************************************
* Copyright (c) 2012, 2015 Pivotal Software, Inc.
*
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
*
* http://www.eclipse.org/legal/epl-v10.html
*
* and the Apache License v2.0 is available at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* You may elect to redistribute this code under either of these licenses.
*
* Contributors:
* Pivotal Software, Inc. - initial API and implementation
* IBM - Switching to use the more generic AbstractCloudFoundryUrl
* instead concrete CloudServerURL
********************************************************************************/
package org.eclipse.cft.server.ui.internal.editor;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import org.eclipse.cft.server.core.AbstractCloudFoundryUrl;
import org.eclipse.cft.server.core.internal.CloudFoundryPlugin;
import org.eclipse.cft.server.core.internal.CloudFoundryServer;
import org.eclipse.cft.server.core.internal.CloudFoundryBrandingExtensionPoint.CloudServerURL;
import org.eclipse.cft.server.ui.internal.CloudFoundryServerUiPlugin;
import org.eclipse.cft.server.ui.internal.CloudServerUIUtil;
import org.eclipse.cft.server.ui.internal.Messages;
import org.eclipse.cft.server.ui.internal.wizards.CloudUrlWizard;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.core.runtime.IStatus;
import org.eclipse.core.runtime.Status;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.MessageDialog;
import org.eclipse.jface.layout.GridDataFactory;
import org.eclipse.jface.layout.GridLayoutFactory;
import org.eclipse.jface.layout.TableColumnLayout;
import org.eclipse.jface.operation.IRunnableContext;
import org.eclipse.jface.viewers.ColumnWeightData;
import org.eclipse.jface.viewers.ILabelProviderListener;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.IStructuredContentProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITableLabelProvider;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.jface.wizard.WizardDialog;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Shell;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.wst.server.core.IServer;
import org.eclipse.wst.server.core.ServerCore;
/**
* @author Terry Denney
*/
public class ManageCloudDialog extends Dialog {
// Id of the server type whose cloud URLs this dialog manages.
private final String serverTypeId;
// All URLs shown in the table (loaded via CloudServerUIUtil.getAllUrls).
private List<AbstractCloudFoundryUrl> cloudUrls;
// URLs marked for deletion; presumably applied when the dialog is confirmed — TODO confirm (mutation not in view).
private Set<String> urlsToDelete;
// Servers marked for deletion; presumably tied to deleted URLs — TODO confirm (mutation not in view).
private Set<CloudFoundryServer> serversToDelete;
// Most recently added or edited URL; consumers not visible in this chunk.
private AbstractCloudFoundryUrl lastAddedEditedURL;
// Context used for progress reporting while fetching URLs; may be null.
private IRunnableContext runnableContext;
/** Creates the dialog without a runnable context (no progress reporting while resolving URLs). */
protected ManageCloudDialog(Shell parentShell, String serverTypeId) {
    this (parentShell, serverTypeId, null);
}
/**
 * Creates the dialog.
 *
 * @param parentShell parent SWT shell
 * @param serverTypeId id of the server type whose cloud URLs are managed
 * @param runnableContext context used while fetching URLs; may be null
 */
protected ManageCloudDialog(Shell parentShell, String serverTypeId, IRunnableContext runnableContext) {
    super(parentShell);
    this.serverTypeId = serverTypeId;
    serversToDelete = new HashSet<CloudFoundryServer>();
    urlsToDelete = new HashSet<String>();
    this.runnableContext = runnableContext;
}
/**
 * Builds a table viewer whose columns are sized by relative weight.
 *
 * @param parent composite receiving the table
 * @param columnNames header text, one entry per column
 * @param columnWeights relative width for each column (parallel to columnNames)
 * @return the configured viewer
 */
private TableViewer createTableViewer(Composite parent, String[] columnNames, int[] columnWeights) {
    Composite tableHolder = new Composite(parent, SWT.NONE);
    GridDataFactory.fillDefaults().grab(true, true).hint(600, 200).applyTo(tableHolder);
    // TableColumnLayout translates the per-column weights into actual widths.
    TableColumnLayout columnLayout = new TableColumnLayout();
    tableHolder.setLayout(columnLayout);
    Table table = new Table(tableHolder, SWT.BORDER | SWT.FULL_SELECTION | SWT.MULTI);
    table.setHeaderVisible(true);
    int columnCount = columnNames.length;
    for (int col = 0; col < columnCount; col++) {
        TableColumn tableColumn = new TableColumn(table, SWT.NONE);
        tableColumn.setText(columnNames[col]);
        columnLayout.setColumnData(tableColumn, new ColumnWeightData(columnWeights[col]));
    }
    TableViewer tableViewer = new TableViewer(table);
    tableViewer.setColumnProperties(columnNames);
    return tableViewer;
}
/**
 * Prompts a user for a cloud URL. If successfully prompted and user enters
 * a cloud URL, the latter is returned. Otherwise, null is returned.
 * @param serverID
 * @param shell
 * @param allURLs
 * @param existingURL
 * @param existingName
 * @return Cloud URL if successfully prompted and entered by user. Null
 * otherwise
 * @deprecated use {@link #promptForCloudFoundryUrl(String, Shell, List, String, String)} instead.
 */
protected CloudServerURL promptForCloudURL(String serverID, Shell shell, List<CloudServerURL> allURLs,
        String existingURL, String existingName) {
    // Self-signed certificate state is only meaningful when editing an existing URL.
    boolean selfSigned = existingURL != null && CloudFoundryServer.getSelfSignedCertificate(existingURL);
    CloudUrlWizard wizard = new CloudUrlWizard(serverID, allURLs, existingURL, existingName, selfSigned);
    WizardDialog wizardDialog = new WizardDialog(shell, wizard);
    return wizardDialog.open() == Dialog.OK ? wizard.getCloudUrl() : null;
}
/**
 * Prompts a user for a cloud URL. If successfully prompted and user enters
 * a cloud URL, the latter is returned. Otherwise, null is returned.
 * @param serverID
 * @param shell
 * @param allURLs
 * @param existingURL
 * @param existingName
 * @return Cloud URL if successfully prompted and entered by user. Null
 * otherwise
 */
protected AbstractCloudFoundryUrl promptForCloudFoundryUrl(String serverID, Shell shell, List<AbstractCloudFoundryUrl> allURLs,
        String existingURL, String existingName) {
    // Self-signed certificate state is only meaningful when editing an existing URL.
    boolean selfSigned = existingURL != null && CloudFoundryServer.getSelfSignedCertificate(existingURL);
    // Note: this wizard constructor takes allURLs last, unlike the deprecated overload.
    CloudUrlWizard wizard = new CloudUrlWizard(serverID, existingURL, existingName, selfSigned, allURLs);
    WizardDialog wizardDialog = new WizardDialog(shell, wizard);
    return wizardDialog.open() == Dialog.OK ? wizard.getCloudFoundryUrl() : null;
}
/**
 * Builds the dialog body: a table of cloud URLs (server-type + URL columns)
 * with Add/Edit/Remove buttons beside it. Populates {@code cloudUrls} from
 * {@code CloudServerUIUtil.getAllUrls(..)} and keeps the viewer in sync as
 * the user edits the list. Nothing is persisted until {@link #okPressed()}.
 */
@Override
protected Control createDialogArea(Composite parent) {
    getShell().setText(Messages.ManageCloudDialog_TEXT_MANAGE_CLOUD_URL);
    Composite composite = new Composite(parent, SWT.NONE);
    GridDataFactory.fillDefaults().grab(true, true).applyTo(composite);
    GridLayoutFactory.fillDefaults().margins(10, 10).numColumns(2).equalWidth(false).applyTo(composite);
    // Column weights: 35 for the name column, 55 for the URL column.
    final TableViewer viewer = createTableViewer(composite, new String[] { Messages.ManageCloudDialog_TEXT_SERVER_TYPE, Messages.ManageCloudDialog_TEXT_URL }, new int[] { 35,
            55 });
    viewer.setContentProvider(new IStructuredContentProvider() {
        public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
        }
        public void dispose() {
        }
        // Sorts the shared cloudUrls list in place by display name on every refresh.
        public Object[] getElements(Object inputElement) {
            Collections.sort(cloudUrls, new Comparator<AbstractCloudFoundryUrl>() {
                public int compare(AbstractCloudFoundryUrl o1, AbstractCloudFoundryUrl o2) {
                    return o1.getName().compareTo(o2.getName());
                }
            });
            return cloudUrls.toArray();
        }
    });
    try {
        cloudUrls = CloudServerUIUtil.getAllUrls(serverTypeId, runnableContext);
        viewer.setInput(cloudUrls.toArray());
    } catch (CoreException ex) {
        // Best-effort: if the URLs cannot be loaded the table stays empty
        // and the failure is only logged.
        CloudFoundryServerUiPlugin.logError(ex);
    }
    viewer.setLabelProvider(new ITableLabelProvider() {
        public void removeListener(ILabelProviderListener listener) {
        }
        public boolean isLabelProperty(Object element, String property) {
            return false;
        }
        public void dispose() {
        }
        public void addListener(ILabelProviderListener listener) {
        }
        // Column 0 shows the display name, column 1 the URL itself.
        public String getColumnText(Object element, int columnIndex) {
            if (element instanceof AbstractCloudFoundryUrl) {
                AbstractCloudFoundryUrl cloudUrl = (AbstractCloudFoundryUrl) element;
                if (columnIndex == 0) {
                    return cloudUrl.getName();
                }
                else if (columnIndex == 1) {
                    return cloudUrl.getUrl();
                }
            }
            return null;
        }
        public Image getColumnImage(Object element, int columnIndex) {
            return null;
        }
    });
    Composite buttonComposite = new Composite(composite, SWT.NONE);
    GridDataFactory.fillDefaults().grab(false, true).applyTo(buttonComposite);
    GridLayoutFactory.fillDefaults().margins(0, 0).applyTo(buttonComposite);
    final Button addButton = new Button(buttonComposite, SWT.PUSH);
    GridDataFactory.fillDefaults().applyTo(addButton);
    addButton.setText(Messages.COMMONTXT_ADD);
    addButton.addSelectionListener(new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent e) {
            // Null existing URL/name means "create new" in the wizard.
            AbstractCloudFoundryUrl cloudURL = promptForCloudFoundryUrl(serverTypeId, e.display.getActiveShell(), cloudUrls, null,
                    null);
            if (cloudURL != null) {
                addURL(cloudURL);
                viewer.refresh(true);
            }
        }
    });
    final Button editButton = new Button(buttonComposite, SWT.PUSH);
    GridDataFactory.fillDefaults().applyTo(editButton);
    editButton.setText(Messages.COMMONTXT_EDIT);
    editButton.setEnabled(false);
    editButton.addSelectionListener(new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent e) {
            ISelection selection = viewer.getSelection();
            if (selection instanceof IStructuredSelection) {
                IStructuredSelection sSelection = (IStructuredSelection) selection;
                Object element = sSelection.getFirstElement();
                if (element instanceof AbstractCloudFoundryUrl) {
                    AbstractCloudFoundryUrl cloudUrl = (AbstractCloudFoundryUrl) element;
                    if (cloudUrl.getUserDefined()) {
                        // Remove the entry first so the wizard does not flag
                        // it as a duplicate of itself during validation.
                        cloudUrls.remove(cloudUrl);
                        AbstractCloudFoundryUrl newUrl = promptForCloudFoundryUrl(serverTypeId, e.display.getActiveShell(),
                                cloudUrls, cloudUrl.getUrl(), cloudUrl.getName());
                        if (newUrl != null) {
                            // Accept the edit if the URL is unchanged or
                            // canUpdateUrl approves; otherwise restore the original.
                            if (cloudUrl.getUrl().equals(newUrl.getUrl()) || canUpdateUrl(cloudUrl, newUrl)) {
                                addURL(newUrl);
                            }
                            else {
                                addURL(cloudUrl);
                            }
                        }
                        else {
                            // Wizard cancelled: put the original entry back.
                            addURL(cloudUrl);
                        }
                    }
                    else {
                        // Predefined wildcard URL: prompt the user to fill in
                        // the placeholder and add the resulting concrete URL.
                        AbstractCloudFoundryUrl url = CloudServerUIUtil.getWildcardUrl(cloudUrl, cloudUrls, getShell());
                        if (url != null) {
                            addURL(url);
                        }
                    }
                    viewer.refresh(true);
                }
            }
        }
    });
    final Button removeButton = new Button(buttonComposite, SWT.PUSH);
    GridDataFactory.fillDefaults().applyTo(removeButton);
    removeButton.setText(Messages.COMMONTXT_REMOVE);
    removeButton.setEnabled(false);
    removeButton.addSelectionListener(new SelectionAdapter() {
        @Override
        public void widgetSelected(SelectionEvent e) {
            ISelection selection = viewer.getSelection();
            if (selection instanceof IStructuredSelection) {
                IStructuredSelection sSelection = (IStructuredSelection) selection;
                Object[] selectedItems = sSelection.toArray();
                // removeCloudUrl itself re-checks user-defined status and may
                // prompt before actually removing each entry.
                for (Object selectedItem : selectedItems) {
                    if (selectedItem instanceof AbstractCloudFoundryUrl) {
                        AbstractCloudFoundryUrl cloudUrl = (AbstractCloudFoundryUrl) selectedItem;
                        removeCloudUrl(cloudUrl);
                    }
                }
            }
            viewer.refresh(true);
        }
    });
    viewer.addSelectionChangedListener(new ISelectionChangedListener() {
        public void selectionChanged(SelectionChangedEvent event) {
            ISelection selection = event.getSelection();
            if (selection instanceof IStructuredSelection) {
                boolean editEnabled = true;
                boolean removeEnabled = true;
                IStructuredSelection sSelection = (IStructuredSelection) selection;
                Object[] selectedItems = sSelection.toArray();
                for (Object selectedItem : selectedItems) {
                    if (selectedItem instanceof AbstractCloudFoundryUrl) {
                        AbstractCloudFoundryUrl cloudUrl = (AbstractCloudFoundryUrl) selectedItem;
                        if (!cloudUrl.getUserDefined()) {
                            // Predefined URLs: editable only when they contain a
                            // wildcard placeholder ("{"); never removable.
                            String url = cloudUrl.getUrl();
                            if (!url.contains("{")) { //$NON-NLS-1$
                                editEnabled = false;
                            }
                            removeEnabled = false;
                        }
                    }
                }
                // Edit operates on exactly one row; remove on one or more.
                editButton.setEnabled(selectedItems.length == 1 && editEnabled);
                removeButton.setEnabled(selectedItems.length > 0 && removeEnabled);
            }
        }
    });
    return composite;
}
/**
 * Adds the URL to the working list, cancels any pending deletion of the same
 * URL string, and records it as the most recently added/edited entry.
 */
protected void addURL(AbstractCloudFoundryUrl urlToAdd) {
    if (cloudUrls == null) {
        return;
    }
    cloudUrls.add(urlToAdd);
    if (urlsToDelete != null) {
        // Re-adding a URL undoes a previously queued deletion of it.
        urlsToDelete.remove(urlToAdd.getUrl());
    }
    lastAddedEditedURL = urlToAdd;
}
/**
 * Removes a user-defined URL from the working list and queues its URL string
 * for deferred cleanup in okPressed(). Predefined URLs are never removed;
 * canUpdateUrl may prompt the user before allowing the removal.
 */
protected void removeCloudUrl(AbstractCloudFoundryUrl cloudUrl) {
    // Same short-circuit order as before: canUpdateUrl (which may prompt)
    // only runs for non-null, user-defined URLs.
    if (cloudUrl == null || !cloudUrl.getUserDefined() || !canUpdateUrl(cloudUrl, null)) {
        return;
    }
    cloudUrls.remove(cloudUrl);
    if (urlsToDelete != null) {
        urlsToDelete.add(cloudUrl.getUrl());
    }
}
/**
 * @deprecated use {@link #getLastAddedOrEditedCloudFoundryUrl()} instead.
 */
public CloudServerURL getLastAddedOrEditedURL() {
    // Legacy accessor: only returns a value when the last entry is of the
    // older CloudServerURL type; otherwise null.
    return (lastAddedEditedURL instanceof CloudServerURL) ? (CloudServerURL) lastAddedEditedURL : null;
}
/**
 * @return the URL most recently added or edited through this dialog, or null
 * if none has been added or edited in this session.
 */
public AbstractCloudFoundryUrl getLastAddedOrEditedCloudFoundryUrl() {
    return lastAddedEditedURL;
}
/**
 * Decides whether {@code url} may be removed (when {@code newUrl} is null) or
 * replaced by {@code newUrl}, given that existing workspace servers may still
 * point at it. May prompt the user and, as a side effect, queue matching
 * servers in {@code serversToDelete} or re-add {@code newUrl} via
 * {@link #addURL(AbstractCloudFoundryUrl)}.
 *
 * @param url the URL being removed or edited
 * @param newUrl the replacement URL, or null when {@code url} is being removed
 * @return true if the caller may proceed with the update/removal
 */
private boolean canUpdateUrl(AbstractCloudFoundryUrl url, AbstractCloudFoundryUrl newUrl) {
    // Collect every workspace server currently configured with this URL.
    IServer[] servers = ServerCore.getServers();
    Set<CloudFoundryServer> matchedServers = new HashSet<CloudFoundryServer>();
    for (IServer server : servers) {
        CloudFoundryServer cfServer = (CloudFoundryServer) server.loadAdapter(CloudFoundryServer.class, null);
        // NOTE(review): assumes cfServer.getUrl() is never null here — confirm.
        if (cfServer != null && cfServer.getUrl().equals(url.getUrl())) {
            matchedServers.add(cfServer);
        }
    }
    if (matchedServers.isEmpty()) {
        // No server uses this URL; nothing to confirm.
        return true;
    }
    if (newUrl == null) {
        // Removal path: ask whether the servers using this URL should be
        // deleted too. Actual deletion is deferred until okPressed().
        if (MessageDialog.openQuestion(getShell(), Messages.ManageCloudDialog_TEXT_URL_USED_TITLE, Messages.ManageCloudDialog_TEXT_URL_USED_BODY_1)) {
            for (CloudFoundryServer matchedServer : matchedServers) {
                serversToDelete.add(matchedServer);
            }
            return true;
        }
    }
    else {
        // Edit path: let the user choose between deleting the affected
        // servers or keeping the old URL and adding the new one alongside.
        EditUrlConfirmationDialog dialog = new EditUrlConfirmationDialog(getShell());
        int answer = dialog.open();
        if (answer == 0) { // 0 == index of the OK button
            if (dialog.getAction() == EditUrlConfirmationDialog.Action.REMOVE_SERVER) {
                for (CloudFoundryServer matchedServer : matchedServers) {
                    serversToDelete.add(matchedServer);
                }
                return true;
            }
            else {
                // ADD_URL: add the new URL here but return false so the
                // caller restores the original entry as well — both URLs
                // end up in the list.
                addURL(newUrl);
            }
        }
    }
    return false;
}
/**
 * Confirmation dialog shown when an edited URL is still used by existing
 * servers. Offers two radio options: delete the affected servers, or keep
 * the old URL alongside the new one.
 */
private static class EditUrlConfirmationDialog extends MessageDialog {

    /** What to do with servers that still use the original URL. */
    public enum Action {
        REMOVE_SERVER, ADD_URL
    }

    // Selected option; defaults to REMOVE_SERVER (the pre-selected radio).
    private Action action;

    public EditUrlConfirmationDialog(Shell parentShell) {
        super(parentShell, Messages.ManageCloudDialog_TEXT_URL_USED_TITLE, null,
                Messages.ManageCloudDialog_TEXT_URL_MOD_USED, MessageDialog.QUESTION, new String[] {
                        Messages.COMMONTXT_OK, Messages.ManageCloudDialog_TEXT_CANCEL }, 0);
    }

    @Override
    protected Control createMessageArea(Composite composite) {
        Control control = super.createMessageArea(composite);
        new Label(composite, SWT.NONE); // grid filler
        final Button removeServerButton = new Button(composite, SWT.RADIO);
        removeServerButton.setText(Messages.ManageCloudDialog_TEXT_REMOVE_SERVER);
        removeServerButton.setSelection(true);
        action = Action.REMOVE_SERVER; // matches the initial selection
        removeServerButton.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (removeServerButton.getSelection()) {
                    action = Action.REMOVE_SERVER;
                }
            }
        });
        new Label(composite, SWT.NONE); // grid filler
        final Button addUrlButton = new Button(composite, SWT.RADIO);
        addUrlButton.setText(Messages.ManageCloudDialog_TEXT_KEEP_OLD_URL);
        // BUGFIX: this listener was previously registered on removeServerButton
        // (copy-paste error), leaving addUrlButton without its own listener and
        // relying on deselection events fired at the other radio button.
        addUrlButton.addSelectionListener(new SelectionAdapter() {
            @Override
            public void widgetSelected(SelectionEvent e) {
                if (addUrlButton.getSelection()) {
                    action = Action.ADD_URL;
                }
            }
        });
        return control;
    }

    /** @return the option the user selected (defaults to {@link Action#REMOVE_SERVER}). */
    public Action getAction() {
        return action;
    }
}
/**
 * Persists the edited URL list and applies the deferred side effects:
 * deletes servers whose URL was removed and clears the self-signed
 * certificate setting for every deleted URL.
 */
@Override
protected void okPressed() {
    CloudServerUIUtil.storeUserDefinedUrls(serverTypeId, cloudUrls);
    // Servers to delete are servers that were previously created using a
    // URL that has been deleted.
    for (CloudFoundryServer server : serversToDelete) {
        try {
            IServer serverOriginal = server.getServerOriginal();
            serverOriginal.delete();
        }
        catch (CoreException e) {
            // Deletion failures are logged but do not abort the dialog.
            CloudFoundryPlugin.getDefault().getLog()
                    .log(new Status(IStatus.ERROR, CloudFoundryPlugin.PLUGIN_ID, "Unable to delete server", e)); //$NON-NLS-1$
        }
    }
    // Also remove the self-signed settings for cloud URL
    if (urlsToDelete != null) {
        for (String url : urlsToDelete) {
            CloudFoundryServer.setSelfSignedCertificate(false, url);
        }
    }
    super.okPressed();
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v10/errors/conversion_custom_variable_error.proto
package com.google.ads.googleads.v10.errors;
/**
* <pre>
* Container for enum describing possible conversion custom variable errors.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum}
*/
public final class ConversionCustomVariableErrorEnum extends
    com.google.protobuf.GeneratedMessageV3 implements
    // @@protoc_insertion_point(message_implements:google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)
    ConversionCustomVariableErrorEnumOrBuilder {
  // NOTE(review): protoc-generated code (legacy protobuf-java generated style).
  // Do not hand-edit; regenerate from conversion_custom_variable_error.proto.
  private static final long serialVersionUID = 0L;
  // Use ConversionCustomVariableErrorEnum.newBuilder() to construct.
  private ConversionCustomVariableErrorEnum(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private ConversionCustomVariableErrorEnum() {
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(
      UnusedPrivateParameter unused) {
    return new ConversionCustomVariableErrorEnum();
  }
  @java.lang.Override
  public final com.google.protobuf.UnknownFieldSet
  getUnknownFields() {
    return this.unknownFields;
  }
  // Wire-parsing constructor used by PARSER; this message has no fields, so it
  // only collects unknown fields.
  private ConversionCustomVariableErrorEnum(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    this();
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    com.google.protobuf.UnknownFieldSet.Builder unknownFields =
        com.google.protobuf.UnknownFieldSet.newBuilder();
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          default: {
            if (!parseUnknownField(
                input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
        }
      }
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.setUnfinishedMessage(this);
    } catch (java.io.IOException e) {
      throw new com.google.protobuf.InvalidProtocolBufferException(
          e).setUnfinishedMessage(this);
    } finally {
      this.unknownFields = unknownFields.build();
      makeExtensionsImmutable();
    }
  }
  public static final com.google.protobuf.Descriptors.Descriptor
      getDescriptor() {
    return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorProto.internal_static_google_ads_googleads_v10_errors_ConversionCustomVariableErrorEnum_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorProto.internal_static_google_ads_googleads_v10_errors_ConversionCustomVariableErrorEnum_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.class, com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.Builder.class);
  }
  /**
   * <pre>
   * Enum describing possible conversion custom variable errors.
   * </pre>
   *
   * Protobuf enum {@code google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.ConversionCustomVariableError}
   */
  public enum ConversionCustomVariableError
      implements com.google.protobuf.ProtocolMessageEnum {
    /**
     * <pre>
     * Enum unspecified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    UNSPECIFIED(0),
    /**
     * <pre>
     * The received error code is not known in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    UNKNOWN(1),
    /**
     * <pre>
     * A conversion custom variable with the specified name already exists.
     * </pre>
     *
     * <code>DUPLICATE_NAME = 2;</code>
     */
    DUPLICATE_NAME(2),
    /**
     * <pre>
     * A conversion custom variable with the specified tag already exists.
     * </pre>
     *
     * <code>DUPLICATE_TAG = 3;</code>
     */
    DUPLICATE_TAG(3),
    /**
     * <pre>
     * A conversion custom variable with the specified tag is reserved for other
     * uses.
     * </pre>
     *
     * <code>RESERVED_TAG = 4;</code>
     */
    RESERVED_TAG(4),
    UNRECOGNIZED(-1),
    ;
    /**
     * <pre>
     * Enum unspecified.
     * </pre>
     *
     * <code>UNSPECIFIED = 0;</code>
     */
    public static final int UNSPECIFIED_VALUE = 0;
    /**
     * <pre>
     * The received error code is not known in this version.
     * </pre>
     *
     * <code>UNKNOWN = 1;</code>
     */
    public static final int UNKNOWN_VALUE = 1;
    /**
     * <pre>
     * A conversion custom variable with the specified name already exists.
     * </pre>
     *
     * <code>DUPLICATE_NAME = 2;</code>
     */
    public static final int DUPLICATE_NAME_VALUE = 2;
    /**
     * <pre>
     * A conversion custom variable with the specified tag already exists.
     * </pre>
     *
     * <code>DUPLICATE_TAG = 3;</code>
     */
    public static final int DUPLICATE_TAG_VALUE = 3;
    /**
     * <pre>
     * A conversion custom variable with the specified tag is reserved for other
     * uses.
     * </pre>
     *
     * <code>RESERVED_TAG = 4;</code>
     */
    public static final int RESERVED_TAG_VALUE = 4;
    public final int getNumber() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static ConversionCustomVariableError valueOf(int value) {
      return forNumber(value);
    }
    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static ConversionCustomVariableError forNumber(int value) {
      switch (value) {
        case 0: return UNSPECIFIED;
        case 1: return UNKNOWN;
        case 2: return DUPLICATE_NAME;
        case 3: return DUPLICATE_TAG;
        case 4: return RESERVED_TAG;
        default: return null;
      }
    }
    public static com.google.protobuf.Internal.EnumLiteMap<ConversionCustomVariableError>
        internalGetValueMap() {
      return internalValueMap;
    }
    private static final com.google.protobuf.Internal.EnumLiteMap<
        ConversionCustomVariableError> internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<ConversionCustomVariableError>() {
            public ConversionCustomVariableError findValueByNumber(int number) {
              return ConversionCustomVariableError.forNumber(number);
            }
          };
    public final com.google.protobuf.Descriptors.EnumValueDescriptor
        getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }
    public final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptorForType() {
      return getDescriptor();
    }
    public static final com.google.protobuf.Descriptors.EnumDescriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.getDescriptor().getEnumTypes().get(0);
    }
    private static final ConversionCustomVariableError[] VALUES = values();
    public static ConversionCustomVariableError valueOf(
        com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException(
          "EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }
    private final int value;
    private ConversionCustomVariableError(int value) {
      this.value = value;
    }
    // @@protoc_insertion_point(enum_scope:google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.ConversionCustomVariableError)
  }
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
                      throws java.io.IOException {
    unknownFields.writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    size += unknownFields.getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
     return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum other = (com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum) obj;
    if (!unknownFields.equals(other.unknownFields)) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (29 * hash) + unknownFields.hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() { return newBuilder(); }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Container for enum describing possible conversion custom variable errors.
   * </pre>
   *
   * Protobuf type {@code google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)
      com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnumOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorProto.internal_static_google_ads_googleads_v10_errors_ConversionCustomVariableErrorEnum_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorProto.internal_static_google_ads_googleads_v10_errors_ConversionCustomVariableErrorEnum_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.class, com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.Builder.class);
    }
    // Construct using com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorProto.internal_static_google_ads_googleads_v10_errors_ConversionCustomVariableErrorEnum_descriptor;
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum getDefaultInstanceForType() {
      return com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum build() {
      com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum buildPartial() {
      com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum result = new com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum(this);
      onBuilt();
      return result;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum) {
        return mergeFrom((com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum other) {
      if (other == com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum.getDefaultInstance()) return this;
      this.mergeUnknownFields(other.unknownFields);
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)
  }
  // @@protoc_insertion_point(class_scope:google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum)
  private static final com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum();
  }
  public static com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  private static final com.google.protobuf.Parser<ConversionCustomVariableErrorEnum>
      PARSER = new com.google.protobuf.AbstractParser<ConversionCustomVariableErrorEnum>() {
    @java.lang.Override
    public ConversionCustomVariableErrorEnum parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      return new ConversionCustomVariableErrorEnum(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<ConversionCustomVariableErrorEnum> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ConversionCustomVariableErrorEnum> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v10.errors.ConversionCustomVariableErrorEnum getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| |
package org.zstack.storage.backup.sftp;
import org.zstack.header.HasThreadContext;
import org.zstack.header.log.NoLogging;
import java.io.Serializable;
public class SftpBackupStorageCommands {
/**
 * Base class for every command sent to the SFTP backup storage agent.
 * NOTE(review): these DTOs are presumably serialized to JSON by field name
 * for the agent wire protocol — confirm before renaming any field.
 */
public static class AgentCommand {
    // UUID of the backup storage this command targets.
    public String uuid;
}
/**
 * Base class for every agent response: a success/error pair plus the storage
 * capacity figures the agent reports back with most calls.
 */
public static class AgentResponse {
    private boolean success = true; // defaults to success; agent flips it on failure
    private String error;           // error description; meaningful when success == false
    private Long totalCapacity;     // total storage capacity; null if not reported
    private Long availableCapacity; // remaining storage capacity; null if not reported
    public Long getTotalCapacity() {
        return totalCapacity;
    }
    public void setTotalCapacity(Long totalCapacity) {
        this.totalCapacity = totalCapacity;
    }
    public Long getAvailableCapacity() {
        return availableCapacity;
    }
    public void setAvailableCapacity(Long availableCapacity) {
        this.availableCapacity = availableCapacity;
    }
    public boolean isSuccess() {
        return success;
    }
    public void setSuccess(boolean success) {
        this.success = success;
    }
    public String getError() {
        return error;
    }
    public void setError(String error) {
        this.error = error;
    }
}
/** Liveness probe; carries no payload beyond the inherited uuid. */
public static class PingCmd extends AgentCommand {
}
/** Ping reply; echoes the agent's backup storage uuid. */
public static class PingResponse extends AgentResponse {
    private String uuid;
    public String getUuid() {
        return uuid;
    }
    public void setUuid(String uuid) {
        this.uuid = uuid;
    }
}
/**
 * Handshake command: tells the agent where its storage root is and which URL
 * to use when sending commands/callbacks back to the management node.
 */
public static class ConnectCmd extends AgentCommand {
    private String storagePath;    // root directory of the backup storage on the agent host
    private String sendCommandUrl; // management-node URL the agent calls back to
    // Accessors for the uuid field inherited from AgentCommand.
    public String getUuid() {
        return uuid;
    }
    public void setUuid(String uuid) {
        this.uuid = uuid;
    }
    public String getStoragePath() {
        return storagePath;
    }
    public void setStoragePath(String storagePath) {
        this.storagePath = storagePath;
    }
    public void setSendCommandUrl(String sendCommandUrl) {
        this.sendCommandUrl = sendCommandUrl;
    }
    public String getSendCommandUrl() {
        return sendCommandUrl;
    }
}
/** Reply to ConnectCmd; only the base success/error/capacity fields. */
public static class ConnectResponse extends AgentResponse {
}
/**
 * Asks the agent to download an image from {@code url} into
 * {@code installPath} on the backup storage.
 */
public static class DownloadCmd extends AgentCommand implements Serializable, HasThreadContext {
    private String imageUuid;   // uuid of the image being downloaded
    private String installPath; // destination path on the backup storage
    // The download URL may embed credentials, so it is excluded from logs.
    @NoLogging(type = NoLogging.Type.Uri)
    private String url;
    private long timeout;       // download timeout; unit not evident here — presumably milliseconds, confirm with caller
    private String urlScheme;   // scheme of the url (e.g. parsed out for protocol-specific handling)
    public String getImageUuid() {
        return imageUuid;
    }
    public void setImageUuid(String imageUuid) {
        this.imageUuid = imageUuid;
    }
    // Accessors for the uuid field inherited from AgentCommand.
    public String getUuid() {
        return uuid;
    }
    public void setUuid(String uuid) {
        this.uuid = uuid;
    }
    public String getUrl() {
        return url;
    }
    public void setUrl(String url) {
        this.url = url;
    }
    public String getInstallPath() {
        return installPath;
    }
    public void setInstallPath(String installPath) {
        this.installPath = installPath;
    }
    public long getTimeout() {
        return timeout;
    }
    public void setTimeout(long timeout) {
        this.timeout = timeout;
    }
    public String getUrlScheme() {
        return urlScheme;
    }
    public void setUrlScheme(String urlScheme) {
        this.urlScheme = urlScheme;
    }
}
/**
 * Result of a DownloadCmd: checksum, sizes and detected format of the
 * downloaded image.
 */
public static class DownloadResponse extends AgentResponse {
    public String md5Sum;   // MD5 checksum of the downloaded file
    public long size;       // virtual size of the image
    public long actualSize; // bytes actually occupied on disk
    public String format;   // image format reported by the agent
    public long getActualSize() {
        return actualSize;
    }
    public void setActualSize(long actualSize) {
        this.actualSize = actualSize;
    }
    public String getMd5Sum() {
        return md5Sum;
    }
    public void setMd5Sum(String md5Sum) {
        this.md5Sum = md5Sum;
    }
    public long getSize() {
        return size;
    }
    public void setSize(long size) {
        this.size = size;
    }
    public String getFormat() {
        return format;
    }
    public void setFormat(String format) {
        this.format = format;
    }
}
/** Asks the agent to delete the file at the given install URL. */
public static class DeleteCmd extends AgentCommand {
    private String installUrl; // path/URL of the file to delete on the backup storage
    public String getInstallUrl() {
        return installUrl;
    }
    public void setInstallUrl(String installUrl) {
        this.installUrl = installUrl;
    }
}
/** Reply to DeleteCmd; only the base success/error/capacity fields. */
public static class DeleteResponse extends AgentResponse {
}
/** Requests the agent's SSH public key. */
public static class GetSshKeyCommand extends AgentCommand {
}
/** Carries back the agent's SSH key requested by GetSshKeyCommand. */
public static class GetSshKeyResponse extends AgentResponse {
    private String sshKey;
    public String getSshKey() {
        return sshKey;
    }
    public void setSshKey(String sshKey) {
        this.sshKey = sshKey;
    }
}
/** Asks for the size of a single file on the backup storage host. */
public static class GetLocalFileSizeCmd extends AgentCommand {
    // Absolute path of the file to stat.
    public String path ;
}
/** Reply to GetLocalFileSizeCmd. */
public static class GetLocalFileSizeRsp extends AgentResponse {
    // File size in bytes.
    public long size;
}
/** Asks for the virtual and actual size of an installed image. */
public static class GetImageSizeCmd extends AgentCommand {
    public String imageUuid;   // uuid of the image
    public String installPath; // where the image lives on the backup storage
}
/** Reply to GetImageSizeCmd. */
public static class GetImageSizeRsp extends AgentResponse {
    public long size;       // virtual size of the image
    public long actualSize; // bytes actually occupied on disk
}
/** Requests the image metadata stored under the given backup storage path. */
public static class GetImagesMetaDataCmd extends AgentCommand {
    private String backupStoragePath;
    public String getBackupStoragePath() {
        return backupStoragePath;
    }
    public void setBackupStoragePath(String backupStoragePath) {
        this.backupStoragePath = backupStoragePath;
    }
}
/** Reply to GetImagesMetaDataCmd; metadata is returned as an opaque string. */
public static class GetImagesMetaDataRsp extends AgentResponse {
    private String imagesMetaData;
    public String getImagesMetaData() {
        return imagesMetaData;
    }
    public void setImagesMetaData(String imagesMetaData) {
        this.imagesMetaData = imagesMetaData;
    }
}
/** Agent command checking whether the image metadata file exists under {@code backupStoragePath}. */
public static class CheckImageMetaDataFileExistCmd extends AgentCommand {
    private String backupStoragePath;
    public String getBackupStoragePath() {
        return backupStoragePath;
    }
    public void setBackupStoragePath(String backupStoragePath) {
        this.backupStoragePath = backupStoragePath;
    }
}
/**
 * Response to {@link CheckImageMetaDataFileExistCmd}: whether the metadata file
 * exists, plus the name of the metadata file that was checked.
 */
public static class CheckImageMetaDataFileExistRsp extends AgentResponse {
    private String backupStorageMetaFileName;
    private Boolean exist;

    public String getBackupStorageMetaFileName() {
        return backupStorageMetaFileName;
    }

    public void setBackupStorageMetaFileName(String backupStorageMetaFileName) {
        this.backupStorageMetaFileName = backupStorageMetaFileName;
    }

    public Boolean getExist() {
        return exist;
    }

    public void setExist(Boolean exist) {
        this.exist = exist;
    }
}
/** Agent command asking the agent to generate the image metadata file under {@code backupStoragePath}. */
public static class GenerateImageMetaDataFileCmd extends AgentCommand {
    private String backupStoragePath;
    public String getBackupStoragePath() {
        return backupStoragePath;
    }
    public void setBackupStoragePath(String backupStoragePath) {
        this.backupStoragePath = backupStoragePath;
    }
}
/** Response to {@link GenerateImageMetaDataFileCmd}, returning the generated file's name. */
public static class GenerateImageMetaDataFileRsp extends AgentResponse {
    private String backupStorageMetaFileName;
    public String getBackupStorageMetaFileName() {
        return backupStorageMetaFileName;
    }
    public void setBackupStorageMetaFileName(String backupStorageMetaFileName) {
        this.backupStorageMetaFileName = backupStorageMetaFileName;
    }
}
/**
 * Agent command writing image metadata into the metadata file under
 * {@code backupStoragePath}. When {@code dumpAllMetaData} is set, the flag name
 * suggests all metadata is (re)written rather than a single image's —
 * NOTE(review): confirm against the agent implementation.
 */
public static class DumpImageInfoToMetaDataFileCmd extends AgentCommand {
    private String backupStoragePath;
    // Metadata payload for the image, as a pre-serialized string.
    private String imageMetaData;
    private boolean dumpAllMetaData;
    public boolean isDumpAllMetaData() {
        return dumpAllMetaData;
    }
    public void setDumpAllMetaData(boolean dumpAllMetaData) {
        this.dumpAllMetaData = dumpAllMetaData;
    }
    public String getBackupStoragePath() {
        return backupStoragePath;
    }
    public void setBackupStoragePath(String backupStoragePath) {
        this.backupStoragePath = backupStoragePath;
    }
    public String getImageMetaData() {
        return imageMetaData;
    }
    public void setImageMetaData(String imageMetaData) {
        this.imageMetaData = imageMetaData;
    }
}
/** Acknowledgement for {@link DumpImageInfoToMetaDataFileCmd}; no payload beyond the AgentResponse base. */
public static class DumpImageInfoToMetaDataFileRsp extends AgentResponse {
}
/**
 * Agent command removing one image's entry from the metadata file under
 * {@code backupStoragePath}, identified by image UUID and backup-storage UUID.
 */
public static class DeleteImageInfoFromMetaDataFileCmd extends AgentCommand {
    private String imageUuid;
    private String imageBackupStorageUuid;
    private String backupStoragePath;
    public String getBackupStoragePath() {
        return backupStoragePath;
    }
    public void setBackupStoragePath(String backupStoragePath) {
        this.backupStoragePath = backupStoragePath;
    }
    public String getImageBackupStorageUuid() {
        return imageBackupStorageUuid;
    }
    public void setImageBackupStorageUuid(String imageBackupStorageUuid) {
        this.imageBackupStorageUuid = imageBackupStorageUuid;
    }
    public String getImageUuid() {
        return imageUuid;
    }
    public void setImageUuid(String imageUuid) {
        this.imageUuid = imageUuid;
    }
}
/**
 * Response to {@link DeleteImageInfoFromMetaDataFileCmd}. The field names
 * suggest a process-style result: {@code ret} a return code and {@code out}
 * captured output — NOTE(review): confirm against the agent implementation.
 */
public static class DeleteImageInfoFromMetaDataFileRsp extends AgentResponse {
    private Integer ret;
    private String out;
    public Integer getRet() {
        return ret;
    }
    public void setRet(Integer ret) {
        this.ret = ret;
    }
    public String getOut() {
        return out;
    }
    public void setOut(String out) {
        this.out = out;
    }
}
/**
 * Agent command cancelling a previously issued long-running operation,
 * identified by the API id of the request being cancelled.
 */
public static class CancelCommand extends AgentCommand implements org.zstack.header.agent.CancelCommand {
    // API id of the operation to cancel; write-only per the implemented interface.
    private String cancellationApiId;
    @Override
    public void setCancellationApiId(String cancellationApiId) {
        this.cancellationApiId = cancellationApiId;
    }
}
}
| |
/*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.template.soy.internal.i18n;
import com.google.auto.value.AutoValue;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.common.html.HtmlEscapers;
import com.google.template.soy.data.Dir;
import javax.annotation.Nullable;
public class BidiFormatter {
/** The text used to bidi wrap a string. */
@AutoValue
public abstract static class BidiWrappingText {
static BidiWrappingText create(String beforeText, String afterText) {
return new AutoValue_BidiFormatter_BidiWrappingText(beforeText, afterText);
}
/** The text to go before the string to wrap. */
public abstract String beforeText();
/** The text to go after the string to wrap. */
public abstract String afterText();
}
private static final BidiFormatter DEFAULT_LTR_INSTANCE = new BidiFormatter(Dir.LTR);
private static final BidiFormatter DEFAULT_RTL_INSTANCE = new BidiFormatter(Dir.RTL);
private final Dir contextDir;
/**
* Factory for creating an instance of BidiFormatter given the context directionality. {@link
* #spanWrap} avoids span wrapping unless there's a reason ('dir' attribute should be appended).
*
* @param contextDir The context directionality. Must be RTL or LTR.
*/
public static BidiFormatter getInstance(Dir contextDir) {
switch (contextDir) {
case LTR:
return DEFAULT_LTR_INSTANCE;
case RTL:
return DEFAULT_RTL_INSTANCE;
default:
throw new IllegalArgumentException("invalid context directionality: " + contextDir);
}
}
/** @param contextDir The context directionality */
private BidiFormatter(@Nullable Dir contextDir) {
this.contextDir = contextDir;
}
/**
* Returns "dir=\"ltr\"" or "dir=\"rtl\"", depending on the given directionality, if it is not
* NEUTRAL or the same as the context directionality. Otherwise, returns "".
*
* @param dir Given directionality. Must not be null.
* @return "dir=\"rtl\"" for RTL text in non-RTL context; "dir=\"ltr\"" for LTR text in non-LTR
* context; else, the empty string.
*/
public String knownDirAttr(Dir dir) {
Preconditions.checkNotNull(dir);
if (dir != contextDir) {
return dir == Dir.LTR ? "dir=\"ltr\"" : dir == Dir.RTL ? "dir=\"rtl\"" : "";
}
return "";
}
/**
* Formats a string of given directionality for use in HTML output of the context directionality,
* so an opposite-directionality string is neither garbled nor garbles its surroundings.
*
* <p>The algorithm: In case the given directionality doesn't match the context directionality,
* wraps the string with a 'span' element and adds a 'dir' attribute (either 'dir=\"rtl\"' or
* 'dir=\"ltr\"').
*
* <p>Directionally isolates the string so that it does not garble its surroundings. Currently,
* this is done by "resetting" the directionality after the string by appending a trailing Unicode
* bidi mark matching the context directionality (LRM or RLM) when either the overall
* directionality or the exit directionality of the string is opposite to that of the context.
* Note that as opposed to the overall directionality, the entry and exit directionalities are
* determined from the string itself.
*
* <p>If !{@code isHtml}, HTML-escapes the string regardless of wrapping.
*
* @param dir {@code str}'s directionality. If null, i.e. unknown, it is estimated.
* @param str The input string
* @param isHtml Whether {@code str} is HTML / HTML-escaped
* @return Input string after applying the above processing.
*/
public String spanWrap(@Nullable Dir dir, String str, boolean isHtml) {
BidiWrappingText wrappingText = spanWrappingText(dir, str, isHtml);
if (!isHtml) {
str = HtmlEscapers.htmlEscaper().escape(str);
}
return wrappingText.beforeText() + str + wrappingText.afterText();
}
/**
* Operates like {@link #spanWrap(Dir, String, boolean)} but only returns the text that would be
* prepended and appended to {@code str}.
*
* @param dir {@code str}'s directionality. If null, i.e. unknown, it is estimated.
* @param str The input string
* @param isHtml Whether {@code str} is HTML / HTML-escaped
*/
public BidiWrappingText spanWrappingText(@Nullable Dir dir, String str, boolean isHtml) {
if (dir == null) {
dir = estimateDirection(str, isHtml);
}
StringBuilder beforeText = new StringBuilder();
StringBuilder afterText = new StringBuilder();
boolean dirCondition = (dir != Dir.NEUTRAL && dir != contextDir);
if (dirCondition) {
beforeText.append("<span dir=\"").append(dir == Dir.RTL ? "rtl" : "ltr").append("\">");
afterText.append("</span>");
}
afterText.append(markAfter(dir, str, isHtml));
return BidiWrappingText.create(beforeText.toString(), afterText.toString());
}
/**
* Formats a string of given directionality for use in plain-text output of the context
* directionality, so an opposite-directionality string is neither garbled nor garbles its
* surroundings. As opposed to {@link #spanWrap}, this makes use of Unicode bidi formatting
* characters. In HTML, its *only* valid use is inside of elements that do not allow markup, e.g.
* the 'option' and 'title' elements.
*
* <p>The algorithm: In case the given directionality doesn't match the context directionality,
* wraps the string with Unicode bidi formatting characters: RLE+{@code str}+PDF for RTL text, or
* LRE+{@code str}+PDF for LTR text.
*
* <p>Directionally isolates the string so that it does not garble its surroundings. Currently,
* this is done by "resetting" the directionality after the string by appending a trailing Unicode
* bidi mark matching the context directionality (LRM or RLM) when either the overall
* directionality or the exit directionality of the string is opposite to that of the context.
* Note that as opposed to the overall directionality, the entry and exit directionalities are
* determined from the string itself.
*
* <p>Does *not* do HTML-escaping regardless of the value of {@code isHtml}.
*
* @param dir {@code str}'s directionality. If null, i.e. unknown, it is estimated.
* @param str The input string
* @param isHtml Whether {@code str} is HTML / HTML-escaped
* @return Input string after applying the above processing.
*/
public String unicodeWrap(@Nullable Dir dir, String str, boolean isHtml) {
BidiWrappingText wrappingText = unicodeWrappingText(dir, str, isHtml);
return wrappingText.beforeText() + str + wrappingText.afterText();
}
/**
* Operates like {@link #unicodeWrap(Dir, String, boolean)} but only returns the text that would
* be prepended and appended to {@code str}.
*
* @param dir {@code str}'s directionality. If null, i.e. unknown, it is estimated.
* @param str The input string
* @param isHtml Whether {@code str} is HTML / HTML-escaped
*/
public BidiWrappingText unicodeWrappingText(@Nullable Dir dir, String str, boolean isHtml) {
if (dir == null) {
dir = estimateDirection(str, isHtml);
}
StringBuilder beforeText = new StringBuilder();
StringBuilder afterText = new StringBuilder();
if (dir != Dir.NEUTRAL && dir != contextDir) {
beforeText.append(dir == Dir.RTL ? BidiUtils.Format.RLE : BidiUtils.Format.LRE);
afterText.append(BidiUtils.Format.PDF);
}
afterText.append(markAfter(dir, str, isHtml));
return BidiWrappingText.create(beforeText.toString(), afterText.toString());
}
/**
* Returns a Unicode bidi mark matching the context directionality (LRM or RLM) if either the
* overall or the exit directionality of a given string is opposite to the context directionality.
* Putting this after the string (including its directionality declaration wrapping) prevents it
* from "sticking" to other opposite-directionality text or a number appearing after it inline
* with only neutral content in between. Otherwise returns the empty string. While the exit
* directionality is determined by scanning the end of the string, the overall directionality is
* given explicitly in {@code dir}.
*
* @param str String after which the mark may need to appear
* @param dir {@code str}'s overall directionality. If null, i.e. unknown, it is estimated.
* @param isHtml Whether {@code str} is HTML / HTML-escaped
* @return LRM for RTL text in LTR context; RLM for LTR text in RTL context; else, the empty
* string.
*/
public String markAfter(@Nullable Dir dir, String str, boolean isHtml) {
if (dir == null) {
dir = estimateDirection(str, isHtml);
}
// BidiUtils.getExitDir() is called only if needed (short-circuit).
if (contextDir == Dir.LTR && (dir == Dir.RTL || BidiUtils.getExitDir(str, isHtml) == Dir.RTL)) {
return BidiUtils.Format.LRM_STRING;
}
if (contextDir == Dir.RTL && (dir == Dir.LTR || BidiUtils.getExitDir(str, isHtml) == Dir.LTR)) {
return BidiUtils.Format.RLM_STRING;
}
return "";
}
/**
* Estimates the directionality of a string using the best known general-purpose method, i.e.
* using relative word counts. Dir.NEUTRAL return value indicates completely neutral input.
*
* @param str String whose directionality is to be estimated
* @param isHtml Whether {@code str} is HTML / HTML-escaped
* @return {@code str}'s estimated overall directionality
*/
@VisibleForTesting
static Dir estimateDirection(String str, boolean isHtml) {
return BidiUtils.estimateDirection(str, isHtml);
}
}
| |
package course.labs.notificationslab;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import android.app.Activity;
import android.app.Fragment;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.os.AsyncTask;
import android.os.Bundle;
import android.widget.RemoteViews;
import android.widget.Toast;
public class DownloaderTaskFragment extends Fragment {
private DownloadFinishedListener mCallback;
private Context mContext;
private final int MY_NOTIFICATION_ID = 11151990;
@SuppressWarnings("unused")
private static final String TAG = "Lab-Notifications";
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// Preserve across reconfigurations
setRetainInstance(true);
// TODO: Create new DownloaderTask that "downloads" data
DownloaderTask task = new DownloaderTask();
// TODO: Retrieve arguments from DownloaderTaskFragment
// Prepare them for use with DownloaderTask.
Integer[] params = MainActivity.sRawTextFeedIds.toArray(new Integer[] {});
// TODO: Start the DownloaderTask
task.execute(params);
}
// Assign current hosting Activity to mCallback
// Store application context for use by downloadTweets()
@Override
public void onAttach(Activity activity) {
super.onAttach(activity);
mContext = activity.getApplicationContext();
// Make sure that the hosting activity has implemented
// the correct callback interface.
try {
mCallback = (DownloadFinishedListener) activity;
} catch (ClassCastException e) {
throw new ClassCastException(activity.toString() + " must implement DownloadFinishedListener");
}
}
// Null out mCallback
@Override
public void onDetach() {
super.onDetach();
mCallback = null;
}
// This class must use the downloadTweets method (currently commented
// out). Ultimately, it must also pass newly available data back to
// the hosting Activity using the DownloadFinishedListener interface.
// public class DownloaderTask extends ...
public class DownloaderTask extends AsyncTask<Integer, Integer, String[]> {
@Override
protected String[] doInBackground(Integer... params) {
String[] tweets = downloadTweets(params);
return tweets;
}
@Override
protected void onPostExecute(String[] result) {
if (mCallback != null) {
mCallback.notifyDataRefreshed(result);
}
}
}
// Simulates downloading Twitter data from the network
private String[] downloadTweets(Integer resourceIDS[]) {
final int simulatedDelay = 2000;
String[] feeds = new String[resourceIDS.length];
boolean downLoadCompleted = false;
try {
for (int idx = 0; idx < resourceIDS.length; idx++) {
InputStream inputStream;
BufferedReader in;
try {
// Pretend downloading takes a long time
Thread.sleep(simulatedDelay);
} catch (InterruptedException e) {
e.printStackTrace();
}
inputStream = mContext.getResources().openRawResource(resourceIDS[idx]);
in = new BufferedReader(new InputStreamReader(inputStream));
String readLine;
StringBuffer buf = new StringBuffer();
while ((readLine = in.readLine()) != null) {
buf.append(readLine);
}
feeds[idx] = buf.toString();
if (null != in) {
in.close();
}
}
downLoadCompleted = true;
saveTweetsToFile(feeds);
} catch (IOException e) {
e.printStackTrace();
}
// Notify user that downloading has finished
notify(downLoadCompleted);
return feeds;
}
// Uncomment this helper method.
// If necessary, notifies the user that the tweet downloads are
// complete. Sends an ordered broadcast back to the BroadcastReceiver in
// MainActivity to determine whether the notification is necessary.
private void notify(final boolean success) {
final Intent restartMainActivityIntent = new Intent(mContext, MainActivity.class);
restartMainActivityIntent.setFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
// Sends an ordered broadcast to determine whether MainActivity is
// active and in the foreground. Creates a new BroadcastReceiver
// to receive a result indicating the state of MainActivity
// The Action for this broadcast Intent is
// MainActivity.DATA_REFRESHED_ACTION
// The result, MainActivity.IS_ALIVE, indicates that MainActivity is
// active and in the foreground.
mContext.sendOrderedBroadcast(new Intent(MainActivity.DATA_REFRESHED_ACTION), null, new BroadcastReceiver() {
final String failMsg = mContext.getString(R.string.download_failed_string);
final String successMsg = mContext.getString(R.string.download_succes_string);
final String notificationSentMsg = mContext.getString(R.string.notification_sent_string);
@Override
public void onReceive(Context context, Intent intent) {
// TODO: Check whether or not the MainActivity
// received the broadcast
if (intent.getAction().equals(MainActivity.DATA_REFRESHED_ACTION)) {
if (getResultCode() != MainActivity.IS_ALIVE) {
// TODO: If not, create a PendingIntent using
// the
// restartMainActivityIntent and set its flags
// to FLAG_UPDATE_CURRENT
PendingIntent pendingIntent = PendingIntent.getActivity(context, 1, restartMainActivityIntent, PendingIntent.FLAG_UPDATE_CURRENT);
// Uses R.layout.custom_notification for the
// layout of the notification View. The xml
// file is in res/layout/custom_notification.xml
RemoteViews mContentView = new RemoteViews(mContext.getPackageName(), R.layout.custom_notification);
// TODO: Set the notification View's text to
// reflect whether the download completed
// successfully
mContentView.setTextViewText(R.id.text, successMsg);
// TODO: Use the Notification.Builder class to
// create the Notification. You will have to set
// several pieces of information. You can use
// android.R.drawable.stat_sys_warning
// for the small icon. You should also
// setAutoCancel(true).
Notification.Builder notificationBuilder = new Notification.Builder(context);
notificationBuilder.setContentIntent(pendingIntent);
notificationBuilder.setContent(mContentView);
notificationBuilder.setSmallIcon(android.R.drawable.stat_sys_warning);
// notificationBuilder.setAutoCancel(true);
// TODO: Send the notification
NotificationManager manager = (NotificationManager) context.getSystemService(Context.NOTIFICATION_SERVICE);
manager.notify(MY_NOTIFICATION_ID, notificationBuilder.build());
Toast.makeText(mContext, notificationSentMsg, Toast.LENGTH_LONG).show();
} else {
Toast.makeText(mContext, success ? successMsg : failMsg, Toast.LENGTH_LONG).show();
}
}
}
}, null, 0, null, null);
}
// Uncomment this helper method
// Saves the tweets to a file
private void saveTweetsToFile(String[] result) {
PrintWriter writer = null;
try {
FileOutputStream fos = mContext.openFileOutput(MainActivity.TWEET_FILENAME, Context.MODE_PRIVATE);
writer = new PrintWriter(new BufferedWriter(new OutputStreamWriter(fos)));
for (String s : result) {
writer.println(s);
}
} catch (IOException e) {
e.printStackTrace();
} finally {
if (null != writer) {
writer.close();
}
}
}
}
| |
/*
*
* Copyright (c) 2013,2019 AT&T Knowledge Ventures
* SPDX-License-Identifier: MIT
*/
package com.att.research.xacml.std;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.NodeList;
import com.att.research.xacml.api.Attribute;
import com.att.research.xacml.api.AttributeValue;
import com.att.research.xacml.api.Identifier;
import com.att.research.xacml.api.Request;
import com.att.research.xacml.api.RequestAttributes;
import com.att.research.xacml.api.RequestAttributesReference;
import com.att.research.xacml.api.RequestReference;
import com.att.research.xacml.api.Status;
import com.att.research.xacml.api.XACML3;
import com.att.research.xacml.api.pdp.ScopeQualifier;
import com.att.research.xacml.api.pdp.ScopeResolver;
import com.att.research.xacml.api.pdp.ScopeResolverException;
import com.att.research.xacml.api.pdp.ScopeResolverResult;
import com.att.research.xacml.std.datatypes.DataTypes;
import com.att.research.xacml.std.datatypes.XPathExpressionWrapper;
/**
* StdIndividualDecisionRequestGenerator is a utility that PDP developers can use to take an original
* {@link com.att.research.xacml.api.Request} and turn it into a sequence of individual decision <code>Request</code>s.
* This class implements all of the multiple-decision profiles specified in "XACML v3.0 Multiple Decision Profile Version 1.0"
*
* @author car
* @version $Revision$
*/
public class StdIndividualDecisionRequestGenerator {
// Reusable empty array for Collection.toArray conversions.
private static final Identifier[] idArray = new Identifier[0];
// Canned syntax-error statuses for malformed multiple-decision requests.
private static final Status STATUS_NO_ATTRIBUTES = new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "No attributes");
private static final Status STATUS_NO_XMLID = new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "No xml:id");
private static final Status STATUS_NO_CATEGORY = new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "No category");
private static final Status STATUS_NO_RESOURCE_ID = new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "No " + XACML3.ID_RESOURCE_RESOURCE_ID.stringValue() + " attributes");
private static final Logger logger = LoggerFactory.getLogger(StdIndividualDecisionRequestGenerator.class);
// The original multi-decision request being exploded.
private Request originalRequest;
// Accumulates the generated individual decision requests.
private List<Request> individualDecisionRequests = new ArrayList<>();
// Optional resolver used to expand scope attributes; may be null (see processScopes).
private ScopeResolver scopeResolver;
/**
 * Returns a mutable copy of the given <code>RequestAttributes</code> with every
 * multiple:content-selector attribute filtered out; category, content root, and
 * xml:id are carried over unchanged.
 */
private static StdMutableRequestAttributes removeMultipleContentSelector(RequestAttributes requestAttributes) {
    StdMutableRequestAttributes copy = new StdMutableRequestAttributes();
    copy.setCategory(requestAttributes.getCategory());
    copy.setContentRoot(requestAttributes.getContentRoot());
    copy.setXmlId(requestAttributes.getXmlId());
    for (Attribute attribute : requestAttributes.getAttributes()) {
        boolean isMultipleContentSelector = attribute.getAttributeId().equals(XACML3.ID_MULTIPLE_CONTENT_SELECTOR);
        if (!isMultipleContentSelector) {
            copy.add(attribute);
        }
    }
    return copy;
}
/**
 * Does a depth-first recursion on the <code>RequestAttributes</code> that have a
 * multiple:content-selector attribute and generates all possible combinations of these
 * attributes with the <code>RequestAttributes</code> without a multiple:content-selector
 * attribute. Completed combinations and any error placeholders are appended to
 * {@code this.individualDecisionRequests}.
 *
 * @param listRequestAttributes the <code>List</code> of <code>RequestAttributes</code> for the new <code>Request</code>s
 * @param listPos the position within the <code>List</code> to process next
 * @param requestInProgress the <code>StdMutableRequest</code> with all of the processed <code>RequestAttributes</code> so far
 */
private void explodeOnContentSelector(List<RequestAttributes> listRequestAttributes, int listPos, StdMutableRequest requestInProgress) {
    int listSize = listRequestAttributes.size();
    while (listPos < listSize) {
        RequestAttributes requestAttributes = listRequestAttributes.get(listPos++);
        if (requestAttributes.hasAttributes(XACML3.ID_MULTIPLE_CONTENT_SELECTOR)) {
            /*
             * Get the single Attribute for the multiple content selector
             */
            Iterator<Attribute> iterAttributesMultipleContentSelector = requestAttributes.getAttributes(XACML3.ID_MULTIPLE_CONTENT_SELECTOR);
            assert(iterAttributesMultipleContentSelector != null && iterAttributesMultipleContentSelector.hasNext());
            Attribute attributeMultipleContentSelector = iterAttributesMultipleContentSelector.next();
            if (iterAttributesMultipleContentSelector.hasNext()) {
                // More than one multiple:content-selector attribute in a category is a syntax error.
                this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "multiple " + XACML3.ID_MULTIPLE_CONTENT_SELECTOR.stringValue() + " in category " + requestAttributes.getCategory().stringValue())));
                return;
            }
            /*
             * Get all of the XPathExpression values for this attribute, evaluate them against the Content node
             */
            Iterator<AttributeValue<XPathExpressionWrapper>> iterXPathExpressions = attributeMultipleContentSelector.findValues(DataTypes.DT_XPATHEXPRESSION);
            if (iterXPathExpressions == null || !iterXPathExpressions.hasNext()) {
                this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "no XPathExpression values in " + XACML3.ID_MULTIPLE_CONTENT_SELECTOR.stringValue() + " in category " + requestAttributes.getCategory().stringValue())));
                return;
            }
            /*
             * Get the single XPathExpression and return an error if there is more than one. This may not be strictly necessary. We could
             * explode all of the XPathExpressions, but for now assume only one is allowed.
             */
            AttributeValue<XPathExpressionWrapper> attributeValueXPathExpression = iterXPathExpressions.next();
            if (iterXPathExpressions.hasNext()) {
                this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "multiple XPathExpression values in " + XACML3.ID_MULTIPLE_CONTENT_SELECTOR.stringValue() + " in category " + requestAttributes.getCategory().stringValue())));
                return;
            }
            XPathExpressionWrapper xpathExpression = attributeValueXPathExpression.getValue();
            if (xpathExpression == null) {
                this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "null XPathExpression")));
                return;
            }
            /*
             * Get the NodeList so we know how many results will be returned
             */
            NodeList nodeListXPathExpressionResults = requestAttributes.getContentNodeListByXpathExpression(xpathExpression);
            if (nodeListXPathExpressionResults == null || nodeListXPathExpressionResults.getLength() == 0) {
                this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "no matching nodes in the Content for XPathExpression " + xpathExpression.toString() + " in category " + requestAttributes.getCategory().stringValue())));
                return;
            }
            /*
             * For each matching node, create a new XPathExpression with an array accessor
             * (XPath positions are 1-based, hence the i+1 below) and recurse with a
             * single content-selector in place of the multiple one.
             */
            for (int i = 0 ; i < nodeListXPathExpressionResults.getLength() ; i++) {
                try {
                    StdMutableRequestAttributes requestAttributesSingleContentSelector = removeMultipleContentSelector(requestAttributes);
                    XPathExpressionWrapper xpathExpressionWrapperSingle = new XPathExpressionWrapper(xpathExpression.getNamespaceContext(), xpathExpression.getPath() + "[" + (i+1) + "]");
                    Attribute attributeContentSelector = new StdMutableAttribute(attributeMultipleContentSelector.getCategory(),
                            XACML3.ID_CONTENT_SELECTOR,
                            DataTypes.DT_XPATHEXPRESSION.createAttributeValue(xpathExpressionWrapperSingle),
                            attributeMultipleContentSelector.getIssuer(),
                            attributeMultipleContentSelector.getIncludeInResults());
                    requestAttributesSingleContentSelector.add(attributeContentSelector);
                    StdMutableRequest stdRequestSingleContentSelector = new StdMutableRequest(requestInProgress);
                    stdRequestSingleContentSelector.add(requestAttributesSingleContentSelector);
                    /*
                     * Recurse to get the remaining attribute categories
                     */
                    this.explodeOnContentSelector(listRequestAttributes, listPos, stdRequestSingleContentSelector);
                } catch (Exception ex) {
                    this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_PROCESSING_ERROR, ex.getMessage())));
                    return;
                }
            }
            /*
             * Once we have exploded values at this point in the list, we just return as the list will have been
             * completely processed in the recursion
             */
            return;
        } else {
            // No multiple:content-selector in this category: carry it over as-is.
            requestInProgress.add(requestAttributes);
        }
    }
    /*
     * If we get here, then the request in progress is complete and should be added to the set
     */
    this.individualDecisionRequests.add(requestInProgress);
}
/**
 * Checks whether any category of the given request carries a multiple:content-selector
 * attribute. If none does, the request is added to the individual decision requests as-is;
 * otherwise the multiple content selectors are exploded into individual content-selectors.
 *
 * @param request the request to examine
 */
protected void processContentSelectors(Request request) {
    Iterator<RequestAttributes> iterRequestAttributes = request.getRequestAttributes().iterator();
    if (!iterRequestAttributes.hasNext()) {
        // Nothing to explode; pass the request through unchanged.
        this.individualDecisionRequests.add(request);
        return;
    }
    /*
     * Quick scan for any category with a multiple:content-selector attribute.
     */
    boolean hasMultipleContentSelectors = false;
    while (iterRequestAttributes.hasNext() && !hasMultipleContentSelectors) {
        hasMultipleContentSelectors = iterRequestAttributes.next().hasAttributes(XACML3.ID_MULTIPLE_CONTENT_SELECTOR);
    }
    if (!hasMultipleContentSelectors) {
        this.individualDecisionRequests.add(request);
        return;
    }
    // Seed a request carrying only the defaults and explode category by category.
    List<RequestAttributes> listRequestAttributes = new ArrayList<>(request.getRequestAttributes());
    StdMutableRequest seedRequest = new StdMutableRequest();
    seedRequest.setRequestDefaults(request.getRequestDefaults());
    seedRequest.setReturnPolicyIdList(request.getReturnPolicyIdList());
    this.explodeOnContentSelector(listRequestAttributes, 0, seedRequest);
}
/**
 * Returns a mutable copy of the given request with every resource-category
 * <code>RequestAttributes</code> removed; status, combined-decision flag, defaults,
 * and the return-policy-id-list flag are carried over unchanged.
 */
private static StdMutableRequest removeResources(Request request) {
    StdMutableRequest copy = new StdMutableRequest(request.getStatus());
    copy.setCombinedDecision(request.getCombinedDecision());
    copy.setRequestDefaults(request.getRequestDefaults());
    copy.setReturnPolicyIdList(request.getReturnPolicyIdList());
    Iterator<RequestAttributes> iterRequestAttributes = request.getRequestAttributes().iterator();
    if (iterRequestAttributes != null) {
        while (iterRequestAttributes.hasNext()) {
            RequestAttributes requestAttributes = iterRequestAttributes.next();
            Identifier category = requestAttributes.getCategory();
            boolean isResourceCategory = category != null && category.equals(XACML3.ID_ATTRIBUTE_CATEGORY_RESOURCE);
            if (!isResourceCategory) {
                copy.add(requestAttributes);
            }
        }
    }
    return copy;
}
/**
 * Creates a duplicate of the given <code>RequestAttributes</code> with any resource-id and
 * scope attributes removed; category, content root, and xml:id are carried over unchanged.
 *
 * @param requestAttributes the original <code>RequestAttributes</code>
 * @return the filtered copy
 */
private static StdMutableRequestAttributes removeScopeAttributes(RequestAttributes requestAttributes) {
    StdMutableRequestAttributes copy = new StdMutableRequestAttributes();
    copy.setCategory(requestAttributes.getCategory());
    copy.setContentRoot(requestAttributes.getContentRoot());
    copy.setXmlId(requestAttributes.getXmlId());
    for (Attribute attribute : requestAttributes.getAttributes()) {
        Identifier attributeId = attribute.getAttributeId();
        boolean isScopeRelated = attributeId.equals(XACML3.ID_RESOURCE_RESOURCE_ID)
                || attributeId.equals(XACML3.ID_RESOURCE_SCOPE);
        if (!isScopeRelated) {
            copy.add(attribute);
        }
    }
    return copy;
}
/**
 * Gets the <code>ScopeQualifier</code> specified in the given <code>RequestAttributes</code>,
 * or null when no scope attribute is present.
 *
 * @param requestAttributes the attributes to search for a scope attribute
 * @return the parsed <code>ScopeQualifier</code>, or null when there is no scope attribute
 * @throws ScopeResolverException when there is more than one scope attribute, when the scope
 *         attribute has no values, or when none of its values is a valid qualifier
 */
private static ScopeQualifier getScopeQualifier(RequestAttributes requestAttributes) throws ScopeResolverException {
    Iterator<Attribute> scopeAttributes = requestAttributes.getAttributes(XACML3.ID_RESOURCE_SCOPE);
    if (scopeAttributes == null || !scopeAttributes.hasNext()) {
        return null;
    }
    Attribute attributeScope = scopeAttributes.next();
    if (scopeAttributes.hasNext()) {
        throw new ScopeResolverException("More than one " + XACML3.ID_RESOURCE_SCOPE.stringValue() + " attribute");
    }
    Iterator<AttributeValue<?>> scopeValues = attributeScope.getValues().iterator();
    if (!scopeValues.hasNext()) {
        throw new ScopeResolverException("No values for " + XACML3.ID_RESOURCE_SCOPE.stringValue() + " attribute");
    }
    ScopeQualifier scopeQualifier = null;
    while (scopeValues.hasNext() && scopeQualifier == null) {
        AttributeValue<?> scopeValue = scopeValues.next();
        try {
            AttributeValue<String> scopeValueString = DataTypes.DT_STRING.convertAttributeValue(scopeValue);
            if (scopeValueString != null) {
                scopeQualifier = ScopeQualifier.getScopeQualifier(scopeValueString.getValue());
            }
        } catch (Exception ignored) {
            // Best-effort: a value that cannot be converted to a string is skipped;
            // failure is only reported if no value at all yields a qualifier.
        }
    }
    if (scopeQualifier == null) {
        throw new ScopeResolverException("No valid values for " + XACML3.ID_RESOURCE_SCOPE.stringValue() + " attribute");
    }
    return scopeQualifier;
}
/**
 * Checks the resource category of the given <code>Request</code> for a scope attribute.
 * If one is present, the scope is expanded via the configured scope resolver and one
 * individual decision request is generated per resolved resource id; otherwise the
 * request is passed straight on to content-selector processing.
 *
 * @param request the <code>Request</code> to examine; its status must be absent or OK
 */
protected void processScopes(Request request) {
    assert(request.getStatus() == null || request.getStatus().isOk());
    /*
     * If there is no scope resolver, then just move on to the content selectors
     */
    if (this.scopeResolver == null) {
        this.processContentSelectors(request);
        return;
    }
    /*
     * Scope only applies to the resource category, so just get the RequestAttributes for that.
     * At this point there should be at most one.
     */
    Iterator<RequestAttributes> iterRequestAttributesResource = request.getRequestAttributes(XACML3.ID_ATTRIBUTE_CATEGORY_RESOURCE);
    if (iterRequestAttributesResource == null || !iterRequestAttributesResource.hasNext()) {
        // No resource category at all: nothing to expand
        this.processContentSelectors(request);
        return;
    }
    RequestAttributes requestAttributesResource = iterRequestAttributesResource.next();
    assert(!iterRequestAttributesResource.hasNext());
    /*
     * Get the requested scope; a malformed scope attribute is reported as a syntax error
     */
    ScopeQualifier scopeQualifier = null;
    try {
        scopeQualifier = getScopeQualifier(requestAttributesResource);
    } catch (ScopeResolverException ex) {
        this.individualDecisionRequests.add(new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, ex.getMessage())));
        return;
    }
    if (scopeQualifier == null) {
        // No scope attribute present: nothing to expand
        this.processContentSelectors(request);
        return;
    }
    /*
     * Get the resource-id attributes and iterate over them, generating individual resource id
     * values using the scope resolver.
     */
    Iterator<Attribute> iterAttributesResourceId = requestAttributesResource.getAttributes(XACML3.ID_RESOURCE_RESOURCE_ID);
    if (iterAttributesResourceId == null || !iterAttributesResourceId.hasNext()) {
        // A scoped request must carry at least one resource-id to expand
        this.individualDecisionRequests.add(new StdMutableRequest(STATUS_NO_RESOURCE_ID));
        return;
    }
    /*
     * Make a copy of the request attributes with the scope and resource ID values removed.
     */
    StdMutableRequestAttributes requestAttributesBase = removeScopeAttributes(requestAttributesResource);
    /*
     * Set up the basic Request to match the input request but with no resource attributes
     */
    StdMutableRequest stdRequest = removeResources(request);
    boolean bAtLeastOne = false;
    while (iterAttributesResourceId.hasNext()) {
        Attribute attributeResourceId = iterAttributesResourceId.next();
        ScopeResolverResult scopeResolverResult = null;
        try {
            scopeResolverResult = this.scopeResolver.resolveScope(attributeResourceId, scopeQualifier);
        } catch (ScopeResolverException ex) {
            // A failure on one resource id is logged but does not abort the others
            logger.error("ScopeResolverException resolving " + attributeResourceId.toString() + ": " + ex.getMessage(), ex);
            continue;
        }
        if (scopeResolverResult.getStatus() != null && !scopeResolverResult.getStatus().isOk()) {
            // A bad status from the resolver aborts the whole expansion
            this.individualDecisionRequests.add(new StdMutableRequest(scopeResolverResult.getStatus()));
            return;
        }
        Iterator<Attribute> iterAttributesResourceIdResolved = scopeResolverResult.getAttributes();
        if (iterAttributesResourceIdResolved != null) {
            // Generate one exploded request per resolved resource id
            while (iterAttributesResourceIdResolved.hasNext()) {
                StdMutableRequestAttributes stdRequestAttributes = new StdMutableRequestAttributes(requestAttributesBase);
                stdRequestAttributes.add(iterAttributesResourceIdResolved.next());
                StdMutableRequest stdRequestExploded = new StdMutableRequest(stdRequest);
                stdRequestExploded.add(stdRequestAttributes);
                this.processContentSelectors(stdRequestExploded);
                bAtLeastOne = true;
            }
        }
    }
    if (!bAtLeastOne) {
        // Nothing resolved: fall back to the original, unexpanded resource ids
        logger.warn("No scopes expanded. Using original resource ids");
        iterAttributesResourceId = requestAttributesResource.getAttributes(XACML3.ID_RESOURCE_RESOURCE_ID);
        assert(iterAttributesResourceId != null);
        while (iterAttributesResourceId.hasNext()) {
            requestAttributesBase.add(iterAttributesResourceId.next());
        }
        stdRequest.add(requestAttributesBase);
        this.processContentSelectors(stdRequest);
    }
}
/**
 * Recursively expands repeated categories into separate requests. For the category at
 * position <code>pos</code>: a single <code>RequestAttributes</code> instance is added to
 * the request in progress in place; multiple instances fork one copy of the request per
 * instance, each recursing on the remaining categories. Once all categories are consumed,
 * the assembled request moves on to scope processing.
 *
 * @param identifiers the category identifiers to expand over
 * @param pos the index of the category currently being expanded
 * @param requestInProgress the partially assembled request
 * @param mapCategories the <code>RequestAttributes</code> instances grouped by category
 */
private void explodeOnCategory(Identifier[] identifiers, int pos, StdMutableRequest requestInProgress, Map<Identifier,List<RequestAttributes>> mapCategories) {
    if (pos >= identifiers.length) {
        // Recursion complete: hand the assembled request to the next processing stage
        this.processScopes(requestInProgress);
        return;
    }
    List<RequestAttributes> attributesForCategory = mapCategories.get(identifiers[pos]);
    assert(attributesForCategory != null && !attributesForCategory.isEmpty());
    if (attributesForCategory.size() == 1) {
        // Single instance: no copy needed, add in place and continue with the rest
        requestInProgress.add(attributesForCategory.get(0));
        this.explodeOnCategory(identifiers, pos + 1, requestInProgress, mapCategories);
    } else {
        // Multiple instances: fork one copy of the request per instance
        for (RequestAttributes requestAttributes : attributesForCategory) {
            StdMutableRequest forkedRequest = new StdMutableRequest(requestInProgress);
            forkedRequest.add(requestAttributes);
            this.explodeOnCategory(identifiers, pos + 1, forkedRequest, mapCategories);
        }
    }
}
/**
 * Checks to see if the given <code>Request</code> contains instances of repeated categories
 * in the request attributes elements. If so, the request is split into the cross-product of
 * the repeated categories before continuing; otherwise it moves straight on to scope
 * processing.
 *
 * @param request the <code>Request</code> to check
 */
protected void processRepeatedCategories(Request request) {
    // NOTE(review): Collection.iterator() never returns null; the null half of the
    // check below is defensive only
    Iterator<RequestAttributes> iterRequestAttributes = request.getRequestAttributes().iterator();
    if (iterRequestAttributes == null || !iterRequestAttributes.hasNext()) {
        /*
         * There are no attributes to process anyway. The PDP will give an indeterminate result from this
         */
        this.individualDecisionRequests.add(request);
        return;
    }
    /*
     * We need to do a quick check for multiple Attributes with the same Category
     */
    boolean bContainsMultiples = false;
    Set<Identifier> setCategories = new HashSet<>();
    while (iterRequestAttributes.hasNext() && !bContainsMultiples) {
        RequestAttributes requestAttributes = iterRequestAttributes.next();
        Identifier identifierCategory = requestAttributes.getCategory();
        if (identifierCategory == null) {
            // Every Attributes element must declare a category
            this.individualDecisionRequests.add(new StdMutableRequest(STATUS_NO_CATEGORY));
            return;
        }
        if (setCategories.contains(identifierCategory)) {
            bContainsMultiples = true;
        } else {
            setCategories.add(identifierCategory);
        }
    }
    /*
     * If there are no instances of categories with multiple Attributes elements, then no splitting is done here,
     * just move on to the next check.
     */
    if (!bContainsMultiples) {
        this.processScopes(request);
    } else {
        // Group the Attributes elements by category, preserving their order within each group
        iterRequestAttributes = request.getRequestAttributes().iterator();
        Map<Identifier,List<RequestAttributes>> mapCategories = new HashMap<>();
        while (iterRequestAttributes.hasNext()) {
            RequestAttributes requestAttributes = iterRequestAttributes.next();
            Identifier identifierCategory = requestAttributes.getCategory();
            List<RequestAttributes> listRequestAttributes = mapCategories.get(identifierCategory);
            if (listRequestAttributes == null) {
                listRequestAttributes = new ArrayList<>();
                mapCategories.put(identifierCategory, listRequestAttributes);
            }
            listRequestAttributes.add(requestAttributes);
        }
        StdMutableRequest requestRoot = new StdMutableRequest();
        requestRoot.setRequestDefaults(request.getRequestDefaults());
        requestRoot.setReturnPolicyIdList(request.getReturnPolicyIdList());
        // idArray is presumably a shared empty Identifier[] for the toArray contract -- TODO confirm
        this.explodeOnCategory(mapCategories.keySet().toArray(idArray), 0, requestRoot, mapCategories);
    }
}
/**
 * Resolves the given <code>RequestReference</code> against the original <code>Request</code>,
 * producing a fully qualified <code>Request</code> for the next stage of individual request
 * processing. Any resolution failure is reported as a request carrying an error status.
 *
 * @param requestOriginal the <code>Request</code> the references point into
 * @param requestReference the <code>RequestReference</code> to resolve
 * @return the resolved <code>Request</code>, or one with an error <code>Status</code>
 */
protected Request processMultiRequest(Request requestOriginal, RequestReference requestReference) {
    Collection<RequestAttributesReference> attributesReferences = requestReference.getAttributesReferences();
    if (attributesReferences.isEmpty()) {
        return new StdMutableRequest(STATUS_NO_ATTRIBUTES);
    }
    StdMutableRequest resolvedRequest = new StdMutableRequest(requestOriginal.getStatus());
    resolvedRequest.setRequestDefaults(requestOriginal.getRequestDefaults());
    resolvedRequest.setReturnPolicyIdList(requestOriginal.getReturnPolicyIdList());
    for (RequestAttributesReference attributesReference : attributesReferences) {
        String xmlId = attributesReference.getReferenceId();
        if (xmlId == null) {
            return new StdMutableRequest(STATUS_NO_XMLID);
        }
        RequestAttributes referencedAttributes = requestOriginal.getRequestAttributesByXmlId(xmlId);
        if (referencedAttributes == null) {
            // Dangling reference: report as a syntax error
            return new StdMutableRequest(new StdStatus(StdStatusCode.STATUS_CODE_SYNTAX_ERROR, "Unresolved xml:id " + xmlId));
        }
        resolvedRequest.add(referencedAttributes);
    }
    return resolvedRequest;
}
/**
 * Populates the individual decision <code>Request</code>s from the given <code>Request</code>
 * using all supported profiles. The process here is documented as step 1. of Section 4 of the
 * XACML document.
 *
 * @param request the <code>Request</code> to explode
 */
protected void createIndividualDecisionRequests(Request request) {
    /*
     * If the request is bad to begin with, just add it to the list and be done.
     */
    if (request.getStatus() != null && !request.getStatus().isOk()) {
        this.individualDecisionRequests.add(request);
        return;
    }
    /*
     * Check to see if this Request is a MultiRequest
     */
    Iterator<RequestReference> iterRequestReferences = request.getMultiRequests().iterator();
    if (iterRequestReferences != null && iterRequestReferences.hasNext()) {
        // Resolve each RequestReference into a standalone Request and process it
        while (iterRequestReferences.hasNext()) {
            Request requestFromReferences = this.processMultiRequest(request, iterRequestReferences.next());
            assert(requestFromReferences != null);
            if (requestFromReferences.getStatus() == null || requestFromReferences.getStatus().isOk()) {
                this.processRepeatedCategories(requestFromReferences);
            } else {
                /*
                 * Just add the bad request to the list. It will cause a Result with the same
                 * bad status when the PDP actually runs the request.
                 */
                this.individualDecisionRequests.add(requestFromReferences);
            }
        }
    } else {
        // Not a multi-request: process the request directly
        this.processRepeatedCategories(request);
    }
}
/**
 * Creates a generator that uses the given <code>ScopeResolver</code> to expand scoped
 * resource ids, and immediately explodes the given <code>Request</code> into its
 * individual decision requests.
 *
 * @param scopeResolverIn the <code>ScopeResolver</code> to use; may be null, in which
 *        case no scope expansion is performed
 * @param request the original <code>Request</code> to explode
 */
public StdIndividualDecisionRequestGenerator(ScopeResolver scopeResolverIn, Request request) {
    this.originalRequest = request;
    this.scopeResolver = scopeResolverIn;
    // NOTE(review): invokes an overridable protected method from the constructor; safe
    // only if subclasses do not depend on their own state during this call -- confirm
    this.createIndividualDecisionRequests(request);
}
/**
 * Creates a generator with no scope resolver (no scope expansion is performed).
 *
 * @param request the original <code>Request</code> to explode
 */
public StdIndividualDecisionRequestGenerator(Request request) {
    this(null, request);
}
/**
 * Gets the original <code>Request</code> this generator was created from.
 *
 * @return the original <code>Request</code>
 */
public Request getOriginalRequest() {
    return this.originalRequest;
}
/**
 * Gets an <code>Iterator</code> over the individual decision <code>Request</code>s
 * generated from the original <code>Request</code>.
 *
 * @return an <code>Iterator</code> over the individual decision <code>Request</code>s
 */
public Iterator<Request> getIndividualDecisionRequests() {
    return this.individualDecisionRequests.iterator();
}
}
| |
package brooklyn.entity.basic;
import static com.google.common.base.Preconditions.checkNotNull;
import java.util.Collection;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import brooklyn.config.ConfigKey;
import brooklyn.config.ConfigKey.HasConfigKey;
import brooklyn.enricher.basic.AbstractEnricher;
import brooklyn.entity.Application;
import brooklyn.entity.Effector;
import brooklyn.entity.Entity;
import brooklyn.entity.EntityType;
import brooklyn.entity.Group;
import brooklyn.entity.proxying.EntitySpec;
import brooklyn.entity.proxying.InternalEntityFactory;
import brooklyn.entity.rebind.BasicEntityRebindSupport;
import brooklyn.entity.rebind.RebindManagerImpl;
import brooklyn.entity.rebind.RebindSupport;
import brooklyn.event.AttributeSensor;
import brooklyn.event.Sensor;
import brooklyn.event.SensorEvent;
import brooklyn.event.SensorEventListener;
import brooklyn.event.basic.AttributeMap;
import brooklyn.event.basic.AttributeSensorAndConfigKey;
import brooklyn.event.basic.BasicNotificationSensor;
import brooklyn.event.feed.ConfigToAttributes;
import brooklyn.internal.storage.BrooklynStorage;
import brooklyn.internal.storage.Reference;
import brooklyn.internal.storage.impl.BasicReference;
import brooklyn.location.Location;
import brooklyn.location.basic.Locations;
import brooklyn.management.EntityManager;
import brooklyn.management.ExecutionContext;
import brooklyn.management.ManagementContext;
import brooklyn.management.SubscriptionContext;
import brooklyn.management.SubscriptionHandle;
import brooklyn.management.Task;
import brooklyn.management.internal.EffectorUtils;
import brooklyn.management.internal.EntityManagementSupport;
import brooklyn.management.internal.ManagementContextInternal;
import brooklyn.management.internal.SubscriptionTracker;
import brooklyn.mementos.EntityMemento;
import brooklyn.policy.Enricher;
import brooklyn.policy.EnricherSpec;
import brooklyn.policy.Policy;
import brooklyn.policy.PolicySpec;
import brooklyn.policy.basic.AbstractPolicy;
import brooklyn.util.BrooklynLanguageExtensions;
import brooklyn.util.collections.MutableList;
import brooklyn.util.collections.MutableMap;
import brooklyn.util.collections.SetFromLiveMap;
import brooklyn.util.config.ConfigBag;
import brooklyn.util.flags.FlagUtils;
import brooklyn.util.flags.SetFromFlag;
import brooklyn.util.guava.Maybe;
import brooklyn.util.task.DeferredSupplier;
import brooklyn.util.text.Identifiers;
import brooklyn.util.text.Strings;
import com.google.common.annotations.Beta;
import com.google.common.base.Objects;
import com.google.common.base.Objects.ToStringHelper;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
/**
* Default {@link Entity} implementation, which should be extended whenever implementing an entity.
* <p>
* Provides several common fields ({@link #displayName}, {@link #id}), and supports the core features of
* an entity such as configuration keys, attributes, subscriptions and effector invocation.
* <p>
* If a sub-class is creating other entities, this should be done in an overridden {@link #init()}
* method.
* <p>
* Note that config is typically inherited by children, whereas the fields and attributes are not.
* <p>
* Though currently Groovy code, this is very likely to change to pure Java in a future release of
* Brooklyn so Groovy'isms should not be relied on.
* <p>
* Sub-classes should have a no-argument constructor. When brooklyn creates an entity, it will:
* <ol>
* <li>Construct the entity via the no-argument constructor
* <li>Call {@link #setDisplayName(String)}
* <li>Call {@link #setManagementContext(ManagementContextInternal)}
* <li>Call {@link #setProxy(Entity)}; the proxy should be used by everything else when referring
* to this entity (except for drivers/policies that are attached to the entity, which can be
* given a reference to this entity itself).
* <li>Call {@link #configure(Map)} and then {@link #setConfig(ConfigKey, Object)}
* <li>Call {@link #init()}
* <li>Call {@link #addPolicy(Policy)} (for any policies defined in the {@link EntitySpec})
* <li>Call {@link #setParent(Entity)}, if a parent is specified in the {@link EntitySpec}
* </ol>
* <p>
* The legacy (pre 0.5) mechanism for creating entities is for others to call the constructor directly.
* This is now deprecated.
*/
public abstract class AbstractEntity implements EntityLocal, EntityInternal {
    private static final Logger LOG = LoggerFactory.getLogger(AbstractEntity.class);

    // Ensure Brooklyn's language extensions are registered before any entity is used
    static { BrooklynLanguageExtensions.init(); }

    // Notification sensors emitted when sensors, effectors, policies, or children
    // are added to or removed from an entity at runtime.
    public static final BasicNotificationSensor<Sensor> SENSOR_ADDED = new BasicNotificationSensor<Sensor>(Sensor.class,
            "entity.sensor.added", "Sensor dynamically added to entity");
    public static final BasicNotificationSensor<Sensor> SENSOR_REMOVED = new BasicNotificationSensor<Sensor>(Sensor.class,
            "entity.sensor.removed", "Sensor dynamically removed from entity");
    public static final BasicNotificationSensor<String> EFFECTOR_ADDED = new BasicNotificationSensor<String>(String.class,
            "entity.effector.added", "Effector dynamically added to entity");
    public static final BasicNotificationSensor<String> EFFECTOR_REMOVED = new BasicNotificationSensor<String>(String.class,
            "entity.effector.removed", "Effector dynamically removed from entity");
    public static final BasicNotificationSensor<String> EFFECTOR_CHANGED = new BasicNotificationSensor<String>(String.class,
            "entity.effector.changed", "Effector dynamically changed on entity");
    public static final BasicNotificationSensor<PolicyDescriptor> POLICY_ADDED = new BasicNotificationSensor<PolicyDescriptor>(PolicyDescriptor.class,
            "entity.policy.added", "Policy dynamically added to entity");
    public static final BasicNotificationSensor<PolicyDescriptor> POLICY_REMOVED = new BasicNotificationSensor<PolicyDescriptor>(PolicyDescriptor.class,
            "entity.policy.removed", "Policy dynamically removed from entity");
    public static final BasicNotificationSensor<Entity> CHILD_ADDED = new BasicNotificationSensor<Entity>(Entity.class,
            "entity.children.added", "Child dynamically added to entity");
    public static final BasicNotificationSensor<Entity> CHILD_REMOVED = new BasicNotificationSensor<Entity>(Entity.class,
            "entity.children.removed", "Child dynamically removed from entity");

    // Unique id for this entity; also settable via the "id" flag
    @SetFromFlag(value="id")
    private String id = Identifiers.makeRandomId(8);

    // True until an explicit display name is supplied; see setDisplayName/setDefaultDisplayName
    private boolean displayNameAutoGenerated = true;

    // The proxy through which everything else should refer to this entity; see setProxy
    private Entity selfProxy;

    private volatile Application application;

    // TODO Because some things still don't use EntitySpec (e.g. the EntityFactory stuff for cluster/fabric),
    // then we need temp vals here. When setManagementContext is called, we'll switch these out for the real deal;
    // i.e. for the values backed by storage
    private Reference<Entity> parent = new BasicReference<Entity>();
    private Set<Group> groups = Sets.newLinkedHashSet();
    private Set<Entity> children = Sets.newLinkedHashSet();
    private Reference<List<Location>> locations = new BasicReference<List<Location>>(ImmutableList.<Location>of()); // dups removed in addLocations
    private Reference<Long> creationTimeUtc = new BasicReference<Long>(System.currentTimeMillis());
    private Reference<String> displayName = new BasicReference<String>();
    private Reference<String> iconUrl = new BasicReference<String>();

    Map<String,Object> presentationAttributes = Maps.newLinkedHashMap();
    Collection<AbstractPolicy> policies = Lists.newCopyOnWriteArrayList();
    Collection<AbstractEnricher> enrichers = Lists.newCopyOnWriteArrayList();
    Set<Object> tags = Sets.newLinkedHashSet();

    // FIXME we do not currently support changing parents, but to implement a cluster that can shrink we need to support at least
    // orphaning (i.e. removing ownership). This flag notes if the entity has previously had a parent, and if an attempt is made to
    // set a new parent an exception will be thrown.
    boolean previouslyOwned = false;

    /**
     * Whether we are still being constructed, in which case never warn in "assertNotYetOwned"
     */
    private boolean inConstruction = true;

    private final EntityDynamicType entityType;

    protected final EntityManagementSupport managementSupport = new EntityManagementSupport(this);

    /**
     * The config values of this entity. Updating this map should be done
     * via getConfig/setConfig.
     */
    // TODO Assigning temp value because not everything uses EntitySpec; see setManagementContext()
    private EntityConfigMap configsInternal = new EntityConfigMap(this, Maps.<ConfigKey<?>, Object>newLinkedHashMap());

    /**
     * The sensor-attribute values of this entity. Updating this map should be done
     * via getAttribute/setAttribute; it will automatically emit an attribute-change event.
     */
    // TODO Assigning temp value because not everything uses EntitySpec; see setManagementContext()
    private AttributeMap attributesInternal = new AttributeMap(this, Maps.<Collection<String>, Object>newLinkedHashMap());

    /**
     * For temporary data, e.g. timestamps etc for calculating real attribute values, such as when
     * calculating averages over time etc.
     *
     * @deprecated since 0.6; use attributes
     */
    @Deprecated
    protected final Map<String,Object> tempWorkings = Maps.newLinkedHashMap();

    protected transient SubscriptionTracker _subscriptionTracker;

    // True when this entity was constructed directly rather than via EntityManager/EntitySpec
    private final boolean _legacyConstruction;
    /** Preferred constructor; to be used with EntityManager().createEntity(spec). */
    public AbstractEntity() {
        this(Maps.newLinkedHashMap(), null);
    }

    /**
     * @deprecated since 0.5; instead use no-arg constructor with EntityManager().createEntity(spec)
     */
    @Deprecated
    public AbstractEntity(Map flags) {
        this(flags, null);
    }

    /**
     * @deprecated since 0.5; instead use no-arg constructor with EntityManager().createEntity(spec)
     */
    @Deprecated
    public AbstractEntity(Entity parent) {
        this(Maps.newLinkedHashMap(), parent);
    }
    // FIXME don't leak this reference in constructor - even to utils
    /**
     * Main constructor backing all the deprecated constructors above: validates and
     * normalizes the parent (supplied via parameter, "parent" flag, or deprecated
     * "owner" flag), then, for legacy construction, applies the flags via configure().
     *
     * @deprecated since 0.5; instead use no-arg constructor with EntityManager().createEntity(spec)
     */
    @Deprecated
    public AbstractEntity(Map flags, Entity parent) {
        if (flags==null) {
            throw new IllegalArgumentException("Flags passed to entity "+this+" must not be null (try no-arguments or empty map)");
        }
        // Any two of {parameter, flags.parent, flags.owner} that are both present must agree
        if (flags.get("parent") != null && parent != null && flags.get("parent") != parent) {
            throw new IllegalArgumentException("Multiple parents supplied, "+flags.get("parent")+" and "+parent);
        }
        // NOTE(review): this message says "flags.parent" but the conflict detected here is
        // between flags.owner and the parent parameter -- confirm intended wording
        if (flags.get("owner") != null && parent != null && flags.get("owner") != parent) {
            throw new IllegalArgumentException("Multiple parents supplied with flags.parent, "+flags.get("owner")+" and "+parent);
        }
        if (flags.get("parent") != null && flags.get("owner") != null && flags.get("parent") != flags.get("owner")) {
            throw new IllegalArgumentException("Multiple parents supplied with flags.parent and flags.owner, "+flags.get("parent")+" and "+flags.get("owner"));
        }
        if (parent != null) {
            flags.put("parent", parent);
        }
        // Migrate the deprecated "owner" flag onto "parent"
        if (flags.get("owner") != null) {
            LOG.warn("Use of deprecated \"flags.owner\" instead of \"flags.parent\" for entity {}", this);
            flags.put("parent", flags.get("owner"));
            flags.remove("owner");
        }
        // TODO Don't let `this` reference escape during construction
        entityType = new EntityDynamicType(this);
        _legacyConstruction = !InternalEntityFactory.FactoryConstructionTracker.isConstructing();
        if (_legacyConstruction) {
            LOG.warn("Deprecated use of old-style entity construction for "+getClass().getName()+"; instead use EntityManager().createEntity(spec)");
            AbstractEntity checkWeGetThis = configure(flags);
            assert this.equals(checkWeGetThis) : this+" configure method does not return itself; returns "+checkWeGetThis+" instead";
        }
        inConstruction = false;
    }
    @Override
    public int hashCode() {
        // Hash on the immutable entity id, consistent with equals() below
        return id.hashCode();
    }

    /**
     * An entity is equal to itself, to its own proxy, and to any {@link Entity}
     * with the same id.
     */
    @Override
    public boolean equals(Object o) {
        return (o == this || o == selfProxy) ||
                (o instanceof Entity && Objects.equal(id, ((Entity)o).getId()));
    }

    /** @return true if this entity was created via the deprecated direct-constructor path */
    protected boolean isLegacyConstruction() {
        return _legacyConstruction;
    }

    /** @return true if the rebind (restore-from-persisted-state) process is currently running */
    protected boolean isRebinding() {
        return RebindManagerImpl.RebindTracker.isRebinding();
    }

    @Override
    public String getId() {
        return id;
    }
    /**
     * Sets the proxy that other entities should use to refer to this entity.
     * May be set only once.
     *
     * @throws IllegalStateException if a proxy has already been set
     */
    public void setProxy(Entity proxy) {
        if (selfProxy != null) throw new IllegalStateException("Proxy is already set; cannot reset proxy for "+toString());
        selfProxy = checkNotNull(proxy, "proxy");
    }

    /** @return the proxy for this entity, or null if none has been set yet */
    public Entity getProxy() {
        return selfProxy;
    }
/**
* Returns the proxy, or if not available (because using legacy code) then returns the real entity.
* This method will be deleted in a future release; it will be kept while deprecated legacy code
* still exists that creates entities without setting the proxy.
*/
@Beta
public Entity getProxyIfAvailable() {
return getProxy()!=null ? getProxy() : this;
}
    /** sets fields from flags; can be overridden if needed, subclasses should
     * set custom fields before _invoking_ this super
     * (and they nearly always should invoke the super)
     * <p>
     * note that it is usually preferred to use the SetFromFlag annotation on relevant fields
     * so they get set automatically by this method and overriding it is unnecessary
     *
     * @return this entity, for fluent style initialization
     */
    public AbstractEntity configure() {
        // Delegate to configure(Map) with an empty flags map
        return configure(Maps.newLinkedHashMap());
    }
    /**
     * Sets fields and config from the given flags map. Handles the special keys
     * "parent", "config", "displayName"/"name", and "iconUrl"; remaining entries are
     * applied to @SetFromFlag fields and ConfigKeys, and any leftovers are stored in
     * the local config bag with a warning. The map is consumed (entries are removed).
     *
     * @return this entity, for fluent style initialization
     */
    @Override
    public AbstractEntity configure(Map flags) {
        if (!inConstruction && getManagementSupport().isDeployed()) {
            LOG.warn("bulk/flag configuration being made to {} after deployment: may not be supported in future versions ({})",
                    new Object[] { this, flags });
        }
        // TODO use a config bag instead
        //     ConfigBag bag = new ConfigBag().putAll(flags);
        // FIXME Need to set parent with proxy, rather than `this`
        Entity suppliedParent = (Entity) flags.remove("parent");
        if (suppliedParent != null) {
            suppliedParent.addChild(getProxyIfAvailable());
        }
        Map<ConfigKey,?> suppliedOwnConfig = (Map<ConfigKey, ?>) flags.remove("config");
        if (suppliedOwnConfig != null) {
            for (Map.Entry<ConfigKey, ?> entry : suppliedOwnConfig.entrySet()) {
                setConfigEvenIfOwned(entry.getKey(), entry.getValue());
            }
        }
        // "displayName" takes precedence over the legacy "name" flag
        if (flags.get("displayName") != null) {
            displayName.set((String) flags.remove("displayName"));
            displayNameAutoGenerated = false;
        } else if (flags.get("name") != null) {
            displayName.set((String) flags.remove("name"));
            displayNameAutoGenerated = false;
        } else if (isLegacyConstruction()) {
            displayName.set(getClass().getSimpleName()+":"+Strings.maxlen(id, 4));
            displayNameAutoGenerated = true;
        }
        if (flags.get("iconUrl") != null) {
            iconUrl.set((String) flags.remove("iconUrl"));
        }
        // allow config keys, and fields, to be set from these flags if they have a SetFromFlag annotation
        // TODO the default values on flags are not used? (we should remove that support, since ConfigKeys gives a better way)
        FlagUtils.setFieldsFromFlags(flags, this);
        flags = FlagUtils.setAllConfigKeys(flags, this, false);
        // finally all config keys specified in map should be set as config
        // TODO use a config bag and remove the ones set above in the code below
        for (Iterator<Map.Entry> fi = flags.entrySet().iterator(); fi.hasNext();) {
            Map.Entry entry = fi.next();
            Object k = entry.getKey();
            if (k instanceof HasConfigKey) k = ((HasConfigKey)k).getConfigKey();
            if (k instanceof ConfigKey) {
                setConfigEvenIfOwned((ConfigKey)k, entry.getValue());
                fi.remove();
            }
        }
        // Anything still unclaimed goes into the local config bag with a warning
        if (!flags.isEmpty()) {
            LOG.warn("Unsupported flags when configuring {}; storing: {}", this, flags);
            configsInternal.addToLocalBag(flags);
        }
        return this;
    }
    /**
     * Sets a config key value, and returns this Entity instance for use in fluent-API style coding.
     */
    public <T> AbstractEntity configure(ConfigKey<T> key, T value) {
        setConfig(key, value);
        return this;
    }

    /** As {@link #configure(ConfigKey, Object)}, taking a String value for later coercion. */
    public <T> AbstractEntity configure(ConfigKey<T> key, String value) {
        setConfig((ConfigKey)key, value);
        return this;
    }

    /** As {@link #configure(ConfigKey, Object)}, for a {@link HasConfigKey}. */
    public <T> AbstractEntity configure(HasConfigKey<T> key, T value) {
        setConfig(key, value);
        return this;
    }

    /** As {@link #configure(ConfigKey, String)}, for a {@link HasConfigKey}. */
    public <T> AbstractEntity configure(HasConfigKey<T> key, String value) {
        setConfig((ConfigKey)key, value);
        return this;
    }
    /**
     * Attaches this entity to the given management context, swapping the temporary
     * in-memory state holders (parent, groups, children, locations, timestamps, names,
     * config, attributes) for ones backed by the management context's storage. Values
     * captured before the swap are copied into storage only where storage has no value yet.
     */
    public void setManagementContext(ManagementContextInternal managementContext) {
        getManagementSupport().setManagementContext(managementContext);
        entityType.setName(getEntityTypeName());
        if (displayNameAutoGenerated) displayName.set(getEntityType().getSimpleName()+":"+Strings.maxlen(id, 4));

        // Snapshot the temporary (pre-management) state before re-pointing the fields
        Entity oldParent = parent.get();
        Set<Group> oldGroups = groups;
        Set<Entity> oldChildren = children;
        List<Location> oldLocations = locations.get();
        EntityConfigMap oldConfig = configsInternal;
        AttributeMap oldAttribs = attributesInternal;
        long oldCreationTimeUtc = creationTimeUtc.get();
        String oldDisplayName = displayName.get();
        String oldIconUrl = iconUrl.get();

        // Re-point state at the storage-backed holders, keyed by entity id
        parent = managementContext.getStorage().getReference(id+"-parent");
        groups = SetFromLiveMap.create(managementContext.getStorage().<Group,Boolean>getMap(id+"-groups"));
        children = SetFromLiveMap.create(managementContext.getStorage().<Entity,Boolean>getMap(id+"-children"));
        locations = managementContext.getStorage().getNonConcurrentList(id+"-locations");
        creationTimeUtc = managementContext.getStorage().getReference(id+"-creationTime");
        displayName = managementContext.getStorage().getReference(id+"-displayName");
        iconUrl = managementContext.getStorage().getReference(id+"-iconUrl");

        // Only override stored defaults if we have actual values. We might be in setManagementContext
        // because we are reconstituting an existing entity in a new brooklyn management-node (in which
        // case believe what is already in the storage), or we might be in the middle of creating a new
        // entity. Normally for a new entity (using EntitySpec creation approach), this will get called
        // before setting the parent etc. However, for backwards compatibility we still support some
        // things calling the entity's constructor directly.
        if (oldParent != null) parent.set(oldParent);
        if (oldGroups.size() > 0) groups.addAll(oldGroups);
        if (oldChildren.size() > 0) children.addAll(oldChildren);
        if (oldLocations.size() > 0) locations.set(ImmutableList.copyOf(oldLocations));
        if (creationTimeUtc.isNull()) creationTimeUtc.set(oldCreationTimeUtc);
        if (displayName.isNull()) {
            displayName.set(oldDisplayName);
        } else {
            // Storage already has a name, so it was explicitly chosen
            displayNameAutoGenerated = false;
        }
        if (iconUrl.isNull()) iconUrl.set(oldIconUrl);

        configsInternal = new EntityConfigMap(this, managementContext.getStorage().<ConfigKey<?>, Object>getMap(id+"-config"));
        if (oldConfig.getLocalConfig().size() > 0) {
            configsInternal.setLocalConfig(oldConfig.getLocalConfig());
        }
        refreshInheritedConfig();

        attributesInternal = new AttributeMap(this, managementContext.getStorage().<Collection<String>, Object>getMap(id+"-attributes"));
        if (oldAttribs.asRawMap().size() > 0) {
            // Replay any attribute values captured before the swap into the new backing map
            for (Map.Entry<Collection<String>,Object> entry : oldAttribs.asRawMap().entrySet()) {
                attributesInternal.update(entry.getKey(), entry.getValue());
            }
        }
    }
    @Override
    public Map<String, String> toMetadataRecord() {
        // No extra metadata by default; subclasses may override
        return ImmutableMap.of();
    }

    /** @return the creation time of this entity, in epoch milliseconds */
    @Override
    public long getCreationTime() {
        return creationTimeUtc.get();
    }

    @Override
    public String getDisplayName() {
        return displayName.get();
    }

    @Override
    public String getIconUrl() {
        return iconUrl.get();
    }

    /**
     * Sets the display name, marks it as explicitly chosen (so defaults no longer apply),
     * and notifies the entity-change listener.
     */
    @Override
    public void setDisplayName(String newDisplayName) {
        displayName.set(newDisplayName);
        displayNameAutoGenerated = false;
        getManagementSupport().getEntityChangeListener().onChanged();
    }

    /** allows subclasses to set the default display name to use if none is provided */
    protected void setDefaultDisplayName(String displayNameIfDefault) {
        if (displayNameAutoGenerated) {
            displayName.set(displayNameIfDefault);
        }
    }
    /**
     * Gets the entity type name, to be returned by {@code getEntityType().getName()}.
     * To be called by brooklyn internals only.
     * Can be overridden to customize the name.
     */
    protected String getEntityTypeName() {
        try {
            Class<?> typeClazz = getManagementContext().getEntityManager().getEntityTypeRegistry().getEntityTypeOf(getClass());
            String typeName = typeClazz.getCanonicalName();
            // Anonymous/local classes have no canonical name; fall back to the binary name
            if (typeName == null) typeName = typeClazz.getName();
            return typeName;
        } catch (IllegalArgumentException e) {
            // No registered entity-type interface; use this concrete class's name instead
            String typeName = getClass().getCanonicalName();
            if (typeName == null) typeName = getClass().getName();
            LOG.debug("Entity type interface not found for entity "+this+"; instead using "+typeName+" as entity type name");
            return typeName;
        }
    }
/**
 * Called by framework (in new-style entities) after configuring, setting parent, etc,
 * but before a reference to this entity is shared with other entities.
 *
 * To preserve backwards compatibility for if the entity is constructed directly, one
 * can add to the start method the code below, but that means it will be called after
 * references to this entity have been shared with other entities.
 * <pre>
 * {@code
 * if (isLegacyConstruction()) {
 *     init();
 * }
 * }
 * </pre>
 */
public void init() {
// no-op; subclasses override to perform post-construction initialization
}
/**
 * Called by framework (in new-style entities where EntitySpec was used) on rebind,
 * after configuring but before the entity is managed.
 * Note that {@link #init()} will not be called on rebind.
 */
public void rebind() {
// no-op; subclasses override to re-establish transient state after rebind
}
/**
 * Adds this as a child of the given entity; registers with application if necessary.
 */
@Override
public AbstractEntity setParent(Entity entity) {
if (!parent.isNull()) {
// If we are changing to the same parent...
if (parent.contains(entity)) return this;
// If we have a parent but changing to orphaned...
if (entity==null) { clearParent(); return this; }
// We have a parent and are changing to another parent...
throw new UnsupportedOperationException("Cannot change parent of "+this+" from "+parent+" to "+entity+" (parent change not supported)");
}
// If we have previously had a parent and are trying to change to another one...
if (previouslyOwned && entity != null)
throw new UnsupportedOperationException("Cannot set a parent of "+this+" because it has previously had a parent");
// We don't have a parent, never have and are changing to having a parent...
//make sure there is no loop
if (this.equals(entity)) throw new IllegalStateException("entity "+this+" cannot own itself");
//this may be expensive, but preferable to throw before setting the parent!
if (Entities.isDescendant(this, entity))
throw new IllegalStateException("loop detected trying to set parent of "+this+" as "+entity+", which is already a descendent");
parent.set(entity);
//previously tested entity!=null but that should be guaranteed?
entity.addChild(getProxyIfAvailable());
// inherited config must be refreshed now that a parent's config is visible
refreshInheritedConfig();
previouslyOwned = true;
// triggers lookup/registration of the application via the new parent chain
getApplication();
return this;
}
/** Detaches this entity from its parent (no-op if already orphaned), removing it from the parent's children. */
@Override
public void clearParent() {
if (parent.isNull()) return;
Entity oldParent = parent.get();
parent.clear();
// clear our reference first, then detach from the old parent's child collection
if (oldParent != null) oldParent.removeChild(getProxyIfAvailable());
}
/**
 * Adds the given entity as a child of this parent <em>and</em> sets this entity as the parent of the child;
 * returns argument passed in, for convenience.
 * <p>
 * The child is NOT managed, even if the parent is already managed at this point
 * (e.g. the child is added *after* the parent's {@link AbstractEntity#init()} is invoked)
 * and so will need an explicit <code>getEntityManager().manage(childReturnedFromThis)</code> call.
 * <i>These semantics are currently under review.</i>
 */
@Override
public <T extends Entity> T addChild(T child) {
checkNotNull(child, "child must not be null (for entity %s)", this);
boolean changed;
synchronized (children) {
// cycle guard: adding an ancestor as a child would create a loop
if (Entities.isAncestor(this, child)) throw new IllegalStateException("loop detected trying to add child "+child+" to "+this+"; it is already an ancestor");
// link the back-reference before exposing the child in our collection
child.setParent(getProxyIfAvailable());
changed = children.add(child);
getManagementSupport().getEntityChangeListener().onChildrenChanged();
}
// TODO not holding synchronization lock while notifying risks out-of-order if addChild+removeChild called in rapid succession.
// But doing notification in synchronization block may risk deadlock?
if (changed) {
emit(AbstractEntity.CHILD_ADDED, child);
}
return child;
}
/**
 * Creates an entity using the given spec, and adds it as a child of this entity.
 *
 * @see #addChild(Entity)
 * @see EntityManager#createEntity(EntitySpec)
 *
 * @throws IllegalArgumentException If {@code spec.getParent()} is set and is different from this entity
 */
@Override
public <T extends Entity> T addChild(EntitySpec<T> spec) {
// a spec may pre-declare a parent; it must be absent or this entity
if (spec.getParent() != null && !this.equals(spec.getParent())) {
throw new IllegalArgumentException("Attempt to create child of "+this+" with entity spec "+spec+
" failed because spec has different parent: "+spec.getParent());
}
return addChild(getEntityManager().createEntity(spec));
}
/** Removes the given child, clearing its parent reference; returns whether the child was present. */
@Override
public boolean removeChild(Entity child) {
boolean changed;
synchronized (children) {
changed = children.remove(child);
// note: clearParent is invoked even when the entity was not actually our child
child.clearParent();
if (changed) {
getManagementSupport().getEntityChangeListener().onChildrenChanged();
}
}
// event emitted outside the lock; see ordering caveat in addChild
if (changed) {
emit(AbstractEntity.CHILD_REMOVED, child);
}
return changed;
}
/**
 * Adds this as a member of the given group, registers with application if necessary
 */
@Override
public void addGroup(Group e) {
groups.add(e);
// triggers application lookup/registration as a side effect
getApplication();
}
/** Returns the current parent entity, or null if orphaned. */
@Override
public Entity getParent() {
return parent.get();
}
/** Returns an immutable snapshot of the current children. */
@Override
public Collection<Entity> getChildren() {
return ImmutableList.copyOf(children);
}
/** Returns an immutable snapshot of the groups this entity belongs to. */
@Override
public Collection<Group> getGroups() {
return ImmutableList.copyOf(groups);
}
/**
 * Returns the application, looking it up if not yet known (registering if necessary)
 */
@Override
public Application getApplication() {
if (application != null) return application;
// local variable intentionally shadows the 'parent' reference field; holds the resolved parent entity
Entity parent = getParent();
Application app = (parent != null) ? parent.getApplication() : null;
if (app != null) {
if (getManagementSupport().isFullyManaged())
// only do this once fully managed, in case root app becomes parented
setApplication(app);
}
return app;
}
// FIXME Can this really be deleted? Overridden by AbstractApplication; needs careful review
/** @deprecated since 0.4.0 should not be needed / leaked outwith brooklyn internals / mgmt support? */
protected synchronized void setApplication(Application app) {
if (application != null) {
// compare ids with equals(): the previous code used != which compares String
// identity, so two distinct-but-equal id strings would have wrongly thrown here
if (!application.getId().equals(app.getId())) {
throw new IllegalStateException("Cannot change application of entity (attempted for "+this+" from "+getApplication()+" to "+app);
}
}
this.application = app;
}
/** Returns the id of the owning application, or null if not yet attached to one. */
@Override
public String getApplicationId() {
// resolve the application first, then guard against the not-yet-attached case
Application app = getApplication();
if (app == null) return null;
return app.getId();
}
/** Returns the management context, obtained via the entity's management support. */
@Override
public synchronized ManagementContext getManagementContext() {
return getManagementSupport().getManagementContext();
}
/** Convenience for the entity manager of the current management context. */
protected EntityManager getEntityManager() {
return getManagementContext().getEntityManager();
}
/** Returns an immutable snapshot of the entity type, or null if the type has not been initialized. */
@Override
public EntityType getEntityType() {
if (entityType==null) return null;
return entityType.getSnapshot();
}
/** Returns the live, mutable entity type (unlike the snapshot from {@link #getEntityType()}). */
@Override
public EntityDynamicType getMutableEntityType() {
return entityType;
}
/** Returns an immutable snapshot of the entity's locations, taken under the locations lock. */
@Override
public Collection<Location> getLocations() {
synchronized (locations) {
return ImmutableList.copyOf(locations.get());
}
}
/** Adds the given locations, ignoring any already present, and notifies the change listener. */
@Override
public void addLocations(Collection<? extends Location> newLocations) {
synchronized (locations) {
List<Location> oldLocations = locations.get();
// keep only locations not already attached, preserving caller order
Set<Location> truelyNewLocations = Sets.newLinkedHashSet(newLocations);
truelyNewLocations.removeAll(oldLocations);
if (truelyNewLocations.size() > 0) {
locations.set(ImmutableList.<Location>builder().addAll(oldLocations).addAll(truelyNewLocations).build());
}
}
if (getManagementSupport().isDeployed()) {
for (Location newLocation : newLocations) {
// Location is now reachable, so manage it
// TODO will not be required in future releases when creating locations always goes through LocationManager.createLocation(LocationSpec).
Locations.manage(newLocation, getManagementContext());
}
}
getManagementSupport().getEntityChangeListener().onLocationsChanged();
}
/** Removes the given locations from this entity (they are not unmanaged; see note below). */
@Override
public void removeLocations(Collection<? extends Location> removedLocations) {
synchronized (locations) {
List<Location> oldLocations = locations.get();
locations.set(MutableList.<Location>builder().addAll(oldLocations).removeAll(removedLocations).buildImmutable());
}
// TODO Not calling `Entities.unmanage(removedLocation)` because this location might be shared with other entities.
// Relying on abstractLocation.removeChildLocation unmanaging it, but not ideal as top-level locations will stick
// around forever, even if not referenced.
// Same goes for AbstractEntity#clearLocations().
getManagementSupport().getEntityChangeListener().onLocationsChanged();
}
/** Removes all locations from this entity and notifies the change listener. */
@Override
public void clearLocations() {
synchronized (locations) {
locations.set(ImmutableList.<Location>of());
}
getManagementSupport().getEntityChangeListener().onLocationsChanged();
}
/** Returns the first location; throws IndexOutOfBoundsException if there are none (per Iterables.get). */
public Location firstLocation() {
synchronized (locations) {
return Iterables.get(locations.get(), 0);
}
}
/**
 * Should be invoked at end-of-life to clean up the item.
 */
@Override
public void destroy() {
// no-op in the base class; subclasses override to release resources
}
/** Returns the current value of the given attribute sensor, or null if unset. */
@Override
public <T> T getAttribute(AttributeSensor<T> attribute) {
return attributesInternal.getValue(attribute);
}
/** Looks up an attribute value by its name parts; the caller asserts the expected type {@code T}. */
@SuppressWarnings("unchecked") // cast is unavoidable: the name-parts lookup is untyped; scoped suppression added
public <T> T getAttributeByNameParts(List<String> nameParts) {
return (T) attributesInternal.getValue(nameParts);
}
/** Sets an attribute value, registering the sensor if new, and publishes the change; returns the old value. */
@Override
public <T> T setAttribute(AttributeSensor<T> attribute, T val) {
T result = attributesInternal.update(attribute, val);
if (result == null) {
// could be this is a new sensor
entityType.addSensorIfAbsent(attribute);
}
getManagementSupport().getEntityChangeListener().onAttributeChanged(attribute);
return result;
}
/** As {@link #setAttribute(AttributeSensor, Object)} but suppresses sensor-event publication. */
@Override
public <T> T setAttributeWithoutPublishing(AttributeSensor<T> attribute, T val) {
T result = attributesInternal.updateWithoutPublishing(attribute, val);
if (result == null) {
// could be this is a new sensor
entityType.addSensorIfAbsentWithoutPublishing(attribute);
}
getManagementSupport().getEntityChangeListener().onAttributeChanged(attribute);
return result;
}
/** Removes the attribute value and deregisters the sensor from the entity type. */
@Override
public void removeAttribute(AttributeSensor<?> attribute) {
attributesInternal.remove(attribute);
entityType.removeSensor(attribute);
}
/** sets the value of the given attribute sensor from the config key value herein
 * if the attribtue sensor is not-set or null
 * <p>
 * returns old value
 * @deprecated on interface since 0.5.0; use {@link ConfigToAttributes#apply(EntityLocal, AttributeSensorAndConfigKey)} */
public <T> T setAttribute(AttributeSensorAndConfigKey<?,T> configuredSensor) {
// already-set, non-null values win over the configured default
T v = getAttribute(configuredSensor);
if (v!=null) return v;
v = configuredSensor.getAsSensorValue(this);
if (v!=null) return setAttribute(configuredSensor, v);
return null;
}
/** Returns the (possibly inherited) config value for the given key. */
@Override
public <T> T getConfig(ConfigKey<T> key) {
return configsInternal.getConfig(key);
}
/** As {@link #getConfig(ConfigKey)}, for a {@code HasConfigKey} wrapper. */
@Override
public <T> T getConfig(HasConfigKey<T> key) {
return configsInternal.getConfig(key);
}
/** As {@link #getConfig(HasConfigKey)} but with an explicit fallback value. */
@Override
public <T> T getConfig(HasConfigKey<T> key, T defaultValue) {
return configsInternal.getConfig(key, defaultValue);
}
//don't use groovy defaults for defaultValue as that doesn't implement the contract; we need the above
/** As {@link #getConfig(ConfigKey)} but with an explicit fallback value. */
@Override
public <T> T getConfig(ConfigKey<T> key, T defaultValue) {
return configsInternal.getConfig(key, defaultValue);
}
/** Returns the raw (uncoerced) config value, optionally searching inherited config. */
@Override
public Maybe<Object> getConfigRaw(ConfigKey<?> key, boolean includeInherited) {
return configsInternal.getConfigRaw(key, includeInherited);
}
/** As {@link #getConfigRaw(ConfigKey, boolean)}, unwrapping the {@code HasConfigKey}. */
@Override
public Maybe<Object> getConfigRaw(HasConfigKey<?> key, boolean includeInherited) {
return getConfigRaw(key.getConfigKey(), includeInherited);
}
/** Shared implementation for all setConfig variants; warns when mutating config post-deployment; returns old value. */
@SuppressWarnings("unchecked")
private <T> T setConfigInternal(ConfigKey<T> key, Object val) {
if (!inConstruction && getManagementSupport().isDeployed()) {
// previously we threw, then warned, but it is still quite common;
// so long as callers don't expect miracles, it should be fine.
// i (Alex) think the way to be stricter about this (if that becomes needed)
// would be to introduce a 'mutable' field on config keys
LOG.debug("configuration being made to {} after deployment: {} = {}; change may not be visible in other contexts",
new Object[] { this, key, val });
}
T result = (T) configsInternal.setConfig(key, val);
getManagementSupport().getEntityChangeListener().onConfigChanged(key);
return result;
}
/** Sets a config value; returns the previous value. */
@Override
public <T> T setConfig(ConfigKey<T> key, T val) {
return setConfigInternal(key, val);
}
/** Sets a config value to a task to be resolved on retrieval; returns the previous value. */
@Override
public <T> T setConfig(ConfigKey<T> key, Task<T> val) {
return setConfigInternal(key, val);
}
/** Sets a config value to a deferred supplier resolved on retrieval; returns the previous value. */
public <T> T setConfig(ConfigKey<T> key, DeferredSupplier val) {
return setConfigInternal(key, val);
}
/** As {@link #setConfig(ConfigKey, Object)}, unwrapping the {@code HasConfigKey}. */
@Override
public <T> T setConfig(HasConfigKey<T> key, T val) {
return setConfig(key.getConfigKey(), val);
}
/** As {@link #setConfig(ConfigKey, Task)}, unwrapping the {@code HasConfigKey}. */
@Override
public <T> T setConfig(HasConfigKey<T> key, Task<T> val) {
return (T) setConfig(key.getConfigKey(), val);
}
/** As {@link #setConfig(ConfigKey, DeferredSupplier)}, unwrapping the {@code HasConfigKey}. */
public <T> T setConfig(HasConfigKey<T> key, DeferredSupplier val) {
return setConfig(key.getConfigKey(), val);
}
/** Sets config bypassing the deployment-state check and change notification in setConfigInternal. */
public <T> T setConfigEvenIfOwned(ConfigKey<T> key, T val) {
return (T) configsInternal.setConfig(key, val);
}
/** As {@link #setConfigEvenIfOwned(ConfigKey, Object)}, unwrapping the {@code HasConfigKey}. */
public <T> T setConfigEvenIfOwned(HasConfigKey<T> key, T val) {
return setConfigEvenIfOwned(key.getConfigKey(), val);
}
/** Convenience: sets the config only when the supplied value is non-null. */
protected void setConfigIfValNonNull(ConfigKey key, Object val) {
if (val != null) setConfig(key, val);
}
/** Convenience: sets the config only when the supplied value is non-null. */
protected void setConfigIfValNonNull(HasConfigKey key, Object val) {
if (val != null) setConfig(key, val);
}
/** Re-derives inherited config from the current parent (or clears it if orphaned), then cascades to children. */
@Override
public void refreshInheritedConfig() {
if (getParent() != null) {
configsInternal.setInheritedConfig(((EntityInternal)getParent()).getAllConfig(), ((EntityInternal)getParent()).getAllConfigBag());
} else {
configsInternal.clearInheritedConfig();
}
// children inherit from us, so they must refresh too
refreshInheritedConfigOfChildren();
}
/** Recursively refreshes inherited config of all children (snapshot iteration via getChildren). */
void refreshInheritedConfigOfChildren() {
for (Entity it : getChildren()) {
((EntityInternal)it).refreshInheritedConfig();
}
}
/** Returns the live internal config map (not a copy). */
@Override
public EntityConfigMap getConfigMap() {
return configsInternal;
}
/** Returns all config (local and inherited) as a key-value map. */
@Override
public Map<ConfigKey<?>,Object> getAllConfig() {
return configsInternal.getAllConfig();
}
/** Returns all config (local and inherited) as a {@link ConfigBag}. */
@Beta
@Override
public ConfigBag getAllConfigBag() {
return configsInternal.getAllConfigBag();
}
/** Returns only locally-set config (no inherited entries) as a {@link ConfigBag}. */
@Beta
@Override
public ConfigBag getLocalConfigBag() {
return configsInternal.getLocalConfigBag();
}
/**
 * Returns a snapshot of all attribute values keyed by their {@link AttributeSensor}.
 * Attributes whose name has no matching sensor on the entity type are skipped with a warning.
 */
@Override
public Map<AttributeSensor, Object> getAllAttributes() {
Map<AttributeSensor, Object> out = Maps.newLinkedHashMap();
for (Map.Entry<String, Object> entry : attributesInternal.asMap().entrySet()) {
String sensorName = entry.getKey();
AttributeSensor sensor = (AttributeSensor) entityType.getSensor(sensorName);
if (sensor == null) {
LOG.warn("When retrieving all attributes of {}, ignoring attribute {} because no matching AttributeSensor found", this, sensorName);
} else {
out.put(sensor, entry.getValue());
}
}
return out;
}
/** @see EntityLocal#subscribe */
@Override
public <T> SubscriptionHandle subscribe(Entity producer, Sensor<T> sensor, SensorEventListener<? super T> listener) {
// all subscriptions funnel through the shared tracker for later bulk unsubscribe
return getSubscriptionTracker().subscribe(producer, sensor, listener);
}
/** @see EntityLocal#subscribeToChildren */
@Override
public <T> SubscriptionHandle subscribeToChildren(Entity parent, Sensor<T> sensor, SensorEventListener<? super T> listener) {
// delegates to the shared subscription tracker
return getSubscriptionTracker().subscribeToChildren(parent, sensor, listener);
}
/** @see EntityLocal#subscribeToMembers */
@Override
public <T> SubscriptionHandle subscribeToMembers(Group group, Sensor<T> sensor, SensorEventListener<? super T> listener) {
// delegates to the shared subscription tracker
return getSubscriptionTracker().subscribeToMembers(group, sensor, listener);
}
/**
 * Unsubscribes the given producer.
 *
 * @see SubscriptionContext#unsubscribe(SubscriptionHandle)
 */
@Override
public boolean unsubscribe(Entity producer) {
// removes all of this entity's subscriptions to that producer
return getSubscriptionTracker().unsubscribe(producer);
}
/**
 * Unsubscribes the given handle.
 *
 * @see SubscriptionContext#unsubscribe(SubscriptionHandle)
 */
@Override
public boolean unsubscribe(Entity producer, SubscriptionHandle handle) {
// removes a single subscription identified by its handle
return getSubscriptionTracker().unsubscribe(producer, handle);
}
/** Returns the subscription context from the entity's management support. */
@Override
public synchronized SubscriptionContext getSubscriptionContext() {
return getManagementSupport().getSubscriptionContext();
}
/** Lazily creates the subscription tracker; synchronized so at most one instance is built. */
protected synchronized SubscriptionTracker getSubscriptionTracker() {
if (_subscriptionTracker == null) {
_subscriptionTracker = new SubscriptionTracker(getSubscriptionContext());
}
return _subscriptionTracker;
}
/** Returns the execution context from the entity's management support. */
@Override
public synchronized ExecutionContext getExecutionContext() {
return getManagementSupport().getExecutionContext();
}
/** Default String representation is simplified name of class, together with selected fields. */
@Override
public String toString() {
return toStringHelper().toString();
}
/**
 * Override this to add to the toString(), e.g. {@code return super.toStringHelper().add("port", port);}
 *
 * Cannot be used in combination with overriding the deprecated toStringFieldsToInclude.
 */
protected ToStringHelper toStringHelper() {
// only the id is included by default, keeping log lines compact
return Objects.toStringHelper(this).omitNullValues().add("id", getId());
// make output more concise by suppressing display name
// .add("name", getDisplayName());
}
// -------- POLICIES --------------------
/** Returns an immutable snapshot of the policies attached to this entity. */
@Override
public Collection<Policy> getPolicies() {
return ImmutableList.<Policy>copyOf(policies);
}
/** Attaches the given policy to this entity, wiring it up and emitting POLICY_ADDED. */
@Override
public void addPolicy(Policy policy) {
policies.add((AbstractPolicy)policy);
// give the policy a back-reference so it can act on this entity
((AbstractPolicy)policy).setEntity(this);
getManagementSupport().getEntityChangeListener().onPolicyAdded(policy);
emit(AbstractEntity.POLICY_ADDED, new PolicyDescriptor(policy));
}
/** Creates a policy from the given spec and attaches it; returns the created policy. */
@Override
public <T extends Policy> T addPolicy(PolicySpec<T> spec) {
T policy = getManagementContext().getEntityManager().createPolicy(spec);
addPolicy(policy);
return policy;
}
/** Creates an enricher from the given spec and attaches it; returns the created enricher. */
@Override
public <T extends Enricher> T addEnricher(EnricherSpec<T> spec) {
T enricher = getManagementContext().getEntityManager().createEnricher(spec);
addEnricher(enricher);
return enricher;
}
/** Destroys and detaches the given policy; returns whether it was attached. */
@Override
public boolean removePolicy(Policy policy) {
// NOTE(review): destroy() is invoked before checking membership, so a policy
// never attached here is still destroyed — confirm this is intended
((AbstractPolicy)policy).destroy();
boolean changed = policies.remove(policy);
if (changed) {
getManagementSupport().getEntityChangeListener().onPolicyRemoved(policy);
emit(AbstractEntity.POLICY_REMOVED, new PolicyDescriptor(policy));
}
return changed;
}
/** Removes every attached policy; returns whether any were removed. */
@Override
public boolean removeAllPolicies() {
boolean changed = false;
// iterate over a snapshot: removePolicy mutates the backing collection, which
// risked ConcurrentModificationException when iterating it directly
for (Policy policy : getPolicies()) {
// respect removePolicy's result instead of unconditionally reporting a change
// (mirrors removeAllEnrichers)
changed = removePolicy(policy) || changed;
}
return changed;
}
/** Returns an immutable snapshot of the enrichers attached to this entity. */
@Override
public Collection<Enricher> getEnrichers() {
return ImmutableList.<Enricher>copyOf(enrichers);
}
/** Attaches the given enricher to this entity and wires it up. */
@Override
public void addEnricher(Enricher enricher) {
enrichers.add((AbstractEnricher) enricher);
// give the enricher a back-reference so it can act on this entity
((AbstractEnricher)enricher).setEntity(this);
getManagementSupport().getEntityChangeListener().onEnricherAdded(enricher);
// TODO Could add equivalent of AbstractEntity.POLICY_ADDED for enrichers; no use-case for that yet
}
/** Destroys and detaches the given enricher; returns whether it was attached. */
@Override
public boolean removeEnricher(Enricher enricher) {
// destroy happens before the membership check, same pattern as removePolicy
((AbstractEnricher)enricher).destroy();
boolean changed = enrichers.remove(enricher);
if (changed) {
getManagementSupport().getEntityChangeListener().onEnricherRemoved(enricher);
}
return changed;
}
/** Removes every attached enricher; returns whether any were removed. */
@Override
public boolean removeAllEnrichers() {
boolean changed = false;
// iterate over a snapshot: removeEnricher removes from 'enrichers' while we iterate,
// which risked ConcurrentModificationException on a fail-fast collection
for (Enricher enricher : getEnrichers()) {
changed = removeEnricher(enricher) || changed;
}
return changed;
}
// -------- SENSORS --------------------
/** Publishes a sensor event; warns loudly on the two common misuses (attribute sensors, pre-wrapped events). */
@Override
public <T> void emit(Sensor<T> sensor, T val) {
if (sensor instanceof AttributeSensor) {
LOG.warn("Strongly discouraged use of emit with attribute sensor "+sensor+" "+val+"; use setAttribute instead!",
new Throwable("location of discouraged attribute "+sensor+" emit"));
}
if (val instanceof SensorEvent) {
LOG.warn("Strongly discouraged use of emit with sensor event as value "+sensor+" "+val+"; value should be unpacked!",
new Throwable("location of discouraged event "+sensor+" emit"));
}
if (LOG.isDebugEnabled()) LOG.debug("Emitting sensor notification {} value {} on {}", new Object[] {sensor.getName(), val, this});
emitInternal(sensor, val);
}
/** Publishes the event without the misuse checks of {@link #emit}; silently no-op if no subscription context. */
public <T> void emitInternal(Sensor<T> sensor, T val) {
SubscriptionContext subsContext = getSubscriptionContext();
if (subsContext != null) subsContext.publish(sensor.newEvent(getProxyIfAvailable(), val));
}
// -------- EFFECTORS --------------
/** Convenience for finding named effector in {@link EntityType#getEffectors()} {@link Map}. */
public Effector<?> getEffector(String effectorName) {
return entityType.getEffector(effectorName);
}
/** Invoke an {@link Effector} directly. */
public <T> Task<T> invoke(Effector<T> eff) {
// no-parameter convenience form
return invoke(MutableMap.of(), eff);
}
/** Legacy argument order (groovy-style named-parameter convention); delegates to {@link #invoke(Effector, Map)}. */
public <T> Task<T> invoke(Map parameters, Effector<T> eff) {
return invoke(eff, parameters);
}
/**
 * Additional form supplied for when the parameter map needs to be made explicit.
 *
 * @see #invoke(Effector)
 */
@Override
public <T> Task<T> invoke(Effector<T> eff, Map<String,?> parameters) {
// asynchronous: callers block on the returned task if they need the result
return EffectorUtils.invokeEffectorAsync(this, eff, parameters);
}
/**
 * Invoked by {@link EntityManagementSupport} when this entity is becoming managed (i.e. it has a working
 * management context, but before the entity is visible to other entities).
 */
public void onManagementStarting() {
if (isLegacyConstruction()) {
// legacy path: type name and default display name were not set by an EntitySpec
entityType.setName(getEntityTypeName());
if (displayNameAutoGenerated) displayName.set(getEntityType().getSimpleName()+":"+Strings.maxlen(id, 4));
}
}
/**
 * Invoked by {@link EntityManagementSupport} when this entity is fully managed and visible to other entities
 * through the management context.
 */
public void onManagementStarted() {} // no-op hook for subclasses
// FIXME Really deprecated? I don't want folk to have to override createManagementSupport for simple use-cases
/**
 * Invoked by {@link ManagementContext} when this entity becomes managed at a particular management node,
 * including the initial management started and subsequent management node master-change for this entity.
 * @deprecated since 0.4.0 override EntityManagementSupport.onManagementStarting if customization needed
 */
public void onManagementBecomingMaster() {} // no-op hook for subclasses
// FIXME Really deprecated? I don't want folk to have to override createManagementSupport for simple use-cases
/**
 * Invoked by {@link ManagementContext} when this entity becomes mastered at a particular management node,
 * including the final management end and subsequent management node master-change for this entity.
 * @deprecated since 0.4.0 override EntityManagementSupport.onManagementStopping if customization needed
 */
public void onManagementNoLongerMaster() {} // no-op hook for subclasses
/**
 * Invoked by {@link EntityManagementSupport} when this entity is fully unmanaged.
 * Clears this entity's per-id maps from the shared management-plane storage.
 */
public void onManagementStopped() {
if (getManagementContext().isRunning()) {
BrooklynStorage storage = ((ManagementContextInternal)getManagementContext()).getStorage();
// remove each per-entity storage entry; keep this list in sync with the
// id+"-..." keys used elsewhere in this class
// NOTE(review): "-iconUrl" is not removed here although iconUrl state exists — confirm whether intended
for (String suffix : new String[] {
"parent", "groups", "children", "locations",
"creationTime", "displayName", "config", "attributes"}) {
storage.remove(id+"-"+suffix);
}
}
}
/** For use by management plane, to invalidate all fields (e.g. when an entity is changing to being proxied) */
public void invalidateReferences() {
// TODO Just rely on GC of this entity instance, to get rid of the children map etc.
//      Don't clear it, as it's persisted.
// TODO move this to EntityMangementSupport,
// currently only the cached application reference is dropped
application = null;
}
/** Returns the management support object for this entity. */
@Override
public EntityManagementSupport getManagementSupport() {
return managementSupport;
}
/** Requests that this entity's state be persisted, via the change listener. */
@Override
public void requestPersist() {
getManagementSupport().getEntityChangeListener().onChanged();
}
/** Returns a fresh rebind-support helper for persisting/restoring this entity's memento. */
@Override
public RebindSupport<EntityMemento> getRebindSupport() {
return new BasicEntityRebindSupport(this);
}
/** Returns an immutable snapshot of this entity's tags, taken under the tags lock. */
@Override
public Set<Object> getTags() {
synchronized (tags) {
return ImmutableSet.copyOf(tags);
}
}
/** Adds a tag; returns whether it was newly added. */
@Override
public boolean addTag(Object tag) {
synchronized (tags) {
return tags.add(tag);
}
}
/** Removes a tag; returns whether it was present. */
@Override
public boolean removeTag(Object tag) {
synchronized (tags) {
return tags.remove(tag);
}
}
/** Returns whether the given tag is present. */
@Override
public boolean containsTag(Object tag) {
synchronized (tags) {
return tags.contains(tag);
}
}
/** Warns when an entity was constructed but never managed (a common legacy-construction mistake). */
// NOTE(review): finalize() is deprecated in modern Java; consider a Cleaner/phantom-reference approach — confirm target JDK
@Override
protected void finalize() throws Throwable {
super.finalize();
if (!getManagementSupport().wasDeployed())
LOG.warn("Entity "+this+" was never deployed -- explicit call to manage(Entity) required.");
}
}
| |
/*
* Copyright (c) 2015, SRI International
* All rights reserved.
* Licensed under the The BSD 3-Clause License;
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://opensource.org/licenses/BSD-3-Clause
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* Neither the name of the aic-praise nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package com.sri.ai.praise.core.representation.classbased.table.core.uai;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import com.google.common.annotations.Beta;
import com.sri.ai.praise.core.representation.classbased.table.api.MarkovNetwork;
import com.sri.ai.praise.core.representation.classbased.table.core.data.FunctionTable;
/**
 * In memory representation of an Uncertainty in Artificial Intelligence (UAI)
 * <a href="http://www.hlt.utdallas.edu/~vgogate/uai14-competition/modelformat.html">Inference Competition FactorNetwork</a>.
 *
 * @author oreilly
 */
@Beta
public class UAIModel implements MarkovNetwork {
    private UAIModelType type;
    // variable index -> its cardinality (number of discrete values)
    private Map<Integer, Integer> variableIndexToCardinality = new LinkedHashMap<>();
    // per table instance, the ordered variable indexes it ranges over
    private List<List<Integer>> tableInstanceVariableIndexes = new ArrayList<>();
    private Map<Integer, FunctionTable> tableInstanceIndexToTable = new LinkedHashMap<>();
    //
    // evidence: variable index -> observed value index
    private Map<Integer, Integer> evidence = new LinkedHashMap<>();
    //
    // de-duplication of identical function tables; derived, rebuilt by computeUniqueMappings()
    private Map<Integer, FunctionTable> uniqueTableIdxToUniqueTable = new LinkedHashMap<>();
    private Map<FunctionTable, List<Integer>> uniqueTableToTableInstanceIdxs = new LinkedHashMap<>();
    //
    // MAR solution: variable index -> marginal distribution over its values
    private Map<Integer, List<Double>> marSolution = new LinkedHashMap<>();

    /**
     * Constructs the model from parsed UAI file content; defensive copies are taken of all arguments.
     */
    public UAIModel(UAIModelType type,
            Map<Integer, Integer> variableIndexToCardinality,
            List<List<Integer>> tableInstanceVariableIndexes,
            Map<Integer, FunctionTable> tableInstanceIndexToTable) {
        this.type = type;
        this.variableIndexToCardinality.putAll(variableIndexToCardinality);
        this.tableInstanceVariableIndexes.addAll(tableInstanceVariableIndexes);
        this.tableInstanceIndexToTable.putAll(tableInstanceIndexToTable);
        computeUniqueMappings();
    }

    /** Returns the model type (e.g. MARKOV or BAYES, per the UAI preamble). */
    public UAIModelType getType() {
        return type;
    }

    //
    // START-GraphicalNetwork
    @Override
    public int numberVariables() {
        return variableIndexToCardinality.size();
    }

    /** Returns the cardinality of the given variable; throws NullPointerException if the index is unknown. */
    @Override
    public int cardinality(int varIdx) {
        return variableIndexToCardinality.get(varIdx);
    }

    @Override
    public int numberUniqueFunctionTables() {
        return uniqueTableIdxToUniqueTable.size();
    }

    @Override
    public FunctionTable getUniqueFunctionTable(int uniqueFunctionTableIdx) {
        return uniqueTableIdxToUniqueTable.get(uniqueFunctionTableIdx);
    }

    @Override
    public int numberTables() {
        return tableInstanceVariableIndexes.size();
    }

    @Override
    public FunctionTable getTable(int tableIdx) {
        return tableInstanceIndexToTable.get(tableIdx);
    }

    @Override
    public List<Integer> getVariableIndexesForTable(int tableIdx) {
        return tableInstanceVariableIndexes.get(tableIdx);
    }

    /** Returns all table-instance indexes that share the given unique function table. */
    @Override
    public List<Integer> getTableIndexes(int uniqueFunctionTableIdx) {
        return uniqueTableToTableInstanceIdxs.get(getUniqueFunctionTable(uniqueFunctionTableIdx));
    }
    // END-GraphicalNetwork
    //

    public void clearEvidence() {
        this.evidence.clear();
    }

    /** Returns an unmodifiable view of the current evidence assignments. */
    public Map<Integer, Integer> getEvidence() {
        return Collections.unmodifiableMap(evidence);
    }

    /**
     * Records an evidence assignment after range-checking both the variable index and its value index.
     *
     * @throws IllegalArgumentException if either index is out of range
     */
    public void addEvidence(Integer varIdx, Integer valueIdx) {
        if (varIdx < 0 || varIdx >= numberVariables()) {
            throw new IllegalArgumentException("Not a legal variable index: "+varIdx+" must be in interval [0, "+numberVariables()+")");
        }
        Integer cardinality = cardinality(varIdx);
        if (valueIdx < 0 || valueIdx >= cardinality) {
            // fixed typo in message: "leval" -> "legal"
            throw new IllegalArgumentException("Not a legal value index: "+valueIdx+" must be in interval [0, "+cardinality+")");
        }
        evidence.put(varIdx, valueIdx);
    }

    /**
     * Folds the current evidence into the model as indicator factors (entry 1 for the observed
     * value, 0 otherwise) and recomputes the unique-table mappings.
     * NOTE(review): the evidence map is not cleared afterwards, so calling this twice adds
     * duplicate indicator factors — confirm callers invoke it at most once.
     */
    public void mergeEvidenceIntoModel() {
        if (evidence.size() > 0) {
            // For each evidence assignment create function table with an entry = 1
            // for the assignment value and 0 for all other values.
            for (Map.Entry<Integer, Integer> evidenceAssignment : evidence.entrySet()) {
                Integer evidenceVarIndex = evidenceAssignment.getKey();
                int evidenceValue = evidenceAssignment.getValue();
                int varCardinality = variableIndexToCardinality.get(evidenceVarIndex);
                List<Double> entries = new ArrayList<>();
                for (int i = 0; i < varCardinality; i++) {
                    if (i == evidenceValue) {
                        entries.add(1.0);
                    }
                    else {
                        entries.add(0.0);
                    }
                }
                FunctionTable evidenceFactor = new FunctionTable(Arrays.asList(varCardinality), entries);
                //
                // Merge in with the other factor information
                tableInstanceVariableIndexes.add(Arrays.asList(evidenceVarIndex));
                tableInstanceIndexToTable.put(tableInstanceVariableIndexes.size()-1, evidenceFactor);
            }
            // Ensure the unique mapping information is re-created.
            computeUniqueMappings();
        }
    }

    public void clearMARSolution() {
        this.marSolution.clear();
    }

    /**
     * Records the marginal (MAR) solution for a variable.
     *
     * @throws IllegalArgumentException if the variable index is unknown or the value count does not match its cardinality
     */
    public void addMARSolution(Integer varIdx, List<Double> values) {
        Integer cardinality = variableIndexToCardinality.get(varIdx);
        if (cardinality == null) {
            // fixed typo in message: "give" -> "given"
            throw new IllegalArgumentException("ExpressionVariable Index is invalid, given "+ varIdx +" must be in interval [0, "+numberVariables()+")");
        }
        if (cardinality != values.size()) {
            throw new IllegalArgumentException("Size of values given, "+values.size()+", does not match variables cardinality, which is "+cardinality);
        }
        // defensive copy so later mutation of the caller's list cannot corrupt the solution
        this.marSolution.put(varIdx, new ArrayList<>(values));
    }

    /** Returns an unmodifiable view of the MAR solution recorded so far. */
    public Map<Integer, List<Double>> getMARSolution() {
        return Collections.unmodifiableMap(this.marSolution);
    }

    @Override
    public String toString() {
        return "UAI model #vars="+numberVariables()+", #tables="+numberTables()+", #unique function tables="+numberUniqueFunctionTables()+", ratio="+ratioUniqueTablesToTables();
    }

    /** Rebuilds the derived unique-table maps from the current table instances. */
    private void computeUniqueMappings() {
        uniqueTableIdxToUniqueTable.clear();
        uniqueTableToTableInstanceIdxs.clear();
        for (Map.Entry<Integer, FunctionTable> entry : this.tableInstanceIndexToTable.entrySet()) {
            List<Integer> tableInstanceIndexesForUniqueTable = this.uniqueTableToTableInstanceIdxs.get(entry.getValue());
            if (tableInstanceIndexesForUniqueTable == null) {
                // first sighting of this table: register it under the next unique index
                tableInstanceIndexesForUniqueTable = new ArrayList<>();
                this.uniqueTableToTableInstanceIdxs.put(entry.getValue(), tableInstanceIndexesForUniqueTable);
                this.uniqueTableIdxToUniqueTable.put(this.uniqueTableIdxToUniqueTable.size(), entry.getValue());
            }
            tableInstanceIndexesForUniqueTable.add(entry.getKey());
        }
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.slicer;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.ide.util.treeView.AbstractTreeUi;
import com.intellij.openapi.application.ReadAction;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.progress.ProgressIndicator;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorBase;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Ref;
import com.intellij.psi.PsiElement;
import com.intellij.ui.DuplicateNodeRenderer;
import com.intellij.usageView.UsageViewBundle;
import com.intellij.util.ArrayUtil;
import com.intellij.util.Processor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
* @author cdr
*/
public class SliceNode extends AbstractTreeNode<SliceUsage> implements DuplicateNodeRenderer.DuplicatableNode<SliceNode>, MyColoredTreeCellRenderer {
protected List<SliceNode> myCachedChildren;
boolean dupNodeCalculated;
protected SliceNode duplicate;
final DuplicateMap targetEqualUsages;
protected boolean changed;
private int index; // my index in parent's mycachedchildren
protected SliceNode(@NotNull Project project, SliceUsage sliceUsage, @NotNull DuplicateMap targetEqualUsages) {
super(project, sliceUsage);
this.targetEqualUsages = targetEqualUsages;
}
@NotNull
SliceNode copy() {
SliceUsage newUsage = getValue().copy();
SliceNode newNode = new SliceNode(getProject(), newUsage, targetEqualUsages);
newNode.dupNodeCalculated = dupNodeCalculated;
newNode.duplicate = duplicate;
return newNode;
}
@Override
@NotNull
public Collection<SliceNode> getChildren() {
if (isUpToDate()) return myCachedChildren == null ? Collections.emptyList() : myCachedChildren;
try {
List<SliceNode> nodes;
ProgressIndicator current = ProgressManager.getInstance().getProgressIndicator();
if (current == null) {
ProgressIndicator indicator = new ProgressIndicatorBase();
indicator.start();
Ref<List<SliceNode>> nodesRef = Ref.create();
try {
ProgressManager.getInstance().executeProcessUnderProgress(
() -> nodesRef.set(doGetChildren()), indicator);
}
finally {
indicator.stop();
}
nodes = nodesRef.get();
} else {
nodes = doGetChildren();
}
synchronized (nodes) {
myCachedChildren = nodes;
}
return nodes;
} catch (ProcessCanceledException pce) {
changed = true;
throw pce;
}
}
private List<SliceNode> doGetChildren() {
return AbstractTreeUi.calculateYieldingToWriteAction(() -> {
final List<SliceNode> children = new ArrayList<>();
final ProgressIndicator progress = ProgressManager.getInstance().getProgressIndicator();
Processor<SliceUsage> processor = sliceUsage -> {
progress.checkCanceled();
SliceNode node = new SliceNode(myProject, sliceUsage, targetEqualUsages);
synchronized (children) {
node.index = children.size();
children.add(node);
}
return true;
};
getValue().processChildren(processor);
return children;
});
}
SliceNode getNext(List parentChildren) {
return index == parentChildren.size() - 1 ? null : (SliceNode)parentChildren.get(index + 1);
}
SliceNode getPrev(List parentChildren) {
return index == 0 ? null : (SliceNode)parentChildren.get(index - 1);
}
public List<SliceNode> getCachedChildren() {
return myCachedChildren;
}
private boolean isUpToDate() {
if (myCachedChildren != null || !isValid()/* || getTreeBuilder().splitByLeafExpressions*/) {
return true;
}
return false;
}
@NotNull
@Override
protected PresentationData createPresentation() {
return new PresentationData(){
@NotNull
@Override
public Object[] getEqualityObjects() {
return ArrayUtil.append(super.getEqualityObjects(), changed);
}
};
}
@Override
protected void update(PresentationData presentation) {
if (presentation != null) {
presentation.setChanged(presentation.isChanged() || changed);
changed = false;
}
}
void calculateDupNode() {
if (!dupNodeCalculated) {
if (!(getValue() instanceof SliceTooComplexDFAUsage)) {
duplicate = targetEqualUsages.putNodeCheckDupe(this);
}
dupNodeCalculated = true;
}
}
@Override
public SliceNode getDuplicate() {
return duplicate;
}
@Override
public void navigate(boolean requestFocus) {
SliceUsage sliceUsage = getValue();
sliceUsage.navigate(requestFocus);
}
@Override
public boolean canNavigate() {
return getValue().canNavigate();
}
@Override
public boolean canNavigateToSource() {
return getValue().canNavigateToSource();
}
public boolean isValid() {
return ReadAction.compute(() -> getValue().isValid());
}
@Override
public boolean expandOnDoubleClick() {
return false;
}
@Override
public void customizeCellRenderer(@NotNull SliceUsageCellRendererBase renderer, @NotNull JTree tree, Object value, boolean selected, boolean expanded, boolean leaf, int row, boolean hasFocus) {
renderer.setIcon(getPresentation().getIcon(expanded));
if (isValid()) {
SliceUsage sliceUsage = getValue();
renderer.customizeCellRendererFor(sliceUsage);
renderer.setToolTipText(sliceUsage.getPresentation().getTooltipText());
}
else {
renderer.append(UsageViewBundle.message("node.invalid") + " ", SliceUsageCellRendererBase.ourInvalidAttributes);
}
}
public void setChanged() {
changed = true;
}
@Nullable
public SliceLanguageSupportProvider getProvider(){
AbstractTreeNode<SliceUsage> element = getElement();
if(element == null){
return null;
}
SliceUsage usage = element.getValue();
if(usage == null){
return null;
}
PsiElement psiElement = usage.getElement();
if(psiElement == null){
return null;
}
return LanguageSlicing.getProvider(psiElement);
}
public String getNodeText() {
return getValue().getPresentation().getPlainText().trim();
}
@Override
public String toString() {
return ReadAction.compute(() -> getValue() == null ? "<null>" : getValue().toString());
}
}
| |
/**
* This class is generated by jOOQ
*/
package org.killbill.billing.plugin.adyen.dao.gen.tables.records;
/**
 * jOOQ-generated updatable record for the <code>killbill.adyen_responses</code>
 * table: one row per Adyen gateway response associated with a Kill Bill payment
 * transaction.
 *
 * NOTE: this class is generated by jOOQ (3.5.0) — do not edit by hand;
 * regenerate it from the schema instead.
 */
@javax.annotation.Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.5.0"
    },
    comments = "This class is generated by jOOQ"
)
@java.lang.SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class AdyenResponsesRecord extends org.jooq.impl.UpdatableRecordImpl<org.killbill.billing.plugin.adyen.dao.gen.tables.records.AdyenResponsesRecord> {
    private static final long serialVersionUID = -289707332;
    /**
     * Setter for <code>killbill.adyen_responses.record_id</code>.
     */
    public void setRecordId(org.jooq.types.UInteger value) {
        setValue(0, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.record_id</code>.
     */
    public org.jooq.types.UInteger getRecordId() {
        return (org.jooq.types.UInteger) getValue(0);
    }
    /**
     * Setter for <code>killbill.adyen_responses.kb_account_id</code>.
     */
    public void setKbAccountId(java.lang.String value) {
        setValue(1, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.kb_account_id</code>.
     */
    public java.lang.String getKbAccountId() {
        return (java.lang.String) getValue(1);
    }
    /**
     * Setter for <code>killbill.adyen_responses.kb_payment_id</code>.
     */
    public void setKbPaymentId(java.lang.String value) {
        setValue(2, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.kb_payment_id</code>.
     */
    public java.lang.String getKbPaymentId() {
        return (java.lang.String) getValue(2);
    }
    /**
     * Setter for <code>killbill.adyen_responses.kb_payment_transaction_id</code>.
     */
    public void setKbPaymentTransactionId(java.lang.String value) {
        setValue(3, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.kb_payment_transaction_id</code>.
     */
    public java.lang.String getKbPaymentTransactionId() {
        return (java.lang.String) getValue(3);
    }
    /**
     * Setter for <code>killbill.adyen_responses.transaction_type</code>.
     */
    public void setTransactionType(java.lang.String value) {
        setValue(4, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.transaction_type</code>.
     */
    public java.lang.String getTransactionType() {
        return (java.lang.String) getValue(4);
    }
    /**
     * Setter for <code>killbill.adyen_responses.amount</code>.
     */
    public void setAmount(java.math.BigDecimal value) {
        setValue(5, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.amount</code>.
     */
    public java.math.BigDecimal getAmount() {
        return (java.math.BigDecimal) getValue(5);
    }
    /**
     * Setter for <code>killbill.adyen_responses.currency</code>.
     */
    public void setCurrency(java.lang.String value) {
        setValue(6, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.currency</code>.
     */
    public java.lang.String getCurrency() {
        return (java.lang.String) getValue(6);
    }
    /**
     * Setter for <code>killbill.adyen_responses.psp_result</code>.
     */
    public void setPspResult(java.lang.String value) {
        setValue(7, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.psp_result</code>.
     */
    public java.lang.String getPspResult() {
        return (java.lang.String) getValue(7);
    }
    /**
     * Setter for <code>killbill.adyen_responses.psp_reference</code>.
     */
    public void setPspReference(java.lang.String value) {
        setValue(8, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.psp_reference</code>.
     */
    public java.lang.String getPspReference() {
        return (java.lang.String) getValue(8);
    }
    /**
     * Setter for <code>killbill.adyen_responses.auth_code</code>.
     */
    public void setAuthCode(java.lang.String value) {
        setValue(9, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.auth_code</code>.
     */
    public java.lang.String getAuthCode() {
        return (java.lang.String) getValue(9);
    }
    /**
     * Setter for <code>killbill.adyen_responses.result_code</code>.
     */
    public void setResultCode(java.lang.String value) {
        setValue(10, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.result_code</code>.
     */
    public java.lang.String getResultCode() {
        return (java.lang.String) getValue(10);
    }
    /**
     * Setter for <code>killbill.adyen_responses.refusal_reason</code>.
     */
    public void setRefusalReason(java.lang.String value) {
        setValue(11, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.refusal_reason</code>.
     */
    public java.lang.String getRefusalReason() {
        return (java.lang.String) getValue(11);
    }
    /**
     * Setter for <code>killbill.adyen_responses.reference</code>.
     */
    public void setReference(java.lang.String value) {
        setValue(12, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.reference</code>.
     */
    public java.lang.String getReference() {
        return (java.lang.String) getValue(12);
    }
    /**
     * Setter for <code>killbill.adyen_responses.psp_error_codes</code>.
     */
    public void setPspErrorCodes(java.lang.String value) {
        setValue(13, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.psp_error_codes</code>.
     */
    public java.lang.String getPspErrorCodes() {
        return (java.lang.String) getValue(13);
    }
    /**
     * Setter for <code>killbill.adyen_responses.payment_internal_ref</code>.
     */
    public void setPaymentInternalRef(java.lang.String value) {
        setValue(14, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.payment_internal_ref</code>.
     */
    public java.lang.String getPaymentInternalRef() {
        return (java.lang.String) getValue(14);
    }
    /**
     * Setter for <code>killbill.adyen_responses.form_url</code>.
     */
    public void setFormUrl(java.lang.String value) {
        setValue(15, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.form_url</code>.
     */
    public java.lang.String getFormUrl() {
        return (java.lang.String) getValue(15);
    }
    /**
     * Setter for <code>killbill.adyen_responses.dcc_amount</code>.
     */
    public void setDccAmount(java.math.BigDecimal value) {
        setValue(16, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.dcc_amount</code>.
     */
    public java.math.BigDecimal getDccAmount() {
        return (java.math.BigDecimal) getValue(16);
    }
    /**
     * Setter for <code>killbill.adyen_responses.dcc_currency</code>.
     */
    public void setDccCurrency(java.lang.String value) {
        setValue(17, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.dcc_currency</code>.
     */
    public java.lang.String getDccCurrency() {
        return (java.lang.String) getValue(17);
    }
    /**
     * Setter for <code>killbill.adyen_responses.dcc_signature</code>.
     */
    public void setDccSignature(java.lang.String value) {
        setValue(18, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.dcc_signature</code>.
     */
    public java.lang.String getDccSignature() {
        return (java.lang.String) getValue(18);
    }
    /**
     * Setter for <code>killbill.adyen_responses.issuer_url</code>.
     */
    public void setIssuerUrl(java.lang.String value) {
        setValue(19, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.issuer_url</code>.
     */
    public java.lang.String getIssuerUrl() {
        return (java.lang.String) getValue(19);
    }
    /**
     * Setter for <code>killbill.adyen_responses.md</code>.
     */
    public void setMd(java.lang.String value) {
        setValue(20, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.md</code>.
     */
    public java.lang.String getMd() {
        return (java.lang.String) getValue(20);
    }
    /**
     * Setter for <code>killbill.adyen_responses.pa_request</code>.
     */
    public void setPaRequest(java.lang.String value) {
        setValue(21, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.pa_request</code>.
     */
    public java.lang.String getPaRequest() {
        return (java.lang.String) getValue(21);
    }
    /**
     * Setter for <code>killbill.adyen_responses.additional_data</code>.
     */
    public void setAdditionalData(java.lang.String value) {
        setValue(22, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.additional_data</code>.
     */
    public java.lang.String getAdditionalData() {
        return (java.lang.String) getValue(22);
    }
    /**
     * Setter for <code>killbill.adyen_responses.created_date</code>.
     */
    public void setCreatedDate(java.sql.Timestamp value) {
        setValue(23, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.created_date</code>.
     */
    public java.sql.Timestamp getCreatedDate() {
        return (java.sql.Timestamp) getValue(23);
    }
    /**
     * Setter for <code>killbill.adyen_responses.kb_tenant_id</code>.
     */
    public void setKbTenantId(java.lang.String value) {
        setValue(24, value);
    }
    /**
     * Getter for <code>killbill.adyen_responses.kb_tenant_id</code>.
     */
    public java.lang.String getKbTenantId() {
        return (java.lang.String) getValue(24);
    }
    // -------------------------------------------------------------------------
    // Primary key information
    // -------------------------------------------------------------------------
    /**
     * {@inheritDoc}
     */
    @Override
    public org.jooq.Record1<org.jooq.types.UInteger> key() {
        return (org.jooq.Record1) super.key();
    }
    // -------------------------------------------------------------------------
    // Constructors
    // -------------------------------------------------------------------------
    /**
     * Create a detached AdyenResponsesRecord
     */
    public AdyenResponsesRecord() {
        super(org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenResponses.ADYEN_RESPONSES);
    }
    /**
     * Create a detached, initialised AdyenResponsesRecord
     */
    public AdyenResponsesRecord(org.jooq.types.UInteger recordId, java.lang.String kbAccountId, java.lang.String kbPaymentId, java.lang.String kbPaymentTransactionId, java.lang.String transactionType, java.math.BigDecimal amount, java.lang.String currency, java.lang.String pspResult, java.lang.String pspReference, java.lang.String authCode, java.lang.String resultCode, java.lang.String refusalReason, java.lang.String reference, java.lang.String pspErrorCodes, java.lang.String paymentInternalRef, java.lang.String formUrl, java.math.BigDecimal dccAmount, java.lang.String dccCurrency, java.lang.String dccSignature, java.lang.String issuerUrl, java.lang.String md, java.lang.String paRequest, java.lang.String additionalData, java.sql.Timestamp createdDate, java.lang.String kbTenantId) {
        super(org.killbill.billing.plugin.adyen.dao.gen.tables.AdyenResponses.ADYEN_RESPONSES);
        // Field values are assigned positionally; the order must match the
        // generated table definition (indexes 0-24 above).
        setValue(0, recordId);
        setValue(1, kbAccountId);
        setValue(2, kbPaymentId);
        setValue(3, kbPaymentTransactionId);
        setValue(4, transactionType);
        setValue(5, amount);
        setValue(6, currency);
        setValue(7, pspResult);
        setValue(8, pspReference);
        setValue(9, authCode);
        setValue(10, resultCode);
        setValue(11, refusalReason);
        setValue(12, reference);
        setValue(13, pspErrorCodes);
        setValue(14, paymentInternalRef);
        setValue(15, formUrl);
        setValue(16, dccAmount);
        setValue(17, dccCurrency);
        setValue(18, dccSignature);
        setValue(19, issuerUrl);
        setValue(20, md);
        setValue(21, paRequest);
        setValue(22, additionalData);
        setValue(23, createdDate);
        setValue(24, kbTenantId);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.