gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
package com.facebook.presto.operator.scalar;
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import com.facebook.airlift.stats.cardinality.HyperLogLog;
import com.google.common.base.Joiner;
import com.google.common.collect.ImmutableList;
import com.google.common.io.BaseEncoding;
import io.airlift.slice.Slice;
import org.testng.annotations.Test;
import java.util.Iterator;
import java.util.List;
import static com.facebook.presto.spi.type.BigintType.BIGINT;
import static com.facebook.presto.spi.type.BooleanType.BOOLEAN;
public class TestHyperLogLogFunctions
        extends AbstractTestFunctions
{
    // NOTE: the previous private no-arg constructor was removed: TestNG must be able
    // to instantiate the test class, and a private constructor prevents the tests
    // from running.

    /** Number of HyperLogLog buckets used for every column created by these tests. */
    private static final int NUMBER_OF_BUCKETS = 32768;

    /** Relative error (5%) tolerated when asserting estimated cardinalities. */
    private static final double ERROR_RATE = 0.05;

    @Test
    public void testCardinalityNullArray()
    {
        // merge_hll(null) is null, and cardinality of null is null
        assertFunction("cardinality(merge_hll(null))", BIGINT, null);
    }

    @Test
    public void testCardinalityMultipleNullColumns()
    {
        // an array of only null HLLs merges to null
        assertFunction("cardinality(merge_hll(ARRAY[null, null, null]))", BIGINT, null);
    }

    @Test
    public void testMergeNoColumns()
    {
        // merging an empty array yields a null HLL
        String projection = getMergeProjection(getUniqueElements(0, 0));
        functionAssertions.assertFunction("(CAST(" + projection + " AS VARBINARY)) IS NULL", BOOLEAN, true);
    }

    @Test
    public void testCardinalityNoColumns()
    {
        String projection = getCardinalityProjection(getUniqueElements(0, 0));
        assertFunction(projection, BIGINT, null);
    }

    @Test
    public void testMergeSingleColumn()
    {
        String projection = getMergeProjection(getUniqueElements(1, 10000));
        functionAssertions.assertFunction("(CAST(" + projection + " AS VARBINARY)) IS NULL", BOOLEAN, false);
    }

    @Test
    public void testCardinalitySingleColumn()
    {
        assertCardinality(1, 10000);
    }

    @Test
    public void testCardinalityTwoColumns()
    {
        assertCardinality(2, 20000);
    }

    @Test
    public void testCardinalityThreeColumns()
    {
        assertCardinality(3, 30000);
    }

    @Test
    public void testMergeManyColumns()
    {
        // max number of columns to merge is 254
        String projection = getMergeProjection(getUniqueElements(254, 10000 * 254));
        functionAssertions.assertFunction("(CAST(" + projection + " AS VARBINARY)) IS NULL", BOOLEAN, false);
    }

    @Test
    public void testCardinalityManyColumns()
    {
        // max number of columns to merge is 254
        assertCardinality(254, 1000 * 254);
    }

    /**
     * Asserts that merging {@code blockSize} HLL columns containing {@code uniqueElements}
     * distinct values in total yields a cardinality within {@link #ERROR_RATE} of the truth.
     */
    private void assertCardinality(int blockSize, long uniqueElements)
    {
        String projection = getCardinalityProjection(getUniqueElements(blockSize, uniqueElements));
        functionAssertions.assertFunctionWithError(projection, BIGINT, uniqueElements, uniqueElements * ERROR_RATE);
    }

    /**
     * Creates {@code blockSize} HyperLogLog columns, partitioning the {@code uniqueElements}
     * distinct values evenly across them.
     */
    private List<HyperLogLog> getUniqueElements(int blockSize, long uniqueElements)
    {
        ImmutableList.Builder<HyperLogLog> builder = ImmutableList.builder();
        for (int column = 0; column < blockSize; column++) {
            HyperLogLog hll = HyperLogLog.newInstance(NUMBER_OF_BUCKETS);
            // populate this column with its even partition of the unique elements
            long start = column * uniqueElements / blockSize;
            long end = start + uniqueElements / blockSize;
            for (long value = start; value < end; value++) {
                hll.add(value);
            }
            builder.add(hll);
        }
        return builder.build();
    }

    /** Builds a {@code cardinality(merge_hll(ARRAY[...]))} SQL projection for the given HLLs. */
    private String getCardinalityProjection(List<HyperLogLog> list)
    {
        return "cardinality(" + getMergeProjection(list) + ")";
    }

    /** Builds a {@code merge_hll(ARRAY[...])} SQL projection for the given HLLs. */
    private String getMergeProjection(List<HyperLogLog> list)
    {
        ImmutableList.Builder<String> casts = ImmutableList.builder();
        for (HyperLogLog hll : list) {
            casts.add(toHyperLogLogLiteral(hll));
        }
        return "merge_hll(ARRAY[" + Joiner.on(", ").join(casts.build()) + "])";
    }

    /** Serializes one HLL to a hex varbinary literal cast to the HyperLogLog type. */
    private static String toHyperLogLogLiteral(HyperLogLog hll)
    {
        Slice serialized = hll.serialize();
        String encoded = BaseEncoding.base16().lowerCase().encode(serialized.getBytes());
        return "CAST(X'" + encoded + "' AS HyperLogLog)";
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.db.filter;
import java.io.IOException;
import java.util.*;
import com.google.common.collect.SortedSetMultimap;
import com.google.common.collect.TreeMultimap;
import org.apache.cassandra.config.CFMetaData;
import org.apache.cassandra.cql3.ColumnIdentifier;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.rows.CellPath;
import org.apache.cassandra.config.ColumnDefinition;
import org.apache.cassandra.io.util.DataInputPlus;
import org.apache.cassandra.io.util.DataOutputPlus;
/**
 * Represents which (non-PK) columns (and optionally which sub-part of a column for complex columns) are selected
 * by a query.
 *
 * We distinguish 2 sets of columns in practice: the _fetched_ columns, which are the columns that we (may, see
 * below) need to fetch internally, and the _queried_ columns, which are the columns that the user has selected
 * in its request.
 *
 * The reason for distinguishing those 2 sets is that due to the CQL semantic (see #6588 for more details), we
 * often need to internally fetch all regular columns for the queried table, but can still do some optimizations for
 * those columns that are not directly queried by the user (see #10657 for more details).
 *
 * Note that in practice:
 * - the _queried_ columns set is always included in the _fetched_ one.
 * - whenever those sets are different, we know 1) the _fetched_ set contains all regular columns for the table and 2)
 *   _fetched_ == _queried_ for static columns, so we don't have to record this set, we just keep a pointer to the
 *   table metadata. The only set we concretely store is thus the _queried_ one.
 * - in the special case of a {@code SELECT *} query, we want to query all columns, and _fetched_ == _queried_.
 *   As this is a common case, we special case it by keeping the _queried_ set {@code null} (and we retrieve
 *   the columns through the metadata pointer).
 *
 * For complex columns, this class optionally allows specifying a subset of the cells to query for each column.
 * We can either select individual cells by path name, or a slice of them. Note that this is a sub-selection of
 * _queried_ cells, so if _fetched_ != _queried_, then the cells selected by this sub-selection are considered
 * queried and the other ones are considered fetched (and if a column has some sub-selection, it must be a queried
 * column, which is actually enforced by the Builder below).
 */
public class ColumnFilter
{
    public static final Serializer serializer = new Serializer();

    // True if _fetched_ includes all regular columns (and any static in _queried_), in which case metadata must not be
    // null. If false, then _fetched_ == _queried_ and we only store _queried_.
    private final boolean fetchAllRegulars;

    private final CFMetaData metadata; // can be null if !fetchAllRegulars

    private final PartitionColumns queried; // can be null if fetchAllRegulars, to represent a wildcard query (all
                                            // static and regular columns are both _fetched_ and _queried_).

    // Optional per-column cell sub-selections, keyed by column name. Can be null (no sub-selection at all).
    private final SortedSetMultimap<ColumnIdentifier, ColumnSubselection> subSelections;

    private ColumnFilter(boolean fetchAllRegulars,
                         CFMetaData metadata,
                         PartitionColumns queried,
                         SortedSetMultimap<ColumnIdentifier, ColumnSubselection> subSelections)
    {
        assert !fetchAllRegulars || metadata != null;
        assert fetchAllRegulars || queried != null;
        this.fetchAllRegulars = fetchAllRegulars;
        this.metadata = metadata;
        this.queried = queried;
        this.subSelections = subSelections;
    }

    /**
     * A filter that includes all columns for the provided table.
     */
    public static ColumnFilter all(CFMetaData metadata)
    {
        return new ColumnFilter(true, metadata, null, null);
    }

    /**
     * A filter that only fetches/queries the provided columns.
     * <p>
     * Note that this shouldn't be used for CQL queries in general as all columns should be queried to
     * preserve CQL semantic (see class javadoc). This is ok for some internal queries however (and
     * for #6588 if/when we implement it).
     */
    public static ColumnFilter selection(PartitionColumns columns)
    {
        return new ColumnFilter(false, null, columns, null);
    }

    /**
     * A filter that fetches all columns for the provided table, but returns
     * only the queried ones.
     */
    public static ColumnFilter selection(CFMetaData metadata, PartitionColumns queried)
    {
        return new ColumnFilter(true, metadata, queried, null);
    }

    /**
     * The columns that needs to be fetched internally for this filter.
     *
     * @return the columns to fetch for this filter.
     */
    public PartitionColumns fetchedColumns()
    {
        if (!fetchAllRegulars)
            return queried;

        // We always fetch all regulars, but only fetch the statics in queried. Unless queried == null, in which
        // case it's a wildcard and we fetch everything.
        PartitionColumns all = metadata.partitionColumns();
        return queried == null || all.statics.isEmpty()
             ? all
             : new PartitionColumns(queried.statics, all.regulars);
    }

    /**
     * The columns actually queried by the user.
     * <p>
     * Note that this is in general not all the columns that are fetched internally (see {@link #fetchedColumns}).
     */
    public PartitionColumns queriedColumns()
    {
        assert queried != null || fetchAllRegulars;
        return queried == null ? metadata.partitionColumns() : queried;
    }

    /**
     * Whether all the (regular or static) columns are fetched by this filter.
     * <p>
     * Note that this method is meant as an optimization but a negative return
     * shouldn't be relied upon strongly: this can return {@code false} but
     * still have all the columns fetched if those were manually selected by the
     * user. The goal here is to cheaply avoid filtering things on wildcard
     * queries, as those are common.
     *
     * @param isStatic whether to check for static columns or not. If {@code true},
     * the method returns if all static columns are fetched, otherwise it checks
     * regular columns.
     */
    public boolean fetchesAllColumns(boolean isStatic)
    {
        // Statics are all fetched only on a wildcard query (queried == null); regulars whenever fetchAllRegulars.
        return isStatic ? queried == null : fetchAllRegulars;
    }

    /**
     * Whether _fetched_ == _queried_ for this filter, and so if the {@code isQueried()} methods
     * can return {@code false} for some column/cell.
     */
    public boolean allFetchedColumnsAreQueried()
    {
        return !fetchAllRegulars || queried == null;
    }

    /**
     * Whether the provided column is fetched by this filter.
     */
    public boolean fetches(ColumnDefinition column)
    {
        // For statics, it is included only if it's part of _queried_, or if _queried_ is null (wildcard query).
        if (column.isStatic())
            return queried == null || queried.contains(column);

        // For regulars, if 'fetchAllRegulars', then it's included automatically. Otherwise, it depends on _queried_.
        return fetchAllRegulars || queried.contains(column);
    }

    /**
     * Whether the provided column, which is assumed to be _fetched_ by this filter (so the caller must guarantee
     * that {@code fetches(column) == true}, is also _queried_ by the user.
     *
     * !WARNING! please be sure to understand the difference between _fetched_ and _queried_
     * columns that this class makes before using this method. If unsure, you probably want
     * to use the {@link #fetches} method.
     */
    public boolean fetchedColumnIsQueried(ColumnDefinition column)
    {
        return !fetchAllRegulars || queried == null || queried.contains(column);
    }

    /**
     * Whether the provided complex cell (identified by its column and path), which is assumed to be _fetched_ by
     * this filter, is also _queried_ by the user.
     *
     * !WARNING! please be sure to understand the difference between _fetched_ and _queried_
     * columns that this class makes before using this method. If unsure, you probably want
     * to use the {@link #fetches} method.
     */
    public boolean fetchedCellIsQueried(ColumnDefinition column, CellPath path)
    {
        assert path != null;
        if (!fetchAllRegulars || subSelections == null)
            return true;

        SortedSet<ColumnSubselection> s = subSelections.get(column.name);
        // No sub-selection for this column means everything is queried
        if (s.isEmpty())
            return true;

        for (ColumnSubselection subSel : s)
            if (subSel.compareInclusionOf(path) == 0)
                return true;

        return false;
    }

    /**
     * Creates a new {@code Tester} to efficiently test the inclusion of cells of complex column
     * {@code column}.
     *
     * @param column for complex column for which to create a tester.
     * @return the created tester or {@code null} if all the cells from the provided column
     * are queried.
     */
    public Tester newTester(ColumnDefinition column)
    {
        if (subSelections == null || !column.isComplex())
            return null;

        SortedSet<ColumnSubselection> s = subSelections.get(column.name);
        if (s.isEmpty())
            return null;

        return new Tester(!column.isStatic() && fetchAllRegulars, s.iterator());
    }

    /**
     * Returns a {@code ColumnFilter} builder that fetches all regular columns (and queries the columns
     * added to the builder, or everything if no column is added).
     */
    public static Builder allRegularColumnsBuilder(CFMetaData metadata)
    {
        return new Builder(metadata);
    }

    /**
     * Returns a {@code ColumnFilter} builder that only fetches the columns/cells added to the builder.
     */
    public static Builder selectionBuilder()
    {
        return new Builder(null);
    }

    public static class Tester
    {
        // Whether the cells of the tested column are fetched regardless of any sub-selection
        // (i.e. it's a regular column and the filter fetches all regulars).
        private final boolean isFetched;
        private ColumnSubselection current;
        private final Iterator<ColumnSubselection> iterator;

        private Tester(boolean isFetched, Iterator<ColumnSubselection> iterator)
        {
            this.isFetched = isFetched;
            this.iterator = iterator;
        }

        public boolean fetches(CellPath path)
        {
            return isFetched || hasSubselection(path);
        }

        /**
         * Must only be called if {@code fetches(path) == true}.
         */
        public boolean fetchedCellIsQueried(CellPath path)
        {
            return !isFetched || hasSubselection(path);
        }

        private boolean hasSubselection(CellPath path)
        {
            // The sub-selections and the tested paths are both iterated in order, so we can
            // advance through the sub-selections without ever backtracking.
            while (current != null || iterator.hasNext())
            {
                if (current == null)
                    current = iterator.next();

                int cmp = current.compareInclusionOf(path);
                if (cmp == 0) // The path is included
                    return true;
                else if (cmp < 0) // The path is before this sub-selection, it's not included by any
                    return false;

                // the path is after this sub-selection, we need to check the next one.
                current = null;
            }
            return false;
        }
    }

    /**
     * A builder for a {@code ColumnFilter} object.
     *
     * Note that the columns added to this builder are the _queried_ columns. Whether or not all columns
     * are _fetched_ depends on which constructor you've used to obtain this builder, allRegularColumnsBuilder (all
     * columns are fetched) or selectionBuilder (only the queried columns are fetched).
     *
     * Note that for an allRegularColumnsBuilder, if no queried columns are added, this is interpreted as querying
     * all columns, not querying none (but if you know you want to query all columns, prefer
     * {@link ColumnFilter#all(CFMetaData)}. For selectionBuilder, adding no queried columns means no column will be
     * fetched (so the builder will return {@code PartitionColumns.NONE}).
     */
    public static class Builder
    {
        private final CFMetaData metadata; // null if we don't fetch all columns
        private PartitionColumns.Builder queriedBuilder;
        private List<ColumnSubselection> subSelections;

        private Builder(CFMetaData metadata)
        {
            this.metadata = metadata;
        }

        public Builder add(ColumnDefinition c)
        {
            if (queriedBuilder == null)
                queriedBuilder = PartitionColumns.builder();
            queriedBuilder.add(c);
            return this;
        }

        public Builder addAll(Iterable<ColumnDefinition> columns)
        {
            if (queriedBuilder == null)
                queriedBuilder = PartitionColumns.builder();
            queriedBuilder.addAll(columns);
            return this;
        }

        private Builder addSubSelection(ColumnSubselection subSelection)
        {
            // A sub-selected column is always also a queried column (see class javadoc).
            add(subSelection.column());
            if (subSelections == null)
                subSelections = new ArrayList<>();
            subSelections.add(subSelection);
            return this;
        }

        public Builder slice(ColumnDefinition c, CellPath from, CellPath to)
        {
            return addSubSelection(ColumnSubselection.slice(c, from, to));
        }

        public Builder select(ColumnDefinition c, CellPath elt)
        {
            return addSubSelection(ColumnSubselection.element(c, elt));
        }

        public ColumnFilter build()
        {
            boolean isFetchAll = metadata != null;

            PartitionColumns queried = queriedBuilder == null ? null : queriedBuilder.build();
            // It's only ok to have queried == null in ColumnFilter if isFetchAll. So deal with the case of a selectionBuilder
            // with nothing selected (this can at least happen on some backward compatible queries - CASSANDRA-10471).
            if (!isFetchAll && queried == null)
                queried = PartitionColumns.NONE;

            SortedSetMultimap<ColumnIdentifier, ColumnSubselection> s = null;
            if (subSelections != null)
            {
                s = TreeMultimap.create(Comparator.<ColumnIdentifier>naturalOrder(), Comparator.<ColumnSubselection>naturalOrder());
                for (ColumnSubselection subSelection : subSelections)
                    s.put(subSelection.column().name, subSelection);
            }

            return new ColumnFilter(isFetchAll, metadata, queried, s);
        }
    }

    @Override
    public String toString()
    {
        if (fetchAllRegulars && queried == null)
            return "*";

        if (queried.isEmpty())
            return "";

        Iterator<ColumnDefinition> defs = queried.selectOrderIterator();
        if (!defs.hasNext())
            return "<none>";

        StringBuilder sb = new StringBuilder();
        while (defs.hasNext())
        {
            appendColumnDef(sb, defs.next());
            if (defs.hasNext())
                sb.append(", ");
        }
        return sb.toString();
    }

    private void appendColumnDef(StringBuilder sb, ColumnDefinition column)
    {
        if (subSelections == null)
        {
            sb.append(column.name);
            return;
        }

        SortedSet<ColumnSubselection> s = subSelections.get(column.name);
        if (s.isEmpty())
        {
            sb.append(column.name);
            return;
        }

        // Print the column once per sub-selection, e.g. "m[1], m[3]".
        int i = 0;
        for (ColumnSubselection subSel : s)
            sb.append(i++ == 0 ? "" : ", ").append(column.name).append(subSel);
    }

    public static class Serializer
    {
        // Header byte flags: which optional parts follow in the serialized form.
        private static final int IS_FETCH_ALL_MASK = 0x01;
        private static final int HAS_QUERIED_MASK = 0x02;
        private static final int HAS_SUB_SELECTIONS_MASK = 0x04;

        private static int makeHeaderByte(ColumnFilter selection)
        {
            return (selection.fetchAllRegulars ? IS_FETCH_ALL_MASK : 0)
                 | (selection.queried != null ? HAS_QUERIED_MASK : 0)
                 | (selection.subSelections != null ? HAS_SUB_SELECTIONS_MASK : 0);
        }

        public void serialize(ColumnFilter selection, DataOutputPlus out, int version) throws IOException
        {
            out.writeByte(makeHeaderByte(selection));

            if (selection.queried != null)
            {
                Columns.serializer.serialize(selection.queried.statics, out);
                Columns.serializer.serialize(selection.queried.regulars, out);
            }

            if (selection.subSelections != null)
            {
                out.writeUnsignedVInt(selection.subSelections.size());
                for (ColumnSubselection subSel : selection.subSelections.values())
                    ColumnSubselection.serializer.serialize(subSel, out, version);
            }
        }

        public ColumnFilter deserialize(DataInputPlus in, int version, CFMetaData metadata) throws IOException
        {
            int header = in.readUnsignedByte();
            boolean isFetchAll = (header & IS_FETCH_ALL_MASK) != 0;
            boolean hasQueried = (header & HAS_QUERIED_MASK) != 0;
            boolean hasSubSelections = (header & HAS_SUB_SELECTIONS_MASK) != 0;

            PartitionColumns queried = null;
            if (hasQueried)
            {
                Columns statics = Columns.serializer.deserialize(in, metadata);
                Columns regulars = Columns.serializer.deserialize(in, metadata);
                queried = new PartitionColumns(statics, regulars);
            }

            SortedSetMultimap<ColumnIdentifier, ColumnSubselection> subSelections = null;
            if (hasSubSelections)
            {
                subSelections = TreeMultimap.create(Comparator.<ColumnIdentifier>naturalOrder(), Comparator.<ColumnSubselection>naturalOrder());
                int size = (int)in.readUnsignedVInt();
                for (int i = 0; i < size; i++)
                {
                    ColumnSubselection subSel = ColumnSubselection.serializer.deserialize(in, version, metadata);
                    subSelections.put(subSel.column().name, subSel);
                }
            }

            return new ColumnFilter(isFetchAll, isFetchAll ? metadata : null, queried, subSelections);
        }

        public long serializedSize(ColumnFilter selection, int version)
        {
            long size = 1; // header byte

            if (selection.queried != null)
            {
                size += Columns.serializer.serializedSize(selection.queried.statics);
                size += Columns.serializer.serializedSize(selection.queried.regulars);
            }

            if (selection.subSelections != null)
            {
                size += TypeSizes.sizeofUnsignedVInt(selection.subSelections.size());
                for (ColumnSubselection subSel : selection.subSelections.values())
                    size += ColumnSubselection.serializer.serializedSize(subSel, version);
            }

            return size;
        }
    }
}
| |
package org.fastnate.generator.context;
import java.io.File;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.AnnotatedElement;
import java.net.URL;
import java.util.AbstractMap.SimpleEntry;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Properties;
import java.util.function.BiConsumer;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.persistence.AssociationOverride;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.JoinTable;
import javax.persistence.SequenceGenerator;
import javax.persistence.TableGenerator;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import org.apache.commons.lang.StringUtils;
import org.fastnate.generator.EntitySqlGenerator;
import org.fastnate.generator.dialect.GeneratorDialect;
import org.fastnate.generator.dialect.H2Dialect;
import org.fastnate.generator.provider.HibernateProvider;
import org.fastnate.generator.provider.JpaProvider;
import org.fastnate.generator.statements.StatementsWriter;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NodeList;
import org.xml.sax.SAXException;
import lombok.AccessLevel;
import lombok.Getter;
import lombok.RequiredArgsConstructor;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;
/**
* Represents the configuration and state for one or more {@link EntitySqlGenerator}s.
*
* @author Tobias Liefke
*/
@Getter
@Setter
@Slf4j
public class GeneratorContext {
/** Key for the generators map: a generator name, optionally scoped to a table. */
@RequiredArgsConstructor
private static final class GeneratorId {

	/** The name of the generator. */
	private final String id;

	/** The table the generator belongs to, or {@code null} for a table-independent generator. */
	private final GeneratorTable table;

	@Override
	public boolean equals(final Object obj) {
		if (!(obj instanceof GeneratorId)) {
			return false;
		}
		final GeneratorId other = (GeneratorId) obj;
		return this.id.equals(other.id) && Objects.equals(this.table, other.table);
	}

	@Override
	public int hashCode() {
		return this.table == null ? this.id.hashCode()
				: this.id.hashCode() << 2 | this.table.getName().hashCode();
	}

	@Override
	public String toString() {
		return this.table == null ? this.id : this.id + '.' + this.table.getQualifiedName();
	}
}
/**
* The settings key for the JPA provider.
*
* Contains either the fully qualified class name of an extension of {@link JpaProvider} or the simple name of one
* of the classes from {@code org.fastnate.generator.provider}.
*
* Defaults to {@code HibernateProvider}.
*/
public static final String PROVIDER_KEY = "fastnate.generator.jpa.provider";
/** The settings key for the path to the persistence.xml, either relative to the current directory or absolute. */
public static final String PERSISTENCE_FILE_KEY = "fastnate.generator.persistence.file";
/**
* The settings key for the name of the persistence unit in the persistence.xml. The first persistence unit is used,
* if none is explicitly set.
*/
public static final String PERSISTENCE_UNIT_KEY = "fastnate.generator.persistence.unit";
/**
* The settings key for the target SQL dialect.
*
* <p>
* Contains either the fully qualified name of a class that extends {@link GeneratorDialect} or the simple class
* name of one of the classes from {@code org.fastnate.generator.dialect}. The suffix 'Dialect' may be omitted in
* that case. For example 'MySql' would map to {@code org.fastnate.generator.dialect.MySqlDialect}.
* </p>
*
* <p>
* If no dialect is set explicitly then the configured {@link #PERSISTENCE_FILE_KEY persistence.xml} is scanned for
* a connection URL or provider specific dialect, which would be converted to our known dialects.
* </p>
*
* <p>
* If nothing is found, H2 is used as default.
* </p>
*/
public static final String DIALECT_KEY = "fastnate.generator.dialect";
/** The settings key for {@link #writeNullValues}. */
public static final String NULL_VALUES_KEY = "fastnate.generator.null.values";
/** The settings key for {@link #writeRelativeIds}. */
public static final String RELATIVE_IDS_KEY = "fastnate.generator.relative.ids";
/** The settings key for the {@link #uniquePropertyQuality}. */
public static final String UNIQUE_PROPERTIES_QUALITY_KEY = "fastnate.generator.unique.properties.quality";
/** The settings key for the {@link #maxUniqueProperties}. */
public static final String UNIQUE_PROPERTIES_MAX_KEY = "fastnate.generator.unique.properties.max";
/** The settings key for {@link #preferSequenceCurentValue}. */
public static final String PREFER_SEQUENCE_CURRENT_VALUE = "fastnate.generator.prefer.sequence.current.value";
/**
 * Tries to read any persistence file defined in the settings.
 *
 * @param settings
 *            the current settings
 */
private static void readPersistenceFile(final Properties settings) {
	// Determine the location of the persistence.xml: explicit setting, or the classpath default
	String location = settings.getProperty(PERSISTENCE_FILE_KEY);
	if (StringUtils.isEmpty(location)) {
		final URL defaultFile = GeneratorContext.class.getResource("/META-INF/persistence.xml");
		if (defaultFile == null) {
			// no persistence.xml available at all - nothing to read
			return;
		}
		location = defaultFile.toString();
	} else {
		final File file = new File(location);
		if (file.isFile()) {
			location = file.toURI().toString();
		}
	}

	final String unitName = settings.getProperty(PERSISTENCE_UNIT_KEY);
	try {
		final Document document = DocumentBuilderFactory.newInstance().newDocumentBuilder().parse(location);
		final NodeList units = document.getElementsByTagName("persistence-unit");
		for (int unitIndex = 0; unitIndex < units.getLength(); unitIndex++) {
			final Element unit = (Element) units.item(unitIndex);
			// Use the first unit if none was configured, otherwise the one with the matching name
			if (StringUtils.isEmpty(unitName) || unitName.equals(unit.getAttribute("name"))) {
				final NodeList properties = unit.getElementsByTagName("property");
				for (int propertyIndex = 0; propertyIndex < properties.getLength(); propertyIndex++) {
					final Element property = (Element) properties.item(propertyIndex);
					final String name = property.getAttribute("name");
					// explicitly given settings take precedence over the persistence.xml
					if (!settings.containsKey(name)) {
						settings.put(name, property.getAttribute("value"));
					}
				}
				break;
			}
		}
	} catch (final IOException | SAXException | ParserConfigurationException e) {
		// best effort: a broken persistence.xml must not abort the generation
		log.error("Could not read " + location + ": " + e, e);
	}
}
/** Identifies the SQL dialect for generating SQL statements. Encapsulates the database specifics. */
private GeneratorDialect dialect;
/** Identifies the JPA provider to indicate implementation specific details. */
private JpaProvider provider;
/** The maximum count of columns that are used when referencing an entity using its unique properties. */
private int maxUniqueProperties = 1;
/**
* Indicates what kind of properties are used for referencing an entity with its unique properties.
*/
private UniquePropertyQuality uniquePropertyQuality = UniquePropertyQuality.onlyRequiredPrimitives;
/**
* Indicates to use "currval" of a sequence if the referenced entity is the last created entity for that sequence
* before checking for {@link #uniquePropertyQuality unique properties}.
*/
private boolean preferSequenceCurentValue = true;
/**
* Indicates that we write into a schema that is not empty. By default we write all IDs as absolute values and
* change the sequences / table generators at the end. But this would crash if there is data in the database already
* that uses the same IDs. So in the case of incremental updates, one should set this setting to {@code true} -
* which will generate relative IDs which respect the existing IDs.
*/
private boolean writeRelativeIds;
/** Indicates to include null values in statements. */
private boolean writeNullValues;
/** Contains the settings that were given during creation, resp. as read from the persistence configuration. */
private final Properties settings;
/** Contains the extracted metadata to every known class of an {@link Entity}. */
private final Map<Class<?>, EntityClass<?>> descriptions = new HashMap<>();
/** The mapping from the {@link Entity#name() name of an entity} to the {@link #descriptions extracted metadata}. */
private final Map<String, EntityClass<?>> descriptionsByName = new HashMap<>();
/** Mapping from the names of all known database table to their description (including column information). */
private final Map<String, GeneratorTable> tables = new HashMap<>();
/** Contains the state of single entities, maps from an entity name to the mapping of an id to its state. */
private final Map<String, Map<Object, GenerationState>> states = new HashMap<>();
/** Mapping from the name of a generator to the generator itself. */
@Getter(AccessLevel.NONE)
private final Map<GeneratorId, IdGenerator> generators = new HashMap<>();
/** The default sequence generator, if none is explicitly specified in a {@link GeneratedValue}. */
private SequenceIdGenerator defaultSequenceGenerator;
/** The default table generator, if none is explicitly specified in a {@link GeneratedValue}. */
private TableIdGenerator defaultTableGenerator;
/** All listeners of this context. */
private List<ContextModelListener> contextModelListeners = new ArrayList<>();
/**
 * Creates a default generator context.
 *
 * Uses the {@link H2Dialect} with empty settings (no persistence.xml is read).
 */
public GeneratorContext() {
	this(new H2Dialect());
}
/**
 * Creates a generator context for a dialect.
 *
 * Uses the {@link HibernateProvider} as JPA provider and starts with empty settings.
 *
 * @param dialect
 *            the database dialect to use during generation
 */
public GeneratorContext(final GeneratorDialect dialect) {
	this.dialect = dialect;
	this.provider = new HibernateProvider();
	this.settings = new Properties();
}
/**
 * Creates a new instance of {@link GeneratorContext}.
 *
 * Reads the configured persistence.xml (if any) and resolves the JPA provider, the SQL
 * dialect and the remaining generation options from the given settings.
 *
 * @param settings
 *            contains the settings
 * @throws IllegalArgumentException
 *             if the configured provider or dialect class can't be instantiated
 */
public GeneratorContext(final Properties settings) {
	this.settings = settings;
	readPersistenceFile(settings);

	// Resolve the JPA provider - simple names refer to classes in the JpaProvider package
	String providerName = settings.getProperty(PROVIDER_KEY, "HibernateProvider");
	if (providerName.indexOf('.') < 0) {
		providerName = JpaProvider.class.getPackage().getName() + '.' + providerName;
	}
	this.provider = instantiate(providerName, JpaProvider.class, "provider");
	this.provider.initialize(settings);

	// Resolve the SQL dialect - simple names refer to classes in the GeneratorDialect package,
	// the "Dialect" suffix may be omitted
	String dialectName = settings.getProperty(DIALECT_KEY, "H2Dialect");
	if (dialectName.indexOf('.') < 0) {
		dialectName = GeneratorDialect.class.getPackage().getName() + '.' + dialectName;
		if (!dialectName.endsWith("Dialect")) {
			dialectName += "Dialect";
		}
	}
	this.dialect = instantiate(dialectName, GeneratorDialect.class, "dialect");

	this.writeRelativeIds = Boolean
			.parseBoolean(settings.getProperty(RELATIVE_IDS_KEY, String.valueOf(this.writeRelativeIds)));
	this.writeNullValues = Boolean
			.parseBoolean(settings.getProperty(NULL_VALUES_KEY, String.valueOf(this.writeNullValues)));
	this.uniquePropertyQuality = UniquePropertyQuality
			.valueOf(settings.getProperty(UNIQUE_PROPERTIES_QUALITY_KEY, this.uniquePropertyQuality.name()));
	this.maxUniqueProperties = Integer
			.parseInt(settings.getProperty(UNIQUE_PROPERTIES_MAX_KEY, String.valueOf(this.maxUniqueProperties)));
	this.preferSequenceCurentValue = Boolean.parseBoolean(
			settings.getProperty(PREFER_SEQUENCE_CURRENT_VALUE, String.valueOf(this.preferSequenceCurentValue)));
}

/**
 * Creates an instance of the named class using its no-argument constructor.
 *
 * @param className
 *            the fully qualified name of the class to instantiate
 * @param type
 *            the expected (super) type of the created object
 * @param description
 *            short label ("provider" or "dialect") used in the error message
 * @return the created instance
 * @throws IllegalArgumentException
 *             if the class can't be found, created or cast to the expected type
 */
private static <T> T instantiate(final String className, final Class<T> type, final String description) {
	try {
		// getDeclaredConstructor().newInstance() replaces the deprecated Class.newInstance(),
		// which silently rethrows any checked exception thrown by the constructor
		return type.cast(Class.forName(className).getDeclaredConstructor().newInstance());
	} catch (final ReflectiveOperationException | ClassCastException e) {
		throw new IllegalArgumentException("Can't instantiate " + description + ": " + className, e);
	}
}
    /**
     * Adds a new listener to this context.
     *
     * The listener is notified about every model element (entity class, table, generator) discovered from now on.
     *
     * @param listener
     *            the listener that is interested in new discovered model elements
     */
    public void addContextModelListener(final ContextModelListener listener) {
        this.contextModelListeners.add(listener);
    }
private <K, T> T addContextObject(final Map<K, ? super T> objects,
final BiConsumer<ContextModelListener, ? super T> listenerFunction, final K key, final T object) {
objects.put(key, object);
fireContextObjectAdded(listenerFunction, object);
return object;
}
/**
* Fires an event to all {@link #getContextModelListeners() listeners}.
*
* @param listenerFunction
* the function that is called on the listeners
* @param contextObject
* the object to offer to the listener function
*/
protected <T> void fireContextObjectAdded(final BiConsumer<ContextModelListener, T> listenerFunction,
final T contextObject) {
for (final ContextModelListener listener : this.contextModelListeners) {
listenerFunction.accept(listener, contextObject);
}
}
private IdGenerator getDefaultSequenceGenerator() {
if (this.defaultSequenceGenerator == null) {
final Map<String, Object> defaults = Stream
.of(new SimpleEntry<String, Object>("sequenceName", this.provider.getDefaultSequence()),
new SimpleEntry<String, Object>("allocationSize", Integer.valueOf(1)))
.collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue));
this.defaultSequenceGenerator = new SequenceIdGenerator(
AnnotationDefaults.create(SequenceGenerator.class, defaults), this.dialect, this.writeRelativeIds);
fireContextObjectAdded(ContextModelListener::foundGenerator, this.defaultSequenceGenerator);
}
return this.defaultSequenceGenerator;
}
private IdGenerator getDefaultTableGenerator() {
if (this.defaultTableGenerator == null) {
final Map<String, Object> defaults = Stream
.of(new SimpleEntry<String, Object>("pkColumnValue", "default"),
new SimpleEntry<String, Object>("allocationSize", Integer.valueOf(1)))
.collect(Collectors.toMap(SimpleEntry::getKey, SimpleEntry::getValue));
this.defaultTableGenerator = new TableIdGenerator(AnnotationDefaults.create(TableGenerator.class, defaults),
this);
fireContextObjectAdded(ContextModelListener::foundGenerator, this.defaultTableGenerator);
}
return this.defaultTableGenerator;
}
/**
* Finds the description for a class.
*
* @param entityClass
* the class to lookup
* @return the description for the class or {@code null} if the class is not an {@link Entity}
*/
public <E> EntityClass<E> getDescription(final Class<E> entityClass) {
// Lookup description
EntityClass<E> description = (EntityClass<E>) this.descriptions.get(entityClass);
if (description == null) {
if (!entityClass.isAnnotationPresent(Entity.class)) {
// Step up to find the parent description
final Class<?> superClass = entityClass.getSuperclass();
if (superClass == null) {
return null;
}
return (EntityClass<E>) getDescription(superClass);
}
// Create the description
description = new EntityClass<>(this, entityClass);
// First remember the description (to prevent endless loops)
this.descriptions.put(entityClass, description);
this.descriptionsByName.put(description.getEntityName(), description);
// And now build the properties
description.build();
// And notify listeners
fireContextObjectAdded(ContextModelListener::foundEntityClass, description);
}
return description;
}
/**
* Finds the description for the class of an entity.
*
* @param entity
* the entity to lookup
* @return the description for the class of the entity
* @throws IllegalArgumentException
* if the given object is no {@link Entity}
*/
public <E> EntityClass<E> getDescription(final E entity) {
if (entity == null) {
throw new IllegalArgumentException("Can't inspect null entity");
}
final EntityClass<E> description = (EntityClass<E>) getDescription(entity.getClass());
if (description == null) {
throw new IllegalArgumentException(entity.getClass() + " is not an entity class");
}
return description;
}
    /**
     * Finds the correct generator for the given annotation.
     *
     * Named generators are looked up in the registry (preferring a table specific variant); unnamed ones fall
     * back to the strategy specific default generator.
     *
     * @param generatedValue
     *            the annotation of the current primary key
     * @param table
     *            the name of the current table
     * @param column
     *            the name of the current column
     * @return the generator that is responsible for managing the values
     * @throws ModelException
     *             if a named generator is unknown or the generation type is not supported
     */
    @SuppressWarnings("null")
    public IdGenerator getGenerator(final GeneratedValue generatedValue, final GeneratorTable table,
            final GeneratorColumn column) {
        GenerationType strategy = generatedValue.strategy();
        final String name = generatedValue.generator();
        if (StringUtils.isNotEmpty(name)) {
            // A named generator was referenced - IDENTITY does not support names
            ModelException.test(strategy != GenerationType.IDENTITY,
                    "Generator for GenerationType.IDENTITY not allowed");
            // Prefer a generator that was already derived for this table
            IdGenerator generator = this.generators.get(new GeneratorId(name, table));
            if (generator == null) {
                // Fall back to the table independent generator and derive a table specific one if necessary
                generator = this.generators.get(new GeneratorId(name, null));
                ModelException.test(generator != null, "Generator '{}' not found", name);
                final IdGenerator derived = generator.derive(table);
                if (derived != generator) {
                    // Cache the derived generator under the table specific key and notify listeners
                    return addContextObject(this.generators, ContextModelListener::foundGenerator,
                            new GeneratorId(name, table), derived);
                }
            }
            return generator;
        }
        // No name given - resolve AUTO to the dialect's preferred strategy
        if (strategy == GenerationType.AUTO) {
            strategy = this.dialect.getAutoGenerationType();
        }
        switch (strategy) {
            case IDENTITY:
                // Identity columns get one generator per column, registered under the column name
                return addContextObject(this.generators, ContextModelListener::foundGenerator,
                        new GeneratorId(column.getName(), table), new IdentityValue(this, table, column));
            case TABLE:
                return getDefaultTableGenerator();
            case SEQUENCE:
                return getDefaultSequenceGenerator();
            case AUTO:
            default:
                // AUTO should have been resolved above - anything else is an unknown strategy
                throw new ModelException("Unknown GenerationType: " + strategy);
        }
    }
/**
* The entity states for the given entity class.
*
* @param entityClass
* the current entity class
* @return the states of the entities of that class (with their IDs as keys)
*/
Map<Object, GenerationState> getStates(final EntityClass<?> entityClass) {
Map<Object, GenerationState> entityStates = this.states.get(entityClass.getEntityName());
if (entityStates == null) {
entityStates = new HashMap<>();
this.states.put(entityClass.getEntityName(), entityStates);
}
return entityStates;
}
    /**
     * Registers the {@link TableGenerator} and {@link SequenceGenerator} declared at the given element.
     *
     * If neither annotation is present, nothing happens.
     *
     * @param element
     *            the inspected class, method or field
     * @param table
     *            the table of the current entity
     */
    public void registerGenerators(final AnnotatedElement element, final GeneratorTable table) {
        final SequenceGenerator sequenceGenerator = element.getAnnotation(SequenceGenerator.class);
        if (sequenceGenerator != null) {
            GeneratorId key = new GeneratorId(sequenceGenerator.name(), null);
            final IdGenerator existingGenerator = this.generators.get(key);
            // Only register if no equivalent sequence generator with this name exists yet
            if (!(existingGenerator instanceof SequenceIdGenerator) || !((SequenceIdGenerator) existingGenerator)
                    .getSequenceName().equals(sequenceGenerator.sequenceName())) {
                if (existingGenerator != null) {
                    // A different generator owns the global name - register table scoped to avoid clobbering it
                    key = new GeneratorId(sequenceGenerator.name(), table);
                }
                addContextObject(this.generators, ContextModelListener::foundGenerator, key,
                        new SequenceIdGenerator(sequenceGenerator, this.dialect, this.writeRelativeIds));
            }
        }
        final TableGenerator tableGenerator = element.getAnnotation(TableGenerator.class);
        if (tableGenerator != null) {
            final GeneratorId key = new GeneratorId(tableGenerator.name(), null);
            // Table generators are registered globally; the first declaration wins
            if (!this.generators.containsKey(key)) {
                addContextObject(this.generators, ContextModelListener::foundGenerator, key,
                        new TableIdGenerator(tableGenerator, this));
            }
        }
    }
    /**
     * Removes a listener from this context.
     *
     * Does nothing if the listener was never registered.
     *
     * @param listener
     *            the listener that is not interested anymore
     */
    public void removeContextModelListener(final ContextModelListener listener) {
        this.contextModelListeners.remove(listener);
    }
/**
* Finds resp. builds the metadata to the given table from the given (optional) annotation.
*
* @param override
* contains the overrides for the mapping table
* @param annotation
* the optional annotation that contains any metadata to the table
* @param catalogName
* finds the optional name of the catalog that contains the table
* @param schemaName
* finds the optional name of the schema that contains the table
* @param tableName
* finds the name of the table
* @param defaultTableName
* the name of the talbe, if the annotation is {@code null} or contains no value for the table name
* @return the metadata for the given table
*/
public <A extends Annotation> GeneratorTable resolveTable(final AssociationOverride override, final A annotation,
final Function<A, String> catalogName, final Function<A, String> schemaName,
final Function<A, String> tableName, final String defaultTableName) {
String catalog;
String schema;
String table;
if (annotation == null) {
catalog = null;
schema = null;
table = defaultTableName;
} else {
catalog = catalogName.apply(annotation);
schema = schemaName.apply(annotation);
table = StringUtils.defaultIfEmpty(tableName.apply(annotation), defaultTableName);
}
if (override != null) {
final JoinTable joinTable = override.joinTable();
if (joinTable != null) {
catalog = StringUtils.defaultIfEmpty(joinTable.catalog(), catalog);
schema = StringUtils.defaultIfEmpty(joinTable.schema(), catalog);
table = StringUtils.defaultIfEmpty(joinTable.name(), table);
}
}
return resolveTable(catalog, schema, table);
}
/**
* Finds resp. builds the metadata to the given table.
*
* @param catalogName
* the optional name of the catalog that contains the table
* @param schemaName
* the optional name of the schema that contains the table
*
* @param tableName
* the name of the table from the database
* @return the metadata for the given table
*/
public GeneratorTable resolveTable(final String catalogName, final String schemaName, final String tableName) {
final String catalog;
final String schema;
final String qualified;
if (catalogName == null || catalogName.length() == 0) {
catalog = null;
if (schemaName == null || schemaName.length() == 0) {
schema = null;
qualified = tableName;
} else {
schema = schemaName;
qualified = schemaName + '.' + tableName;
}
} else {
schema = schemaName;
ModelException.test(schema != null && schema.length() > 0,
"Catalog name '{}' found for table '{}' but schema name is missing.", catalogName, tableName);
catalog = catalogName;
qualified = catalog + '.' + schema + '.' + tableName;
}
final GeneratorTable table = this.tables.get(qualified);
if (table != null) {
return table;
}
return addContextObject(this.tables, ContextModelListener::foundTable, qualified,
new GeneratorTable(this.tables.size(), catalog, schema, tableName, qualified, this));
}
    /**
     * Builds all statements that are necessary to align ID generators in the database with the current IDs.
     *
     * Covers all registered named generators as well as the lazily created default generators (if any).
     *
     * @param writer
     *            the target of any write operation
     * @throws IOException
     *             if the writer throws one
     */
    public void writeAlignmentStatements(final StatementsWriter writer) throws IOException {
        for (final IdGenerator generator : this.generators.values()) {
            generator.alignNextValue(writer);
        }
        // The default generators are not part of the registry and need to be aligned separately
        if (this.defaultSequenceGenerator != null) {
            this.defaultSequenceGenerator.alignNextValue(writer);
        }
        if (this.defaultTableGenerator != null) {
            this.defaultTableGenerator.alignNextValue(writer);
        }
    }
}
| |
/**
* Copyright (C) 2010 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.inject.multibindings;
import static com.google.inject.multibindings.MapBinder.entryOfProviderOf;
import static com.google.inject.multibindings.MapBinder.mapOf;
import static com.google.inject.multibindings.MapBinder.mapOfProviderOf;
import static com.google.inject.multibindings.MapBinder.mapOfSetOfProviderOf;
import static com.google.inject.multibindings.Multibinder.setOf;
import static com.google.inject.multibindings.SpiUtils.BindType.INSTANCE;
import static com.google.inject.multibindings.SpiUtils.BindType.LINKED;
import static com.google.inject.multibindings.SpiUtils.BindType.PROVIDER_INSTANCE;
import static com.google.inject.multibindings.SpiUtils.VisitType.BOTH;
import static com.google.inject.multibindings.SpiUtils.VisitType.INJECTOR;
import static com.google.inject.multibindings.SpiUtils.VisitType.MODULE;
import static junit.framework.Assert.assertEquals;
import static junit.framework.Assert.assertNotNull;
import static junit.framework.Assert.assertTrue;
import static junit.framework.Assert.fail;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import com.google.inject.Binding;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Key;
import com.google.inject.Module;
import com.google.inject.TypeLiteral;
import com.google.inject.internal.util.Lists;
import com.google.inject.spi.DefaultBindingTargetVisitor;
import com.google.inject.spi.Element;
import com.google.inject.spi.Elements;
import com.google.inject.spi.InstanceBinding;
import com.google.inject.spi.LinkedKeyBinding;
import com.google.inject.spi.ProviderInstanceBinding;
import com.google.inject.spi.ProviderLookup;
/**
 * Utilities for testing the Multibinder & MapBinder extension SPI.
 *
 * Asserts that visiting the bindings of a Multibinder/MapBinder (either through a live Injector or through
 * raw module Elements) reports exactly the expected entries and auxiliary bindings.
 *
 * @author sameb@google.com (Sam Berlin)
 */
public class SpiUtils {
  /** The kind of test we should perform. A live Injector, a raw Elements (Module) test, or both. */
  enum VisitType { INJECTOR, MODULE, BOTH }
  /**
   * Asserts that MapBinderBinding visitors for work correctly.
   *
   * @param <T> The type of the binding
   * @param mapKey The key the map belongs to.
   * @param keyType the TypeLiteral of the key of the map
   * @param valueType the TypeLiteral of the value of the map
   * @param modules The modules that define the mapbindings
   * @param visitType The kind of test we should perform. A live Injector, a raw Elements (Module) test, or both.
   * @param allowDuplicates If duplicates are allowed.
   * @param expectedMapBindings The number of other mapbinders we expect to see.
   * @param results The kind of bindings contained in the mapbinder.
   */
  static <T> void assertMapVisitor(Key<T> mapKey, TypeLiteral<?> keyType, TypeLiteral<?> valueType,
      Iterable<? extends Module> modules, VisitType visitType, boolean allowDuplicates,
      int expectedMapBindings, MapResult... results) {
    if(visitType == null) {
      fail("must test something");
    }
    if (visitType == BOTH || visitType == INJECTOR) {
      mapInjectorTest(mapKey, keyType, valueType, modules, allowDuplicates, expectedMapBindings,
          results);
    }
    if (visitType == BOTH || visitType == MODULE) {
      mapModuleTest(mapKey, keyType, valueType, modules, allowDuplicates, expectedMapBindings,
          results);
    }
  }
  // Visits the map binding in a live injector and verifies entries, auxiliary bindings and containment.
  @SuppressWarnings("unchecked")
  private static <T> void mapInjectorTest(Key<T> mapKey, TypeLiteral<?> keyType,
      TypeLiteral<?> valueType, Iterable<? extends Module> modules, boolean allowDuplicates,
      int expectedMapBindings, MapResult... results) {
    Injector injector = Guice.createInjector(modules);
    Visitor<T> visitor = new Visitor<T>();
    Binding<T> mapBinding = injector.getBinding(mapKey);
    MapBinderBinding<T> mapbinder = (MapBinderBinding<T>)mapBinding.acceptTargetVisitor(visitor);
    assertNotNull(mapbinder);
    assertEquals(keyType, mapbinder.getKeyTypeLiteral());
    assertEquals(valueType, mapbinder.getValueTypeLiteral());
    assertEquals(allowDuplicates, mapbinder.permitsDuplicates());
    List<Map.Entry<?, Binding<?>>> entries = Lists.newArrayList(mapbinder.getEntries());
    List<MapResult> mapResults = Lists.newArrayList(results);
    assertEquals("wrong entries, expected: " + mapResults + ", but was: " + entries,
                 mapResults.size(), entries.size());
    // Match every expected result against exactly one entry, removing matched entries as we go.
    for(MapResult result : mapResults) {
      Map.Entry<?, Binding<?>> found = null;
      for(Map.Entry<?, Binding<?>> entry : entries) {
        Object key = entry.getKey();
        Binding<?> value = entry.getValue();
        if(!key.equals(result.k)) {
          continue;
        }
        switch (result.v.type) {
        case INSTANCE:
          if (value instanceof InstanceBinding
              && ((InstanceBinding) value).getInstance().equals(result.v.instance)) {
            found = entry;
          }
          break;
        case LINKED:
          if (value instanceof LinkedKeyBinding
              && ((LinkedKeyBinding) value).getKey().equals(result.v.key)) {
            found = entry;
          }
          break;
        case PROVIDER_INSTANCE:
          if (value instanceof ProviderInstanceBinding
              && ((ProviderInstanceBinding) value).getProviderInstance().get().equals(
                  result.v.instance)) {
            found = entry;
          }
          break;
        }
      }
      if(found == null) {
        fail("Could not find entry: " + result + " in remaining entries: " + entries);
      } else {
        assertTrue(mapbinder.containsElement(found.getValue()));
        entries.remove(found);
      }
    }
    if(!entries.isEmpty()) {
      fail("Found all entries of: " + mapResults + ", but more were left over: " + entries);
    }
    // Keys for the auxiliary bindings that MapBinder installs internally.
    Key<?> mapOfProvider = adapt(mapKey, mapOfProviderOf(keyType, valueType));
    Key<?> mapOfSetOfProvider = adapt(mapKey, mapOfSetOfProviderOf(keyType, valueType));
    Key<?> mapOfSet = adapt(mapKey, mapOf(keyType, setOf(valueType)));
    Key<?> setOfEntry = adapt(mapKey, setOf(entryOfProviderOf(keyType, valueType)));
    boolean entrySetMatch = false;
    boolean mapProviderMatch = false;
    boolean mapSetMatch = false;
    boolean mapSetProviderMatch = false;
    List<Object> otherMapBindings = Lists.newArrayList();
    List<Binding> otherMatches = Lists.newArrayList();
    for(Binding b : injector.getAllBindings().values()) {
      boolean contains = mapbinder.containsElement(b);
      Object visited = b.acceptTargetVisitor(visitor);
      if(visited instanceof MapBinderBinding) {
        if(visited.equals(mapbinder)) {
          assertTrue(contains);
        } else {
          otherMapBindings.add(visited);
        }
      } else if(b.getKey().equals(mapOfProvider)) {
        assertTrue(contains);
        mapProviderMatch = true;
      } else if(b.getKey().equals(mapOfSet)) {
        assertTrue(contains);
        mapSetMatch = true;
      } else if(b.getKey().equals(mapOfSetOfProvider)) {
        assertTrue(contains);
        mapSetProviderMatch = true;
      } else if(b.getKey().equals(setOfEntry)) {
        assertTrue(contains);
        entrySetMatch = true;
        // Validate that this binding is also a MultibinderBinding.
        assertTrue(b.acceptTargetVisitor(visitor) instanceof MultibinderBinding);
      } else if (contains) {
        otherMatches.add(b);
      }
    }
    int sizeOfOther = otherMatches.size();
    if(allowDuplicates) {
      sizeOfOther--; // account for 1 duplicate binding
    }
    sizeOfOther = sizeOfOther / 2; // account for 1 value & 1 Map.Entry of each expected binding.
    assertEquals("Incorrect other matches: " + otherMatches, mapResults.size(), sizeOfOther);
    assertTrue(entrySetMatch);
    assertTrue(mapProviderMatch);
    assertEquals(allowDuplicates, mapSetMatch);
    assertEquals(allowDuplicates, mapSetProviderMatch);
    assertEquals("other MapBindings found: " + otherMapBindings, expectedMapBindings,
        otherMapBindings.size());
  }
  /** Adapts a key, keeping the original annotation, using the new type literal. */
  private static Key<?> adapt(Key<?> mapKey, TypeLiteral<?> resultType) {
    if(mapKey.getAnnotation() != null) {
      return Key.get(resultType, mapKey.getAnnotation());
    } else if(mapKey.getAnnotationType() != null) {
      return Key.get(resultType, mapKey.getAnnotationType());
    } else {
      return Key.get(resultType);
    }
  }
  // Visits the map binding in raw module elements (pre-injector) and verifies the same properties.
  @SuppressWarnings("unchecked")
  private static <T> void mapModuleTest(Key<T> mapKey, TypeLiteral<?> keyType,
      TypeLiteral<?> valueType, Iterable<? extends Module> modules, boolean allowDuplicates,
      int expectedMapBindings, MapResult... results) {
    List<Element> elements = Elements.getElements(modules);
    Visitor<T> visitor = new Visitor<T>();
    MapBinderBinding<T> mapbinder = null;
    for(Element element : elements) {
      if(element instanceof Binding && ((Binding)element).getKey().equals(mapKey)) {
        mapbinder = (MapBinderBinding<T>)((Binding)element).acceptTargetVisitor(visitor);
        break;
      }
    }
    assertNotNull(mapbinder);
    assertEquals(keyType, mapbinder.getKeyTypeLiteral());
    assertEquals(valueType, mapbinder.getValueTypeLiteral());
    List<MapResult> mapResults = Lists.newArrayList(results);
    Key<?> mapOfProvider = adapt(mapKey, mapOfProviderOf(keyType, valueType));
    Key<?> mapOfSetOfProvider = adapt(mapKey, mapOfSetOfProviderOf(keyType, valueType));
    Key<?> mapOfSet = adapt(mapKey, mapOf(keyType, setOf(valueType)));
    Key<?> setOfEntry = adapt(mapKey, setOf(entryOfProviderOf(keyType, valueType)));
    boolean entrySetMatch = false;
    boolean mapProviderMatch = false;
    boolean mapSetMatch = false;
    boolean mapSetProviderMatch = false;
    List<Object> otherMapBindings = Lists.newArrayList();
    List<Element> otherMatches = Lists.newArrayList();
    // Elements not contained in the mapbinder are collected to rebuild an injector at the end.
    List<Element> otherElements = Lists.newArrayList();
    for(Element element : elements) {
      boolean contains = mapbinder.containsElement(element);
      if(!contains) {
        otherElements.add(element);
      }
      boolean matched = false;
      Key key = null;
      Binding b = null;
      if(element instanceof Binding) {
        b = (Binding)element;
        key = b.getKey();
        Object visited = b.acceptTargetVisitor(visitor);
        if(visited instanceof MapBinderBinding) {
          matched = true;
          if(visited.equals(mapbinder)) {
            assertTrue(contains);
          } else {
            otherMapBindings.add(visited);
          }
        }
      } else if(element instanceof ProviderLookup) {
        key = ((ProviderLookup)element).getKey();
      }
      if(!matched && key != null) {
        if(key.equals(mapOfProvider)) {
          matched = true;
          assertTrue(contains);
          mapProviderMatch = true;
        } else if(key.equals(mapOfSet)) {
          matched = true;
          assertTrue(contains);
          mapSetMatch = true;
        } else if(key.equals(mapOfSetOfProvider)) {
          matched = true;
          assertTrue(contains);
          mapSetProviderMatch = true;
        } else if(key.equals(setOfEntry)) {
          matched = true;
          assertTrue(contains);
          entrySetMatch = true;
          // Validate that this binding is also a MultibinderBinding.
          if(b != null) {
            assertTrue(b.acceptTargetVisitor(visitor) instanceof MultibinderBinding);
          }
        }
      }
      if(!matched && contains) {
        otherMatches.add(element);
      }
    }
    int otherMatchesSize = otherMatches.size();
    if(allowDuplicates) {
      otherMatchesSize--; // allow for 1 duplicate binding
    }
    otherMatchesSize = otherMatchesSize / 3; // value, ProviderLookup per value, Map.Entry per value
    assertEquals("incorrect number of contains, leftover matches: " + otherMatches, mapResults
        .size(), otherMatchesSize);
    assertTrue(entrySetMatch);
    assertTrue(mapProviderMatch);
    assertEquals(allowDuplicates, mapSetMatch);
    assertEquals(allowDuplicates, mapSetProviderMatch);
    assertEquals("other MapBindings found: " + otherMapBindings, expectedMapBindings,
        otherMapBindings.size());
    // Validate that we can construct an injector out of the remaining bindings.
    Guice.createInjector(Elements.getModule(otherElements));
  }
  /**
   * Asserts that MultibinderBinding visitors work correctly.
   *
   * @param <T> The type of the binding
   * @param setKey The key the set belongs to.
   * @param elementType the TypeLiteral of the element
   * @param modules The modules that define the multibindings
   * @param visitType The kind of test we should perform. A live Injector, a raw Elements (Module) test, or both.
   * @param allowDuplicates If duplicates are allowed.
   * @param expectedMultibindings The number of other multibinders we expect to see.
   * @param results The kind of bindings contained in the multibinder.
   */
  static <T> void assertSetVisitor(Key<T> setKey, TypeLiteral<?> elementType,
      Iterable<? extends Module> modules, VisitType visitType, boolean allowDuplicates,
      int expectedMultibindings, BindResult... results) {
    if(visitType == null) {
      fail("must test something");
    }
    if(visitType == BOTH || visitType == INJECTOR) {
      setInjectorTest(setKey, elementType, modules, allowDuplicates, expectedMultibindings, results);
    }
    if(visitType == BOTH || visitType == MODULE) {
      setModuleTest(setKey, elementType, modules, allowDuplicates, expectedMultibindings, results);
    }
  }
  // Visits the set binding in a live injector and verifies elements and containment.
  @SuppressWarnings("unchecked")
  private static <T> void setInjectorTest(Key<T> setKey, TypeLiteral<?> elementType,
      Iterable<? extends Module> modules, boolean allowDuplicates, int otherMultibindings,
      BindResult... results) {
    Injector injector = Guice.createInjector(modules);
    Visitor<T> visitor = new Visitor<T>();
    Binding<T> binding = injector.getBinding(setKey);
    MultibinderBinding<T> multibinder = (MultibinderBinding<T>)binding.acceptTargetVisitor(visitor);
    assertNotNull(multibinder);
    assertEquals(elementType, multibinder.getElementTypeLiteral());
    assertEquals(allowDuplicates, multibinder.permitsDuplicates());
    List<Binding<?>> elements = Lists.newArrayList(multibinder.getElements());
    List<BindResult> bindResults = Lists.newArrayList(results);
    assertEquals("wrong bind elements, expected: " + bindResults + ", but was: " + multibinder.getElements(),
        bindResults.size(), elements.size());
    // Match every expected result against exactly one element, removing matched elements as we go.
    for(BindResult result : bindResults) {
      Binding found = null;
      for(Binding item : elements) {
        switch (result.type) {
        case INSTANCE:
          if (item instanceof InstanceBinding
              && ((InstanceBinding) item).getInstance().equals(result.instance)) {
            found = item;
          }
          break;
        case LINKED:
          if (item instanceof LinkedKeyBinding
              && ((LinkedKeyBinding) item).getKey().equals(result.key)) {
            found = item;
          }
          break;
        case PROVIDER_INSTANCE:
          if (item instanceof ProviderInstanceBinding
              && ((ProviderInstanceBinding) item).getProviderInstance().get().equals(
                  result.instance)) {
            found = item;
          }
          break;
        }
      }
      if(found == null) {
        fail("Could not find element: " + result + " in remaining elements: " + elements);
      } else {
        elements.remove(found);
      }
    }
    if(!elements.isEmpty()) {
      fail("Found all elements of: " + bindResults + ", but more were left over: " + elements);
    }
    Set<Binding> setOfElements = new HashSet<Binding>(multibinder.getElements());
    List<Object> otherMultibinders = Lists.newArrayList();
    List<Binding> otherContains = Lists.newArrayList();
    for(Binding b : injector.getAllBindings().values()) {
      boolean contains = multibinder.containsElement(b);
      Object visited = b.acceptTargetVisitor(visitor);
      if(visited != null) {
        if(visited.equals(multibinder)) {
          assertTrue(contains);
        } else {
          otherMultibinders.add(visited);
        }
      } else if(setOfElements.contains(b)) {
        assertTrue(contains);
      } else if(contains) {
        otherContains.add(b);
      }
    }
    if(allowDuplicates) {
      assertEquals("contained more than it should: " + otherContains, 1, otherContains.size());
    } else {
      assertTrue("contained more than it should: " + otherContains, otherContains.isEmpty());
    }
    assertEquals("other multibindings found: " + otherMultibinders, otherMultibindings,
        otherMultibinders.size());
  }
  // Visits the set binding in raw module elements (pre-injector) and verifies the same properties.
  @SuppressWarnings("unchecked")
  private static <T> void setModuleTest(Key<T> setKey, TypeLiteral<?> elementType,
      Iterable<? extends Module> modules, boolean allowDuplicates, int otherMultibindings,
      BindResult... results) {
    List<BindResult> bindResults = Lists.newArrayList(results);
    List<Element> elements = Elements.getElements(modules);
    Visitor<T> visitor = new Visitor<T>();
    MultibinderBinding<T> multibinder = null;
    for(Element element : elements) {
      if(element instanceof Binding && ((Binding)element).getKey().equals(setKey)) {
        multibinder = (MultibinderBinding<T>)((Binding)element).acceptTargetVisitor(visitor);
        break;
      }
    }
    assertNotNull(multibinder);
    assertEquals(elementType, multibinder.getElementTypeLiteral());
    List<Object> otherMultibinders = Lists.newArrayList();
    Set<Element> otherContains = new HashSet<Element>();
    // Elements not contained in the multibinder are collected to rebuild an injector at the end.
    List<Element> otherElements = Lists.newArrayList();
    for(Element element : elements) {
      boolean contains = multibinder.containsElement(element);
      if(!contains) {
        otherElements.add(element);
      }
      boolean matched = false;
      if(element instanceof Binding) {
        Binding binding = (Binding)element;
        Object visited = binding.acceptTargetVisitor(visitor);
        if(visited != null) {
          matched = true;
          if(visited.equals(multibinder)) {
            assertTrue(contains);
          } else {
            otherMultibinders.add(visited);
          }
        }
      }
      if(!matched && contains) {
        otherContains.add(element);
      }
    }
    if(allowDuplicates) {
      assertEquals("wrong contained elements: " + otherContains, bindResults.size() + 1, otherContains.size());
    } else {
      assertEquals("wrong contained elements: " + otherContains, bindResults.size(), otherContains.size());
    }
    assertEquals("other multibindings found: " + otherMultibinders, otherMultibindings,
        otherMultibinders.size());
    // Validate that we can construct an injector out of the remaining bindings.
    Guice.createInjector(Elements.getModule(otherElements));
  }
  /** Expected map entry whose value is bound to a concrete instance. */
  static <K, V> MapResult instance(K k, V v) {
    return new MapResult<K, V>(k, new BindResult<V>(INSTANCE, v, null));
  }
  /** Expected map entry whose value is linked to another class. */
  static <K, V> MapResult linked(K k, Class<? extends V> clazz) {
    return new MapResult<K, V>(k, new BindResult<V>(LINKED, null, Key.get(clazz)));
  }
  /** Expected map entry whose value is linked to another key. */
  static <K, V> MapResult linked(K k, Key<? extends V> key) {
    return new MapResult<K, V>(k, new BindResult<V>(LINKED, null, key));
  }
  /** Expected map entry whose value is supplied by a provider instance. */
  static <K, V> MapResult providerInstance(K k, V v) {
    return new MapResult<K, V>(k, new BindResult<V>(PROVIDER_INSTANCE, v, null));
  }
  /** An expected key/value pair in a MapBinder under test. */
  private static class MapResult<K, V> {
    private final K k;
    private final BindResult<V> v;
    MapResult(K k, BindResult<V> v) {
      this.k = k;
      this.v = v;
    }
    @Override
    public String toString() {
      return "entry[key[" + k + "],value[" + v + "]]";
    }
  }
  /** Expected set element bound to a concrete instance. */
  static <T> BindResult instance(T t) {
    return new BindResult<T>(INSTANCE, t, null);
  }
  /** Expected set element linked to another class. */
  static <T> BindResult linked(Class<? extends T> clazz) {
    return new BindResult<T>(LINKED, null, Key.get(clazz));
  }
  /** Expected set element linked to another key. */
  static <T> BindResult linked(Key<? extends T> key) {
    return new BindResult<T>(LINKED, null, key);
  }
  /** Expected set element supplied by a provider instance. */
  static <T> BindResult providerInstance(T t) {
    return new BindResult<T>(PROVIDER_INSTANCE, t, null);
  }
  /** The kind of binding. */
  static enum BindType { INSTANCE, LINKED, PROVIDER_INSTANCE }
  /** The result of the binding. Exactly one of {@code key} or {@code instance} is set, depending on the type. */
  private static class BindResult<T> {
    private final BindType type;
    private final Key<? extends T> key;
    private final T instance;
    private BindResult(BindType type, T instance, Key<? extends T> key) {
      this.type = type;
      this.instance = instance;
      this.key = key;
    }
    @Override
    public String toString() {
      switch(type) {
      case INSTANCE:
        return "instance[" + instance + "]";
      case LINKED:
        return "linkedKey[" + key + "]";
      case PROVIDER_INSTANCE:
        return "providerInstance[" + instance + "]";
      }
      return null;
    }
  }
  /** Visitor that returns the multibinding/mapbinding itself (and null for everything else via the default). */
  private static class Visitor<T> extends
      DefaultBindingTargetVisitor<T, Object> implements MultibindingsTargetVisitor<T, Object> {
    public Object visit(MultibinderBinding<? extends T> multibinding) {
      return multibinding;
    }
    public Object visit(MapBinderBinding<? extends T> mapbinding) {
      return mapbinding;
    }
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
/* Generated By:JJTree: Do not edit this line. ASTExpr.java */
/* JJT: 0.3pre1 */
package Mini;
import org.apache.bcel.generic.BranchHandle;
import org.apache.bcel.generic.ConstantPoolGen;
import org.apache.bcel.generic.GOTO;
import org.apache.bcel.generic.IF_ICMPEQ;
import org.apache.bcel.generic.IF_ICMPGE;
import org.apache.bcel.generic.IF_ICMPGT;
import org.apache.bcel.generic.IF_ICMPLE;
import org.apache.bcel.generic.IF_ICMPLT;
import org.apache.bcel.generic.IF_ICMPNE;
import org.apache.bcel.generic.InstructionConstants;
import org.apache.bcel.generic.InstructionList;
import org.apache.bcel.generic.MethodGen;
import org.apache.bcel.generic.PUSH;
/**
* Represents arithmetic expressions such as `(a + 12 == b) OR c'.
* The parse tree is built initially by the parser and modified, i.e.
* compacted with `traverse()'. Each (Expr, Term, Factor) node
* with kind == -1 is replaced which its successor node, which is
* converted to type `Expr'
*
* A node with kind == -1 denotes the fact that this expression
* node has just one child branch and thus may be replaced by this
* branch (or leaf) directly without altering the expression
* semantics. Term and Factor nodes are used only to build the parse tree
* obeying the aritmetical precedences (* stronger than +, etc.) and
* are discarded in the first pass.
*/
public class ASTExpr extends SimpleNode
implements MiniParserConstants, MiniParserTreeConstants, org.apache.bcel.Constants {
    protected int kind = -1;      // Single twig to leaf?
    private int unop = -1;        // Special case: unary operator applied
    protected ASTExpr[] exprs;    // Sub expressions
    protected Environment env;    // Needed in all passes
    protected int line, column;   // Source position, used in error messages
    protected boolean is_simple;  // true, if simple expression like `12 + f(a)'
    /* Not all children shall inherit this, exceptions are ASTIdent and ASTFunAppl, which
     * look up the type in the corresponding environment entry.
     */
    protected int type = T_UNKNOWN;

    // Generated methods
    ASTExpr(final int id) {
        super(id);
    }

    ASTExpr(final MiniParser p, final int id) {
        super(p, id);
    }

    public static Node jjtCreate(final MiniParser p, final int id) {
        return new ASTExpr(p, id);
    }

    /** Creates an expression node at a known source position. */
    ASTExpr(final int line, final int column, final int id) {
        super(id);
        this.line = line;
        this.column = column;
    }

    /** Creates an expression node at a known source position with an operator kind. */
    ASTExpr(final int line, final int column, final int kind, final int id) {
        this(line, column, id);
        this.kind = kind;
    }

    /* Special constructor, called from ASTTerm.traverse() and
     * ASTFactor.traverse(), when traverse()ing the parse tree replace
     * themselves with Expr nodes.
     */
    ASTExpr(final ASTExpr[] children, final int kind, final int line, final int column) {
        this(line, column, kind, JJTEXPR);
        exprs = children;
    }

    /**
     * @return name of node, its kind and the number of children.
     */
    @Override
    public String toString() {
        String op = "";
        final int len = (children != null) ? children.length : 0;
        if (unop != -1) {
            op = tokenImage[unop];
        } else if (kind != -1) {
            op = tokenImage[kind];
        }
        return jjtNodeName[id] + "(" + op + ")[" + len + "]<" +
                TYPE_NAMES[type] + "> @" + line + ", " + column;
    }

    /**
     * Overrides SimpleNode.closeNode(). Overridden by some subclasses.
     *
     * Called by the parser when the construction of this node is finished.
     * Casts children Node[] to precise ASTExpr[] type.
     */
    @Override
    public void closeNode() {
        if (children != null) {
            exprs = new ASTExpr[children.length];
            System.arraycopy(children, 0, exprs, 0, children.length);
            children = null; // Throw away old reference
        }
    }

    /**
     * First pass: traverse the whole parse tree recursively and drop
     * redundant nodes. A node with kind == -1 and no unary operator wraps
     * exactly one child and is replaced by that child.
     *
     * @param env symbol environment, remembered for the later passes
     * @return the (possibly replaced) expression node
     */
    public ASTExpr traverse(final Environment env) {
        this.env = env;
        if ((kind == -1) && (unop == -1)) {
            return exprs[0].traverse(env); // --> Replaced by successor
        }
        for (int i = 0; i < exprs.length; i++) {
            exprs[i] = exprs[i].traverse(env); // References may change
        }
        return this;
    }

    /**
     * Second and third pass: type checking. Reports a compile error for
     * every child whose type does not match the operator's operand type.
     *
     * @param expected the type expected by the enclosing expression
     * @return type of this expression
     */
    public int eval(final int expected) {
        int child_type = T_UNKNOWN, t;
        is_simple = true;
        // Determine expected node type depending on used operator.
        if (unop != -1) {
            if (unop == MINUS) {
                child_type = type = T_INT; // -
            } else {
                child_type = type = T_BOOLEAN; // !
            }
        } else {
            // Compute expected type
            if ((kind == PLUS) || (kind == MINUS) || (kind == MULT) ||
                    (kind == MOD) || (kind == DIV)) {
                child_type = type = T_INT;
            } else if ((kind == AND) || (kind == OR)) {
                child_type = type = T_BOOLEAN;
            } else { // LEQ, GT, etc.: int operands, boolean result
                child_type = T_INT;
                type = T_BOOLEAN;
            }
        }
        // Get type of subexpressions
        for (final ASTExpr expr : exprs) {
            t = expr.eval(child_type);
            if (t != child_type) {
                MiniC.addError(expr.getLine(), expr.getColumn(),
                        "Expression has not expected type " + TYPE_NAMES[child_type] +
                        " but " + TYPE_NAMES[t] + ".");
            }
            is_simple = is_simple && expr.isSimple();
        }
        return type;
    }

    // Renders an int-valued code fragment as a boolean Java expression.
    private static String toBool(final String i) {
        return "(" + i + " != 0)";
    }

    // Renders a boolean-valued code fragment as an int (1/0) Java expression.
    private static String toInt(final String i) {
        return "((" + i + ")? 1 : 0)";
    }

    /**
     * Fourth pass, produce Java code.
     */
    public void code(final StringBuffer buf) {
        if (unop != -1) {
            exprs[0].code(buf);
            final String top = ASTFunDecl.pop();
            if (unop == MINUS) {
                ASTFunDecl.push(buf, "-" + top);
            } else { // logical NOT: map 1 -> 0 and everything else -> 1
                // FIX: the emitted expression previously had unbalanced
                // parentheses ("(x == 1)? 0 : 1)"); wrap it like toInt() does.
                ASTFunDecl.push(buf, "((" + top + " == 1)? 0 : 1)");
            }
        } else {
            exprs[0].code(buf);
            exprs[1].code(buf);
            final String _body_int2 = ASTFunDecl.pop();
            final String _body_int = ASTFunDecl.pop();
            switch (kind) {
            case PLUS:  ASTFunDecl.push(buf, _body_int + " + " + _body_int2); break;
            case MINUS: ASTFunDecl.push(buf, _body_int + " - " + _body_int2); break;
            case MULT:  ASTFunDecl.push(buf, _body_int + " * " + _body_int2); break;
            case DIV:   ASTFunDecl.push(buf, _body_int + " / " + _body_int2); break;
            // FIX: eval() accepts MOD as an int operator, but this switch
            // previously fell through to "Unhandled case".
            case MOD:   ASTFunDecl.push(buf, _body_int + " % " + _body_int2); break;
            case AND:   ASTFunDecl.push(buf, toInt(toBool(_body_int) + " && " +
                    toBool(_body_int2))); break;
            case OR:    ASTFunDecl.push(buf, toInt(toBool(_body_int) + " || " +
                    toBool(_body_int2))); break;
            case EQ:  ASTFunDecl.push(buf, toInt(_body_int + " == " + _body_int2));
                break;
            case LEQ: ASTFunDecl.push(buf, toInt(_body_int + " <= " + _body_int2));
                break;
            case GEQ: ASTFunDecl.push(buf, toInt(_body_int + " >= " + _body_int2));
                break;
            case NEQ: ASTFunDecl.push(buf, toInt(_body_int + " != " + _body_int2));
                break;
            case LT:  ASTFunDecl.push(buf, toInt(_body_int + " < " + _body_int2));
                break;
            case GT:  ASTFunDecl.push(buf, toInt(_body_int + " > " + _body_int2));
                break;
            default: System.err.println("Unhandled case: " + kind);
            }
        }
    }

    /**
     * Fifth pass, produce Java byte code.
     */
    public void byte_code(final InstructionList il, final MethodGen method, final ConstantPoolGen cp) {
        exprs[0].byte_code(il, method, cp);
        if (unop != -1) { // Apply unary operand
            if (unop == MINUS) {
                il.append(InstructionConstants.INEG);
            } else { // == NOT: booleans are encoded as 0/1, so x ^ 1 flips them
                il.append(new PUSH(cp, 1)); ASTFunDecl.push(); // Push TRUE
                il.append(InstructionConstants.IXOR); ASTFunDecl.pop();
            }
        } else { // Apply binary operand
            BranchHandle bh = null;
            exprs[1].byte_code(il, method, cp);
            switch (kind) {
            case PLUS:  il.append(InstructionConstants.IADD); ASTFunDecl.pop(); break;
            case MINUS: il.append(InstructionConstants.ISUB); ASTFunDecl.pop(); break;
            case MULT:  il.append(InstructionConstants.IMUL); ASTFunDecl.pop(); break;
            case DIV:   il.append(InstructionConstants.IDIV); ASTFunDecl.pop(); break;
            // FIX: MOD was previously unhandled; IREM matches the "%"
            // emitted by the source-code pass.
            case MOD:   il.append(InstructionConstants.IREM); ASTFunDecl.pop(); break;
            case AND:   il.append(InstructionConstants.IAND); ASTFunDecl.pop(); break;
            case OR:    il.append(InstructionConstants.IOR); ASTFunDecl.pop(); break;
            /* Use negated operands */
            case EQ:  bh = il.append(new IF_ICMPNE(null)); ASTFunDecl.pop(2); break;
            case LEQ: bh = il.append(new IF_ICMPGT(null)); ASTFunDecl.pop(2); break;
            case GEQ: bh = il.append(new IF_ICMPLT(null)); ASTFunDecl.pop(2); break;
            case NEQ: bh = il.append(new IF_ICMPEQ(null)); ASTFunDecl.pop(2); break;
            case LT:  bh = il.append(new IF_ICMPGE(null)); ASTFunDecl.pop(2); break;
            case GT:  bh = il.append(new IF_ICMPLE(null)); ASTFunDecl.pop(2); break;
            default: System.err.println("Unhandled case: " + kind);
            }
            switch (kind) {
            case EQ: case LEQ: case GEQ: case NEQ: case LT: case GT:
                // Materialize the comparison result as 1 (true) / 0 (false).
                BranchHandle g;
                il.append(new PUSH(cp, 1));
                g = il.append(new GOTO(null));
                bh.setTarget(il.append(new PUSH(cp, 0)));
                g.setTarget(il.append(InstructionConstants.NOP)); // May be optimized away later
                ASTFunDecl.push();
                break;
            default: break;
            }
        }
    }

    public boolean isSimple() { return is_simple; }
    public void setType(final int type) { this.type = type; }
    public int getType() { return type; }
    public void setKind(final int kind) { this.kind = kind; }
    public int getKind() { return kind; }
    public void setUnOp(final int unop) { this.unop = unop; }
    public int getUnOp() { return unop; }
    public void setLine(final int line) { this.line = line; }
    public int getLine() { return line; }
    public void setColumn(final int column) { this.column = column; }
    public int getColumn() { return column; }
    public void setPosition(final int line, final int column) {
        this.line = line;
        this.column = column;
    }

    /** Prints this subtree, one node per line, indented by nesting depth. */
    @Override
    public void dump(final String prefix) {
        System.out.println(toString(prefix));
        if (exprs != null) {
            for (final ASTExpr expr : exprs) {
                expr.dump(prefix + " ");
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.adapter.os;
import org.apache.calcite.linq4j.AbstractEnumerable;
import org.apache.calcite.linq4j.Enumerable;
import org.apache.calcite.linq4j.Enumerator;
import java.io.BufferedInputStream;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.function.Supplier;
/**
* Utilities regarding operating system processes.
*
* <p>WARNING: Spawning processes is not secure.
* Use this class with caution.
* This class is in the "plus" module because "plus" is not used by default.
* Do not move this class to the "core" module.
*/
public class Processes {
  private Processes() {}

  /** Executes a command and returns its result as an enumerable of lines. */
  static Enumerable<String> processLines(String... args) {
    return processLines(' ', args);
  }

  /** Executes a command and returns its result as an enumerable of lines.
   *
   * @param sep Separator character
   * @param args Command and its arguments
   */
  static Enumerable<String> processLines(char sep, String... args) {
    return processLines(sep, processSupplier(args));
  }

  /** Executes a command and returns its result as an enumerable of lines.
   *
   * @param sep Separator character
   * @param processSupplier Command and its arguments
   */
  private static Enumerable<String> processLines(char sep,
      Supplier<Process> processSupplier) {
    if (sep != ' ') {
      return new SeparatedLinesEnumerable(processSupplier, sep);
    } else {
      return new ProcessLinesEnumerable(processSupplier);
    }
  }

  private static Supplier<Process> processSupplier(final String... args) {
    return new ProcessFactory(args);
  }

  /** Enumerable that executes a process and yields each line of its standard
   * output as an element.
   *
   * <p>(Was misnamed/misdocumented as an "Enumerator"; it is an
   * {@link AbstractEnumerable} that creates a fresh enumerator, and hence a
   * fresh process, per call.) */
  private static class ProcessLinesEnumerable
      extends AbstractEnumerable<String> {
    private final Supplier<Process> processSupplier;

    ProcessLinesEnumerable(Supplier<Process> processSupplier) {
      this.processSupplier = processSupplier;
    }

    public Enumerator<String> enumerator() {
      final Process process = processSupplier.get();
      final InputStream is = process.getInputStream();
      final BufferedInputStream bis =
          new BufferedInputStream(is);
      // Decode explicitly as UTF-8, not the platform default charset.
      final InputStreamReader isr =
          new InputStreamReader(bis, StandardCharsets.UTF_8);
      final BufferedReader br = new BufferedReader(isr);
      return new Enumerator<String>() {
        private String line;

        public String current() {
          return line;
        }

        public boolean moveNext() {
          try {
            line = br.readLine();
            return line != null;
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        }

        public void reset() {
          throw new UnsupportedOperationException();
        }

        public void close() {
          // Close the reader, then kill the process so it cannot linger.
          try {
            br.close();
          } catch (IOException e) {
            throw new RuntimeException("while running " + processSupplier, e);
          }
          process.destroy();
        }
      };
    }
  }

  /** Enumerable that executes a process and yields chunks of its standard
   * output, split on a separator character, as elements. */
  private static class SeparatedLinesEnumerable
      extends AbstractEnumerable<String> {
    private final Supplier<Process> processSupplier;
    private final int sep;

    SeparatedLinesEnumerable(Supplier<Process> processSupplier, char sep) {
      this.processSupplier = processSupplier;
      this.sep = sep;
    }

    public Enumerator<String> enumerator() {
      final Process process = processSupplier.get();
      final InputStream is = process.getInputStream();
      final BufferedInputStream bis =
          new BufferedInputStream(is);
      // Decode explicitly as UTF-8, not the platform default charset.
      final InputStreamReader isr =
          new InputStreamReader(bis, StandardCharsets.UTF_8);
      final BufferedReader br = new BufferedReader(isr);
      return new Enumerator<String>() {
        private final StringBuilder b = new StringBuilder();
        private String line;

        public String current() {
          return line;
        }

        public boolean moveNext() {
          try {
            // Accumulate characters until the separator; a trailing chunk
            // without a final separator is discarded (as before).
            for (;;) {
              int c = br.read();
              if (c < 0) {
                return false;
              }
              if (c == sep) {
                line = b.toString();
                b.setLength(0);
                return true;
              }
              b.append((char) c);
            }
          } catch (IOException e) {
            throw new RuntimeException(e);
          }
        }

        public void reset() {
          throw new UnsupportedOperationException();
        }

        public void close() {
          // Close the reader, then kill the process so it cannot linger.
          try {
            br.close();
          } catch (IOException e) {
            throw new RuntimeException("while running " + processSupplier, e);
          }
          process.destroy();
        }
      };
    }
  }

  /** Creates processes from a fixed command line. */
  private static class ProcessFactory implements Supplier<Process> {
    private final String[] args;

    ProcessFactory(String... args) {
      this.args = args;
    }

    public Process get() {
      try {
        return new ProcessBuilder().command(args).start();
      } catch (IOException e) {
        throw new RuntimeException("while creating process: "
            + Arrays.toString(args), e);
      }
    }

    @Override public String toString() {
      return args[0];
    }
  }
}
| |
/* JAI-Ext - OpenSource Java Advanced Image Extensions Library
* http://www.geo-solutions.it/
* Copyright 2014 GeoSolutions
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package it.geosolutions.jaiext.range;
import java.awt.image.DataBuffer;
import java.util.ArrayList;
import java.util.List;
/**
* This class is a factory class which creates a {@link Range} object for the specific data type. This Range can have 2 bounds or be a single-point
* range. If the 2 bound values are equal and almost one of them is included, then a single-point range is created, else an exception is thrown. If
* the minimum bound value is bigger than the maximum value, then the 2 numbers are inverted at the Range creation time.
*/
public class RangeFactory {

    /** Tolerance used by {@link #equals(double, double)} when comparing bounds. */
    private static final double TOLERANCE = 1E-6;

    // Private Constructor for avoiding a new factory instantiation
    private RangeFactory() {
    }

    // Byte data
    public static Range create(byte minValue, boolean minIncluded, byte maxValue,
            boolean maxIncluded) {
        return new RangeByte(minValue, minIncluded, maxValue, maxIncluded);
    }

    // Ushort data
    public static Range createU(short minValue, boolean minIncluded, short maxValue,
            boolean maxIncluded) {
        return new RangeUshort(minValue, minIncluded, maxValue, maxIncluded);
    }

    // Short data
    public static Range create(short minValue, boolean minIncluded, short maxValue,
            boolean maxIncluded) {
        return new RangeShort(minValue, minIncluded, maxValue, maxIncluded);
    }

    // Integer data
    public static Range create(int minValue, boolean minIncluded, int maxValue, boolean maxIncluded) {
        return new RangeInt(minValue, minIncluded, maxValue, maxIncluded);
    }

    // Float data
    public static Range create(float minValue, boolean minIncluded, float maxValue,
            boolean maxIncluded, boolean nanIncluded) {
        return new RangeFloat(minValue, minIncluded, maxValue, maxIncluded, nanIncluded);
    }

    // Double data
    public static Range create(double minValue, boolean minIncluded, double maxValue,
            boolean maxIncluded, boolean nanIncluded) {
        return new RangeDouble(minValue, minIncluded, maxValue, maxIncluded, nanIncluded);
    }

    // Byte data, both bounds included
    public static Range create(byte minValue, byte maxValue) {
        return new RangeByte(minValue, true, maxValue, true);
    }

    // Ushort data, both bounds included
    public static Range createU(short minValue, short maxValue) {
        return new RangeUshort(minValue, true, maxValue, true);
    }

    // Short data, both bounds included
    public static Range create(short minValue, short maxValue) {
        return new RangeShort(minValue, true, maxValue, true);
    }

    // Integer data, both bounds included
    public static Range create(int minValue, int maxValue) {
        return new RangeInt(minValue, true, maxValue, true);
    }

    // Float data, both bounds included, NaN excluded
    public static Range create(float minValue, float maxValue) {
        return new RangeFloat(minValue, true, maxValue, true, false);
    }

    // Double data, both bounds included, NaN excluded
    public static Range create(double minValue, double maxValue) {
        return new RangeDouble(minValue, true, maxValue, true, false);
    }

    // Float data, NaN excluded
    public static Range create(float minValue, boolean minIncluded, float maxValue,
            boolean maxIncluded) {
        return new RangeFloat(minValue, minIncluded, maxValue, maxIncluded, false);
    }

    // Double data, NaN excluded
    public static Range create(double minValue, boolean minIncluded, double maxValue,
            boolean maxIncluded) {
        return new RangeDouble(minValue, minIncluded, maxValue, maxIncluded, false);
    }

    // Long data
    public static Range create(long minValue, boolean minIncluded, long maxValue,
            boolean maxIncluded) {
        return new RangeLong(minValue, minIncluded, maxValue, maxIncluded);
    }

    /**
     * Converts the input range to a double range, preserving bounds,
     * inclusiveness and NaN handling.
     */
    public static Range convertToDoubleRange(Range input) {
        // If already double do nothing
        if (input instanceof RangeDouble) {
            return input;
        }
        // Otherwise get minimum and maximum values and convert it
        double min = input.getMin().doubleValue();
        double max = input.getMax().doubleValue();
        boolean minIncluded = input.isMinIncluded();
        boolean maxIncluded = input.isMaxIncluded();
        boolean nanIncluded = input.isNanIncluded();
        // New Double range
        return new RangeDouble(min, minIncluded, max, maxIncluded, nanIncluded);
    }

    /**
     * Converts the input range to a float range, preserving bounds,
     * inclusiveness and NaN handling.
     */
    public static Range convertToFloatRange(Range input) {
        // If already float do nothing
        if (input instanceof RangeFloat) {
            return input;
        }
        // Otherwise get minimum and maximum values and convert it
        float min = input.getMin().floatValue();
        float max = input.getMax().floatValue();
        boolean minIncluded = input.isMinIncluded();
        boolean maxIncluded = input.isMaxIncluded();
        boolean nanIncluded = input.isNanIncluded();
        // New Float range
        return new RangeFloat(min, minIncluded, max, maxIncluded, nanIncluded);
    }

    /**
     * Converts a range to the given {@link DataBuffer} data type.
     *
     * @param input range to convert; may be null
     * @param dataType one of the {@code DataBuffer.TYPE_*} constants
     * @return the converted range, the input itself if it already has the
     *         requested type, or null for a null input or an unsupported
     *         data type (there is no DataBuffer type for long)
     */
    public static Range convert(Range input, int dataType) {
        if (input == null) {
            return null;
        }
        // If the data type already matches, do nothing
        if (input.getDataType().getDataType() == dataType) {
            return input;
        }
        boolean minIncluded = input.isMinIncluded();
        boolean maxIncluded = input.isMaxIncluded();
        Number min = input.getMin();
        Number max = input.getMax();
        switch (dataType) {
        case DataBuffer.TYPE_BYTE:
            return new RangeByte(min.byteValue(), minIncluded, max.byteValue(), maxIncluded);
        case DataBuffer.TYPE_USHORT:
            return new RangeUshort(min.shortValue(), minIncluded, max.shortValue(), maxIncluded);
        case DataBuffer.TYPE_SHORT:
            return new RangeShort(min.shortValue(), minIncluded, max.shortValue(), maxIncluded);
        case DataBuffer.TYPE_INT:
            return new RangeInt(min.intValue(), minIncluded, max.intValue(), maxIncluded);
        case DataBuffer.TYPE_FLOAT:
            return new RangeFloat(min.floatValue(), minIncluded, max.floatValue(), maxIncluded, input.isNanIncluded());
        case DataBuffer.TYPE_DOUBLE:
            // FIX: previously used floatValue() here, silently truncating
            // the bounds of the double range to float precision.
            return new RangeDouble(min.doubleValue(), minIncluded, max.doubleValue(), maxIncluded, input.isNanIncluded());
        default:
            return null;
        }
    }

    /**
     * Converts the input range to a byte range, preserving bounds and
     * inclusiveness (NaN handling does not apply to integral ranges).
     */
    public static Range convertToByteRange(Range input) {
        // If already byte do nothing
        if (input instanceof RangeByte) {
            return input;
        }
        // Otherwise get minimum and maximum values and convert it
        byte min = input.getMin().byteValue();
        byte max = input.getMax().byteValue();
        boolean minIncluded = input.isMinIncluded();
        boolean maxIncluded = input.isMaxIncluded();
        // New Byte range
        return new RangeByte(min, minIncluded, max, maxIncluded);
    }

    /**
     * Subtracts {@code r1} from {@code r2}: returns the part(s) of r2 not
     * covered by r1 — an empty list when r2 is fully covered, one range when
     * r1 clips one side, and two ranges when r1 falls strictly inside r2.
     */
    public static List<Range> subtract(Range r1, Range r2) {
        // Creation of the Range List
        List<Range> list = new ArrayList<Range>();
        // Populating the list
        /*
         * Check for equality between inputs
         */
        if (r1.equals(r2)) {
            return list; // empty list
        }
        Range common = intersect(r1, r2);
        /*
         * Check for no overlap between inputs
         */
        if (common == null) {
            list.add(r2);
            return list;
        }
        /*
         * Check if r1 enclosed r2
         */
        if (common.equals(r2)) {
            return list; // empty list
        }
        // Checks on the minimum/maximum
        double min1 = r1.getMin().doubleValue();
        double min2 = r2.getMin().doubleValue();
        double max1 = r1.getMax().doubleValue();
        double max2 = r2.getMax().doubleValue();
        // Checks on the comparison between the min and max
        boolean minmin = equals(min1, min2);
        boolean maxmax = equals(max1, max2);
        boolean minmax = equals(min1, max2);
        boolean maxmin = equals(max1, min2);
        // Case 0a) min1 equals to max2
        if (minmax) {
            if (r1.isMinIncluded()) {
                Range r = RangeFactory.create(min2, r2.isMinIncluded(), max2, false);
                list.add(r);
                return list;
            } else {
                list.add(r2);
                return list;
            }
        }
        // Case 0b) min2 equals to max1
        if (maxmin) {
            if (r1.isMaxIncluded()) {
                Range r = RangeFactory.create(min2, false, max2, r2.isMinIncluded());
                list.add(r);
                return list;
            } else {
                list.add(r2);
                return list;
            }
        }
        // Case 1) equal minimums and different max values
        if (minmin && max2 > max1 && !maxmax) {
            Range r = RangeFactory.create(max1, !r1.isMaxIncluded(), max2, r2.isMaxIncluded());
            list.add(r);
            return list;
        }
        // Case 2) equal maximum and different min values
        if (maxmax && min2 < min1 && !minmin) {
            Range r = RangeFactory.create(min2, r2.isMinIncluded(), min1, !r1.isMinIncluded());
            list.add(r);
            return list;
        }
        // Case 3) r2 on the left and r1 on the right
        if (min2 < min1 && max2 < max1) {
            Range r = RangeFactory.create(min2, r2.isMinIncluded(), min1, !r1.isMinIncluded());
            list.add(r);
            return list;
        }
        // Case 4) r1 on the left and r2 on the right
        if (min2 > min1 && max2 > max1) {
            Range r = RangeFactory.create(max1, !r1.isMaxIncluded(), max2, r2.isMaxIncluded());
            list.add(r);
            return list;
        }
        // Case 5) r1 contained in r2 (two ranges)
        if (min2 < min1 && max2 > max1) {
            Range r1New = RangeFactory.create(min2, r2.isMinIncluded(), min1, !r1.isMinIncluded());
            Range r2New = RangeFactory.create(max1, !r1.isMaxIncluded(), max2, r2.isMaxIncluded());
            list.add(r1New);
            list.add(r2New);
            return list;
        }
        return list;
    }

    /**
     * Returns the intersection of the two ranges as a double range (or one of
     * the inputs when one contains the other), or null when they do not
     * overlap. Bound comparisons use {@link #equals(double, double)}.
     */
    public static Range intersect(Range r1, Range r2) {
        // Initial checks
        if (r1.contains(r2)) {
            return r2;
        }
        if (r2.contains(r1)) {
            return r1;
        }
        // Checks on the bounds
        double min1 = r1.getMin().doubleValue();
        double min2 = r2.getMin().doubleValue();
        double max1 = r1.getMax().doubleValue();
        double max2 = r2.getMax().doubleValue();
        // Checks on the comparison between the min and max
        boolean minmin = equals(min1, min2);
        boolean maxmax = equals(max1, max2);
        boolean minmax = equals(min1, max2);
        boolean maxmin = equals(max1, min2);
        // Check on the single point comparison
        if (minmax && r1.isMinIncluded() && r2.isMaxIncluded()) {
            return RangeFactory.create(min1, min1);
        }
        if (maxmin && r1.isMaxIncluded() && r2.isMinIncluded()) {
            return RangeFactory.create(min2, min2);
        }
        if ((min1 > max2 || min2 > max1)) {
            return null;
        }
        // More precise checks
        boolean min1Used = min1 > min2;
        boolean max1Used = max1 < max2;
        double minN = min1Used || minmin ? min1 : min2;
        double maxN = max1Used || maxmax ? max1 : max2;
        boolean minIncluded = (minmin && r1.isMinIncluded() && r2.isMinIncluded())
                || (min1Used ? r1.isMinIncluded() : r2.isMinIncluded());
        boolean maxIncluded = (maxmax && r1.isMaxIncluded() && r2.isMaxIncluded())
                || (max1Used ? r1.isMaxIncluded() : r2.isMaxIncluded());
        return RangeFactory.create(minN, minIncluded, maxN, maxIncluded);
    }

    /** Approximate equality of two doubles, within {@link #TOLERANCE}. */
    public static boolean equals(double d1, double d2) {
        return Math.abs(d1 - d2) < TOLERANCE;
    }
}
| |
/*
* Copyright (C) 2013 Joan Puig Sanz
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.servDroid.server.service;
import java.io.IOException;
import java.net.ServerSocket;
import java.net.Socket;
import java.util.List;
import org.servDroid.db.LogHelper;
import org.servDroid.db.LogMessage;
import org.servDroid.helper.IPreferenceHelper;
import org.servDroid.module.AppModule;
import org.servDroid.server.HttpRequestHandler;
import org.servDroid.server.service.params.ServerParams;
import org.servDroid.ui.activity.StartActivity;
import org.servDroid.util.Logger;
import org.servDroid.util.shell.ShellCommands;
import org.servDroid.web.R;
import roboguice.service.RoboService;
import android.annotation.SuppressLint;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.IntentFilter;
import android.content.res.Resources;
import android.net.wifi.WifiManager;
import android.net.wifi.WifiManager.WifiLock;
import android.os.Handler;
import android.os.IBinder;
import android.os.Message;
import android.os.RemoteException;
import android.os.Vibrator;
import android.support.v4.app.NotificationCompat;
import com.google.inject.Inject;
import com.google.inject.name.Named;
public class ServerService extends RoboService implements ServerValues {
private static final String TAG = "ServerService";
private static final int START_NOTIFICATION_ID = 1;
private static final int VIBRATE_IDENTIFIER = 0x102;
private static final int SERVER_STARTED_IDENTIFIER = 0x102 + 1;
private static final int SERVER_STOPED_IDENTIFIER = 0x102 + 2;
/**
* This is the default port opened when the user ask for opening a port
* under 1024. <br>
* The system will try to use iptables like this:<br>
* iptables -t nat -A PREROUTING -p tcp --dport 80 -j REDIRECT --to-port
* DEFAULT_PORT_ON_ROOT
*/
public static final int DEFAULT_PORT_ON_ROOT = 65535 - 50;
@Inject
private LogHelper mLogAdapter;
@Inject
@Named(AppModule.APP_VERSION_NAME)
private String mVersion;
@Inject
private IPreferenceHelper mPreferenceHelper;
// This field is can only be setted if the server is started.
private ServerParams mParams;
private int mCurrentPort;
private String mLogPort;
private ServerSocket mServerSocket;
private static MainServerThread mServerThread;
private volatile boolean mVibrate;
private NotificationManager mNotificationManager;
private BroadcastReceiver wifiStateChangedReceiver;
@SuppressLint("HandlerLeak")
// Handles asynchronous side effects posted by the server machinery:
// vibration feedback plus showing/clearing the "running" notification.
// NOTE(review): created without an explicit Looper, so it binds to the
// thread that instantiates the service (normally the main thread) — confirm.
final Handler mServiceHandler = new Handler() {
@Override
public void handleMessage(Message msg) {
switch (msg.what) {
case VIBRATE_IDENTIFIER:
// Short 300 ms vibration as user feedback.
((Vibrator) getSystemService(Context.VIBRATOR_SERVICE)).vibrate(300);
break;
case SERVER_STARTED_IDENTIFIER:
showRunningNotification();
break;
case SERVER_STOPED_IDENTIFIER:
clearRunningNotification();
break;
}
super.handleMessage(msg);
}
};
@Override
public IBinder onBind(Intent intent) {
// Remove a stale "running" notification if the server is not actually up.
if (getServerStatus() != STATUS_RUNNING) {
clearRunningNotification();
}
// AIDL stub through which bound clients control the embedded web server.
return new ServiceController.Stub() {
@Override
public boolean startService(ServerParams params) throws RemoteException {
// Parameters are required; they are stored and later exposed
// through getCurrentParams().
if (null == params) {
return false;
}
mParams = params;
return startServer();
}
@Override
public boolean restartService(ServerParams params) throws RemoteException {
if (null == params) {
return false;
}
// NOTE(review): params is validated but never assigned to mParams,
// so a restart keeps the previously set parameters — confirm intended.
if (getStatus() == STATUS_RUNNING) {
stopServer();
}
return startServer();
}
@Override
public boolean stopService() throws RemoteException {
return stopServer();
}
@Override
public int getStatus() throws RemoteException {
return getServerStatus();
}
@Override
public void setVibrate(boolean vibrate) throws RemoteException {
mVibrate = vibrate;
}
@Override
public String getVersion() throws RemoteException {
return mVersion;
}
@Override
public long addLog(LogMessage msg) throws RemoteException {
return ServerService.this.addLog(msg);
}
@Override
public List<LogMessage> getLogList(int n) throws RemoteException {
// Fetches the n most recent log entries from the log database.
return mLogAdapter.fetchLogList(n);
}
@Override
public ServerParams getCurrentParams() throws RemoteException {
return mParams;
}
@Override
public int getDefaultPortOnRoot() throws RemoteException {
return DEFAULT_PORT_ON_ROOT;
}
};
}
/**
 * Returns STATUS_RUNNING only while a server thread exists and is alive;
 * in every other case the server counts as stopped.
 */
private int getServerStatus() {
    final MainServerThread thread = mServerThread;
    return (thread != null && thread.isAlive()) ? STATUS_RUNNING : STATUS_STOPPED;
}
/**
 * Shows the persistent "server running" notification, unless the user
 * disabled notifications in the preferences.
 */
private void showRunningNotification() {
    // Lazily cache the notification manager.
    if (null == mNotificationManager) {
        mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
    }
    if (!mPreferenceHelper.getShowNotification()) {
        return;
    }
    Context context = getApplicationContext();
    // Tapping the notification opens the app's start screen.
    Intent notificationIntent = new Intent(context, StartActivity.class);
    PendingIntent contentIntent = PendingIntent.getActivity(context, START_NOTIFICATION_ID,
            notificationIntent, PendingIntent.FLAG_CANCEL_CURRENT);
    Resources res = context.getResources();
    NotificationCompat.Builder builder = new NotificationCompat.Builder(context);
    builder.setContentIntent(contentIntent).setSmallIcon(R.drawable.icon)
            .setOngoing(true)
            .setAutoCancel(false)
            .setWhen(System.currentTimeMillis())
            .setContentTitle(res.getString(R.string.app_name))
            .setContentText(res.getString(R.string.text_running));
    Notification n = builder.build();
    // FIX: reuse the manager cached above instead of fetching a second,
    // redundant NotificationManager from the system service registry.
    mNotificationManager.notify(START_NOTIFICATION_ID, n);
}
/**
 * Cancels the "server running" notification, if any is shown.
 */
private void clearRunningNotification() {
    // Lazily cache the notification manager.
    if (null == mNotificationManager) {
        mNotificationManager = (NotificationManager) getSystemService(Context.NOTIFICATION_SERVICE);
    }
    try {
        mNotificationManager.cancel(START_NOTIFICATION_ID);
    } catch (Exception e) {
        // Best effort: cancelling may fail (e.g. nothing was ever shown).
        // FIX: log the failure instead of silently swallowing it.
        Logger.d(TAG, "Unable to cancel the running notification: " + e.getMessage());
    }
}
@Override
public void onCreate() {
    super.onCreate();
    // Watch Wi-Fi state so the running server can be stopped automatically
    // when the connection goes down and the auto-stop preference is set.
    wifiStateChangedReceiver = new BroadcastReceiver() {
        @Override
        public void onReceive(Context context, Intent intent) {
            final int wifiState = intent.getIntExtra(WifiManager.EXTRA_WIFI_STATE,
                    WifiManager.WIFI_STATE_UNKNOWN);
            // Only the "disabling" transition triggers an action; all other
            // states (enabled, disabled, enabling, unknown) are ignored.
            if (wifiState == WifiManager.WIFI_STATE_DISABLING
                    && mPreferenceHelper.isAutostopWifiEnabled()
                    && getServerStatus() == STATUS_RUNNING) {
                addLog("", "", "", "Wifi connection down... Stopping server");
                stopServer();
            }
        }
    };
    registerReceiver(wifiStateChangedReceiver, new IntentFilter(
            WifiManager.WIFI_STATE_CHANGED_ACTION));
}
/**
 * {@inheritDoc}
 *
 * <p>Returns {@code START_STICKY} so Android keeps (and restarts) this
 * service until it is explicitly stopped.
 */
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
    // We want this service to continue running until it is explicitly
    // stopped, so return sticky.
    return START_STICKY;
}
/**
 * Stops the server and clears the notification before the service dies.
 */
@Override
public void onDestroy() {
    // Fixed typo in the log message ("Destroing" -> "Destroying").
    Logger.d(TAG, " Destroying ServDroid Service");
    stopServer();
    // stopServer() already clears the notification; this guard covers the
    // case where the server was not running when the service is destroyed.
    if (getServerStatus() != STATUS_RUNNING) {
        clearRunningNotification();
    }
    super.onDestroy();
}
/**
 * Starts the web server thread unless one is already alive.
 *
 * @return {@code true} when a new server thread was launched,
 *         {@code false} when the server is already running.
 */
private boolean startServer() {
    final boolean alreadyRunning = mServerThread != null && mServerThread.isAlive();
    if (alreadyRunning) {
        return false;
    }
    mServerThread = new MainServerThread();
    mServerThread.start();
    return true;
}
/**
 * Stops the web server thread, clears the notification, and removes any
 * NAT port forwarding set up for privileged ports (&lt; 1024).
 *
 * @return {@code true} when a live server thread was stopped,
 *         {@code false} when there was nothing (alive) to stop.
 */
private boolean stopServer() {
    clearRunningNotification();
    // Privileged ports were NAT-forwarded on rooted devices; undo that.
    if (mCurrentPort < 1024) {
        ShellCommands.closeNatPorts();
    }
    if (null == mServerThread) {
        // Fixed typo and trailing space in log messages ("stoped" -> "stopped").
        addLog("", "", "", "ERROR stopping ServDroid.web server");
        return false;
    }
    if (mServerThread.isAlive()) {
        mServerThread.stopThread();
        mServerThread = null;
        addLog("", "", "", "ServDroid.web server stopped");
        return true;
    }
    addLog("", "", "", "ERROR stopping ServDroid.web server");
    mServerThread = null;
    return false;
}
/**
 * Appends a log entry; silently dropped when no log adapter is available.
 */
public void addLog(String ip, String path, String infoBeginning, String infoEnd) {
    if (mLogAdapter != null) {
        mLogAdapter.addLog(ip, path, infoBeginning, infoEnd);
    }
}
/**
 * Appends a minimal (ip, path) log entry; dropped when no adapter is set.
 */
public void addLog(String ip, String path) {
    if (mLogAdapter != null) {
        mLogAdapter.addLog(ip, path);
    }
}
/**
 * Appends a full {@link LogMessage}.
 *
 * @return the adapter's row id, or -1 when no log adapter is available.
 */
public long addLog(LogMessage msg) {
    return (mLogAdapter == null) ? -1 : mLogAdapter.addLog(msg);
}
// ///////////////////////////////////////
// ///////////////////////////////////////
// ///////////////////////////////////////
/**
 * Worker thread that owns the listening {@code ServerSocket}: it acquires a
 * Wifi lock, opens the configured port (NAT-forwarding privileged ports on
 * rooted devices), then loops accepting connections and handing each one to
 * a new {@code HttpRequestHandler} thread.
 */
private class MainServerThread extends Thread {
    // Accept-loop flag; cleared (under synchronization) by stopThread().
    private volatile boolean mRun;
    // Keeps Wifi up while serving; released in stopThread().
    private WifiLock mWl;
    public MainServerThread() {
        mRun = true;
    }
    /**
     * Stops the thread: releases the Wifi lock, clears the run flag and
     * closes the server socket, which unblocks the accept() call in run().
     */
    public synchronized void stopThread() {
        if (null != mWl && mWl.isHeld()) {
            mWl.release();
        }
        if (mRun == false) {
            return;
        }
        mRun = false;
        if (mServerSocket == null) {
            return;
        }
        try {
            // Closing the socket makes a blocked accept() throw IOException.
            mServerSocket.close();
        } catch (IOException e) {
            Logger.e(TAG, "Error stoping server thread: ", e);
            e.printStackTrace();
        }
    }
    public void run() {
        try {
            // Acquire a non-reference-counted Wifi lock so the connection
            // stays up while the server runs; failure here is non-fatal.
            if (mWl == null || !mWl.isHeld()) {
                WifiManager manager = (WifiManager) getSystemService(Context.WIFI_SERVICE);
                mWl = manager.createWifiLock(WifiManager.WIFI_MODE_FULL, "servdroid_wifilock");
                mWl.setReferenceCounted(false);
                mWl.acquire();
            }
        } catch (Exception e) {
            // Best effort: the server can still run without the Wifi lock.
        }
        mCurrentPort = mParams.getPort();
        mLogPort = "" + mCurrentPort;
        try {
            // Ports below 1024 need root: try NAT-forwarding the privileged
            // port to DEFAULT_PORT_ON_ROOT; fall back to 8080 on failure.
            if (mParams.getPort() < 1024) {
                if (!ShellCommands.isDeviceRooted()
                        || !ShellCommands.openNatPort(mParams.getPort(), DEFAULT_PORT_ON_ROOT)) {
                    mLogPort = "" + DEFAULT_PORT_ON_ROOT;
                    addLog("", "", "", "ERROR opening port " + mParams.getPort());
                    Logger.d(TAG, "ERROR opening port " + mParams.getPort());
                    mCurrentPort = 8080;
                    mLogPort = "" + mCurrentPort;
                } else {
                    mCurrentPort = DEFAULT_PORT_ON_ROOT;
                    mLogPort = mLogPort + " / " + DEFAULT_PORT_ON_ROOT;
                }
            }
            mServerSocket = new ServerSocket(mCurrentPort, mParams.getMaxClients());
            // Notify the service handler that the server is up.
            Message m = new Message();
            m.what = ServerService.SERVER_STARTED_IDENTIFIER;
            mServiceHandler.sendMessage(m);
            addLog("", "", "",
                    "ServDroid.web server running on port: " + mLogPort + " | WWW path: "
                            + mParams.getWwwPath() + " | Error path: " + mParams.getErrorPath()
                            + " | Max clients: " + mParams.getMaxClients()
                            + " | File indexing: " + mParams.isFileIndexing());
            Logger.d(TAG, "ServDroid.web server running on port " + mLogPort);
        } catch (IOException e) {
            if (mRun) {
                Logger.e(TAG, "Error accepting connections: ", e);
            }
            addLog("", "", "", "ERROR starting server ServDroid.web on port " + mLogPort);
            Message m = new Message();
            m.what = ServerService.SERVER_STOPED_IDENTIFIER;
            mServiceHandler.sendMessage(m);
            // Toast.makeText(ServerService.this,
            // R.string.error_starting_process,
            // Toast.LENGTH_LONG).show();
            return;
        }
        // Accept loop: one handler thread per incoming connection.
        while (mRun) {
            Socket socket;
            try {
                socket = mServerSocket.accept();
            } catch (IOException e1) {
                // accept() also fails when stopThread() closes the socket;
                // only log a warning when the server was not shutting down.
                if (mRun) {
                    addLog("", "", "",
                            "Warning! One connection has been droped! " + mParams.getPort());
                }
                return;
            }
            // Logger.d(TAG, "New connection accepted " +
            // socket.getInetAddress()
            // + ":" + socket.getPort());
            try {
                HttpRequestHandler request = new HttpRequestHandler(socket, mLogAdapter,
                        mParams, mVersion);
                Thread thread = new Thread(request);
                thread.start();
            } catch (Exception e) {
                // Logger.e(TAG, "ERROR handing request: " + e.getMessage());
                return;
            }
            if (mVibrate) {
                // Ask the service handler to vibrate on each served request.
                Message m = new Message();
                m.what = ServerService.VIBRATE_IDENTIFIER;
                mServiceHandler.sendMessage(m);
            }
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.aries.blueprint.itests;
import static org.apache.aries.itest.ExtraOptions.mavenBundle;
import static org.apache.aries.itest.ExtraOptions.paxLogging;
import static org.apache.aries.itest.ExtraOptions.testOptions;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.ops4j.pax.exam.CoreOptions.equinox;
import java.util.ArrayList;
import java.util.List;
import org.apache.aries.blueprint.BeanProcessor;
import org.apache.aries.blueprint.testbundlea.NSHandlerOne;
import org.apache.aries.blueprint.testbundlea.NSHandlerTwo;
import org.apache.aries.blueprint.testbundlea.ProcessableBean;
import org.apache.aries.blueprint.testbundlea.ProcessableBean.Phase;
import org.apache.aries.blueprint.testbundlea.multi.InterfaceA;
import org.apache.aries.blueprint.testbundlea.multi.InterfaceB;
import org.apache.aries.blueprint.testbundlea.multi.InterfaceC;
import org.apache.aries.blueprint.testbundlea.multi.InterfaceD;
import org.apache.aries.blueprint.testbundleb.OtherBean;
import org.apache.aries.blueprint.testbundleb.TestBean;
import org.apache.aries.itest.AbstractIntegrationTest;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.ops4j.pax.exam.Option;
import org.ops4j.pax.exam.junit.JUnit4TestRunner;
import org.osgi.framework.Bundle;
import org.osgi.service.blueprint.container.BlueprintContainer;
@RunWith(JUnit4TestRunner.class)
public class ASMMultiBundleTest extends AbstractIntegrationTest {
    /**
     * Asserts that {@code log} holds exactly the invocations listed in
     * {@code expected}: every expected entry must be matched (by prefix) by
     * some log line, and no log line may be left unmatched.
     */
    private void checkInterceptorLog(String []expected, List<String> log){
        assertNotNull("interceptor log should not be null",log);
        System.out.println("Log:");
        for(String entry: log){
            System.out.println(""+entry);
        }
        assertEquals("interceptor log size does not match expected size",expected.length,log.size());
        List<String> extra=new ArrayList<String>();
        boolean[] found = new boolean[expected.length];
        for(String s : log){
            boolean used=false;
            for(int i=0; i<expected.length; i++){
                if(s.startsWith(expected[i])){
                    found[i]=true;
                    used=true;
                }
            }
            if(!used){
                extra.add(s);
            }
        }
        if(!extra.isEmpty()){
            // Build the failure message with StringBuilder instead of
            // repeated String concatenation inside a loop.
            StringBuilder extraFormatted = new StringBuilder("{");
            for(String e:extra){
                extraFormatted.append(e).append(' ');
            }
            extraFormatted.append('}');
            fail("surplus interceptor invocations present in invocation log "+extraFormatted);
        }
        for(int i=0; i<found.length; i++){
            assertTrue("interceptor invocation "+expected[i]+" not found",found[i]);
        }
    }
    /**
     * End-to-end test: bundlea contributes namespace handlers, bean
     * processors and interceptors; bundleb consumes those extensions. The
     * test inspects bundleb's blueprint container to verify each extension
     * point functioned as expected.
     */
    @Test
    public void multiBundleTest() throws Exception {
        //bundlea provides the ns handlers, bean processors, interceptors etc for this test.
        Bundle bundlea = context().getBundleByName("org.apache.aries.blueprint.testbundlea");
        assertNotNull(bundlea);
        bundlea.start();
        //bundleb makes use of the extensions provided by bundlea
        Bundle bundleb = context().getBundleByName("org.apache.aries.blueprint.testbundleb");
        assertNotNull(bundleb);
        bundleb.start();
        //bundleb's container will hold the beans we need to query to check the function
        //provided by bundlea functioned as expected
        BlueprintContainer beanContainer =
            Helper.getBlueprintContainerForBundle(context(), "org.apache.aries.blueprint.testbundleb");
        assertNotNull(beanContainer);
        //TestBeanA should have the values below, no interference should be present from other sources.
        Object obj1 = beanContainer.getComponentInstance("TestBeanA");
        assertTrue(obj1 instanceof TestBean);
        TestBean testBeanA = (TestBean)obj1;
        org.junit.Assert.assertEquals("RED", testBeanA.getRed());
        org.junit.Assert.assertEquals("GREEN", testBeanA.getGreen());
        org.junit.Assert.assertEquals("BLUE", testBeanA.getBlue());
        //TestBeanB tests that a custom ns handler is able to inject custom components to the blueprint,
        //and modify existing components, and use injected components as modifications.
        Object obj2 = beanContainer.getComponentInstance("TestBeanB");
        assertTrue(obj2 instanceof TestBean);
        TestBean testBeanB = (TestBean)obj2;
        //value should be set in via the added passthroughmetadata via the nshandler.
        org.junit.Assert.assertEquals("ONE_VALUE", testBeanB.getRed());
        org.junit.Assert.assertEquals("GREEN", testBeanB.getGreen());
        org.junit.Assert.assertEquals("BLUE", testBeanB.getBlue());
        //TestBeanC tests that custom ns handlers can add interceptors to beans.
        Object obj3 = beanContainer.getComponentInstance("TestBeanC");
        assertTrue(obj3 instanceof TestBean);
        TestBean testBeanC = (TestBean)obj3;
        //handlers are in bundlea, with its own container.
        BlueprintContainer handlerContainer =
            Helper.getBlueprintContainerForBundle( context(), "org.apache.aries.blueprint.testbundlea");
        assertNotNull(handlerContainer);
        Object ns1 = handlerContainer.getComponentInstance("NSHandlerOne");
        assertTrue(ns1 instanceof NSHandlerOne);
        Object ns2 = handlerContainer.getComponentInstance("NSHandlerTwo");
        assertTrue(ns2 instanceof NSHandlerTwo);
        NSHandlerTwo nstwo = (NSHandlerTwo)ns2;
        //now we have a handle to the nshandler2, we can query what it 'saw', and ensure
        //that the interceptors are functioning as expected.
        List<String> log = nstwo.getLog();
        //TestBeanC has the interceptor configured, and is injected to OtherBeanA & OtherBeanB
        //which then uses the injected bean during their init method call, to invoke a method
        checkInterceptorLog(new String[] {
            "PRECALL:TestBeanC:methodToInvoke:[RED]:",
            "POSTCALL[true]:TestBeanC:methodToInvoke:[RED]:",
            "PRECALL:TestBeanC:methodToInvoke:[BLUE]:",
            "POSTCALL[false]:TestBeanC:methodToInvoke:[BLUE]:"
        }, log);
        //invoking GREEN is hardwired to cause an exception response, we do this
        //from here to ensure the exception occurs and is visible as expected
        RuntimeException re=null;
        try{
            testBeanC.methodToInvoke("GREEN");
        }catch(RuntimeException e){
            re=e;
        }
        assertNotNull("invocation of Green did not cause an exception as expected",re);
        //Exception responses should be intercepted too, test for the POSTCALLWITHEXCEPTION log entry.
        log = nstwo.getLog();
        checkInterceptorLog(new String[] {
            "PRECALL:TestBeanC:methodToInvoke:[RED]:",
            "POSTCALL[true]:TestBeanC:methodToInvoke:[RED]:",
            "PRECALL:TestBeanC:methodToInvoke:[BLUE]:",
            "POSTCALL[false]:TestBeanC:methodToInvoke:[BLUE]:",
            "PRECALL:TestBeanC:methodToInvoke:[GREEN]:",
            "POSTCALLEXCEPTION[java.lang.RuntimeException: MATCHED ON GREEN (GREEN)]:TestBeanC:methodToInvoke:[GREEN]:"
        }, log);
        //ProcessedBean is a test to ensure that BeanProcessors are called..
        //The test has the BeanProcessor look for ProcessableBeans, and log itself with them
        Object obj4 = beanContainer.getComponentInstance("ProcessedBean");
        assertTrue(obj4 instanceof ProcessableBean);
        ProcessableBean pb = (ProcessableBean)obj4;
        //Note, the BeanProcessor exists in the same container as the beans it processes!!
        Object bp = beanContainer.getComponentInstance("http://ns.handler.three/BeanProcessor");
        assertNotNull(bp);
        assertTrue(bp instanceof BeanProcessor);
        assertEquals(1,pb.getProcessedBy().size());
        //check we were invoked..
        assertEquals(pb.getProcessedBy().get(0),bp);
        //check invocation for each phase.
        assertEquals(pb.getProcessedBy(Phase.BEFORE_INIT).get(0),bp);
        assertEquals(pb.getProcessedBy(Phase.AFTER_INIT).get(0),bp);
        //destroy invocation will only occur at tear down.. TODO, how to test after teardown.
        //assertEquals(pb.getProcessedBy(Phase.BEFORE_DESTROY).get(0),bp);
        //assertEquals(pb.getProcessedBy(Phase.AFTER_DESTROY).get(0),bp);
        Object objOther = beanContainer.getComponentInstance("PlaceHolderTestBean");
        assertTrue(objOther instanceof OtherBean);
        assertEquals("test1value", ((OtherBean)objOther).getTestValue());
    }
    /**
     * Pax-exam configuration: blueprint core plus the two test bundles
     * (left un-started so the test controls startup order), on Equinox.
     */
    @org.ops4j.pax.exam.junit.Configuration
    public static Option[] configuration() {
        return testOptions(
            paxLogging("DEBUG"),
            Helper.blueprintBundles(),
            mavenBundle("org.apache.aries.blueprint", "org.apache.aries.blueprint.testbundlea").noStart(),
            mavenBundle("org.apache.aries.blueprint", "org.apache.aries.blueprint.testbundleb").noStart(),
            equinox().version("3.5.0")
        );
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.security.user;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Iterators;
import org.apache.jackrabbit.JcrConstants;
import org.apache.jackrabbit.api.JackrabbitSession;
import org.apache.jackrabbit.api.security.user.AuthorizableExistsException;
import org.apache.jackrabbit.api.security.user.Group;
import org.apache.jackrabbit.api.security.user.User;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.namepath.NamePathMapper;
import org.apache.jackrabbit.oak.plugins.tree.TreeUtil;
import org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants;
import org.apache.jackrabbit.oak.spi.security.principal.EveryonePrincipal;
import org.apache.jackrabbit.oak.spi.security.user.UserConstants;
import org.apache.jackrabbit.oak.spi.security.user.action.AuthorizableAction;
import org.apache.jackrabbit.oak.spi.security.user.util.PasswordUtil;
import org.apache.jackrabbit.oak.spi.xml.PropInfo;
import org.apache.jackrabbit.oak.spi.xml.ReferenceChangeTracker;
import org.junit.Test;
import javax.jcr.ImportUUIDBehavior;
import javax.jcr.RepositoryException;
import javax.jcr.Session;
import javax.jcr.nodetype.ConstraintViolationException;
import javax.jcr.nodetype.PropertyDefinition;
import java.lang.reflect.Field;
import java.util.Collections;
import java.util.Map;
import static org.apache.jackrabbit.oak.spi.nodetype.NodeTypeConstants.NT_OAK_UNSTRUCTURED;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.ArgumentMatchers.nullable;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import static org.mockito.Mockito.when;
public class UserImporterTest extends UserImporterBaseTest implements UserConstants {
//---------------------------------------------------------------< init >---
// init() must reject plain JCR sessions: a JackrabbitSession is required.
@Test
public void testInitNoJackrabbitSession() {
    Session s = mock(Session.class);
    assertFalse(importer.init(s, root, getNamePathMapper(), false, ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW, new ReferenceChangeTracker(), getSecurityProvider()));
}
// Failure to obtain the UserManager must make init() return false, not throw.
@Test
public void testInitGetUserManagerFails() throws Exception {
    Session s = when(mock(JackrabbitSession.class).getUserManager()).thenThrow(new RepositoryException()).getMock();
    assertFalse(importer.init(s, root, getNamePathMapper(), false, ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW, new ReferenceChangeTracker(), getSecurityProvider()));
}
// Calling init() on an already-initialized importer is a programming error.
@Test(expected = IllegalStateException.class)
public void testInitAlreadyInitialized() throws Exception {
    init();
    importer.init(mockJackrabbitSession(), root, getNamePathMapper(), isWorkspaceImport(), ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING, new ReferenceChangeTracker(), getSecurityProvider());
}
// The REMOVE_EXISTING, REPLACE_EXISTING and THROW collision behaviors are accepted.
@Test
public void testInitImportUUIDBehaviorRemove() throws Exception {
    assertTrue(importer.init(mockJackrabbitSession(), root, getNamePathMapper(), isWorkspaceImport(), ImportUUIDBehavior.IMPORT_UUID_COLLISION_REMOVE_EXISTING, new ReferenceChangeTracker(), getSecurityProvider()));
}
@Test
public void testInitImportUUIDBehaviorReplace() throws Exception {
    assertTrue(importer.init(mockJackrabbitSession(), root, getNamePathMapper(), isWorkspaceImport(), ImportUUIDBehavior.IMPORT_UUID_COLLISION_REPLACE_EXISTING, new ReferenceChangeTracker(), getSecurityProvider()));
}
@Test
public void testInitImportUUIDBehaviorThrow() throws Exception {
    assertTrue(importer.init(mockJackrabbitSession(), root, getNamePathMapper(), isWorkspaceImport(), ImportUUIDBehavior.IMPORT_UUID_COLLISION_THROW, new ReferenceChangeTracker(), getSecurityProvider()));
}
// IMPORT_UUID_CREATE_NEW is rejected (init() returns false).
@Test
public void testInitImportUUIDBehaviourCreateNew() throws Exception {
    assertFalse(importer.init(mockJackrabbitSession(), root, getNamePathMapper(), isWorkspaceImport(), ImportUUIDBehavior.IMPORT_UUID_CREATE_NEW, new ReferenceChangeTracker(), getSecurityProvider()));
}
//-----------------------------------------------------< handlePropInfo >---
// handlePropInfo() before init() is a programming error.
@Test(expected = IllegalStateException.class)
public void testHandlePropInfoNotInitialized() throws Exception {
    importer.handlePropInfo(createUserTree(), mock(PropInfo.class), mock(PropertyDefinition.class));
}
// Properties under a non-authorizable parent tree are not handled.
@Test
public void testHandlePropInfoParentNotAuthorizable() throws Exception {
    init();
    assertFalse(importer.handlePropInfo(root.getTree(PathUtils.ROOT_PATH), mock(PropInfo.class), mock(PropertyDefinition.class)));
}
// A valid rep:authorizableId is written and the user becomes resolvable by that id.
@Test
public void testHandleAuthorizableId() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, TEST_USER_ID), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false)));
    assertEquals(TEST_USER_ID, userTree.getProperty(REP_AUTHORIZABLE_ID).getValue(Type.STRING));
    assertEquals(userTree.getPath(), getUserManager(root).getAuthorizable(TEST_USER_ID).getPath());
}
// An id that does not match the tree's computed id must be rejected.
@Test(expected = ConstraintViolationException.class)
public void testHandleAuthorizableIdMismatch() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, "mismatch"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false));
}
// An id colliding with an existing authorizable must be rejected.
@Test(expected = AuthorizableExistsException.class)
public void testHandleAuthorizableIdConflictExisting() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, testUser.getID()), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false));
}
// A multi-valued property definition does not match rep:authorizableId -> not handled.
@Test
public void testHandleAuthorizableIdMvPropertyDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, TEST_USER_ID), mockPropertyDefinition(NT_REP_AUTHORIZABLE, true)));
    assertNull(userTree.getProperty(REP_AUTHORIZABLE_ID));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandleAuthorizableIdOtherDeclNtDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, TEST_USER_ID), mockPropertyDefinition(NT_REP_AUTHORIZABLE_FOLDER, false)));
    assertNull(userTree.getProperty(REP_AUTHORIZABLE_ID));
}
// A definition declared by a subtype of rep:Authorizable (rep:User) is accepted.
@Test
public void testHandleAuthorizableIdDeclNtDefSubtype() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_AUTHORIZABLE_ID, TEST_USER_ID), mockPropertyDefinition(NT_REP_USER, false)));
}
// A valid rep:principalName is written to the user tree.
@Test
public void testHandlePrincipalName() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_PRINCIPAL_NAME, "principalName"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false)));
    assertEquals("principalName", userTree.getProperty(REP_PRINCIPAL_NAME).getValue(Type.STRING));
}
// An empty principal name is invalid.
@Test(expected = IllegalArgumentException.class)
public void testHandleEmptyPrincipalName() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.handlePropInfo(userTree, createPropInfo(REP_PRINCIPAL_NAME, ""), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false));
}
// The reserved 'everyone' principal name cannot be assigned to a user.
@Test(expected = IllegalArgumentException.class)
public void testHandleEveryonePrincipalNameOnUser() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.handlePropInfo(userTree, createPropInfo(REP_PRINCIPAL_NAME, EveryonePrincipal.NAME), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false));
}
// A multi-valued definition does not match rep:principalName -> not handled.
@Test
public void testHandlePrincipalNameMvPropertyDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_PRINCIPAL_NAME, "principalName"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, true)));
    assertNull(userTree.getProperty(REP_PRINCIPAL_NAME));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandlePrincipalNameOtherDeclNtDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_PRINCIPAL_NAME, "principalName"), mockPropertyDefinition(NT_REP_AUTHORIZABLE_FOLDER, false)));
    assertNull(userTree.getProperty(REP_PRINCIPAL_NAME));
}
// A rep:password hash on a regular user is written as-is.
@Test
public void testHandlePassword() throws Exception {
    init();
    Tree userTree = createUserTree();
    String pwHash = PasswordUtil.buildPasswordHash("pw");
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_PASSWORD, pwHash), mockPropertyDefinition(NT_REP_USER, false)));
    assertEquals(pwHash, userTree.getProperty(REP_PASSWORD).getValue(Type.STRING));
}
// System users must not get a password.
@Test
public void testHandlePasswordOnSystemUser() throws Exception {
    init();
    Tree userTree = createUserTree();
    userTree.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_SYSTEM_USER, Type.NAME);
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_PASSWORD, PasswordUtil.buildPasswordHash("pw")), mockPropertyDefinition(NT_REP_USER, false)));
}
// Groups must not get a password.
@Test
public void testHandlePasswordOnGroup() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertFalse(importer.handlePropInfo(groupTree, createPropInfo(REP_PASSWORD, PasswordUtil.buildPasswordHash("pw")), mockPropertyDefinition(NT_REP_USER, false)));
}
// A multi-valued definition does not match rep:password -> not handled.
@Test
public void testHandlePasswordMvPropertyDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_PASSWORD, PasswordUtil.buildPasswordHash("pw")), mockPropertyDefinition(NT_REP_USER, true)));
    assertNull(userTree.getProperty(REP_PASSWORD));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandlePasswordOtherDeclNtDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_PASSWORD, PasswordUtil.buildPasswordHash("pw")), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false)));
    assertNull(userTree.getProperty(REP_PASSWORD));
}
// Properties below the rep:pwd node: a residual-name definition is rejected.
@Test
public void testHandlePwNodePropertyInvalidDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    Tree pwHistory = TreeUtil.addChild(userTree, REP_PWD, NT_REP_PASSWORD);
    PropertyDefinition pd = mockPropertyDefinition(NT_REP_PASSWORD, false);
    when(pd.getName()).thenReturn(NodeTypeConstants.RESIDUAL_NAME);
    assertFalse(importer.handlePropInfo(pwHistory, createPropInfo(null, PasswordUtil.buildPasswordHash("pw")), pd));
}
// ...as is a definition without a name.
@Test
public void testHandlePwNodePropertyInvalidDef2() throws Exception {
    init();
    Tree userTree = createUserTree();
    Tree pwHistory = TreeUtil.addChild(userTree, REP_PWD, NT_REP_PASSWORD);
    PropertyDefinition pd = mockPropertyDefinition(NT_REP_PASSWORD, false);
    when(pd.getName()).thenReturn(null);
    assertFalse(importer.handlePropInfo(pwHistory, createPropInfo(null, PasswordUtil.buildPasswordHash("pw")), pd));
}
// A rep:pwdHistory definition below the rep:pwd node is handled.
@Test
public void testHandlePwNodePropertyValidDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    Tree pwHistory = TreeUtil.addChild(userTree, REP_PWD, NT_REP_PASSWORD);
    PropertyDefinition pd = mockPropertyDefinition(NT_REP_PASSWORD, true);
    when(pd.getName()).thenReturn(REP_PWD_HISTORY);
    assertTrue(importer.handlePropInfo(pwHistory, createPropInfo(null, PasswordUtil.buildPasswordHash("pw")), pd));
}
// rep:impersonators is accepted but written later via the reference tracker.
@Test
public void testHandleImpersonators() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_IMPERSONATORS, "impersonator1", "impersonator2"), mockPropertyDefinition(NT_REP_USER, true)));
    // writing is postponed though and the ref-tracker must not be empty
    assertNull(userTree.getProperty(REP_IMPERSONATORS));
    assertTrue(refTracker.getProcessedReferences().hasNext());
}
// Groups cannot have impersonators.
@Test
public void testHandleImpersonatorsOnGroup() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertFalse(importer.handlePropInfo(groupTree, createPropInfo(REP_IMPERSONATORS, "impersonator1"), mockPropertyDefinition(NT_REP_USER, true)));
}
// A single-valued definition does not match rep:impersonators -> not handled.
@Test
public void testHandleImpersonatorsSinglePropertyDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_IMPERSONATORS, "impersonator1"), mockPropertyDefinition(NT_REP_USER, false)));
    assertNull(userTree.getProperty(REP_IMPERSONATORS));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandleImpersonatorsOtherDeclNtDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_IMPERSONATORS, "impersonator1"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, true)));
    assertNull(userTree.getProperty(REP_IMPERSONATORS));
}
// rep:members on a group is accepted; writing is likewise postponed.
@Test
public void testHandleMembers() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertTrue(importer.handlePropInfo(groupTree, createPropInfo(REP_MEMBERS, "member1", "member2"), mockPropertyDefinition(NT_REP_MEMBER_REFERENCES, true)));
    // writing is postponed though
    assertNull(groupTree.getProperty(REP_MEMBERS));
}
// Users cannot have members.
@Test
public void testHandleMembersOnUser() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_MEMBERS, "member1"), mockPropertyDefinition(NT_REP_MEMBER_REFERENCES, true)));
}
// A single-valued definition does not match rep:members -> not handled.
@Test
public void testHandleMembersSinglePropertyDef() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertFalse(importer.handlePropInfo(groupTree, createPropInfo(REP_MEMBERS, "member1"), mockPropertyDefinition(NT_REP_MEMBER_REFERENCES, false)));
    assertNull(groupTree.getProperty(REP_MEMBERS));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandleMembersOtherDeclNtDef() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertFalse(importer.handlePropInfo(groupTree, createPropInfo(REP_MEMBERS, "member1"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, true)));
    assertNull(groupTree.getProperty(REP_MEMBERS));
}
// rep:disabled on a user is written with the given reason string.
@Test
public void testHandleDisabled() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertTrue(importer.handlePropInfo(userTree, createPropInfo(REP_DISABLED, "disabled"), mockPropertyDefinition(NT_REP_USER, false)));
    PropertyState property = userTree.getProperty(REP_DISABLED);
    assertNotNull(property);
    assertEquals("disabled", property.getValue(Type.STRING));
}
// Groups cannot be disabled.
@Test
public void testHandleDisabledOnGroup() throws Exception {
    init();
    Tree groupTree = createGroupTree();
    assertFalse(importer.handlePropInfo(groupTree, createPropInfo(REP_DISABLED, "disabled"), mockPropertyDefinition(NT_REP_USER, false)));
    assertNull(groupTree.getProperty(REP_DISABLED));
}
// Multiple values for the single-valued rep:disabled property are invalid.
@Test(expected = RepositoryException.class)
public void testHandleDisabledMvProperty() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.handlePropInfo(userTree, createPropInfo(REP_DISABLED, "disabled", "disabled"), mockPropertyDefinition(NT_REP_USER, false));
}
// A multi-valued definition does not match rep:disabled -> not handled.
@Test
public void testHandleDisabledMvPropertyDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_DISABLED, "disabled"), mockPropertyDefinition(NT_REP_USER, true)));
    assertNull(userTree.getProperty(REP_DISABLED));
}
// A definition declared by an unrelated node type is not handled.
@Test
public void testHandleDisabledOtherDeclNtDef() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo(REP_DISABLED, "disabled"), mockPropertyDefinition(NT_REP_AUTHORIZABLE, false)));
    assertNull(userTree.getProperty(REP_DISABLED));
}
// Unknown (non-protected) properties are left to the default import handling.
@Test
public void testHandleUnknownProperty() throws Exception {
    init();
    Tree userTree = createUserTree();
    assertFalse(importer.handlePropInfo(userTree, createPropInfo("unknownProperty", "value"), mockPropertyDefinition(NT_OAK_UNSTRUCTURED, false)));
    assertNull(userTree.getProperty("unknownProperty"));
}
//--------------------------------------------------< processReferences >---
// processReferences() before init() is a programming error.
@Test(expected = IllegalStateException.class)
public void testProcessReferencesNotInitialized() throws Exception {
    importer.processReferences();
}
// Reference objects the importer does not recognize stay in the tracker.
@Test
public void testProcessReferencesUnknownReferenceObject() throws Exception {
    init();
    Map unknownReferenceObj = Collections.EMPTY_MAP;
    refTracker.processedReference(unknownReferenceObj);
    importer.processReferences();
    assertTrue(Iterators.contains(refTracker.getProcessedReferences(), unknownReferenceObj));
}
//------------------------------------------------< propertiesCompleted >---
// A no-longer-existing tree is detected and no properties are read from it.
@Test
public void testPropertiesCompletedNonExistingTree() throws Exception {
    init();
    Tree nonExisting = when(mock(Tree.class).exists()).thenReturn(false).getMock();
    importer.propertiesCompleted(nonExisting);
    verify(nonExisting, times(2)).exists();
    verify(nonExisting, never()).getProperty(anyString());
}
// A rep:cache child with the wrong primary type is left untouched.
@Test
public void testPropertiesCompletedInvalidCacheTree() throws Exception {
    init();
    Tree userTree = createUserTree();
    Tree cacheTree = TreeUtil.addChild(userTree, CacheConstants.REP_CACHE, NT_OAK_UNSTRUCTURED);
    importer.propertiesCompleted(cacheTree);
    assertTrue(cacheTree.exists());
}
// A genuine rep:Cache child is removed upon completion.
@Test
public void testPropertiesCompletedClearsCache() throws Exception {
    Tree userTree = createUserTree();
    Tree cacheTree = userTree.addChild(CacheConstants.REP_CACHE);
    cacheTree.setProperty(JcrConstants.JCR_PRIMARYTYPE, CacheConstants.NT_REP_CACHE);
    importer.propertiesCompleted(cacheTree);
    assertFalse(cacheTree.exists());
    assertFalse(userTree.hasChild(CacheConstants.REP_CACHE));
}
// Non-authorizable trees must be ignored without error.
@Test
public void testPropertiesCompletedParentNotAuthorizable() throws Exception {
    init();
    importer.propertiesCompleted(root.getTree("/"));
}
// A missing rep:authorizableId is filled in on completion...
@Test
public void testPropertiesCompletedIdMissing() throws Exception {
    init();
    Tree userTree = createUserTree();
    importer.propertiesCompleted(userTree);
    assertTrue(userTree.hasProperty(REP_AUTHORIZABLE_ID));
}
// ...while an existing id is preserved unchanged.
@Test
public void testPropertiesCompletedIdPresent() throws Exception {
    init();
    testAction = mock(AuthorizableAction.class);
    Tree userTree = createUserTree();
    userTree.setProperty(REP_AUTHORIZABLE_ID, "userId");
    importer.propertiesCompleted(userTree);
    // property must not be touched
    assertEquals("userId", userTree.getProperty(REP_AUTHORIZABLE_ID).getValue(Type.STRING));
}
// A newly imported user triggers the onCreate action exactly once.
@Test
public void testPropertiesCompletedNewUser() throws Exception {
    init(true);
    importer.propertiesCompleted(createUserTree());
    verify(testAction, times(1)).onCreate(any(User.class), nullable(String.class), any(Root.class), any(NamePathMapper.class));
}
@Test
public void testPropertiesCompletedNewSystemUser() throws Exception {
    init(true);
    importer.propertiesCompleted(createSystemUserTree());
    // create-actions must not be called for system users
    verifyNoInteractions(testAction);
}
// A newly imported group triggers the group variant of onCreate.
@Test
public void testPropertiesCompletedNewGroup() throws Exception {
    Tree groupTree = createGroupTree();
    init(true);
    importer.propertiesCompleted(groupTree);
    verify(testAction, times(1)).onCreate(any(Group.class), any(Root.class), any(NamePathMapper.class));
}
@Test
public void testPropertiesCompletedExistingUser() throws Exception {
init(true);
importer.propertiesCompleted(root.getTree(testUser.getPath()));
verifyNoInteractions(testAction);
}
//--------------------------------------------------------------< start >---
// start() only claims membership-related trees; plain user trees are declined
@Test
public void testStartUserTree() throws Exception {
init(true);
assertFalse(importer.start(createUserTree()));
}
@Test
public void testStartGroupTree() throws Exception {
init(true);
assertFalse(importer.start(createGroupTree()));
}
// a rep:MemberReferencesList directly below a group tree is claimed
@Test
public void testStartMembersRefListTree() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
assertTrue(importer.start(memberRefList));
}
// ... but not when its parent is a user (users have no members)
@Test
public void testStartMembersRefListBelowUserTree() throws Exception {
init(true);
Tree userTree = createUserTree();
Tree memberRefList = userTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
assertFalse(importer.start(memberRefList));
}
// ... and not when it hangs below an arbitrary (root) tree either
@Test
public void testStartMembersRefBelowAnyTree() throws Exception {
init(true);
Tree memberRefList = root.getTree(PathUtils.ROOT_PATH).addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
assertFalse(importer.start(memberRefList));
}
// nested rep:Members trees are claimed when their ancestry leads to a group
@Test
public void testStartRepMembersTree() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree repMembers = groupTree.addChild("memberTree");
repMembers.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBERS);
repMembers = repMembers.addChild("memberTree");
repMembers.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBERS);
assertTrue(importer.start(repMembers));
}
@Test
public void testStartRepMembersBelowUserTree() throws Exception {
init(true);
Tree userTree = createUserTree();
Tree repMembers = userTree.addChild("memberTree");
repMembers.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBERS);
repMembers = repMembers.addChild("memberTree");
repMembers.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBERS);
assertFalse(importer.start(repMembers));
}
@Test
public void testStartRepMembersBelowRootTree() throws Exception {
init(true);
Tree repMembers = TreeUtil.addChild(root.getTree(PathUtils.ROOT_PATH), "memberTree", NT_REP_MEMBERS);
repMembers = TreeUtil.addChild(repMembers, "memberTree", NT_REP_MEMBERS);
repMembers = TreeUtil.addChild(repMembers, "memberTree", NT_REP_MEMBERS);
assertFalse(importer.start(repMembers));
}
@Test
public void testStartRepMembersBelowAnyTree() throws Exception {
init(true);
Tree anyTree = TreeUtil.addChild(root.getTree(PathUtils.ROOT_PATH), "anyTree", NT_OAK_UNSTRUCTURED);
Tree repMembers = TreeUtil.addChild(anyTree, "memberTree", NT_REP_MEMBERS);
repMembers = TreeUtil.addChild(repMembers, "memberTree", NT_REP_MEMBERS);
assertFalse(importer.start(repMembers));
}
// a tree that doesn't exist is never claimed
@Test
public void testStartNonExistingTree() throws Exception {
init();
Tree tree = when(mock(Tree.class).exists()).thenReturn(false).getMock();
assertFalse(importer.start(tree));
}
//-----------------------------------------------------< startChildInfo >---
// startChildInfo() without a prior init() must fail fast
@Test(expected = IllegalStateException.class)
public void testStartChildInfoIllegalState() {
importer.startChildInfo(createNodeInfo("memberRef", NT_REP_MEMBER_REFERENCES), ImmutableList.of(createPropInfo(REP_MEMBERS, "member1")));
}
// startChildInfo() after a start() that declined the tree (member list not
// below a group) must also fail
@Test(expected = IllegalStateException.class)
public void testStartChildInfoWithoutValidStart() throws Exception {
init(true);
Tree memberRefList = root.getTree(PathUtils.ROOT_PATH).addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
importer.startChildInfo(createNodeInfo("memberRef", NT_REP_MEMBER_REFERENCES), ImmutableList.of(createPropInfo(REP_MEMBERS, "member1")));
}
// a member-references child without any rep:members property is tolerated
@Test
public void testStartChildInfoWithoutRepMembersProperty() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
importer.startChildInfo(createNodeInfo("memberRef", NT_REP_MEMBER_REFERENCES), ImmutableList.of());
}
@Test
public void testStartChildInfoWithRepMembersProperty() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
importer.startChildInfo(createNodeInfo("memberRef", NT_REP_MEMBER_REFERENCES), ImmutableList.of(createPropInfo(REP_MEMBERS, "member1")));
}
// a property that is not rep:members is ignored: no membership is recorded
@Test
public void testStartChildInfoWithOtherProperty() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
importer.startChildInfo(createNodeInfo("memberRef", NT_REP_MEMBER_REFERENCES), ImmutableList.of(createPropInfo("otherName", "member1")));
importer.processReferences();
// no members should have been added to the group node
assertFalse(groupTree.hasProperty(REP_MEMBERS));
assertFalse(memberRefList.getChildren().iterator().hasNext());
}
// child info of type rep:Members below a claimed rep:Members tree is accepted
@Test
public void testStartRepMembersChildInfo() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree repMembers = groupTree.addChild("memberTree");
repMembers.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBERS);
importer.start(repMembers);
importer.startChildInfo(createNodeInfo("memberTree", NT_REP_MEMBERS), ImmutableList.of(createPropInfo("anyProp", "memberValue")));
}
// child info of an unrelated node type must not throw either
@Test
public void testStartOtherChildInfo() throws Exception {
init(true);
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
importer.startChildInfo(createNodeInfo("memberRef", NT_OAK_UNSTRUCTURED), ImmutableList.of(createPropInfo(REP_MEMBERS, "member1")));
}
//-------------------------------------------------------< endChildInfo >---
// endChildInfo() holds no state and must be safe to call any time
@Test
public void testEndChildInfoIsNoop() {
importer.endChildInfo();
}
//----------------------------------------------------------------< end >---
// end() must reset the importer's internal 'currentMembership' state that
// start() established; verified via reflection on the private field.
@Test
public void testEnd() throws Exception {
init();
Tree groupTree = createGroupTree();
Tree memberRefList = groupTree.addChild(REP_MEMBERS_LIST);
memberRefList.setProperty(JcrConstants.JCR_PRIMARYTYPE, NT_REP_MEMBER_REFERENCES_LIST);
importer.start(memberRefList);
// start() on a valid member list populates the private membership buffer
Field f = UserImporter.class.getDeclaredField("currentMembership");
f.setAccessible(true);
assertNotNull(f.get(importer));
importer.end(memberRefList);
// ... and end() must clear it again
assertNull(f.get(importer));
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.directory.server.core.partition.impl.btree.jdbm;
import java.io.IOException;
import jdbm.helper.TupleBrowser;
import org.apache.directory.api.ldap.model.constants.Loggers;
import org.apache.directory.api.ldap.model.cursor.AbstractCursor;
import org.apache.directory.api.ldap.model.cursor.CursorException;
import org.apache.directory.api.ldap.model.cursor.InvalidCursorPositionException;
import org.apache.directory.api.ldap.model.cursor.Tuple;
import org.apache.directory.api.ldap.model.exception.LdapException;
import org.apache.directory.server.i18n.I18n;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Cursor over the Tuples of a JDBM BTree. Duplicate keys are not supported
* by JDBM natively so you will not see duplicate keys. For this reason as
* well before() and after() positioning only considers the key of the Tuple
* arguments provided.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
class NoDupsCursor<K, V> extends AbstractCursor<Tuple<K, V>>
{
    /** A dedicated log for cursors */
    private static final Logger LOG_CURSOR = LoggerFactory.getLogger( Loggers.CURSOR_LOG.getName() );

    /** Speedup for logs */
    private static final boolean IS_DEBUG = LOG_CURSOR.isDebugEnabled();

    /** The JDBM table whose BTree this cursor iterates over */
    private final JdbmTable<K, V> table;

    /** Scratch tuple reused on every browser advance to avoid per-step allocation */
    private jdbm.helper.Tuple jdbmTuple = new jdbm.helper.Tuple();

    /** Typed tuple handed out by get(); refreshed on each successful next()/previous() */
    private Tuple<K, V> returnedTuple = new Tuple<K, V>();

    /** Browser over the underlying BTree; lazily (re)created by positioning methods */
    private TupleBrowser browser;

    /** True when returnedTuple holds a value valid for get() */
    private boolean valueAvailable;


    /**
     * Creates a Cursor over the tuples of a JDBM table.
     *
     * @param table the JDBM Table to build a Cursor over
     */
    public NoDupsCursor( JdbmTable<K, V> table )
    {
        if ( IS_DEBUG )
        {
            LOG_CURSOR.debug( "Creating NoDupsCursor {}", this );
        }

        this.table = table;
    }


    /**
     * Invalidates the current position: clears both tuples and marks the
     * cursor as having no value available for get().
     */
    private void clearValue()
    {
        returnedTuple.setKey( null );
        returnedTuple.setValue( null );
        jdbmTuple.setKey( null );
        jdbmTuple.setValue( null );
        valueAvailable = false;
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public boolean available()
    {
        return valueAvailable;
    }


    /**
     * Positions this cursor just before the given key.
     *
     * @param key the key to position before
     * @throws CursorException wrapping any IOException from the BTree
     */
    public void beforeKey( K key ) throws LdapException, CursorException
    {
        checkNotClosed( "beforeKey()" );

        try
        {
            // browse( key ) places the browser right before the first tuple
            // whose key is greater than or equal to the given key
            browser = table.getBTree().browse( key );
            clearValue();
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * Positions this cursor just after the given key.
     *
     * @param key the key to position after
     * @throws CursorException wrapping any IOException from the BTree
     */
    @SuppressWarnings("unchecked")
    public void afterKey( K key ) throws LdapException, CursorException
    {
        // consistency with beforeKey(): fail fast if already closed
        checkNotClosed( "afterKey()" );

        try
        {
            browser = table.getBTree().browse( key );

            /*
             * While the next value is less than or equal to the element keep
             * advancing forward to the next item. If we cannot advance any
             * further then stop and return. If we find a value greater than
             * the element then we stop, backup, and return so subsequent calls
             * to getNext() will return a value greater than the element.
             */
            while ( browser.getNext( jdbmTuple ) )
            {
                checkNotClosed( "afterKey()" );
                K next = ( K ) jdbmTuple.getKey();

                int nextCompared = table.getKeyComparator().compare( next, key );

                if ( nextCompared > 0 )
                {
                    // overshot: back up one step so getNext() yields this tuple
                    browser.getPrevious( jdbmTuple );
                    clearValue();
                    return;
                }
            }

            clearValue();
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * Not supported: this cursor does not handle duplicate keys, so value
     * based positioning is meaningless.
     */
    public void beforeValue( K key, V value ) throws Exception
    {
        throw new UnsupportedOperationException( I18n.err( I18n.ERR_596 ) );
    }


    /**
     * Not supported: this cursor does not handle duplicate keys, so value
     * based positioning is meaningless.
     */
    public void afterValue( K key, V value ) throws Exception
    {
        throw new UnsupportedOperationException( I18n.err( I18n.ERR_596 ) );
    }


    /**
     * Positions this Cursor before the key of the supplied tuple.
     *
     * @param element the tuple who's key is used to position this Cursor
     */
    @Override
    public void before( Tuple<K, V> element ) throws LdapException, CursorException
    {
        beforeKey( element.getKey() );
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public void after( Tuple<K, V> element ) throws LdapException, CursorException
    {
        afterKey( element.getKey() );
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public void beforeFirst() throws LdapException, CursorException
    {
        checkNotClosed( "beforeFirst()" );

        try
        {
            // no-arg browse() starts before the very first tuple
            browser = table.getBTree().browse();
            clearValue();
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public void afterLast() throws LdapException, CursorException
    {
        checkNotClosed( "afterLast()" );

        try
        {
            // browse( null ) positions the browser after the last tuple
            browser = table.getBTree().browse( null );
            clearValue();
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public boolean first() throws LdapException, CursorException
    {
        beforeFirst();
        return next();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public boolean last() throws LdapException, CursorException
    {
        afterLast();
        return previous();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean previous() throws LdapException, CursorException
    {
        checkNotClosed( "previous()" );

        if ( browser == null )
        {
            // never positioned: default to the end so previous() yields the last tuple
            afterLast();
        }

        try
        {
            if ( browser.getPrevious( jdbmTuple ) )
            {
                // a JDBM browser re-yields the last returned tuple when the
                // iteration direction flips; skip it to avoid a duplicate
                if ( returnedTuple.getKey() != null && table.getKeyComparator().compare(
                    ( K ) jdbmTuple.getKey(), returnedTuple.getKey() ) == 0 )
                {
                    browser.getPrevious( jdbmTuple );
                }

                returnedTuple.setKey( ( K ) jdbmTuple.getKey() );
                returnedTuple.setValue( ( V ) jdbmTuple.getValue() );
                return valueAvailable = true;
            }
            else
            {
                clearValue();
                return false;
            }
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * {@inheritDoc}
     */
    @Override
    @SuppressWarnings("unchecked")
    public boolean next() throws LdapException, CursorException
    {
        // fixed: diagnostic label previously said "previous()" (copy-paste)
        checkNotClosed( "next()" );

        if ( browser == null )
        {
            // never positioned: default to the start so next() yields the first tuple
            beforeFirst();
        }

        try
        {
            if ( browser.getNext( jdbmTuple ) )
            {
                // a JDBM browser re-yields the last returned tuple when the
                // iteration direction flips; skip it to avoid a duplicate
                if ( returnedTuple.getKey() != null && table.getKeyComparator().compare(
                    ( K ) jdbmTuple.getKey(), returnedTuple.getKey() ) == 0 )
                {
                    browser.getNext( jdbmTuple );
                }

                returnedTuple.setKey( ( K ) jdbmTuple.getKey() );
                returnedTuple.setValue( ( V ) jdbmTuple.getValue() );
                return valueAvailable = true;
            }
            else
            {
                clearValue();
                return false;
            }
        }
        catch ( IOException e )
        {
            throw new CursorException( e );
        }
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public Tuple<K, V> get() throws CursorException
    {
        checkNotClosed( "get()" );

        if ( valueAvailable )
        {
            return returnedTuple;
        }

        throw new InvalidCursorPositionException();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public void close()
    {
        if ( IS_DEBUG )
        {
            LOG_CURSOR.debug( "Closing NoDupsCursor {}", this );
        }

        super.close();
    }


    /**
     * {@inheritDoc}
     */
    @Override
    public void close( Exception cause )
    {
        if ( IS_DEBUG )
        {
            LOG_CURSOR.debug( "Closing NoDupsCursor {}", this );
        }

        super.close( cause );
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.commons.configuration2.interpol;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertSame;
import static org.junit.Assert.assertTrue;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.easymock.EasyMock;
import org.easymock.IAnswer;
import org.junit.Before;
import org.junit.Test;
/**
* Test class for ConfigurationInterpolator.
*
* @version $Id$
*/
public class TestConfigurationInterpolator
{
/** Constant for a test variable prefix. */
private static final String TEST_PREFIX = "prefix";
/** Constant for a test variable name. */
private static final String TEST_NAME = "varname";
/** Constant for the value of the test variable. */
private static final String TEST_VALUE = "TestVariableValue";
/** Stores the object to be tested. */
private ConfigurationInterpolator interpolator;
@Before
public void setUp() throws Exception
{
interpolator = new ConfigurationInterpolator();
}
/**
* Creates a lookup object that can resolve the test variable (and nothing else).
*
* @return the test lookup object
*/
private static Lookup setUpTestLookup()
{
return setUpTestLookup(TEST_NAME, TEST_VALUE);
}
/**
* Creates a lookup object that can resolve the specified variable (and
* nothing else).
*
* @param var the variable name
* @param value the value of this variable
* @return the test lookup object
*/
private static Lookup setUpTestLookup(final String var, final String value)
{
Lookup lookup = EasyMock.createMock(Lookup.class);
EasyMock.expect(lookup.lookup(EasyMock.anyObject(String.class)))
.andAnswer(new IAnswer<Object>()
{
@Override
public Object answer() throws Throwable
{
if (var.equals(EasyMock.getCurrentArguments()[0]))
{
return value;
}
return null;
}
}).anyTimes();
EasyMock.replay(lookup);
return lookup;
}
/**
* Tests creating an instance. Does it contain some predefined lookups?
*/
@Test
public void testInit()
{
assertTrue("A default lookup is set", interpolator.getDefaultLookups().isEmpty());
assertTrue("Got predefined lookups", interpolator.getLookups().isEmpty());
assertNull("Got a parent interpolator", interpolator.getParentInterpolator());
}
/**
* Tests registering a lookup object at an instance.
*/
@Test
public void testRegisterLookup()
{
Lookup lookup = EasyMock.createMock(Lookup.class);
EasyMock.replay(lookup);
interpolator.registerLookup(TEST_PREFIX, lookup);
assertSame("New lookup not registered", lookup, interpolator
.getLookups().get(TEST_PREFIX));
assertTrue("Not in prefix set",
interpolator.prefixSet().contains(TEST_PREFIX));
assertTrue("Default lookups were changed", interpolator
.getDefaultLookups().isEmpty());
}
/**
* Tests registering a null lookup object. This should cause an exception.
*/
@Test(expected = IllegalArgumentException.class)
public void testRegisterLookupNull()
{
interpolator.registerLookup(TEST_PREFIX, null);
}
/**
* Tests registering a lookup object for an undefined prefix. This should
* cause an exception.
*/
@Test(expected = IllegalArgumentException.class)
public void testRegisterLookupNullPrefix()
{
interpolator.registerLookup(null, EasyMock.createMock(Lookup.class));
}
/**
* Tests deregistering a lookup object.
*/
@Test
public void testDeregisterLookup()
{
Lookup lookup = EasyMock.createMock(Lookup.class);
EasyMock.replay(lookup);
interpolator.registerLookup(TEST_PREFIX, lookup);
assertTrue("Derigstration not successfull", interpolator
.deregisterLookup(TEST_PREFIX));
assertFalse("Deregistered prefix still contained", interpolator
.prefixSet().contains(TEST_PREFIX));
assertTrue("Lookups not empty", interpolator.getLookups().isEmpty());
}
/**
* Tests deregistering an unknown lookup object.
*/
@Test
public void testDeregisterLookupNonExisting()
{
assertFalse("Could deregister unknown lookup", interpolator
.deregisterLookup(TEST_PREFIX));
}
/**
* Tests whether a variable can be resolved using the associated lookup
* object. The lookup is identified by the variable's prefix.
*/
@Test
public void testResolveWithPrefix()
{
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
assertEquals("Wrong variable value", TEST_VALUE, interpolator
.resolve(TEST_PREFIX + ':' + TEST_NAME));
}
/**
* Tests the behavior of the lookup method for variables with an unknown
* prefix. These variables should not be resolved.
*/
@Test
public void testResolveWithUnknownPrefix()
{
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
assertNull("Variable could be resolved", interpolator
.resolve("UnknownPrefix:" + TEST_NAME));
assertNull("Variable with empty prefix could be resolved", interpolator
.resolve(":" + TEST_NAME));
}
/**
* Tests looking up a variable without a prefix. This should trigger the
* default lookup object.
*/
@Test
public void testResolveDefault()
{
Lookup l1 = EasyMock.createMock(Lookup.class);
Lookup l2 = EasyMock.createMock(Lookup.class);
Lookup l3 = EasyMock.createMock(Lookup.class);
EasyMock.expect(l1.lookup(TEST_NAME)).andReturn(null);
EasyMock.expect(l2.lookup(TEST_NAME)).andReturn(TEST_VALUE);
EasyMock.replay(l1, l2, l3);
interpolator.addDefaultLookups(Arrays.asList(l1, l2, l3));
assertEquals("Wrong variable value", TEST_VALUE, interpolator
.resolve(TEST_NAME));
EasyMock.verify(l1, l2, l3);
}
/**
* Tests looking up a variable without a prefix when no default lookup is
* specified. Result should be null in this case.
*/
@Test
public void testResolveNoDefault()
{
assertNull("Variable could be resolved", interpolator.resolve(TEST_NAME));
}
/**
* Tests the empty variable prefix. This is a special case, but legal.
*/
@Test
public void testResolveEmptyPrefix()
{
interpolator.registerLookup("", setUpTestLookup());
assertEquals("Wrong variable value", TEST_VALUE, interpolator
.resolve(":" + TEST_NAME));
}
/**
* Tests an empty variable name.
*/
@Test
public void testResolveEmptyVarName()
{
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup("", TEST_VALUE));
assertEquals("Wrong variable value", TEST_VALUE, interpolator
.resolve(TEST_PREFIX + ":"));
}
/**
* Tests an empty variable name without a prefix.
*/
@Test
public void testResolveDefaultEmptyVarName()
{
interpolator.addDefaultLookup(setUpTestLookup("", TEST_VALUE));
assertEquals("Wrong variable value", TEST_VALUE, interpolator
.resolve(""));
}
/**
* Tests looking up a null variable. Result should be null, too.
*/
@Test
public void testResolveNull()
{
assertNull("Could resolve null variable", interpolator.resolve(null));
}
/**
* Tests whether the default lookup is called for variables with a prefix
* when the lookup that was registered for this prefix is not able to
* resolve the variable.
*/
@Test
public void testResolveDefaultAfterPrefixFails()
{
final String varName = TEST_PREFIX + ':' + TEST_NAME + "2";
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
interpolator.addDefaultLookup(setUpTestLookup(varName, TEST_VALUE));
assertEquals("Variable is not resolved by default lookup", TEST_VALUE,
interpolator.resolve(varName));
}
/**
* Tests whether a map with lookup objects can be registered.
*/
@Test
public void testRegisterLookups()
{
Lookup l1 = setUpTestLookup();
Lookup l2 = setUpTestLookup("someVar", "someValue");
Map<String, Lookup> lookups = new HashMap<String, Lookup>();
lookups.put(TEST_PREFIX, l1);
String prefix2 = TEST_PREFIX + "_other";
lookups.put(prefix2, l2);
interpolator.registerLookups(lookups);
Map<String, Lookup> lookups2 = interpolator.getLookups();
assertEquals("Wrong number of lookups", 2, lookups2.size());
assertEquals("Wrong l1", l1, lookups2.get(TEST_PREFIX));
assertEquals("Wrong l2", l2, lookups2.get(prefix2));
}
/**
* Tests whether a null map with lookup objects is handled correctly.
*/
@Test
public void testRegisterLookupsNull()
{
interpolator.registerLookups(null);
assertTrue("Got lookups", interpolator.getLookups().isEmpty());
}
/**
* Tests that modification of the map with lookups does not affect the object.
*/
@Test
public void testGetLookupsModify()
{
Map<String, Lookup> lookups = interpolator.getLookups();
lookups.put(TEST_PREFIX, setUpTestLookup());
assertTrue("Map was modified", interpolator.getLookups().isEmpty());
}
/**
* Tests whether multiple default lookups can be added.
*/
@Test
public void testAddDefaultLookups()
{
List<Lookup> lookups = new ArrayList<Lookup>();
lookups.add(setUpTestLookup());
lookups.add(setUpTestLookup("test", "value"));
interpolator.addDefaultLookups(lookups);
List<Lookup> lookups2 = interpolator.getDefaultLookups();
assertEquals("Wrong number of default lookups", 2, lookups2.size());
assertTrue("Wrong content", lookups2.containsAll(lookups));
}
/**
* Tests whether a null collection of default lookups is handled correctly.
*/
@Test
public void testAddDefaultLookupsNull()
{
interpolator.addDefaultLookups(null);
assertTrue("Got default lookups", interpolator.getDefaultLookups()
.isEmpty());
}
/**
* Tests whether modification of the list of default lookups does not affect
* the object.
*/
@Test
public void testGetDefaultLookupsModify()
{
List<Lookup> lookups = interpolator.getDefaultLookups();
lookups.add(setUpTestLookup());
assertTrue("List was modified", interpolator.getDefaultLookups()
.isEmpty());
}
/**
* Tests whether a default lookup object can be removed.
*/
@Test
public void testRemoveDefaultLookup()
{
List<Lookup> lookups = new ArrayList<Lookup>();
lookups.add(setUpTestLookup());
lookups.add(setUpTestLookup("test", "value"));
interpolator.addDefaultLookups(lookups);
assertTrue("Wrong result",
interpolator.removeDefaultLookup(lookups.get(0)));
assertFalse("Lookup still available", interpolator.getDefaultLookups()
.contains(lookups.get(0)));
assertEquals("Wrong number of default lookups", 1, interpolator
.getDefaultLookups().size());
}
/**
* Tests whether a non existing default lookup object can be removed.
*/
@Test
public void testRemoveDefaultLookupNonExisting()
{
assertFalse("Wrong result",
interpolator.removeDefaultLookup(setUpTestLookup()));
}
/**
* Tests that the prefix set cannot be modified.
*/
@Test(expected = UnsupportedOperationException.class)
public void testPrefixSetModify()
{
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
Iterator<String> it = interpolator.prefixSet().iterator();
it.next();
it.remove();
}
/**
* Tests handling of a parent {@code ConfigurationInterpolator} if the
* variable can already be resolved by the current instance.
*/
@Test
public void testResolveParentVariableFound()
{
ConfigurationInterpolator parent =
EasyMock.createMock(ConfigurationInterpolator.class);
EasyMock.replay(parent);
interpolator.setParentInterpolator(parent);
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
assertEquals("Wrong value", TEST_VALUE,
interpolator.resolve(TEST_PREFIX + ':' + TEST_NAME));
}
/**
* Tests whether the parent {@code ConfigurationInterpolator} is invoked if
* the test instance cannot resolve a variable.
*/
@Test
public void testResolveParentVariableNotFound()
{
ConfigurationInterpolator parent =
EasyMock.createMock(ConfigurationInterpolator.class);
EasyMock.expect(parent.resolve(TEST_NAME)).andReturn(TEST_VALUE);
EasyMock.replay(parent);
interpolator.setParentInterpolator(parent);
assertEquals("Wrong value", TEST_VALUE, interpolator.resolve(TEST_NAME));
EasyMock.verify(parent);
}
/**
* Tests interpolation of a non string argument.
*/
@Test
public void testInterpolateObject()
{
Object value = 42;
assertSame("Value was changed", value, interpolator.interpolate(value));
}
/**
* Tests a successful interpolation of a string value.
*/
@Test
public void testInterpolateString()
{
String value = "${" + TEST_PREFIX + ':' + TEST_NAME + "}";
interpolator.registerLookup(TEST_PREFIX, setUpTestLookup());
assertEquals("Wrong result", TEST_VALUE,
interpolator.interpolate(value));
}
/**
* Tests interpolation with a variable which cannot be resolved.
*/
@Test
public void testInterpolateStringUnknownVariable()
{
String value = "${unknownVariable}";
assertEquals("Wrong result", value, interpolator.interpolate(value));
}
/**
* Tests whether the flag for substitution in variable names can be
* modified.
*/
@Test
public void testEnableSubstitutionInVariables()
{
assertFalse("Variable substitution enabled",
interpolator.isEnableSubstitutionInVariables());
interpolator.addDefaultLookup(setUpTestLookup("java.version", "1.4"));
interpolator.addDefaultLookup(setUpTestLookup("jre-1.4",
"C:\\java\\1.4"));
String var = "${jre-${java.version}}";
assertEquals("Wrong result (1)", var, interpolator.interpolate(var));
interpolator.setEnableSubstitutionInVariables(true);
assertTrue("Variable substitution not enabled",
interpolator.isEnableSubstitutionInVariables());
assertEquals("Wrong result (2)", "C:\\java\\1.4",
interpolator.interpolate(var));
}
/**
* Tries to obtain an instance from a null specification.
*/
@Test(expected = IllegalArgumentException.class)
public void testFromSpecificationNull()
{
ConfigurationInterpolator.fromSpecification(null);
}
/**
* Tests fromSpecification() if the specification contains an instance.
*/
@Test
public void testFromSpecificationInterpolator()
{
ConfigurationInterpolator ci =
EasyMock.createMock(ConfigurationInterpolator.class);
EasyMock.replay(ci);
InterpolatorSpecification spec =
new InterpolatorSpecification.Builder()
.withDefaultLookup(EasyMock.createMock(Lookup.class))
.withParentInterpolator(interpolator)
.withInterpolator(ci).create();
assertSame("Wrong result", ci,
ConfigurationInterpolator.fromSpecification(spec));
}
/**
* Tests fromSpecification() if a new instance has to be created.
*/
@Test
public void testFromSpecificationNewInstance()
{
Lookup defLookup = EasyMock.createMock(Lookup.class);
Lookup preLookup = EasyMock.createMock(Lookup.class);
EasyMock.replay(defLookup, preLookup);
InterpolatorSpecification spec =
new InterpolatorSpecification.Builder()
.withDefaultLookup(defLookup)
.withPrefixLookup("p", preLookup)
.withParentInterpolator(interpolator).create();
ConfigurationInterpolator ci =
ConfigurationInterpolator.fromSpecification(spec);
assertEquals("Wrong number of default lookups", 1, ci
.getDefaultLookups().size());
assertTrue("Wrong default lookup",
ci.getDefaultLookups().contains(defLookup));
assertEquals("Wrong number of prefix lookups", 1, ci.getLookups()
.size());
assertSame("Wrong prefix lookup", preLookup, ci.getLookups().get("p"));
assertSame("Wrong parent", interpolator, ci.getParentInterpolator());
}
/**
* Tests whether default prefix lookups can be queried as a map.
*/
@Test
public void testGetDefaultPrefixLookups()
{
Map<String, Lookup> lookups =
ConfigurationInterpolator.getDefaultPrefixLookups();
assertEquals("Wrong number of lookups", DefaultLookups.values().length,
lookups.size());
for (DefaultLookups l : DefaultLookups.values())
{
assertSame("Wrong entry for " + l, l.getLookup(),
lookups.get(l.getPrefix()));
}
}
/**
* Tests that the map with default lookups cannot be modified.
*/
@Test(expected = UnsupportedOperationException.class)
public void testGetDefaultPrefixLookupsModify()
{
ConfigurationInterpolator.getDefaultPrefixLookups().put("test",
EasyMock.createMock(Lookup.class));
}
/**
* Tests nullSafeLookup() if a lookup object was provided.
*/
@Test
public void testNullSafeLookupExisting()
{
Lookup look = EasyMock.createMock(Lookup.class);
EasyMock.replay(look);
assertSame("Wrong result", look,
ConfigurationInterpolator.nullSafeLookup(look));
}
/**
* Tests whether nullSafeLookup() can handle null input.
*/
@Test
public void testNullSafeLookupNull()
{
Lookup lookup = ConfigurationInterpolator.nullSafeLookup(null);
assertNull("Got a lookup result", lookup.lookup("someVar"));
}
}
| |
package <%=packageName%>.config;
<%_ if (authenticationType == 'session') { _%>
import javax.validation.constraints.NotNull;
<%_ } _%>
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.web.cors.CorsConfiguration;
<%_ if (applicationType == 'gateway') { _%>
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
<%_ } _%>
/**
* Properties specific to JHipster.
*
* <p>
* Properties are configured in the application.yml file.
* </p>
*/
@ConfigurationProperties(prefix = "jhipster", ignoreUnknownFields = false)
public class JHipsterProperties {
    // One immutable holder per "jhipster.*" configuration subtree; Spring
    // binds values from application.yml onto these nested beans via setters.
    private final Async async = new Async();
    private final Http http = new Http();
    private final Cache cache = new Cache();
    private final Mail mail = new Mail();
    private final Security security = new Security();
    private final Swagger swagger = new Swagger();
    private final Metrics metrics = new Metrics();
    // Bound to "jhipster.cors.*" using Spring's own CorsConfiguration type.
    private final CorsConfiguration cors = new CorsConfiguration();
<%_ if (enableSocialSignIn) { _%>
    private final Social social = new Social();
<%_ } _%>
<%_ if (applicationType == 'gateway') { _%>
    private final Gateway gateway = new Gateway();
<%_ } _%>
    private final Ribbon ribbon = new Ribbon();
    public Async getAsync() {
        return async;
    }
    public Http getHttp() {
        return http;
    }
    public Cache getCache() {
        return cache;
    }
    public Mail getMail() {
        return mail;
    }
    public Security getSecurity() {
        return security;
    }
    public Swagger getSwagger() {
        return swagger;
    }
    public Metrics getMetrics() {
        return metrics;
    }
    public CorsConfiguration getCors() {
        return cors;
    }
<%_ if (enableSocialSignIn) { _%>
    public Social getSocial() {
        return social;
    }
<%_ } _%>
<%_ if (applicationType == 'gateway') { _%>
    public Gateway getGateway() {
        return gateway;
    }
<%_ } _%>
    public Ribbon getRibbon() {
        return ribbon;
    }
    /** Thread-pool settings for the application's async task executor. */
    public static class Async {
        // Threads kept alive even when idle.
        private int corePoolSize = 2;
        // Upper bound on the executor's thread count.
        private int maxPoolSize = 50;
        // Capacity of the task queue used before extra threads are created.
        private int queueCapacity = 10000;
        public int getCorePoolSize() {
            return corePoolSize;
        }
        public void setCorePoolSize(int corePoolSize) {
            this.corePoolSize = corePoolSize;
        }
        public int getMaxPoolSize() {
            return maxPoolSize;
        }
        public void setMaxPoolSize(int maxPoolSize) {
            this.maxPoolSize = maxPoolSize;
        }
        public int getQueueCapacity() {
            return queueCapacity;
        }
        public void setQueueCapacity(int queueCapacity) {
            this.queueCapacity = queueCapacity;
        }
    }
    /** HTTP-related settings. */
    public static class Http {
        private final Cache cache = new Cache();
        public Cache getCache() {
            return cache;
        }
        /** Browser-cache headers applied to static HTTP resources. */
        public static class Cache {
            // 1461 days = 4 years (includes one leap day).
            private int timeToLiveInDays = 1461;
            public int getTimeToLiveInDays() {
                return timeToLiveInDays;
            }
            public void setTimeToLiveInDays(int timeToLiveInDays) {
                this.timeToLiveInDays = timeToLiveInDays;
            }
        }
    }
    /** Application cache settings (Ehcache / Hazelcast, depending on options). */
    public static class Cache {
        // Default cache-entry lifetime, in seconds (1 hour).
        private int timeToLiveSeconds = 3600;
<%_ if (hibernateCache == 'ehcache') { _%>
        private final Ehcache ehcache = new Ehcache();
<%_ } _%>
<%_ if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { _%>
        private final Hazelcast hazelcast = new Hazelcast();
<%_ } _%>
        public int getTimeToLiveSeconds() {
            return timeToLiveSeconds;
        }
        public void setTimeToLiveSeconds(int timeToLiveSeconds) {
            this.timeToLiveSeconds = timeToLiveSeconds;
        }
<%_ if (hibernateCache == 'ehcache') { _%>
        public Ehcache getEhcache() {
            return ehcache;
        }
        /** Ehcache-specific tuning. */
        public static class Ehcache {
            // Maximum heap space Ehcache may use for local caches.
            private String maxBytesLocalHeap = "16M";
            public String getMaxBytesLocalHeap() {
                return maxBytesLocalHeap;
            }
            public void setMaxBytesLocalHeap(String maxBytesLocalHeap) {
                this.maxBytesLocalHeap = maxBytesLocalHeap;
            }
        }
<%_ } _%>
<%_ if (clusteredHttpSession == 'hazelcast' || hibernateCache == 'hazelcast') { _%>
        public Hazelcast getHazelcast() {
            return hazelcast;
        }
        /** Hazelcast-specific tuning. */
        public static class Hazelcast {
            // Number of synchronous backups kept per partition.
            private int backupCount = 1;
            public int getBackupCount() {
                return backupCount;
            }
            public void setBackupCount(int backupCount) {
                this.backupCount = backupCount;
            }
        }
<%_ } _%>
    }
    /** Outgoing e-mail settings. */
    public static class Mail {
        // Default "from" address for mail sent by the application.
        private String from = "<%=baseName%>@localhost";
        public String getFrom() {
            return from;
        }
        public void setFrom(String from) {
            this.from = from;
        }
    }
    /** Security settings: remember-me (session auth) or token authentication. */
    public static class Security {
<%_ if (authenticationType == 'session') { _%>
        private final RememberMe rememberMe = new RememberMe();
<%_ } _%>
<%_ if (authenticationType == 'oauth2' || authenticationType == 'jwt' || authenticationType == 'uaa') { _%>
        private final Authentication authentication = new Authentication();
<%_ } _%>
<%_ if (authenticationType == 'session') { _%>
        public RememberMe getRememberMe() {
            return rememberMe;
        }
<%_ } _%>
<%_ if (authenticationType == 'oauth2' || authenticationType == 'jwt' || authenticationType == 'uaa') { _%>
        public Authentication getAuthentication() {
            return authentication;
        }
        /** Token-based authentication settings (OAuth2 or JWT/UAA). */
        public static class Authentication {
<%_ if (authenticationType == 'oauth2') { _%>
            private final Oauth oauth = new Oauth();
<%_ } _%>
<%_ if (authenticationType == 'jwt' || authenticationType == 'uaa') { _%>
            private final Jwt jwt = new Jwt();
<%_ } _%>
<%_ if (authenticationType == 'oauth2') { _%>
            public Oauth getOauth() {
                return oauth;
            }
<%_ } _%>
<%_ if (authenticationType == 'jwt' || authenticationType == 'uaa') { _%>
            public Jwt getJwt() {
                return jwt;
            }
<%_ } _%>
<%_ if (authenticationType == 'oauth2') { _%>
            /** OAuth2 client credentials and token lifetime. */
            public static class Oauth {
                private String clientid;
                private String secret;
                // Access-token validity, in seconds (30 minutes).
                private int tokenValidityInSeconds = 1800;
                public String getClientid() {
                    return clientid;
                }
                public void setClientid(String clientid) {
                    this.clientid = clientid;
                }
                public String getSecret() {
                    return secret;
                }
                public void setSecret(String secret) {
                    this.secret = secret;
                }
                public int getTokenValidityInSeconds() {
                    return tokenValidityInSeconds;
                }
                public void setTokenValidityInSeconds(int tokenValidityInSeconds) {
                    this.tokenValidityInSeconds = tokenValidityInSeconds;
                }
            }
<%_ } _%>
<%_ if (authenticationType == 'jwt' || authenticationType == 'uaa') { _%>
            /** JWT signing secret and token lifetimes. */
            public static class Jwt {
                // HMAC signing key; must be configured per environment.
                private String secret;
                // Standard token validity, in seconds (30 minutes).
                private long tokenValidityInSeconds = 1800;
                // Extended validity for "remember me" tokens (30 days).
                private long tokenValidityInSecondsForRememberMe = 2592000;
                public String getSecret() {
                    return secret;
                }
                public void setSecret(String secret) {
                    this.secret = secret;
                }
                public long getTokenValidityInSeconds() {
                    return tokenValidityInSeconds;
                }
                public void setTokenValidityInSeconds(long tokenValidityInSeconds) {
                    this.tokenValidityInSeconds = tokenValidityInSeconds;
                }
                public long getTokenValidityInSecondsForRememberMe() {
                    return tokenValidityInSecondsForRememberMe;
                }
                public void setTokenValidityInSecondsForRememberMe(long tokenValidityInSecondsForRememberMe) {
                    this.tokenValidityInSecondsForRememberMe = tokenValidityInSecondsForRememberMe;
                }
            }
<%_ } _%>
        }
<%_ } _%>
<%_ if (authenticationType == 'session') { _%>
        /** Remember-me cookie settings for session-based authentication. */
        public static class RememberMe {
            // Key used to sign remember-me cookies; required (@NotNull).
            @NotNull
            private String key;
            public String getKey() {
                return key;
            }
            public void setKey(String key) {
                this.key = key;
            }
        }
<%_ } _%>
    }
    /** Swagger API documentation settings. */
    public static class Swagger {
        // Metadata rendered on the generated API documentation page.
        private String title = "<%=baseName%> API";
        private String description = "<%=baseName%> API documentation";
        private String version = "0.0.1";
        private String termsOfServiceUrl;
        private String contactName;
        private String contactUrl;
        private String contactEmail;
        private String license;
        private String licenseUrl;
        // Wrapper type: null means "not configured" (distinct from false).
        private Boolean enabled;
        public String getTitle() {
            return title;
        }
        public void setTitle(String title) {
            this.title = title;
        }
        public String getDescription() {
            return description;
        }
        public void setDescription(String description) {
            this.description = description;
        }
        public String getVersion() {
            return version;
        }
        public void setVersion(String version) {
            this.version = version;
        }
        public String getTermsOfServiceUrl() {
            return termsOfServiceUrl;
        }
        public void setTermsOfServiceUrl(String termsOfServiceUrl) {
            this.termsOfServiceUrl = termsOfServiceUrl;
        }
        public String getContactName() {
            return contactName;
        }
        public void setContactName(String contactName) {
            this.contactName = contactName;
        }
        public String getContactUrl() {
            return contactUrl;
        }
        public void setContactUrl(String contactUrl) {
            this.contactUrl = contactUrl;
        }
        public String getContactEmail() {
            return contactEmail;
        }
        public void setContactEmail(String contactEmail) {
            this.contactEmail = contactEmail;
        }
        public String getLicense() {
            return license;
        }
        public void setLicense(String license) {
            this.license = license;
        }
        public String getLicenseUrl() {
            return licenseUrl;
        }
        public void setLicenseUrl(String licenseUrl) {
            this.licenseUrl = licenseUrl;
        }
        // NOTE(review): JavaBeans reserves the "is" prefix for primitive
        // boolean getters; with a Boolean wrapper the conventional name is
        // getEnabled() — confirm no caller depends on this name before renaming.
        public Boolean isEnabled() {
            return enabled;
        }
        public void setEnabled(Boolean enabled) {
            this.enabled = enabled;
        }
    }
public static class Metrics {
private final Jmx jmx = new Jmx();
private final Spark spark = new Spark();
private final Graphite graphite = new Graphite();
private final Logs logs = new Logs();
public Jmx getJmx() {
return jmx;
}
public Spark getSpark() {
return spark;
}
public Graphite getGraphite() {
return graphite;
}
public Logs getLogs() {
return logs;
}
public static class Jmx {
private boolean enabled = true;
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
public static class Spark {
private boolean enabled = false;
private String host = "localhost";
private int port = 9999;
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
}
public static class Graphite {
private boolean enabled = false;
private String host = "localhost";
private int port = 2003;
private String prefix = "<%=baseName%>";
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
public String getHost() {
return host;
}
public void setHost(String host) {
this.host = host;
}
public int getPort() {
return port;
}
public void setPort(int port) {
this.port = port;
}
public String getPrefix() {
return prefix;
}
public void setPrefix(String prefix) {
this.prefix = prefix;
}
}
public static class Logs {
private boolean enabled = false;
private long reportFrequency = 60;
public long getReportFrequency() {
return reportFrequency;
}
public void setReportFrequency(int reportFrequency) {
this.reportFrequency = reportFrequency;
}
public boolean isEnabled() {
return enabled;
}
public void setEnabled(boolean enabled) {
this.enabled = enabled;
}
}
}
    private final Logging logging = new Logging();
    public Logging getLogging() { return logging; }
    /** Logging settings (Logstash socket appender, Spectator metrics toggle). */
    public static class Logging {
        private final Logstash logstash = new Logstash();
        public Logstash getLogstash() { return logstash; }
        /** Logstash socket appender configuration. */
        public static class Logstash {
            private boolean enabled = false;
            private String host = "localhost";
            private int port = 5000;
            // Size of the async appender's event queue.
            private int queueSize = 512;
            public boolean isEnabled() { return enabled; }
            public void setEnabled(boolean enabled) { this.enabled = enabled; }
            public String getHost() { return host; }
            public void setHost(String host) { this.host = host; }
            public int getPort() { return port; }
            public void setPort(int port) { this.port = port; }
            public int getQueueSize() { return queueSize; }
            public void setQueueSize(int queueSize) { this.queueSize = queueSize; }
        }
<%_ if (applicationType == 'gateway' || applicationType == 'microservice') { _%>
        // NOTE(review): SpectatorMetrics is nested under Logging, so it binds
        // under the jhipster.logging.* prefix rather than jhipster.metrics.* —
        // presumably intentional; confirm against the application.yml template.
        private final SpectatorMetrics spectatorMetrics = new SpectatorMetrics();
        public SpectatorMetrics getSpectatorMetrics() { return spectatorMetrics; }
        /** Netflix Spectator metrics toggle. */
        public static class SpectatorMetrics {
            private boolean enabled = false;
            public boolean isEnabled() { return enabled; }
            public void setEnabled(boolean enabled) { this.enabled = enabled; }
        }
<%_ } _%>
    }
<%_ if (enableSocialSignIn) { _%>
    /** Social sign-in settings. */
    public static class Social {
        // Route the user is redirected to after a successful social sign-in.
        private String redirectAfterSignIn = "/#/home";
        public String getRedirectAfterSignIn() {
            return redirectAfterSignIn;
        }
        public void setRedirectAfterSignIn(String redirectAfterSignIn) {
            this.redirectAfterSignIn = redirectAfterSignIn;
        }
    }<%_ } _%>
<%_ if (applicationType == 'gateway') { _%>
    /** Gateway-specific settings (rate limiting, authorized routes). */
    public static class Gateway {
        private final RateLimiting rateLimiting = new RateLimiting();
        public RateLimiting getRateLimiting() {
            return rateLimiting;
        }
        // Microservice name -> endpoints the gateway is allowed to route to.
        private Map<String, List<String>> authorizedMicroservicesEndpoints = new LinkedHashMap<>();
        public Map<String, List<String>> getAuthorizedMicroservicesEndpoints() {
            return authorizedMicroservicesEndpoints;
        }
        public void setAuthorizedMicroservicesEndpoints(Map<String, List<String>> authorizedMicroservicesEndpoints) {
            this.authorizedMicroservicesEndpoints = authorizedMicroservicesEndpoints;
        }
        /** Simple request-count rate limiting at the gateway. */
        public static class RateLimiting {
            private boolean enabled = false;
            // Maximum number of requests allowed within the limiting window.
            private long limit = 100000L;
            public boolean isEnabled() {
                return enabled;
            }
            public void setEnabled(boolean enabled) {
                this.enabled = enabled;
            }
            public long getLimit() {
                return this.limit;
            }
            public void setLimit(long limit) {
                this.limit = limit;
            }
        }
    }
<%_ } _%>
public static class Ribbon {
private String[] displayOnActiveProfiles;
public String[] getDisplayOnActiveProfiles() {
return displayOnActiveProfiles;
}
public void setDisplayOnActiveProfiles(String[] displayOnActiveProfiles) {
this.displayOnActiveProfiles = displayOnActiveProfiles;
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.eigenbase.sql;
import java.io.*;
import java.nio.charset.*;
import java.util.*;
import org.eigenbase.resource.*;
import org.eigenbase.sql.parser.*;
import org.eigenbase.util.*;
import static org.eigenbase.util.Static.RESOURCE;
/**
 * A <code>SqlCollation</code> is an object representing a <code>Collate</code>
 * statement. It is immutable.
 */
public class SqlCollation implements Serializable {
  /** Collation for values other than column references (literals, host
   * variables), carrying the default collation of their repertoire. */
  public static final SqlCollation COERCIBLE =
      new SqlCollation(Coercibility.COERCIBLE);
  /** Collation for column references, with the collating sequence defined
   * when the column was created. */
  public static final SqlCollation IMPLICIT =
      new SqlCollation(Coercibility.IMPLICIT);
  //~ Enums ------------------------------------------------------------------
  /**
   * <blockquote>A <character value expression> consisting of a column
   * reference has the coercibility characteristic Implicit, with collating
   * sequence as defined when the column was created. A <character value
   * expression> consisting of a value other than a column (e.g., a host
   * variable or a literal) has the coercibility characteristic Coercible,
   * with the default collation for its character repertoire. A <character
   * value expression> simply containing a <collate clause> has the
   * coercibility characteristic Explicit, with the collating sequence
   * specified in the <collate clause>.</blockquote>
   *
   * @sql.99 Part 2 Section 4.2.3
   */
  public enum Coercibility {
    /** Strongest coercibility. */
    EXPLICIT,
    IMPLICIT,
    COERCIBLE,
    /** Weakest coercibility. */
    NONE
  }
  //~ Instance fields --------------------------------------------------------
  // Derived name of form CHARSET$LOCALE[$STRENGTH]; also the identity used
  // by equals()/hashCode().
  protected final String collationName;
  protected final SerializableCharset wrappedCharset;
  protected final Locale locale;
  protected final String strength;
  private final Coercibility coercibility;
  //~ Constructors -----------------------------------------------------------
  /**
   * Creates a Collation by its name and its coercibility
   *
   * @param collation Collation specification
   * @param coercibility Coercibility
   */
  public SqlCollation(
      String collation,
      Coercibility coercibility) {
    this.coercibility = coercibility;
    SqlParserUtil.ParsedCollation parseValues =
        SqlParserUtil.parseCollation(collation);
    Charset charset = parseValues.getCharset();
    this.wrappedCharset = SerializableCharset.forCharset(charset);
    locale = parseValues.getLocale();
    strength = parseValues.getStrength();
    // Fixed: use Locale.ROOT so the derived collation name does not depend on
    // the JVM's default locale (e.g. Turkish dotted/dotless 'i' mappings).
    String c = charset.name().toUpperCase(Locale.ROOT) + "$" + locale.toString();
    if ((strength != null) && (strength.length() > 0)) {
      c += "$" + strength;
    }
    collationName = c;
  }
  /**
   * Creates a SqlCollation with the default collation name and the given
   * coercibility.
   *
   * @param coercibility Coercibility
   */
  public SqlCollation(Coercibility coercibility) {
    this(
        SaffronProperties.instance().defaultCollation.get(),
        coercibility);
  }
  //~ Methods ----------------------------------------------------------------
  /** Two collations are equal iff their collation names are equal;
   * coercibility is deliberately not part of the identity. */
  @Override
  public boolean equals(Object o) {
    return this == o
        || o instanceof SqlCollation
        && collationName.equals(((SqlCollation) o).collationName);
  }
  @Override
  public int hashCode() {
    return collationName.hashCode();
  }
  /**
   * Returns the collating sequence (the collation name) and the coercibility
   * for the resulting value of a dyadic operator.
   *
   * @param col1 first operand for the dyadic operation
   * @param col2 second operand for the dyadic operation
   * @return the resulting collation sequence. The "no collating sequence"
   * result is returned as null.
   *
   * @sql.99 Part 2 Section 4.2.3 Table 2
   */
  public static SqlCollation getCoercibilityDyadicOperator(
      SqlCollation col1,
      SqlCollation col2) {
    return getCoercibilityDyadic(col1, col2);
  }
  /**
   * Returns the collating sequence (the collation name) and the coercibility
   * for the resulting value of a dyadic operator.
   *
   * @param col1 first operand for the dyadic operation
   * @param col2 second operand for the dyadic operation
   * @return the resulting collation sequence
   * @throws EigenbaseException {@link EigenbaseNewResource#invalidCompare} or
   * {@link EigenbaseNewResource#differentCollations}
   * if no collating sequence can be deduced
   *
   * @sql.99 Part 2 Section 4.2.3 Table 2
   */
  public static SqlCollation getCoercibilityDyadicOperatorThrows(
      SqlCollation col1,
      SqlCollation col2) {
    SqlCollation ret = getCoercibilityDyadic(col1, col2);
    if (null == ret) {
      throw RESOURCE.invalidCompare(
          col1.collationName,
          "" + col1.coercibility,
          col2.collationName,
          "" + col2.coercibility).ex();
    }
    return ret;
  }
  /**
   * Returns the collating sequence (the collation name) to use for the
   * resulting value of a comparison.
   *
   * @param col1 first operand for the dyadic operation
   * @param col2 second operand for the dyadic operation
   * @return the resulting collation sequence. If no collating sequence could
   * be deduced a {@link EigenbaseNewResource#invalidCompare} is thrown
   *
   * @sql.99 Part 2 Section 4.2.3 Table 3
   */
  public static String getCoercibilityDyadicComparison(
      SqlCollation col1,
      SqlCollation col2) {
    return getCoercibilityDyadicOperatorThrows(col1, col2).collationName;
  }
  /**
   * Returns the result for {@link #getCoercibilityDyadicComparison} and
   * {@link #getCoercibilityDyadicOperator}.
   */
  protected static SqlCollation getCoercibilityDyadic(
      SqlCollation col1,
      SqlCollation col2) {
    assert null != col1;
    assert null != col2;
    final Coercibility coercibility1 = col1.getCoercibility();
    final Coercibility coercibility2 = col2.getCoercibility();
    // Implements SQL:99 Part 2 Section 4.2.3 Table 2: the stronger
    // coercibility wins; equal strengths require matching names.
    switch (coercibility1) {
    case COERCIBLE:
      switch (coercibility2) {
      case COERCIBLE:
        return new SqlCollation(
            col2.collationName,
            Coercibility.COERCIBLE);
      case IMPLICIT:
        return new SqlCollation(
            col2.collationName,
            Coercibility.IMPLICIT);
      case NONE:
        return null;
      case EXPLICIT:
        return new SqlCollation(
            col2.collationName,
            Coercibility.EXPLICIT);
      default:
        throw Util.unexpected(coercibility2);
      }
    case IMPLICIT:
      switch (coercibility2) {
      case COERCIBLE:
        return new SqlCollation(
            col1.collationName,
            Coercibility.IMPLICIT);
      case IMPLICIT:
        if (col1.collationName.equals(col2.collationName)) {
          return new SqlCollation(
              col2.collationName,
              Coercibility.IMPLICIT);
        }
        return null;
      case NONE:
        return null;
      case EXPLICIT:
        return new SqlCollation(
            col2.collationName,
            Coercibility.EXPLICIT);
      default:
        throw Util.unexpected(coercibility2);
      }
    case NONE:
      switch (coercibility2) {
      case COERCIBLE:
      case IMPLICIT:
      case NONE:
        return null;
      case EXPLICIT:
        return new SqlCollation(
            col2.collationName,
            Coercibility.EXPLICIT);
      default:
        throw Util.unexpected(coercibility2);
      }
    case EXPLICIT:
      switch (coercibility2) {
      case COERCIBLE:
      case IMPLICIT:
      case NONE:
        return new SqlCollation(
            col1.collationName,
            Coercibility.EXPLICIT);
      case EXPLICIT:
        if (col1.collationName.equals(col2.collationName)) {
          return new SqlCollation(
              col2.collationName,
              Coercibility.EXPLICIT);
        }
        // Two different explicit collations cannot be reconciled.
        throw RESOURCE.differentCollations(
            col1.collationName,
            col2.collationName).ex();
      default:
        throw Util.unexpected(coercibility2);
      }
    default:
      throw Util.unexpected(coercibility1);
    }
  }
  @Override
  public String toString() {
    return "COLLATE " + collationName;
  }
  /** Writes this collation as a <code>COLLATE name</code> clause. */
  public void unparse(
      SqlWriter writer,
      int leftPrec,
      int rightPrec) {
    writer.keyword("COLLATE");
    writer.identifier(collationName);
  }
  public Charset getCharset() {
    return wrappedCharset.getCharset();
  }
  public final String getCollationName() {
    return collationName;
  }
  public final SqlCollation.Coercibility getCoercibility() {
    return coercibility;
  }
}
// End SqlCollation.java
| |
/*
* Copyright (c) 2005, 2021, Oracle and/or its affiliates. All rights reserved.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.sun.org.apache.xerces.internal.jaxp;
import com.sun.org.apache.xerces.internal.parsers.DOMParser;
import com.sun.org.apache.xerces.internal.util.SAXMessageFormatter;
import com.sun.org.apache.xerces.internal.utils.XMLSecurityManager;
import com.sun.org.apache.xerces.internal.utils.XMLSecurityPropertyManager;
import java.util.HashMap;
import java.util.Map;
import javax.xml.XMLConstants;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.validation.Schema;
import jdk.xml.internal.JdkProperty;
import org.xml.sax.SAXException;
import org.xml.sax.SAXNotRecognizedException;
import org.xml.sax.SAXNotSupportedException;
/**
 * Implementation of {@link DocumentBuilderFactory} backed by the Xerces
 * {@link DOMParser}.
 *
 * @author Rajiv Mordani
 * @author Edwin Goei
 * @LastModified: May 2021
 */
public class DocumentBuilderFactoryImpl extends DocumentBuilderFactory {
    /** These are DocumentBuilderFactory attributes not DOM attributes */
    private Map<String, Object> attributes;
    private Map<String, Boolean> features;
    private Schema grammar;
    private boolean isXIncludeAware;
    /**
     * State of the secure processing feature, initially <code>true</code>
     */
    private boolean fSecureProcess = true;
    // used to verify attributes
    XMLSecurityManager fSecurityManager = new XMLSecurityManager(true);
    XMLSecurityPropertyManager fSecurityPropertyMgr = new XMLSecurityPropertyManager();
    /**
     * Creates a new instance of a {@link javax.xml.parsers.DocumentBuilder}
     * using the currently configured parameters.
     */
    public DocumentBuilder newDocumentBuilder()
        throws ParserConfigurationException
    {
        /** Check that if a Schema has been specified that neither of the schema properties have been set. */
        if (grammar != null && attributes != null) {
            if (attributes.containsKey(JAXPConstants.JAXP_SCHEMA_LANGUAGE)) {
                throw new ParserConfigurationException(
                        SAXMessageFormatter.formatMessage(null,
                        "schema-already-specified", new Object[] {JAXPConstants.JAXP_SCHEMA_LANGUAGE}));
            }
            else if (attributes.containsKey(JAXPConstants.JAXP_SCHEMA_SOURCE)) {
                throw new ParserConfigurationException(
                        SAXMessageFormatter.formatMessage(null,
                        "schema-already-specified", new Object[] {JAXPConstants.JAXP_SCHEMA_SOURCE}));
            }
        }
        try {
            return new DocumentBuilderImpl(this, attributes, features, fSecureProcess);
        } catch (SAXException se) {
            // Handles both SAXNotSupportedException, SAXNotRecognizedException
            throw new ParserConfigurationException(se.getMessage());
        }
    }
    /**
     * Allows the user to set specific attributes on the underlying
     * implementation.
     * @param name name of attribute
     * @param value null means to remove attribute
     */
    public void setAttribute(String name, Object value)
        throws IllegalArgumentException
    {
        // This handles removal of attributes
        if (value == null) {
            if (attributes != null) {
                attributes.remove(name);
            }
            // Unrecognized attributes do not cause an exception
            return;
        }
        // This is ugly. We have to collect the attributes and then
        // later create a DocumentBuilderImpl to verify the attributes.
        // Create the Map if none existed before
        if (attributes == null) {
            attributes = new HashMap<>();
        }
        //check if the property is managed by security manager
        String pName;
        if ((pName = fSecurityManager.find(name)) != null) {
            // as the qName is deprecated, let the manager decide whether the
            // value shall be changed
            fSecurityManager.setLimit(name, JdkProperty.State.APIPROPERTY, value);
            attributes.put(pName, fSecurityManager.getLimitAsString(pName));
            // no need to create a DocumentBuilderImpl
            return;
        } else if ((pName = fSecurityPropertyMgr.find(name)) != null) {
            attributes.put(pName, value);
            return;
        }
        attributes.put(name, value);
        // Test the attribute name by possibly throwing an exception
        try {
            new DocumentBuilderImpl(this, attributes, features);
        } catch (Exception e) {
            // Roll back so an invalid attribute is not left in the map.
            attributes.remove(name);
            throw new IllegalArgumentException(e.getMessage());
        }
    }
    /**
     * Allows the user to retrieve specific attributes on the underlying
     * implementation.
     */
    public Object getAttribute(String name)
        throws IllegalArgumentException
    {
        //check if the property is managed by security manager
        String pName;
        if ((pName = fSecurityManager.find(name)) != null) {
            // Fixed: `attributes` is created lazily by setAttribute and may
            // still be null here; avoid a NullPointerException.
            return (attributes != null) ? attributes.get(pName) : null;
        } else if ((pName = fSecurityPropertyMgr.find(name)) != null) {
            return (attributes != null) ? attributes.get(pName) : null;
        }
        // See if it's in the attributes Map
        if (attributes != null) {
            Object val = attributes.get(name);
            if (val != null) {
                return val;
            }
        }
        DOMParser domParser = null;
        try {
            // We create a dummy DocumentBuilderImpl in case the attribute
            // name is not one that is in the attributes map.
            domParser =
                new DocumentBuilderImpl(this, attributes, features).getDOMParser();
            return domParser.getProperty(name);
        } catch (SAXException se1) {
            // Fixed: if the DocumentBuilderImpl constructor itself threw,
            // domParser is still null and must not be dereferenced below.
            if (domParser == null) {
                throw new IllegalArgumentException(se1.getMessage());
            }
            // assert(name is not recognized or not supported), try feature
            try {
                boolean result = domParser.getFeature(name);
                // Must have been a feature
                return result ? Boolean.TRUE : Boolean.FALSE;
            } catch (SAXException se2) {
                // Not a property or a feature
                throw new IllegalArgumentException(se1.getMessage());
            }
        }
    }
    public Schema getSchema() {
        return grammar;
    }
    public void setSchema(Schema grammar) {
        this.grammar = grammar;
    }
    public boolean isXIncludeAware() {
        return this.isXIncludeAware;
    }
    public void setXIncludeAware(boolean state) {
        this.isXIncludeAware = state;
    }
    public boolean getFeature(String name)
        throws ParserConfigurationException {
        if (name.equals(XMLConstants.FEATURE_SECURE_PROCESSING)) {
            return fSecureProcess;
        }
        // See if it's in the features map
        if (features != null) {
            Boolean val = features.get(name);
            if (val != null) {
                return val;
            }
        }
        try {
            // Delegate to a dummy parser for features not tracked locally.
            DOMParser domParser = new DocumentBuilderImpl(this, attributes, features).getDOMParser();
            return domParser.getFeature(name);
        }
        catch (SAXException e) {
            throw new ParserConfigurationException(e.getMessage());
        }
    }
    @SuppressWarnings("removal")
    public void setFeature(String name, boolean value)
        throws ParserConfigurationException {
        if (features == null) {
            features = new HashMap<>();
        }
        // If this is the secure processing feature, save it then return.
        if (name.equals(XMLConstants.FEATURE_SECURE_PROCESSING)) {
            // Secure processing may not be disabled when a SecurityManager
            // is installed.
            if (System.getSecurityManager() != null && (!value)) {
                throw new ParserConfigurationException(
                        SAXMessageFormatter.formatMessage(null,
                        "jaxp-secureprocessing-feature", null));
            }
            fSecureProcess = value;
            features.put(name, value ? Boolean.TRUE : Boolean.FALSE);
            return;
        }
        features.put(name, value ? Boolean.TRUE : Boolean.FALSE);
        // Test the feature by possibly throwing SAX exceptions
        try {
            new DocumentBuilderImpl(this, attributes, features);
        }
        catch (SAXNotSupportedException e) {
            features.remove(name);
            throw new ParserConfigurationException(e.getMessage());
        }
        catch (SAXNotRecognizedException e) {
            features.remove(name);
            throw new ParserConfigurationException(e.getMessage());
        }
    }
}
| |
/*
* Copyright (c) 2013, Swedish Institute of Computer Science All rights reserved. Redistribution and
* use in source and binary forms, with or without modification, are permitted provided that the
* following conditions are met: * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer. * Redistributions in binary form
* must reproduce the above copyright notice, this list of conditions and the following disclaimer
* in the documentation and/or other materials provided with the distribution. * Neither the name of
* The Swedish Institute of Computer Science nor the names of its contributors may be used to
* endorse or promote products derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
* FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE SWEDISH INSTITUTE OF
* COMPUTER SCIENCE BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY
* OF SUCH DAMAGE.
*/
/*
* Description: TODO:
*/
package models;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.ManyToMany;
import javax.persistence.ManyToOne;
import javax.persistence.OneToMany;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import javax.persistence.Version;
import play.db.ebean.Model;
import play.Logger;
import com.avaje.ebean.Ebean;
import com.avaje.ebean.Expr;
import com.avaje.ebean.annotation.EnumValue;
import controllers.Utils;
import controllers.CtrlStream;
@Entity
@Table(name = "streams")
public class Stream extends Model implements Comparable<Stream> {
    /** Serialization id for this Ebean-enhanced entity. */
    private static final long serialVersionUID = -8823372604684774587L;
    /* Type of data points this stream stores */
    public static enum StreamType {
        @EnumValue("D")
        DOUBLE,
        // Long is not needed
        // TODO: Should provide location instead...s
        // @EnumValue("L")
        // LONG,
        @EnumValue("S")
        STRING, @EnumValue("U")
        UNDEFINED
    }
    @Id
    public Long id;
    // Data-point type of this stream; UNDEFINED until explicitly set.
    public StreamType type = StreamType.UNDEFINED;
    // Geographic position of the stream's source (defaults to 0,0).
    public double latitude;
    public double longitude;
    public String description;
    // Whether the stream can be read without authentication.
    public boolean publicAccess = false;
    // Whether the stream appears in public search results.
    public boolean publicSearch = false;
    /** Freeze the Stream so any new incoming data is discarded */
    public boolean frozen = false;
    /**
     * The maximum duration to be kept. This should be used with the database to limit the size of
     * the datapoints list
     */
    public Long historySize = 1L;
    /** Last time a point was inserted */
    public Long lastUpdated = 0L;
    @Version
    // for concurrency protection
    private int version;
    /** Secret key for authentication */
    @Column(name = "secret_key")
    // key is a reserved keyword in mysql
    private String key;
    @ManyToOne
    public User owner;
    @ManyToOne
    public Resource resource;
    // should this be a field in the table? (i.e. not mappedBy)?
    @OneToOne(cascade = CascadeType.ALL, mappedBy = "linkedStream")
    public Vfile file;
    // NOTE(review): raw List — presumably aliases dataPointsDouble or
    // dataPointsString depending on `type`; consider parameterizing if no
    // caller depends on the raw type.
    @javax.persistence.Transient
    public List dataPoints;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "stream")
    public List<DataPointString> dataPointsString;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "stream")
    public List<DataPointDouble> dataPointsDouble;
    @OneToMany(cascade = CascadeType.ALL, mappedBy = "stream")
    public List<StreamParser> streamparsers = new ArrayList<StreamParser>();
    @ManyToMany(cascade = CascadeType.ALL, mappedBy = "followedStreams")
    public List<User> followingUsers = new ArrayList<User>();
public Stream(User user, Resource resource, StreamType type) {
this.owner = user;
this.resource = resource;
this.type = type;
this.latitude = 0.0;
this.longitude = 0.0;
createKey();
switch (this.type) {
case DOUBLE:
this.dataPoints = this.dataPointsDouble;
break;
case STRING:
this.dataPoints = this.dataPointsString;
break;
default:
break;
}
}
public Stream(User user, Resource resource) {
this(user, resource, StreamType.UNDEFINED);
}
public Stream(User user) {
this(user, null, StreamType.UNDEFINED);
}
public Stream() {
this(null, null, StreamType.UNDEFINED);
}
public void updateStream(Stream modified) {
this.description = modified.description;
this.longitude = modified.longitude;
this.latitude = modified.latitude;
update();
}
public Long getHistorySize() {
return historySize;
}
public void setHistorySize(long historySize) {
if (historySize <= 0) historySize = 1;
this.historySize = historySize;
}
/*
* public void setLocation(Location location) { this.location=location; }
*
* public void setLocation(double lon, double lat) { setLocation(new Location(lon,lat)); }
*/
protected String createKey() {
key = UUID.randomUUID().toString();
return key;
}
/** Call to create, or update an access key */
public String updateKey() {
key = createKey();
update();
return key;
}
public String getKey() {
return key;
}
/** Create a persisted stream */
public static Stream create(User user) {
if (user != null) {
Stream stream = new Stream(user);
stream.save();
return stream;
}
return null;
}
/** Persist a stream */
public static Stream create(Stream stream) {
if (stream.owner != null) {
stream.save();
return stream;
}
return null;
}
public static Stream get(Long id) {
return find.byId(id);
}
public boolean canRead(User user) {
return (publicAccess || owner.equals(user)); // || isShare(user);
}
public boolean canWrite(User user) {
return (owner.equals(user));
}
public boolean canRead(String key) {
return (publicAccess || this.key == key);
}
public Boolean hasData() {
//return lastUpdated != 0L;
List<? extends DataPoint> set = DataPointDouble.find.where().eq("stream", this).setMaxRows(1).findList();
if (set.size()==0) {
return false;
}
return true;
}
public boolean post(double data, long time) {
if (!this.frozen) {
if (type == StreamType.UNDEFINED) {
type = StreamType.DOUBLE;
this.dataPoints = this.dataPointsDouble;
}
if (type == StreamType.DOUBLE) {
DataPoint dp = new DataPointDouble(this, data, time).add();
// Logger.info("Adding new point: " + dp);
lastUpdated = time;
update();
return true;
}
}
return false;
}
public boolean post(String data, long time) {
if (!this.frozen) {
if (type == StreamType.UNDEFINED) {
type = StreamType.STRING;
this.dataPoints = this.dataPointsString;
}
if (type == StreamType.STRING) {
DataPoint dp = new DataPointString(this, data, time).add();
lastUpdated = time;
update();
return true;
}
}
return false;
}
public static Model.Finder<Long, Stream> find = new Model.Finder<Long, Stream>(Long.class, Stream.class);
public static Stream findByKey(String key) {
if (key == null) {
return null;
}
try {
return find.where().eq("key", key).findUnique();
} catch (Exception e) {
return null;
}
}
public static void delete(Long id) {
find.ref(id).delete();
}
public static void deleteByResource(Resource resource) {
List<Stream> list = find.where().eq("resource", resource).findList();
for (Stream stream : list) {
stream.delete();
}
}
public static void dattachResource(Resource resource) {
List<Stream> list = find.where().eq("resource", resource).findList();
for (Stream stream : list) {
stream.resource = null;
stream.update();
}
}
public static void clearStream(Long id) {
Stream stream = (Stream) get(id);
if (stream != null) {
stream.clearStream();
}
}
@play.db.ebean.Transactional
public void clearStream() {
this.lastUpdated = 0L;
this.deleteDataPoints();
this.update();
}
/*
* Liam: This seems to only ever return null, added switch version public List<? extends
* DataPoint> getDataPoints() { return (List<? extends DataPoint>)dataPoints; }
*/
public List<? extends DataPoint> getDataPoints() {
List<? extends DataPoint> set = null;
if (type == StreamType.STRING) {
set = DataPointString.find.where().eq("stream", this).orderBy("timestamp desc") .findList();
} else if (type == StreamType.DOUBLE) {
set = DataPointDouble.find.where().eq("stream", this).orderBy("timestamp desc") .findList();
}
return set;
}
public List<? extends DataPoint> getDataPointsTail(long tail) {
if (tail <= 0) {
tail = 1L;
// return new ArrayList<? extends DataPoint>(); // TODO should this be return new
// ArrayList<? extends DataPoint>(0) ??
}
List<? extends DataPoint> set = null;
if (type == StreamType.STRING) {
set = DataPointString.find.where().eq("stream", this).setMaxRows((int) tail) .orderBy("timestamp desc").findList();
} else if (type == StreamType.DOUBLE) {
set = DataPointDouble.find.where().eq("stream", this).setMaxRows((int) tail) .orderBy("timestamp desc").findList();
}
return set;
}
public List<? extends DataPoint> getDataPointsLast(long last) {
return this.getDataPointsSince(Utils.currentTime() - last);
}
public List<? extends DataPoint> getDataPointsSince(long since) {
List<? extends DataPoint> set = null;
if (type == StreamType.STRING) {
set = DataPointString.find.where().eq("stream", this).ge("timestamp", since) .orderBy("timestamp desc").findList();
} else if (type == StreamType.DOUBLE) {
set = DataPointDouble.find.where().eq("stream", this).ge("timestamp", since) .orderBy("timestamp desc").findList();
}
// Logger.info(this.id + " : Points since: " + since + set.toString());
return set;
}
public static Stream getByKey(String key) {
return find.where().eq("key", key).findUnique();
}
public static Stream getByUserPath(String username, String path) {
User user = User.getByUserName(username);
if (user==null) {
Logger.warn("Can't find user: "+username);
return null;
}
//Logger.warn(username+" "+user.id+" path "+path);
Vfile file = Vfile.find.where().eq("owner_id",user.id).eq("path", path).findUnique();
if (file==null) {
return null;
}
return file.linkedStream;
}
private void deleteDataPoints() {
if (type == StreamType.STRING && dataPointsString.size() > 0) {
Ebean.delete(this.dataPointsString);
} else if (type == StreamType.DOUBLE && dataPointsDouble.size() > 0) {
Ebean.delete(this.dataPointsDouble);
}
}
public StreamType getType() {
return type;
}
public void setType(StreamType type) {
this.type = type;
}
public void save() {
super.save();
}
public void update() {
// verify();
// this.lastUpdated = System.currentTimeMillis();
super.update();
}
// @play.db.ebean.Transactional
public void delete() {
Ebean.beginTransaction();
try {
// (StreamParsers will be deleted with cascading deletes; instead.)
// Detach parsers from streams.
// String s =
// "UPDATE parsers set stream_id = :target_stream_id where stream_id = :stream_id";
// SqlUpdate update = Ebean.createSqlUpdate(s);
// update.setParameter("stream_id", this.id);
// update.setParameter("target_stream_id", null);
// int modifiedCount = Ebean.execute(update);
// String msg = "There were " + modifiedCount + "rows updated";
// Logger.info("Deleting stream: Detaching some stream parsers: "
// + msg);
// Detach file: No need; cascading delete instead.
// this.file.linkedStream = null;
// this.file.update();
deleteDataPoints();
// detach following users //Cascading delete instead (Will only delete the relation)
// this.followingUsers.clear();
// this.saveManyToManyAssociations("followingUsers");
// delete stream
super.delete();
// commit transaction
Ebean.commitTransaction();
}
// catch(Exception e) {
// Logger.warn("Deleting stream failed!! " + e.getMessage() +
// e.getStackTrace()[0].toString());
// }
finally {
Ebean.endTransaction();
}
// / probably a bit easier way
// // setFrozen(true);
// Logger.info("Deleting stream: Detaching some stream parsers: "
// + this.streamparsers.size());
// //List<StreamParser> relatedParsers = StreamParser.find.where().eq("stream",
// this).findList();
// for (StreamParser sp : this.streamparsers) {
// sp.stream = null;
// sp.update();
// }
// this.file.linkedStream = null;
// this.file.update();
// // clearStream(this.id);
// deleteDataPoints();
// this.followingUsers.clear();
// this.saveManyToManyAssociations("followingUsers");
// super.delete();
}
public String showKey(User user) {
if (this.owner.equals(user)) {
return this.key;
}
return null;
}
public String getHierarchy() {
//System.out.println(this.owner);
//System.out.println(this.owner.id);
//return "/users/"+resource.owner.id+"/resources/"+resource.id+"/streams/"+id;
if (resource==null) {return "Error: resource is null";}
return resource.getHierarchy()+"/streams/"+id;
}
public boolean setPublicAccess(Boolean pub) {
this.publicAccess = pub;
this.update();
return pub;
}
public boolean setPublicSearch(Boolean pub) {
this.publicSearch = pub;
this.update();
return pub;
}
public boolean isPublicSearch() {
return publicSearch;
}
public boolean setFrozen(Boolean frozen) {
this.frozen = frozen;
this.update();
return frozen;
}
public List<StreamParser> getStreamParsers() {
return streamparsers;
// if(resource != null) {
// return StreamParser.find.where().eq("resource", resource).eq("stream",
// this).orderBy("streamVfilePath asce").findList();
// }
// return null;
}
public int compareTo(Stream other) {
// Logger.info("paths: "+file.getPath()+" "+other.file.getPath());
if (other==null) {return -1;}
return file.getPath().compareTo(other.file.getPath());
}
public static List<Stream> availableStreams(User currentUser, Integer p) {
List<Stream> available = find.where().or(Expr.eq("publicSearch", true), Expr.eq("owner", currentUser))
.orderBy("owner").orderBy("id asc").findPagingList(CtrlStream.pageSize).getPage(p.intValue()).getList();
return available;
}
// get the recently updated public streams that are not followed by currentUser
public static List<Stream> getLastUpdatedStreams(User currentUser, int count) {
List<Stream> available = find.where().and(
// Expr.and( Expr.eq("publicSearch", true), Expr.eq("publicAccess", true) ),
Expr.eq("publicSearch", true), Expr.ne("owner", currentUser))
// do not include already followed streams
.not(Expr.in( "id", Stream.find.where().join("followingUsers").where().eq("followingUsers.id", currentUser.id).findIds()))
// User.find.where().join("streams").where().eq("id",currentUser.id)))
.orderBy("lastUpdated").setMaxRows(count).findList();
return available;
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.cmis;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import org.apache.camel.Exchange;
import org.apache.camel.Message;
import org.apache.camel.NoSuchHeaderException;
import org.apache.camel.RuntimeExchangeException;
import org.apache.camel.impl.DefaultProducer;
import org.apache.camel.util.ExchangeHelper;
import org.apache.camel.util.MessageHelper;
import org.apache.chemistry.opencmis.client.api.CmisObject;
import org.apache.chemistry.opencmis.client.api.Document;
import org.apache.chemistry.opencmis.client.api.Folder;
import org.apache.chemistry.opencmis.commons.PropertyIds;
import org.apache.chemistry.opencmis.commons.data.ContentStream;
import org.apache.chemistry.opencmis.commons.enums.VersioningState;
import org.apache.chemistry.opencmis.commons.exceptions.CmisObjectNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The CMIS producer.
 *
 * <p>Creates a CMIS node (document, folder, or other type) from the incoming
 * exchange and replies with the id of the created node.</p>
 */
public class CMISProducer extends DefaultProducer {
    private static final Logger LOG = LoggerFactory.getLogger(CMISProducer.class);

    private final CMISSessionFacadeFactory sessionFacadeFactory;
    private CMISSessionFacade sessionFacade;

    public CMISProducer(CMISEndpoint endpoint, CMISSessionFacadeFactory sessionFacadeFactory) {
        super(endpoint);
        this.sessionFacadeFactory = sessionFacadeFactory;
        this.sessionFacade = null;
    }

    @Override
    public CMISEndpoint getEndpoint() {
        return (CMISEndpoint) super.getEndpoint();
    }

    public void process(Exchange exchange) throws Exception {
        CmisObject node = createNode(exchange);
        LOG.debug("Created node with id: {}", node.getId());
        // Propagate the incoming headers and reply with the new node's id.
        Message out = exchange.getOut();
        out.copyFrom(exchange.getIn());
        out.setBody(node.getId());
    }

    /**
     * Keeps only the properties that are defined on the node's object type
     * (plus any secondary types, when the repository supports them).
     */
    private Map<String, Object> filterTypeProperties(Map<String, Object> properties) throws Exception {
        String objectTypeName = properties.containsKey(PropertyIds.OBJECT_TYPE_ID)
                ? (String) properties.get(PropertyIds.OBJECT_TYPE_ID)
                : CamelCMISConstants.CMIS_DOCUMENT;

        Set<String> knownProperties = new HashSet<>(getSessionFacade().getPropertiesFor(objectTypeName));
        if (getSessionFacade().supportsSecondaries() && properties.containsKey(PropertyIds.SECONDARY_OBJECT_TYPE_IDS)) {
            @SuppressWarnings("unchecked")
            Collection<String> secondaryTypes = (Collection<String>) properties.get(PropertyIds.SECONDARY_OBJECT_TYPE_IDS);
            for (String secondaryType : secondaryTypes) {
                knownProperties.addAll(getSessionFacade().getPropertiesFor(secondaryType));
            }
        }

        Map<String, Object> filtered = new HashMap<>(properties.size());
        for (Map.Entry<String, Object> entry : properties.entrySet()) {
            if (knownProperties.contains(entry.getKey())) {
                filtered.put(entry.getKey(), entry.getValue());
            }
        }
        return filtered;
    }

    /** Creates a document, folder, or other node under the resolved parent folder. */
    private CmisObject createNode(Exchange exchange) throws Exception {
        validateRequiredHeader(exchange, PropertyIds.NAME);
        Message message = exchange.getIn();
        Folder parentFolder = getFolderOnPath(exchange, parentFolderPathFor(message));
        Map<String, Object> cmisProperties = filterTypeProperties(message.getHeaders());

        if (isDocument(exchange)) {
            String name = message.getHeader(PropertyIds.NAME, String.class);
            String contentType = getMimeType(message);
            byte[] payload = getBodyData(message);
            ContentStream contentStream = getSessionFacade().createContentStream(name, payload, contentType);
            return storeDocument(parentFolder, cmisProperties, contentStream);
        }
        if (isFolder(message)) {
            return storeFolder(parentFolder, cmisProperties);
        }
        // other types: store as a content-less document
        return storeDocument(parentFolder, cmisProperties, null);
    }

    private Folder getFolderOnPath(Exchange exchange, String path) throws Exception {
        try {
            return (Folder) getSessionFacade().getObjectByPath(path);
        } catch (CmisObjectNotFoundException e) {
            throw new RuntimeExchangeException("Path not found " + path, exchange, e);
        }
    }

    /** Resolves the parent folder path: explicit header, derived from PATH/NAME, or root. */
    private String parentFolderPathFor(Message message) throws Exception {
        String customPath = message.getHeader(CamelCMISConstants.CMIS_FOLDER_PATH, String.class);
        if (customPath != null) {
            return customPath;
        }
        if (isFolder(message)) {
            String fullPath = (String) message.getHeader(PropertyIds.PATH);
            String nodeName = (String) message.getHeader(PropertyIds.NAME);
            if (fullPath != null && fullPath.length() > nodeName.length()) {
                return fullPath.substring(0, fullPath.length() - nodeName.length());
            }
        }
        return "/";
    }

    /** A node is a folder when its type maps to cmis:folder, or when the body is empty. */
    private boolean isFolder(Message message) throws Exception {
        String baseTypeId = message.getHeader(PropertyIds.OBJECT_TYPE_ID, String.class);
        if (baseTypeId == null) {
            return message.getBody() == null;
        }
        return CamelCMISConstants.CMIS_FOLDER.equals(getSessionFacade().getCMISTypeFor(baseTypeId));
    }

    private Folder storeFolder(Folder parentFolder, Map<String, Object> cmisProperties) throws Exception {
        if (!cmisProperties.containsKey(PropertyIds.OBJECT_TYPE_ID)) {
            cmisProperties.put(PropertyIds.OBJECT_TYPE_ID, CamelCMISConstants.CMIS_FOLDER);
        }
        LOG.debug("Creating folder with properties: {}", cmisProperties);
        return parentFolder.createFolder(cmisProperties);
    }

    private Document storeDocument(Folder parentFolder, Map<String, Object> cmisProperties, ContentStream contentStream) throws Exception {
        if (!cmisProperties.containsKey(PropertyIds.OBJECT_TYPE_ID)) {
            cmisProperties.put(PropertyIds.OBJECT_TYPE_ID, CamelCMISConstants.CMIS_DOCUMENT);
        }
        // Versionable types get a MAJOR version; everything else is unversioned.
        VersioningState versioningState = getSessionFacade().isObjectTypeVersionable((String) cmisProperties.get(PropertyIds.OBJECT_TYPE_ID))
                ? VersioningState.MAJOR
                : VersioningState.NONE;
        LOG.debug("Creating document with properties: {}", cmisProperties);
        return parentFolder.createDocument(cmisProperties, contentStream, versioningState);
    }

    private void validateRequiredHeader(Exchange exchange, String name) throws NoSuchHeaderException {
        ExchangeHelper.getMandatoryHeader(exchange, name, String.class);
    }

    /** A node is a document when its type maps to cmis:document, or when a body is present. */
    private boolean isDocument(Exchange exchange) throws Exception {
        String baseTypeId = exchange.getIn().getHeader(PropertyIds.OBJECT_TYPE_ID, String.class);
        if (baseTypeId == null) {
            return exchange.getIn().getBody() != null;
        }
        return CamelCMISConstants.CMIS_DOCUMENT.equals(getSessionFacade().getCMISTypeFor(baseTypeId));
    }

    private byte[] getBodyData(Message message) {
        return message.getBody(byte[].class);
    }

    /** MIME type from the CMIS header, falling back to the message content type. */
    private String getMimeType(Message message) throws NoSuchHeaderException {
        String mimeType = message.getHeader(PropertyIds.CONTENT_STREAM_MIME_TYPE, String.class);
        return mimeType != null ? mimeType : MessageHelper.getContentType(message);
    }

    /** Lazily creates and initializes the CMIS session on first use. */
    private CMISSessionFacade getSessionFacade() throws Exception {
        if (sessionFacade == null) {
            sessionFacade = sessionFacadeFactory.create(getEndpoint());
            sessionFacade.initSession();
        }
        return sessionFacade;
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.run;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import com.intellij.execution.DefaultExecutionResult;
import com.intellij.execution.ExecutionException;
import com.intellij.execution.ExecutionResult;
import com.intellij.execution.Executor;
import com.intellij.execution.configurations.*;
import com.intellij.execution.configurations.GeneralCommandLine.ParentEnvironmentType;
import com.intellij.execution.filters.TextConsoleBuilder;
import com.intellij.execution.filters.TextConsoleBuilderFactory;
import com.intellij.execution.filters.UrlFilter;
import com.intellij.execution.process.ProcessHandler;
import com.intellij.execution.process.ProcessTerminatedListener;
import com.intellij.execution.runners.ExecutionEnvironment;
import com.intellij.execution.runners.ProgramRunner;
import com.intellij.execution.ui.ConsoleView;
import com.intellij.facet.Facet;
import com.intellij.facet.FacetManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleManager;
import com.intellij.openapi.module.ModuleUtilCore;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.SdkAdditionalData;
import com.intellij.openapi.roots.*;
import com.intellij.openapi.roots.impl.libraries.LibraryImpl;
import com.intellij.openapi.roots.libraries.Library;
import com.intellij.openapi.roots.libraries.PersistentLibraryKind;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.JarFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.encoding.EncodingProjectManager;
import com.intellij.remote.RemoteProcessControl;
import com.intellij.util.PlatformUtils;
import com.jetbrains.python.PythonHelpersLocator;
import com.jetbrains.python.console.PyDebugConsoleBuilder;
import com.jetbrains.python.debugger.PyDebugRunner;
import com.jetbrains.python.debugger.PyDebuggerOptionsProvider;
import com.jetbrains.python.facet.LibraryContributingFacet;
import com.jetbrains.python.facet.PythonPathContributingFacet;
import com.jetbrains.python.library.PythonLibraryType;
import com.jetbrains.python.remote.PyRemotePathMapper;
import com.jetbrains.python.sdk.PySdkUtil;
import com.jetbrains.python.sdk.PythonEnvUtil;
import com.jetbrains.python.sdk.PythonSdkAdditionalData;
import com.jetbrains.python.sdk.PythonSdkType;
import com.jetbrains.python.sdk.flavors.JythonSdkFlavor;
import com.jetbrains.python.sdk.flavors.PythonSdkFlavor;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.io.IOException;
import java.net.ServerSocket;
import java.nio.charset.Charset;
import java.util.*;
import java.util.stream.Collectors;
/**
* @author traff, Leonid Shalupov
*/
public abstract class PythonCommandLineState extends CommandLineState {
private static final Logger LOG = Logger.getInstance("#com.jetbrains.python.run.PythonCommandLineState");

// command line has a number of fixed groups of parameters; patchers should only operate on them and not the raw list.
public static final String GROUP_EXE_OPTIONS = "Exe Options";
public static final String GROUP_DEBUGGER = "Debugger";
public static final String GROUP_PROFILER = "Profiler";
public static final String GROUP_COVERAGE = "Coverage";
public static final String GROUP_SCRIPT = "Script";

// Run configuration this command-line state was created for.
private final AbstractPythonRunConfiguration myConfig;
// NOTE(review): not read in the visible portion of this class; presumably a
// lazily computed multiprocess-debugging flag — confirm usage elsewhere.
private Boolean myMultiprocessDebug = null;
// Whether to allocate a PTY for the launched process (global default).
private boolean myRunWithPty = PtyCommandLine.isEnabled();
/** Whether the process will be started with a pseudo-terminal attached. */
public boolean isRunWithPty() {
    return myRunWithPty;
}
/** True when this state was created by the Python debug runner. */
public boolean isDebug() {
    String runnerId = getEnvironment().getRunner().getRunnerId();
    return PyDebugRunner.PY_DEBUG_RUNNER.equals(runnerId);
}
/**
 * Opens a server socket on an OS-assigned free port.
 *
 * @throws ExecutionException when no free port could be bound
 */
public static ServerSocket createServerSocket() throws ExecutionException {
    try {
        //noinspection SocketOpenedButNotSafelyClosed
        return new ServerSocket(0);
    }
    catch (IOException e) {
        throw new ExecutionException("Failed to find free socket port", e);
    }
}
/** Binds this state to the run configuration and execution environment. */
public PythonCommandLineState(AbstractPythonRunConfiguration runConfiguration, ExecutionEnvironment env) {
    super(env);
    myConfig = runConfiguration;
}
/** Flavor (CPython/Jython/...) of the configured interpreter, or null if unknown. */
@Nullable
public PythonSdkFlavor getSdkFlavor() {
    return PythonSdkFlavor.getFlavor(myConfig.getInterpreterPath());
}
/** SDK of the underlying run configuration, or null if none is set. */
@Nullable
public Sdk getSdk() {
    return myConfig.getSdk();
}
@NotNull
@Override
public ExecutionResult execute(@NotNull Executor executor, @NotNull ProgramRunner runner) throws ExecutionException {
    // Delegates to the patcher-aware overload with no patchers applied.
    return execute(executor, (CommandLinePatcher[])null);
}
/** Executes with the default (local/remote auto-detecting) process starter. */
public ExecutionResult execute(Executor executor, CommandLinePatcher... patchers) throws ExecutionException {
    return execute(executor, getDefaultPythonProcessStarter(), patchers);
}
/**
 * Starts the process via {@code processStarter} (after applying {@code patchers})
 * and attaches a console to it.
 */
public ExecutionResult execute(Executor executor,
                               PythonProcessStarter processStarter,
                               CommandLinePatcher... patchers) throws ExecutionException {
    final ProcessHandler handler = startProcess(processStarter, patchers);
    final ConsoleView console = createAndAttachConsole(myConfig.getProject(), handler, executor);
    return new DefaultExecutionResult(console, handler, createActions(console, handler));
}
/** Builds a console, wires up URL/traceback filters, and binds it to the process. */
@NotNull
protected ConsoleView createAndAttachConsole(Project project, ProcessHandler processHandler, Executor executor)
    throws ExecutionException {
    final ConsoleView console = createConsoleBuilder(project).getConsole();
    console.addMessageFilter(createUrlFilter(processHandler));
    addTracebackFilter(project, console, processHandler);
    console.attachToProcess(processHandler);
    return console;
}
/** Adds the traceback filter matching the SDK kind (local vs remote). */
protected void addTracebackFilter(Project project, ConsoleView consoleView, ProcessHandler processHandler) {
    if (!PySdkUtil.isRemote(myConfig.getSdk())) {
        consoleView.addMessageFilter(new PythonTracebackFilter(project, myConfig.getWorkingDirectorySafe()));
    }
    else {
        assert processHandler instanceof RemoteProcessControl;
        consoleView.addMessageFilter(
            new PyRemoteTracebackFilter(project, myConfig.getWorkingDirectory(), (RemoteProcessControl)processHandler));
    }
    consoleView.addMessageFilter(createUrlFilter(processHandler)); // Url filter is always nice to have
}
/** Debug sessions get the debug console builder; plain runs get the default one. */
private TextConsoleBuilder createConsoleBuilder(Project project) {
    if (!isDebug()) {
        return TextConsoleBuilderFactory.getInstance().createBuilder(project);
    }
    return new PyDebugConsoleBuilder(project, PythonSdkType.findSdkByPath(myConfig.getInterpreterPath()));
}
@Override
@NotNull
protected ProcessHandler startProcess() throws ExecutionException {
    // Uses the default starter with no command-line patchers.
    return startProcess(getDefaultPythonProcessStarter());
}
/**
 * Patches the command line parameters applying patchers from first to last, and then runs it.
 * Uses the default process starter.
 *
 * @param patchers any number of patchers; any patcher may be null, and the whole argument may be null.
 * @return handler of the started process
 * @throws ExecutionException
 * @deprecated use {@link #startProcess(PythonProcessStarter, CommandLinePatcher...)} instead
 */
@Deprecated
@NotNull
protected ProcessHandler startProcess(CommandLinePatcher... patchers) throws ExecutionException {
    return startProcess(getDefaultPythonProcessStarter(), patchers);
}
/**
 * Generates and patches the command line, lets run-configuration extensions
 * contribute to it, then starts the process and attaches those extensions.
 *
 * @param processStarter strategy that actually launches the process
 * @param patchers any number of patchers; any patcher may be null, and the whole argument may be null.
 * @return handler of the started process
 * @throws ExecutionException
 */
@NotNull
protected ProcessHandler startProcess(PythonProcessStarter processStarter, CommandLinePatcher... patchers) throws ExecutionException {
    final GeneralCommandLine cmd = generateCommandLine(patchers);
    // Extensions may modify the command line before launch...
    PythonRunConfigurationExtensionsManager.getInstance()
        .patchCommandLine(myConfig, getRunnerSettings(), cmd, getEnvironment().getRunner().getRunnerId());
    final ProcessHandler handler = processStarter.start(myConfig, cmd);
    // ...and attach themselves to the freshly started process.
    PythonRunConfigurationExtensionsManager.getInstance().attachExtensionsToProcess(myConfig, handler, getRunnerSettings());
    return handler;
}
/**
 * Default starter: launches the configured interpreter remotely (via the SDK's
 * remote machinery) or locally with a termination listener attached.
 */
@NotNull
protected final PythonProcessStarter getDefaultPythonProcessStarter() {
    return (config, commandLine) -> {
        Sdk sdk = PythonSdkType.findSdkByPath(myConfig.getInterpreterPath());
        final ProcessHandler processHandler;
        if (PySdkUtil.isRemote(sdk)) {
            PyRemotePathMapper pathMapper = createRemotePathMapper();
            processHandler = createRemoteProcessStarter().startRemoteProcess(sdk, commandLine, myConfig.getProject(), pathMapper);
        }
        else {
            // Local run: fix up locale on macOS, create the process, watch for exit.
            EncodingEnvironmentUtil.setLocaleEnvironmentIfMac(commandLine);
            processHandler = doCreateProcess(commandLine);
            ProcessTerminatedListener.attach(processHandler);
        }
        return processHandler;
    };
}
/** Path mapper from the configured mapping settings, or null when none are set. */
@Nullable
private PyRemotePathMapper createRemotePathMapper() {
    if (myConfig.getMappingSettings() != null) {
        return PyRemotePathMapper.fromSettings(myConfig.getMappingSettings(), PyRemotePathMapper.PyPathMappingType.USER_DEFINED);
    }
    return null;
}
/** Factory hook for the remote process starter; overridable by subclasses. */
protected PyRemoteProcessStarter createRemoteProcessStarter() {
    return new PyRemoteProcessStarter();
}
/** Generates the command line and applies the non-null patchers in order. */
public GeneralCommandLine generateCommandLine(CommandLinePatcher[] patchers) {
    GeneralCommandLine cmd = generateCommandLine();
    if (patchers == null) {
        return cmd;
    }
    for (CommandLinePatcher patcher : patchers) {
        if (patcher != null) {
            patcher.patchCommandLine(cmd);
        }
    }
    return cmd;
}
/** Creates the local OS process; overridable by subclasses. */
protected ProcessHandler doCreateProcess(GeneralCommandLine commandLine) throws ExecutionException {
    return PythonProcessRunner.createProcess(commandLine);
}
/** Builds the full command line: base interpreter line, parameters, then env hooks. */
public GeneralCommandLine generateCommandLine() {
    GeneralCommandLine cmd = createPythonCommandLine(myConfig.getProject(), myConfig, isDebug(), myRunWithPty);
    buildCommandLineParameters(cmd);
    customizeEnvironmentVars(cmd.getEnvironment(), myConfig.isPassParentEnvs());
    return cmd;
}
/**
 * Builds a base command line for the given configuration: charset, fixed
 * parameter groups, environment, and interpreter path — in that order.
 */
@NotNull
public static GeneralCommandLine createPythonCommandLine(Project project, PythonRunParams config, boolean isDebug, boolean runWithPty) {
    GeneralCommandLine commandLine = generalCommandLine(runWithPty);
    commandLine.withCharset(EncodingProjectManager.getInstance(project).getDefaultCharset());
    createStandardGroups(commandLine);
    initEnvironment(project, commandLine, config, isDebug);
    setRunnerPath(project, commandLine, config);
    return commandLine;
}
/** PTY-backed command line when requested, plain one otherwise. */
private static GeneralCommandLine generalCommandLine(boolean runWithPty) {
    if (runWithPty) {
        return new PtyCommandLine();
    }
    return new GeneralCommandLine();
}
/**
 * Creates a number of parameter groups in the command line:
 * GROUP_EXE_OPTIONS, GROUP_DEBUGGER, GROUP_PROFILER, GROUP_COVERAGE, GROUP_SCRIPT.
 * These are necessary for command line patchers to work properly.
 *
 * @param commandLine command line to add the groups to
 */
public static void createStandardGroups(GeneralCommandLine commandLine) {
    ParametersList parameters = commandLine.getParametersList();
    // Order matters: patchers address the groups by name within this sequence.
    for (String group : new String[]{GROUP_EXE_OPTIONS, GROUP_DEBUGGER, GROUP_PROFILER, GROUP_COVERAGE, GROUP_SCRIPT}) {
        parameters.addParamsGroup(group);
    }
}
/**
 * Assembles the process environment: encoding vars, configuration-supplied
 * vars, common interpreter vars, and virtualenv activation — later steps may
 * override earlier ones — then installs it on the command line.
 */
protected static void initEnvironment(Project project, GeneralCommandLine commandLine, PythonRunParams myConfig, boolean isDebug) {
    Map<String, String> env = Maps.newHashMap();
    setupEncodingEnvs(env, commandLine.getCharset());
    if (myConfig.getEnvs() != null) {
        env.putAll(myConfig.getEnvs());
    }
    addCommonEnvironmentVariables(getInterpreterPath(project, myConfig), env);
    setupVirtualEnvVariables(myConfig, env, myConfig.getSdkHome());
    commandLine.getEnvironment().clear();
    commandLine.getEnvironment().putAll(env);
    commandLine.withParentEnvironmentType(myConfig.isPassParentEnvs() ? ParentEnvironmentType.CONSOLE : ParentEnvironmentType.NONE);
    buildPythonPath(project, commandLine, myConfig, isDebug);
}
/**
 * Merges virtualenv activation variables into {@code env} when {@code sdkHome}
 * points inside a virtualenv, then re-applies the run configuration's own
 * environment entries on top (appending to PATH rather than replacing it).
 *
 * @param config  run parameters supplying user-configured environment entries
 *                (may report a {@code null} map — see null guard below)
 * @param env     mutable target environment, updated in place
 * @param sdkHome interpreter home used to locate the activate script
 */
private static void setupVirtualEnvVariables(PythonRunParams config, Map<String, String> env, String sdkHome) {
    if (!PythonSdkType.isVirtualEnv(sdkHome)) {
        return;
    }
    PyVirtualEnvReader reader = new PyVirtualEnvReader(sdkHome);
    if (reader.getActivate() == null) {
        return;
    }
    try {
        // Keep only the variables the virtualenv activation actually defines.
        env.putAll(reader.readShellEnv().entrySet().stream()
                       .filter(entry -> PyVirtualEnvReader.Companion.getVirtualEnvVars().contains(entry.getKey()))
                       .collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
        // BUGFIX: getEnvs() can legitimately be null (callers such as
        // initEnvironment null-check the same getter). Previously a null map
        // raised an NPE inside this try and was mislogged as a virtualenv
        // read failure, silently dropping the user's environment entries.
        final Map<String, String> configuredEnvs = config.getEnvs();
        if (configuredEnvs != null) {
            for (Map.Entry<String, String> e : configuredEnvs.entrySet()) {
                if ("PATH".equals(e.getKey())) {
                    // Append rather than clobber so the virtualenv's PATH prefix survives.
                    env.put(e.getKey(), PythonEnvUtil.appendToPathEnvVar(env.get("PATH"), e.getValue()));
                }
                else {
                    env.put(e.getKey(), e.getValue());
                }
            }
        }
    }
    catch (Exception e) {
        LOG.error("Couldn't read virtualenv variables", e);
    }
}
protected static void addCommonEnvironmentVariables(@Nullable String homePath, Map<String, String> environment) {
    // Unbuffered stdout/stderr so console output appears immediately.
    PythonEnvUtil.setPythonUnbuffered(environment);
    if (homePath != null) {
        PythonEnvUtil.resetHomePathChanges(homePath, environment);
    }
    // Marker letting user code detect it is being run from the IDE.
    environment.put("PYCHARM_HOSTED", "1");
}
/**
 * Extension hook: subclasses may mutate the environment of the process being
 * started. The default implementation does nothing.
 *
 * @param envs           mutable environment map for the new process
 * @param passParentEnvs whether the parent process environment will be inherited
 */
public void customizeEnvironmentVars(Map<String, String> envs, boolean passParentEnvs) {
}
/**
 * Adds interpreter-flavor encoding variables for the given console charset.
 * Delegates entirely to {@link PythonSdkFlavor#setupEncodingEnvs}.
 */
private static void setupEncodingEnvs(Map<String, String> envs, Charset charset) {
    PythonSdkFlavor.setupEncodingEnvs(envs, charset);
}
private static void buildPythonPath(Project project, GeneralCommandLine commandLine, PythonRunParams config, boolean isDebug) {
    // Without a resolvable SDK there is no PYTHONPATH to build.
    final Sdk sdk = PythonSdkType.findSdkByPath(config.getSdkHome());
    if (sdk == null) {
        return;
    }
    // User-added SDK paths come first, then module/content derived entries.
    final List<String> paths = Lists.newArrayList(getAddedPaths(sdk));
    paths.addAll(collectPythonPath(project, config, isDebug));
    initPythonPath(commandLine, config.isPassParentEnvs(), paths, config.getSdkHome());
}
public static void initPythonPath(GeneralCommandLine commandLine,
                                  boolean passParentEnvs,
                                  List<String> pathList,
                                  final String interpreterPath) {
    // Let a recognized interpreter flavor apply its own path handling;
    // otherwise fall back to plain environment-based PYTHONPATH setup.
    final PythonSdkFlavor flavor = PythonSdkFlavor.getFlavor(interpreterPath);
    if (flavor == null) {
        PythonSdkFlavor.initPythonPath(commandLine.getEnvironment(), passParentEnvs, pathList);
    }
    else {
        flavor.initPythonPath(commandLine, pathList);
    }
}
public static List<String> getAddedPaths(Sdk pythonSdk) {
    // Paths the user manually attached to the interpreter are kept in
    // Python-specific SDK additional data.
    final List<String> result = new ArrayList<>();
    final SdkAdditionalData additionalData = pythonSdk.getSdkAdditionalData();
    if (additionalData instanceof PythonSdkAdditionalData) {
        for (VirtualFile file : ((PythonSdkAdditionalData)additionalData).getAddedPathFiles()) {
            addToPythonPath(file, result);
        }
    }
    return result;
}
private static void addToPythonPath(VirtualFile file, Collection<String> pathList) {
    // An entry inside a jar is mapped back to the jar file on disk;
    // everything else is added as-is.
    if (!(file.getFileSystem() instanceof JarFileSystem)) {
        addIfNeeded(file, pathList);
        return;
    }
    final VirtualFile jarFile = JarFileSystem.getInstance().getVirtualFileForJar(file);
    if (jarFile != null) {
        addIfNeeded(jarFile, pathList);
    }
}
/**
 * Adds the file's path to {@code pathList} unless an equivalent
 * (system-dependent) entry is already present.
 */
private static void addIfNeeded(@NotNull final VirtualFile file, @NotNull final Collection<String> pathList) {
    addIfNeeded(pathList, file.getPath());
}
/**
 * Appends {@code path} (converted to system-dependent separators) to
 * {@code pathList} if it is not already present.
 *
 * @param pathList target collection of path entries, updated in place
 * @param path     path to add, in system-independent form
 */
protected static void addIfNeeded(Collection<String> pathList, String path) {
    final String filePath = FileUtil.toSystemDependentName(path);
    // A direct contains() check is equivalent to the previous behavior of
    // copying the whole collection into a temporary HashSet on every call,
    // without the per-call O(n) copy and allocation.
    if (!pathList.contains(filePath)) {
        pathList.add(filePath);
    }
}
@VisibleForTesting
public static Collection<String> collectPythonPath(Project project, PythonRunParams config, boolean isDebug) {
    // Start from the module-derived roots, keeping insertion order.
    final Module module = getModule(project, config);
    final Collection<String> pythonPath =
        Sets.newLinkedHashSet(collectPythonPath(module, config.shouldAddContentRoots(), config.shouldAddSourceRoots()));
    // Jython rewrites sys.argv on execfile (PY-8164); when debugging, the
    // helper scripts must be on the path to compensate.
    if (isDebug && PythonSdkFlavor.getFlavor(config.getSdkHome()) instanceof JythonSdkFlavor) {
        pythonPath.add(PythonHelpersLocator.getHelperPath("pycharm"));
        pythonPath.add(PythonHelpersLocator.getHelperPath("pydev"));
    }
    return pythonPath;
}
@Nullable
private static Module getModule(Project project, PythonRunParams config) {
    // Resolve the configured module by name; a blank name means "no module".
    final String moduleName = config.getModuleName();
    if (StringUtil.isEmpty(moduleName)) {
        return null;
    }
    return ModuleManager.getInstance(project).findModuleByName(moduleName);
}
/**
 * Collects the PYTHONPATH entries for {@code module}, including both content
 * and source roots. Convenience overload of
 * {@link #collectPythonPath(Module, boolean, boolean)}.
 */
@NotNull
public static Collection<String> collectPythonPath(@Nullable Module module) {
    return collectPythonPath(module, true, true);
}
@NotNull
public static Collection<String> collectPythonPath(@Nullable Module module, boolean addContentRoots,
                                                   boolean addSourceRoots) {
    // Insertion order matters for PYTHONPATH precedence: content roots, then
    // source roots, then library/output roots — with the module's own roots
    // always ahead of its dependencies' roots within each group.
    final Collection<String> result = Sets.newLinkedHashSet();
    if (module == null) {
        return result;
    }
    final Set<Module> dependencies = new HashSet<>();
    ModuleUtilCore.getDependencies(module, dependencies);
    if (addContentRoots) {
        addRoots(result, ModuleRootManager.getInstance(module).getContentRoots());
        for (Module dependency : dependencies) {
            addRoots(result, ModuleRootManager.getInstance(dependency).getContentRoots());
        }
    }
    if (addSourceRoots) {
        addRoots(result, ModuleRootManager.getInstance(module).getSourceRoots());
        for (Module dependency : dependencies) {
            addRoots(result, ModuleRootManager.getInstance(dependency).getSourceRoots());
        }
    }
    addLibrariesFromModule(module, result);
    addRootsFromModule(module, result);
    for (Module dependency : dependencies) {
        addLibrariesFromModule(dependency, result);
        addRootsFromModule(dependency, result);
    }
    return result;
}
/**
 * Adds library roots of {@code module} to {@code list}. Outside PyCharm every
 * library root qualifies; inside PyCharm only roots of Python-kind libraries
 * do. Libraries contributed by the Python facet are skipped entirely.
 *
 * @param module module whose order entries are inspected
 * @param list   target collection of path entries, updated in place
 */
private static void addLibrariesFromModule(Module module, Collection<String> list) {
    final OrderEntry[] entries = ModuleRootManager.getInstance(module).getOrderEntries();
    for (OrderEntry entry : entries) {
        if (!(entry instanceof LibraryOrderEntry)) {
            continue;
        }
        final LibraryOrderEntry libraryEntry = (LibraryOrderEntry)entry;
        final String name = libraryEntry.getLibraryName();
        if (name != null && name.endsWith(LibraryContributingFacet.PYTHON_FACET_LIBRARY_NAME_SUFFIX)) {
            // skip libraries from Python facet
            continue;
        }
        // Loop-invariant hoist: the library, platform check, and kind lookup
        // were previously recomputed for every root of every entry.
        final boolean pyCharm = PlatformUtils.isPyCharm();
        final Library library = libraryEntry.getLibrary();
        final boolean pythonKindLibrary = pyCharm
                                          && library instanceof LibraryImpl
                                          && ((LibraryImpl)library).getKind() == PythonLibraryType.getInstance().getKind();
        for (VirtualFile root : libraryEntry.getRootFiles(OrderRootType.CLASSES)) {
            // Same predicate as before: !PyCharm adds unconditionally;
            // PyCharm adds only Python-kind libraries.
            if (!pyCharm || pythonKindLibrary) {
                addToPythonPath(root, list);
            }
        }
    }
}
private static void addRootsFromModule(Module module, Collection<String> pythonPathList) {
    // Jython: compiled output directories participate in the import path.
    final CompilerModuleExtension extension = CompilerModuleExtension.getInstance(module);
    if (extension != null) {
        final VirtualFile outputPath = extension.getCompilerOutputPath();
        if (outputPath != null) {
            pythonPathList.add(outputPath.getPath());
        }
        final VirtualFile testOutputPath = extension.getCompilerOutputPathForTests();
        if (testOutputPath != null) {
            pythonPathList.add(testOutputPath.getPath());
        }
    }
    // Facets (e.g. buildout) may contribute additional entries.
    for (Facet facet : FacetManager.getInstance(module).getAllFacets()) {
        if (facet instanceof PythonPathContributingFacet) {
            final List<String> additionalPaths = ((PythonPathContributingFacet)facet).getAdditionalPythonPath();
            if (additionalPaths != null) {
                pythonPathList.addAll(additionalPaths);
            }
        }
    }
}
private static void addRoots(Collection<String> pythonPathList, VirtualFile[] roots) {
    // De-duplicating append of every root in order.
    for (int i = 0; i < roots.length; i++) {
        addToPythonPath(roots[i], pythonPathList);
    }
}
protected static void setRunnerPath(Project project, GeneralCommandLine commandLine, PythonRunParams config) {
    // Only set the executable when an interpreter can actually be resolved.
    final String interpreterPath = getInterpreterPath(project, config);
    if (StringUtil.isEmpty(interpreterPath)) {
        return;
    }
    commandLine.setExePath(FileUtil.toSystemDependentName(interpreterPath));
}
@Nullable
public static String getInterpreterPath(Project project, PythonRunParams config) {
    // The explicitly configured home wins unless the configuration asks for
    // the module SDK or no home is set at all.
    final String configuredHome = config.getSdkHome();
    if (!config.isUseModuleSdk() && !StringUtil.isEmpty(configuredHome)) {
        return configuredHome;
    }
    final Sdk sdk = PythonSdkType.findPythonSdk(getModule(project, config));
    if (sdk != null) {
        return sdk.getHomePath();
    }
    // No module SDK found; fall back to whatever was configured (possibly null/empty).
    return configuredHome;
}
protected String getInterpreterPath() throws ExecutionException {
    // Fail fast with a user-facing error when no interpreter is configured.
    final String interpreterPath = myConfig.getInterpreterPath();
    if (interpreterPath != null) {
        return interpreterPath;
    }
    throw new ExecutionException("Cannot find Python interpreter for this run configuration");
}
/**
 * Extension hook: subclasses append run-configuration-specific parameters to
 * the command line. The default implementation adds nothing.
 */
protected void buildCommandLineParameters(GeneralCommandLine commandLine) {
}
public boolean isMultiprocessDebug() {
    // An explicit override set via setMultiprocessDebug() wins; otherwise fall
    // back to the project-level debugger option.
    final Boolean override = myMultiprocessDebug;
    return override != null
           ? override
           : PyDebuggerOptionsProvider.getInstance(myConfig.getProject()).isAttachToSubprocess();
}
/**
 * Forces multiprocess debugging on or off, overriding the project-level
 * setting otherwise consulted by {@code isMultiprocessDebug()}.
 */
public void setMultiprocessDebug(boolean multiprocessDebug) {
    myMultiprocessDebug = multiprocessDebug;
}
/**
 * Chooses whether the process is started under a pseudo-terminal (PTY).
 */
public void setRunWithPty(boolean runWithPty) {
    myRunWithPty = runWithPty;
}
/**
 * Creates the console filter that turns URLs in process output into
 * clickable links. Subclasses may return a more specific filter.
 */
@NotNull
protected UrlFilter createUrlFilter(ProcessHandler handler) {
    return new UrlFilter();
}
/**
 * Strategy for starting the Python process of a run configuration.
 */
public interface PythonProcessStarter {
    /**
     * Starts the process described by {@code commandLine} for {@code config}.
     *
     * @throws ExecutionException if the process could not be started
     */
    @NotNull
    ProcessHandler start(@NotNull AbstractPythonRunConfiguration config,
                         @NotNull GeneralCommandLine commandLine) throws ExecutionException;
}
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.kotlin.idea.completion.test.weighers;
import com.intellij.testFramework.TestDataPath;
import org.jetbrains.kotlin.test.JUnit3RunnerWithInners;
import org.jetbrains.kotlin.test.KotlinTestUtils;
import org.jetbrains.kotlin.test.TestMetadata;
import org.jetbrains.kotlin.test.TestRoot;
import org.junit.runner.RunWith;
/**
 * This class is generated by {@link org.jetbrains.kotlin.testGenerator.generator.TestGenerator}.
 * DO NOT MODIFY MANUALLY.
 *
 * Each nested class mirrors one directory under {@code testData/weighers/basic};
 * regenerate via the test generator instead of editing by hand — manual changes
 * will be overwritten.
 */
@SuppressWarnings("all")
@TestRoot("completion/tests")
@TestDataPath("$CONTENT_ROOT")
@RunWith(JUnit3RunnerWithInners.class)
@TestMetadata("testData/weighers/basic")
public abstract class BasicCompletionWeigherTestGenerated extends AbstractBasicCompletionWeigherTest {
    // Weigher behavior around contextual `return` completion.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/contextualReturn")
    public abstract static class ContextualReturn extends AbstractBasicCompletionWeigherTest {
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic/contextualReturn/noReturnType")
        public static class NoReturnType extends AbstractBasicCompletionWeigherTest {
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("BeginOfNestedBlock.kt")
            public void testBeginOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/BeginOfNestedBlock.kt");
            }
            @TestMetadata("BeginOfTopLevelBlock.kt")
            public void testBeginOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/BeginOfTopLevelBlock.kt");
            }
            @TestMetadata("EndOfNestedBlock.kt")
            public void testEndOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/EndOfNestedBlock.kt");
            }
            @TestMetadata("EndOfTopLevelBlock.kt")
            public void testEndOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/EndOfTopLevelBlock.kt");
            }
            @TestMetadata("ForWithBody.kt")
            public void testForWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/ForWithBody.kt");
            }
            @TestMetadata("ForWithoutBody.kt")
            public void testForWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/ForWithoutBody.kt");
            }
            @TestMetadata("IfWithoutBody.kt")
            public void testIfWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/IfWithoutBody.kt");
            }
            @TestMetadata("InElvis.kt")
            public void testInElvis() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InElvis.kt");
            }
            @TestMetadata("InElvisWhenSmartCompletionWins.kt")
            public void testInElvisWhenSmartCompletionWins() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InElvisWhenSmartCompletionWins.kt");
            }
            @TestMetadata("InWhenSingleExpression.kt")
            public void testInWhenSingleExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InWhenSingleExpression.kt");
            }
            @TestMetadata("InWhenWithBody.kt")
            public void testInWhenWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/noReturnType/InWhenWithBody.kt");
            }
        }
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic/contextualReturn/withReturnType")
        public static class WithReturnType extends AbstractBasicCompletionWeigherTest {
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("BeginOfNestedBlock.kt")
            public void testBeginOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/BeginOfNestedBlock.kt");
            }
            @TestMetadata("BeginOfTopLevelBlock.kt")
            public void testBeginOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/BeginOfTopLevelBlock.kt");
            }
            @TestMetadata("EndOfNestedBlock.kt")
            public void testEndOfNestedBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/EndOfNestedBlock.kt");
            }
            @TestMetadata("EndOfTopLevelBlock.kt")
            public void testEndOfTopLevelBlock() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/EndOfTopLevelBlock.kt");
            }
            @TestMetadata("ForWithBody.kt")
            public void testForWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/ForWithBody.kt");
            }
            @TestMetadata("ForWithoutBody.kt")
            public void testForWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/ForWithoutBody.kt");
            }
            @TestMetadata("IfWithoutBody.kt")
            public void testIfWithoutBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/IfWithoutBody.kt");
            }
            @TestMetadata("InElvis.kt")
            public void testInElvis() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvis.kt");
            }
            @TestMetadata("InElvisInReturn.kt")
            public void testInElvisInReturn() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvisInReturn.kt");
            }
            @TestMetadata("InElvisWhenSmartCompletionWins.kt")
            public void testInElvisWhenSmartCompletionWins() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InElvisWhenSmartCompletionWins.kt");
            }
            @TestMetadata("InIfAsReturnedExpression.kt")
            public void testInIfAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InIfAsReturnedExpression.kt");
            }
            @TestMetadata("InIfInWhenWithBodyAsReturnedExpression.kt")
            public void testInIfInWhenWithBodyAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InIfInWhenWithBodyAsReturnedExpression.kt");
            }
            @TestMetadata("InNotElvisBinaryOperator.kt")
            public void testInNotElvisBinaryOperator() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InNotElvisBinaryOperator.kt");
            }
            @TestMetadata("InWhenAsReturnedExpression.kt")
            public void testInWhenAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenAsReturnedExpression.kt");
            }
            @TestMetadata("InWhenSingleExpression.kt")
            public void testInWhenSingleExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenSingleExpression.kt");
            }
            @TestMetadata("InWhenWithBody.kt")
            public void testInWhenWithBody() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenWithBody.kt");
            }
            @TestMetadata("InWhenWithBodyAsReturnedExpression.kt")
            public void testInWhenWithBodyAsReturnedExpression() throws Exception {
                runTest("testData/weighers/basic/contextualReturn/withReturnType/InWhenWithBodyAsReturnedExpression.kt");
            }
        }
    }
    // Weighing driven by expected-info heuristics (name similarity, expected type, etc.).
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/expectedInfo")
    public static class ExpectedInfo extends AbstractBasicCompletionWeigherTest {
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("AfterAs.kt")
        public void testAfterAs() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/AfterAs.kt");
        }
        @TestMetadata("CompanionObjectMethod.kt")
        public void testCompanionObjectMethod() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/CompanionObjectMethod.kt");
        }
        @TestMetadata("EnumEntries.kt")
        public void testEnumEntries() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/EnumEntries.kt");
        }
        @TestMetadata("ExpectedType.kt")
        public void testExpectedType() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/ExpectedType.kt");
        }
        @TestMetadata("ExpectedType2.kt")
        public void testExpectedType2() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/ExpectedType2.kt");
        }
        @TestMetadata("LambdaValue.kt")
        public void testLambdaValue() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/LambdaValue.kt");
        }
        @TestMetadata("MultiArgsItem.kt")
        public void testMultiArgsItem() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/MultiArgsItem.kt");
        }
        @TestMetadata("NameSimilarity.kt")
        public void testNameSimilarity() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarity.kt");
        }
        @TestMetadata("NameSimilarityAndNoExpectedType.kt")
        public void testNameSimilarityAndNoExpectedType() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarityAndNoExpectedType.kt");
        }
        @TestMetadata("NameSimilarityAndNoExpectedType2.kt")
        public void testNameSimilarityAndNoExpectedType2() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NameSimilarityAndNoExpectedType2.kt");
        }
        @TestMetadata("NoStupidComparison.kt")
        public void testNoStupidComparison() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/NoStupidComparison.kt");
        }
        @TestMetadata("Null.kt")
        public void testNull() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/Null.kt");
        }
        @TestMetadata("PreferMatchingThis.kt")
        public void testPreferMatchingThis() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/PreferMatchingThis.kt");
        }
        @TestMetadata("TrueFalse.kt")
        public void testTrueFalse() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/TrueFalse.kt");
        }
        @TestMetadata("WhenByEnum.kt")
        public void testWhenByEnum() throws Exception {
            runTest("testData/weighers/basic/expectedInfo/WhenByEnum.kt");
        }
    }
    // Weighing driven purely by the expected type at the completion position.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/expectedType")
    public static class ExpectedType extends AbstractBasicCompletionWeigherTest {
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("ifConditionQualified.kt")
        public void testIfConditionQualified() throws Exception {
            runTest("testData/weighers/basic/expectedType/ifConditionQualified.kt");
        }
        @TestMetadata("returnFromFunction.kt")
        public void testReturnFromFunction() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromFunction.kt");
        }
        @TestMetadata("returnFromFunctionQualifiedSelector.kt")
        public void testReturnFromFunctionQualifiedSelector() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromFunctionQualifiedSelector.kt");
        }
        @TestMetadata("returnFromLambda.kt")
        public void testReturnFromLambda() throws Exception {
            runTest("testData/weighers/basic/expectedType/returnFromLambda.kt");
        }
        @TestMetadata("whileConditionQualified.kt")
        public void testWhileConditionQualified() throws Exception {
            runTest("testData/weighers/basic/expectedType/whileConditionQualified.kt");
        }
    }
    // Weighing by parameter name/type matching.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic/parameterNameAndType")
    public static class ParameterNameAndType extends AbstractBasicCompletionWeigherTest {
        private void runTest(String testDataFilePath) throws Exception {
            KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
        }
        @TestMetadata("Deprecated.kt")
        public void testDeprecated() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/Deprecated.kt");
        }
        @TestMetadata("FromCurrentFilePriority.kt")
        public void testFromCurrentFilePriority() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/FromCurrentFilePriority.kt");
        }
        @TestMetadata("ImportedFirst.kt")
        public void testImportedFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/ImportedFirst.kt");
        }
        @TestMetadata("MoreWordsMatchFirst.kt")
        public void testMoreWordsMatchFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/MoreWordsMatchFirst.kt");
        }
        @TestMetadata("ShorterFirst.kt")
        public void testShorterFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/ShorterFirst.kt");
        }
        @TestMetadata("StartMatchFirst.kt")
        public void testStartMatchFirst() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/StartMatchFirst.kt");
        }
        @TestMetadata("UserPrefix.kt")
        public void testUserPrefix() throws Exception {
            runTest("testData/weighers/basic/parameterNameAndType/UserPrefix.kt");
        }
    }
    // Remaining test data files, split into generator-assigned buckets.
    @RunWith(JUnit3RunnerWithInners.class)
    @TestMetadata("testData/weighers/basic")
    public abstract static class Uncategorized extends AbstractBasicCompletionWeigherTest {
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic")
        public static class TestBucket001 extends AbstractBasicCompletionWeigherTest {
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("AfterNullable.kt")
            public void testAfterNullable() throws Exception {
                runTest("testData/weighers/basic/AfterNullable.kt");
            }
            @TestMetadata("CallableReference_NothingLast.kt")
            public void testCallableReference_NothingLast() throws Exception {
                runTest("testData/weighers/basic/CallableReference_NothingLast.kt");
            }
            @TestMetadata("Callables.kt")
            public void testCallables() throws Exception {
                runTest("testData/weighers/basic/Callables.kt");
            }
            @TestMetadata("DelegateToOtherObject.kt")
            public void testDelegateToOtherObject() throws Exception {
                runTest("testData/weighers/basic/DelegateToOtherObject.kt");
            }
            @TestMetadata("DeprecatedFun.kt")
            public void testDeprecatedFun() throws Exception {
                runTest("testData/weighers/basic/DeprecatedFun.kt");
            }
            @TestMetadata("DeprecatedJavaClass.kt")
            public void testDeprecatedJavaClass() throws Exception {
                runTest("testData/weighers/basic/DeprecatedJavaClass.kt");
            }
            @TestMetadata("DeprecatedSinceKotlinFun.kt")
            public void testDeprecatedSinceKotlinFun() throws Exception {
                runTest("testData/weighers/basic/DeprecatedSinceKotlinFun.kt");
            }
            @TestMetadata("DslCallWithExpectedType.kt")
            public void testDslCallWithExpectedType() throws Exception {
                runTest("testData/weighers/basic/DslCallWithExpectedType.kt");
            }
            @TestMetadata("DslCalls.kt")
            public void testDslCalls() throws Exception {
                runTest("testData/weighers/basic/DslCalls.kt");
            }
            @TestMetadata("DslCallsAnnotatedFunctionType.kt")
            public void testDslCallsAnnotatedFunctionType() throws Exception {
                runTest("testData/weighers/basic/DslCallsAnnotatedFunctionType.kt");
            }
            @TestMetadata("DslCallsWithMultipleReceivers.kt")
            public void testDslCallsWithMultipleReceivers() throws Exception {
                runTest("testData/weighers/basic/DslCallsWithMultipleReceivers.kt");
            }
            @TestMetadata("DslMemberCalls.kt")
            public void testDslMemberCalls() throws Exception {
                runTest("testData/weighers/basic/DslMemberCalls.kt");
            }
            @TestMetadata("ExactMatchForKeyword.kt")
            public void testExactMatchForKeyword() throws Exception {
                runTest("testData/weighers/basic/ExactMatchForKeyword.kt");
            }
            @TestMetadata("ImportedFirst.kt")
            public void testImportedFirst() throws Exception {
                runTest("testData/weighers/basic/ImportedFirst.kt");
            }
            @TestMetadata("ImportedFirstForJavaClass.kt")
            public void testImportedFirstForJavaClass() throws Exception {
                runTest("testData/weighers/basic/ImportedFirstForJavaClass.kt");
            }
            @TestMetadata("ImportedOrder.kt")
            public void testImportedOrder() throws Exception {
                runTest("testData/weighers/basic/ImportedOrder.kt");
            }
            @TestMetadata("KT-25588_1.kts")
            public void testKT_25588_1() throws Exception {
                runTest("testData/weighers/basic/KT-25588_1.kts");
            }
            @TestMetadata("KT-25588_2.kts")
            public void testKT_25588_2() throws Exception {
                runTest("testData/weighers/basic/KT-25588_2.kts");
            }
            @TestMetadata("KeywordsLast.kt")
            public void testKeywordsLast() throws Exception {
                runTest("testData/weighers/basic/KeywordsLast.kt");
            }
            @TestMetadata("LambdaSignature.kt")
            public void testLambdaSignature() throws Exception {
                runTest("testData/weighers/basic/LambdaSignature.kt");
            }
        }
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic")
        public static class TestBucket002 extends AbstractBasicCompletionWeigherTest {
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("LocalFileBeforeImported.kt")
            public void testLocalFileBeforeImported() throws Exception {
                runTest("testData/weighers/basic/LocalFileBeforeImported.kt");
            }
            @TestMetadata("LocalValuesAndParams.kt")
            public void testLocalValuesAndParams() throws Exception {
                runTest("testData/weighers/basic/LocalValuesAndParams.kt");
            }
            @TestMetadata("LocalsBeforeKeywords.kt")
            public void testLocalsBeforeKeywords() throws Exception {
                runTest("testData/weighers/basic/LocalsBeforeKeywords.kt");
            }
            @TestMetadata("LocalsPropertiesKeywords.kt")
            public void testLocalsPropertiesKeywords() throws Exception {
                runTest("testData/weighers/basic/LocalsPropertiesKeywords.kt");
            }
            @TestMetadata("NamedParameters.kt")
            public void testNamedParameters() throws Exception {
                runTest("testData/weighers/basic/NamedParameters.kt");
            }
            @TestMetadata("NamedParameters2.kt")
            public void testNamedParameters2() throws Exception {
                runTest("testData/weighers/basic/NamedParameters2.kt");
            }
            @TestMetadata("NamedParameters3.kt")
            public void testNamedParameters3() throws Exception {
                runTest("testData/weighers/basic/NamedParameters3.kt");
            }
            @TestMetadata("NoExpectedType.kt")
            public void testNoExpectedType() throws Exception {
                runTest("testData/weighers/basic/NoExpectedType.kt");
            }
            @TestMetadata("NullArgForInfixFunctionOnTheLeft.kt")
            public void testNullArgForInfixFunctionOnTheLeft() throws Exception {
                runTest("testData/weighers/basic/NullArgForInfixFunctionOnTheLeft.kt");
            }
            @TestMetadata("NullArgForInfixFunctionOnTheRight.kt")
            public void testNullArgForInfixFunctionOnTheRight() throws Exception {
                runTest("testData/weighers/basic/NullArgForInfixFunctionOnTheRight.kt");
            }
            @TestMetadata("NullArgForNotImportedFunction.kt")
            public void testNullArgForNotImportedFunction() throws Exception {
                runTest("testData/weighers/basic/NullArgForNotImportedFunction.kt");
            }
            @TestMetadata("NullForIfConditionOnTheLeft.kt")
            public void testNullForIfConditionOnTheLeft() throws Exception {
                runTest("testData/weighers/basic/NullForIfConditionOnTheLeft.kt");
            }
            @TestMetadata("NullForIfConditionOnTheRight.kt")
            public void testNullForIfConditionOnTheRight() throws Exception {
                runTest("testData/weighers/basic/NullForIfConditionOnTheRight.kt");
            }
            @TestMetadata("Packages.kt")
            public void testPackages() throws Exception {
                runTest("testData/weighers/basic/Packages.kt");
            }
            @TestMetadata("ParametersBeforeKeywords.kt")
            public void testParametersBeforeKeywords() throws Exception {
                runTest("testData/weighers/basic/ParametersBeforeKeywords.kt");
            }
            @TestMetadata("PreferFromJdk.kt")
            public void testPreferFromJdk() throws Exception {
                runTest("testData/weighers/basic/PreferFromJdk.kt");
            }
            @TestMetadata("PreferGetMethodToProperty.kt")
            public void testPreferGetMethodToProperty() throws Exception {
                runTest("testData/weighers/basic/PreferGetMethodToProperty.kt");
            }
            @TestMetadata("Prefix.kt")
            public void testPrefix() throws Exception {
                runTest("testData/weighers/basic/Prefix.kt");
            }
            @TestMetadata("PropertiesBeforeKeywords.kt")
            public void testPropertiesBeforeKeywords() throws Exception {
                runTest("testData/weighers/basic/PropertiesBeforeKeywords.kt");
            }
            @TestMetadata("StaticMembers.kt")
            public void testStaticMembers() throws Exception {
                runTest("testData/weighers/basic/StaticMembers.kt");
            }
        }
        @RunWith(JUnit3RunnerWithInners.class)
        @TestMetadata("testData/weighers/basic")
        public static class TestBucket003 extends AbstractBasicCompletionWeigherTest {
            private void runTest(String testDataFilePath) throws Exception {
                KotlinTestUtils.runTest(this::doTest, this, testDataFilePath);
            }
            @TestMetadata("SuperMembers.kt")
            public void testSuperMembers() throws Exception {
                runTest("testData/weighers/basic/SuperMembers.kt");
            }
            @TestMetadata("TopLevelKeywordWithClassName.kt")
            public void testTopLevelKeywordWithClassName() throws Exception {
                runTest("testData/weighers/basic/TopLevelKeywordWithClassName.kt");
            }
            @TestMetadata("UnavailableDslReceiver.kt")
            public void testUnavailableDslReceiver() throws Exception {
                runTest("testData/weighers/basic/UnavailableDslReceiver.kt");
            }
        }
    }
}
| |
/*
* Copyright 2014-present Open Networking Foundation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.cluster.impl;
import java.util.List;
import java.util.Set;
import java.util.function.Consumer;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import org.easymock.EasyMock;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.onlab.junit.TestUtils;
import org.onlab.packet.IpAddress;
import org.onosproject.cfg.ComponentConfigService;
import org.onosproject.cluster.ClusterService;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.DefaultControllerNode;
import org.onosproject.cluster.NodeId;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.mastership.MastershipService;
import org.onosproject.mastership.MastershipStore;
import org.onosproject.mastership.MastershipTermService;
import org.onosproject.net.DeviceId;
import org.onosproject.net.config.NetworkConfigServiceAdapter;
import org.onosproject.net.region.Region;
import org.onosproject.net.region.RegionId;
import org.onosproject.net.region.RegionStore;
import org.onosproject.net.region.impl.RegionManager;
import org.onosproject.store.cluster.StaticClusterService;
import org.onosproject.store.region.impl.DistributedRegionStore;
import org.onosproject.store.service.TestStorageService;
import org.onosproject.store.trivial.SimpleMastershipStore;
import com.google.common.collect.Sets;
import com.google.common.util.concurrent.Futures;
import org.onosproject.upgrade.impl.UpgradeServiceAdapter;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.junit.Assert.*;
import static org.onosproject.net.MastershipRole.MASTER;
import static org.onosproject.net.MastershipRole.NONE;
import static org.onosproject.net.MastershipRole.STANDBY;
import static org.onosproject.net.NetTestTools.injectEventDispatcher;
import static org.onosproject.net.region.Region.Type.METRO;
/**
 * Test codifying the mastership service contracts.
 *
 * <p>Exercises role assignment, role relinquishing, term tracking, and load
 * balancing (both plain and region-aware) on a {@code MastershipManager}
 * wired to in-memory test doubles for the cluster, mastership store, and
 * region subsystems.
 */
public class MastershipManagerTest {

    // The local (test JVM) node identity plus one remote peer used by the
    // basic role-manipulation tests.
    private static final NodeId NID_LOCAL = new NodeId("local");
    private static final NodeId NID_OTHER = new NodeId("foo");
    private static final IpAddress LOCALHOST = IpAddress.valueOf("127.0.0.1");

    // Two devices used by the simple (non-region) tests.
    private static final DeviceId DEV_MASTER = DeviceId.deviceId("of:1");
    private static final DeviceId DEV_OTHER = DeviceId.deviceId("of:2");

    // Regions, devices, and controller nodes used by the balancing tests.
    // NOTE(review): RID2 is declared but never used in this class.
    private static final RegionId RID1 = RegionId.regionId("r1");
    private static final RegionId RID2 = RegionId.regionId("r2");
    private static final DeviceId DID1 = DeviceId.deviceId("foo:d1");
    private static final DeviceId DID2 = DeviceId.deviceId("foo:d2");
    private static final DeviceId DID3 = DeviceId.deviceId("foo:d3");
    private static final DeviceId DID4 = DeviceId.deviceId("foo:d4");
    private static final DeviceId DID5 = DeviceId.deviceId("foo:d5");
    private static final DeviceId DID6 = DeviceId.deviceId("foo:d6");
    private static final NodeId NID1 = NodeId.nodeId("n1");
    private static final NodeId NID2 = NodeId.nodeId("n2");
    private static final NodeId NID3 = NodeId.nodeId("n3");
    private static final NodeId NID4 = NodeId.nodeId("n4");
    private static final ControllerNode CNODE1 =
            new DefaultControllerNode(NID1, IpAddress.valueOf("127.0.1.1"));
    private static final ControllerNode CNODE2 =
            new DefaultControllerNode(NID2, IpAddress.valueOf("127.0.1.2"));
    private static final ControllerNode CNODE3 =
            new DefaultControllerNode(NID3, IpAddress.valueOf("127.0.1.3"));
    private static final ControllerNode CNODE4 =
            new DefaultControllerNode(NID4, IpAddress.valueOf("127.0.1.4"));

    // Unit under test; 'service' is the same instance viewed through the
    // public MastershipService API.
    private MastershipManager mgr;
    protected MastershipService service;
    private TestRegionManager regionManager;
    private RegionStore regionStore;
    private TestClusterService testClusterService;

    @Before
    public void setUp() throws Exception {
        mgr = new MastershipManager();
        service = mgr;
        // Synchronous dispatcher so events are delivered inline during tests.
        injectEventDispatcher(mgr, new TestEventDispatcher());
        testClusterService = new TestClusterService();
        mgr.clusterService = testClusterService;
        mgr.upgradeService = new UpgradeServiceAdapter();
        mgr.store = new TestSimpleMastershipStore(mgr.clusterService);
        // Region store backed by an in-memory storage service; activated
        // reflectively because its activate() is not publicly accessible.
        regionStore = new DistributedRegionStore();
        TestUtils.setField(regionStore, "storageService", new TestStorageService());
        TestUtils.callMethod(regionStore, "activate",
                             new Class<?>[] {});
        regionManager = new TestRegionManager();
        TestUtils.setField(regionManager, "store", regionStore);
        regionManager.activate();
        mgr.regionService = regionManager;
        // Mock out the component configuration service. Two getProperties()
        // expectations are recorded because both activate() and deactivate()
        // are expected to query properties during the test lifecycle.
        ComponentConfigService mockConfigService =
                EasyMock.createMock(ComponentConfigService.class);
        expect(mockConfigService.getProperties(anyObject())).andReturn(ImmutableSet.of());
        mockConfigService.registerProperties(mgr.getClass());
        expectLastCall();
        mockConfigService.unregisterProperties(mgr.getClass(), false);
        expectLastCall();
        expect(mockConfigService.getProperties(anyObject())).andReturn(ImmutableSet.of());
        mgr.cfgService = mockConfigService;
        replay(mockConfigService);
        mgr.activate();
    }

    @After
    public void tearDown() {
        // Deactivate and unhook all collaborators so each test starts clean.
        mgr.deactivate();
        mgr.clusterService = null;
        injectEventDispatcher(mgr, null);
        regionManager.deactivate();
        mgr.regionService = null;
        mgr.store = null;
    }

    // Assigning MASTER to another node leaves the local node with NONE until
    // it requests a role (becoming STANDBY), then explicitly takes MASTER.
    @Test
    public void setRole() {
        mgr.setRole(NID_OTHER, DEV_MASTER, MASTER);
        assertEquals("wrong local role:", NONE, mgr.getLocalRole(DEV_MASTER));
        assertEquals("wrong obtained role:", STANDBY, Futures.getUnchecked(mgr.requestRoleFor(DEV_MASTER)));
        //set to master
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        assertEquals("wrong local role:", MASTER, mgr.getLocalRole(DEV_MASTER));
    }

    // Relinquishing behavior: with no backups the device drops to NONE; when
    // a STANDBY exists, mastership transfers to it.
    @Test
    public void relinquishMastership() {
        //no backups - should just turn to NONE for device.
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        assertEquals("wrong role:", MASTER, mgr.getLocalRole(DEV_MASTER));
        mgr.relinquishMastership(DEV_MASTER);
        assertNull("wrong master:", mgr.getMasterFor(DEV_OTHER));
        assertEquals("wrong role:", NONE, mgr.getLocalRole(DEV_MASTER));
        //not master, nothing should happen
        mgr.setRole(NID_LOCAL, DEV_OTHER, NONE);
        mgr.relinquishMastership(DEV_OTHER);
        assertNull("wrong role:", mgr.getMasterFor(DEV_OTHER));
        //provide NID_OTHER as backup and relinquish
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        assertEquals("wrong master:", NID_LOCAL, mgr.getMasterFor(DEV_MASTER));
        mgr.setRole(NID_OTHER, DEV_MASTER, STANDBY);
        mgr.relinquishMastership(DEV_MASTER);
        assertEquals("wrong master:", NID_OTHER, mgr.getMasterFor(DEV_MASTER));
    }

    // Requesting a role returns MASTER for devices we own and STANDBY for
    // devices mastered elsewhere.
    @Test
    public void requestRoleFor() {
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        mgr.setRole(NID_OTHER, DEV_OTHER, MASTER);
        //local should be master for one but standby for other
        assertEquals("wrong role:", MASTER, Futures.getUnchecked(mgr.requestRoleFor(DEV_MASTER)));
        assertEquals("wrong role:", STANDBY, Futures.getUnchecked(mgr.requestRoleFor(DEV_OTHER)));
    }

    // getMasterFor tracks explicit mastership handovers.
    @Test
    public void getMasterFor() {
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        mgr.setRole(NID_OTHER, DEV_OTHER, MASTER);
        assertEquals("wrong master:", NID_LOCAL, mgr.getMasterFor(DEV_MASTER));
        assertEquals("wrong master:", NID_OTHER, mgr.getMasterFor(DEV_OTHER));
        //have NID_OTHER hand over DEV_OTHER to NID_LOCAL
        mgr.setRole(NID_LOCAL, DEV_OTHER, MASTER);
        assertEquals("wrong master:", NID_LOCAL, mgr.getMasterFor(DEV_OTHER));
    }

    // getDevicesOf counts only devices for which the node is MASTER.
    @Test
    public void getDevicesOf() {
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        mgr.setRole(NID_LOCAL, DEV_OTHER, STANDBY);
        assertEquals("should be one device:", 1, mgr.getDevicesOf(NID_LOCAL).size());
        //hand both devices to NID_LOCAL
        mgr.setRole(NID_LOCAL, DEV_OTHER, MASTER);
        assertEquals("should be two devices:", 2, mgr.getDevicesOf(NID_LOCAL).size());
    }

    // Each mastership handover increments the device's term number.
    @Test
    public void termService() {
        MastershipTermService ts = mgr;
        //term = 1 for both
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        assertEquals("inconsistent term: ", 1, ts.getMastershipTerm(DEV_MASTER).termNumber());
        //hand devices to NID_LOCAL and back: term = 1 + 2
        mgr.setRole(NID_OTHER, DEV_MASTER, MASTER);
        mgr.setRole(NID_LOCAL, DEV_MASTER, MASTER);
        assertEquals("inconsistent terms: ", 3, ts.getMastershipTerm(DEV_MASTER).termNumber());
    }

    // Balancing must reassign devices orphaned on an INACTIVE node and even
    // out the load across the ACTIVE nodes.
    @Test
    public void balanceWithOrphans() {
        // Setup cluster of three nodes
        testClusterService.put(CNODE1, ControllerNode.State.ACTIVE);
        testClusterService.put(CNODE2, ControllerNode.State.INACTIVE);
        testClusterService.put(CNODE3, ControllerNode.State.ACTIVE);
        // Pre-assign some devices to each of the node
        // Leave some devices as orphans assigned to a downed node
        assignRoles(NID1, ImmutableSet.of(DID1, DID2, DID3, DID4));
        assignRoles(NID2, ImmutableSet.of(DID5));
        assignRoles(NID3, ImmutableSet.of(DID6));
        // Trigger load balancing
        mgr.balanceRoles();
        // Make sure we have a balanced load
        // Make sure that we no longer have any orphans
        assertEquals("incorrect balance for node 1", 3, mgr.getDevicesOf(NID1).size());
        assertEquals("incorrect balance for node 2", 0, mgr.getDevicesOf(NID2).size());
        assertEquals("incorrect balance for node 3", 3, mgr.getDevicesOf(NID3).size());
    }

    /**
     * Makes {@code nid} MASTER for each device in {@code deviceIds} and
     * STANDBY for every other known test device.
     */
    private void assignRoles(NodeId nid, Set<DeviceId> deviceIds) {
        Set<DeviceId> all = ImmutableSet.of(DID1, DID2, DID3, DID4, DID5, DID6);
        for (DeviceId did : all) {
            mgr.setRole(nid, did, deviceIds.contains(did) ? MASTER : STANDBY);
        }
    }

    // Region balancing with two single-node master tiers: devices move to the
    // first tier's node, then fail over to the second tier when it goes down.
    @Test
    public void balanceWithRegion1() {
        //set up region - 2 sets of masters with 1 node in each
        Set<NodeId> masterSet1 = ImmutableSet.of(NID1);
        Set<NodeId> masterSet2 = ImmutableSet.of(NID2);
        List<Set<NodeId>> masters = ImmutableList.of(masterSet1, masterSet2);
        Region r = regionManager.createRegion(RID1, "R1", METRO, masters);
        regionManager.addDevices(RID1, ImmutableSet.of(DID1, DID2));
        Set<DeviceId> deviceIds = regionManager.getRegionDevices(RID1);
        assertEquals("incorrect device count", 2, deviceIds.size());
        testClusterService.put(CNODE1, ControllerNode.State.ACTIVE);
        testClusterService.put(CNODE2, ControllerNode.State.ACTIVE);
        //set master to non region nodes
        mgr.setRole(NID_LOCAL, DID1, MASTER);
        mgr.setRole(NID_LOCAL, DID2, MASTER);
        assertEquals("wrong local role:", MASTER, mgr.getLocalRole(DID1));
        assertEquals("wrong local role:", MASTER, mgr.getLocalRole(DID2));
        assertEquals("wrong master:", NID_LOCAL, mgr.getMasterFor(DID1));
        assertEquals("wrong master:", NID_LOCAL, mgr.getMasterFor(DID2));
        //do region balancing
        mgr.useRegionForBalanceRoles = true;
        mgr.balanceRoles();
        assertEquals("wrong master:", NID1, mgr.getMasterFor(DID1));
        assertEquals("wrong master:", NID1, mgr.getMasterFor(DID2));
        // make N1 inactive
        testClusterService.put(CNODE1, ControllerNode.State.INACTIVE);
        mgr.balanceRoles();
        assertEquals("wrong master:", NID2, mgr.getMasterFor(DID1));
        assertEquals("wrong master:", NID2, mgr.getMasterFor(DID2));
    }

    // Region balancing with a three-node primary tier and a one-node backup
    // tier; exercises successive node failures and recoveries, checking that
    // mastership stays within the expected tier at each step.
    @Test
    public void balanceWithRegion2() {
        //set up region - 2 sets of masters with (3 nodes, 1 node)
        Set<NodeId> masterSet1 = ImmutableSet.of(NID1, NID3, NID4);
        Set<NodeId> masterSet2 = ImmutableSet.of(NID2);
        List<Set<NodeId>> masters = ImmutableList.of(masterSet1, masterSet2);
        Region r = regionManager.createRegion(RID1, "R1", METRO, masters);
        Set<DeviceId> deviceIdsOrig = ImmutableSet.of(DID1, DID2, DID3, DEV_OTHER);
        regionManager.addDevices(RID1, deviceIdsOrig);
        Set<DeviceId> deviceIds = regionManager.getRegionDevices(RID1);
        assertEquals("incorrect device count", deviceIdsOrig.size(), deviceIds.size());
        assertEquals("incorrect devices in region", deviceIdsOrig, deviceIds);
        testClusterService.put(CNODE1, ControllerNode.State.ACTIVE);
        testClusterService.put(CNODE2, ControllerNode.State.ACTIVE);
        testClusterService.put(CNODE3, ControllerNode.State.ACTIVE);
        testClusterService.put(CNODE4, ControllerNode.State.ACTIVE);
        //set master to non region nodes
        deviceIdsOrig.forEach(deviceId1 -> mgr.setRole(NID_LOCAL, deviceId1, MASTER));
        checkDeviceMasters(deviceIds, Sets.newHashSet(NID_LOCAL), deviceId ->
                assertEquals("wrong local role:", MASTER, mgr.getLocalRole(deviceId)));
        //do region balancing
        mgr.useRegionForBalanceRoles = true;
        mgr.balanceRoles();
        Set<NodeId> expectedMasters = Sets.newHashSet(NID1, NID3, NID4);
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N1 inactive
        testClusterService.put(CNODE1, ControllerNode.State.INACTIVE);
        expectedMasters.remove(NID1);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N4 inactive
        testClusterService.put(CNODE4, ControllerNode.State.INACTIVE);
        expectedMasters.remove(NID4);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N3 inactive
        testClusterService.put(CNODE3, ControllerNode.State.INACTIVE);
        expectedMasters = Sets.newHashSet(NID2);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N3 active
        testClusterService.put(CNODE3, ControllerNode.State.ACTIVE);
        expectedMasters = Sets.newHashSet(NID3);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N4 active
        testClusterService.put(CNODE4, ControllerNode.State.ACTIVE);
        expectedMasters.add(NID4);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
        // make N1 active
        testClusterService.put(CNODE1, ControllerNode.State.ACTIVE);
        expectedMasters.add(NID1);
        mgr.balanceRoles();
        checkDeviceMasters(deviceIds, expectedMasters);
    }

    /** Convenience overload with no per-device role check. */
    private void checkDeviceMasters(Set<DeviceId> deviceIds, Set<NodeId> expectedMasters) {
        checkDeviceMasters(deviceIds, expectedMasters, null);
    }

    /**
     * Asserts that every device's master is one of {@code expectedMasters}
     * and that the device count per master is balanced (spread of at most 1).
     *
     * @param deviceIds       devices to verify
     * @param expectedMasters nodes allowed to hold mastership
     * @param checkRole       optional extra per-device assertion; may be null
     */
    private void checkDeviceMasters(Set<DeviceId> deviceIds, Set<NodeId> expectedMasters,
                                    Consumer<DeviceId> checkRole) {
        // each device's master must be contained in the list of expectedMasters
        deviceIds.forEach(deviceId -> {
            assertTrue("wrong master:", expectedMasters.contains(mgr.getMasterFor(deviceId)));
            if (checkRole != null) {
                checkRole.accept(deviceId);
            }
        });
        // each node in expectedMasters must have approximately the same number of devices
        // NOTE(review): the balance assertion runs inside the loop, so it is
        // checked against a partial min/max on early iterations; the partial
        // spread can only be <= the final spread, so no false failures occur.
        if (expectedMasters.size() > 1) {
            int minValue = Integer.MAX_VALUE;
            int maxDevices = -1;
            for (NodeId nodeId: expectedMasters) {
                int numDevicesManagedByNode = mgr.getDevicesOf(nodeId).size();
                if (numDevicesManagedByNode < minValue) {
                    minValue = numDevicesManagedByNode;
                }
                if (numDevicesManagedByNode > maxDevices) {
                    maxDevices = numDevicesManagedByNode;
                }
                assertTrue("not balanced:", maxDevices - minValue <= 1);
            }
        }
    }

    /**
     * Cluster-service stub exposing a mutable node/state map so tests can
     * flip nodes between ACTIVE and INACTIVE.
     */
    private final class TestClusterService extends StaticClusterService {
        ControllerNode local = new DefaultControllerNode(NID_LOCAL, LOCALHOST);

        @Override
        public ControllerNode getLocalNode() {
            return local;
        }

        public void put(ControllerNode cn, ControllerNode.State state) {
            nodes.put(cn.id(), cn);
            nodeStates.put(cn.id(), state);
        }
    }

    /** Simple in-memory mastership store wired to the test cluster service. */
    private final class TestSimpleMastershipStore extends SimpleMastershipStore
            implements MastershipStore {

        public TestSimpleMastershipStore(ClusterService clusterService) {
            super.clusterService = clusterService;
        }
    }

    /** RegionManager with test event dispatcher and no-op network config. */
    private class TestRegionManager extends RegionManager {
        TestRegionManager() {
            eventDispatcher = new TestEventDispatcher();
            networkConfigService = new NetworkConfigServiceAdapter();
        }
    }
}
| |
// Copyright 2016 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.remote;
import static java.nio.charset.StandardCharsets.US_ASCII;
import com.google.common.base.Preconditions;
import com.google.common.base.Predicate;
import com.google.common.collect.ImmutableCollection;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Interner;
import com.google.common.collect.Iterables;
import com.google.common.graph.Traverser;
import com.google.common.io.BaseEncoding;
import com.google.devtools.build.lib.actions.ActionInput;
import com.google.devtools.build.lib.actions.ActionInputHelper;
import com.google.devtools.build.lib.actions.DigestOfDirectoryException;
import com.google.devtools.build.lib.actions.FileArtifactValue;
import com.google.devtools.build.lib.actions.MetadataProvider;
import com.google.devtools.build.lib.actions.cache.VirtualActionInput;
import com.google.devtools.build.lib.concurrent.BlazeInterners;
import com.google.devtools.build.lib.concurrent.ThreadSafety.Immutable;
import com.google.devtools.build.lib.concurrent.ThreadSafety.ThreadSafe;
import com.google.devtools.build.lib.remote.util.DigestUtil;
import com.google.devtools.build.lib.vfs.Dirent;
import com.google.devtools.build.lib.vfs.Path;
import com.google.devtools.build.lib.vfs.PathFragment;
import com.google.devtools.build.lib.vfs.Symlinks;
import com.google.devtools.remoteexecution.v1test.Digest;
import com.google.devtools.remoteexecution.v1test.Directory;
import com.google.protobuf.ByteString;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.SortedMap;
import java.util.concurrent.ConcurrentHashMap;
import javax.annotation.Nullable;
/**
 * A factory and repository for {@link TreeNode} objects. Provides directory structure traversals,
 * computing and caching Merkle hashes on all objects.
 *
 * <p>All nodes are interned, so structurally equal subtrees share one canonical
 * instance; digest caches are keyed on those canonical instances.
 */
@ThreadSafe
public final class TreeNodeRepository {
  // Hex encoder used to key reverseInputMap by a digest's lowercase hex string.
  private static final BaseEncoding LOWER_CASE_HEX = BaseEncoding.base16().lowerCase();

  // In this implementation, symlinks are NOT followed when expanding directory artifacts
  public static final Symlinks SYMLINK_POLICY = Symlinks.NOFOLLOW;

  // Guava tree traverser over the child edges; used for pre-order walks.
  private final Traverser<TreeNode> traverser =
      Traverser.forTree((TreeNode node) -> children(node));

  /**
   * A single node in a hierarchical directory structure. Leaves are the Artifacts, although we only
   * use the ActionInput interface. We assume that the objects used for the ActionInputs are unique
   * (same data corresponds to a canonical object in memory).
   *
   * <p>There are three cases:
   *
   * <ol>
   *   <li>The node is a leaf that represents an artifact file.
   *   <li>The node is a directory optionally associated with an artifact (an "artifact directory").
   *   <li>The node is a leaf that is the descendant of an artifact directory. In this case, the
   *       node is associated with a BasicActionInput, not a full Artifact.
   * </ol>
   */
  @Immutable
  @ThreadSafe
  public static final class TreeNode {
    // Precomputed in the constructor; cheap interning depends on it.
    private final int hashCode;
    private final ImmutableList<ChildEntry> childEntries; // no need to make it a map thus far.
    @Nullable private final ActionInput actionInput;
    private final boolean isLeaf;

    /** A pair of path segment, TreeNode. */
    @Immutable
    public static final class ChildEntry {
      private final String segment;
      private final TreeNode child;

      public ChildEntry(String segment, TreeNode child) {
        this.segment = segment;
        this.child = child;
      }

      public TreeNode getChild() {
        return child;
      }

      public String getSegment() {
        return segment;
      }

      @Override
      @SuppressWarnings("ReferenceEquality")
      public boolean equals(Object o) {
        if (o == this) {
          return true;
        }
        if (!(o instanceof ChildEntry)) {
          return false;
        }
        ChildEntry other = (ChildEntry) o;
        // Pointer comparison for the TreeNode as it is interned
        return other.segment.equals(segment) && other.child == child;
      }

      @Override
      public int hashCode() {
        return Objects.hash(segment, child);
      }

      // Orders sibling entries by path segment, matching Directory proto order.
      // NOTE(review): public mutable static field — callers could reassign it.
      public static Comparator<ChildEntry> segmentOrder =
          Comparator.comparing(ChildEntry::getSegment);
    }

    // Should only be called by the TreeNodeRepository.
    private TreeNode(Iterable<ChildEntry> childEntries, @Nullable ActionInput actionInput) {
      isLeaf = false;
      this.actionInput = actionInput;
      this.childEntries = ImmutableList.copyOf(childEntries);
      if (actionInput != null) {
        hashCode = actionInput.hashCode(); // This will ensure efficient interning of TreeNodes as
        // long as all ActionInputs either implement data-based hashCode or are interned themselves.
      } else {
        hashCode = Arrays.hashCode(this.childEntries.toArray());
      }
    }

    // Should only be called by the TreeNodeRepository.
    private TreeNode(ActionInput actionInput) {
      isLeaf = true;
      this.actionInput =
          Preconditions.checkNotNull(actionInput, "a TreeNode leaf should have an ActionInput");
      this.childEntries = ImmutableList.of();
      hashCode = actionInput.hashCode(); // This will ensure efficient interning of TreeNodes as
      // long as all ActionInputs either implement data-based hashCode or are interned themselves.
    }

    public ActionInput getActionInput() {
      return actionInput;
    }

    public ImmutableList<ChildEntry> getChildEntries() {
      return childEntries;
    }

    public boolean isLeaf() {
      return isLeaf;
    }

    @Override
    public int hashCode() {
      return hashCode;
    }

    @Override
    public boolean equals(Object o) {
      if (this == o) {
        return true;
      }
      if (!(o instanceof TreeNode)) {
        return false;
      }
      TreeNode otherNode = (TreeNode) o;
      // Full comparison of ActionInputs. If pointers are different, will compare paths.
      return Objects.equals(otherNode.actionInput, actionInput)
          && childEntries.equals(otherNode.childEntries);
    }

    // Renders this subtree, one line per entry, indented one space per level.
    private String toDebugStringAtLevel(int level) {
      char[] prefix = new char[level];
      Arrays.fill(prefix, ' ');
      StringBuilder sb = new StringBuilder();
      if (isLeaf()) {
        sb.append('\n');
        sb.append(prefix);
        sb.append("leaf: ");
        sb.append(actionInput);
      } else {
        for (ChildEntry entry : childEntries) {
          sb.append('\n');
          sb.append(prefix);
          sb.append(entry.segment);
          sb.append(entry.child.toDebugStringAtLevel(level + 1));
        }
      }
      return sb.toString();
    }

    /** Human-readable dump of the whole subtree, for debugging only. */
    public String toDebugString() {
      return toDebugStringAtLevel(0);
    }
  }

  // Shared sentinel returned for empty input sets.
  private static final TreeNode EMPTY_NODE =
      new TreeNode(ImmutableList.<TreeNode.ChildEntry>of(), null);

  // Keep only one canonical instance of every TreeNode in the repository.
  private final Interner<TreeNode> interner = BlazeInterners.newWeakInterner();
  // Merkle hashes are computed and cached by the repository, therefore execRoot must
  // be part of the state.
  private final Path execRoot;
  private final MetadataProvider inputFileCache;
  // Maps hex digest strings (as ByteStrings) back to the inputs they came from.
  private final Map<ByteString, ActionInput> reverseInputMap = new ConcurrentHashMap<>();
  // For directories that are themselves artifacts, map of the ActionInput to the Merkle hash
  private final Map<ActionInput, Digest> inputDirectoryDigestCache = new HashMap<>();
  private final Map<TreeNode, Digest> treeNodeDigestCache = new HashMap<>();
  private final Map<Digest, TreeNode> digestTreeNodeCache = new HashMap<>();
  private final Map<TreeNode, Directory> directoryCache = new HashMap<>();
  private final Map<VirtualActionInput, Digest> virtualInputDigestCache = new HashMap<>();
  private final Map<Digest, VirtualActionInput> digestVirtualInputCache = new HashMap<>();
  private final DigestUtil digestUtil;

  public TreeNodeRepository(Path execRoot, MetadataProvider inputFileCache, DigestUtil digestUtil) {
    this.execRoot = execRoot;
    this.inputFileCache = inputFileCache;
    this.digestUtil = digestUtil;
  }

  public MetadataProvider getInputFileCache() {
    return inputFileCache;
  }

  /** Returns the direct children of {@code node}, in child-entry order. */
  public Iterable<TreeNode> children(TreeNode node) {
    return Iterables.transform(node.getChildEntries(), TreeNode.ChildEntry::getChild);
  }

  /** Traverse the directory structure in order (pre-order tree traversal). */
  public Iterable<TreeNode> descendants(TreeNode node) {
    return traverser.depthFirstPreOrder(node);
  }

  /**
   * Traverse the directory structure in order (pre-order tree traversal), return only the leaves.
   */
  public Iterable<TreeNode> leaves(TreeNode node) {
    return Iterables.filter(
        descendants(node),
        new Predicate<TreeNode>() {
          @Override
          public boolean apply(TreeNode node) {
            return node.isLeaf();
          }
        });
  }

  /**
   * This function is a temporary and highly inefficient hack! It builds the tree from a ready list
   * of input files. TODO(olaola): switch to creating and maintaining the TreeNodeRepository based
   * on the build graph structure.
   *
   * @param sortedMap exec-path-sorted inputs; sortedness is what makes the
   *     recursive range partitioning in buildParentNode correct
   */
  public TreeNode buildFromActionInputs(SortedMap<PathFragment, ActionInput> sortedMap)
      throws IOException {
    ImmutableList.Builder<ImmutableList<String>> segments = ImmutableList.builder();
    for (PathFragment path : sortedMap.keySet()) {
      segments.add(path.getSegments());
    }
    List<ActionInput> inputs = new ArrayList<>();
    for (Map.Entry<PathFragment, ActionInput> e : sortedMap.entrySet()) {
      inputs.add(e.getValue());
    }
    return buildParentNode(inputs, segments.build(), 0, inputs.size(), 0);
  }

  // Expand the descendant of an artifact (input) directory
  // Recursively reads the directory from disk (symlinks not followed) and
  // builds interned child entries sorted by name.
  private List<TreeNode.ChildEntry> buildInputDirectoryEntries(Path path) throws IOException {
    List<Dirent> sortedDirent = new ArrayList<>(path.readdir(SYMLINK_POLICY));
    sortedDirent.sort(Comparator.comparing(Dirent::getName));
    List<TreeNode.ChildEntry> entries = new ArrayList<>(sortedDirent.size());
    for (Dirent dirent : sortedDirent) {
      String name = dirent.getName();
      Path child = path.getRelative(name);
      TreeNode childNode;
      if (dirent.getType() == Dirent.Type.DIRECTORY) {
        childNode = interner.intern(new TreeNode(buildInputDirectoryEntries(child), null));
      } else {
        childNode = interner.intern(new TreeNode(ActionInputHelper.fromPath(child.asFragment())));
      }
      entries.add(new TreeNode.ChildEntry(name, childNode));
    }
    return entries;
  }

  /**
   * Recursively builds the tree for inputs[inputsStart, inputsEnd) whose exec
   * paths share the first {@code segmentIndex} path segments. Relies on the
   * inputs being sorted so equal segments are contiguous.
   */
  @SuppressWarnings("ReferenceEquality") // Segments are interned.
  private TreeNode buildParentNode(
      List<ActionInput> inputs,
      ImmutableList<ImmutableList<String>> segments,
      int inputsStart,
      int inputsEnd,
      int segmentIndex)
      throws IOException {
    if (segments.isEmpty()) {
      // We sometimes have actions with no inputs (e.g., echo "xyz" > $@), so we need to handle that
      // case here.
      Preconditions.checkState(inputs.isEmpty());
      return EMPTY_NODE;
    }
    if (segmentIndex == segments.get(inputsStart).size()) {
      // Leaf node reached. Must be unique.
      Preconditions.checkArgument(
          inputsStart == inputsEnd - 1, "Encountered two inputs with the same path.");
      ActionInput input = inputs.get(inputsStart);
      try {
        // An input that is a directory on disk is expanded into its entries.
        if (!(input instanceof VirtualActionInput)
            && getInputMetadata(input).getType().isDirectory()) {
          Path leafPath = execRoot.getRelative(input.getExecPathString());
          return interner.intern(new TreeNode(buildInputDirectoryEntries(leafPath), input));
        }
      } catch (DigestOfDirectoryException e) {
        // Metadata lookup signalling "this is a directory" — expand it too.
        Path leafPath = execRoot.getRelative(input.getExecPathString());
        return interner.intern(new TreeNode(buildInputDirectoryEntries(leafPath), input));
      }
      return interner.intern(new TreeNode(input));
    }
    ArrayList<TreeNode.ChildEntry> entries = new ArrayList<>();
    String segment = segments.get(inputsStart).get(segmentIndex);
    // Walk the sorted range; each maximal run of equal segments becomes one child.
    for (int inputIndex = inputsStart; inputIndex < inputsEnd; ++inputIndex) {
      if (inputIndex + 1 == inputsEnd
          || !segment.equals(segments.get(inputIndex + 1).get(segmentIndex))) {
        entries.add(
            new TreeNode.ChildEntry(
                segment,
                buildParentNode(inputs, segments, inputsStart, inputIndex + 1, segmentIndex + 1)));
        if (inputIndex + 1 < inputsEnd) {
          inputsStart = inputIndex + 1;
          segment = segments.get(inputsStart).get(segmentIndex);
        }
      }
    }
    Collections.sort(entries, TreeNode.ChildEntry.segmentOrder);
    return interner.intern(new TreeNode(entries, null));
  }

  // Builds (or returns the cached) Directory proto for a non-leaf node and
  // populates the digest caches for the node as a side effect.
  private synchronized Directory getOrComputeDirectory(TreeNode node) throws IOException {
    // Assumes all child digests have already been computed!
    Preconditions.checkArgument(!node.isLeaf());
    Directory directory = directoryCache.get(node);
    if (directory == null) {
      Directory.Builder b = Directory.newBuilder();
      for (TreeNode.ChildEntry entry : node.getChildEntries()) {
        TreeNode child = entry.getChild();
        if (child.isLeaf()) {
          ActionInput input = child.getActionInput();
          final Digest digest;
          if (input instanceof VirtualActionInput) {
            VirtualActionInput virtualInput = (VirtualActionInput) input;
            digest = digestUtil.compute(virtualInput);
            virtualInputDigestCache.put(virtualInput, digest);
            // There may be multiple inputs with the same digest. In that case, we don't care which
            // one we get back from the digestVirtualInputCache later.
            digestVirtualInputCache.put(digest, virtualInput);
          } else {
            digest = DigestUtil.getFromInputCache(input, inputFileCache);
          }
          b.addFilesBuilder().setName(entry.getSegment()).setDigest(digest).setIsExecutable(true);
        } else {
          // Child directory digest must already be cached (post-order invariant).
          Digest childDigest = Preconditions.checkNotNull(treeNodeDigestCache.get(child));
          if (child.getActionInput() != null) {
            inputDirectoryDigestCache.put(child.getActionInput(), childDigest);
          }
          b.addDirectoriesBuilder().setName(entry.getSegment()).setDigest(childDigest);
        }
      }
      directory = b.build();
      directoryCache.put(node, directory);
      Digest digest = digestUtil.compute(directory);
      treeNodeDigestCache.put(node, digest);
      digestTreeNodeCache.put(digest, node);
    }
    return directory;
  }

  // Recursively traverses the tree, expanding and computing Merkle digests for nodes for which
  // they have not yet been computed and cached.
  public void computeMerkleDigests(TreeNode root) throws IOException {
    synchronized (this) {
      if (directoryCache.get(root) != null) {
        // Strong assumption: the cache is valid, i.e. parent present implies children present.
        return;
      }
    }
    if (!root.isLeaf()) {
      // Post-order: children first, so getOrComputeDirectory sees cached child digests.
      for (TreeNode child : children(root)) {
        computeMerkleDigests(child);
      }
      getOrComputeDirectory(root);
    }
  }

  /**
   * Should only be used after computeMerkleDigests has been called on one of the node ancestors.
   * Returns the precomputed digest.
   */
  public Digest getMerkleDigest(TreeNode node) throws IOException {
    return node.isLeaf()
        ? actionInputToDigest(node.getActionInput())
        : treeNodeDigestCache.get(node);
  }

  /**
   * Returns the precomputed digests for both data and metadata. Should only be used after
   * computeMerkleDigests has been called on one of the node ancestors.
   */
  public ImmutableCollection<Digest> getAllDigests(TreeNode root) throws IOException {
    ImmutableSet.Builder<Digest> digests = ImmutableSet.builder();
    for (TreeNode node : descendants(root)) {
      digests.add(
          node.isLeaf()
              ? actionInputToDigest(node.getActionInput())
              : Preconditions.checkNotNull(treeNodeDigestCache.get(node)));
    }
    return digests.build();
  }

  // Resolves a leaf input's digest: virtual inputs come from the virtual
  // cache, directories from the Merkle cache, regular files from the input
  // file cache (which also populates reverseInputMap via getInputMetadata).
  private Digest actionInputToDigest(ActionInput input) throws IOException {
    if (input instanceof VirtualActionInput) {
      return Preconditions.checkNotNull(virtualInputDigestCache.get(input));
    }
    FileArtifactValue metadata = getInputMetadata(input);
    byte[] digest = metadata.getDigest();
    if (digest == null) {
      // If the artifact does not have a digest, it is because it is a directory.
      // We get the digest from the set of Merkle hashes computed in this TreeNodeRepository.
      return Preconditions.checkNotNull(
          inputDirectoryDigestCache.get(input),
          "a directory should have a precomputed Merkle hash (instead of a digest)");
    }
    return DigestUtil.getFromInputCache(input, inputFileCache);
  }

  /**
   * Serializes all of the subtree to a Directory list. TODO(olaola): add a version that only copies
   * a part of the tree that we are interested in. Should only be used after computeMerkleDigests
   * has been called on one of the node ancestors.
   */
  // Note: this is not, strictly speaking, thread safe. If someone is deleting cached Merkle hashes
  // while this is executing, it will trigger an exception. But I think this is WAI.
  public ImmutableList<Directory> treeToDirectories(TreeNode root) {
    ImmutableList.Builder<Directory> directories = ImmutableList.builder();
    for (TreeNode node : descendants(root)) {
      if (!node.isLeaf()) {
        directories.add(Preconditions.checkNotNull(directoryCache.get(node)));
      }
    }
    return directories.build();
  }

  /**
   * Should only be used on digests created by a call to computeMerkleDigests. Looks up ActionInputs
   * or Directory messages by cached digests and adds them to the lists.
   */
  public void getDataFromDigests(
      Iterable<Digest> digests,
      Map<Digest, ActionInput> actionInputs,
      Map<Digest, Directory> nodes) {
    for (Digest digest : digests) {
      TreeNode treeNode = digestTreeNodeCache.get(digest);
      if (treeNode != null) {
        nodes.put(digest, Preconditions.checkNotNull(directoryCache.get(treeNode)));
      } else { // If not there, it must be an ActionInput.
        // reverseInputMap is keyed by the lowercase-hex digest string, which
        // matches digest.getHash() encoded as ASCII bytes.
        ByteString hexDigest = ByteString.copyFromUtf8(digest.getHash());
        ActionInput input = reverseInputMap.get(hexDigest);
        if (input == null) {
          // ... or a VirtualActionInput.
          input = digestVirtualInputCache.get(digest);
        }
        actionInputs.put(digest, Preconditions.checkNotNull(input));
      }
    }
  }

  // Fetches metadata for an input and, when a digest is available, records
  // the hex-digest -> input mapping used by getDataFromDigests.
  private FileArtifactValue getInputMetadata(ActionInput input) throws IOException {
    FileArtifactValue metadata =
        Preconditions.checkNotNull(
            inputFileCache.getMetadata(input), "Missing metadata for: %s", input);
    if (metadata.getDigest() != null) {
      reverseInputMap.put(
          ByteString.copyFrom(LOWER_CASE_HEX.encode(metadata.getDigest()).getBytes(US_ASCII)),
          input);
    }
    return metadata;
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.index.translog;
import joptsimple.OptionParser;
import joptsimple.OptionSet;
import joptsimple.OptionSpec;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexCommit;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.apache.lucene.store.Lock;
import org.apache.lucene.store.LockObtainFailedException;
import org.apache.lucene.store.NativeFSLockFactory;
import org.apache.lucene.store.OutputStreamDataOutput;
import org.apache.lucene.util.BytesRef;
import org.apache.lucene.util.IOUtils;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.cli.EnvironmentAwareCommand;
import org.elasticsearch.cli.Terminal;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.PathUtils;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexNotFoundException;
import java.io.IOException;
import java.nio.channels.Channels;
import java.nio.channels.FileChannel;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.nio.file.StandardOpenOption;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Command-line tool that replaces a shard's translog with a new, empty one
 * while preserving the translog UUID and generation recorded in the shard's
 * last Lucene commit. Any documents that exist only in the old translog are
 * lost. The shard's index directory is write-locked for the duration of the
 * run so the tool cannot race a live Elasticsearch node.
 */
public class TruncateTranslogCommand extends EnvironmentAwareCommand {

    /** Required -d/--dir option: translog directory to truncate. */
    private final OptionSpec<String> translogFolder;

    /** Optional -b/--batch flag: auto-confirm the deletion warning. */
    private final OptionSpec<Void> batchMode;

    public TruncateTranslogCommand() {
        super("Truncates a translog to create a new, empty translog");
        this.translogFolder = parser.acceptsAll(Arrays.asList("d", "dir"),
            "Translog Directory location on disk")
            .withRequiredArg()
            .required();
        this.batchMode = parser.acceptsAll(Arrays.asList("b", "batch"),
            "Enable batch mode explicitly, automatic confirmation of warnings");
    }

    // Visible for testing
    public OptionParser getParser() {
        return this.parser;
    }

    @Override
    protected void printAdditionalHelp(Terminal terminal) {
        terminal.println("This tool truncates the translog and translog");
        terminal.println("checkpoint files to create a new translog");
    }

    @SuppressForbidden(reason = "Necessary to use the path passed in")
    private Path getTranslogPath(OptionSet options) {
        return PathUtils.get(translogFolder.value(options), "", "");
    }

    /**
     * Performs the truncation: validates the directory layout, locks the
     * shard, reads the translog UUID/generation from the last Lucene commit,
     * writes a fresh empty translog and checkpoint under temporary names, and
     * atomically swaps them into place.
     */
    @Override
    protected void execute(Terminal terminal, OptionSet options, Environment env) throws Exception {
        boolean batch = options.has(batchMode);

        Path translogPath = getTranslogPath(options);
        Path shardPath = translogPath.getParent();
        // Guard against a filesystem-root argument: resolving "index" off a
        // null parent would otherwise fail with an unhelpful NPE.
        if (shardPath == null) {
            throw new ElasticsearchException("translog directory [" + translogPath + "] must be located inside a shard directory");
        }
        Path idxLocation = shardPath.resolve("index");

        if (Files.exists(translogPath) == false || Files.isDirectory(translogPath) == false) {
            throw new ElasticsearchException("translog directory [" + translogPath + "], must exist and be a directory");
        }
        if (Files.exists(idxLocation) == false || Files.isDirectory(idxLocation) == false) {
            throw new ElasticsearchException("unable to find a shard at [" + idxLocation + "], which must exist and be a directory");
        }

        // Hold the lock open for the duration of the tool running
        try (Directory dir = FSDirectory.open(idxLocation, NativeFSLockFactory.INSTANCE);
             Lock writeLock = dir.obtainLock(IndexWriter.WRITE_LOCK_NAME)) {
            Set<Path> translogFiles;
            try {
                terminal.println("Checking existing translog files");
                translogFiles = filesInDirectory(translogPath);
            } catch (IOException e) {
                terminal.println("encountered IOException while listing directory, aborting...");
                throw new ElasticsearchException("failed to find existing translog files", e);
            }

            // Warn about ES being stopped and files being deleted
            warnAboutDeletingFiles(terminal, translogFiles, batch);

            List<IndexCommit> commits;
            try {
                terminal.println("Reading translog UUID information from Lucene commit from shard at [" + idxLocation + "]");
                commits = DirectoryReader.listCommits(dir);
            } catch (IndexNotFoundException infe) {
                throw new ElasticsearchException("unable to find a valid shard at [" + idxLocation + "]", infe);
            }

            // Retrieve the generation and UUID from the existing data
            Map<String, String> commitData = commits.get(commits.size() - 1).getUserData();
            String translogGeneration = commitData.get(Translog.TRANSLOG_GENERATION_KEY);
            String translogUUID = commitData.get(Translog.TRANSLOG_UUID_KEY);
            if (translogGeneration == null || translogUUID == null) {
                throw new ElasticsearchException("shard must have a valid translog generation and UUID but got: [{}] and: [{}]",
                    translogGeneration, translogUUID);
            }
            terminal.println("Translog Generation: " + translogGeneration);
            terminal.println("Translog UUID : " + translogUUID);

            // Write under temp- names first so a crash part way through never
            // leaves a half-written file with the real name.
            Path tempEmptyCheckpoint = translogPath.resolve("temp-" + Translog.CHECKPOINT_FILE_NAME);
            Path realEmptyCheckpoint = translogPath.resolve(Translog.CHECKPOINT_FILE_NAME);
            Path tempEmptyTranslog = translogPath.resolve("temp-" + Translog.TRANSLOG_FILE_PREFIX +
                translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);
            Path realEmptyTranslog = translogPath.resolve(Translog.TRANSLOG_FILE_PREFIX +
                translogGeneration + Translog.TRANSLOG_FILE_SUFFIX);

            // Write empty checkpoint and translog to empty files
            long gen = Long.parseLong(translogGeneration);
            int translogLen = writeEmptyTranslog(tempEmptyTranslog, translogUUID);
            writeEmptyCheckpoint(tempEmptyCheckpoint, translogLen, gen);

            terminal.println("Removing existing translog files");
            IOUtils.rm(translogFiles.toArray(new Path[0]));

            terminal.println("Creating new empty checkpoint at [" + realEmptyCheckpoint + "]");
            Files.move(tempEmptyCheckpoint, realEmptyCheckpoint, StandardCopyOption.ATOMIC_MOVE);
            terminal.println("Creating new empty translog at [" + realEmptyTranslog + "]");
            Files.move(tempEmptyTranslog, realEmptyTranslog, StandardCopyOption.ATOMIC_MOVE);

            // Fsync the translog directory after rename
            IOUtils.fsync(translogPath, true);
        } catch (LockObtainFailedException lofe) {
            // Preserve the original failure as the cause instead of discarding it.
            throw new ElasticsearchException(
                "Failed to lock shard's directory at [" + idxLocation + "], is Elasticsearch still running?", lofe);
        }

        terminal.println("Done.");
    }

    /** Write a checkpoint file to the given location with the given generation */
    public static void writeEmptyCheckpoint(Path filename, int translogLength, long translogGeneration) throws IOException {
        Checkpoint emptyCheckpoint = new Checkpoint(translogLength, 0, translogGeneration);
        Checkpoint.write(FileChannel::open, filename, emptyCheckpoint,
            StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
        // fsync with metadata here to make sure.
        IOUtils.fsync(filename, false);
    }

    /**
     * Write a translog containing the given translog UUID to the given location. Returns the number of bytes written.
     */
    public static int writeEmptyTranslog(Path filename, String translogUUID) throws IOException {
        final BytesRef translogRef = new BytesRef(translogUUID);
        try (FileChannel fc = FileChannel.open(filename, StandardOpenOption.WRITE, StandardOpenOption.READ, StandardOpenOption.CREATE_NEW);
             OutputStreamDataOutput out = new OutputStreamDataOutput(Channels.newOutputStream(fc))) {
            TranslogWriter.writeHeader(out, translogRef);
            // Force the header to disk before the caller deletes the old files.
            fc.force(true);
        }
        return TranslogWriter.getHeaderLength(translogRef.length);
    }

    /** Show a warning about deleting files, asking for a confirmation if {@code batchMode} is false */
    public static void warnAboutDeletingFiles(Terminal terminal, Set<Path> files, boolean batchMode) {
        terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        terminal.println("!   WARNING: Elasticsearch MUST be stopped before running this tool   !");
        terminal.println("!                                                                     !");
        terminal.println("!   WARNING:    Documents inside of translog files will be lost      !");
        terminal.println("!                                                                     !");
        terminal.println("!   WARNING:          The following files will be DELETED!           !");
        terminal.println("!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!");
        for (Path file : files) {
            terminal.println("--> " + file);
        }
        terminal.println("");
        if (batchMode == false) {
            String text = terminal.readText("Continue and DELETE files? [y/N] ");
            if (!text.equalsIgnoreCase("y")) {
                throw new ElasticsearchException("aborted by user");
            }
        }
    }

    /** Return a Set of all files in a given directory */
    public static Set<Path> filesInDirectory(Path directory) throws IOException {
        Set<Path> files = new HashSet<>();
        try (DirectoryStream<Path> stream = Files.newDirectoryStream(directory)) {
            for (Path file : stream) {
                files.add(file);
            }
        }
        return files;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.ode.store;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.ode.bpel.compiler.BpelC;
import org.apache.ode.bpel.compiler.DefaultResourceFinder;
import org.apache.ode.bpel.compiler.WSDLLocatorImpl;
import org.apache.ode.bpel.extension.ExtensionValidator;
import org.apache.ode.bpel.compiler.wsdl.Definition4BPEL;
import org.apache.ode.bpel.compiler.wsdl.WSDLFactory4BPEL;
import org.apache.ode.bpel.compiler.wsdl.WSDLFactoryBPEL20;
import org.apache.ode.bpel.dd.DeployDocument;
import org.apache.ode.bpel.dd.TDeployment;
import org.apache.ode.bpel.dd.TDeployment.Process;
import org.apache.ode.bpel.iapi.ContextException;
import org.apache.ode.bpel.rapi.Serializer;
import org.apache.ode.utils.fs.FileUtils;
import org.apache.xmlbeans.XmlOptions;
import org.w3c.dom.Node;
import javax.wsdl.Definition;
import javax.wsdl.WSDLException;
import javax.wsdl.xml.WSDLReader;
import javax.xml.namespace.QName;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeSet;
/**
* Container providing various functions on the deployment directory.
*/
/**
 * Container providing various functions on the deployment directory.
 */
class DeploymentUnitDir {

    private static Log __log = LogFactory.getLog(DeploymentUnitDir.class);

    /** Name of the deployment unit (the directory name). */
    private String _name;
    /** Root directory of the deployment unit. */
    private File _duDirectory;
    /** The mandatory deploy.xml descriptor inside the deployment directory. */
    private File _descriptorFile;

    /** Compiled processes keyed by process QName; rebuilt by {@link #scan()}. */
    private HashMap<QName, CBPInfo> _processes = new HashMap<QName, CBPInfo>();
    /** Per-process deployment-descriptor entries, keyed by process QName. */
    private HashMap<QName, TDeployment.Process> _processInfo = new HashMap<QName, TDeployment.Process>();

    private Map<QName, ExtensionValidator> _extensionValidators;

    /** Lazily parsed deployment descriptor; see {@link #getDeploymentDescriptor()}. */
    private volatile DeployDocument _dd;
    /** Lazily built WSDL registry; see {@link #getDocRegistry()}. */
    private volatile DocumentRegistry _docRegistry;

    private long _version = -1;

    private static final FileFilter _wsdlFilter = new FileFilter() {
        public boolean accept(File path) {
            return path.getName().endsWith(".wsdl") && path.isFile();
        }
    };

    private static final FileFilter _cbpFilter = new FileFilter() {
        public boolean accept(File path) {
            return path.getName().endsWith(".cbp") && path.isFile();
        }
    };

    private static final FileFilter _bpelFilter = new FileFilter() {
        public boolean accept(File path) {
            return path.getName().endsWith(".bpel") && path.isFile();
        }
    };

    private static final FileFilter _endpointFilter = new FileFilter() {
        public boolean accept(File path) {
            // endpoint-configuration.properties is deprecated, keep it for backward compatibility
            return (path.getName().endsWith(".endpoint") || path.getName().equals("endpoint-configuration.properties")) && path.isFile();
        }
    };

    DeploymentUnitDir(File dir) {
        if (!dir.exists())
            throw new IllegalArgumentException("Directory " + dir + " does not exist!");
        _duDirectory = dir;
        _name = dir.getName();
        _descriptorFile = new File(_duDirectory, "deploy.xml");
        if (!_descriptorFile.exists())
            throw new IllegalArgumentException("Directory " + dir + " does not contain a deploy.xml file!");
    }

    String getName() {
        return _duDirectory.getName();
    }

    /** Returns the compiled-process info for the given process type, or null. */
    CBPInfo getCBPInfo(QName typeName) {
        return _processes.get(typeName);
    }

    /**
     * Checking for each BPEL file if we have a corresponding compiled process. If we don't,
     * starts compilation.
     */
    void compile() {
        List<File> bpels = FileUtils.directoryEntriesInPath(_duDirectory, DeploymentUnitDir._bpelFilter);
        if (bpels.size() == 0)
            throw new IllegalArgumentException("Directory " + _duDirectory.getName() + " does not contain any process!");
        for (File bpel : bpels) {
            compile(bpel);
        }
    }

    /**
     * Re-reads the compiled (.cbp) processes and the deployment descriptor,
     * replacing the cached process maps wholesale.
     */
    void scan() {
        HashMap<QName, CBPInfo> processes = new HashMap<QName, CBPInfo>();
        List<File> cbps = FileUtils.directoryEntriesInPath(_duDirectory, DeploymentUnitDir._cbpFilter);
        for (File file : cbps) {
            CBPInfo cbpinfo = loadCBPInfo(file);
            processes.put(cbpinfo.processName, cbpinfo);
        }
        _processes = processes;

        HashMap<QName, Process> processInfo = new HashMap<QName, TDeployment.Process>();
        for (TDeployment.Process p : getDeploymentDescriptor().getDeploy().getProcessList()) {
            processInfo.put(p.getName(), p);
        }
        _processInfo = processInfo;
    }

    /** True if the deployment directory has been deleted from disk. */
    boolean isRemoved() {
        return !_duDirectory.exists();
    }

    /**
     * Compiles a single BPEL file into its .cbp form.
     *
     * @param bpelFile the process source to compile
     */
    private void compile(File bpelFile) {
        BpelC bpelc = BpelC.newBpelCompiler();
        // BPEL 1.1 does not suport the <import> element, so "global" WSDL needs to be configured explicitly.
        File bpel11wsdl = findBpel11Wsdl(bpelFile);
        if (bpel11wsdl != null)
            bpelc.setProcessWSDL(bpel11wsdl.toURI());
        bpelc.setCompileProperties(prepareCompileProperties(bpelFile));
        bpelc.setExtensionValidators(_extensionValidators);
        bpelc.setBaseDirectory(_duDirectory);
        try {
            bpelc.compile(bpelFile);
        } catch (IOException e) {
            __log.error("Compile error in " + bpelFile, e);
            throw new RuntimeException(e);
        }
    }

    /**
     * Load the parsed and compiled BPEL process definition.
     */
    private CBPInfo loadCBPInfo(File f) {
        InputStream is = null;
        try {
            is = new FileInputStream(f);
            Serializer ofh = new Serializer(is);
            return new CBPInfo(ofh.getType(), ofh.getGuid(), f);
        } catch (Exception e) {
            throw new ContextException("Couldn't read compiled BPEL process " + f.getAbsolutePath(), e);
        } finally {
            try {
                if (is != null)
                    is.close();
            } catch (Exception e) {
                // best-effort close; the stream is only read, nothing to recover
            }
        }
    }

    // Keep consistent with equals(): both use the deploy dir's absolute path.
    public int hashCode() {
        return getDeployDir().getAbsolutePath().hashCode();
    }

    public boolean equals(Object obj) {
        if (!(obj instanceof DeploymentUnitDir)) return false;
        return ((DeploymentUnitDir) obj).getDeployDir().getAbsolutePath().equals(getDeployDir().getAbsolutePath());
    }

    public File getDeployDir() {
        return _duDirectory;
    }

    /**
     * The list of endpoint configuration files contained in the deployment directory and its subdirectories.
     * Files are ordered lexicographically but for each directory, files come before its sudirectories.
     * <p>The list is built on each call to handle changes.
     *
     * @see org.apache.ode.utils.fs.FileUtils#directoryEntriesInPath(java.io.File)
     */
    public List<File> getEndpointConfigFiles() {
        return FileUtils.directoryEntriesInPath(getDeployDir(), _endpointFilter);
    }

    /**
     * Lazily parses and caches deploy.xml, mapping the legacy fivesight
     * namespace onto the current Apache ODE descriptor namespace.
     */
    public DeployDocument getDeploymentDescriptor() {
        if (_dd == null) {
            File ddLocation = new File(_duDirectory, "deploy.xml");
            try {
                XmlOptions options = new XmlOptions();
                HashMap<String, String> otherNs = new HashMap<String, String>();
                otherNs.put("http://ode.fivesight.com/schemas/2006/06/27/dd",
                        "http://www.apache.org/ode/schemas/dd/2007/03");
                options.setLoadSubstituteNamespaces(otherNs);
                _dd = DeployDocument.Factory.parse(ddLocation, options);
            } catch (Exception e) {
                throw new ContextException("Couldn't read deployment descriptor at location "
                        + ddLocation.getAbsolutePath(), e);
            }
        }
        return _dd;
    }

    /**
     * Lazily reads all WSDL documents under the deployment directory into a
     * {@link DocumentRegistry}, resolving imports relative to this directory.
     */
    public DocumentRegistry getDocRegistry() {
        if (_docRegistry == null) {
            _docRegistry = new DocumentRegistry();
            WSDLFactory4BPEL wsdlFactory = (WSDLFactory4BPEL) WSDLFactoryBPEL20.newInstance();
            WSDLReader r = wsdlFactory.newWSDLReader();
            DefaultResourceFinder rf = new DefaultResourceFinder(_duDirectory, _duDirectory);
            URI basedir = _duDirectory.toURI();
            List<File> wsdls = FileUtils.directoryEntriesInPath(_duDirectory, DeploymentUnitDir._wsdlFilter);
            for (File file : wsdls) {
                URI uri = basedir.relativize(file.toURI());
                try {
                    _docRegistry.addDefinition((Definition4BPEL) r.readWSDL(new WSDLLocatorImpl(rf, uri)));
                } catch (WSDLException e) {
                    throw new ContextException("Couldn't read WSDL document at " + uri, e);
                }
            }
        }
        return _docRegistry;
    }

    public Definition getDefinitionForService(QName name) {
        return getDocRegistry().getDefinition(name);
    }

    public Definition getDefinitionForPortType(QName name) {
        return getDocRegistry().getDefinitionForPortType(name);
    }

    public Collection<Definition> getDefinitions() {
        Definition4BPEL defs[] = getDocRegistry().getDefinitions();
        ArrayList<Definition> ret = new ArrayList<Definition>(defs.length);
        for (Definition4BPEL def : defs)
            ret.add(def);
        return ret;
    }

    public Set<QName> getProcessNames() {
        return _processInfo.keySet();
    }

    public String toString() {
        return "{DeploymentUnit " + _name + "}";
    }

    public TDeployment.Process getProcessDeployInfo(QName type) {
        // Dead empty null-check removed; the map is initialized at declaration
        // and replaced (never nulled) by scan().
        return _processInfo.get(type);
    }

    /** All non-hidden regular files under the deployment directory, recursively. */
    public List<File> allFiles() {
        return allFiles(_duDirectory);
    }

    private List<File> allFiles(File dir) {
        ArrayList<File> result = new ArrayList<File>();
        File[] entries = dir.listFiles();
        // listFiles() returns null on I/O error or if dir vanished; treat as empty.
        if (entries == null)
            return result;
        for (File file : entries) {
            // Skip hidden entries *before* recursing: previously the hidden
            // check ran after the directory recursion, so the contents of
            // hidden directories leaked into the result.
            if (file.isHidden()) continue;
            if (file.isDirectory()) {
                result.addAll(allFiles(file));
            } else if (file.isFile()) {
                result.add(file);
            }
        }
        return result;
    }

    /** Immutable value object describing one compiled (.cbp) process. */
    public final class CBPInfo {
        final QName processName;
        final String guid;
        final File cbp;

        CBPInfo(QName processName, String guid, File cbp) {
            this.processName = processName;
            this.guid = guid;
            this.cbp = cbp;
        }
    }

    /**
     * Builds the compiler properties for the given BPEL file from its entry in
     * the deployment descriptor, or null if the file has no entry.
     */
    private Map<String, Object> prepareCompileProperties(File bpelFile) {
        List<Process> plist = getDeploymentDescriptor().getDeploy().getProcessList();
        for (Process process : plist) {
            if (process.getFileName() == null || "".equals(process.getFileName()))
                continue;
            if (bpelFile.getName().equals(process.getFileName())) {
                Map<QName, Node> props = ProcessStoreImpl.calcInitialProperties(process);
                Map<String, Object> result = new HashMap<String, Object>();
                result.put(BpelC.PROCESS_CUSTOM_PROPERTIES, props);
                return result;
            }
        }
        return null;
    }

    /**
     * Figure out the name of the WSDL file for a BPEL 1.1 process.
     *
     * @param bpelFile BPEL process file name
     * @return file name of the WSDL, or null if none specified.
     */
    private File findBpel11Wsdl(File bpelFile) {
        List<Process> plist = getDeploymentDescriptor().getDeploy().getProcessList();
        for (Process process : plist) {
            if (process.getFileName() == null || "".equals(process.getFileName()))
                continue;
            if (!bpelFile.getName().equals(process.getFileName()))
                continue;
            if (process.getBpel11WsdlFileName() == null || "".equals(process.getBpel11WsdlFileName()))
                return null;
            return new File(bpelFile.getParentFile(), process.getBpel11WsdlFileName());
        }
        return null;
    }

    public long getVersion() {
        return _version;
    }

    public void setVersion(long version) {
        _version = version;
    }

    public void setExtensionValidators(Map<QName, ExtensionValidator> extensionValidators) {
        _extensionValidators = extensionValidators;
    }
}
| |
package com.mxgraph.view;
import java.awt.Point;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
import com.mxgraph.layout.mxIGraphLayout;
import com.mxgraph.model.mxGraphModel;
import com.mxgraph.model.mxIGraphModel;
import com.mxgraph.model.mxGraphModel.mxChildChange;
import com.mxgraph.model.mxGraphModel.mxGeometryChange;
import com.mxgraph.model.mxGraphModel.mxRootChange;
import com.mxgraph.model.mxGraphModel.mxTerminalChange;
import com.mxgraph.util.mxEvent;
import com.mxgraph.util.mxEventObject;
import com.mxgraph.util.mxEventSource;
import com.mxgraph.util.mxUndoableEdit;
import com.mxgraph.util.mxUtils;
import com.mxgraph.util.mxUndoableEdit.mxUndoableChange;
/**
* Implements a layout manager that updates the layout for a given transaction.
* The following example installs an automatic tree layout in a graph:
*
* <code>
* new mxLayoutManager(graph) {
*
* mxCompactTreeLayout layout = new mxCompactTreeLayout(graph);
*
* public mxIGraphLayout getLayout(Object parent)
* {
* if (graph.getModel().getChildCount(parent) > 0) {
* return layout;
* }
* return null;
* }
* };
* </code>
*
* This class fires the following event:
*
* mxEvent.LAYOUT_CELLS fires between begin- and endUpdate after all cells have
* been layouted in layoutCells. The <code>cells</code> property contains all
* cells that have been passed to layoutCells.
*/
/**
 * Implements a layout manager that updates the layout for a given transaction.
 * Subclasses override {@link #getLayout(Object)} to return the layout that
 * should run for a given parent cell; see the class example in mxGraph docs.
 *
 * Fires mxEvent.LAYOUT_CELLS between begin- and endUpdate after all cells
 * have been layouted in layoutCells; the <code>cells</code> property contains
 * all cells that were passed to layoutCells.
 */
public class mxLayoutManager extends mxEventSource
{

	/**
	 * Reference to the graph whose transactions are observed.
	 */
	protected mxGraph graph;

	/**
	 * Specifies if this layout manager reacts to events. Default is true.
	 */
	protected boolean enabled = true;

	/**
	 * Specifies if layouts of ancestor cells should also be executed
	 * when cells change. Default is true.
	 */
	protected boolean bubbling = true;

	/**
	 * Reacts to BEFORE_UNDO on the model and lays out all cells touched
	 * by the edit (if this manager is enabled).
	 */
	protected mxIEventListener undoHandler = new mxIEventListener()
	{
		public void invoke(Object source, mxEventObject evt)
		{
			if (isEnabled())
			{
				beforeUndo((mxUndoableEdit) evt.getProperty("edit"));
			}
		}
	};

	/**
	 * Reacts to MOVE_CELLS on the graph and forwards the move to the
	 * responsible layouts (if this manager is enabled).
	 */
	protected mxIEventListener moveHandler = new mxIEventListener()
	{
		public void invoke(Object source, mxEventObject evt)
		{
			if (isEnabled())
			{
				cellsMoved((Object[]) evt.getProperty("cells"), (Point) evt
						.getProperty("location"));
			}
		}
	};

	/**
	 * Constructs a layout manager listening to the given graph.
	 */
	public mxLayoutManager(mxGraph graph)
	{
		setGraph(graph);
	}

	/**
	 * @return the enabled
	 */
	public boolean isEnabled()
	{
		return enabled;
	}

	/**
	 * @param value the enabled to set
	 */
	public void setEnabled(boolean value)
	{
		enabled = value;
	}

	/**
	 * @return the bubbling
	 */
	public boolean isBubbling()
	{
		return bubbling;
	}

	/**
	 * @param value the bubbling to set
	 */
	public void setBubbling(boolean value)
	{
		bubbling = value;
	}

	/**
	 * @return the graph
	 */
	public mxGraph getGraph()
	{
		return graph;
	}

	/**
	 * Detaches the listeners from the previous graph (if any) and attaches
	 * them to the given graph.
	 *
	 * @param value the graph to set
	 */
	public void setGraph(mxGraph value)
	{
		if (graph != null)
		{
			graph.getModel().removeListener(undoHandler);
			graph.removeListener(moveHandler);
		}

		graph = value;

		if (graph != null)
		{
			graph.getModel().addListener(mxEvent.BEFORE_UNDO, undoHandler);
			graph.addListener(mxEvent.MOVE_CELLS, moveHandler);
		}
	}

	/**
	 * Hook for subclasses: returns the layout responsible for the given
	 * parent cell, or null for none. Default implementation returns null.
	 */
	protected mxIGraphLayout getLayout(Object parent)
	{
		return null;
	}

	/**
	 * Forwards a cell move to the layout of each moved cell's parent.
	 */
	protected void cellsMoved(Object[] cells, Point location)
	{
		if (cells == null || location == null)
		{
			return;
		}

		mxIGraphModel model = getGraph().getModel();

		// Checks if a layout exists to take care of the moving
		for (Object cell : cells)
		{
			mxIGraphLayout layout = getLayout(model.getParent(cell));

			if (layout != null)
			{
				layout.moveCell(cell, location.x, location.y);
			}
		}
	}

	/**
	 * Collects the cells affected by the given edit (optionally including all
	 * of their ancestors when bubbling is on) and lays them out.
	 */
	protected void beforeUndo(mxUndoableEdit edit)
	{
		Collection<Object> affected = getCellsForChanges(edit.getChanges());

		if (isBubbling())
		{
			mxIGraphModel model = getGraph().getModel();
			Object[] parents = mxGraphModel.getParents(model, affected.toArray());

			while (parents.length > 0)
			{
				affected.addAll(Arrays.asList(parents));
				parents = mxGraphModel.getParents(model, parents);
			}
		}

		layoutCells(mxUtils.sortCells(affected, false).toArray());
	}

	/**
	 * Returns the set of cells affected by the given changes. A root change
	 * invalidates everything, so an empty set is returned in that case.
	 */
	protected Collection<Object> getCellsForChanges(
			List<mxUndoableChange> changes)
	{
		Set<Object> cells = new HashSet<Object>();

		for (mxUndoableChange change : changes)
		{
			if (change instanceof mxRootChange)
			{
				return new HashSet<Object>();
			}

			cells.addAll(getCellsForChange(change));
		}

		return cells;
	}

	/**
	 * Returns the cells affected by a single change: the changed cell, its
	 * parent, and (for child changes) the previous parent.
	 */
	protected Collection<Object> getCellsForChange(mxUndoableChange change)
	{
		Set<Object> cells = new HashSet<Object>();
		mxIGraphModel model = getGraph().getModel();

		if (change instanceof mxChildChange)
		{
			mxChildChange childChange = (mxChildChange) change;
			Object child = childChange.getChild();
			Object parent = model.getParent(child);

			if (child != null)
			{
				cells.add(child);
			}

			if (parent != null)
			{
				cells.add(parent);
			}

			if (childChange.getPrevious() != null)
			{
				cells.add(childChange.getPrevious());
			}
		}
		else if (change instanceof mxTerminalChange
				|| change instanceof mxGeometryChange)
		{
			Object cell = (change instanceof mxTerminalChange)
					? ((mxTerminalChange) change).getCell()
					: ((mxGeometryChange) change).getCell();

			if (cell != null)
			{
				cells.add(cell);
				Object parent = model.getParent(cell);

				if (parent != null)
				{
					cells.add(parent);
				}
			}
		}

		return cells;
	}

	/**
	 * Runs the layout for every given cell except the root, inside a single
	 * model transaction, then fires LAYOUT_CELLS.
	 */
	protected void layoutCells(Object[] cells)
	{
		if (cells.length == 0)
		{
			return;
		}

		// Invokes the layouts while removing duplicates
		mxIGraphModel model = getGraph().getModel();

		model.beginUpdate();
		try
		{
			Object root = model.getRoot();

			for (Object cell : cells)
			{
				if (cell != root)
				{
					executeLayout(getLayout(cell), cell);
				}
			}

			fireEvent(new mxEventObject(mxEvent.LAYOUT_CELLS, "cells",
					cells));
		}
		finally
		{
			model.endUpdate();
		}
	}

	/**
	 * Executes the given layout on the given parent if both are non-null.
	 */
	protected void executeLayout(mxIGraphLayout layout, Object parent)
	{
		if (layout != null && parent != null)
		{
			layout.execute(parent);
		}
	}

	/**
	 * Detaches this manager from its graph.
	 */
	public void destroy()
	{
		setGraph(null);
	}

}
| |
/*
* Copyright (C) 2012 Louis Fazen
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package com.alphabetbloc.accessmrs.ui.admin;
import android.accounts.Account;
import android.accounts.AccountAuthenticatorResponse;
import android.accounts.AccountManager;
import android.accounts.AccountManagerCallback;
import android.accounts.AccountManagerFuture;
import android.content.ContentResolver;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SyncResult;
import android.os.Bundle;
import android.os.Handler;
import android.preference.PreferenceManager;
import android.util.Log;
import android.view.Gravity;
import android.view.View;
import android.view.View.OnClickListener;
import android.view.Window;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.ProgressBar;
import android.widget.TextView;
import android.widget.Toast;
import com.alphabetbloc.accessmrs.listeners.SyncDataListener;
import com.alphabetbloc.accessmrs.tasks.CheckConnectivityTask;
import com.alphabetbloc.accessmrs.utilities.App;
import com.alphabetbloc.accessmrs.utilities.EncryptionUtil;
import com.alphabetbloc.accessmrs.utilities.NetworkUtils;
import com.alphabetbloc.accessmrs.utilities.UiUtils;
import com.alphabetbloc.accessmrs.R;
/**
*
* @author Louis Fazen (louis.fazen@gmail.com)
*
*/
public class SetupAccountActivity extends BaseAdminActivity implements SyncDataListener {
// setAccountAuthenticatorResult(android.os.Bundle);
private static final String TAG = SetupAccountActivity.class.getSimpleName();
// Intents
public static final String USE_CONFIG_FILE = "use_config_file_defaults";
public static final String INITIAL_SETUP = "initial_setup";
public static final String LAUNCHED_FROM_ACCT_MGR = "launched_from_account_manager";
// views
protected static final int REQUEST_CREDENTIAL_CHANGE = 1;
protected static final int REQUEST_CREDENTIAL_SETUP = 2;
protected static final int CREDENTIAL_ENTRY_ERROR = 3;
protected static final int LOADING = 4;
protected static final int FINISHED = 5;
// buttons
protected static final int VERIFY_ENTRY = 1;
protected static final int ASK_NEW_ENTRY = 2;
protected static final int ENTRY_ERROR = 3;
private TextView mInstructionText;
private EditText mUserText;
private EditText mPwdText;
private String mCurrentUser;
private String mCurrentPwd;
private int mStep;
private Button mSubmitButton;
private boolean mImportFromConfig;
private String mNewUser;
private String mNewPwd;
private Button mOfflineSetupButton;
private ImageView mCenterImage;
private Context mContext;
    /**
     * Builds the account-setup screen and picks the initial view state from
     * the launching intent: config-file import, first-run setup, credential
     * change (an account username already exists), or recovery when a
     * background service found no account at all.
     */
    @Override
    protected void onCreate(Bundle icicle) {
        super.onCreate(icicle);
        requestWindowFeature(Window.FEATURE_NO_TITLE);
        setContentView(R.layout.account_setup);
        mContext = this;
        // dynamic views
        mInstructionText = (TextView) findViewById(R.id.instruction);
        mSubmitButton = (Button) findViewById(R.id.submit_button);
        mSubmitButton.setText(getString(R.string.submit));
        mSubmitButton.setOnClickListener(mSubmitListener);
        mUserText = (EditText) findViewById(R.id.edittext_username);
        mPwdText = (EditText) findViewById(R.id.edittext_password);
        mCenterImage = (ImageView) findViewById(R.id.center_image);
        mOfflineSetupButton = (Button) findViewById(R.id.offline_setup_button);
        // "offline setup" skips server verification and goes straight to
        // replacing any stale accounts.
        mOfflineSetupButton.setOnClickListener(new OnClickListener() {
            @Override
            public void onClick(View v) {
                removeOldAccounts();
            }
        });
        // Choose the initial state from the intent extras.
        boolean firstRun = getIntent().getBooleanExtra(INITIAL_SETUP, false);
        mImportFromConfig = getIntent().getBooleanExtra(USE_CONFIG_FILE, false);
        if (mImportFromConfig) {
            createView(LOADING);
            importFromConfigFile();
            removeOldAccounts();
        } else if (firstRun)
            createView(REQUEST_CREDENTIAL_SETUP);
        else if (NetworkUtils.getServerUsername() != null)
            createView(REQUEST_CREDENTIAL_CHANGE);
        else {
            // Launched from a Service that found no account
            UiUtils.toastAlert(App.getApp().getString(R.string.installation_error), App.getApp().getString(R.string.auth_no_account));
            createView(REQUEST_CREDENTIAL_SETUP);
        }
    }
private void importFromConfigFile() {
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(this);
mNewUser = prefs.getString(getString(R.string.key_username), getString(R.string.default_username));
String encPwd = prefs.getString(getString(R.string.key_password), getString(R.string.default_password));
if (encPwd.equalsIgnoreCase(getString(R.string.default_password)))
mNewPwd = encPwd;
else
mNewPwd = EncryptionUtil.decryptString(encPwd);
}
    /**
     * Configures the widgets for one of the screen states
     * (REQUEST_CREDENTIAL_CHANGE, REQUEST_CREDENTIAL_SETUP,
     * CREDENTIAL_ENTRY_ERROR, LOADING, FINISHED) and, where applicable,
     * advances {@code mStep} so the submit listener knows what the next
     * click means.
     *
     * @param view one of the view-state constants of this activity
     */
    private void createView(int view) {
        // if not loading, set appropriate buttons/text
        switch (view) {
        // changing credentials
        case REQUEST_CREDENTIAL_CHANGE:
            mStep = VERIFY_ENTRY;
            mInstructionText.setText(R.string.auth_server_verify_account);
            mUserText.setVisibility(View.VISIBLE);
            mCurrentUser = NetworkUtils.getServerUsername();
            mUserText.setText(mCurrentUser);
            mPwdText.setVisibility(View.VISIBLE);
            mCurrentPwd = NetworkUtils.getServerPassword();
            mSubmitButton.setVisibility(View.VISIBLE);
            mSubmitButton.setText(getString(R.string.submit));
            mOfflineSetupButton.setVisibility(View.GONE);
            mCenterImage.setVisibility(View.GONE);
            break;
        // setting up new credentials
        case REQUEST_CREDENTIAL_SETUP:
            mStep = ASK_NEW_ENTRY;
            mInstructionText.setText(R.string.auth_server_account_setup);
            mUserText.setVisibility(View.VISIBLE);
            mPwdText.setVisibility(View.VISIBLE);
            mSubmitButton.setVisibility(View.VISIBLE);
            mSubmitButton.setText(getString(R.string.submit));
            mOfflineSetupButton.setVisibility(View.GONE);
            mCenterImage.setVisibility(View.GONE);
            ((ProgressBar) findViewById(R.id.progress_wheel)).setVisibility(View.GONE);
            break;
        // server rejected the entered credentials: re-show the form
        // pre-filled with the failed entry and offer offline setup
        case CREDENTIAL_ENTRY_ERROR:
            mStep = ASK_NEW_ENTRY;
            mInstructionText.setText(getString(R.string.auth_server_error_login));
            mUserText.setVisibility(View.VISIBLE);
            mUserText.setText(mNewUser);
            mPwdText.setVisibility(View.VISIBLE);
            mPwdText.setText(mNewPwd);
            mSubmitButton.setVisibility(View.VISIBLE);
            mSubmitButton.setText(R.string.auth_try_again);
            mOfflineSetupButton.setVisibility(View.VISIBLE);
            mOfflineSetupButton.setText(R.string.auth_dont_verify);
            mCenterImage.setVisibility(View.INVISIBLE);
            ((ProgressBar) findViewById(R.id.progress_wheel)).setVisibility(View.GONE);
            break;
        // verification in progress: hide the form, show the spinner
        case LOADING:
            mInstructionText.setText(getString(R.string.auth_verifying_server_account));
            mUserText.setVisibility(View.GONE);
            mPwdText.setVisibility(View.GONE);
            mSubmitButton.setVisibility(View.GONE);
            mOfflineSetupButton.setVisibility(View.GONE);
            mCenterImage.setVisibility(View.GONE);
            ((ProgressBar) findViewById(R.id.progress_wheel)).setVisibility(View.VISIBLE);
            break;
        case FINISHED:
            mStep = FINISHED;
            mInstructionText.setText(getString(R.string.auth_server_setup_complete));
            mUserText.setVisibility(View.GONE);
            mPwdText.setVisibility(View.GONE);
            mSubmitButton.setVisibility(View.VISIBLE);
            mSubmitButton.setText(getString(R.string.finish));
            mOfflineSetupButton.setVisibility(View.GONE);
            mCenterImage.setVisibility(View.GONE);
            ((ProgressBar) findViewById(R.id.progress_wheel)).setVisibility(View.GONE);
            break;
        default:
            break;
        }
    }
    /**
     * Dispatches the submit button according to the current {@code mStep}:
     * verifies the existing credentials, submits new ones to the server, or
     * closes the activity when setup is finished. The password field is
     * cleared on every click so the entry is never left on screen.
     */
    private OnClickListener mSubmitListener = new OnClickListener() {
        @Override
        public void onClick(View v) {
            String userEntry = mUserText.getText().toString();
            String pwdEntry = mPwdText.getText().toString();
            mPwdText.setText("");
            // Empty entries short-circuit to the error branch (except when we
            // are already FINISHED and the fields are hidden anyway).
            if ((userEntry.equals("") || pwdEntry.equals("")) && (mStep != FINISHED))
                mStep = ENTRY_ERROR;
            switch (mStep) {
            case VERIFY_ENTRY:
                // Old credentials must match before new ones may be entered.
                if (userEntry.equals(mCurrentUser) && pwdEntry.equals(mCurrentPwd))
                    createView(REQUEST_CREDENTIAL_SETUP);
                else
                    UiUtils.toastAlert(mContext, getString(R.string.auth_error_title), getString(R.string.auth_server_verify_error));
                break;
            case ASK_NEW_ENTRY:
                if (isAcceptable(userEntry))
                    checkServerCredentials(userEntry, pwdEntry);
                else
                    UiUtils.toastAlert(mContext, getString(R.string.auth_error_title), getString((R.string.auth_invalid_username), mUserText.getText().toString()));
                break;
            case FINISHED:
                setResult(RESULT_OK);
                finish();
                break;
            case ENTRY_ERROR:
            default:
                UiUtils.toastAlert(mContext, getString(R.string.auth_error_title), getString(R.string.auth_empty_entry));
                break;
            }
        }
    };
// STEP 1:
private void checkServerCredentials(String username, String password) {
    // Remember the pending credentials, then show the progress view while an
    // asynchronous connectivity check validates them against the server.
    mNewUser = username;
    mNewPwd = password;
    createView(LOADING);
    final CheckConnectivityTask connectivityCheck = new CheckConnectivityTask();
    connectivityCheck.setServerCredentials(username, password);
    connectivityCheck.setSyncListener(this);
    connectivityCheck.execute(new SyncResult());
}
// STEP 2:
@Override
public void syncComplete(String result, SyncResult syncResult) {
    // Callback from CheckConnectivityTask with the verification outcome as a
    // stringified boolean.
    if (App.DEBUG) {
        Log.v(TAG, "Sync with Server Complete with result=" + result);
    }
    // parseBoolean avoids the Boolean autoboxing of Boolean.valueOf and is
    // equally null-safe (null -> false), preserving the accept-on-"true" logic.
    if (Boolean.parseBoolean(result)) {
        // Credentials verified: replace any existing account with the new one.
        removeOldAccounts();
    } else {
        createView(CREDENTIAL_ENTRY_ERROR);
    }
}
private static boolean isAcceptable(String userEntry) {
    // All usernames are currently accepted; the former numeric-range check
    // (0 <= value <= Integer.MAX_VALUE) was intentionally disabled.
    return true;
}
// STEP 3:
private void removeOldAccounts() {
    // Delete every existing app account; the AccountManager callback then
    // installs the replacement account once removal completes.
    createView(LOADING);
    NetworkUtils.resetServerCredentials();
    AccountManager am = AccountManager.get(this);
    Account[] accounts = am.getAccountsByType(getString(R.string.app_account_type));
    if (App.DEBUG) {
        Log.v(TAG, "about to remove old accounts number=" + accounts.length);
    }
    if (accounts.length > 0) {
        for (Account a : accounts) {
            // BUG FIX: remove the periodic sync of EACH account being deleted;
            // the original always passed accounts[0] regardless of the loop
            // variable, leaving other accounts' periodic syncs registered.
            ContentResolver.removePeriodicSync(a, App.getApp().getString(R.string.app_provider_authority), new Bundle());
            myFuture = am.removeAccount(a, myCallback, myHandler);
        }
    } else {
        // Nothing to remove: mark success and drive the callback manually.
        removedAccount = true;
        myCallback.run(null);
    }
}
// STEP 4:
// Handler used to deliver the AccountManager removal callback on this thread.
private final Handler myHandler = new Handler();
// Pending result of the most recent removeAccount() call (null until requested).
private AccountManagerFuture<Boolean> myFuture = null;
// True once the old account has been removed (or none existed to remove).
private boolean removedAccount = false;
// Invoked when an account removal finishes (or directly with null when there
// was nothing to remove); on success it creates and wires up the new account.
private AccountManagerCallback<Boolean> myCallback = new AccountManagerCallback<Boolean>() {
    @Override
    public void run(final AccountManagerFuture<Boolean> amf) {
        if (amf != null) {
            try {
                // BUG FIX: read the future that was actually delivered to this
                // callback; the myFuture field is overwritten on every loop
                // iteration in removeOldAccounts() and may not match amf.
                removedAccount = amf.getResult();
            } catch (Exception e) {
                // Log with context instead of printStackTrace(); removedAccount
                // keeps its previous value and the error path below fires.
                Log.e(TAG, "Failed to read account-removal result", e);
            }
        }
        if (removedAccount) {
            if (addAccount(mNewUser, mNewPwd)) {
                if (App.DEBUG) {
                    Log.v(TAG, "Account was successfully created with user: " + mNewUser);
                }
                setupAccountSync(mNewUser);
                finishAccountSetup(mNewUser);
            } else {
                Log.e(TAG, "Account Setup Failed");
                createView(CREDENTIAL_ENTRY_ERROR);
            }
        } else {
            Log.e(TAG, "Error: Could not delete old account. Please setup new account manually.");
            createView(CREDENTIAL_ENTRY_ERROR);
        }
    }
};
// STEP 5:
/**
 * Creates the app account with an encrypted password and mirrors the
 * credentials into shared preferences.
 *
 * @param username account name to create.
 * @param password plain-text password; stored only in encrypted form.
 * @return true if the AccountManager actually created the account.
 */
private boolean addAccount(String username, String password) {
    final Account account = new Account(username, getString(R.string.app_account_type));
    String encPwd = EncryptionUtil.encryptString(password);
    AccountManager am = AccountManager.get(this);
    boolean accountCreated = am.addAccountExplicitly(account, encPwd, null);
    // TODO ?Check? is this necessary... does Android ever delete credentials?
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(App.getApp());
    // Batch both writes into a single editor and a single commit() instead of
    // two separate commit() round-trips to disk.
    prefs.edit()
            .putString(getString(R.string.key_username), username)
            .putString(getString(R.string.key_password), encPwd)
            .commit();
    return accountCreated;
}
// STEP 6:
/**
 * Enables automatic + periodic sync for the freshly created account, using the
 * refresh interval stored in preferences (seconds).
 */
private void setupAccountSync(String username) {
    Account account = new Account(username, getString(R.string.app_account_type));
    String authority = getString(R.string.app_provider_authority);
    SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(App.getApp());
    // Set up sync (IF global settings background data & auto-sync)
    ContentResolver.setIsSyncable(account, authority, 1);
    ContentResolver.setSyncAutomatically(account, authority, true);
    String interval = prefs.getString(getString(R.string.key_max_refresh_seconds), getString(R.string.default_max_refresh_seconds));
    long seconds;
    try {
        // parseLong avoids Integer boxing and matches addPeriodicSync's long
        // pollFrequency parameter.
        seconds = Long.parseLong(interval);
    } catch (NumberFormatException e) {
        // Fall back to the shipped default instead of crashing on a corrupt
        // preference value.
        seconds = Long.parseLong(getString(R.string.default_max_refresh_seconds));
    }
    ContentResolver.addPeriodicSync(account, authority, new Bundle(), seconds);
    if (App.DEBUG) Log.v(TAG, "New Account Sync interval is=" + interval);
}
// STEP 7:
/**
 * Reports the new account back to the Android account manager (when launched
 * from it) and ends or advances the activity.
 */
private void finishAccountSetup(String username) {
    // Pass account back to account manager
    Bundle extras = getIntent().getExtras();
    boolean launchedFromAccountMgr = getIntent().getBooleanExtra(LAUNCHED_FROM_ACCT_MGR, false);
    if (extras != null && launchedFromAccountMgr) {
        if (App.DEBUG) {
            Log.v(TAG, "launched from the account manager...");
        }
        AccountAuthenticatorResponse response = extras.getParcelable(AccountManager.KEY_ACCOUNT_AUTHENTICATOR_RESPONSE);
        // BUG FIX: the response extra can be absent even when the launch flag is
        // set; guard against an NPE before delivering the result.
        if (response != null) {
            Bundle result = new Bundle();
            result.putString(AccountManager.KEY_ACCOUNT_NAME, username);
            result.putString(AccountManager.KEY_ACCOUNT_TYPE, getString(R.string.app_account_type));
            response.onResult(result);
        } else {
            setResult(RESULT_OK);
        }
    } else {
        if (App.DEBUG) {
            Log.v(TAG, "not launched from the account manager");
        }
        setResult(RESULT_OK);
    }
    // End the Activity: toast-and-finish when imported from config, otherwise
    // show the FINISHED confirmation screen.
    if (mImportFromConfig) {
        UiUtils.toastMessage(mContext, null, getString(R.string.auth_server_setup_complete), 0, Gravity.CENTER, Toast.LENGTH_SHORT);
        finish();
    } else {
        createView(FINISHED);
    }
}
// Bundle params = new Bundle();
// params.putBoolean(ContentResolver.SYNC_EXTRAS_EXPEDITED, false);
// params.putBoolean(ContentResolver.SYNC_EXTRAS_DO_NOT_RETRY,
// false);
// params.putBoolean(ContentResolver.SYNC_EXTRAS_MANUAL, true);
// params.putBoolean(ContentResolver.SYNC_EXTRAS_FORCE, true);
// ContentResolver.requestSync(account, authority, params);
}
| |
/*
* Copyright 2020 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package androidx.media3.exoplayer;
import static androidx.media3.exoplayer.MetadataRetriever.retrieveMetadata;
import static androidx.media3.extractor.metadata.mp4.MdtaMetadataEntry.KEY_ANDROID_CAPTURE_FPS;
import static com.google.common.truth.Truth.assertThat;
import static org.junit.Assert.assertThrows;
import android.content.Context;
import android.net.Uri;
import androidx.media3.common.C;
import androidx.media3.common.MediaItem;
import androidx.media3.common.MimeTypes;
import androidx.media3.common.TrackGroupArray;
import androidx.media3.extractor.metadata.mp4.MdtaMetadataEntry;
import androidx.media3.extractor.metadata.mp4.MotionPhotoMetadata;
import androidx.media3.extractor.metadata.mp4.SlowMotionData;
import androidx.media3.extractor.metadata.mp4.SmtaMetadataEntry;
import androidx.media3.test.utils.FakeClock;
import androidx.test.core.app.ApplicationProvider;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.TimeUnit;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.robolectric.shadows.ShadowLooper;
/** Tests for {@link MetadataRetriever}. */
@RunWith(AndroidJUnit4.class)
public class MetadataRetrieverTest {
// Maximum time to wait for each metadata future to resolve.
private static final long TEST_TIMEOUT_SEC = 10;
private Context context;
// Auto-advancing fake clock so internal timeouts elapse without real waiting.
private FakeClock clock;
@Before
public void setUp() throws Exception {
context = ApplicationProvider.getApplicationContext();
clock = new FakeClock(/* isAutoAdvancing= */ true);
}
// Retrieving from a single mp4 asset yields one video and one audio track group.
@Test
public void retrieveMetadata_singleMediaItem_outputsExpectedMetadata() throws Exception {
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"));
ListenableFuture<TrackGroupArray> trackGroupsFuture =
retrieveMetadata(context, mediaItem, clock);
// Drain the main looper so the retriever's handler messages actually run.
ShadowLooper.idleMainLooper();
TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(trackGroups.length).isEqualTo(2);
// Video group.
assertThat(trackGroups.get(0).length).isEqualTo(1);
assertThat(trackGroups.get(0).getFormat(0).sampleMimeType).isEqualTo(MimeTypes.VIDEO_H264);
// Audio group.
assertThat(trackGroups.get(1).length).isEqualTo(1);
assertThat(trackGroups.get(1).getFormat(0).sampleMimeType).isEqualTo(MimeTypes.AUDIO_AAC);
}
// Two concurrent retrievals are independent: each future reports only its own
// media item's track groups.
@Test
public void retrieveMetadata_multipleMediaItems_outputsExpectedMetadata() throws Exception {
MediaItem mediaItem1 =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample.mp4"));
MediaItem mediaItem2 =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp3/bear-id3.mp3"));
ListenableFuture<TrackGroupArray> trackGroupsFuture1 =
retrieveMetadata(context, mediaItem1, clock);
ListenableFuture<TrackGroupArray> trackGroupsFuture2 =
retrieveMetadata(context, mediaItem2, clock);
// Drain the main looper so both retrievals progress to completion.
ShadowLooper.idleMainLooper();
TrackGroupArray trackGroups1 = trackGroupsFuture1.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
TrackGroupArray trackGroups2 = trackGroupsFuture2.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
// First track group.
assertThat(trackGroups1.length).isEqualTo(2);
// First track group - Video group.
assertThat(trackGroups1.get(0).length).isEqualTo(1);
assertThat(trackGroups1.get(0).getFormat(0).sampleMimeType).isEqualTo(MimeTypes.VIDEO_H264);
// First track group - Audio group.
assertThat(trackGroups1.get(1).length).isEqualTo(1);
assertThat(trackGroups1.get(1).getFormat(0).sampleMimeType).isEqualTo(MimeTypes.AUDIO_AAC);
// Second track group.
assertThat(trackGroups2.length).isEqualTo(1);
// Second track group - Audio group.
assertThat(trackGroups2.get(0).length).isEqualTo(1);
assertThat(trackGroups2.get(0).getFormat(0).sampleMimeType).isEqualTo(MimeTypes.AUDIO_MPEG);
}
// A HEIC motion photo exposes its embedded video location via
// MotionPhotoMetadata on the single image track.
@Test
public void retrieveMetadata_heicMotionPhoto_outputsExpectedMetadata() throws Exception {
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_MP.heic"));
// Expected byte offsets/sizes are fixed properties of the checked-in asset.
MotionPhotoMetadata expectedMotionPhotoMetadata =
new MotionPhotoMetadata(
/* photoStartPosition= */ 0,
/* photoSize= */ 28_853,
/* photoPresentationTimestampUs= */ C.TIME_UNSET,
/* videoStartPosition= */ 28_869,
/* videoSize= */ 28_803);
ListenableFuture<TrackGroupArray> trackGroupsFuture =
retrieveMetadata(context, mediaItem, clock);
ShadowLooper.idleMainLooper();
TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(trackGroups.length).isEqualTo(1);
assertThat(trackGroups.get(0).length).isEqualTo(1);
assertThat(trackGroups.get(0).getFormat(0).metadata.length()).isEqualTo(1);
assertThat(trackGroups.get(0).getFormat(0).metadata.get(0))
.isEqualTo(expectedMotionPhotoMetadata);
}
// A plain HEIC still photo has a single track with no metadata at all.
@Test
public void retrieveMetadata_heicStillPhoto_outputsEmptyMetadata() throws Exception {
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_still_photo.heic"));
ListenableFuture<TrackGroupArray> trackGroupsFuture =
retrieveMetadata(context, mediaItem, clock);
ShadowLooper.idleMainLooper();
TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(trackGroups.length).isEqualTo(1);
assertThat(trackGroups.get(0).length).isEqualTo(1);
assertThat(trackGroups.get(0).getFormat(0).metadata).isNull();
}
// A Samsung SEF slow-motion capture carries SMTA, SlowMotionData and (on the
// video track) an mdta capture-fps entry.
@Test
public void retrieveMetadata_sefSlowMotion_outputsExpectedMetadata() throws Exception {
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/mp4/sample_sef_slow_motion.mp4"));
SmtaMetadataEntry expectedSmtaEntry =
new SmtaMetadataEntry(/* captureFrameRate= */ 240, /* svcTemporalLayerCount= */ 4);
List<SlowMotionData.Segment> segments = new ArrayList<>();
segments.add(
new SlowMotionData.Segment(
/* startTimeMs= */ 88, /* endTimeMs= */ 879, /* speedDivisor= */ 2));
segments.add(
new SlowMotionData.Segment(
/* startTimeMs= */ 1255, /* endTimeMs= */ 1970, /* speedDivisor= */ 8));
SlowMotionData expectedSlowMotionData = new SlowMotionData(segments);
// The mdta value bytes encode the capture fps; fixed for this asset.
MdtaMetadataEntry expectedMdtaEntry =
new MdtaMetadataEntry(
KEY_ANDROID_CAPTURE_FPS,
/* value= */ new byte[] {67, 112, 0, 0},
/* localeIndicator= */ 0,
/* typeIndicator= */ 23);
ListenableFuture<TrackGroupArray> trackGroupsFuture =
retrieveMetadata(context, mediaItem, clock);
ShadowLooper.idleMainLooper();
TrackGroupArray trackGroups = trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS);
assertThat(trackGroups.length).isEqualTo(2); // Video and audio
// Audio
assertThat(trackGroups.get(0).getFormat(0).metadata.length()).isEqualTo(2);
assertThat(trackGroups.get(0).getFormat(0).metadata.get(0)).isEqualTo(expectedSmtaEntry);
assertThat(trackGroups.get(0).getFormat(0).metadata.get(1)).isEqualTo(expectedSlowMotionData);
// Video
assertThat(trackGroups.get(1).getFormat(0).metadata.length()).isEqualTo(3);
assertThat(trackGroups.get(1).getFormat(0).metadata.get(0)).isEqualTo(expectedMdtaEntry);
assertThat(trackGroups.get(1).getFormat(0).metadata.get(1)).isEqualTo(expectedSmtaEntry);
assertThat(trackGroups.get(1).getFormat(0).metadata.get(2)).isEqualTo(expectedSlowMotionData);
}
// A nonexistent asset surfaces as an ExecutionException from the future.
@Test
public void retrieveMetadata_invalidMediaItem_throwsError() {
MediaItem mediaItem =
MediaItem.fromUri(Uri.parse("asset://android_asset/media/does_not_exist"));
ListenableFuture<TrackGroupArray> trackGroupsFuture =
retrieveMetadata(context, mediaItem, clock);
ShadowLooper.idleMainLooper();
assertThrows(
ExecutionException.class, () -> trackGroupsFuture.get(TEST_TIMEOUT_SEC, TimeUnit.SECONDS));
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.painless;
import org.apache.lucene.search.Scorer;
import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.ScriptException;
import org.elasticsearch.search.lookup.LeafDocLookup;
import org.elasticsearch.search.lookup.LeafSearchLookup;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
* ScriptImpl can be used as either an {@link ExecutableScript} or a {@link LeafSearchScript}
* to run a previously compiled Painless script.
*/
final class ScriptImpl implements ExecutableScript, LeafSearchScript {
/**
* The Painless Executable script that can be run.
*/
private final Executable executable;
/**
* A map that can be used to access input parameters at run-time.
*/
private final Map<String, Object> variables;
/**
* The lookup is used to access search field values at run-time.
*/
private final LeafSearchLookup lookup;
/**
* the 'doc' object accessed by the script, if available.
*/
private final LeafDocLookup doc;
/**
* Current scorer being used
* @see #setScorer(Scorer)
*/
private Scorer scorer;
/**
* Current _value for aggregation
* @see #setNextAggregationValue(Object)
*/
private Object aggregationValue;
/**
* Creates a ScriptImpl for the a previously compiled Painless script.
* @param executable The previously compiled Painless script.
* @param vars The initial variables to run the script with.
* @param lookup The lookup to allow search fields to be available if this is run as a search script.
*/
ScriptImpl(final Executable executable, final Map<String, Object> vars, final LeafSearchLookup lookup) {
this.executable = executable;
this.lookup = lookup;
this.variables = new HashMap<>();
if (vars != null) {
variables.putAll(vars);
}
if (lookup != null) {
variables.putAll(lookup.asMap());
doc = lookup.doc();
} else {
doc = null;
}
}
/**
* Set a variable for the script to be run against.
* @param name The variable name.
* @param value The variable value.
*/
@Override
public void setNextVar(final String name, final Object value) {
variables.put(name, value);
}
/**
* Set the next aggregation value.
* @param value Per-document value, typically a String, Long, or Double.
*/
@Override
public void setNextAggregationValue(Object value) {
this.aggregationValue = value;
}
/**
* Run the script.
* @return The script result.
*/
@Override
public Object run() {
try {
return executable.execute(variables, scorer, doc, aggregationValue);
} catch (PainlessError | BootstrapMethodError | Exception t) {
throw convertToScriptException(t);
}
}
/**
* Adds stack trace and other useful information to exceptiosn thrown
* from a Painless script.
* @param t The throwable to build an exception around.
* @return The generated ScriptException.
*/
private ScriptException convertToScriptException(Throwable t) {
// create a script stack: this is just the script portion
List<String> scriptStack = new ArrayList<>();
for (StackTraceElement element : t.getStackTrace()) {
if (WriterConstants.CLASS_NAME.equals(element.getClassName())) {
// found the script portion
int offset = element.getLineNumber();
if (offset == -1) {
scriptStack.add("<<< unknown portion of script >>>");
} else {
offset--; // offset is 1 based, line numbers must be!
int startOffset = executable.getPreviousStatement(offset);
if (startOffset == -1) {
assert false; // should never happen unless we hit exc in ctor prologue...
startOffset = 0;
}
int endOffset = executable.getNextStatement(startOffset);
if (endOffset == -1) {
endOffset = executable.getSource().length();
}
// TODO: if this is still too long, truncate and use ellipses
String snippet = executable.getSource().substring(startOffset, endOffset);
scriptStack.add(snippet);
StringBuilder pointer = new StringBuilder();
for (int i = startOffset; i < offset; i++) {
pointer.append(' ');
}
pointer.append("^---- HERE");
scriptStack.add(pointer.toString());
}
break;
// but filter our own internal stacks (e.g. indy bootstrap)
} else if (!shouldFilter(element)) {
scriptStack.add(element.toString());
}
}
// build a name for the script:
final String name;
if (PainlessScriptEngineService.INLINE_NAME.equals(executable.getName())) {
name = executable.getSource();
} else {
name = executable.getName();
}
throw new ScriptException("runtime error", t, scriptStack, name, PainlessScriptEngineService.NAME);
}
/** returns true for methods that are part of the runtime */
private static boolean shouldFilter(StackTraceElement element) {
return element.getClassName().startsWith("org.elasticsearch.painless.") ||
element.getClassName().startsWith("java.lang.invoke.") ||
element.getClassName().startsWith("sun.invoke.");
}
/**
* Run the script.
* @return The script result as a double.
*/
@Override
public double runAsDouble() {
return ((Number)run()).doubleValue();
}
/**
* Run the script.
* @return The script result as a long.
*/
@Override
public long runAsLong() {
return ((Number)run()).longValue();
}
/**
* Sets the scorer to be accessible within a script.
* @param scorer The scorer used for a search.
*/
@Override
public void setScorer(final Scorer scorer) {
this.scorer = scorer;
}
/**
* Sets the current document.
* @param doc The current document.
*/
@Override
public void setDocument(final int doc) {
if (lookup != null) {
lookup.setDocument(doc);
}
}
/**
* Sets the current source.
* @param source The current source.
*/
@Override
public void setSource(final Map<String, Object> source) {
if (lookup != null) {
lookup.source().setSource(source);
}
}
}
| |
package com.codepath.apps.restclienttemplate.activities;
import android.app.SearchManager;
import android.content.Context;
import android.content.Intent;
import android.graphics.Color;
import android.support.design.widget.FloatingActionButton;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v4.view.MenuItemCompat;
import android.support.v4.view.PagerAdapter;
import android.support.v4.view.ViewPager;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.support.v7.widget.SearchView;
import android.support.v7.widget.Toolbar;
import android.util.Log;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.EditText;
import android.widget.ImageView;
import android.widget.Toast;
import com.astuetz.PagerSlidingTabStrip;
import com.bumptech.glide.Glide;
import com.codepath.apps.restclienttemplate.R;
import com.codepath.apps.restclienttemplate.TwitterApp;
import com.codepath.apps.restclienttemplate.TwitterClient;
import com.codepath.apps.restclienttemplate.fragments.ComposeFragment;
import com.codepath.apps.restclienttemplate.fragments.DirectMessagesFragment;
import com.codepath.apps.restclienttemplate.fragments.MentionsFragment;
import com.codepath.apps.restclienttemplate.fragments.TimelineFragment;
import com.codepath.apps.restclienttemplate.interfaces.IDataCallback;
import com.codepath.apps.restclienttemplate.interfaces.ISearch;
import com.codepath.apps.restclienttemplate.models.Tweet;
import com.codepath.apps.restclienttemplate.models.User;
import com.codepath.apps.restclienttemplate.utils.SmartFragmentStatePagerAdapter;
import com.codepath.apps.restclienttemplate.utils.Utils;
import com.facebook.stetho.Stetho;
import com.facebook.stetho.okhttp3.StethoInterceptor;
import com.loopj.android.http.JsonHttpResponseHandler;
import com.raizlabs.android.dbflow.sql.language.Select;
import org.json.JSONObject;
import org.parceler.Parcels;
import java.util.ArrayList;
import java.util.List;
import butterknife.Bind;
import butterknife.ButterKnife;
import cz.msebera.android.httpclient.Header;
import okhttp3.OkHttpClient;
import static android.R.attr.data;
import static android.icu.lang.UCharacter.GraphemeClusterBreak.T;
import static com.codepath.apps.restclienttemplate.R.id.fabCompose;
import static com.codepath.apps.restclienttemplate.R.id.ivProfileImage;
import static com.codepath.apps.restclienttemplate.R.id.ivProfilePhoto;
import static com.codepath.apps.restclienttemplate.R.id.swipeContainer;
import static com.codepath.apps.restclienttemplate.R.string.tweet;
import static java.util.Collections.addAll;
/**
 * Main timeline screen: hosts the Home and Mentions fragments in a ViewPager
 * with a sliding tab strip, a compose FAB, and a tweet-search action view.
 */
public class TimelineActivity extends AppCompatActivity {

    @Bind(R.id.viewPager)
    ViewPager viewPager;
    @Bind(R.id.toolbar)
    Toolbar toolbar;
    public static String loggedUserScreenName;
    TweetPagerAdapter pagerAdapter;
    @Bind(R.id.tabs)
    PagerSlidingTabStrip tabStrip;
    @Bind(R.id.fabCompose)
    FloatingActionButton fabCompose;
    private TwitterClient client;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Stetho wiring enables Chrome DevTools inspection of network/database
        // traffic in debug builds.
        Stetho.initializeWithDefaults(this);
        OkHttpClient clientStetho = new OkHttpClient.Builder().addNetworkInterceptor(new StethoInterceptor()).build();
        setContentView(R.layout.activity_timeline);
        ButterKnife.bind(this);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayShowHomeEnabled(true);
        getSupportActionBar().setLogo(R.drawable.ic_logo);
        getSupportActionBar().setDisplayUseLogoEnabled(true);
        client = TwitterApp.getRestClient();
        // Wire the Home/Mentions pager to its sliding tab strip.
        pagerAdapter = new TweetPagerAdapter(getSupportFragmentManager());
        viewPager.setAdapter(pagerAdapter);
        tabStrip.setViewPager(viewPager);
        // The FAB opens the compose dialog fragment.
        fabCompose.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                ComposeFragment composeFragment = new ComposeFragment();
                FragmentManager fm = getSupportFragmentManager();
                composeFragment.show(fm, "new tweet");
            }
        });
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        getMenuInflater().inflate(R.menu.menu_timeline, menu);
        MenuItem searchItem = menu.findItem(R.id.action_search);
        SearchManager searchManager = (SearchManager) getSystemService(Context.SEARCH_SERVICE);
        final SearchView searchView = (SearchView) menu.findItem(R.id.action_search).getActionView();
        searchView.setSearchableInfo(searchManager.getSearchableInfo(getComponentName()));
        searchView.setIconifiedByDefault(false); // Do not iconify the widget; expand it by default
        // Customize searchview text and hint colors.
        int searchEditId = android.support.v7.appcompat.R.id.search_src_text;
        EditText et = (EditText) searchView.findViewById(searchEditId);
        et.setTextColor(Color.BLACK);
        et.setHintTextColor(Color.GRAY);
        et.setHint("Search tweets");
        searchView.setOnQueryTextListener(new SearchView.OnQueryTextListener() {
            @Override
            public boolean onQueryTextSubmit(String query) {
                // Hand the query off to the dedicated search screen.
                searchView.clearFocus();
                Log.i("query", query);
                Intent intent = new Intent(getApplicationContext(), SearchActivity.class);
                intent.putExtra("q", query);
                startActivity(intent);
                return true;
            }
            @Override
            public boolean onQueryTextChange(String newText) {
                return false;
            }
        });
        return super.onCreateOptionsMenu(menu);
    }

    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        int id = item.getItemId();
        switch (id) {
            case R.id.logout:
                // Drop the OAuth token and return to the login screen.
                TwitterApp.getRestClient().clearAccessToken();
                Intent i = new Intent(this, LoginActivity.class);
                startActivity(i);
                // BUG FIX: terminate the case explicitly rather than falling
                // through to the end of the switch.
                break;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data) {
        // BUG FIX: forward to super and guard against a null data intent
        // (delivered when the child activity is cancelled) before reading extras.
        super.onActivityResult(requestCode, resultCode, data);
        // NOTE(review): success is compared against ComposeActivity.REQUEST_CODE;
        // this only works if ComposeActivity calls setResult(REQUEST_CODE, ...).
        // Confirm whether this should be RESULT_OK.
        if (requestCode == ComposeActivity.REQUEST_CODE && resultCode == ComposeActivity.REQUEST_CODE && data != null) {
            Tweet tweet = data.getParcelableExtra("tweet");
            TimelineFragment timelineFragment = (TimelineFragment) pagerAdapter.getRegisteredFragment(0);
            timelineFragment.add(tweet);
        }
    }

    /** Pager adapter that defines the order and titles of the timeline tabs. */
    public class TweetPagerAdapter extends SmartFragmentStatePagerAdapter {
        private String tabTitles[] = {"Home", "@ Mentions"};

        public TweetPagerAdapter(FragmentManager fragmentManager) {
            super(fragmentManager);
        }

        @Override
        public CharSequence getPageTitle(int position) {return tabTitles[position];}

        @Override
        public Fragment getItem(int position) {
            switch (position) {
                case 0:
                    return new TimelineFragment();
                case 1:
                    return new MentionsFragment();
                default:
                    return null;
            }
        }

        @Override
        public int getCount() {
            return tabTitles.length;
        }
    }

    // Kept public with a View parameter: may be referenced from XML android:onClick.
    public void onTweet(View view) {
        FragmentManager fm = getSupportFragmentManager();
        ComposeFragment composeFragment = ComposeFragment.newInstance();
        composeFragment.show(fm, "fragment_edit_name");
    }

    // Delivers a newly composed tweet to the Home timeline fragment.
    public void onTweet(Tweet tweet) {
        ((ComposeFragment.ComposeTweetFragmentListener) pagerAdapter.getRegisteredFragment(0))
                .onTweet(tweet);
    }

    // Kept public with a View parameter: may be referenced from XML android:onClick.
    public void composeNewTweet(View view) {
        ComposeFragment composeFragment = new ComposeFragment();
        FragmentManager fm = getSupportFragmentManager();
        composeFragment.show(fm, "test");
    }
}
/*client = TwitterApp.getRestClient();
mTweets = new ArrayList<>();
mTweetAdapter = new TweetAdapter(mTweets);
mRecycler.setLayoutManager(new LinearLayoutManager(this));
mRecycler.setAdapter(mTweetAdapter);
populateTimeline();*/
/*private void populateTimeline() {
client.getHomeTimeline(new JsonHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Header[] headers, JSONObject response) {
Log.d("TwitterClient", response.toString());
}
@Override
public void onSuccess(int statusCode, Header[] headers, JSONArray response) {
Log.d("TwitterClient", response.toString());
for (int i=0; i<response.length(); i++){
Tweet tweet = null;
try {
tweet = Tweet.fromJSON(response.getJSONObject(i));
mTweets.add(tweet);
mTweetAdapter.notifyItemInserted(mTweets.size()-1);
} catch (JSONException e) {
e.printStackTrace();
}
}
}
@Override
public void onFailure(int statusCode, Header[] headers, String responseString, Throwable throwable) {
Log.d("TwitterClient", responseString);
throwable.printStackTrace();
}
@Override
public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONObject errorResponse) {
Log.d("TwitterClient", errorResponse.toString());
throwable.printStackTrace();
}
@Override
public void onFailure(int statusCode, Header[] headers, Throwable throwable, JSONArray errorResponse) {
Log.d("TwitterClient", errorResponse.toString());
throwable.printStackTrace();
}
});
}*/
| |
package org.mapdb;
import org.junit.After;
import org.junit.Assert;
import org.junit.Ignore;
import org.junit.Test;
import java.io.File;
import java.io.IOError;
import java.io.IOException;
import java.util.*;
import static org.junit.Assert.*;
import static org.mapdb.DataIO.*;
import static org.mapdb.StoreDirect.*;
@SuppressWarnings({"rawtypes","unchecked"})
public class StoreDirectTest <E extends StoreDirect> extends EngineTest<E>{
// Plain StoreDirect has no write-ahead log, so rollback is unsupported here.
@Override boolean canRollback(){return false;}
File f = TT.tempDbFile();
@After
public void deleteFile(){
    // Close the engine if a test left it open, then remove the temp db file
    // and any sibling files sharing its name prefix (e.g. companion files).
    if (e != null && !e.isClosed()) {
        e.close();
        e = null;
    }
    if (f == null) {
        return;
    }
    f.delete();
    String name = f.getName();
    // BUG FIX: File.listFiles() returns null when the parent directory no
    // longer exists or an I/O error occurs; guard to avoid an NPE in teardown.
    File parent = f.getParentFile();
    File[] siblings = parent == null ? null : parent.listFiles();
    if (siblings != null) {
        for (File f2 : siblings) {
            if (f2.getName().startsWith(name)) {
                f2.delete();
            }
        }
    }
}
// static final long FREE_RECID_STACK = StoreDirect.IO_FREE_RECID+32;
@Override protected E openEngine() {
    // Build a StoreDirect over the temp file and initialise it before handing
    // it to the shared EngineTest harness.
    final StoreDirect store = new StoreDirect(f.getPath());
    store.init();
    return (E) store;
}
// int countIndexRecords(){
// int ret = 0;
// for(int pos = StoreDirect.IO_USER_START; pos<e.volSize; pos+=8){
// long val = e.vol.getLong(pos);
// if(val!=0 && val != StoreDirect.MASK_ARCHIVE
// && (val&StoreDirect.MUNUSED)==0) {
// ret++; //TODO proper check for non zero offset and size
// }
// }
// return ret;
// }
//
//
// int countIndexPrealloc(){
// int ret = 0;
// for(int pos = (int) (StoreDirect.IO_USER_START+Engine.RECID_FIRST*8); pos<e.physSize; pos+=8){
// long val = e.vol.getLong(pos);
// if((val&StoreDirect.MUNUSED)!=0){
// ret++; //TODO check for zero offset and zero size
// }
// }
// return ret;
// }
//
//
// List<Long> getLongStack(long ioRecid){
//
// ArrayList<Long> ret =new ArrayList<Long>();
//
// long pagePhysid = e.vol.getLong(ioRecid) & StoreDirect.MOFFSET;
// long pageOffset = e.vol.getLong(ioRecid) >>>48;
//
//
// while(pagePhysid!=0){
//
// while(pageOffset>=8){
// //System.out.println(pagePhysid + " - "+pageOffset);
// final Long l = e.vol.getSixLong(pagePhysid + pageOffset);
// pageOffset-=6;
// ret.add(l);
// }
// //System.out.println(ret);
// //read location of previous page
// pagePhysid = e.vol.getLong(pagePhysid) & StoreDirect.MOFFSET;
// pageOffset = (e.vol.getLong(pagePhysid) >>>48) - 6;
// }
//
// return ret;
// }
//
//
// @Test
// public void phys_append_alloc(){
// e.structuralLock.lock();
// long[] ret = e.physAllocate(100,true,false);
// long expected = 100L<<48 | 16L;
// assertTrue(Arrays.equals(new long[]{expected}, ret);
// }
//
// @Test
// public void phys_append_alloc_link2(){
// e.structuralLock.lock();
// long[] ret = e.physAllocate(100 + MAX_REC_SIZE,true,false);
// long exp1 = MLINKED |((long)MAX_REC_SIZE)<<48 | 16L;
// long exp2 = 108L<<48 | (16L+MAX_REC_SIZE+1);
// assertTrue(Arrays.equals(new long[]{exp1, exp2}, ret);
// }
//
// @Test
// public void phys_append_alloc_link3(){
// e.structuralLock.lock();
// long[] ret = e.physAllocate(100 + MAX_REC_SIZE*2,true,false);
// long exp1 = MLINKED | ((long)MAX_REC_SIZE)<<48 | 16L;
// long exp2 = MLINKED | ((long)MAX_REC_SIZE)<<48 | (16L+MAX_REC_SIZE+1);
// long exp3 = ((long)116)<<48 | (16L+MAX_REC_SIZE*2+2);
//
// assertTrue(Arrays.equals(new long[]{exp1, exp2, exp3}, ret);
// }
//
// @Test public void second_rec_pos_round_to_16(){
// e.structuralLock.lock();
// long[] ret= e.physAllocate(1,true,false);
// assertTrue(Arrays.equals(new long[]{1L<<48|16L},ret);
// ret= e.physAllocate(1,true,false);
// assertTrue(Arrays.equals(new long[]{1L<<48|32L},ret);
//
// }
//
//
// @Test public void test_index_record_delete(){
// long recid = e.put(1000L, Serializer.LONG);
// e.commit();
// assertEquals(1, countIndexRecords());
// assertEquals(0, countIndexPrealloc());
// e.delete(recid, Serializer.LONG);
// e.commit();
// assertEquals(0, countIndexRecords());
// assertEquals(1, countIndexPrealloc());
// e.structuralLock.lock();
// assertEquals(recid*8 + StoreDirect.IO_USER_START + 8, e.freeIoRecidTake(true));
// }
//
//
// @Test public void test_index_record_delete_COMPACT(){
// long recid = e.put(1000L, Serializer.LONG);
// e.commit();
// assertEquals(1, countIndexRecords());
// e.delete(recid, Serializer.ILLEGAL_ACCESS);
// e.commit();
// assertEquals(0, countIndexRecords());
// assertEquals(1, countIndexPrealloc());
// e.structuralLock.lock();
// assertEquals(recid*8 +8+ StoreDirect.IO_USER_START, e.freeIoRecidTake(true));
// }
//
// @Test public void test_size2IoList(){
// long old= StoreDirect.IO_FREE_RECID;
// for(int size=1;size<= StoreDirect.MAX_REC_SIZE;size++){
//
// long ioListRecid = size2ListIoRecid(size);
// assertTrue(ioListRecid> StoreDirect.IO_FREE_RECID);
// assertTrue(ioListRecid< StoreDirect.IO_USER_START);
//
// assertEquals(ioListRecid,old+(size%16==1?8:0));
//
// old=ioListRecid;
// }
// }
//
//
//
// @Test public void test_index_record_delete_and_reusef(){
// long recid = e.put(1000L, Serializer.LONG);
// e.commit();
// assertEquals(1, countIndexRecords());
// assertEquals(0, countIndexPrealloc());
// assertEquals(RECID_LAST_RESERVED +1, recid);
// e.delete(recid,Serializer.LONG);
// e.commit();
// assertEquals(0, countIndexRecords());
// assertEquals(1, countIndexPrealloc());
// long recid2 = e.put(1000L, Serializer.LONG);
// e.commit();
// //test that previously deleted index slot was reused
// assertEquals(recid+1, recid2);
// assertEquals(1, countIndexRecords());
// assertEquals(1, countIndexPrealloc());
// assertTrue(0!=e.vol.getLong(recid*8+ StoreDirect.IO_USER_START));
// }
//
//
//
//
// @Test public void test_index_record_delete_and_reusef_COMPACT(){
// long recid = e.put(1000L, Serializer.LONG);
// e.commit();
// assertEquals(1, countIndexRecords());
// assertEquals(RECID_LAST_RESERVED +1, recid);
// e.delete(recid, Serializer.LONG);
// e.commit();
// e.compact();
// assertEquals(0, countIndexRecords());
// long recid2 = e.put(1000L, Serializer.LONG);
// e.commit();
// //test that previously deleted index slot was reused
// assertEquals(recid, recid2);
// assertEquals(1, countIndexRecords());
// assertTrue(0 != e.vol.getLong(recid * 8 + StoreDirect.IO_USER_START));
// }
//
//
// @Test public void test_index_record_delete_and_reuse_large(){
// final long MAX = 10;
//
// List<Long> recids= new ArrayList<Long>();
// for(int i = 0;i<MAX;i++){
// recids.add(e.put(0L, Serializer.LONG));
// }
//
// for(long recid:recids){
// e.delete(recid,Serializer.LONG);
// }
//
// //now allocate again second recid list
// List<Long> recids2= new ArrayList<Long>();
// for(int i = 0;i<MAX;i++){
// recids2.add(e.put(0L, Serializer.LONG));
// }
//
// for(Long recid: recids){
// assertFalse(recids2.contains(recid));
// assertTrue(recids2.contains(recid+MAX));
// }
// }
//
/**
 * After deleting many records, compaction must reclaim their recids so a
 * second batch of puts is handed exactly the same recid set.
 */
@Test public void test_index_record_delete_and_reuse_large_COMPACT(){
        e = openEngine();
        final long MAX = 10;
        List<Long> recids= new ArrayList<Long>();
        for(int i = 0;i<MAX;i++){
            recids.add(e.put(0L, Serializer.LONG));
        }
        for(long recid:recids){
            e.delete(recid,Serializer.LONG);
        }
        //compaction will reclaim recid
        e.commit();
        e.compact();
        //now allocate again second recid list
        List<Long> recids2= new ArrayList<Long>();
        for(int i = 0;i<MAX;i++){
            recids2.add(e.put(0L, Serializer.LONG));
        }
        //second list should be reverse of first, as Linked Offset List is LIFO;
        //sort BOTH lists so the comparison is order-independent
        Collections.sort(recids);
        Collections.sort(recids2); // bug fix: was sorting recids twice, leaving recids2 unsorted
        assertEquals(recids, recids2);
    }
//
//
//
// @Test public void test_phys_record_reused(){
// final long recid = e.put(1L, Serializer.LONG);
// assertEquals((Long)1L, e.get(recid, Serializer.LONG));
// final long physRecid = e.vol.getLong(recid*8+ StoreDirect.IO_USER_START);
// e.delete(recid, Serializer.LONG);
// final long recid2 = e.put(1L, Serializer.LONG);
// assertEquals((Long)1L, e.get(recid2, Serializer.LONG));
// assertNotEquals(recid, recid2);
// assertEquals(physRecid, e.vol.getLong(recid2*8+ StoreDirect.IO_USER_START));
// }
//
/**
 * After delete + commit + compact, a new put must reuse the same recid, and
 * the index value for that recid must describe a plain (non-linked, in-use,
 * archived) 8-byte record at the first data page.
 */
@Test public void test_phys_record_reused_COMPACT(){
        e = openEngine();
        final long recid = e.put(1L, Serializer.LONG);
        assertEquals((Long)1L, e.get(recid, Serializer.LONG));
        e.delete(recid, Serializer.LONG);
        e.commit();
        // compaction should reclaim both the recid and its physical slot
        e.compact();
        final long recid2 = e.put(1L, Serializer.LONG);
        assertEquals((Long)1L, e.get(recid2, Serializer.LONG));
        e.commit();
        assertEquals((Long)1L, e.get(recid2, Serializer.LONG));
        // the freed recid is handed out again
        assertEquals(recid, recid2);
        // decode the packed index value: high 16 bits = size, low bits = offset + flags
        long indexVal = e.indexValGet(recid);
        assertEquals(8L, indexVal>>>48); // size
        assertEquals(e.PAGE_SIZE,
                indexVal&MOFFSET); //offset
        assertEquals(0, indexVal & StoreDirect.MLINKED);
        assertEquals(0, indexVal & StoreDirect.MUNUSED);
        assertTrue(0 != (indexVal & StoreDirect.MARCHIVE));
        e.close();
    }
//
//
//
// @Test public void test_index_stores_record_size() throws IOException {
// final long recid = e.put(1, Serializer.INTEGER);
// e.commit();
// assertEquals(4, e.vol.getUnsignedShort(recid * 8+ StoreDirect.IO_USER_START));
// assertEquals(Integer.valueOf(1), e.get(recid, Serializer.INTEGER));
//
// e.update(recid, 1L, Serializer.LONG);
// e.commit();
// assertEquals(8, e.vol.getUnsignedShort(recid * 8+ StoreDirect.IO_USER_START));
// assertEquals(Long.valueOf(1), e.get(recid, Serializer.LONG));
//
// }
//
/**
 * A single long-stack put must update the master link's packed size field
 * (high 16 bits) — presumably 8 bytes of chunk header plus 1 byte of packed
 * payload; TODO confirm against longStackPut's encoding.
 */
@Test public void test_long_stack_puts_record_offset_into_index() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 1, false);
        e.structuralLock.unlock();
        e.commit();
        assertEquals(8 + 1,
                e.headVol.getLong(FREE_RECID_STACK)>>>48);
    }
/**
 * Pushes 1..149 onto the free-recid long-stack and pops them back, verifying
 * LIFO order and that the stack is empty at the end.
 */
@Test public void test_long_stack_put_take() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        final long max = 150;
        for(long i=1;i<max;i++){
            e.longStackPut(FREE_RECID_STACK, i,false);
        }
        // take returns values in reverse insertion order (LIFO)
        for(long i = max-1;i>0;i--){
            assertEquals(i, e.longStackTake(FREE_RECID_STACK, false));
        }
        assertEquals(0, getLongStack(FREE_RECID_STACK).size());
        e.structuralLock.unlock();
    }
/**
 * Drains the long-stack anchored at {@code masterLinkOffset}, returning every
 * value in take (LIFO) order. Destructive: the stack is empty afterwards.
 * Caller must hold the structural lock (longStackTake requires it).
 */
protected List<Long> getLongStack(long masterLinkOffset) {
        List<Long> drained = new ArrayList<Long>();
        while (true) {
            long value = e.longStackTake(masterLinkOffset, false);
            if (value == 0) {
                break; // 0 signals an empty stack
            }
            drained.add(value);
        }
        return drained;
    }
/**
 * Smoke test: a single value pushed onto the long-stack is returned by the
 * very next take.
 */
@Test public void test_long_stack_put_take_simple() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 111, false);
        assertEquals(111L, e.longStackTake(FREE_RECID_STACK, false));
        e.structuralLock.unlock();
    }
/**
 * Pushes 1..149, commits, then drains the stack and checks the drained order
 * equals the reversed insertion order (long-stack is LIFO).
 */
@Test public void test_basic_long_stack() throws IOException {
        e = openEngine();
        //dirty hack to make sure we have lock
        e.structuralLock.lock();
        final long max = 150;
        ArrayList<Long> list = new ArrayList<Long>();
        for(long i=1;i<max;i++){
            e.longStackPut(FREE_RECID_STACK, i,false);
            list.add(i);
        }
        // expected drain order is reverse of insertion order
        Collections.reverse(list);
        e.structuralLock.unlock();
        e.commit();
        e.structuralLock.lock();
        assertEquals(list, getLongStack(FREE_RECID_STACK));
        e.structuralLock.unlock();
    }
/**
 * Same as test_basic_long_stack but with 15k entries, so the stack must span
 * multiple pages.
 */
@Test public void test_large_long_stack() throws IOException {
        e = openEngine();
        //dirty hack to make sure we have lock
        e.structuralLock.lock();
        final long max = 15000;
        ArrayList<Long> list = new ArrayList<Long>();
        for(long i=1;i<max;i++){
            e.longStackPut(FREE_RECID_STACK, i,false);
            list.add(i);
        }
        e.structuralLock.unlock();
        // expected drain order is reverse of insertion order (LIFO)
        Collections.reverse(list);
        e.commit();
        e.structuralLock.lock();
        assertEquals(list, getLongStack(FREE_RECID_STACK));
        e.structuralLock.unlock();
    }
/**
 * Put/take round-trip of 149 values without an intervening commit — the
 * uncommitted stack must still behave as LIFO.
 */
@Test public void test_basic_long_stack_no_commit() throws IOException {
        e = openEngine();
        //dirty hack to make sure we have lock
        e.structuralLock.lock();
        final long max = 150;
        for(long i=1;i<max;i++){
            e.longStackPut(FREE_RECID_STACK, i,false);
        }
        for(long i =max-1;i>=1;i--){
            assertEquals(i, e.longStackTake(FREE_RECID_STACK,false));
        }
        e.structuralLock.unlock();
    }
/**
 * Multi-page variant of the no-commit put/take round trip (15k entries).
 * Skipped when the test scale is zero (short test runs).
 */
@Test public void test_large_long_stack_no_commit() throws IOException {
        if(TT.scale()==0)
            return;
        e = openEngine();
        //dirty hack to make sure we have lock
        e.structuralLock.lock();
        final long max = 15000;
        for(long i=1;i<max;i++){
            e.longStackPut(FREE_RECID_STACK, i,false);
        }
        for(long i =max-1;i>=1;i--){
            assertEquals(i, e.longStackTake(FREE_RECID_STACK,false));
        }
        e.structuralLock.unlock();
    }
/**
 * After the first long-stack put is committed (and the WAL replayed, if any),
 * a stack page must exist at PAGE_SIZE with the expected header and the
 * parity-encoded value packed right after the 8-byte page header.
 */
@Test public void long_stack_page_created_after_put() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 111, false);
        //update max recid, so paranoid check does not complain
        e.maxRecidSet(111L);
        e.structuralLock.unlock();
        e.commit();
        forceFullReplay(e);
        long pageId = e.vol.getLong(FREE_RECID_STACK);
        // high 16 bits of master link: current stack size — presumably 8-byte
        // header + 2 bytes of packed payload; TODO confirm encoding
        assertEquals(8+2, pageId>>>48);
        pageId = pageId & StoreDirect.MOFFSET;
        assertEquals(PAGE_SIZE, pageId);
        // page header: preferred page size in high bits, zero prev-link offset
        assertEquals(LONG_STACK_PREF_SIZE, DataIO.parity4Get(e.vol.getLong(pageId))>>>48);
        assertEquals(0, DataIO.parity4Get(e.vol.getLong(pageId))&MOFFSET);
        // stored value is parity1-protected and shifted left by one
        assertEquals(DataIO.parity1Set(111 << 1), e.vol.getLongPackBidi(pageId + 8) & DataIO.PACK_LONG_RESULT_MASK);
    }
/**
 * Pushes five consecutive values and then walks the packed entries on the
 * stack page, checking each decoded value and that the walked length matches
 * the size recorded in the master link.
 */
@Test public void long_stack_put_five() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 111,false);
        e.longStackPut(FREE_RECID_STACK, 112, false);
        e.longStackPut(FREE_RECID_STACK, 113, false);
        e.longStackPut(FREE_RECID_STACK, 114,false);
        e.longStackPut(FREE_RECID_STACK, 115, false);
        e.structuralLock.unlock();
        e.commit();
        forceFullReplay(e);
        long pageId = e.vol.getLong(FREE_RECID_STACK);
        long currPageSize = pageId>>>48;
        pageId = pageId & StoreDirect.MOFFSET;
        assertEquals(PAGE_SIZE, pageId);
        assertEquals(LONG_STACK_PREF_SIZE, e.vol.getLong(pageId) >>> 48);
        assertEquals(0, e.vol.getLong(pageId) & MOFFSET); //next link
        // walk the packed values; getLongPackBidi returns byte-count in bits 60+
        long offset = pageId + 8;
        for(int i=111;i<=115;i++){
            long val = e.vol.getLongPackBidi(offset);
            assertEquals(i, DataIO.parity1Get(val & DataIO.PACK_LONG_RESULT_MASK)>>>1);
            offset += val >>> 60;
        }
        // walked bytes must equal the size recorded in the master link
        assertEquals(currPageSize, offset-pageId);
    }
/**
 * Taking the last value off a long-stack must release the page: the master
 * link goes back to the parity-encoded zero.
 */
@Test public void long_stack_page_deleted_after_take() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 111, false);
        e.structuralLock.unlock();
        e.commit();
        forceFullReplay(e);
        e.structuralLock.lock();
        assertEquals(111L, e.longStackTake(FREE_RECID_STACK, false));
        e.structuralLock.unlock();
        e.commit();
        forceFullReplay(e);
        assertEquals(0L, DataIO.parity1Get(e.headVol.getLong(FREE_RECID_STACK)));
    }
/**
 * Same as long_stack_page_deleted_after_take but without forcing a WAL
 * replay between the put and the take — exercises the in-memory path.
 */
@Test public void long_stack_page_deleted_after_take2() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        e.longStackPut(FREE_RECID_STACK, 111, false);
        e.structuralLock.unlock();
        e.commit();
        e.structuralLock.lock();
        assertEquals(111L, e.longStackTake(FREE_RECID_STACK, false));
        e.structuralLock.unlock();
        e.commit();
        forceFullReplay(e);
        assertEquals(0L, DataIO.parity1Get(e.headVol.getLong(FREE_RECID_STACK)));
    }
/**
 * Fills the first long-stack page to just under its preferred size, verifies
 * the packed content, then pushes one more value and verifies that a second
 * page is allocated, linked back to the first, and holds the overflow value.
 */
@Test public void long_stack_page_overflow() throws IOException {
        e = openEngine();
        e.structuralLock.lock();
        //fill page until near overflow
        int actualChunkSize = 8;
        for(int i=0;;i++){
            long val = 1000L+i;
            e.longStackPut(FREE_RECID_STACK, val ,false);
            // track the expected byte size of the packed entries (values are shifted left by one)
            actualChunkSize += DataIO.packLongBidi(new byte[8],0,val<<1);
            if(e.headVol.getLong(FREE_RECID_STACK)>>48 >LONG_STACK_PREF_SIZE-10)
                break;
        }
        e.structuralLock.unlock();
        e.commit();
        e.commitLock.lock();
        e.structuralLock.lock();
        forceFullReplay(e);
        //check content
        long pageId = e.headVol.getLong(FREE_RECID_STACK);
        assertEquals(actualChunkSize, pageId>>>48);
        pageId = pageId & StoreDirect.MOFFSET;
        assertEquals(PAGE_SIZE, pageId);
        assertEquals(StoreDirect.LONG_STACK_PREF_SIZE, e.vol.getLong(pageId)>>>48);
        // walk every packed entry on the page and verify decoded values
        for(long i=1000,pos=8;;i++){
            long val = e.vol.getLongPackBidi(pageId+pos);
            assertEquals(i, DataIO.parity1Get(val&DataIO.PACK_LONG_RESULT_MASK)>>>1);
            pos+=val>>>60;
            if(pos==actualChunkSize){
                break;
            }
        }
        //add one more item, this will trigger page overflow
        e.longStackPut(FREE_RECID_STACK, 11L,false);
        e.structuralLock.unlock();
        e.commitLock.unlock();
        e.commit();
        e.commitLock.lock();
        e.structuralLock.lock();
        forceFullReplay(e);
        //check page overflowed
        pageId = e.headVol.getLong(FREE_RECID_STACK);
        assertEquals(8+1, pageId>>>48);
        pageId = pageId & StoreDirect.MOFFSET;
        // new page sits directly after the first stack page
        assertEquals(PAGE_SIZE + StoreDirect.LONG_STACK_PREF_SIZE, pageId);
        assertEquals(PAGE_SIZE, DataIO.parity4Get(e.vol.getLong(pageId)) & StoreDirect.MOFFSET); //prev link
        assertEquals(LONG_STACK_PREF_SIZE, e.vol.getLong(pageId)>>>48); //cur page size
        //overflow value
        assertEquals(11L, DataIO.parity1Get(e.vol.getLongPackBidi(pageId+8)&DataIO.PACK_LONG_RESULT_MASK)>>>1);
        //remaining bytes should be zero
        for(long offset = pageId+8+2;offset<pageId+LONG_STACK_PREF_SIZE;offset++){
            assertEquals(0,e.vol.getByte(offset));
        }
        e.structuralLock.unlock();
        e.commitLock.unlock();
    }
/**
 * Forces a WAL-backed engine to replay its write-ahead log into the main
 * volume. No-op for engines that are not {@link StoreWAL}. Acquires the
 * commit lock only if the current thread does not already hold it.
 */
private void forceFullReplay(E e) {
        if (!(e instanceof StoreWAL)) {
            return; // plain StoreDirect has no WAL to replay
        }
        StoreWAL wal = (StoreWAL) e;
        if (wal.commitLock.isHeldByCurrentThread()) {
            wal.replaySoft();
            return;
        }
        wal.commitLock.lock();
        wal.replaySoft();
        wal.commitLock.unlock();
    }
/**
 * With deleteFilesAfterClose() enabled, the main db file, the physical file
 * and the WAL file must all be gone after close().
 */
@Test public void delete_files_after_close(){
        File f = TT.tempDbFile();
        File phys = new File(f.getPath());
        DB db = DBMaker.fileDB(f).transactionDisable().deleteFilesAfterClose().make();
        db.hashMap("test").put("aa","bb");
        db.commit();
        // files exist while the db is open
        assertTrue(f.exists());
        assertTrue(phys.exists());
        db.close();
        // ...and are removed on close
        assertFalse(f.exists());
        assertFalse(new File(f+".0.wal").exists());
        assertFalse(phys.exists());
    }
/**
 * Free-space accounting: deleting a 10000-byte record should raise
 * getFreeSize() by exactly that amount, before and after commit.
 * Currently ignored — free space stats are not implemented (see TODO).
 */
@Test @Ignore //TODO free space stats
public void freeSpaceWorks(){
        long oldFree = e.getFreeSize();
        long recid = e.put(new byte[10000],Serializer.BYTE_ARRAY_NOSIZE);
        e.commit();
        assertEquals(oldFree, e.getFreeSize());
        e.delete(recid, Serializer.BYTE_ARRAY_NOSIZE);
        assertEquals(oldFree + 10000, e.getFreeSize());
        e.commit();
        assertEquals(oldFree + 10000, e.getFreeSize());
    }
/**
 * A preallocated recid has no value: get() must return null both before and
 * after commit (TT.FAIL serializer asserts the value bytes are never read).
 */
@Test public void prealloc(){
        e = openEngine();
        long recid = e.preallocate();
        assertNull(e.get(recid, TT.FAIL));
        e.commit();
        assertNull(e.get(recid, TT.FAIL));
    }
/**
 * Bumping the store-version field in the file header must make openEngine()
 * fail with an error whose root cause mentions "version".
 * Currently ignored — store versioning/feature bits are unresolved (see TODO).
 */
@Ignore //TODO deal with store versioning and feature bits
@Test public void header_index_inc() throws IOException {
        e.put(new byte[10000],Serializer.BYTE_ARRAY_NOSIZE);
        e.commit();
        e.close();
        //increment store version
        Volume v = Volume.FileChannelVol.FACTORY.makeVolume(f.getPath(), true);
        v.putUnsignedShort(4, StoreDirect.STORE_VERSION + 1);
        v.sync();
        v.close();
        try{
            e = openEngine();
            fail();
        }catch(IOError e){
            // NOTE: catch parameter shadows the engine field 'e'
            // unwrap nested IOErrors to reach the root cause message
            Throwable e2 = e;
            while (e2 instanceof IOError){
                e2 = e2.getCause();
            }
            assertTrue(e2.getMessage().contains("version"));
        }
    }
/**
 * Same as header_index_inc but patches the version through the physical
 * file's path. Currently ignored — store versioning is unresolved (see TODO).
 */
@Test @Ignore //TODO deal with store versioning and feature bits
public void header_phys_inc() throws IOException {
        e.put(new byte[10000],Serializer.BYTE_ARRAY_NOSIZE);
        e.commit();
        e.close();
        //increment store version
        File phys = new File(f.getPath());
        Volume v = Volume.FileChannelVol.FACTORY.makeVolume(phys.getPath(), true);
        v.putUnsignedShort(4, StoreDirect.STORE_VERSION + 1);
        v.sync();
        v.close();
        try{
            e = openEngine();
            fail();
        }catch(IOError e){
            // unwrap nested IOErrors to reach the root cause message
            Throwable e2 = e;
            while (e2 instanceof IOError){
                e2 = e2.getCause();
            }
            assertTrue(e2.getMessage().contains("version"));
        }
    }
/**
 * For every volume fabricator, compaction must keep the concrete
 * {@link Volume} implementation and backing file, and must not lose records.
 */
@Test public void compact_keeps_volume_type(){
        if(TT.scale()==0)
            return;
        for(final Fun.Function1<Volume,String> fab : VolumeTest.VOL_FABS){
            Volume.VolumeFactory fac = new Volume.VolumeFactory() {
                @Override
                public Volume makeVolume(String file, boolean readOnly, boolean fileLockDisable, int sliceShift, long initSize, boolean fixedSize) {
                    return fab.run(file);
                }
            };
            //init
            File f = TT.tempDbFile();
            e = (E) new StoreDirect(f.getPath(), fac,
                    null,
                    CC.DEFAULT_LOCK_SCALE,
                    0,
                    false,false,null,
                    false,false,false,null,
                    null, 0L, 0L, false);
            e.init();
            //fill with some data, remembering recid->value for verification
            Map<Long, String> data = new LinkedHashMap<Long, String>();
            for(int i=0;i<1000;i++){
                String ss = TT.randomString(1000);
                long recid = e.put(ss,Serializer.STRING);
                // bug fix: the map was never populated, so the verification
                // loop below iterated an empty map and checked nothing
                data.put(recid, ss);
            }
            //perform compact and check data
            Volume vol = e.vol;
            e.commit();
            e.compact();
            assertEquals(vol.getClass(), e.vol.getClass());
            if(e.vol.getFile()!=null)
                assertEquals(f, e.vol.getFile());
            for(Long recid:data.keySet()){
                assertEquals(data.get(recid), e.get(recid, Serializer.STRING));
            }
            e.close();
            f.delete();
        }
    }
/**
 * Writes 10k x 1KB records, deletes them all and checks that getFreeSize()
 * reports the reclaimed bytes, then that compaction shrinks free space to a
 * small remainder.
 */
@Test public void test_free_space(){
        if(TT.shortTest())
            return;
        e = openEngine();
        assertTrue(e.getFreeSize()>=0);
        List<Long> recids = new ArrayList<Long>();
        for(int i=0;i<10000;i++){
            recids.add(
                    e.put(TT.randomByteArray(1024), Serializer.BYTE_ARRAY_NOSIZE));
        }
        // fresh store, nothing has been freed yet
        assertEquals(0, e.getFreeSize());
        e.commit();
        for(Long recid:recids){
            e.delete(recid,Serializer.BYTE_ARRAY_NOSIZE);
        }
        e.commit();
        assertEquals(10000 * 1024, e.getFreeSize());
        e.compact();
        assertTrue(e.getFreeSize() < 100000); //some leftovers after compaction
    }
/**
 * Cross-checks recidToOffset() against a hand-built BitSet of the byte ranges
 * where recid slots must live, given a fake set of index pages. The two
 * bitsets must agree bit-for-bit.
 */
@Test public void recid2Offset(){
        e=openEngine();
        //create 2 fake index pages
        e.vol.ensureAvailable(PAGE_SIZE * 12);
        e.indexPages = new long[]{0L, PAGE_SIZE * 3, PAGE_SIZE * 6, PAGE_SIZE * 11};
        //control bitset with expected recid layout
        BitSet b = new BitSet((int) (PAGE_SIZE * 7));
        //fill bitset at places where recids should be
        // first page: slots start after the store header; later pages: after a 16-byte page header
        b.set((int) StoreDirect.HEAD_END + 8, (int) PAGE_SIZE);
        b.set((int)PAGE_SIZE*3+16, (int)PAGE_SIZE*4);
        b.set((int) PAGE_SIZE * 6 + 16, (int) PAGE_SIZE * 7);
        b.set((int) PAGE_SIZE * 11 + 16, (int) PAGE_SIZE * 12);
        //bitset with recid layout generated by recid2Offset
        BitSet b2 = new BitSet((int) (PAGE_SIZE * 7));
        long oldOffset = 0;
        recidLoop:
        for(long recid=1;;recid++){
            long offset = e.recidToOffset(recid);
            // offsets must be strictly increasing with recid
            assertTrue(oldOffset<offset);
            oldOffset = offset;
            b2.set((int)offset,(int)offset+8);
            if(offset==PAGE_SIZE*12-8)
                break recidLoop;
        }
        for(int offset = 0; offset<b.length();offset++){
            if(b.get(offset)!=b2.get(offset))
                throw new AssertionError("error at offset "+offset);
        }
    }
/**
 * Hand-writes a chain of three linked index pages into a closed store file,
 * reopens it, and checks the engine discovered exactly that page chain.
 */
@Test public void index_pages_init(){
        if(CC.PARANOID)
            return; //generates broken store, does not work in paranoid mode
        e=openEngine();
        e.close();
        //now create tree index pages
        Volume v = Volume.RandomAccessFileVol.FACTORY.makeVolume(f.getPath(),false);
        v.ensureAvailable(PAGE_SIZE*6);
        // each link is parity16-protected; 0 terminates the chain
        v.putLong(HEAD_END, parity16Set(PAGE_SIZE * 2));
        v.putLong(PAGE_SIZE*2, parity16Set(PAGE_SIZE * 4));
        v.putLong(PAGE_SIZE*4, parity16Set(PAGE_SIZE*5));
        v.putLong(PAGE_SIZE * 5, parity16Set(0));
        v.sync();
        v.close();
        //reopen and check index pages
        e=openEngine();
        //if store becomes more paranoid this might fail
        assertArrayEquals(new long[]{0L, PAGE_SIZE*2, PAGE_SIZE*4, PAGE_SIZE*5}, e.indexPages);
        e.close();
        f.delete();
    }
/**
 * Writes enough records to overflow into a third index page, then checks
 * compact() completes without error on an in-memory store.
 */
@Test public void index_pages_overflow_compact(){
        StoreDirect e = (StoreDirect) DBMaker.memoryDB()
                .transactionDisable()
                .makeEngine();
        // Overflow a third page
        long MAX = (StoreDirect.PAGE_SIZE / 8) * 4;
        for(int i = 0;i<MAX;i++){
            e.put(0L, Serializer.LONG);
        }
        e.compact();
        e.close();
    }
/**
 * Fills the store past three index pages, deletes a deterministic subset of
 * records to create gaps, compacts, and verifies free space, shrunken store
 * size, and the surviving records' values.
 */
@Test public void index_pages_overflow_compact_after_delete(){
        StoreDirect e = (StoreDirect) DBMaker.memoryDB()
                .transactionDisable()
                .makeEngine();
        // Overflow a third page
        long MAX = (StoreDirect.PAGE_SIZE / 8) * 4;
        // Map of recids and values
        Map<Long, Long> recids = new HashMap<Long, Long>();
        for(int i = 0;i<MAX;i++){
            long val = i << 2; // was Long.valueOf(i<<2): pointless box/unbox
            recids.put(e.put(val, Serializer.LONG), val);
        }
        long filledSize = e.getCurrSize();
        // Randomly select a bunch of recids to delete to create gaps for compacting.
        // Fixed seed keeps the selection deterministic, so the asserts below
        // (e.g. getFreeSize() > 0) cannot fail on an unlucky random draw.
        Random rand = new Random(42);
        List<Long> toDelete = new ArrayList<Long>();
        for(Long recid : recids.keySet()) {
            if(rand.nextBoolean()) {
                toDelete.add(recid);
            }
        }
        // Delete
        for(Long recid : toDelete) {
            e.delete(recid, Serializer.LONG);
            recids.remove(recid);
        }
        e.compact();
        // Assert free space after delete and compact
        Assert.assertTrue(e.getFreeSize() > 0L);
        // Assert store size has dropped after delete and compact
        Assert.assertTrue(e.getCurrSize() < filledSize);
        // Assert the objects are what we expect to get back
        for(Map.Entry<Long, Long> entry : recids.entrySet()) {
            Assert.assertEquals(entry.getValue(), e.get(entry.getKey(), Serializer.LONG));
        }
        e.close();
    }
/**
 * Writes enough records to span more than one full index page
 * (1MB/8 slots + 1000), reopens the store, and verifies every record.
 */
@Test public void many_recids(){
        if(TT.shortTest())
            return;
        long recidCount = 1024*1024/8+1000;
        e = openEngine();
        List<Long> recids = new ArrayList<Long>();
        for(long i=0;i<recidCount;i++){
            long recid = e.put(i, Serializer.LONG);
            recids.add(recid);
        }
        e.commit();
        reopen();
        for(long i=0;i<recidCount;i++){
            long recid = recids.get((int) i);
            // Long.valueOf replaces the deprecated new Long(long) constructor
            assertEquals(Long.valueOf(i), e.get(recid,Serializer.LONG));
        }
        e.close();
    }
}
| |
/*
* Copyright 2014 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.handler.codec.compression;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.ByteToMessageDecoder;
import io.netty.util.internal.ObjectUtil;
import net.jpountz.lz4.LZ4Exception;
import net.jpountz.lz4.LZ4Factory;
import net.jpountz.lz4.LZ4FastDecompressor;
import java.util.List;
import java.util.zip.Checksum;
import static io.netty.handler.codec.compression.Lz4Constants.*;
/**
* Uncompresses a {@link ByteBuf} encoded with the LZ4 format.
*
* See original <a href="https://github.com/Cyan4973/lz4">LZ4 Github project</a>
* and <a href="https://fastcompression.blogspot.ru/2011/05/lz4-explained.html">LZ4 block format</a>
* for full description.
*
* Since the original LZ4 block format does not contains size of compressed block and size of original data
* this encoder uses format like <a href="https://github.com/idelpivnitskiy/lz4-java">LZ4 Java</a> library
* written by Adrien Grand and approved by Yann Collet (author of original LZ4 library).
*
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* * Magic * Token * Compressed * Decompressed * Checksum * + * LZ4 compressed *
* * * * length * length * * * block *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
*/
public class Lz4FrameDecoder extends ByteToMessageDecoder {
    /**
     * Current state of stream.
     */
    private enum State {
        INIT_BLOCK,       // waiting for a complete block header
        DECOMPRESS_DATA,  // header parsed; waiting for the compressed payload
        FINISHED,         // end-of-stream block seen; remaining input is discarded
        CORRUPTED         // a decode error occurred; remaining input is discarded
    }
    private State currentState = State.INIT_BLOCK;
    /**
     * Underlying decompressor in use.
     */
    private LZ4FastDecompressor decompressor;
    /**
     * Underlying checksum calculator in use.
     */
    private ByteBufChecksum checksum;
    /**
     * Type of current block.
     */
    private int blockType;
    /**
     * Compressed length of current incoming block.
     */
    private int compressedLength;
    /**
     * Decompressed length of current incoming block.
     */
    private int decompressedLength;
    /**
     * Checksum value of current incoming block.
     */
    private int currentChecksum;
    /**
     * Creates the fastest LZ4 decoder.
     *
     * Note that by default, validation of the checksum header in each chunk is
     * DISABLED for performance improvements. If performance is less of an issue,
     * or if you would prefer the safety that checksum validation brings, please
     * use the {@link #Lz4FrameDecoder(boolean)} constructor with the argument
     * set to {@code true}.
     */
    public Lz4FrameDecoder() {
        this(false);
    }
    /**
     * Creates a LZ4 decoder with fastest decoder instance available on your machine.
     *
     * @param validateChecksums if {@code true}, the checksum field will be validated against the actual
     *                          uncompressed data, and if the checksums do not match, a suitable
     *                          {@link DecompressionException} will be thrown
     */
    public Lz4FrameDecoder(boolean validateChecksums) {
        this(LZ4Factory.fastestInstance(), validateChecksums);
    }
    /**
     * Creates a new LZ4 decoder with customizable implementation.
     *
     * @param factory           user customizable {@link LZ4Factory} instance
     *                          which may be JNI bindings to the original C implementation, a pure Java implementation
     *                          or a Java implementation that uses the {@link sun.misc.Unsafe}
     * @param validateChecksums if {@code true}, the checksum field will be validated against the actual
     *                          uncompressed data, and if the checksums do not match, a suitable
     *                          {@link DecompressionException} will be thrown. In this case encoder will use
     *                          xxhash hashing for Java, based on Yann Collet's work available at
     *                          <a href="https://github.com/Cyan4973/xxHash">Github</a>.
     */
    public Lz4FrameDecoder(LZ4Factory factory, boolean validateChecksums) {
        this(factory, validateChecksums ? new Lz4XXHash32(DEFAULT_SEED) : null);
    }
    /**
     * Creates a new customizable LZ4 decoder.
     *
     * @param factory  user customizable {@link LZ4Factory} instance
     *                 which may be JNI bindings to the original C implementation, a pure Java implementation
     *                 or a Java implementation that uses the {@link sun.misc.Unsafe}
     * @param checksum the {@link Checksum} instance to use to check data for integrity.
     *                 You may set {@code null} if you do not want to validate checksum of each block
     */
    public Lz4FrameDecoder(LZ4Factory factory, Checksum checksum) {
        decompressor = ObjectUtil.checkNotNull(factory, "factory").fastDecompressor();
        this.checksum = checksum == null ? null : ByteBufChecksum.wrapChecksum(checksum);
    }
    /**
     * State-machine decode: parses one block header in INIT_BLOCK, then falls
     * through to DECOMPRESS_DATA once the header is complete. Either state
     * breaks out early (without consuming) when not enough bytes are buffered,
     * so decode() can be re-entered when more data arrives. Any failure moves
     * the decoder into CORRUPTED, after which all further input is discarded.
     */
    @Override
    protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
        try {
            switch (currentState) {
            case INIT_BLOCK:
                if (in.readableBytes() < HEADER_LENGTH) {
                    break;
                }
                final long magic = in.readLong();
                if (magic != MAGIC_NUMBER) {
                    throw new DecompressionException("unexpected block identifier");
                }
                // token: low nibble encodes the compression level, high nibble the block type
                final int token = in.readByte();
                final int compressionLevel = (token & 0x0F) + COMPRESSION_LEVEL_BASE;
                int blockType = token & 0xF0;
                // lengths and checksum are stored little-endian, hence reverseBytes
                int compressedLength = Integer.reverseBytes(in.readInt());
                if (compressedLength < 0 || compressedLength > MAX_BLOCK_SIZE) {
                    throw new DecompressionException(String.format(
                            "invalid compressedLength: %d (expected: 0-%d)",
                            compressedLength, MAX_BLOCK_SIZE));
                }
                int decompressedLength = Integer.reverseBytes(in.readInt());
                final int maxDecompressedLength = 1 << compressionLevel;
                if (decompressedLength < 0 || decompressedLength > maxDecompressedLength) {
                    throw new DecompressionException(String.format(
                            "invalid decompressedLength: %d (expected: 0-%d)",
                            decompressedLength, maxDecompressedLength));
                }
                if (decompressedLength == 0 && compressedLength != 0
                        || decompressedLength != 0 && compressedLength == 0
                        || blockType == BLOCK_TYPE_NON_COMPRESSED && decompressedLength != compressedLength) {
                    throw new DecompressionException(String.format(
                            "stream corrupted: compressedLength(%d) and decompressedLength(%d) mismatch",
                            compressedLength, decompressedLength));
                }
                int currentChecksum = Integer.reverseBytes(in.readInt());
                if (decompressedLength == 0 && compressedLength == 0) {
                    // empty block marks end-of-stream; its checksum must be zero
                    if (currentChecksum != 0) {
                        throw new DecompressionException("stream corrupted: checksum error");
                    }
                    currentState = State.FINISHED;
                    // release references; no further blocks will be decoded
                    decompressor = null;
                    checksum = null;
                    break;
                }
                // stash parsed header in fields so a re-entered decode() can resume
                this.blockType = blockType;
                this.compressedLength = compressedLength;
                this.decompressedLength = decompressedLength;
                this.currentChecksum = currentChecksum;
                currentState = State.DECOMPRESS_DATA;
                // fall through
            case DECOMPRESS_DATA:
                // reload header fields (decode may have been re-entered after INIT_BLOCK)
                blockType = this.blockType;
                compressedLength = this.compressedLength;
                decompressedLength = this.decompressedLength;
                currentChecksum = this.currentChecksum;
                if (in.readableBytes() < compressedLength) {
                    break;
                }
                final ByteBufChecksum checksum = this.checksum;
                ByteBuf uncompressed = null;
                try {
                    switch (blockType) {
                    case BLOCK_TYPE_NON_COMPRESSED:
                        // Just pass through, we not update the readerIndex yet as we do this outside of the
                        // switch statement.
                        uncompressed = in.retainedSlice(in.readerIndex(), decompressedLength);
                        break;
                    case BLOCK_TYPE_COMPRESSED:
                        uncompressed = ctx.alloc().buffer(decompressedLength, decompressedLength);
                        decompressor.decompress(CompressionUtil.safeNioBuffer(in),
                                uncompressed.internalNioBuffer(uncompressed.writerIndex(), decompressedLength));
                        // Update the writerIndex now to reflect what we decompressed.
                        uncompressed.writerIndex(uncompressed.writerIndex() + decompressedLength);
                        break;
                    default:
                        throw new DecompressionException(String.format(
                                "unexpected blockType: %d (expected: %d or %d)",
                                blockType, BLOCK_TYPE_NON_COMPRESSED, BLOCK_TYPE_COMPRESSED));
                    }
                    // Skip inbound bytes after we processed them.
                    in.skipBytes(compressedLength);
                    if (checksum != null) {
                        CompressionUtil.checkChecksum(checksum, uncompressed, currentChecksum);
                    }
                    out.add(uncompressed);
                    // ownership transferred to 'out'; null so the finally block does not release it
                    uncompressed = null;
                    currentState = State.INIT_BLOCK;
                } catch (LZ4Exception e) {
                    throw new DecompressionException(e);
                } finally {
                    if (uncompressed != null) {
                        uncompressed.release();
                    }
                }
                break;
            case FINISHED:
            case CORRUPTED:
                in.skipBytes(in.readableBytes());
                break;
            default:
                throw new IllegalStateException();
            }
        } catch (Exception e) {
            currentState = State.CORRUPTED;
            throw e;
        }
    }
    /**
     * Returns {@code true} if and only if the end of the compressed stream
     * has been reached.
     */
    public boolean isClosed() {
        return currentState == State.FINISHED;
    }
}
| |
package jk_5.nailed.plugins.worldedit;
import com.sk89q.jnbt.*;
import net.minecraft.nbt.*;
import java.util.*;
import java.util.Map.Entry;
/**
* Converts between JNBT and Minecraft NBT classes.
*/
final class NBTConverter {
    // Static utility class; private constructor prevents instantiation.
    private NBTConverter() {
    }
    /**
     * Converts a JNBT {@link Tag} into the equivalent Minecraft {@link NBTBase},
     * dispatching on the concrete tag type.
     *
     * @param tag the JNBT tag to convert
     * @return the native NBT equivalent
     * @throws IllegalArgumentException if the tag type has no native counterpart
     */
    public static NBTBase toNative(Tag tag) {
        if (tag instanceof IntArrayTag) {
            return toNative((IntArrayTag) tag);
        } else if (tag instanceof ListTag) {
            return toNative((ListTag) tag);
        } else if (tag instanceof LongTag) {
            return toNative((LongTag) tag);
        } else if (tag instanceof StringTag) {
            return toNative((StringTag) tag);
        } else if (tag instanceof IntTag) {
            return toNative((IntTag) tag);
        } else if (tag instanceof ByteTag) {
            return toNative((ByteTag) tag);
        } else if (tag instanceof ByteArrayTag) {
            return toNative((ByteArrayTag) tag);
        } else if (tag instanceof CompoundTag) {
            return toNative((CompoundTag) tag);
        } else if (tag instanceof FloatTag) {
            return toNative((FloatTag) tag);
        } else if (tag instanceof ShortTag) {
            return toNative((ShortTag) tag);
        } else if (tag instanceof DoubleTag) {
            return toNative((DoubleTag) tag);
        } else {
            throw new IllegalArgumentException("Can't convert tag of type " + tag.getClass().getCanonicalName());
        }
    }
public static NBTTagIntArray toNative(IntArrayTag tag) {
int[] value = tag.getValue();
return new NBTTagIntArray(Arrays.copyOf(value, value.length));
}
    /**
     * Converts a JNBT list tag by recursively converting each child.
     * End tags are structural markers only and are not copied into the list.
     */
    public static NBTTagList toNative(ListTag tag) {
        NBTTagList list = new NBTTagList();
        for (Tag child : tag.getValue()) {
            if (child instanceof EndTag) {
                continue;
            }
            list.appendTag(toNative(child));
        }
        return list;
    }
    /** Converts a JNBT long tag to its native equivalent. */
    public static NBTTagLong toNative(LongTag tag) {
        return new NBTTagLong(tag.getValue());
    }
    /** Converts a JNBT string tag to its native equivalent. */
    public static NBTTagString toNative(StringTag tag) {
        return new NBTTagString(tag.getValue());
    }
    /** Converts a JNBT int tag to its native equivalent. */
    public static NBTTagInt toNative(IntTag tag) {
        return new NBTTagInt(tag.getValue());
    }
    /** Converts a JNBT byte tag to its native equivalent. */
    public static NBTTagByte toNative(ByteTag tag) {
        return new NBTTagByte(tag.getValue());
    }
    /** Converts a JNBT byte-array tag; the backing array is defensively copied. */
    public static NBTTagByteArray toNative(ByteArrayTag tag) {
        byte[] value = tag.getValue();
        return new NBTTagByteArray(Arrays.copyOf(value, value.length));
    }
public static NBTTagCompound toNative(CompoundTag tag) {
NBTTagCompound compound = new NBTTagCompound();
for (Entry<String, Tag> child : tag.getValue().entrySet()) {
compound.setTag(child.getKey(), toNative(child.getValue()));
}
return compound;
}
public static NBTTagFloat toNative(FloatTag tag) {
return new NBTTagFloat(tag.getValue());
}
public static NBTTagShort toNative(ShortTag tag) {
return new NBTTagShort(tag.getValue());
}
public static NBTTagDouble toNative(DoubleTag tag) {
return new NBTTagDouble(tag.getValue());
}
public static Tag fromNative(NBTBase other) {
if (other instanceof NBTTagIntArray) {
return fromNative((NBTTagIntArray) other);
} else if (other instanceof NBTTagList) {
return fromNative((NBTTagList) other);
} else if (other instanceof NBTTagEnd) {
return fromNative((NBTTagEnd) other);
} else if (other instanceof NBTTagLong) {
return fromNative((NBTTagLong) other);
} else if (other instanceof NBTTagString) {
return fromNative((NBTTagString) other);
} else if (other instanceof NBTTagInt) {
return fromNative((NBTTagInt) other);
} else if (other instanceof NBTTagByte) {
return fromNative((NBTTagByte) other);
} else if (other instanceof NBTTagByteArray) {
return fromNative((NBTTagByteArray) other);
} else if (other instanceof NBTTagCompound) {
return fromNative((NBTTagCompound) other);
} else if (other instanceof NBTTagFloat) {
return fromNative((NBTTagFloat) other);
} else if (other instanceof NBTTagShort) {
return fromNative((NBTTagShort) other);
} else if (other instanceof NBTTagDouble) {
return fromNative((NBTTagDouble) other);
} else {
throw new IllegalArgumentException("Can't convert other of type " + other.getClass().getCanonicalName());
}
}
public static IntArrayTag fromNative(NBTTagIntArray other) {
int[] value = other.getIntArray();
return new IntArrayTag(Arrays.copyOf(value, value.length));
}
public static ListTag fromNative(NBTTagList other) {
other = (NBTTagList) other.copy();
List<Tag> list = new ArrayList<Tag>();
Class<? extends Tag> listClass = StringTag.class;
int tags = other.tagCount();
for (int i = 0; i < tags; i++) {
Tag child = fromNative(other.removeTag(0));
list.add(child);
listClass = child.getClass();
}
return new ListTag(listClass, list);
}
public static EndTag fromNative(NBTTagEnd other) {
return new EndTag();
}
public static LongTag fromNative(NBTTagLong other) {
return new LongTag(other.getLong());
}
public static StringTag fromNative(NBTTagString other) {
return new StringTag(other.getString());
}
public static IntTag fromNative(NBTTagInt other) {
return new IntTag(other.getInt());
}
public static ByteTag fromNative(NBTTagByte other) {
return new ByteTag(other.getByte());
}
public static ByteArrayTag fromNative(NBTTagByteArray other) {
byte[] value = other.getByteArray();
return new ByteArrayTag(Arrays.copyOf(value, value.length));
}
public static CompoundTag fromNative(NBTTagCompound other) {
@SuppressWarnings("unchecked") Set<String> tags = other.getKeySet();
Map<String, Tag> map = new HashMap<String, Tag>();
for (String tagName : tags) {
map.put(tagName, fromNative(other.getTag(tagName)));
}
return new CompoundTag(map);
}
public static FloatTag fromNative(NBTTagFloat other) {
return new FloatTag(other.getFloat());
}
public static ShortTag fromNative(NBTTagShort other) {
return new ShortTag(other.getShort());
}
public static DoubleTag fromNative(NBTTagDouble other) {
return new DoubleTag(other.getDouble());
}
}
| |
/*
* Written by Doug Lea with assistance from members of JCP JSR-166
* Expert Group and released to the public domain, as explained at
* http://creativecommons.org/licenses/publicdomain
* Other contributors include Andrew Wright, Jeffrey Hayes,
* Pat Fisher, Mike Judd.
*/
package tests.api.java.util.concurrent; // android-added
import java.util.concurrent.*;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import java.util.concurrent.atomic.*;
import junit.framework.*;
import java.util.*;
public class ThreadPoolExecutorTest extends JSR166TestCase {
public static Test suite() {
// Standard JUnit 3 entry point: collects every test* method in this class.
return new TestSuite(ThreadPoolExecutorTest.class);
}
/**
 * ThreadPoolExecutor subclass that records whether each of the protected
 * life-cycle hooks (beforeExecute, afterExecute, terminated) was invoked.
 * The flags are volatile because the hooks run on pool threads while the
 * test thread reads the flags.
 */
static class ExtendedTPE extends ThreadPoolExecutor {
    volatile boolean beforeCalled = false;
    volatile boolean afterCalled = false;
    volatile boolean terminatedCalled = false;

    /** Single-threaded pool with a synchronous hand-off queue. */
    public ExtendedTPE() {
        super(1, 1, LONG_DELAY_MS, MILLISECONDS, new SynchronousQueue<Runnable>());
    }

    @Override
    protected void beforeExecute(Thread t, Runnable r) {
        beforeCalled = true;
    }

    @Override
    protected void afterExecute(Runnable r, Throwable t) {
        afterCalled = true;
    }

    @Override
    protected void terminated() {
        terminatedCalled = true;
    }
}
/**
 * ThreadFactory that succeeds only on the first request and returns null
 * afterwards, simulating a factory whose thread creation fails.
 * {@code calls} is volatile so the count is visible when the factory is
 * invoked from a pool thread and later read by the test thread.
 * NOTE(review): {@code ++} on a volatile is not atomic; this assumes the
 * executor does not invoke the factory concurrently — confirm at call sites.
 */
static class FailingThreadFactory implements ThreadFactory {
    volatile int calls = 0;

    public Thread newThread(Runnable r) {
        if (++calls > 1) return null; // fail every request after the first
        return new Thread(r);
    }
}
/**
 * execute successfully executes a runnable
 */
public void testExecute() throws InterruptedException {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
p1.execute(new ShortRunnable());
// Give the short task time to finish before the pool is drained.
Thread.sleep(SMALL_DELAY_MS);
} finally {
joinPool(p1);
}
}
/**
 * getActiveCount increases but doesn't overestimate, when a
 * thread becomes active
 */
public void testGetActiveCount() throws InterruptedException {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p2.getActiveCount());
p2.execute(new MediumRunnable());
// Brief sleep lets a worker thread actually pick up the task.
Thread.sleep(SHORT_DELAY_MS);
assertEquals(1, p2.getActiveCount());
joinPool(p2);
}
/**
 * prestartCoreThread starts a thread if under corePoolSize, else doesn't
 */
public void testPrestartCoreThread() {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p2.getPoolSize());
assertTrue(p2.prestartCoreThread());
assertEquals(1, p2.getPoolSize());
assertTrue(p2.prestartCoreThread());
assertEquals(2, p2.getPoolSize());
// Already at corePoolSize (2): further prestarts are no-ops.
assertFalse(p2.prestartCoreThread());
assertEquals(2, p2.getPoolSize());
joinPool(p2);
}
/**
 * prestartAllCoreThreads starts all corePoolSize threads
 */
public void testPrestartAllCoreThreads() {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p2.getPoolSize());
p2.prestartAllCoreThreads();
assertEquals(2, p2.getPoolSize());
// Second call must not create additional threads.
p2.prestartAllCoreThreads();
assertEquals(2, p2.getPoolSize());
joinPool(p2);
}
/**
 * getCompletedTaskCount increases, but doesn't overestimate,
 * when tasks complete
 */
public void testGetCompletedTaskCount() throws InterruptedException {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p2.getCompletedTaskCount());
p2.execute(new ShortRunnable());
// Wait long enough for the short task to complete.
Thread.sleep(SMALL_DELAY_MS);
assertEquals(1, p2.getCompletedTaskCount());
try { p2.shutdown(); } catch (SecurityException ok) { return; }
joinPool(p2);
}
/**
 * getCorePoolSize returns size given in constructor if not otherwise set
 */
public void testGetCorePoolSize() {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(1, p1.getCorePoolSize());
joinPool(p1);
}
/**
 * getKeepAliveTime returns value given in constructor if not otherwise set
 */
public void testGetKeepAliveTime() {
// Constructed with 1000 ms keep-alive, queried in seconds.
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, 1000, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(1, p2.getKeepAliveTime(TimeUnit.SECONDS));
joinPool(p2);
}
/**
 * getThreadFactory returns factory in constructor if not set
 */
public void testGetThreadFactory() {
ThreadFactory tf = new SimpleThreadFactory();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10), tf, new NoOpREHandler());
// Identity, not equality: the exact factory instance must be returned.
assertSame(tf, p.getThreadFactory());
joinPool(p);
}
/**
 * setThreadFactory sets the thread factory returned by getThreadFactory
 */
public void testSetThreadFactory() {
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
ThreadFactory tf = new SimpleThreadFactory();
p.setThreadFactory(tf);
assertSame(tf, p.getThreadFactory());
joinPool(p);
}
/**
 * setThreadFactory(null) throws NPE
 */
public void testSetThreadFactoryNull() {
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
p.setThreadFactory(null);
shouldThrow();
} catch (NullPointerException success) {
} finally {
joinPool(p);
}
}
/**
 * getRejectedExecutionHandler returns handler in constructor if not set
 */
public void testGetRejectedExecutionHandler() {
RejectedExecutionHandler h = new NoOpREHandler();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10), h);
assertSame(h, p.getRejectedExecutionHandler());
joinPool(p);
}
/**
 * setRejectedExecutionHandler sets the handler returned by
 * getRejectedExecutionHandler
 */
public void testSetRejectedExecutionHandler() {
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
RejectedExecutionHandler h = new NoOpREHandler();
p.setRejectedExecutionHandler(h);
assertSame(h, p.getRejectedExecutionHandler());
joinPool(p);
}
/**
 * setRejectedExecutionHandler(null) throws NPE
 */
public void testSetRejectedExecutionHandlerNull() {
ThreadPoolExecutor p = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
p.setRejectedExecutionHandler(null);
shouldThrow();
} catch (NullPointerException success) {
} finally {
joinPool(p);
}
}
/**
 * getLargestPoolSize increases, but doesn't overestimate, when
 * multiple threads active
 */
public void testGetLargestPoolSize() throws InterruptedException {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p2.getLargestPoolSize());
p2.execute(new MediumRunnable());
p2.execute(new MediumRunnable());
// Brief sleep lets both workers start before the high-water mark is read.
Thread.sleep(SHORT_DELAY_MS);
assertEquals(2, p2.getLargestPoolSize());
joinPool(p2);
}
/**
 * getMaximumPoolSize returns value given in constructor if not
 * otherwise set
 */
public void testGetMaximumPoolSize() {
ThreadPoolExecutor p2 = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(2, p2.getMaximumPoolSize());
joinPool(p2);
}
/**
 * getPoolSize increases, but doesn't overestimate, when threads
 * become active
 */
public void testGetPoolSize() {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p1.getPoolSize());
// Submitting the first task creates the core thread synchronously.
p1.execute(new MediumRunnable());
assertEquals(1, p1.getPoolSize());
joinPool(p1);
}
/**
 * getTaskCount increases, but doesn't overestimate, when tasks submitted
 */
public void testGetTaskCount() throws InterruptedException {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertEquals(0, p1.getTaskCount());
p1.execute(new MediumRunnable());
Thread.sleep(SHORT_DELAY_MS);
assertEquals(1, p1.getTaskCount());
joinPool(p1);
}
/**
 * isShutDown is false before shutdown, true after
 */
public void testIsShutdown() {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertFalse(p1.isShutdown());
// SecurityException means the environment forbids shutdown; skip the test.
try { p1.shutdown(); } catch (SecurityException ok) { return; }
assertTrue(p1.isShutdown());
joinPool(p1);
}
/**
 * isTerminated is false before termination, true after
 */
public void testIsTerminated() throws InterruptedException {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertFalse(p1.isTerminated());
try {
p1.execute(new MediumRunnable());
} finally {
try { p1.shutdown(); } catch (SecurityException ok) { return; }
}
// Termination completes only after the in-flight task finishes.
assertTrue(p1.awaitTermination(LONG_DELAY_MS, MILLISECONDS));
assertTrue(p1.isTerminated());
}
/**
 * isTerminating is not true when running or when terminated
 */
public void testIsTerminating() throws InterruptedException {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
assertFalse(p1.isTerminating());
try {
p1.execute(new SmallRunnable());
assertFalse(p1.isTerminating());
} finally {
try { p1.shutdown(); } catch (SecurityException ok) { return; }
}
assertTrue(p1.awaitTermination(LONG_DELAY_MS, MILLISECONDS));
assertTrue(p1.isTerminated());
// isTerminating is only true in the window between shutdown and terminated.
assertFalse(p1.isTerminating());
}
/**
 * getQueue returns the work queue, which contains queued tasks
 */
public void testGetQueue() throws InterruptedException {
BlockingQueue<Runnable> q = new ArrayBlockingQueue<Runnable>(10);
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, q);
FutureTask[] tasks = new FutureTask[5];
for (int i = 0; i < 5; i++) {
tasks[i] = new FutureTask(new MediumPossiblyInterruptedRunnable(), Boolean.TRUE);
p1.execute(tasks[i]);
}
try {
Thread.sleep(SHORT_DELAY_MS);
BlockingQueue<Runnable> wq = p1.getQueue();
// getQueue must expose the very queue passed to the constructor.
assertSame(q, wq);
// tasks[0] is running on the single worker, so it is no longer queued.
assertFalse(wq.contains(tasks[0]));
assertTrue(wq.contains(tasks[4]));
for (int i = 1; i < 5; ++i)
tasks[i].cancel(true);
p1.shutdownNow();
} finally {
joinPool(p1);
}
}
/**
 * remove(task) removes queued task, and fails to remove active task
 */
public void testRemove() throws InterruptedException {
BlockingQueue<Runnable> q = new ArrayBlockingQueue<Runnable>(10);
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, q);
FutureTask[] tasks = new FutureTask[5];
for (int i = 0; i < 5; i++) {
tasks[i] = new FutureTask(new MediumPossiblyInterruptedRunnable(), Boolean.TRUE);
p1.execute(tasks[i]);
}
try {
Thread.sleep(SHORT_DELAY_MS);
// tasks[0] is already executing, so remove must fail for it.
assertFalse(p1.remove(tasks[0]));
assertTrue(q.contains(tasks[4]));
assertTrue(q.contains(tasks[3]));
assertTrue(p1.remove(tasks[4]));
// A second remove of the same task must fail.
assertFalse(p1.remove(tasks[4]));
assertFalse(q.contains(tasks[4]));
assertTrue(q.contains(tasks[3]));
assertTrue(p1.remove(tasks[3]));
assertFalse(q.contains(tasks[3]));
} finally {
joinPool(p1);
}
}
/**
 * purge removes cancelled tasks from the queue
 */
public void testPurge() {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
FutureTask[] tasks = new FutureTask[5];
for (int i = 0; i < 5; i++) {
tasks[i] = new FutureTask(new MediumPossiblyInterruptedRunnable(), Boolean.TRUE);
p1.execute(tasks[i]);
}
tasks[4].cancel(true);
tasks[3].cancel(true);
p1.purge();
// At least the two cancelled tasks are gone; the rest may still be pending.
long count = p1.getTaskCount();
assertTrue(count >= 2 && count < 5);
joinPool(p1);
}
/**
 * shutDownNow returns a list containing tasks that were not run
 */
public void testShutDownNow() {
ThreadPoolExecutor p1 = new ThreadPoolExecutor(1, 1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
List l;
try {
for (int i = 0; i < 5; i++)
p1.execute(new MediumPossiblyInterruptedRunnable());
}
finally {
try {
l = p1.shutdownNow();
} catch (SecurityException ok) { return; }
}
assertTrue(p1.isShutdown());
// One task may already be running, so at most 4 are returned unexecuted.
assertTrue(l.size() <= 4);
}
// Exception Tests
// The following battery exercises every ThreadPoolExecutor constructor
// overload with each invalid argument in turn: negative/zero pool sizes,
// negative keep-alive, core > max, and null queue/factory/handler.
/**
 * Constructor throws if corePoolSize argument is less than zero
 */
public void testConstructor1() {
try {
new ThreadPoolExecutor(-1,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is less than zero
 */
public void testConstructor2() {
try {
new ThreadPoolExecutor(1,-1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is equal to zero
 */
public void testConstructor3() {
try {
new ThreadPoolExecutor(1,0,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if keepAliveTime is less than zero
 */
public void testConstructor4() {
try {
new ThreadPoolExecutor(1,2,-1L,MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if corePoolSize is greater than the maximumPoolSize
 */
public void testConstructor5() {
try {
new ThreadPoolExecutor(2,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if workQueue is set to null
 */
public void testConstructorNullPointerException() {
try {
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,null);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if corePoolSize argument is less than zero
 */
public void testConstructor6() {
try {
new ThreadPoolExecutor(-1,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is less than zero
 */
public void testConstructor7() {
try {
new ThreadPoolExecutor(1,-1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is equal to zero
 */
public void testConstructor8() {
try {
new ThreadPoolExecutor(1,0,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if keepAliveTime is less than zero
 */
public void testConstructor9() {
try {
new ThreadPoolExecutor(1,2,-1L,MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if corePoolSize is greater than the maximumPoolSize
 */
public void testConstructor10() {
try {
new ThreadPoolExecutor(2,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if workQueue is set to null
 */
public void testConstructorNullPointerException2() {
try {
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,null,new SimpleThreadFactory());
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if threadFactory is set to null
 */
public void testConstructorNullPointerException3() {
try {
ThreadFactory f = null;
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(10),f);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if corePoolSize argument is less than zero
 */
public void testConstructor11() {
try {
new ThreadPoolExecutor(-1,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is less than zero
 */
public void testConstructor12() {
try {
new ThreadPoolExecutor(1,-1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is equal to zero
 */
public void testConstructor13() {
try {
new ThreadPoolExecutor(1,0,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if keepAliveTime is less than zero
 */
public void testConstructor14() {
try {
new ThreadPoolExecutor(1,2,-1L,MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if corePoolSize is greater than the maximumPoolSize
 */
public void testConstructor15() {
try {
new ThreadPoolExecutor(2,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if workQueue is set to null
 */
public void testConstructorNullPointerException4() {
try {
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,null,new NoOpREHandler());
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if handler is set to null
 */
public void testConstructorNullPointerException5() {
try {
RejectedExecutionHandler r = null;
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(10),r);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if corePoolSize argument is less than zero
 */
public void testConstructor16() {
try {
new ThreadPoolExecutor(-1,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is less than zero
 */
public void testConstructor17() {
try {
new ThreadPoolExecutor(1,-1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if maximumPoolSize is equal to zero
 */
public void testConstructor18() {
try {
new ThreadPoolExecutor(1,0,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if keepAliveTime is less than zero
 */
public void testConstructor19() {
try {
new ThreadPoolExecutor(1,2,-1L,MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if corePoolSize is greater than the maximumPoolSize
 */
public void testConstructor20() {
try {
new ThreadPoolExecutor(2,1,LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (IllegalArgumentException success) {}
}
/**
 * Constructor throws if workQueue is set to null
 */
public void testConstructorNullPointerException6() {
try {
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,null,new SimpleThreadFactory(),new NoOpREHandler());
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if handler is set to null
 */
public void testConstructorNullPointerException7() {
try {
RejectedExecutionHandler r = null;
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(10),new SimpleThreadFactory(),r);
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * Constructor throws if ThreadFactory is set to null
 */
public void testConstructorNullPointerException8() {
try {
ThreadFactory f = null;
new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(10),f,new NoOpREHandler());
shouldThrow();
} catch (NullPointerException success) {}
}
/**
 * execute throws RejectedExecutionException
 * if saturated.
 */
public void testSaturatedExecute() {
ThreadPoolExecutor p =
new ThreadPoolExecutor(1, 1,
LONG_DELAY_MS, MILLISECONDS,
new ArrayBlockingQueue<Runnable>(1));
try {
// Fill the pool: one task running, one task in the capacity-1 queue.
for (int i = 0; i < 2; ++i)
p.execute(new MediumRunnable());
for (int i = 0; i < 2; ++i) {
try {
p.execute(new MediumRunnable());
shouldThrow();
} catch (RejectedExecutionException success) {}
}
} finally {
joinPool(p);
}
}
/**
 * executor using CallerRunsPolicy runs task if saturated.
 */
public void testSaturatedExecute2() {
RejectedExecutionHandler h = new ThreadPoolExecutor.CallerRunsPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try {
TrackedNoOpRunnable[] tasks = new TrackedNoOpRunnable[5];
for (int i = 0; i < 5; ++i) {
tasks[i] = new TrackedNoOpRunnable();
}
// The long task occupies the single worker for the whole test.
TrackedLongRunnable mr = new TrackedLongRunnable();
p.execute(mr);
for (int i = 0; i < 5; ++i) {
p.execute(tasks[i]);
}
// tasks[0] went into the capacity-1 queue (not done); tasks[1..4] were
// rejected and therefore run synchronously by the caller, so the loop
// deliberately starts at 1.
for (int i = 1; i < 5; ++i) {
assertTrue(tasks[i].done);
}
try { p.shutdownNow(); } catch (SecurityException ok) { return; }
} finally {
joinPool(p);
}
}
/**
 * executor using DiscardPolicy drops task if saturated.
 */
public void testSaturatedExecute3() {
RejectedExecutionHandler h = new ThreadPoolExecutor.DiscardPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try {
TrackedNoOpRunnable[] tasks = new TrackedNoOpRunnable[5];
for (int i = 0; i < 5; ++i) {
tasks[i] = new TrackedNoOpRunnable();
}
p.execute(new TrackedLongRunnable());
for (int i = 0; i < 5; ++i) {
p.execute(tasks[i]);
}
// With the worker blocked, none of the tracked tasks can have run:
// one sits in the queue, the rest were silently discarded.
for (int i = 0; i < 5; ++i) {
assertFalse(tasks[i].done);
}
try { p.shutdownNow(); } catch (SecurityException ok) { return; }
} finally {
joinPool(p);
}
}
/**
 * executor using DiscardOldestPolicy drops oldest task if saturated.
 */
public void testSaturatedExecute4() {
RejectedExecutionHandler h = new ThreadPoolExecutor.DiscardOldestPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try {
p.execute(new TrackedLongRunnable());
TrackedLongRunnable r2 = new TrackedLongRunnable();
p.execute(r2);
assertTrue(p.getQueue().contains(r2));
TrackedNoOpRunnable r3 = new TrackedNoOpRunnable();
p.execute(r3);
// r3's arrival evicted r2, the oldest queued task.
assertFalse(p.getQueue().contains(r2));
assertTrue(p.getQueue().contains(r3));
try { p.shutdownNow(); } catch (SecurityException ok) { return; }
} finally {
joinPool(p);
}
}
/**
 * execute throws RejectedExecutionException if shutdown
 */
public void testRejectedExecutionExceptionOnShutdown() {
ThreadPoolExecutor tpe =
new ThreadPoolExecutor(1,1,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(1));
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
try {
tpe.execute(new NoOpRunnable());
shouldThrow();
} catch (RejectedExecutionException success) {}
joinPool(tpe);
}
/**
 * execute using CallerRunsPolicy drops task on shutdown
 */
public void testCallerRunsOnShutdown() {
RejectedExecutionHandler h = new ThreadPoolExecutor.CallerRunsPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try { p.shutdown(); } catch (SecurityException ok) { return; }
try {
TrackedNoOpRunnable r = new TrackedNoOpRunnable();
p.execute(r);
// After shutdown the handler discards instead of running in the caller.
assertFalse(r.done);
} finally {
joinPool(p);
}
}
/**
 * execute using DiscardPolicy drops task on shutdown
 */
public void testDiscardOnShutdown() {
RejectedExecutionHandler h = new ThreadPoolExecutor.DiscardPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try { p.shutdown(); } catch (SecurityException ok) { return; }
try {
TrackedNoOpRunnable r = new TrackedNoOpRunnable();
p.execute(r);
assertFalse(r.done);
} finally {
joinPool(p);
}
}
/**
 * execute using DiscardOldestPolicy drops task on shutdown
 */
public void testDiscardOldestOnShutdown() {
RejectedExecutionHandler h = new ThreadPoolExecutor.DiscardOldestPolicy();
ThreadPoolExecutor p = new ThreadPoolExecutor(1,1, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(1), h);
try { p.shutdown(); } catch (SecurityException ok) { return; }
try {
TrackedNoOpRunnable r = new TrackedNoOpRunnable();
p.execute(r);
assertFalse(r.done);
} finally {
joinPool(p);
}
}
/**
 * execute (null) throws NPE
 */
public void testExecuteNull() {
ThreadPoolExecutor tpe = new ThreadPoolExecutor(1,2,LONG_DELAY_MS, MILLISECONDS,new ArrayBlockingQueue<Runnable>(10));
try {
tpe.execute(null);
shouldThrow();
} catch (NullPointerException success) {}
joinPool(tpe);
}
/**
 * setCorePoolSize of negative value throws IllegalArgumentException
 */
public void testCorePoolSizeIllegalArgumentException() {
ThreadPoolExecutor tpe =
new ThreadPoolExecutor(1, 2,
LONG_DELAY_MS, MILLISECONDS,
new ArrayBlockingQueue<Runnable>(10));
try {
tpe.setCorePoolSize(-1);
shouldThrow();
} catch (IllegalArgumentException success) {
} finally {
// Shut the pool down even when the expected exception was thrown.
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
}
joinPool(tpe);
}
/**
 * setMaximumPoolSize(int) throws IllegalArgumentException if
 * given a value less the core pool size
 */
public void testMaximumPoolSizeIllegalArgumentException() {
ThreadPoolExecutor tpe =
new ThreadPoolExecutor(2, 3,
LONG_DELAY_MS, MILLISECONDS,
new ArrayBlockingQueue<Runnable>(10));
try {
// 1 is below the core size of 2, so this must be rejected.
tpe.setMaximumPoolSize(1);
shouldThrow();
} catch (IllegalArgumentException success) {
} finally {
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
}
joinPool(tpe);
}
/**
 * setMaximumPoolSize throws IllegalArgumentException
 * if given a negative value
 */
public void testMaximumPoolSizeIllegalArgumentException2() {
ThreadPoolExecutor tpe =
new ThreadPoolExecutor(2, 3,
LONG_DELAY_MS, MILLISECONDS,
new ArrayBlockingQueue<Runnable>(10));
try {
tpe.setMaximumPoolSize(-1);
shouldThrow();
} catch (IllegalArgumentException success) {
} finally {
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
}
joinPool(tpe);
}
/**
 * setKeepAliveTime throws IllegalArgumentException
 * when given a negative value
 */
public void testKeepAliveTimeIllegalArgumentException() {
ThreadPoolExecutor tpe =
new ThreadPoolExecutor(2, 3,
LONG_DELAY_MS, MILLISECONDS,
new ArrayBlockingQueue<Runnable>(10));
try {
tpe.setKeepAliveTime(-1,MILLISECONDS);
shouldThrow();
} catch (IllegalArgumentException success) {
} finally {
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
}
joinPool(tpe);
}
/**
 * terminated() is called on termination
 */
public void testTerminated() {
ExtendedTPE tpe = new ExtendedTPE();
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
// No tasks were submitted, so shutdown terminates the pool immediately.
assertTrue(tpe.terminatedCalled);
joinPool(tpe);
}
/**
 * beforeExecute and afterExecute are called when executing task
 */
public void testBeforeAfter() throws InterruptedException {
ExtendedTPE tpe = new ExtendedTPE();
try {
TrackedNoOpRunnable r = new TrackedNoOpRunnable();
tpe.execute(r);
// Brief sleep gives the worker time to run the task and both hooks.
Thread.sleep(SHORT_DELAY_MS);
assertTrue(r.done);
assertTrue(tpe.beforeCalled);
assertTrue(tpe.afterCalled);
try { tpe.shutdown(); } catch (SecurityException ok) { return; }
} finally {
joinPool(tpe);
}
}
/**
 * completed submit of callable returns result
 */
public void testSubmitCallable() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
Future<String> future = e.submit(new StringTask());
String result = future.get();
// StringTask yields the shared TEST_STRING instance.
assertSame(TEST_STRING, result);
} finally {
joinPool(e);
}
}
/**
 * completed submit of runnable returns successfully
 */
public void testSubmitRunnable() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
Future<?> future = e.submit(new NoOpRunnable());
future.get();
assertTrue(future.isDone());
} finally {
joinPool(e);
}
}
/**
 * completed submit of (runnable, result) returns result
 */
public void testSubmitRunnable2() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
Future<String> future = e.submit(new NoOpRunnable(), TEST_STRING);
String result = future.get();
assertSame(TEST_STRING, result);
} finally {
joinPool(e);
}
}
/**
 * invokeAny(null) throws NPE
 */
public void testInvokeAny1() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
e.invokeAny(null);
shouldThrow();
} catch (NullPointerException success) {
} finally {
joinPool(e);
}
}
/**
 * invokeAny(empty collection) throws IAE
 */
public void testInvokeAny2() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
e.invokeAny(new ArrayList<Callable<String>>());
shouldThrow();
} catch (IllegalArgumentException success) {
} finally {
joinPool(e);
}
}
/**
 * invokeAny(c) throws NPE if c has null elements
 */
public void testInvokeAny3() throws Exception {
CountDownLatch latch = new CountDownLatch(1);
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
List<Callable<String>> l = new ArrayList<Callable<String>>();
// First element blocks on the latch so the null element is what fails.
l.add(latchAwaitingStringTask(latch));
l.add(null);
try {
e.invokeAny(l);
shouldThrow();
} catch (NullPointerException success) {
} finally {
// Release the blocked task before draining the pool.
latch.countDown();
joinPool(e);
}
}
/**
 * invokeAny(c) throws ExecutionException if no task completes
 */
public void testInvokeAny4() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
List<Callable<String>> l = new ArrayList<Callable<String>>();
l.add(new NPETask());
try {
e.invokeAny(l);
shouldThrow();
} catch (ExecutionException success) {
// The task's NPE must be preserved as the cause.
assertTrue(success.getCause() instanceof NullPointerException);
} finally {
joinPool(e);
}
}
/**
 * invokeAny(c) returns result of some task
 */
public void testInvokeAny5() throws Exception {
ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
try {
List<Callable<String>> l = new ArrayList<Callable<String>>();
l.add(new StringTask());
l.add(new StringTask());
String result = e.invokeAny(l);
assertSame(TEST_STRING, result);
} finally {
joinPool(e);
}
}
/**
 * invokeAll(null) throws NPE
 */
public void testInvokeAll1() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        e.invokeAll(null);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * invokeAll(empty collection) returns empty collection
 */
public void testInvokeAll2() throws InterruptedException {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Future<String>> r = e.invokeAll(new ArrayList<Callable<String>>());
        assertTrue(r.isEmpty());
    } finally {
        joinPool(e);
    }
}

/**
 * invokeAll(c) throws NPE if c has null elements
 */
public void testInvokeAll3() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(new StringTask());
    l.add(null);
    try {
        e.invokeAll(l);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * get of element of invokeAll(c) throws exception on failed task
 */
public void testInvokeAll4() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new NPETask());
        List<Future<String>> futures = e.invokeAll(l);
        assertEquals(1, futures.size());
        try {
            // invokeAll itself succeeds; the failure surfaces via the future.
            futures.get(0).get();
            shouldThrow();
        } catch (ExecutionException success) {
            assertTrue(success.getCause() instanceof NullPointerException);
        }
    } finally {
        joinPool(e);
    }
}

/**
 * invokeAll(c) returns results of all completed tasks
 */
public void testInvokeAll5() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new StringTask());
        l.add(new StringTask());
        List<Future<String>> futures = e.invokeAll(l);
        assertEquals(2, futures.size());
        for (Future<String> future : futures)
            assertSame(TEST_STRING, future.get());
    } finally {
        joinPool(e);
    }
}
/**
 * timed invokeAny(null) throws NPE
 */
public void testTimedInvokeAny1() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        e.invokeAny(null, MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAny(,,null) throws NPE
 */
public void testTimedInvokeAnyNullTimeUnit() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(new StringTask());
    try {
        // Null TimeUnit must be rejected even when the task list is valid.
        e.invokeAny(l, MEDIUM_DELAY_MS, null);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAny(empty collection) throws IAE
 */
public void testTimedInvokeAny2() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        e.invokeAny(new ArrayList<Callable<String>>(), MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (IllegalArgumentException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAny(c) throws NPE if c has null elements
 */
public void testTimedInvokeAny3() throws Exception {
    // Latch keeps the valid task pending so the null-element check must fire.
    CountDownLatch latch = new CountDownLatch(1);
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(latchAwaitingStringTask(latch));
    l.add(null);
    try {
        e.invokeAny(l, MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        // Release the waiting task so the pool can shut down cleanly.
        latch.countDown();
        joinPool(e);
    }
}

/**
 * timed invokeAny(c) throws ExecutionException if no task completes
 */
public void testTimedInvokeAny4() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(new NPETask());
    try {
        e.invokeAny(l, MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (ExecutionException success) {
        // The task's NPE must be preserved as the cause.
        assertTrue(success.getCause() instanceof NullPointerException);
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAny(c) returns result of some task
 */
public void testTimedInvokeAny5() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new StringTask());
        l.add(new StringTask());
        String result = e.invokeAny(l, MEDIUM_DELAY_MS, MILLISECONDS);
        assertSame(TEST_STRING, result);
    } finally {
        joinPool(e);
    }
}
/**
 * timed invokeAll(null) throws NPE
 */
public void testTimedInvokeAll1() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        e.invokeAll(null, MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAll(,,null) throws NPE
 */
public void testTimedInvokeAllNullTimeUnit() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(new StringTask());
    try {
        // Null TimeUnit must be rejected even when the task list is valid.
        e.invokeAll(l, MEDIUM_DELAY_MS, null);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAll(empty collection) returns empty collection
 */
public void testTimedInvokeAll2() throws InterruptedException {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Future<String>> r = e.invokeAll(new ArrayList<Callable<String>>(), MEDIUM_DELAY_MS, MILLISECONDS);
        assertTrue(r.isEmpty());
    } finally {
        joinPool(e);
    }
}

/**
 * timed invokeAll(c) throws NPE if c has null elements
 */
public void testTimedInvokeAll3() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    List<Callable<String>> l = new ArrayList<Callable<String>>();
    l.add(new StringTask());
    l.add(null);
    try {
        e.invokeAll(l, MEDIUM_DELAY_MS, MILLISECONDS);
        shouldThrow();
    } catch (NullPointerException success) {
        // expected
    } finally {
        joinPool(e);
    }
}
/**
 * get of element of invokeAll(c) throws exception on failed task
 */
public void testTimedInvokeAll4() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    // Wrap everything in try/finally so the pool is joined even if invokeAll
    // itself throws (the previous version leaked the pool in that case, and was
    // inconsistent with the untimed testInvokeAll4).
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new NPETask());
        List<Future<String>> futures =
            e.invokeAll(l, MEDIUM_DELAY_MS, MILLISECONDS);
        assertEquals(1, futures.size());
        try {
            // invokeAll itself succeeds; the failure surfaces via the future.
            futures.get(0).get();
            shouldThrow();
        } catch (ExecutionException success) {
            // The task's NPE must be preserved as the cause.
            assertTrue(success.getCause() instanceof NullPointerException);
        }
    } finally {
        joinPool(e);
    }
}
/**
 * timed invokeAll(c) returns results of all completed tasks
 */
public void testTimedInvokeAll5() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new StringTask());
        l.add(new StringTask());
        List<Future<String>> futures =
            e.invokeAll(l, MEDIUM_DELAY_MS, MILLISECONDS);
        assertEquals(2, futures.size());
        for (Future<String> future : futures)
            assertSame(TEST_STRING, future.get());
    } finally {
        joinPool(e);
    }
}
/**
 * timed invokeAll(c) cancels tasks not completed by timeout
 */
public void testTimedInvokeAll6() throws Exception {
    ExecutorService e = new ThreadPoolExecutor(2, 2, LONG_DELAY_MS, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    try {
        List<Callable<String>> l = new ArrayList<Callable<String>>();
        l.add(new StringTask());
        // The middle task outlives the SHORT_DELAY_MS timeout, so it should be
        // cancelled while its neighbors complete normally.
        l.add(Executors.callable(new MediumPossiblyInterruptedRunnable(), TEST_STRING));
        l.add(new StringTask());
        List<Future<String>> futures =
            e.invokeAll(l, SHORT_DELAY_MS, MILLISECONDS);
        assertEquals(3, futures.size());
        Iterator<Future<String>> it = futures.iterator();
        Future<String> f1 = it.next();
        Future<String> f2 = it.next();
        Future<String> f3 = it.next();
        // invokeAll guarantees every future is done on return, whether it
        // completed normally or was cancelled at the timeout.
        assertTrue(f1.isDone());
        assertTrue(f2.isDone());
        assertTrue(f3.isDone());
        assertFalse(f1.isCancelled());
        assertTrue(f2.isCancelled());
    } finally {
        joinPool(e);
    }
}
/**
 * Execution continues if there is at least one thread even if
 * thread factory fails to create more
 */
public void testFailingThreadFactory() throws InterruptedException {
    ExecutorService e = new ThreadPoolExecutor(100, 100, LONG_DELAY_MS, MILLISECONDS, new LinkedBlockingQueue<Runnable>(), new FailingThreadFactory());
    try {
        // Submit enough tasks to force many thread-creation attempts; the
        // factory fails after the first thread, yet execution must continue.
        // (Removed an unused List<Callable<String>> local left over from a
        // copy/paste of the invoke* tests.)
        for (int k = 0; k < 100; ++k) {
            e.execute(new NoOpRunnable());
        }
        Thread.sleep(LONG_DELAY_MS);
    } finally {
        joinPool(e);
    }
}
/**
 * allowsCoreThreadTimeOut is by default false.
 */
public void testAllowsCoreThreadTimeOut() {
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(2, 2, 1000, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    // Freshly constructed pool: core threads must not time out by default.
    assertFalse(tpe.allowsCoreThreadTimeOut());
    joinPool(tpe);
}
/**
 * allowCoreThreadTimeOut(true) causes idle threads to time out
 */
public void testAllowCoreThreadTimeOut_true() throws InterruptedException {
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(2, 10, 10, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    tpe.allowCoreThreadTimeOut(true);
    tpe.execute(new NoOpRunnable());
    try {
        // NOTE(review): sleep-based check — assumes MEDIUM_DELAY_MS comfortably
        // exceeds the 10ms keep-alive so the idle core thread has expired.
        Thread.sleep(MEDIUM_DELAY_MS);
        assertEquals(0, tpe.getPoolSize());
    } finally {
        joinPool(tpe);
    }
}

/**
 * allowCoreThreadTimeOut(false) causes idle threads not to time out
 */
public void testAllowCoreThreadTimeOut_false() throws InterruptedException {
    ThreadPoolExecutor tpe = new ThreadPoolExecutor(2, 10, 10, MILLISECONDS, new ArrayBlockingQueue<Runnable>(10));
    tpe.allowCoreThreadTimeOut(false);
    tpe.execute(new NoOpRunnable());
    try {
        Thread.sleep(MEDIUM_DELAY_MS);
        // Core threads are retained despite the short keep-alive.
        assertTrue(tpe.getPoolSize() >= 1);
    } finally {
        joinPool(tpe);
    }
}
/**
 * execute allows the same task to be submitted multiple times, even
 * if rejected
 */
public void testRejectedRecycledTask() throws InterruptedException {
    final int nTasks = 1000;
    final AtomicInteger nRun = new AtomicInteger(0);
    final Runnable recycledTask = new Runnable() {
        public void run() {
            nRun.getAndIncrement();
        } };
    // Parameterize the queue (was a raw ArrayBlockingQueue, causing an
    // unchecked-conversion warning).
    final ThreadPoolExecutor p =
        new ThreadPoolExecutor(1, 30, 60, TimeUnit.SECONDS,
                               new ArrayBlockingQueue<Runnable>(30));
    try {
        for (int i = 0; i < nTasks; ++i) {
            for (;;) {
                try {
                    p.execute(recycledTask);
                    break;
                }
                catch (RejectedExecutionException ignore) {
                    // Pool saturated: spin and resubmit the same task instance.
                }
            }
        }
        Thread.sleep(5000); // enough time to run all tasks
        // assertEquals takes (expected, actual) — the original had them swapped,
        // which garbles the failure message.
        assertEquals(nTasks, nRun.get());
    } finally {
        // Use the suite's joinPool (awaits termination), consistent with the
        // other tests, instead of a bare shutdown().
        joinPool(p);
    }
}
// BEGIN android-added
/** http://b/3046427 */
public void testRejected() {
    BlockingQueue<Runnable> queue = new LinkedBlockingQueue<Runnable>(2);
    ExecutorService executor = new ThreadPoolExecutor(0, 2, 1, TimeUnit.SECONDS, queue);
    executor.submit(new Sleeper()); // thread #1
    executor.submit(new Sleeper()); // thread #2
    executor.submit(new Sleeper()); // queue #1
    executor.submit(new Sleeper()); // queue #2
    try {
        // Pool and queue are both full; the fifth submission must be rejected
        // with a non-null message (regression test for http://b/3046427).
        executor.submit(new Sleeper());
        fail();
    } catch (RejectedExecutionException expected) {
        // Removed a leftover System.out.println debug statement that polluted
        // the test output; the assertion below is the actual check.
        assertNotNull(expected.getMessage());
    }
    executor.shutdown();
}
/** Runnable that blocks for one second, used to saturate pools in testRejected. */
static class Sleeper implements Runnable {
    public void run() {
        try {
            Thread.sleep(1000);
        } catch (InterruptedException e) {
            // Restore the interrupt status instead of dumping a stack trace,
            // so callers (and the pool) can observe the interruption.
            Thread.currentThread().interrupt();
        }
    }
}
// END android-added
}
| |
/*
* Copyright 2012-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.android;
import com.facebook.buck.event.ConsoleEvent;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.shell.ShellStep;
import com.facebook.buck.step.ExecutionContext;
import com.facebook.buck.step.StepExecutionResult;
import com.facebook.buck.util.Verbosity;
import com.google.common.base.Preconditions;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.PrintStream;
import java.nio.file.Path;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Set;
import javax.annotation.Nullable;
/**
 * {@link ShellStep} that runs the Android {@code dx} tool to produce a
 * {@code classes.dex} file from a set of .class inputs. Can optionally run the
 * dexer in-process instead of shelling out.
 */
public class DxStep extends ShellStep {
  /**
   * Extra argument injected into the {@code dx} command line (intended for a
   * JVM max-heap override). Empty here, so no override is added; see
   * {@link #getShellCommandInternal}.
   */
  public static final String XMX_OVERRIDE =
      "";

  /** Options to pass to {@code dx}. */
  public enum Option {
    /** Specify the {@code --no-optimize} flag when running {@code dx}. */
    NO_OPTIMIZE,

    /** Specify the {@code --force-jumbo} flag when running {@code dx}. */
    FORCE_JUMBO,

    /**
     * See if the {@code buck.dx} property was specified, and if so, use the executable that that
     * points to instead of the {@code dx} in the user's Android SDK.
     */
    USE_CUSTOM_DX_IF_AVAILABLE,

    /**
     * Execute DX in-process instead of fork/execing.
     * This only works with custom dx.
     */
    RUN_IN_PROCESS,
    ;
  }

  private final ProjectFilesystem filesystem;

  // Path where the generated classes.dex is written.
  private final Path outputDexFile;

  // Inputs to dex: .class files, zips of .class files, or directories of them.
  private final Set<Path> filesToDex;

  private final Set<Option> options;

  // Populated only after a successful in-process run; see getResourcesReferencedInCode().
  @Nullable
  private Collection<String> resourcesReferencedInCode;

  /**
   * @param outputDexFile path to the file where the generated classes.dex should go.
   * @param filesToDex each element in this set is a path to a .class file, a zip file of .class
   *     files, or a directory of .class files.
   */
  public DxStep(ProjectFilesystem filesystem, Path outputDexFile, Iterable<Path> filesToDex) {
    this(filesystem, outputDexFile, filesToDex, EnumSet.noneOf(DxStep.Option.class));
  }

  /**
   * @param outputDexFile path to the file where the generated classes.dex should go.
   * @param filesToDex each element in this set is a path to a .class file, a zip file of .class
   *     files, or a directory of .class files.
   * @param options to pass to {@code dx}.
   */
  public DxStep(
      ProjectFilesystem filesystem,
      Path outputDexFile,
      Iterable<Path> filesToDex,
      EnumSet<Option> options) {
    super(filesystem.getRootPath());
    this.filesystem = filesystem;
    this.outputDexFile = outputDexFile;
    this.filesToDex = ImmutableSet.copyOf(filesToDex);
    this.options = Sets.immutableEnumSet(options);
    // In-process dexing bypasses the dx launcher, which only makes sense when a
    // custom dx binary has been configured.
    Preconditions.checkArgument(
        !options.contains(Option.RUN_IN_PROCESS) ||
        options.contains(Option.USE_CUSTOM_DX_IF_AVAILABLE),
        "In-process dexing is only supported with custom DX");
  }

  /** Builds the full {@code dx} command line from the configured options. */
  @Override
  protected ImmutableList<String> getShellCommandInternal(ExecutionContext context) {
    ImmutableList.Builder<String> builder = ImmutableList.builder();
    AndroidPlatformTarget androidPlatformTarget = context.getAndroidPlatformTarget();
    String dx = androidPlatformTarget.getDxExecutable().toString();
    if (options.contains(Option.USE_CUSTOM_DX_IF_AVAILABLE)) {
      // A custom dx set via -Dbuck.dx overrides the SDK's executable.
      String customDx = Strings.emptyToNull(System.getProperty("buck.dx"));
      dx = customDx != null ? customDx : dx;
    }
    builder.add(dx);
    // Add the Xmx override, but not for in-process dexing, since the dexer won't understand it.
    // Also, if DX works in-process, it probably wouldn't need an enlarged Xmx.
    if (!XMX_OVERRIDE.isEmpty() && !options.contains(Option.RUN_IN_PROCESS)) {
      builder.add(XMX_OVERRIDE);
    }
    builder.add("--dex");
    // --statistics flag, if appropriate.
    if (context.getVerbosity().shouldPrintSelectCommandOutput()) {
      builder.add("--statistics");
    }
    if (options.contains(Option.NO_OPTIMIZE)) {
      builder.add("--no-optimize");
    }
    if (options.contains(Option.FORCE_JUMBO)) {
      builder.add("--force-jumbo");
    }
    // verbose flag, if appropriate.
    if (context.getVerbosity().shouldUseVerbosityFlagIfAvailable()) {
      builder.add("--verbose");
    }
    builder.add("--output", filesystem.resolve(outputDexFile).toString());
    for (Path fileToDex : filesToDex) {
      builder.add(filesystem.resolve(fileToDex).toString());
    }
    return builder.build();
  }

  /** Runs dx either in-process or by shelling out, per {@link Option#RUN_IN_PROCESS}. */
  @Override
  public StepExecutionResult execute(ExecutionContext context)
      throws IOException, InterruptedException {
    if (options.contains(Option.RUN_IN_PROCESS)) {
      return StepExecutionResult.of(executeInProcess(context));
    } else {
      return super.execute(context);
    }
  }

  /**
   * Invokes the dexer directly inside this JVM.
   *
   * @return the dexer's exit code; 0 on success, 1 if the dexer threw an IOException.
   */
  private int executeInProcess(ExecutionContext context) {
    ImmutableList<String> argv = getShellCommandInternal(context);

    // The first arguments should be ".../dx --dex" ("...\dx.bat --dex" on Windows). Strip them off
    // because we bypass the dispatcher and go straight to the dexer.
    Preconditions.checkState(
        argv.get(0).endsWith(File.separator + "dx") || argv.get(0).endsWith("\\dx.bat"));
    Preconditions.checkState(argv.get(1).equals("--dex"));
    ImmutableList<String> args = argv.subList(2, argv.size());

    // dx's stderr is buffered and surfaced as a single warning event below.
    ByteArrayOutputStream stderr = new ByteArrayOutputStream();
    PrintStream stderrStream = new PrintStream(stderr);
    try {
      com.android.dx.command.dexer.Main dexer = new com.android.dx.command.dexer.Main();
      int returncode = dexer.run(
          args.toArray(new String[args.size()]),
          context.getStdOut(),
          stderrStream
      );
      String stdErrOutput = stderr.toString();
      if (!stdErrOutput.isEmpty()) {
        context.postEvent(ConsoleEvent.warning("%s", stdErrOutput));
      }
      if (returncode == 0) {
        // Referenced resource names are only available after a successful run.
        resourcesReferencedInCode = dexer.getReferencedResourceNames();
      }
      return returncode;
    } catch (IOException e) {
      e.printStackTrace(context.getStdErr());
      return 1;
    }
  }

  @Override
  protected boolean shouldPrintStderr(Verbosity verbosity) {
    return verbosity.shouldPrintSelectCommandOutput();
  }

  @Override
  protected boolean shouldPrintStdout(Verbosity verbosity) {
    return verbosity.shouldPrintSelectCommandOutput();
  }

  @Override
  public String getShortName() {
    return "dx";
  }

  /**
   * Return the names of resources referenced in the code that was dexed.
   * This is only valid after the step executes successfully and
   * only when in-process dexing is used.
   * It only returns resources referenced in java classes being dexed,
   * not merged dex files.
   */
  @Nullable
  Collection<String> getResourcesReferencedInCode() {
    return resourcesReferencedInCode;
  }
}
| |
/*
Copyright (C) 2013-2019 Expedia Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.hotels.styx.api;
import com.hotels.styx.common.EventProcessor;
import com.hotels.styx.common.QueueDrainingEventProcessor;
import org.reactivestreams.Publisher;
import reactor.core.publisher.Flux;
import java.util.Optional;
import java.util.function.Consumer;
import static com.hotels.styx.api.ResponseEventListener.State.INITIAL;
import static com.hotels.styx.api.ResponseEventListener.State.COMPLETED;
import static com.hotels.styx.api.ResponseEventListener.State.STREAMING;
import static com.hotels.styx.api.ResponseEventListener.State.TERMINATED;
import static java.util.Objects.requireNonNull;
/**
* Associate callbacks to Streaming Response object.
*/
/**
 * Associates callbacks with the lifecycle events of a streaming {@link LiveHttpResponse}.
 * <p>
 * A small state machine (INITIAL, STREAMING, COMPLETED, TERMINATED) driven by a
 * {@link QueueDrainingEventProcessor} ensures each terminal callback fires at most
 * once, and {@code whenFinished} fires on every terminal transition.
 */
public class ResponseEventListener {
    private final Flux<LiveHttpResponse> publisher;

    // No-op defaults so unset callbacks are always safe to invoke.
    private Consumer<Throwable> responseErrorAction = cause -> { };
    private Consumer<Throwable> contentErrorAction = cause -> { };
    private Consumer<LiveHttpResponse> onCompletedAction = r -> { };
    private Runnable cancelAction = () -> { };
    private Runnable onHeaders = () -> { };
    private Runnable whenFinishedAction = () -> { };

    // volatile: written from the event processor, potentially read elsewhere.
    private volatile State state = INITIAL;

    private ResponseEventListener(Publisher<LiveHttpResponse> publisher) {
        this.publisher = Flux.from(requireNonNull(publisher));
    }

    /** Creates a listener builder for the given response publisher. */
    public static ResponseEventListener from(Publisher<LiveHttpResponse> publisher) {
        return new ResponseEventListener(publisher);
    }

    /** Registers an action to run when the response or its content is cancelled. */
    public ResponseEventListener whenCancelled(Runnable action) {
        this.cancelAction = requireNonNull(action);
        return this;
    }

    /** Registers an action for errors occurring before response headers are seen. */
    public ResponseEventListener whenResponseError(Consumer<Throwable> responseErrorAction) {
        this.responseErrorAction = requireNonNull(responseErrorAction);
        return this;
    }

    /** Registers an action for errors occurring while streaming the content. */
    public ResponseEventListener whenContentError(Consumer<Throwable> contentErrorAction) {
        this.contentErrorAction = requireNonNull(contentErrorAction);
        return this;
    }

    /** Registers an action to run when the content stream completes normally. */
    public ResponseEventListener whenCompleted(Consumer<LiveHttpResponse> completeAction) {
        this.onCompletedAction = requireNonNull(completeAction);
        return this;
    }

    /** Registers an action to run when the response headers become available. */
    public ResponseEventListener whenHeadersComplete(Runnable action) {
        this.onHeaders = requireNonNull(action);
        return this;
    }

    /**
     * Executes an action when the response terminates for any reason, normally,
     * abnormally, or due to cancellation.
     *
     * @param action a runnable action
     * @return the builder
     */
    public ResponseEventListener whenFinished(Runnable action) {
        this.whenFinishedAction = requireNonNull(action);
        return this;
    }

    /**
     * Wires the registered callbacks into the publisher and returns the
     * instrumented stream. Events are serialized through the queue-draining
     * processor so state transitions happen one at a time.
     */
    public Flux<LiveHttpResponse> apply() {
        EventProcessor eventProcessor = new QueueDrainingEventProcessor(
                event -> {
                    switch (state) {
                        case INITIAL:
                            if (event instanceof MessageHeaders) {
                                onHeaders.run();
                                state = STREAMING;
                            } else if (event instanceof MessageCancelled) {
                                cancelAction.run();
                                whenFinishedAction.run();
                                state = TERMINATED;
                            } else if (event instanceof MessageCompleted) {
                                // Completing without ever emitting headers is an error.
                                // TODO: Add custom exception type?
                                responseErrorAction.accept(new RuntimeException("Response Observable completed without message headers."));
                                whenFinishedAction.run();
                                state = TERMINATED;
                            } else if (event instanceof MessageError) {
                                responseErrorAction.accept(((MessageError) event).cause());
                                whenFinishedAction.run();
                                state = TERMINATED;
                            }
                            break;
                        case STREAMING:
                            if (event instanceof ContentEnd) {
                                onCompletedAction.accept(((ContentEnd) event).response);
                                whenFinishedAction.run();
                                state = COMPLETED;
                            } else if (event instanceof ContentError) {
                                contentErrorAction.accept(((ContentError) event).cause());
                                whenFinishedAction.run();
                                state = TERMINATED;
                            } else if (event instanceof ContentCancelled) {
                                cancelAction.run();
                                whenFinishedAction.run();
                                state = TERMINATED;
                            }
                            break;
                        // COMPLETED / TERMINATED: terminal — all further events ignored.
                    }
                });
        return publisher
                .doOnNext(headers -> eventProcessor.submit(new MessageHeaders()))
                .doOnComplete(() -> eventProcessor.submit(new MessageCompleted()))
                .doOnError(cause -> eventProcessor.submit(new MessageError(cause)))
                .doOnCancel(() -> eventProcessor.submit(new MessageCancelled()))
                .map(response ->
                        response.newBuilder()
                                .body(it -> it.doOnEnd(ifError(cause -> eventProcessor.submit(new ContentError(cause)))))
                                .body(it -> it.doOnEnd(ifSuccessful(() -> eventProcessor.submit(new ContentEnd(response)))))
                                .body(it -> it.doOnCancel(() -> eventProcessor.submit(new ContentCancelled())))
                                .build());
    }

    /** Lifecycle states of the response stream. */
    enum State {
        INITIAL,
        STREAMING,
        TERMINATED,
        COMPLETED
    }

    // Adapts a Consumer<Throwable> to the Optional<Throwable> doOnEnd signature.
    private static Consumer<Optional<Throwable>> ifError(Consumer<Throwable> action) {
        return maybeCause -> maybeCause.ifPresent(action);
    }

    // Runs the action only when doOnEnd signals success (no throwable present).
    private static Consumer<Optional<Throwable>> ifSuccessful(Runnable action) {
        return maybeCause -> {
            if (!maybeCause.isPresent()) {
                action.run();
            }
        };
    }

    // Internal event types fed through the event processor.
    // (Fields are now final: these events are immutable value carriers.)

    private static class MessageHeaders {
    }

    private static class MessageError {
        private final Throwable cause;

        MessageError(Throwable cause) {
            this.cause = cause;
        }

        Throwable cause() {
            return cause;
        }
    }

    private static class MessageCompleted {
    }

    private static class MessageCancelled {
    }

    private static class ContentEnd {
        private final LiveHttpResponse response;

        ContentEnd(LiveHttpResponse response) {
            this.response = response;
        }
    }

    private static class ContentError {
        private final Throwable cause;

        ContentError(Throwable cause) {
            this.cause = cause;
        }

        Throwable cause() {
            return cause;
        }
    }

    private static class ContentCancelled {
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.jackrabbit.oak.core;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.apache.jackrabbit.oak.commons.PathUtils.elements;
import static org.apache.jackrabbit.oak.commons.PathUtils.isAbsolute;
import javax.annotation.CheckForNull;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.apache.jackrabbit.oak.api.PropertyState;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.api.Type;
import org.apache.jackrabbit.oak.core.MutableRoot.Move;
import org.apache.jackrabbit.oak.plugins.tree.impl.AbstractMutableTree;
import org.apache.jackrabbit.oak.spi.state.NodeBuilder;
final class MutableTree extends AbstractMutableTree {
/**
 * Underlying {@code Root} of this {@code Tree} instance
 */
private final MutableRoot root;

/**
 * Parent of this tree. Null for the root.
 */
private MutableTree parent;

/**
 * Name of the tree
 */
private String name;

// Builder for the underlying node state; may be re-resolved from the parent
// by reconnect().
private NodeBuilder nodeBuilder;

/** Pointer into the list of pending moves */
private Move pendingMoves;

/**
 * Creates the root tree (empty name, no parent) on top of the given builder.
 */
MutableTree(@Nonnull MutableRoot root, @Nonnull NodeBuilder nodeBuilder,
        @Nonnull Move pendingMoves) {
    this(root, pendingMoves, null, nodeBuilder, "");
}

/**
 * Internal constructor shared by the root constructor and child creation.
 */
private MutableTree(@Nonnull MutableRoot root, @Nonnull Move pendingMoves,
        @Nullable MutableTree parent, @Nonnull NodeBuilder nodeBuilder, @Nonnull String name) {
    this.root = checkNotNull(root);
    this.parent = parent;
    this.name = checkNotNull(name);
    this.nodeBuilder = nodeBuilder;
    this.pendingMoves = checkNotNull(pendingMoves);
}
//------------------------------------------------------------< AbstractMutableTree >---

@Override
@CheckForNull
protected AbstractMutableTree getParentOrNull() {
    return parent;
}

@Nonnull
@Override
protected NodeBuilder getNodeBuilder() {
    return nodeBuilder;
}

//-----------------------------------------------------< AbstractTree >---

@Override
@Nonnull
protected MutableTree createChild(@Nonnull String name) throws IllegalArgumentException {
    // Child trees share this tree's root and pending-move list.
    return new MutableTree(root, pendingMoves, this,
            nodeBuilder.getChildNode(checkNotNull(name)), name);
}
//------------------------------------------------------------< Tree >---

// Every read operation below first calls beforeRead() (session check plus
// application of pending moves) and then delegates to the superclass.

@Override
@Nonnull
public String getName() {
    beforeRead();
    return name;
}

@Override
@Nonnull
public String getPath() {
    beforeRead();
    return super.getPath();
}

@Override
@Nonnull
public Status getStatus() {
    beforeRead();
    return super.getStatus();
}

@Override
public boolean exists() {
    beforeRead();
    return super.exists();
}

@Override
public PropertyState getProperty(@Nonnull String name) {
    beforeRead();
    return super.getProperty(name);
}

@Override
public boolean hasProperty(@Nonnull String name) {
    beforeRead();
    return super.hasProperty(name);
}

@Override
public long getPropertyCount() {
    beforeRead();
    return super.getPropertyCount();
}

@Override
@CheckForNull
public Status getPropertyStatus(@Nonnull String name) {
    beforeRead();
    return super.getPropertyStatus(name);
}

@Override
@Nonnull
public Iterable<? extends PropertyState> getProperties() {
    beforeRead();
    return super.getProperties();
}

@Override
@Nonnull
public Tree getChild(@Nonnull String name) {
    beforeRead();
    return super.getChild(name);
}

@Override
public boolean hasChild(@Nonnull String name) {
    beforeRead();
    return super.hasChild(name);
}

@Override
public long getChildrenCount(long max) {
    beforeRead();
    return super.getChildrenCount(max);
}

@Override
@Nonnull
public Iterable<Tree> getChildren() {
    beforeRead();
    return super.getChildren();
}
@Override
public boolean remove() {
    beforeWrite();
    boolean success = super.remove();
    if (success) {
        // Signal the root that the tree has been modified.
        root.updated();
    }
    return success;
}

@Override
@Nonnull
public Tree addChild(@Nonnull String name) {
    beforeWrite();
    Tree child;
    if (hasChild(name)) {
        // Child already exists: just wrap it — nothing changed, so no
        // root.updated() notification.
        child = createChild(name);
    } else {
        child = super.addChild(name);
        root.updated();
    }
    return child;
}
// Write operations: beforeWrite() first, delegate to super, then notify the
// root of the modification via root.updated().

@Override
public void setOrderableChildren(boolean enable) {
    beforeWrite();
    // NOTE(review): unlike the other mutators, this one does not call
    // root.updated() — confirm whether that is intentional.
    super.setOrderableChildren(enable);
}

@Override
public boolean orderBefore(@Nullable String name) {
    beforeWrite();
    boolean success = super.orderBefore(name);
    if (success) {
        root.updated();
    }
    return success;
}

@Override
public void setProperty(@Nonnull PropertyState property) {
    beforeWrite();
    super.setProperty(property);
    root.updated();
}

@Override
public <T> void setProperty(@Nonnull String name, @Nonnull T value) {
    beforeWrite();
    super.setProperty(name, value);
    root.updated();
}

@Override
public <T> void setProperty(@Nonnull String name, @Nonnull T value, @Nonnull Type<T> type) {
    beforeWrite();
    super.setProperty(name, value, type);
    root.updated();
}

@Override
public void removeProperty(@Nonnull String name) {
    beforeWrite();
    super.removeProperty(name);
    root.updated();
}
//---------------------------------------------------------< internal >---
/**
 * Set the parent and name of this tree.
 * Only updates the in-memory location of this instance; the underlying
 * {@code nodeBuilder} is NOT moved or re-resolved here.
 * @param parent parent of this tree
 * @param name name of this tree
 */
void setParentAndName(@Nonnull MutableTree parent, @Nonnull String name) {
    this.name = checkNotNull(name);
    this.parent = checkNotNull(parent);
}
/**
 * Move this tree to the parent at {@code destParent} with the new name
 * {@code newName}.
 * @param newParent new parent for this tree
 * @param newName new name for this tree
 * @return {@code true} if the underlying builder accepted the move
 */
boolean moveTo(@Nonnull MutableTree newParent, @Nonnull String newName) {
    name = checkNotNull(newName);
    parent = checkNotNull(newParent);
    boolean success = nodeBuilder.moveTo(newParent.nodeBuilder, newName);
    if (success) {
        // NOTE(review): 'parent' was reassigned to 'newParent' above, so BOTH of these
        // calls update the new parent's child order and the OLD parent's child order is
        // never refreshed. This looks like the old parent was meant to be captured before
        // the reassignment — confirm against the upstream implementation before changing.
        parent.updateChildOrder(false);
        newParent.updateChildOrder(false);
    }
    return success;
}
/**
 * Resolve the (possibly non-existent) tree at an absolute {@code path},
 * walking one path element at a time starting from this tree.
 * @param path the absolute path to the tree
 * @return a {@link Tree} instance for the child at {@code path}.
 */
@CheckForNull
MutableTree getTree(@Nonnull String path) {
    checkArgument(isAbsolute(checkNotNull(path)));
    beforeRead();
    MutableTree current = this;
    for (String segment : elements(path)) {
        current = new MutableTree(root, pendingMoves, current, current.nodeBuilder.getChildNode(segment), segment);
    }
    return current;
}
/** Builds this tree's absolute path; the root tree reports {@code "/"}. */
@Nonnull
String getPathInternal() {
    if (parent == null) {
        return "/";
    }
    StringBuilder path = new StringBuilder();
    buildPath(path);
    return path.toString();
}
/** Recursively appends {@code /<name>} segments for every ancestor below the root. */
@Override
protected void buildPath(@Nonnull StringBuilder sb) {
    if (parent == null) {
        return; // the root contributes nothing to the path
    }
    parent.buildPath(checkNotNull(sb));
    sb.append('/').append(name);
}
//------------------------------------------------------------< private >---
/**
 * Re-resolves this tree's {@code nodeBuilder} from the root downwards after a
 * pending move may have relocated this node or one of its ancestors.
 */
private void reconnect() {
    if (parent != null) {
        // Reconnect ancestors first so our builder is looked up in a fresh parent builder.
        parent.reconnect();
        nodeBuilder = parent.nodeBuilder.getChildNode(name);
    }
}
/**
 * Verifies that this session is still alive and applies any pending
 * moves that might affect this node. This method needs to be called
 * at the beginning of all public read-only {@link Tree} methods to
 * guarantee a consistent view of the tree. See {@link #beforeWrite()}
 * for the equivalent method for write operations.
 *
 * @throws IllegalStateException if this session is closed
 */
private void beforeRead() throws IllegalStateException {
    root.checkLive();
    // Only re-resolve the builder chain when some pending move actually fired.
    if (applyPendingMoves()) {
        reconnect();
    }
}
/**
 * Like {@link #beforeRead()} but also checks that (after any pending
 * moves have been applied) the current node exists and is visible.
 * This method needs to be called at the beginning of all public
 * {@link Tree} methods that modify this node to guarantee a consistent
 * view of the tree and to throw an exception whenever there's an
 * attempt to modify a missing node.
 *
 * @throws IllegalStateException if this node does not exist or
 *         if this session is closed
 */
private void beforeWrite() throws IllegalStateException {
    beforeRead();
    // Writes are only legal on a node that still exists after pending moves resolved.
    if (!super.exists()) {
        throw new IllegalStateException("This tree does not exist");
    }
}
/**
 * Applies any moves queued for this tree and all of its ancestors
 * (ancestors first, root downwards).
 * @return {@code true} if at least one move was applied on the path
 */
private boolean applyPendingMoves() {
    boolean movesApplied = parent != null && parent.applyPendingMoves();
    Move previous = pendingMoves;
    pendingMoves = pendingMoves.apply(this);
    return movesApplied || pendingMoves != previous;
}
}
| |
package org.jenkinsci.plugins.github.config;
import com.cloudbees.jenkins.GitHubWebHook;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import hudson.Extension;
import hudson.XmlFile;
import hudson.model.Descriptor;
import hudson.model.Job;
import hudson.util.FormValidation;
import jenkins.model.GlobalConfiguration;
import jenkins.model.Jenkins;
import net.sf.json.JSONObject;
import org.apache.commons.codec.binary.Base64;
import org.jenkinsci.main.modules.instance_identity.InstanceIdentity;
import org.jenkinsci.plugins.github.GitHubPlugin;
import org.jenkinsci.plugins.github.Messages;
import org.jenkinsci.plugins.github.internal.GHPluginConfigException;
import org.jenkinsci.plugins.github.migration.Migrator;
import org.kohsuke.github.GitHub;
import org.kohsuke.stapler.QueryParameter;
import org.kohsuke.stapler.StaplerRequest;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.io.IOException;
import java.net.HttpURLConnection;
import java.net.MalformedURLException;
import java.net.URL;
import java.security.interfaces.RSAPublicKey;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import static com.google.common.base.Charsets.UTF_8;
import static java.lang.String.format;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
import static org.jenkinsci.plugins.github.config.GitHubServerConfig.allowedToManageHooks;
import static org.jenkinsci.plugins.github.config.GitHubServerConfig.loginToGithub;
import static org.jenkinsci.plugins.github.internal.GitHubClientCacheOps.clearRedundantCaches;
import static org.jenkinsci.plugins.github.util.FluentIterableWrapper.from;
/**
* Global configuration to store all GH Plugin settings
* such as hook managing policy, credentials etc.
*
* @author lanwen (Merkushev Kirill)
* @since 1.13.0
*/
@Extension
public class GitHubPluginConfig extends GlobalConfiguration {
private static final Logger LOGGER = LoggerFactory.getLogger(GitHubPluginConfig.class);
public static final String GITHUB_PLUGIN_CONFIGURATION_ID = "github-plugin-configuration";
/**
* Helps to avoid null in {@link GitHubPlugin#configuration()}
*/
public static final GitHubPluginConfig EMPTY_CONFIG =
new GitHubPluginConfig(Collections.<GitHubServerConfig>emptyList());
private List<GitHubServerConfig> configs = new ArrayList<GitHubServerConfig>();
private URL hookUrl;
private HookSecretConfig hookSecretConfig = new HookSecretConfig(null);
private transient boolean overrideHookUrl;
private String bannedCommitter = "";
/**
* Used to get current instance identity.
* It compared with same value when testing hook url availability in {@link #doCheckHookUrl(String)}
*/
@Inject
@SuppressWarnings("unused")
private transient InstanceIdentity identity;
public GitHubPluginConfig() {
load();
}
public GitHubPluginConfig(List<GitHubServerConfig> configs) {
this.configs = configs;
}
@SuppressWarnings("unused")
public void setConfigs(List<GitHubServerConfig> configs) {
this.configs = configs;
}
public List<GitHubServerConfig> getConfigs() {
return configs;
}
public boolean isManageHooks() {
return from(getConfigs()).filter(allowedToManageHooks()).first().isPresent();
}
public void setHookUrl(URL hookUrl) {
if (overrideHookUrl) {
this.hookUrl = hookUrl;
} else {
this.hookUrl = null;
}
}
public String getBannedCommitter() {
return bannedCommitter;
}
public void setBannedCommitter(final String aBannedCommitter) {
bannedCommitter = aBannedCommitter;
}
public void setOverrideHookUrl(boolean overrideHookUrl) {
this.overrideHookUrl = overrideHookUrl;
}
/**
* @return hook url used as endpoint to search and write auto-managed hooks in GH
* @throws GHPluginConfigException if default jenkins url is malformed
*/
public URL getHookUrl() throws GHPluginConfigException {
if (hookUrl != null) {
return hookUrl;
} else {
return constructDefaultUrl();
}
}
public boolean isOverrideHookURL() {
return hookUrl != null;
}
/**
* Filters all stored configs against given predicate then
* logs in as the given user and returns the non null connection objects
*/
public Iterable<GitHub> findGithubConfig(Predicate<GitHubServerConfig> match) {
// try all the credentials since we don't know which one would work
return from(getConfigs())
.filter(match)
.transform(loginToGithub())
.filter(Predicates.notNull());
}
public List<Descriptor> actions() {
return Collections.singletonList(Jenkins.getInstance().getDescriptor(GitHubTokenCredentialsCreator.class));
}
/**
* To avoid long class name as id in xml tag name and config file
*/
@Override
public String getId() {
return GITHUB_PLUGIN_CONFIGURATION_ID;
}
/**
* @return config file with global {@link com.thoughtworks.xstream.XStream} instance
* with enabled aliases in {@link Migrator#enableAliases()}
*/
@Override
protected XmlFile getConfigFile() {
return new XmlFile(Jenkins.XSTREAM2, super.getConfigFile().getFile());
}
@Override
public boolean configure(StaplerRequest req, JSONObject json) throws FormException {
try {
req.bindJSON(this, json);
} catch (Exception e) {
LOGGER.debug("Problem while submitting form for GitHub Plugin ({})", e.getMessage(), e);
LOGGER.trace("GH form data: {}", json.toString());
throw new FormException(
format("Malformed GitHub Plugin configuration (%s)", e.getMessage()), e, "github-configuration");
}
save();
clearRedundantCaches(configs);
return true;
}
@Override
public String getDisplayName() {
return "GitHub";
}
@SuppressWarnings("unused")
public FormValidation doReRegister() {
if (!GitHubPlugin.configuration().isManageHooks()) {
return FormValidation.warning("Works only when Jenkins manages hooks (one ore more creds specified)");
}
List<Job> registered = GitHubWebHook.get().reRegisterAllHooks();
LOGGER.info("Called registerHooks() for {} jobs", registered.size());
return FormValidation.ok("Called re-register hooks for %s jobs", registered.size());
}
@SuppressWarnings("unused")
public FormValidation doCheckHookUrl(@QueryParameter String value) {
try {
HttpURLConnection con = (HttpURLConnection) new URL(value).openConnection();
con.setRequestMethod("POST");
con.setRequestProperty(GitHubWebHook.URL_VALIDATION_HEADER, "true");
con.connect();
if (con.getResponseCode() != 200) {
return FormValidation.error("Got %d from %s", con.getResponseCode(), value);
}
String v = con.getHeaderField(GitHubWebHook.X_INSTANCE_IDENTITY);
if (v == null) {
// people might be running clever apps that's not Jenkins, and that's OK
return FormValidation.warning("It doesn't look like %s is talking to any Jenkins. "
+ "Are you running your own app?", value);
}
RSAPublicKey key = identity.getPublic();
String expected = new String(Base64.encodeBase64(key.getEncoded()), UTF_8);
if (!expected.equals(v)) {
// if it responds but with a different ID, that's more likely wrong than correct
return FormValidation.error("%s is connecting to different Jenkins instances", value);
}
return FormValidation.ok();
} catch (IOException e) {
return FormValidation.error(e, "Failed to test a connection to %s", value);
}
}
/**
* Used by default in {@link #getHookUrl()}
*
* @return url to be used in GH hooks configuration as main endpoint
* @throws GHPluginConfigException if jenkins root url empty of malformed
*/
private static URL constructDefaultUrl() {
String jenkinsUrl = Jenkins.getInstance().getRootUrl();
validateConfig(isNotEmpty(jenkinsUrl), Messages.global_config_url_is_empty());
try {
return new URL(jenkinsUrl + GitHubWebHook.get().getUrlName() + '/');
} catch (MalformedURLException e) {
throw new GHPluginConfigException(Messages.global_config_hook_url_is_malformed(e.getMessage()));
}
}
/**
* Util method just to hide one more if for better readability
*
* @param state to check. If false, then exception will be thrown
* @param message message to describe exception in case of false state
*
* @throws GHPluginConfigException if state is false
*/
private static void validateConfig(boolean state, String message) {
if (!state) {
throw new GHPluginConfigException(message);
}
}
public HookSecretConfig getHookSecretConfig() {
return hookSecretConfig;
}
public void setHookSecretConfig(HookSecretConfig hookSecretConfig) {
this.hookSecretConfig = hookSecretConfig;
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.opensearch.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.protocol.StructuredPojo;
import com.amazonaws.protocol.ProtocolMarshaller;
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class DryRunResults implements Serializable, Cloneable, StructuredPojo {
/**
* <p>
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no blue/green
* required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the deployment type;
* try again after the update is complete), and <code>None</code> (the request doesn't include any configuration
* changes).
* </p>
*/
private String deploymentType;
/**
* <p>
* Contains an optional message associated with the DryRunResults.
* </p>
*/
private String message;
/**
* <p>
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no blue/green
* required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the deployment type;
* try again after the update is complete), and <code>None</code> (the request doesn't include any configuration
* changes).
* </p>
*
* @param deploymentType
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no
* blue/green required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the
* deployment type; try again after the update is complete), and <code>None</code> (the request doesn't
* include any configuration changes).
*/
public void setDeploymentType(String deploymentType) {
this.deploymentType = deploymentType;
}
/**
* <p>
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no blue/green
* required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the deployment type;
* try again after the update is complete), and <code>None</code> (the request doesn't include any configuration
* changes).
* </p>
*
* @return Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no
* blue/green required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the
* deployment type; try again after the update is complete), and <code>None</code> (the request doesn't
* include any configuration changes).
*/
public String getDeploymentType() {
return this.deploymentType;
}
/**
* <p>
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no blue/green
* required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the deployment type;
* try again after the update is complete), and <code>None</code> (the request doesn't include any configuration
* changes).
* </p>
*
* @param deploymentType
* Specifies the way in which Amazon OpenSearch Service applies the update. Possible responses are
* <code>Blue/Green</code> (the update requires a blue/green deployment), <code>DynamicUpdate</code> (no
* blue/green required), <code>Undetermined</code> (the domain is undergoing an update and can't predict the
* deployment type; try again after the update is complete), and <code>None</code> (the request doesn't
* include any configuration changes).
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DryRunResults withDeploymentType(String deploymentType) {
setDeploymentType(deploymentType);
return this;
}
/**
* <p>
* Contains an optional message associated with the DryRunResults.
* </p>
*
* @param message
* Contains an optional message associated with the DryRunResults.
*/
public void setMessage(String message) {
this.message = message;
}
/**
* <p>
* Contains an optional message associated with the DryRunResults.
* </p>
*
* @return Contains an optional message associated with the DryRunResults.
*/
public String getMessage() {
return this.message;
}
/**
* <p>
* Contains an optional message associated with the DryRunResults.
* </p>
*
* @param message
* Contains an optional message associated with the DryRunResults.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public DryRunResults withMessage(String message) {
setMessage(message);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getDeploymentType() != null)
sb.append("DeploymentType: ").append(getDeploymentType()).append(",");
if (getMessage() != null)
sb.append("Message: ").append(getMessage());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof DryRunResults == false)
return false;
DryRunResults other = (DryRunResults) obj;
if (other.getDeploymentType() == null ^ this.getDeploymentType() == null)
return false;
if (other.getDeploymentType() != null && other.getDeploymentType().equals(this.getDeploymentType()) == false)
return false;
if (other.getMessage() == null ^ this.getMessage() == null)
return false;
if (other.getMessage() != null && other.getMessage().equals(this.getMessage()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getDeploymentType() == null) ? 0 : getDeploymentType().hashCode());
hashCode = prime * hashCode + ((getMessage() == null) ? 0 : getMessage().hashCode());
return hashCode;
}
@Override
public DryRunResults clone() {
try {
return (DryRunResults) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
@com.amazonaws.annotation.SdkInternalApi
@Override
public void marshall(ProtocolMarshaller protocolMarshaller) {
com.amazonaws.services.opensearch.model.transform.DryRunResultsMarshaller.getInstance().marshall(this, protocolMarshaller);
}
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package br.edu.ufabc.tracking2u.telas;
import br.edu.ufabc.tracking2u.entity.Colaborador;
import br.edu.ufabc.tracking2u.entity.Pendencia;
import br.edu.ufabc.tracking2u.entity.Tarefa;
import br.edu.ufabc.tracking2u.persistence.PersistenceManager;
import br.edu.ufabc.tracking2u.persistence.PersistenceManagerFactory;
import br.edu.ufabc.tracking2u.ui.UIHandler;
import br.edu.ufabc.tracking2u.ui.UIHandlerImpl;
import javax.swing.JOptionPane;
/**
*
* @author tuliocarreira
*/
/**
 * Swing form for creating or editing a {@link Pendencia} (pending item) attached
 * to a {@link Tarefa}. Opened from {@link telaListaPendencias}, which is re-enabled
 * and refreshed when this window closes.
 *
 * @author tuliocarreira
 */
public class telaCadastroPendencia extends javax.swing.JFrame {

    UIHandler uihandler = new UIHandlerImpl();
    private final PersistenceManager manager = PersistenceManagerFactory.buildPersistenceManager();
    Colaborador colaborador;
    Tarefa tarefa;
    Pendencia pendencia;
    telaListaPendencias telaAnterior;

    /**
     * Creates new form telaCadastroPendencia.
     *
     * NOTE(review): this no-arg form leaves colaborador/tarefa/telaAnterior null, so
     * clicking Salvar or Cancelar would NPE — it appears to exist only for the
     * main() preview below; confirm before removing.
     */
    public telaCadastroPendencia() {
        initComponents();
    }

    /**
     * Creation mode: a new Pendencia for the given task and creator.
     * The "finalizada" checkbox is hidden since a new item can't be finished yet.
     */
    public telaCadastroPendencia(telaListaPendencias telaAnterior, Tarefa tarefa, Colaborador colaborador) {
        initComponents();
        this.telaAnterior = telaAnterior;
        this.tarefa = tarefa;
        this.colaborador = colaborador;
        checkboxFinalizado.setVisible(false);
    }

    /**
     * Edit mode: fields are pre-filled from the existing Pendencia and the
     * "finalizada" checkbox is shown.
     */
    public telaCadastroPendencia(telaListaPendencias telaAnterior, Pendencia pendencia) {
        initComponents();
        this.telaAnterior = telaAnterior;
        this.pendencia = pendencia;
        this.colaborador = pendencia.getCriador();
        this.tarefa = pendencia.getTarefa();
        checkboxFinalizado.setVisible(true);
        textNome.setText(pendencia.getNome());
        textDesc.setText(pendencia.getDescricao());
    }

    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        checkboxFinalizado = new javax.swing.JCheckBox();
        textNome = new javax.swing.JTextField();
        jLabel1 = new javax.swing.JLabel();
        jLabel2 = new javax.swing.JLabel();
        jScrollPane1 = new javax.swing.JScrollPane();
        textDesc = new javax.swing.JTextArea();
        buttonSalvar = new javax.swing.JButton();
        buttonCancelar = new javax.swing.JButton();
        setDefaultCloseOperation(javax.swing.WindowConstants.DISPOSE_ON_CLOSE);
        setTitle("Cadastro de Pendencia");
        checkboxFinalizado.setText("Finalizada?");
        textNome.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                textNomeActionPerformed(evt);
            }
        });
        jLabel1.setText("Nome");
        jLabel2.setText("Detalhes");
        textDesc.setColumns(20);
        textDesc.setRows(5);
        jScrollPane1.setViewportView(textDesc);
        buttonSalvar.setText("Salvar");
        buttonSalvar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                buttonSalvarActionPerformed(evt);
            }
        });
        buttonCancelar.setText("Cancelar");
        buttonCancelar.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                buttonCancelarActionPerformed(evt);
            }
        });
        javax.swing.GroupLayout layout = new javax.swing.GroupLayout(getContentPane());
        getContentPane().setLayout(layout);
        layout.setHorizontalGroup(
                layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                        .addGroup(layout.createSequentialGroup().addGroup(layout.createParallelGroup(
                                javax.swing.GroupLayout.Alignment.LEADING)
                                .addGroup(layout
                                        .createSequentialGroup()
                                        .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                                                .addGroup(layout.createSequentialGroup().addGap(41, 41, 41)
                                                        .addGroup(layout
                                                                .createParallelGroup(
                                                                        javax.swing.GroupLayout.Alignment.LEADING)
                                                                .addComponent(jLabel2).addComponent(jLabel1)
                                                                .addComponent(checkboxFinalizado)))
                                                .addGroup(layout.createSequentialGroup().addGap(64, 64, 64)
                                                        .addComponent(textNome, javax.swing.GroupLayout.PREFERRED_SIZE,
                                                                100, javax.swing.GroupLayout.PREFERRED_SIZE))
                                                .addGroup(
                                                        layout.createSequentialGroup().addGap(65, 65, 65).addComponent(
                                                                jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE,
                                                                javax.swing.GroupLayout.DEFAULT_SIZE,
                                                                javax.swing.GroupLayout.PREFERRED_SIZE)))
                                        .addGap(0, 64, Short.MAX_VALUE))
                                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING,
                                        layout.createSequentialGroup().addGap(0, 0, Short.MAX_VALUE)
                                                .addComponent(buttonSalvar)
                                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                                .addComponent(buttonCancelar)))
                                .addContainerGap()));
        layout.setVerticalGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
                .addGroup(javax.swing.GroupLayout.Alignment.TRAILING,
                        layout.createSequentialGroup().addGap(28, 28, 28).addComponent(jLabel1)
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                .addComponent(textNome, javax.swing.GroupLayout.PREFERRED_SIZE,
                                        javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addGap(18, 18, 18).addComponent(jLabel2)
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
                                .addComponent(jScrollPane1, javax.swing.GroupLayout.PREFERRED_SIZE,
                                        javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
                                .addGap(38, 38, 38).addComponent(checkboxFinalizado)
                                .addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED, 49,
                                        Short.MAX_VALUE)
                                .addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
                                        .addComponent(buttonSalvar).addComponent(buttonCancelar))));
        pack();
    }// </editor-fold>//GEN-END:initComponents

    private void textNomeActionPerformed(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_textNomeActionPerformed
        // No action on Enter in the name field.
    }// GEN-LAST:event_textNomeActionPerformed

    /** Cancels the dialog: closes this window and re-enables the parent list. */
    private void buttonCancelarActionPerformed(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_buttonCancelarActionPerformed
        this.dispose();
        this.telaAnterior.setEnabled(true);
    }// GEN-LAST:event_buttonCancelarActionPerformed

    /** Persists the Pendencia (create or update) and refreshes the parent list. */
    private void buttonSalvarActionPerformed(java.awt.event.ActionEvent evt) {// GEN-FIRST:event_buttonSalvarActionPerformed
        String nome = this.textNome.getText();
        String desc = this.textDesc.getText();
        boolean status = this.checkboxFinalizado.isSelected();
        // In creation mode 'pendencia' is null; managePendencia is expected to
        // create vs. update based on that.
        this.uihandler.managePendencia(status, nome, desc, this.colaborador.getId(), this.tarefa.getId(),
                this.pendencia);
        this.dispose();
        telaAnterior.carregaListaPendencias(this.tarefa);
        telaAnterior.setEnabled(true);
    }// GEN-LAST:event_buttonSalvarActionPerformed

    /**
     * Standalone preview entry point.
     *
     * @param args the command line arguments
     */
    public static void main(String args[]) {
        /* Set the Nimbus look and feel */
        // <editor-fold defaultstate="collapsed" desc=" Look and feel setting code (optional) ">
        /*
         * If Nimbus (introduced in Java SE 6) is not available, stay with the
         * default look and feel. For details see
         * http://download.oracle.com/javase/tutorial/uiswing/lookandfeel/plaf.html
         */
        try {
            for (javax.swing.UIManager.LookAndFeelInfo info : javax.swing.UIManager.getInstalledLookAndFeels()) {
                if ("Nimbus".equals(info.getName())) {
                    javax.swing.UIManager.setLookAndFeel(info.getClassName());
                    break;
                }
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException
                | javax.swing.UnsupportedLookAndFeelException ex) {
            // All four failure modes are handled identically, so use a single multi-catch
            // instead of the four duplicated catch blocks the form editor generated.
            java.util.logging.Logger.getLogger(telaCadastroPendencia.class.getName())
                    .log(java.util.logging.Level.SEVERE, null, ex);
        }
        // </editor-fold>

        /* Create and display the form */
        java.awt.EventQueue.invokeLater(new Runnable() {
            public void run() {
                new telaCadastroPendencia().setVisible(true);
            }
        });
    }

    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton buttonCancelar;
    private javax.swing.JButton buttonSalvar;
    private javax.swing.JCheckBox checkboxFinalizado;
    private javax.swing.JLabel jLabel1;
    private javax.swing.JLabel jLabel2;
    private javax.swing.JScrollPane jScrollPane1;
    private javax.swing.JTextArea textDesc;
    private javax.swing.JTextField textNome;
    // End of variables declaration//GEN-END:variables
}
| |
// Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license.
package com.intellij.ide.ui.laf.darcula.ui;
import com.intellij.ide.ui.laf.darcula.DarculaUIUtil;
import com.intellij.openapi.ui.ComboBoxWithWidePopup;
import com.intellij.openapi.ui.ErrorBorderCapable;
import com.intellij.openapi.util.ColoredItem;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Key;
import com.intellij.openapi.util.SystemInfo;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.ui.*;
import com.intellij.ui.render.RenderingUtil;
import com.intellij.ui.scale.JBUIScale;
import com.intellij.util.ObjectUtils;
import com.intellij.util.ui.JBInsets;
import com.intellij.util.ui.JBUI;
import com.intellij.util.ui.StartupUiUtil;
import com.intellij.util.ui.UIUtil;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import javax.swing.border.Border;
import javax.swing.plaf.ComponentUI;
import javax.swing.plaf.UIResource;
import javax.swing.plaf.basic.*;
import javax.swing.text.JTextComponent;
import java.awt.*;
import java.awt.event.*;
import java.awt.geom.Path2D;
import java.awt.geom.Rectangle2D;
import java.awt.geom.RectangularShape;
import java.awt.geom.RoundRectangle2D;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import static com.intellij.ide.ui.laf.darcula.DarculaUIUtil.*;
/**
* @author Konstantin Bulenkov
*/
public class DarculaComboBoxUI extends BasicComboBoxUI implements Border, ErrorBorderCapable {
public static final Key<Boolean> PAINT_VERTICAL_LINE = Key.create("PAINT_VERTICAL_LINE");
@SuppressWarnings("UnregisteredNamedColor")
private static final Color NON_EDITABLE_BACKGROUND = JBColor.namedColor("ComboBox.nonEditableBackground",
JBColor.namedColor("ComboBox.darcula.nonEditableBackground", new JBColor(0xfcfcfc, 0x3c3f41)));
private float myArc = COMPONENT_ARC.getFloat();
private Insets myBorderCompensation = JBUI.insets(1);
private boolean myPaintArrowButton = true;
/** Default UI: standard component arc, 1px border compensation, arrow button painted. */
public DarculaComboBoxUI() {}
/**
 * @param arc corner arc used when painting the combo box outline
 * @param borderCompensation insets compensated for when painting the border
 * @param paintArrowButton whether the arrow button background/separator is painted
 */
public DarculaComboBoxUI(float arc,
                         Insets borderCompensation,
                         boolean paintArrowButton) {
    myArc = arc;
    myBorderCompensation = borderCompensation;
    myPaintArrowButton = paintArrowButton;
}
/** @deprecated use {@link #DarculaComboBoxUI()} instead; the argument is ignored. */
@SuppressWarnings("unused")
@Deprecated
public DarculaComboBoxUI(JComboBox c) {}
/** Swing UI-delegate factory entry point (looked up reflectively by the look-and-feel). */
@SuppressWarnings({"MethodOverridesStaticMethodOfSuperclass", "unused"})
public static ComponentUI createUI(final JComponent c) {
    return new DarculaComboBoxUI();
}
private KeyListener editorKeyListener;
private FocusListener editorFocusListener;
private PropertyChangeListener propertyListener;
@Override
protected void installDefaults() {
    super.installDefaults();
    // Installs this UI itself as the combo box border (this class implements Border).
    installDarculaDefaults();
}
@Override
protected void uninstallDefaults() {
    super.uninstallDefaults();
    // Removes the border installed by installDarculaDefaults().
    uninstallDarculaDefaults();
}
// This UI paints its own border, so install itself as the component border.
protected void installDarculaDefaults() {
    comboBox.setBorder(this);
}
// Counterpart of installDarculaDefaults(): drop the self-installed border.
protected void uninstallDarculaDefaults() {
    comboBox.setBorder(null);
}
@Override
protected void installListeners() {
    super.installListeners();
    // Track "enabled" changes so an embedded EditorTextField editor updates its background.
    propertyListener = createPropertyListener();
    comboBox.addPropertyChangeListener(propertyListener);
}
/** Detaches the property listener installed in {@link #installListeners()}. */
@Override
public void uninstallListeners() {
    super.uninstallListeners();
    PropertyChangeListener listener = propertyListener;
    if (listener == null) {
        return;
    }
    comboBox.removePropertyChangeListener(listener);
    propertyListener = null;
}
/**
 * @return {@code true} when the combo box should use the classic Swing popup,
 *         i.e. the {@link DarculaJBPopupComboPopup#CLIENT_PROP} client property is NOT set.
 */
public static boolean hasSwingPopup(JComponent component) {
    return component.getClientProperty(DarculaJBPopupComboPopup.CLIENT_PROP) == null;
}
@Override
protected ComboPopup createPopup() {
    // Classic Swing popup by default; JBPopup-based popup when the client property is set.
    return hasSwingPopup(comboBox) ? new CustomComboPopup(comboBox) : new DarculaJBPopupComboPopup<>(comboBox);
}
/**
 * Listener that keeps an embedded {@code EditorTextField} editor's background in sync
 * with the combo box's enabled state.
 */
protected PropertyChangeListener createPropertyListener() {
    return event -> {
        if (!"enabled".equals(event.getPropertyName())) {
            return;
        }
        EditorTextField field = UIUtil.findComponentOfType((JComponent)editor, EditorTextField.class);
        if (field == null) {
            return;
        }
        boolean isEnabled = event.getNewValue() == Boolean.TRUE;
        field.setBackground(UIManager.getColor(isEnabled ? "TextField.background" : "ComboBox.disabledBackground"));
    };
}
/**
 * Creates the drop-down arrow button with custom Darcula painting: a rounded
 * background on the right edge, an optional vertical separator line, and the
 * arrow glyph drawn by {@link #paintArrow(Graphics2D, JButton)}.
 */
@Override
protected JButton createArrowButton() {
    Color bg = comboBox.getBackground();
    Color fg = comboBox.getForeground();
    JButton button = new BasicArrowButton(SwingConstants.SOUTH, bg, fg, fg, fg) {
        @Override
        public void paint(Graphics g) {
            Graphics2D g2 = (Graphics2D)g.create();
            Rectangle r = new Rectangle(getSize());
            // Pull the paint area in from the top/right/bottom border pixels.
            JBInsets.removeFrom(r, JBUI.insets(1, 0, 1, 1));
            try {
                g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
                g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
                g2.translate(r.x, r.y);
                if (myPaintArrowButton) {
                    float bw = BW.getFloat();
                    float lw = LW.getFloat();
                    // Shrink the corner arc by border+line width; clamp at 0 for small arcs.
                    float arc = myArc;
                    arc = arc > bw + lw ? arc - bw - lw : 0.0f;
                    // Background shape: square on the left, rounded on the right corners.
                    Path2D innerShape = new Path2D.Float();
                    innerShape.moveTo(lw, bw + lw);
                    innerShape.lineTo(r.width - bw - lw - arc, bw + lw);
                    innerShape.quadTo(r.width - bw - lw, bw + lw, r.width - bw - lw, bw + lw + arc);
                    innerShape.lineTo(r.width - bw - lw, r.height - bw - lw - arc);
                    innerShape.quadTo(r.width - bw - lw, r.height - bw - lw, r.width - bw - lw - arc, r.height - bw - lw);
                    innerShape.lineTo(lw, r.height - bw - lw);
                    innerShape.closePath();
                    g2.setColor(JBUI.CurrentTheme.Arrow.backgroundColor(comboBox.isEnabled(), comboBox.isEditable()));
                    g2.fill(innerShape);
                    // Paint vertical line
                    if (comboBox.isEditable() || ClientProperty.isTrue(comboBox, PAINT_VERTICAL_LINE)) {
                        g2.setColor(getOutlineColor(comboBox.isEnabled(), false));
                        g2.fill(new Rectangle2D.Float(0, bw + lw, LW.getFloat(), r.height - (bw + lw) * 2));
                    }
                }
                paintArrow(g2, this);
            }
            finally {
                g2.dispose();
            }
        }
        @Override
        public Dimension getPreferredSize() {
            return getArrowButtonPreferredSize(comboBox);
        }
    };
    button.setBorder(JBUI.Borders.empty());
    button.setOpaque(false);
    return button;
}
// Fills the down-arrow triangle with the theme's arrow foreground color.
protected void paintArrow(Graphics2D g2, JButton btn) {
g2.setColor(JBUI.CurrentTheme.Arrow.foregroundColor(comboBox.isEnabled()));
g2.fill(getArrowShape(btn));
}
/**
 * @deprecated no longer called; the arrow background is taken from
 * {@code JBUI.CurrentTheme.Arrow} directly (the parameter is ignored).
 */
@SuppressWarnings("unused")
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
protected Color getArrowButtonFillColor(Color defaultColor) {
return JBUI.CurrentTheme.Arrow.backgroundColor(comboBox.isEnabled(), comboBox.isEditable());
}
@NotNull
static Dimension getArrowButtonPreferredSize(@Nullable JComboBox comboBox) {
// Fall back to default combobox insets when no combobox is supplied.
Insets i = comboBox != null ? comboBox.getInsets() : getDefaultComboBoxInsets();
// Height follows compact vs. regular mode plus vertical insets.
int height = (isCompact(comboBox) ? COMPACT_HEIGHT.get() : MINIMUM_HEIGHT.get()) + i.top + i.bottom;
return new Dimension(ARROW_BUTTON_WIDTH.get() + i.left, height);
}
/**
 * Builds the downward triangle painted inside the arrow button,
 * centered horizontally (shifted 1px left) and shifted 1px down.
 */
static Shape getArrowShape(Component button) {
  Rectangle bounds = new Rectangle(button.getSize());
  JBInsets.removeFrom(bounds, JBUI.insets(1, 0, 1, 1));
  int triangleWidth = JBUIScale.scale(9);
  int triangleHeight = JBUIScale.scale(5);
  int left = (bounds.width - triangleWidth) / 2 - JBUIScale.scale(1);
  int top = (bounds.height - triangleHeight) / 2 + JBUIScale.scale(1);
  Path2D triangle = new Path2D.Float();
  triangle.moveTo(left, top);
  triangle.lineTo(left + triangleWidth, top);
  triangle.lineTo(left + triangleWidth / 2.0f, top + triangleHeight);
  triangle.lineTo(left, top);
  triangle.closePath();
  return triangle;
}
@NotNull
private static JBInsets getDefaultComboBoxInsets() {
// Uniform 3px (scaled) insets on all sides.
return JBUI.insets(3);
}
/**
 * Paints the combobox: parent-colored backdrop (for opaque components),
 * the rounded background shape, and — for non-editable boxes — the
 * currently selected value.
 */
@Override
public void paint(Graphics g, JComponent c) {
Container parent = c.getParent();
if (parent != null && c.isOpaque()) {
// In table-cell-editor mode use the editor background for the backdrop.
g.setColor(DarculaUIUtil.isTableCellEditor(c) && editor != null ? editor.getBackground() : parent.getBackground());
g.fillRect(0, 0, c.getWidth(), c.getHeight());
}
Graphics2D g2 = (Graphics2D)g.create();
Rectangle r = new Rectangle(c.getSize());
JBInsets.removeFrom(r, myBorderCompensation);
try {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
g2.translate(r.x, r.y);
// Borderless comboboxes use the thin line width instead of the border width.
float bw = isBorderless(c) ? LW.getFloat() : BW.getFloat();
g2.setColor(getBackgroundColor());
g2.fill(getOuterShape(r, bw, myArc));
}
finally {
g2.dispose();
}
if (!comboBox.isEditable()) {
checkFocus();
paintCurrentValue(g, rectangleForCurrentValue(), hasFocus);
}
// remove staled renderers from hierarchy
// see BasicTreeUI#paint
// see BasicListUI#paintImpl
// see BasicTableUI#paintCells
currentValuePane.removeAll();
}
/**
 * Resolves the background color: editable boxes take the editor's background
 * (or a disabled color); non-editable boxes prefer a ColoredItem's own color,
 * then an explicitly-set background, then the theme default.
 */
private Color getBackgroundColor() {
Color bg = comboBox.getBackground();
if (comboBox.isEditable() && editor != null) {
// An explicitly-set (non-UIResource) background wins over the disabled default.
return comboBox.isEnabled() ? editor.getBackground() :
comboBox.isBackgroundSet() && !(bg instanceof UIResource) ? bg : UIUtil.getComboBoxDisabledBackground();
}
else {
Object value = comboBox.getSelectedItem();
// Items may carry their own color (ColoredItem contract).
Color coloredItemColor = value instanceof ColoredItem ? ((ColoredItem)value).getColor(): null;
return ObjectUtils.notNull(coloredItemColor,
comboBox.isBackgroundSet() && !(bg instanceof UIResource) ? bg :
comboBox.isEnabled() ? NON_EDITABLE_BACKGROUND : UIUtil.getComboBoxDisabledBackground());
}
}
/**
 * @deprecated Use {@link DarculaUIUtil#isTableCellEditor(Component)} instead
 */
@Deprecated
@ApiStatus.ScheduledForRemoval(inVersion = "2021.3")
protected static boolean isTableCellEditor(JComponent c) {
// Thin delegation kept only for binary compatibility until removal.
return DarculaUIUtil.isTableCellEditor(c);
}
/**
 * Paints the currently selected value for a non-editable combobox using the
 * combobox renderer. The renderer component's opacity, border, ipad, icon and
 * enabled state are temporarily tweaked for painting and restored afterwards.
 */
@Override
public void paintCurrentValue(Graphics g, Rectangle bounds, boolean hasFocus) {
ListCellRenderer<Object> renderer = comboBox.getRenderer();
Object value = comboBox.getSelectedItem();
Component c = renderer.getListCellRendererComponent(listBox, value, -1, false, false);
c.setFont(comboBox.getFont());
c.setBackground(getBackgroundColor());
if (hasFocus && !isPopupVisible(comboBox)) {
c.setForeground(listBox.getForeground());
}
else {
c.setForeground(comboBox.isEnabled() ? comboBox.getForeground() :
JBColor.namedColor("ComboBox.disabledForeground", comboBox.getForeground()));
}
// paint selection in table-cell-editor mode correctly
boolean changeOpaque = c instanceof JComponent && DarculaUIUtil.isTableCellEditor(comboBox) && c.isOpaque();
if (changeOpaque) {
((JComponent)c).setOpaque(false);
}
boolean shouldValidate = false;
if (c instanceof JPanel) {
shouldValidate = true;
}
Rectangle r = new Rectangle(bounds);
// Saved renderer state, restored after painting below.
Icon icon = null;
Insets iPad = null;
Border border = null;
boolean enabled = true;
if (c instanceof SimpleColoredComponent) {
SimpleColoredComponent cc = (SimpleColoredComponent)c;
iPad = cc.getIpad();
border = cc.getBorder();
enabled = cc.isEnabled();
cc.setBorder(JBUI.Borders.empty());
cc.setIpad(JBInsets.emptyInsets());
cc.setEnabled(comboBox.isEnabled());
icon = cc.getIcon();
if (!cc.isIconOnTheRight()) {
cc.setIcon(OffsetIcon.getOriginalIcon(icon));
}
}
else if (c instanceof JLabel) {
JLabel cc = (JLabel)c;
border = cc.getBorder();
cc.setBorder(JBUI.Borders.empty());
icon = cc.getIcon();
cc.setIcon(OffsetIcon.getOriginalIcon(icon));
// the following trimMiddle approach is not good for smooth resizing:
// the text jumps as more or less space becomes available.
// a proper text layout algorithm on painting in DarculaLabelUI can fix that.
String text = cc.getText();
int maxWidth = bounds.width - (padding == null || StartupUiUtil.isUnderDarcula() ? 0 : padding.right);
if (StringUtil.isNotEmpty(text) && cc.getPreferredSize().width > maxWidth) {
// Binary-search the longest middle-trimmed text that still fits maxWidth.
int max0 = ObjectUtils.binarySearch(7, text.length() - 1, idx -> {
cc.setText(StringUtil.trimMiddle(text, idx));
return Comparing.compare(cc.getPreferredSize().width, maxWidth);
});
int max = max0 < 0 ? -max0 - 2 : max0;
if (max > 7 && max < text.length()) {
cc.setText(StringUtil.trimMiddle(text, max));
}
}
}
else if (c instanceof JComponent) {
JComponent cc = (JComponent)c;
border = cc.getBorder();
cc.setBorder(JBUI.Borders.empty());
}
currentValuePane.paintComponent(g, c, comboBox, r.x, r.y, r.width, r.height, shouldValidate);
// return opaque for combobox popup items painting
if (changeOpaque) {
((JComponent)c).setOpaque(true);
}
// Restore the renderer component's saved state.
if (c instanceof SimpleColoredComponent) {
SimpleColoredComponent cc = (SimpleColoredComponent)c;
cc.setIpad(iPad);
cc.setIcon(icon);
cc.setBorder(border);
cc.setEnabled(enabled);
}
else if (c instanceof JLabel) {
JLabel cc = (JLabel)c;
cc.setBorder(border);
cc.setIcon(icon);
}
else if (c instanceof JComponent) {
JComponent cc = (JComponent)c;
cc.setBorder(border);
}
}
/**
 * Creates the default editor, but clears the fixed column count that
 * BasicComboBoxEditor's text field sets (9 by default).
 */
@Override
protected ComboBoxEditor createEditor() {
  ComboBoxEditor result = super.createEditor();
  // Reset fixed columns amount set to 9 by default
  if (result instanceof BasicComboBoxEditor) {
    Component editorComponent = result.getEditorComponent();
    ((JTextField)editorComponent).setColumns(0);
  }
  return result;
}
/**
 * Installs a key listener on the editor component that forwards unmodified
 * Up/Down key events to the combobox so popup navigation works while typing.
 */
protected void installEditorKeyListener(@NotNull ComboBoxEditor cbe) {
Component ec = cbe.getEditorComponent();
if (ec != null) {
editorKeyListener = new KeyAdapter() {
@Override
public void keyPressed(KeyEvent e) {
process(e);
}
@Override
public void keyReleased(KeyEvent e) {
process(e);
}
private void process(KeyEvent e) {
final int code = e.getKeyCode();
// Only plain (no modifier) Up/Down are redirected to the combobox.
if ((code == KeyEvent.VK_UP || code == KeyEvent.VK_DOWN) && e.getModifiers() == 0) {
comboBox.dispatchEvent(e);
}
}
};
ec.addKeyListener(editorKeyListener);
}
}
/**
 * Paints the combobox border: outline/focus ring in normal mode, or the
 * dedicated cell-editor border when used as a table cell editor.
 */
@Override
public void paintBorder(Component c, Graphics g, int x, int y, int width, int height) {
if (!(c instanceof JComponent)) return;
Graphics2D g2 = (Graphics2D)g.create();
float bw = BW.getFloat();
Rectangle r = new Rectangle(x, y, width, height);
try {
checkFocus();
if (!DarculaUIUtil.isTableCellEditor(c)) {
g2.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
g2.setRenderingHint(RenderingHints.KEY_STROKE_CONTROL, RenderingHints.VALUE_STROKE_NORMALIZE);
JBInsets.removeFrom(r, myBorderCompensation);
g2.translate(r.x, r.y);
float lw = LW.getFloat();
// A client-set outline (e.g. error/warning) wins over the focus ring.
Object op = comboBox.getClientProperty("JComponent.outline");
if (comboBox.isEnabled() && op != null) {
paintOutlineBorder(g2, r.width, r.height, myArc, true, hasFocus, Outline.valueOf(op.toString()));
}
else {
if (hasFocus && !isBorderless(c)) {
paintOutlineBorder(g2, r.width, r.height, myArc, true, true, Outline.focus);
}
paintBorder(c, g2, isBorderless(c) ? lw : bw, r, lw, myArc);
}
}
else {
paintCellEditorBorder(g2, c, r, hasFocus);
}
}
finally {
g2.dispose();
}
}
/**
 * Fills the ring between the outer and inner rounded rectangles with the
 * outline color; even-odd winding leaves only the border ring filled.
 */
protected void paintBorder(Component c, Graphics2D g2, float bw, Rectangle r, float lw, float arc) {
Path2D border = new Path2D.Float(Path2D.WIND_EVEN_ODD);
border.append(getOuterShape(r, bw, arc), false);
// Shrink the arc for the inner shape so the ring keeps a constant width.
arc = arc > lw ? arc - lw : 0.0f;
border.append(getInnerShape(r, bw, lw, arc), false);
g2.setColor(getOutlineColor(c.isEnabled(), hasFocus));
g2.fill(border);
}
// Outer rounded rectangle, inset by the border width on all sides.
protected RectangularShape getOuterShape(Rectangle r, float bw, float arc) {
return new RoundRectangle2D.Float(bw, bw, r.width - bw * 2, r.height - bw * 2, arc, arc);
}
// Inner rounded rectangle, additionally inset by the line width.
protected RectangularShape getInnerShape(Rectangle r, float bw, float lw, float arc) {
return new RoundRectangle2D.Float(bw + lw, bw + lw, r.width - (bw + lw) * 2, r.height - (bw + lw) * 2, arc, arc);
}
/**
 * Recomputes the hasFocus field: false when disabled, otherwise true when
 * either the combobox itself or its editor component owns keyboard focus.
 */
protected void checkFocus() {
  hasFocus = false;
  if (!comboBox.isEnabled()) {
    return;
  }
  hasFocus = hasFocus(comboBox);
  if (hasFocus) {
    return;
  }
  ComboBoxEditor comboBoxEditor = comboBox.getEditor();
  if (comboBoxEditor != null) {
    hasFocus = hasFocus(comboBoxEditor.getEditorComponent());
  }
}
/** True when the current keyboard focus owner is {@code c} or a descendant of it. */
protected static boolean hasFocus(Component c) {
  Component focusOwner = KeyboardFocusManager.getCurrentKeyboardFocusManager().getFocusOwner();
  if (focusOwner == null) {
    return false;
  }
  return SwingUtilities.isDescendingFrom(focusOwner, c);
}
@Override
public Insets getBorderInsets(Component c) {
// Cell editors and compact mode get tight insets; borderless gets none.
return DarculaUIUtil.isTableCellEditor(c) || isCompact(c) ? JBInsets.create(2, 3) :
isBorderless(c) ? JBInsets.emptyInsets() : getDefaultComboBoxInsets();
}
@Override
public boolean isBorderOpaque() {
// The border is painted with rounded, partially transparent shapes.
return false;
}
/**
 * Combines a base size and an optional editor size with the arrow button
 * dimensions and the UI's minimum width/height constraints.
 */
protected Dimension getSizeWithButton(Dimension size, Dimension editorSize) {
Insets i = getInsets();
Dimension abSize = getArrowButtonPreferredSize(comboBox);
if (isCompact(comboBox) && size != null) {
JBInsets.removeFrom(size, padding); // don't count paddings in compact mode
}
int editorHeight = editorSize != null ? editorSize.height + i.top + i.bottom : 0;
int editorWidth = editorSize != null ? editorSize.width + i.left + padding.left + padding.right : 0;
editorWidth = Math.max(editorWidth, MINIMUM_WIDTH.get() + i.left);
int width = size != null ? size.width : 0;
int height = size != null ? size.height : 0;
// Width must fit the editor plus the arrow button; height the tallest part.
width = Math.max(editorWidth + abSize.width, width + padding.left);
height = Math.max(Math.max(editorHeight, Math.max(abSize.height, height)),
(isCompact(comboBox) ? COMPACT_HEIGHT.get() : MINIMUM_HEIGHT.get()) + i.top + i.bottom);
return new Dimension(width, height);
}
@Override
public Dimension getPreferredSize(JComponent c) {
// NOTE(review): delegates to super.getMinimumSize(c), not super.getPreferredSize(c).
// Looks intentional (the size is derived from the minimum plus button/editor), but confirm.
return getSizeWithButton(super.getMinimumSize(c), editor != null ? editor.getPreferredSize() : null);
}
@Override
public Dimension getMinimumSize(JComponent c) {
Dimension minSize = super.getMinimumSize(c);
Insets i = c.getInsets();
// Guarantee room for the value area plus the arrow button.
minSize.width = MINIMUM_WIDTH.get() + ARROW_BUTTON_WIDTH.get() + i.left + i.right;
return getSizeWithButton(minSize, editor != null ? editor.getMinimumSize() : null);
}
/**
 * Configures the editor component: installs the Up/Down key forwarding,
 * makes the editor transparent/borderless, repaints on focus changes, and
 * syncs an embedded EditorTextField's background with the enabled state.
 */
@Override
protected void configureEditor() {
super.configureEditor();
installEditorKeyListener(comboBox.getEditor());
if (editor instanceof JComponent) {
JComponent jEditor = (JComponent)editor;
jEditor.setOpaque(false);
jEditor.setBorder(JBUI.Borders.empty());
// Repaint the combobox whenever the editor gains or loses focus,
// so the focus ring is updated.
editorFocusListener = new FocusAdapter() {
@Override
public void focusGained(FocusEvent e) {
update();
}
@Override
public void focusLost(FocusEvent e) {
update();
}
private void update() {
if (comboBox != null) {
comboBox.repaint();
}
}
};
if (editor instanceof JTextComponent) {
editor.addFocusListener(editorFocusListener);
}
else {
// Non-text editors may embed an EditorTextField somewhere inside.
EditorTextField etf = UIUtil.findComponentOfType((JComponent)editor, EditorTextField.class);
if (etf != null) {
etf.addFocusListener(editorFocusListener);
Color c = UIManager.getColor(comboBox.isEnabled() ? "TextField.background" : "ComboBox.disabledBackground");
etf.setBackground(c);
}
}
}
// BasicComboboxUI sets focusability depending on the combobox focusability.
// JPanel usually is unfocusable and uneditable.
// It could be set as an editor when people want to have a composite component as an editor.
// In such cases we should restore unfocusable state for panels.
if (editor instanceof JPanel) {
editor.setFocusable(false);
}
}
/**
 * Removes the listeners installed by configureEditor/installEditorKeyListener,
 * mirroring the JTextComponent vs. embedded EditorTextField branches.
 */
@Override
protected void unconfigureEditor() {
super.unconfigureEditor();
if (editorKeyListener != null) {
editor.removeKeyListener(editorKeyListener);
}
if (editor instanceof JTextComponent) {
if (editorFocusListener != null) {
editor.removeFocusListener(editorFocusListener);
}
}
else {
EditorTextField etf = UIUtil.findComponentOfType((JComponent)editor, EditorTextField.class);
if (etf != null) {
if (editorFocusListener != null) {
etf.removeFocusListener(editorFocusListener);
}
}
}
}
@Override
public boolean isFocusTraversable(JComboBox<?> c) {
// Editable comboboxes pass focus to the editor instead — except composite
// editors that are not themselves editable.
return !comboBox.isEditable() || !(editor instanceof ComboBoxCompositeEditor && ((ComboBoxCompositeEditor<?, ?>)editor).isEditable());
}
/**
 * Layout: the arrow button hugs the trailing edge (orientation-aware) at
 * full height; the editor is laid out separately by layoutEditor().
 */
@Override
protected LayoutManager createLayoutManager() {
return new ComboBoxLayoutManager() {
@Override
public void layoutContainer(Container parent) {
JComboBox cb = (JComboBox)parent;
if (arrowButton != null) {
Dimension aps = arrowButton.getPreferredSize();
if (cb.getComponentOrientation().isLeftToRight()) {
arrowButton.setBounds(cb.getWidth() - aps.width, 0, aps.width, cb.getHeight());
}
else {
arrowButton.setBounds(0, 0, aps.width, cb.getHeight());
}
}
layoutEditor();
}
};
}
/**
 * Positions the editor inside the value rectangle, vertically centered when
 * its preferred height is smaller than the available height.
 */
protected void layoutEditor() {
  if (!comboBox.isEditable() || editor == null) {
    return;
  }
  Rectangle valueBounds = rectangleForCurrentValue();
  Dimension preferred = editor.getPreferredSize();
  if (preferred.height < valueBounds.height) {
    valueBounds.y += (valueBounds.height - preferred.height) / 2;
  }
  valueBounds.height = preferred.height;
  editor.setBounds(valueBounds);
}
/**
 * Computes the rectangle available for the current value / editor:
 * the component bounds minus insets, the arrow button width, and padding.
 */
@Override
protected Rectangle rectangleForCurrentValue() {
Rectangle rect = new Rectangle(comboBox.getSize());
Insets i = getInsets();
JBInsets.removeFrom(rect, i);
// Subtract the arrow button width (its left part overlaps the inset).
rect.width -= arrowButton != null ? (arrowButton.getWidth() - i.left) : rect.height;
JBInsets.removeFrom(rect, padding);
// Non-editable boxes keep the right padding for the painted value.
rect.width += comboBox.isEditable() ? 0 : padding.right;
return rect;
}
// Wide popup that uses preferred size
protected static class CustomComboPopup extends BasicComboPopup {
public CustomComboPopup(JComboBox combo) {
super(combo);
}
@Override
protected void configurePopup() {
super.configurePopup();
// Prefer a LaF-provided border; otherwise fall back per platform.
Border border = UIManager.getBorder("ComboPopup.border");
setBorder(border != null ? border :
SystemInfo.isMac ? JBUI.Borders.empty() :
IdeBorderFactory.createBorder());
putClientProperty("JComboBox.isCellEditor", DarculaUIUtil.isTableCellEditor(comboBox));
}
@Override
public void updateUI() {
// Minimal popup menu UI: layout + colors/font only, no extra defaults.
setUI(new BasicPopupMenuUI() {
@Override
public void uninstallDefaults() {}
@Override
public void installDefaults() {
if (popupMenu.getLayout() == null || popupMenu.getLayout() instanceof UIResource) {
popupMenu.setLayout(new DefaultMenuLayout(popupMenu, BoxLayout.Y_AXIS));
}
popupMenu.setOpaque(true);
LookAndFeel.installColorsAndFont(popupMenu, "PopupMenu.background", "PopupMenu.foreground", "PopupMenu.font");
}
});
}
@Override
public void show(Component invoker, int x, int y) {
// Widen the popup to the combobox's requested minimum popup width.
if (comboBox instanceof ComboBoxWithWidePopup) {
Dimension popupSize = comboBox.getSize();
int minPopupWidth = ((ComboBoxWithWidePopup<?>)comboBox).getMinimumPopupWidth();
Insets insets = getInsets();
popupSize.width = Math.max(popupSize.width, minPopupWidth);
popupSize.setSize(popupSize.width - (insets.right + insets.left), getPopupHeightForRowCount(comboBox.getMaximumRowCount()));
scroller.setMaximumSize(popupSize);
scroller.setPreferredSize(popupSize);
scroller.setMinimumSize(popupSize);
list.revalidate();
}
super.show(invoker, x, y);
}
@Override
protected void configureList() {
super.configureList();
//noinspection unchecked
list.setCellRenderer(new MyDelegateRenderer());
list.putClientProperty(RenderingUtil.ALWAYS_PAINT_SELECTION_AS_FOCUSED, true);
}
// Uniform padding applied to every popup list item.
protected void customizeListRendererComponent(JComponent component) {
component.setBorder(JBUI.Borders.empty(2, 8));
}
@Override
protected PropertyChangeListener createPropertyChangeListener() {
PropertyChangeListener listener = super.createPropertyChangeListener();
return new PropertyChangeListener() {
@Override
public void propertyChange(PropertyChangeEvent evt) {
listener.propertyChange(evt);
// Re-wrap renderers set by client code so item styling is preserved.
if ("renderer".equals(evt.getPropertyName())) {
if (!(list.getCellRenderer() instanceof MyDelegateRenderer)) {
//noinspection unchecked
list.setCellRenderer(new MyDelegateRenderer());
}
}
}
};
}
@Override
protected int getPopupHeightForRowCount(int maxRowCount) {
int minRowCount = Math.min(maxRowCount, comboBox.getItemCount());
int height = 0;
ListCellRenderer<Object> renderer = list.getCellRenderer();
for (int i = 0; i < minRowCount; i++) {
Object value = list.getModel().getElementAt(i);
Component c = renderer.getListCellRendererComponent(list, value, i, false, false);
// The whole method is copied from the parent class except for the following line
// that adjusts the minimum row height of a list cell.
// See WideSelectionListUI.updateLayoutState
height += UIUtil.updateListRowHeight(c.getPreferredSize()).height;
}
if (height == 0) {
height = comboBox.getHeight();
}
Border border = scroller.getViewportBorder();
if (border != null) {
Insets insets = border.getBorderInsets(null);
height += insets.top + insets.bottom;
}
border = scroller.getBorder();
if (border != null) {
Insets insets = border.getBorderInsets(null);
height += insets.top + insets.bottom;
}
return height;
}
// Delegates to the combobox renderer, then applies popup-item styling.
private class MyDelegateRenderer implements ListCellRenderer {
@Override
public Component getListCellRendererComponent(JList list,
Object value,
int index,
boolean isSelected,
boolean cellHasFocus) {
//noinspection unchecked
Component component = comboBox.getRenderer().getListCellRendererComponent(list, value, index, isSelected, cellHasFocus);
if (component instanceof JComponent) {
customizeListRendererComponent((JComponent)component);
}
return component;
}
}
}
}
| |
/**
* Copyright (c) 2012, Lindsay Bradford and other Contributors.
* All rights reserved.
*
* This program and the accompanying materials are made available
* under the terms of the BSD 3-Clause licence which accompanies
* this distribution, and is available at
* http://opensource.org/licenses/BSD-3-Clause
*/
package blacksmyth.personalfinancier.view;
import java.awt.Container;
import java.awt.FlowLayout;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.util.Arrays;
import java.util.Vector;
import javax.swing.JButton;
import javax.swing.JComponent;
import javax.swing.JDialog;
import javax.swing.JFrame;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPasswordField;
import blacksmyth.general.BlacksmythSwingUtilities;
@SuppressWarnings("serial")
/**
 * Modal dialog that prompts the user for a file access password.
 * Whether the user confirmed is reported via {@link #passwordSpecified()};
 * the entered characters are exposed via {@link #getPassword()}.
 */
@SuppressWarnings("serial")
public class PasswordPromptView extends JDialog implements IPasswordPromptView, ActionListener {
  private final JFrame controllingFrame;
  private final JPasswordField passwordField = new JPasswordField(10);
  private JButton okButton, cancelButton;
  // Action commands. Compared with equals() in actionPerformed(); never rely on ==.
  private static final String OK = "Ok";
  private static final String CANCEL = "Cancel";
  private boolean passwordSpecified = false;

  public PasswordPromptView(PersonalFinancierView view) {
    super(view.getWindowFrame(), true); // modal over the application frame
    controllingFrame = view.getWindowFrame();
    buildView();
    this.setSize(this.getPreferredSize());
  }

  // Lays out the prompt panel above the button panel with GridBagLayout.
  private void buildView() {
    this.setTitle("Specify File Access Password");
    Container contentPane = this.getContentPane();
    GridBagLayout gbl = new GridBagLayout();
    GridBagConstraints gbc = new GridBagConstraints();
    contentPane.setLayout(gbl);
    gbc.insets = new Insets(11, 11, 0, 11);
    gbc.gridwidth = GridBagConstraints.REMAINDER;
    gbc.gridheight = 1;
    gbc.gridx = 0;
    gbc.gridy = 0;
    gbc.weightx = 0;
    gbc.anchor = GridBagConstraints.CENTER;
    contentPane.add(createPromptPanel(), gbc);
    gbc.insets = new Insets(17, 12, 11, 11);
    gbc.gridy++;
    gbc.weightx = 1;
    gbc.anchor = GridBagConstraints.EAST;
    contentPane.add(createButtonPanel(), gbc);
  }

  // Label + password field; pressing Enter in the field fires the OK command.
  private JPanel createPromptPanel() {
    JPanel panel = new JPanel();
    passwordField.setActionCommand(OK);
    passwordField.addActionListener(this);
    JLabel label = new JLabel("Enter file password: ");
    label.setDisplayedMnemonic('p');
    label.setLabelFor(passwordField);
    panel.setLayout(new FlowLayout(FlowLayout.TRAILING));
    panel.add(label);
    panel.add(passwordField);
    return panel;
  }

  // Ok/Cancel buttons, equalized in size; Ok is the dialog's default button.
  private JPanel createButtonPanel() {
    JPanel buttonPanel = new JPanel();
    GridBagLayout gbl = new GridBagLayout();
    GridBagConstraints gbc = new GridBagConstraints();
    buttonPanel.setLayout(gbl);
    gbc.insets = new Insets(0, 5, 0, 0);
    gbc.gridwidth = 1;
    gbc.gridheight = 1;
    gbc.gridx = 0;
    gbc.gridy = 0;
    gbc.ipadx = 6;
    gbc.weightx = 1;
    gbc.anchor = GridBagConstraints.CENTER;
    buttonPanel.add(getOkButton(), gbc);
    gbc.gridx = 1;
    buttonPanel.add(getCancelButton(), gbc);
    getRootPane().setDefaultButton(okButton);
    Vector<JComponent> buttons = new Vector<JComponent>();
    buttons.add(okButton);
    buttons.add(cancelButton);
    BlacksmythSwingUtilities.equalizeComponentSizes(buttons);
    return buttonPanel;
  }

  private JButton getCancelButton() {
    cancelButton = new JButton("Cancel");
    cancelButton.setMnemonic(KeyEvent.VK_C);
    cancelButton.setActionCommand(CANCEL);
    cancelButton.addActionListener(this);
    return cancelButton;
  }

  private JButton getOkButton() {
    okButton = new JButton("Ok");
    okButton.setMnemonic(KeyEvent.VK_O);
    okButton.setActionCommand(OK);
    okButton.addActionListener(this);
    return okButton;
  }

  /** Clears and zeroes the entered password, then returns focus to the field. */
  @Override
  public void clearPassword() {
    // Zero out the possible password, for security.
    char[] password = passwordField.getPassword();
    Arrays.fill(password, '0');
    passwordField.setText(null);
    passwordSpecified = false;
    passwordField.selectAll();
    resetFocus();
  }

  /** Handles Ok/Cancel (and Enter in the password field), then hides the dialog. */
  @Override
  public void actionPerformed(ActionEvent e) {
    // Compare action commands with equals(); == on strings is fragile.
    String command = e.getActionCommand();
    if (OK.equals(command)) {
      passwordSpecified = true;
    }
    if (CANCEL.equals(command)) {
      clearPassword();
    }
    setVisible(false);
  }

  // Must be called from the event dispatch thread.
  protected void resetFocus() {
    passwordField.requestFocusInWindow();
  }

  /** Re-centers the dialog over the controlling frame each time it is shown. */
  @Override
  public void setVisible(boolean visible) {
    if (visible) {
      this.setLocationRelativeTo(controllingFrame);
    }
    super.setVisible(visible);
  }

  private void display() {
    setVisible(true);
  }

  @Override
  public boolean passwordSpecified() {
    return passwordSpecified;
  }

  @Override
  public void displaySavePrompt() {
    okButton.setText("Save");
    okButton.setMnemonic('S');
    display();
  }

  @Override
  public void displayLoadPrompt() {
    okButton.setText("Load");
    okButton.setMnemonic('L');
    display();
  }

  @Override
  public char[] getPassword() {
    return passwordField.getPassword();
  }

  @Override
  public void displayError(String errorMessage) {
    JOptionPane.showMessageDialog(controllingFrame, errorMessage, "Password Error", JOptionPane.ERROR_MESSAGE);
  }
}
| |
/*
* Copyright 2000-2014 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.codeInsight.imports;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.roots.ProjectFileIndex;
import com.intellij.openapi.roots.ProjectRootManager;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.psi.*;
import com.intellij.psi.util.QualifiedName;
import com.intellij.util.containers.ContainerUtil;
import com.jetbrains.python.psi.*;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.List;
/**
* An immutable holder of information for one auto-import candidate.
* <p/>
 * There can be two different flavors of such candidates:
* <ul>
* <li>Candidates based on existing imports in module. In this case {@link #getImportElement()} must return not {@code null}.</li>
* <li>Candidates not yet imported. In this case {@link #getPath()} must return not {@code null}.</li>
* </ul>
* <p/>
*
* @author dcheryasov
*/
// NOTE(review): this comment claims package-level visibility is intended,
// but the class below is declared public — confirm which is correct.
public class ImportCandidateHolder implements Comparable<ImportCandidateHolder> {
  @NotNull private final SmartPsiElementPointer<PsiElement> myImportable;
  @Nullable private final SmartPsiElementPointer<PyImportElement> myImportElement;
  @NotNull private final SmartPsiElementPointer<PsiFileSystemItem> myFile;
  @Nullable private final QualifiedName myPath;
  @Nullable private final String myAsName;

  /**
   * Creates new instance.
   *
   * @param importable an element that could be imported either from import element or from file.
   * @param file the file which is the source of the importable (module for symbols, containing directory for modules and packages)
   * @param importElement an existing import element that can be a source for the importable.
   * @param path import path for the file, as a qualified name (a.b.c)
   * For top-level imported symbols it's <em>qualified name of containing module</em> (or package for __init__.py).
   * For modules and packages it should be <em>qualified name of their parental package</em>
   * (empty for modules and packages located at source roots).
   */
  public ImportCandidateHolder(@NotNull PsiElement importable, @NotNull PsiFileSystemItem file,
                               @Nullable PyImportElement importElement, @Nullable QualifiedName path, @Nullable String asName) {
    // One of importElement/path must be present; fail fast before allocating pointers.
    assert importElement != null || path != null;
    SmartPointerManager pointerManager = SmartPointerManager.getInstance(importable.getProject());
    myFile = pointerManager.createSmartPsiElementPointer(file);
    myImportable = pointerManager.createSmartPsiElementPointer(importable);
    myImportElement = importElement != null ? pointerManager.createSmartPsiElementPointer(importElement) : null;
    myPath = path;
    myAsName = asName;
  }

  public ImportCandidateHolder(@NotNull PsiElement importable, @NotNull PsiFileSystemItem file,
                               @Nullable PyImportElement importElement, @Nullable QualifiedName path) {
    this(importable, file, importElement, path, null);
  }

  @Nullable
  public PsiElement getImportable() {
    return myImportable.getElement();
  }

  @Nullable
  public PyImportElement getImportElement() {
    return myImportElement != null ? myImportElement.getElement() : null;
  }

  @Nullable
  public PsiFileSystemItem getFile() {
    return myFile.getElement();
  }

  @Nullable
  public QualifiedName getPath() {
    return myPath;
  }

  /**
   * Helper method that builds an import path, handling all these "import foo", "import foo as bar", "from bar import foo", etc.
   * Either importPath or importSource must be not null.
   *
   * @param name what is ultimately imported.
   * @param importPath known path to import the name.
   * @param source known ImportElement to import the name; its 'as' clause is used if present.
   * @return a properly qualified name.
   */
  @NotNull
  public static String getQualifiedName(@NotNull String name, @Nullable QualifiedName importPath, @Nullable PyImportElement source) {
    final StringBuilder sb = new StringBuilder();
    if (source != null) {
      final PsiElement parent = source.getParent();
      if (parent instanceof PyFromImportStatement) {
        // "from ... import name" — the bare name is usable as-is.
        sb.append(name);
      }
      else {
        // "import mod [as alias]" — qualify through the visible (possibly aliased) name.
        sb.append(source.getVisibleName()).append(".").append(name);
      }
    }
    else {
      if (importPath != null && importPath.getComponentCount() > 0) {
        sb.append(importPath).append(".");
      }
      sb.append(name);
    }
    return sb.toString();
  }

  /**
   * Builds a human-readable representation of the candidate: the qualified
   * name, "()" for functions, the base classes for classes, and the relative
   * "from ..." source when the candidate comes from a from-import.
   */
  @NotNull
  public String getPresentableText(@NotNull String myName) {
    PyImportElement importElement = getImportElement();
    PsiElement importable = getImportable();
    final StringBuilder sb = new StringBuilder(getQualifiedName(myName, myPath, importElement));
    PsiElement parent = null;
    if (importElement != null) {
      parent = importElement.getParent();
    }
    if (importable instanceof PyFunction) {
      sb.append("()");
    }
    else if (importable instanceof PyClass) {
      // Show non-'object' super classes to disambiguate similarly-named classes.
      final List<String> supers = ContainerUtil.mapNotNull(((PyClass)importable).getSuperClasses(null),
                                                           cls -> PyUtil.isObjectClass(cls) ? null : cls.getName());
      if (!supers.isEmpty()) {
        sb.append("(");
        StringUtil.join(supers, ", ", sb);
        sb.append(")");
      }
    }
    if (parent instanceof PyFromImportStatement) {
      sb.append(" from ");
      final PyFromImportStatement fromImportStatement = (PyFromImportStatement)parent;
      // Leading dots encode the relative import level.
      sb.append(StringUtil.repeat(".", fromImportStatement.getRelativeLevel()));
      final PyReferenceExpression source = fromImportStatement.getImportSource();
      if (source != null) {
        sb.append(source.asQualifiedName());
      }
    }
    return sb.toString();
  }

  /**
   * Orders candidates by descending relevance first, then by shorter import
   * path, then lexicographically by path.
   */
  public int compareTo(@NotNull ImportCandidateHolder other) {
    final int lRelevance = getRelevance();
    final int rRelevance = other.getRelevance();
    if (rRelevance != lRelevance) {
      return rRelevance - lRelevance;
    }
    if (myPath != null && other.myPath != null) {
      // prefer shorter paths
      final int lengthDiff = myPath.getComponentCount() - other.myPath.getComponentCount();
      if (lengthDiff != 0) {
        return lengthDiff;
      }
    }
    return Comparing.compare(myPath, other.myPath);
  }

  /**
   * Relevance score: 4 — already imported; 3 — belongs to a project module;
   * 2 — directly under "Lib"; 1 — everything else; 0 — no file or a test dir.
   */
  int getRelevance() {
    if (myImportElement != null) return 4;
    final Project project = myImportable.getProject();
    final PsiFile psiFile = myImportable.getContainingFile();
    final VirtualFile vFile = psiFile == null ? null : psiFile.getVirtualFile();
    if (vFile == null) return 0;
    final ProjectFileIndex fileIndex = ProjectRootManager.getInstance(project).getFileIndex();
    // files under project source are most relevant
    final Module module = fileIndex.getModuleForFile(vFile);
    if (module != null) return 3;
    // vFile may be a filesystem root: guard against a null parent (was an NPE).
    final VirtualFile parentDir = vFile.getParent();
    final String parentName = parentDir != null ? parentDir.getName() : null;
    // then come files directly under Lib
    if ("Lib".equals(parentName)) return 2;
    // tests we don't want
    if ("test".equals(parentName)) return 0;
    return 1;
  }

  @Nullable
  public String getAsName() {
    return myAsName;
  }
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.type;
import com.facebook.presto.common.type.Type;
import com.facebook.presto.common.type.TypeSignature;
import com.facebook.presto.metadata.FunctionAndTypeManager;
import com.facebook.presto.metadata.OperatorNotFoundException;
import com.google.common.collect.ImmutableSet;
import org.testng.annotations.Test;
import java.util.Optional;
import java.util.Set;
import static com.facebook.presto.common.function.OperatorType.EQUAL;
import static com.facebook.presto.common.function.OperatorType.GREATER_THAN;
import static com.facebook.presto.common.function.OperatorType.GREATER_THAN_OR_EQUAL;
import static com.facebook.presto.common.function.OperatorType.HASH_CODE;
import static com.facebook.presto.common.function.OperatorType.IS_DISTINCT_FROM;
import static com.facebook.presto.common.function.OperatorType.LESS_THAN;
import static com.facebook.presto.common.function.OperatorType.LESS_THAN_OR_EQUAL;
import static com.facebook.presto.common.function.OperatorType.NOT_EQUAL;
import static com.facebook.presto.common.type.BigintType.BIGINT;
import static com.facebook.presto.common.type.CharType.createCharType;
import static com.facebook.presto.common.type.DateType.DATE;
import static com.facebook.presto.common.type.DecimalType.createDecimalType;
import static com.facebook.presto.common.type.DoubleType.DOUBLE;
import static com.facebook.presto.common.type.IntegerType.INTEGER;
import static com.facebook.presto.common.type.RealType.REAL;
import static com.facebook.presto.common.type.SmallintType.SMALLINT;
import static com.facebook.presto.common.type.TimeType.TIME;
import static com.facebook.presto.common.type.TimeWithTimeZoneType.TIME_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.TimestampType.TIMESTAMP;
import static com.facebook.presto.common.type.TimestampWithTimeZoneType.TIMESTAMP_WITH_TIME_ZONE;
import static com.facebook.presto.common.type.TinyintType.TINYINT;
import static com.facebook.presto.common.type.TypeSignature.parseTypeSignature;
import static com.facebook.presto.common.type.UnknownType.UNKNOWN;
import static com.facebook.presto.common.type.VarbinaryType.VARBINARY;
import static com.facebook.presto.common.type.VarcharType.VARCHAR;
import static com.facebook.presto.common.type.VarcharType.createUnboundedVarcharType;
import static com.facebook.presto.common.type.VarcharType.createVarcharType;
import static com.facebook.presto.metadata.CastType.CAST;
import static com.facebook.presto.metadata.FunctionAndTypeManager.createTestFunctionAndTypeManager;
import static com.facebook.presto.sql.analyzer.TypeSignatureProvider.fromTypes;
import static com.facebook.presto.type.JoniRegexpType.JONI_REGEXP;
import static com.facebook.presto.type.JsonPathType.JSON_PATH;
import static com.facebook.presto.type.LikePatternType.LIKE_PATTERN;
import static com.facebook.presto.type.Re2JRegexpType.RE2J_REGEXP;
import static java.lang.String.format;
import static java.util.Objects.requireNonNull;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
 * Tests for the built-in type registry exposed through {@link FunctionAndTypeManager}:
 * type lookup, type-only coercions, common-supertype computation, coercion
 * transitivity, cast-operator coverage, and comparison/ordering operator coverage.
 */
public class TestBuiltInTypeRegistry
{
    private final FunctionAndTypeManager functionAndTypeManager = createTestFunctionAndTypeManager();

    /** Looking up an unknown type signature must fail with IllegalArgumentException. */
    @Test
    public void testNonexistentType()
    {
        try {
            functionAndTypeManager.getType(parseTypeSignature("not a real type"));
            fail("Expect to throw IllegalArgumentException");
        }
        catch (IllegalArgumentException e) {
            assertTrue(e.getMessage().matches("Unknown type.*"));
        }
        catch (Throwable t) {
            fail("Expect to throw IllegalArgumentException, got " + t.getClass());
        }
    }

    /**
     * A "type-only" coercion changes the declared type without rewriting the value
     * (e.g. widening varchar length or decimal precision, including inside
     * array/map element types). Narrowing is never type-only.
     */
    @Test
    public void testIsTypeOnlyCoercion()
    {
        assertTrue(isTypeOnlyCoercion(BIGINT, BIGINT));
        assertTrue(isTypeOnlyCoercion("varchar(42)", "varchar(44)"));
        assertFalse(isTypeOnlyCoercion("varchar(44)", "varchar(42)"));
        assertFalse(isTypeOnlyCoercion("char(42)", "varchar(42)"));
        assertTrue(isTypeOnlyCoercion("array(varchar(42))", "array(varchar(44))"));
        assertFalse(isTypeOnlyCoercion("array(varchar(44))", "array(varchar(42))"));
        assertTrue(isTypeOnlyCoercion("decimal(22,1)", "decimal(23,1)"));
        assertTrue(isTypeOnlyCoercion("decimal(2,1)", "decimal(3,1)"));
        assertFalse(isTypeOnlyCoercion("decimal(23,1)", "decimal(22,1)"));
        assertFalse(isTypeOnlyCoercion("decimal(3,1)", "decimal(2,1)"));
        assertFalse(isTypeOnlyCoercion("decimal(3,1)", "decimal(22,1)"));
        assertTrue(isTypeOnlyCoercion("array(decimal(22,1))", "array(decimal(23,1))"));
        assertTrue(isTypeOnlyCoercion("array(decimal(2,1))", "array(decimal(3,1))"));
        assertFalse(isTypeOnlyCoercion("array(decimal(23,1))", "array(decimal(22,1))"));
        assertFalse(isTypeOnlyCoercion("array(decimal(3,1))", "array(decimal(2,1))"));
        assertTrue(isTypeOnlyCoercion("map(decimal(2,1), decimal(2,1))", "map(decimal(2,1), decimal(3,1))"));
        assertFalse(isTypeOnlyCoercion("map(decimal(2,1), decimal(2,1))", "map(decimal(2,1), decimal(23,1))"));
        assertFalse(isTypeOnlyCoercion("map(decimal(2,1), decimal(2,1))", "map(decimal(2,1), decimal(3,2))"));
        assertTrue(isTypeOnlyCoercion("map(decimal(22,1), decimal(2,1))", "map(decimal(23,1), decimal(3,1))"));
        assertFalse(isTypeOnlyCoercion("map(decimal(23,1), decimal(3,1))", "map(decimal(22,1), decimal(2,1))"));
    }

    /**
     * Exhaustive table of pairwise compatibility rules: for each pair of types,
     * asserts the common supertype (if any) and the allowed coercion direction(s).
     * Each assertion line encodes one rule of the engine's coercion semantics.
     */
    @Test
    public void testTypeCompatibility()
    {
        assertThat(UNKNOWN, UNKNOWN).hasCommonSuperType(UNKNOWN).canCoerceToEachOther();
        assertThat(BIGINT, BIGINT).hasCommonSuperType(BIGINT).canCoerceToEachOther();
        assertThat(UNKNOWN, BIGINT).hasCommonSuperType(BIGINT).canCoerceFirstToSecondOnly();
        assertThat(BIGINT, DOUBLE).hasCommonSuperType(DOUBLE).canCoerceFirstToSecondOnly();
        assertThat(DATE, TIMESTAMP).hasCommonSuperType(TIMESTAMP).canCoerceFirstToSecondOnly();
        assertThat(DATE, TIMESTAMP_WITH_TIME_ZONE).hasCommonSuperType(TIMESTAMP_WITH_TIME_ZONE).canCoerceFirstToSecondOnly();
        assertThat(TIME, TIME_WITH_TIME_ZONE).hasCommonSuperType(TIME_WITH_TIME_ZONE).canCoerceFirstToSecondOnly();
        assertThat(TIMESTAMP, TIMESTAMP_WITH_TIME_ZONE).hasCommonSuperType(TIMESTAMP_WITH_TIME_ZONE).canCoerceFirstToSecondOnly();
        assertThat(VARCHAR, JONI_REGEXP).hasCommonSuperType(JONI_REGEXP).canCoerceFirstToSecondOnly();
        assertThat(VARCHAR, RE2J_REGEXP).hasCommonSuperType(RE2J_REGEXP).canCoerceFirstToSecondOnly();
        assertThat(VARCHAR, LIKE_PATTERN).hasCommonSuperType(LIKE_PATTERN).canCoerceFirstToSecondOnly();
        assertThat(VARCHAR, JSON_PATH).hasCommonSuperType(JSON_PATH).canCoerceFirstToSecondOnly();
        assertThat(REAL, DOUBLE).hasCommonSuperType(DOUBLE).canCoerceFirstToSecondOnly();
        assertThat(REAL, TINYINT).hasCommonSuperType(REAL).canCoerceSecondToFirstOnly();
        assertThat(REAL, SMALLINT).hasCommonSuperType(REAL).canCoerceSecondToFirstOnly();
        assertThat(REAL, INTEGER).hasCommonSuperType(REAL).canCoerceSecondToFirstOnly();
        assertThat(REAL, BIGINT).hasCommonSuperType(REAL).canCoerceSecondToFirstOnly();
        assertThat(TIMESTAMP, TIME_WITH_TIME_ZONE).isIncompatible();
        assertThat(VARBINARY, VARCHAR).isIncompatible();
        assertThat("unknown", "array(bigint)").hasCommonSuperType("array(bigint)").canCoerceFirstToSecondOnly();
        assertThat("array(bigint)", "array(double)").hasCommonSuperType("array(double)").canCoerceFirstToSecondOnly();
        assertThat("array(bigint)", "array(unknown)").hasCommonSuperType("array(bigint)").canCoerceSecondToFirstOnly();
        assertThat("map(bigint,double)", "map(bigint,double)").hasCommonSuperType("map(bigint,double)").canCoerceToEachOther();
        assertThat("map(bigint,double)", "map(double,double)").hasCommonSuperType("map(double,double)").canCoerceFirstToSecondOnly();
        assertThat("row(a bigint,b double,c varchar)", "row(a bigint,b double,c varchar)").hasCommonSuperType("row(a bigint,b double,c varchar)").canCoerceToEachOther();
        assertThat("decimal(22,1)", "decimal(23,1)").hasCommonSuperType("decimal(23,1)").canCoerceFirstToSecondOnly();
        assertThat("bigint", "decimal(23,1)").hasCommonSuperType("decimal(23,1)").canCoerceFirstToSecondOnly();
        assertThat("bigint", "decimal(18,0)").hasCommonSuperType("decimal(19,0)").cannotCoerceToEachOther();
        assertThat("bigint", "decimal(19,0)").hasCommonSuperType("decimal(19,0)").canCoerceFirstToSecondOnly();
        assertThat("bigint", "decimal(37,1)").hasCommonSuperType("decimal(37,1)").canCoerceFirstToSecondOnly();
        assertThat("real", "decimal(37,1)").hasCommonSuperType("real").canCoerceSecondToFirstOnly();
        assertThat("array(decimal(23,1))", "array(decimal(22,1))").hasCommonSuperType("array(decimal(23,1))").canCoerceSecondToFirstOnly();
        assertThat("array(bigint)", "array(decimal(2,1))").hasCommonSuperType("array(decimal(20,1))").cannotCoerceToEachOther();
        assertThat("array(bigint)", "array(decimal(20,1))").hasCommonSuperType("array(decimal(20,1))").canCoerceFirstToSecondOnly();
        assertThat("decimal(3,2)", "double").hasCommonSuperType("double").canCoerceFirstToSecondOnly();
        assertThat("decimal(22,1)", "double").hasCommonSuperType("double").canCoerceFirstToSecondOnly();
        assertThat("decimal(37,1)", "double").hasCommonSuperType("double").canCoerceFirstToSecondOnly();
        assertThat("decimal(37,37)", "double").hasCommonSuperType("double").canCoerceFirstToSecondOnly();
        assertThat("decimal(22,1)", "real").hasCommonSuperType("real").canCoerceFirstToSecondOnly();
        assertThat("decimal(3,2)", "real").hasCommonSuperType("real").canCoerceFirstToSecondOnly();
        assertThat("decimal(37,37)", "real").hasCommonSuperType("real").canCoerceFirstToSecondOnly();
        assertThat("integer", "decimal(23,1)").hasCommonSuperType("decimal(23,1)").canCoerceFirstToSecondOnly();
        assertThat("integer", "decimal(9,0)").hasCommonSuperType("decimal(10,0)").cannotCoerceToEachOther();
        assertThat("integer", "decimal(10,0)").hasCommonSuperType("decimal(10,0)").canCoerceFirstToSecondOnly();
        assertThat("integer", "decimal(37,1)").hasCommonSuperType("decimal(37,1)").canCoerceFirstToSecondOnly();
        assertThat("tinyint", "decimal(2,0)").hasCommonSuperType("decimal(3,0)").cannotCoerceToEachOther();
        assertThat("tinyint", "decimal(9,0)").hasCommonSuperType("decimal(9,0)").canCoerceFirstToSecondOnly();
        assertThat("tinyint", "decimal(2,1)").hasCommonSuperType("decimal(4,1)").cannotCoerceToEachOther();
        assertThat("tinyint", "decimal(3,0)").hasCommonSuperType("decimal(3,0)").canCoerceFirstToSecondOnly();
        assertThat("tinyint", "decimal(37,1)").hasCommonSuperType("decimal(37,1)").canCoerceFirstToSecondOnly();
        assertThat("smallint", "decimal(37,1)").hasCommonSuperType("decimal(37,1)").canCoerceFirstToSecondOnly();
        assertThat("smallint", "decimal(4,0)").hasCommonSuperType("decimal(5,0)").cannotCoerceToEachOther();
        assertThat("smallint", "decimal(5,0)").hasCommonSuperType("decimal(5,0)").canCoerceFirstToSecondOnly();
        assertThat("smallint", "decimal(2,0)").hasCommonSuperType("decimal(5,0)").cannotCoerceToEachOther();
        assertThat("smallint", "decimal(9,0)").hasCommonSuperType("decimal(9,0)").canCoerceFirstToSecondOnly();
        assertThat("smallint", "decimal(2,1)").hasCommonSuperType("decimal(6,1)").cannotCoerceToEachOther();
        assertThat("char(42)", "char(40)").hasCommonSuperType("char(42)").canCoerceSecondToFirstOnly();
        assertThat("char(42)", "char(44)").hasCommonSuperType("char(44)").canCoerceFirstToSecondOnly();
        assertThat("varchar(42)", "varchar(42)").hasCommonSuperType("varchar(42)").canCoerceToEachOther();
        assertThat("varchar(42)", "varchar(44)").hasCommonSuperType("varchar(44)").canCoerceFirstToSecondOnly();
        assertThat("char(40)", "varchar(42)").hasCommonSuperType("char(42)").cannotCoerceToEachOther();
        assertThat("char(42)", "varchar(42)").hasCommonSuperType("char(42)").canCoerceSecondToFirstOnly();
        assertThat("char(44)", "varchar(42)").hasCommonSuperType("char(44)").canCoerceSecondToFirstOnly();
        assertThat(createType("char(42)"), JONI_REGEXP).hasCommonSuperType(JONI_REGEXP).canCoerceFirstToSecondOnly();
        assertThat(createType("char(42)"), JSON_PATH).hasCommonSuperType(JSON_PATH).canCoerceFirstToSecondOnly();
        assertThat(createType("char(42)"), LIKE_PATTERN).hasCommonSuperType(LIKE_PATTERN).canCoerceFirstToSecondOnly();
        assertThat(createType("char(42)"), RE2J_REGEXP).hasCommonSuperType(RE2J_REGEXP).canCoerceFirstToSecondOnly();
        assertThat("row(varchar(2))", "row(varchar(5))").hasCommonSuperType("row(varchar(5))").canCoerceFirstToSecondOnly();
        assertThat("row(a integer)", "row(a bigint)").hasCommonSuperType("row(a bigint)").canCoerceFirstToSecondOnly();
        assertThat("row(a integer)", "row(b bigint)").hasCommonSuperType("row(bigint)").canCoerceFirstToSecondOnly();
        assertThat("row(integer)", "row(b bigint)").hasCommonSuperType("row(bigint)").canCoerceFirstToSecondOnly();
        assertThat("row(a integer)", "row(a varchar(2))").isIncompatible();
        assertThat("row(a integer)", "row(a integer,b varchar(2))").isIncompatible();
        assertThat("row(a integer,b varchar(2))", "row(a bigint,c varchar(5))").hasCommonSuperType("row(a bigint,varchar(5))").canCoerceFirstToSecondOnly();
        assertThat("row(a integer,b varchar(2))", "row(bigint,varchar(5))").hasCommonSuperType("row(bigint,varchar(5))").canCoerceFirstToSecondOnly();
        assertThat("row(a integer,b varchar(5))", "row(c bigint,d varchar(2))").hasCommonSuperType("row(bigint,varchar(5))").cannotCoerceToEachOther();
        assertThat("row(a row(c integer),b varchar(2))", "row(row(c integer),varchar(5))").hasCommonSuperType("row(row(c integer),varchar(5))").canCoerceFirstToSecondOnly();
        assertThat("row(a row(c integer),b varchar(2))", "row(a row(c integer),d varchar(5))").hasCommonSuperType("row(a row(c integer),varchar(5))").canCoerceFirstToSecondOnly();
        assertThat("row(a row(c integer),b varchar(5))", "row(d row(e integer),b varchar(5))").hasCommonSuperType("row(row(integer),b varchar(5))").canCoerceToEachOther();
    }

    /** Integral types coerce to decimal with precision just large enough to hold them. */
    @Test
    public void testCoerceTypeBase()
    {
        assertEquals(functionAndTypeManager.coerceTypeBase(createDecimalType(21, 1), "decimal"), Optional.of(createDecimalType(21, 1)));
        assertEquals(functionAndTypeManager.coerceTypeBase(BIGINT, "decimal"), Optional.of(createDecimalType(19, 0)));
        assertEquals(functionAndTypeManager.coerceTypeBase(INTEGER, "decimal"), Optional.of(createDecimalType(10, 0)));
        assertEquals(functionAndTypeManager.coerceTypeBase(TINYINT, "decimal"), Optional.of(createDecimalType(3, 0)));
        assertEquals(functionAndTypeManager.coerceTypeBase(SMALLINT, "decimal"), Optional.of(createDecimalType(5, 0)));
    }

    /** If a->b and b->c coercions exist, a->c must also exist (transitivity invariant). */
    @Test
    public void testCanCoerceIsTransitive()
    {
        Set<Type> types = getStandardPrimitiveTypes();
        for (Type transitiveType : types) {
            for (Type resultType : types) {
                if (functionAndTypeManager.canCoerce(transitiveType, resultType)) {
                    for (Type sourceType : types) {
                        if (functionAndTypeManager.canCoerce(sourceType, transitiveType)) {
                            if (!functionAndTypeManager.canCoerce(sourceType, resultType)) {
                                fail(format("'%s' -> '%s' coercion is missing when transitive coercion is possible: '%s' -> '%s' -> '%s'",
                                        sourceType, resultType, sourceType, transitiveType, resultType));
                            }
                        }
                    }
                }
            }
        }
    }

    /** Every declared coercion (other than from/to UNKNOWN) must be backed by a CAST operator. */
    @Test
    public void testCastOperatorsExistForCoercions()
    {
        Set<Type> types = getStandardPrimitiveTypes();
        for (Type sourceType : types) {
            for (Type resultType : types) {
                if (functionAndTypeManager.canCoerce(sourceType, resultType) && sourceType != UNKNOWN && resultType != UNKNOWN) {
                    try {
                        functionAndTypeManager.lookupCast(CAST, sourceType.getTypeSignature(), resultType.getTypeSignature());
                    }
                    catch (OperatorNotFoundException e) {
                        fail(format("'%s' -> '%s' coercion exists but there is no cast operator", sourceType, resultType));
                    }
                }
            }
        }
    }

    /**
     * Every comparable type must resolve equality/distinct/hash operators and
     * every orderable type must resolve the four ordering operators.
     * resolveOperator throws if the operator is missing, failing the test.
     */
    @Test
    public void testOperatorsImplemented()
    {
        for (Type type : functionAndTypeManager.getTypes()) {
            if (type.isComparable()) {
                functionAndTypeManager.resolveOperator(EQUAL, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(NOT_EQUAL, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(IS_DISTINCT_FROM, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(HASH_CODE, fromTypes(type));
            }
            if (type.isOrderable()) {
                functionAndTypeManager.resolveOperator(LESS_THAN, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(LESS_THAN_OR_EQUAL, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(GREATER_THAN_OR_EQUAL, fromTypes(type, type));
                functionAndTypeManager.resolveOperator(GREATER_THAN, fromTypes(type, type));
            }
        }
    }

    /**
     * Row types whose field names collide with type keywords (quoted or not)
     * must still parse; success is simply not throwing.
     */
    @Test
    public void testRowTypeCreation()
    {
        createType("row(time with time zone,time time with time zone)");
        createType("row(timestamp with time zone,\"timestamp\" timestamp with time zone)");
        createType("row(interval day to second,interval interval day to second)");
        createType("row(interval year to month,\"interval\" interval year to month)");
        createType("row(array(time with time zone), \"a\" array(map(timestamp with time zone, interval day to second)))");
    }

    /** Registered types plus boundary instances of the parametrized (decimal/varchar/char) types. */
    private Set<Type> getStandardPrimitiveTypes()
    {
        ImmutableSet.Builder<Type> builder = ImmutableSet.builder();
        // add unparametrized types
        builder.addAll(functionAndTypeManager.getTypes());
        // add corner cases for parametrized types
        builder.add(createDecimalType(1, 0));
        builder.add(createDecimalType(17, 0));
        builder.add(createDecimalType(38, 0));
        builder.add(createDecimalType(17, 17));
        builder.add(createDecimalType(38, 38));
        builder.add(createVarcharType(0));
        builder.add(createUnboundedVarcharType());
        builder.add(createCharType(0));
        builder.add(createCharType(42));
        return builder.build();
    }

    /**
     * Builds a CompatibilityAssertion for the pair, checking on the way that
     * getCommonSuperType is symmetric in its arguments.
     */
    private CompatibilityAssertion assertThat(Type firstType, Type secondType)
    {
        Optional<Type> commonSuperType1 = functionAndTypeManager.getCommonSuperType(firstType, secondType);
        Optional<Type> commonSuperType2 = functionAndTypeManager.getCommonSuperType(secondType, firstType);
        assertEquals(commonSuperType1, commonSuperType2, "Expected getCommonSuperType to return the same result when invoked in either order");
        boolean canCoerceFirstToSecond = functionAndTypeManager.canCoerce(firstType, secondType);
        boolean canCoerceSecondToFirst = functionAndTypeManager.canCoerce(secondType, firstType);
        return new CompatibilityAssertion(commonSuperType1, canCoerceFirstToSecond, canCoerceSecondToFirst);
    }

    /** Convenience overload resolving both type signatures before delegating. */
    private CompatibilityAssertion assertThat(String firstType, String secondType)
    {
        return assertThat(createType(firstType), createType(secondType));
    }

    private boolean isTypeOnlyCoercion(Type actual, Type expected)
    {
        return functionAndTypeManager.isTypeOnlyCoercion(actual, expected);
    }

    private boolean isTypeOnlyCoercion(String actual, String expected)
    {
        return functionAndTypeManager.isTypeOnlyCoercion(createType(actual), createType(expected));
    }

    /** Resolves a type from its textual signature via the registry under test. */
    private Type createType(String signature)
    {
        return functionAndTypeManager.getType(TypeSignature.parseTypeSignature(signature));
    }

    /**
     * Fluent assertion holder capturing the common supertype and the two
     * coercion directions for a pair of types. Non-static because the
     * String-based hasCommonSuperType needs the outer createType().
     */
    private class CompatibilityAssertion
    {
        private final Optional<Type> commonSuperType;
        private final boolean canCoerceFirstToSecond;
        private final boolean canCoerceSecondToFirst;

        public CompatibilityAssertion(Optional<Type> commonSuperType, boolean canCoerceFirstToSecond, boolean canCoerceSecondToFirst)
        {
            this.commonSuperType = requireNonNull(commonSuperType, "commonSuperType is null");
            // Assert that: (canFirstCoerceToSecond || canSecondCoerceToFirst) => commonSuperType.isPresent
            assertTrue(!(canCoerceFirstToSecond || canCoerceSecondToFirst) || commonSuperType.isPresent(), "Expected canCoercion to be false when there is no commonSuperType");
            this.canCoerceFirstToSecond = canCoerceFirstToSecond;
            this.canCoerceSecondToFirst = canCoerceSecondToFirst;
        }

        public void isIncompatible()
        {
            assertTrue(!commonSuperType.isPresent(), "Expected to be incompatible");
        }

        public CompatibilityAssertion hasCommonSuperType(Type expected)
        {
            assertTrue(commonSuperType.isPresent(), "Expected commonSuperType to be present");
            assertEquals(commonSuperType.get(), expected, "commonSuperType");
            return this;
        }

        public CompatibilityAssertion hasCommonSuperType(String expected)
        {
            return hasCommonSuperType(createType(expected));
        }

        public CompatibilityAssertion canCoerceToEachOther()
        {
            assertTrue(canCoerceFirstToSecond, "Expected first be coercible to second");
            assertTrue(canCoerceSecondToFirst, "Expected second be coercible to first");
            return this;
        }

        public CompatibilityAssertion canCoerceFirstToSecondOnly()
        {
            assertTrue(canCoerceFirstToSecond, "Expected first be coercible to second");
            assertFalse(canCoerceSecondToFirst, "Expected second NOT be coercible to first");
            return this;
        }

        public CompatibilityAssertion canCoerceSecondToFirstOnly()
        {
            assertFalse(canCoerceFirstToSecond, "Expected first NOT be coercible to second");
            assertTrue(canCoerceSecondToFirst, "Expected second be coercible to first");
            return this;
        }

        public CompatibilityAssertion cannotCoerceToEachOther()
        {
            assertFalse(canCoerceFirstToSecond, "Expected first NOT be coercible to second");
            assertFalse(canCoerceSecondToFirst, "Expected second NOT be coercible to first");
            return this;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.river.thread;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* A task manager manages a single queue of tasks, and some number of
* worker threads. New tasks are added to the tail of the queue. Each
* thread loops, taking a task from the queue and running it. Each
* thread looks for a task by starting at the head of the queue and
* taking the first task (that is not already being worked on) that is
* not required to run after any of the tasks that precede it in
* the queue (including tasks that are currently being worked on).
* <p>
* This class uses the {@link Logger} named
* <code>org.apache.river.thread.TaskManager</code> to log information at
* the following logging levels:
* <p>
* <table border=1 cellpadding=5
* summary="Describes logging performed by TaskManager at different
* logging levels">
* <caption><b><code>
* org.apache.river.thread.TaskManager</code></b></caption>
* <tr><th>Level<th>Description
* <tr><td>{@link Level#SEVERE SEVERE}<td>
* failure to create a worker thread when no other worker threads exist
* <tr><td>{@link Level#WARNING WARNING}<td>
* exceptions thrown by {@link TaskManager.Task} methods, and failure
* to create a worker thread when other worker threads exist
* </table>
*
* @author Sun Microsystems, Inc.
* @deprecated will be removed from a future release soon.
*/
@Deprecated
public class TaskManager {

    /** The interface that tasks must implement */
    public interface Task extends Runnable {
        /**
         * Return true if this task must be run after at least one task
         * in the given task list with an index less than size (size may be
         * less than tasks.size()). Using List.get will be more efficient
         * than List.iterator.
         *
         * @param tasks the tasks to consider. A read-only List, with all
         * elements instanceof Task.
         * @param size elements with index less than size should be considered
         */
        boolean runAfter(List tasks, int size);
    }

    /** Logger */
    protected static final Logger logger =
        Logger.getLogger("org.apache.river.thread.TaskManager");
    /** Active and pending tasks */
    protected final ArrayList<Runnable> tasks = new ArrayList<Runnable>(); // sync on this
    /** Index of the first pending task; all earlier tasks are active */
    protected int firstPending = 0; // sync on this
    /** Read-only view of tasks */
    protected final List roTasks = Collections.unmodifiableList(tasks); // sync on this
    /** Active threads */
    protected final List threads = new ArrayList(); // sync on this
    /** Maximum number of threads allowed */
    protected final int maxThreads;
    /** Idle time before a thread should exit */
    protected final long timeout;
    /** Threshold for creating new threads */
    protected final float loadFactor;
    /** True if manager has been terminated */
    protected boolean terminated = false; // sync on this

    /**
     * Create a task manager with maxThreads = 10, timeout = 15 seconds,
     * and loadFactor = 3.0.
     */
    public TaskManager() {
        this(10, 1000 * 15, 3.0f);
    }

    /**
     * Create a task manager.
     *
     * @param maxThreads maximum number of threads to use on tasks
     * @param timeout idle time before a thread exits
     * @param loadFactor threshold for creating new threads. A new
     * thread is created if the total number of runnable tasks (both active
     * and pending) exceeds the number of threads times the loadFactor,
     * and the maximum number of threads has not been reached.
     */
    public TaskManager(int maxThreads, long timeout, float loadFactor) {
        this.maxThreads = maxThreads;
        this.timeout = timeout;
        this.loadFactor = loadFactor;
    }

    /**
     * Add a new task if it is not equal to (using the equals method)
     * to any existing active or pending task.
     *
     * @param t the task to add
     */
    public synchronized void addIfNew(Task t) {
        if (!tasks.contains(t))
            add(t);
    }

    /**
     * Add a new task, spawning worker threads as needed (up to maxThreads)
     * and otherwise waking an idle worker if the task could run immediately.
     *
     * @param t the task to add
     */
    public synchronized void add(Runnable t) {
        tasks.add(t);
        boolean poke = true;
        while (threads.size() < maxThreads && needThread()) {
            Thread th;
            try {
                th = new TaskThread();
                th.start();
            } catch (Throwable tt) {
                try {
                    // SEVERE only when no workers exist at all (nothing can make progress)
                    logger.log(threads.isEmpty() ?
                               Level.SEVERE : Level.WARNING,
                               "thread creation exception", tt);
                } catch (Throwable ttt) {
                }
                break;
            }
            threads.add(th);
            poke = false;
        }
        // If no new thread was started, wake one idle worker — but only when
        // an idle worker may exist and the new task is immediately runnable.
        if (poke &&
            threads.size() > firstPending &&
            !runAfter(t, tasks.size() - 1))
        {
            notify();
        }
    }

    /**
     * Add all tasks in a collection, in iterator order.
     *
     * @param c collection of {@link Runnable} elements to add
     */
    public synchronized void addAll(Collection c) {
        for (Iterator iter = c.iterator(); iter.hasNext(); ) {
            // Cast to Runnable, not Task: add(Runnable) accepts any Runnable,
            // and the former (Task) cast threw ClassCastException for plain
            // Runnables that do not implement Task.
            add((Runnable) iter.next());
        }
    }

    /** Return true if a new thread should be created (ignoring maxThreads). */
    protected boolean needThread() {
        int bound = (int) (loadFactor * threads.size());
        int max = tasks.size();
        if (max < bound)
            return false;
        max--;
        // The task just added must itself be runnable for a new thread to help.
        if (runAfter(tasks.get(max), max))
            return false;
        int ready = firstPending + 1;
        if (ready > bound)
            return true;
        // Count pending tasks that are ready to run until the load bound is exceeded.
        for (int i = firstPending; i < max; i++) {
            if (!runAfter(tasks.get(i), i)) {
                ready++;
                if (ready > bound)
                    return true;
            }
        }
        return false;
    }

    /**
     * Returns t.runAfter(i), or false if an exception is thrown
     * (plain Runnables that are not Tasks have no ordering constraints).
     */
    private boolean runAfter(Runnable t, int i) {
        try {
            if (t instanceof Task)
                return ((Task) t).runAfter(roTasks, i);
            else
                return false;
        } catch (Throwable tt) {
            try {
                logger.log(Level.WARNING, "Task.runAfter exception", tt);
            } catch (Throwable ttt) {
            }
            return false;
        }
    }

    /**
     * Remove a task if it is pending (not active). Object identity (==)
     * is used, not the equals method. Returns true if the task was
     * removed.
     */
    public synchronized boolean removeIfPending(Runnable t) {
        return removeTask(t, firstPending);
    }

    /**
     * Remove a task if it is pending or active. If it is active and not being
     * executed by the calling thread, interrupt the thread executing the task,
     * but do not wait for the thread to terminate. Object identity (==) is
     * used, not the equals method. Returns true if the task was removed.
     */
    public synchronized boolean remove(Runnable t) {
        return removeTask(t, 0);
    }

    /**
     * Remove a task if it has index >= min. If it is active and not being
     * executed by the calling thread, interrupt the thread executing the task.
     */
    private boolean removeTask(Runnable t, int min) {
        for (int i = tasks.size(); --i >= min; ) {
            if (tasks.get(i) == t) {
                tasks.remove(i);
                if (i < firstPending) {
                    // Task was active: fix the active/pending boundary and
                    // interrupt whichever worker is running it (unless it is us).
                    firstPending--;
                    for (int j = threads.size(); --j >= 0; ) {
                        TaskThread thread = (TaskThread) threads.get(j);
                        if (thread.task == t) {
                            if (thread != Thread.currentThread())
                                thread.interrupt();
                            break;
                        }
                    }
                }
                return true;
            }
        }
        return false;
    }

    /**
     * Interrupt all threads, and stop processing tasks. Only getPending
     * should be used afterwards.
     */
    public synchronized void terminate() {
        terminated = true;
        for (int i = threads.size(); --i >= 0; ) {
            ((Thread) threads.get(i)).interrupt();
        }
    }

    /** Return all pending tasks. A new list is returned each time. */
    public synchronized ArrayList getPending() {
        // Copy just the pending tail in one pass instead of copying the whole
        // list and repeatedly removing index 0 (which was O(n^2)).
        return new ArrayList(tasks.subList(firstPending, tasks.size()));
    }

    /** Return the maximum number of threads to use on tasks. */
    public int getMaxThreads() {
        return maxThreads;
    }

    /** Worker thread: loops taking runnable tasks off the queue until idle timeout. */
    private class TaskThread extends Thread {

        /** The task being run, if any */
        public Runnable task = null; // sync access on TaskManager.this

        public TaskThread() {
            super("task");
            setDaemon(true);
        }

        /**
         * Find the next task that can be run, and mark it taken by
         * moving firstPending past it (and moving the task in front of
         * any pending tasks that are skipped due to execution constraints).
         * If a task is found, set task to it and return true.
         */
        private boolean takeTask() {
            int size = tasks.size();
            for (int i = firstPending; i < size; i++) {
                Runnable t = tasks.get(i);
                if (!runAfter(t, i)) {
                    if (i > firstPending) {
                        // Move the chosen task to the active/pending boundary.
                        tasks.remove(i);
                        tasks.add(firstPending, t);
                    }
                    firstPending++;
                    task = t;
                    return true;
                }
            }
            return false;
        }

        public void run() {
            while (true) {
                Runnable tsk = null;
                synchronized (TaskManager.this) {
                    if (terminated)
                        return;
                    if (task != null) {
                        // Retire the task we just finished running.
                        for (int i = firstPending; --i >= 0; ) {
                            if (tasks.get(i) == task) {
                                tasks.remove(i);
                                firstPending--;
                                break;
                            }
                        }
                        task = null;
                        interrupted(); // clear interrupt bit
                    }
                    if (!takeTask()) {
                        try {
                            TaskManager.this.wait(timeout);
                        } catch (InterruptedException e) {
                        }
                        // Exit on termination or if still nothing to do after the wait.
                        if (terminated || !takeTask()) {
                            threads.remove(this);
                            return;
                        }
                    }
                    tsk = task;
                }
                try {
                    tsk.run();
                } catch (Throwable t) {
                    if (t instanceof Error) throw (Error) t;
                    try {
                        logger.log(Level.WARNING, "Task.run exception", t);
                    } catch (Throwable tt) {
                    }
                }
            }
        }
    }
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.skyframe;
import com.google.common.eventbus.EventBus;
import com.google.devtools.build.lib.actions.ActionExecutionException;
import com.google.devtools.build.lib.actions.Artifact;
import com.google.devtools.build.lib.actions.MissingInputFileException;
import com.google.devtools.build.lib.analysis.AspectCompleteEvent;
import com.google.devtools.build.lib.analysis.ConfiguredTarget;
import com.google.devtools.build.lib.analysis.LabelAndConfiguration;
import com.google.devtools.build.lib.analysis.TargetCompleteEvent;
import com.google.devtools.build.lib.analysis.TopLevelArtifactContext;
import com.google.devtools.build.lib.analysis.TopLevelArtifactHelper;
import com.google.devtools.build.lib.analysis.TopLevelArtifactHelper.ArtifactsToBuild;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.collect.nestedset.NestedSet;
import com.google.devtools.build.lib.collect.nestedset.NestedSetBuilder;
import com.google.devtools.build.lib.events.Event;
import com.google.devtools.build.lib.skyframe.AspectCompletionValue.AspectCompletionKey;
import com.google.devtools.build.lib.skyframe.AspectValue.AspectKey;
import com.google.devtools.build.lib.skyframe.TargetCompletionValue.TargetCompletionKey;
import com.google.devtools.build.skyframe.SkyFunction;
import com.google.devtools.build.skyframe.SkyFunctionException;
import com.google.devtools.build.skyframe.SkyKey;
import com.google.devtools.build.skyframe.SkyValue;
import com.google.devtools.build.skyframe.ValueOrException2;
import java.util.Map;
import java.util.concurrent.atomic.AtomicReference;
import javax.annotation.Nullable;
/**
* CompletionFunction builds the artifactsToBuild collection of a {@link ConfiguredTarget}.
*/
/**
 * Builds the {@code artifactsToBuild} collection for a top-level target or aspect and reports
 * completion (success or failure) to the {@link EventBus}.
 *
 * <p>The concrete behavior (target vs. aspect) is supplied by a {@link Completor} strategy; the
 * two factory methods below instantiate the function for each case.
 */
public final class CompletionFunction<TValue extends SkyValue, TResult extends SkyValue>
    implements SkyFunction {

  /**
   * A strategy for completing the build.
   */
  public interface Completor<TValue, TResult extends SkyValue> {

    /**
     * Obtains an analysis result value from environment. May return null if the underlying
     * Skyframe dependency is not yet available (callers must check {@code env.valuesMissing()}).
     */
    TValue getValueFromSkyKey(SkyKey skyKey, Environment env);

    /**
     * Returns the options which determine the artifacts to build for the top-level targets.
     * <p>
     * For the Top level targets we made a conscious decision to include the TopLevelArtifactContext
     * within the SkyKey as an argument to the CompletionFunction rather than a separate SkyKey.
     * As a result we do have <num top level targets> extra SkyKeys for every unique
     * TopLevelArtifactContexts used over the lifetime of Blaze. This is a minor tradeoff,
     * since it significantly improves null build times when we're switching the
     * TopLevelArtifactContexts frequently (common for IDEs), by reusing existing SkyKeys
     * from earlier runs, instead of causing an eager invalidation
     * were the TopLevelArtifactContext modeled as a separate SkyKey.
     */
    TopLevelArtifactContext getTopLevelArtifactContext(SkyKey skyKey);

    /**
     * Returns all artifacts that need to be built to complete the {@code value}
     */
    ArtifactsToBuild getAllArtifactsToBuild(TValue value, TopLevelArtifactContext context);

    /**
     * Creates an event reporting an absent input artifact.
     */
    Event getRootCauseError(TValue value, Label rootCause);

    /**
     * Creates an error message reporting {@code missingCount} missing input files.
     */
    MissingInputFileException getMissingFilesException(TValue value, int missingCount);

    /**
     * Creates a successful completion value.
     */
    TResult createResult(TValue value);

    /**
     * Creates a failed completion value.
     */
    SkyValue createFailed(TValue value, NestedSet<Label> rootCauses);

    /**
     * Extracts a tag given the {@link SkyKey}.
     */
    String extractTag(SkyKey skyKey);
  }

  /** Completion strategy for top-level configured targets. */
  private static class TargetCompletor
      implements Completor<ConfiguredTargetValue, TargetCompletionValue> {
    @Override
    public ConfiguredTargetValue getValueFromSkyKey(SkyKey skyKey, Environment env) {
      TargetCompletionKey tcKey = (TargetCompletionKey) skyKey.argument();
      LabelAndConfiguration lac = tcKey.labelAndConfiguration();
      return (ConfiguredTargetValue)
          env.getValue(ConfiguredTargetValue.key(lac.getLabel(), lac.getConfiguration()));
    }

    @Override
    public TopLevelArtifactContext getTopLevelArtifactContext(SkyKey skyKey) {
      TargetCompletionKey tcKey = (TargetCompletionKey) skyKey.argument();
      return tcKey.topLevelArtifactContext();
    }

    @Override
    public ArtifactsToBuild getAllArtifactsToBuild(
        ConfiguredTargetValue value, TopLevelArtifactContext topLevelContext) {
      return TopLevelArtifactHelper.getAllArtifactsToBuild(
          value.getConfiguredTarget(), topLevelContext);
    }

    @Override
    public Event getRootCauseError(ConfiguredTargetValue ctValue, Label rootCause) {
      return Event.error(
          ctValue.getConfiguredTarget().getTarget().getLocation(),
          String.format(
              "%s: missing input file '%s'", ctValue.getConfiguredTarget().getLabel(), rootCause));
    }

    @Override
    public MissingInputFileException getMissingFilesException(
        ConfiguredTargetValue value, int missingCount) {
      return new MissingInputFileException(
          value.getConfiguredTarget().getTarget().getLocation()
              + " "
              + missingCount
              + " input file(s) do not exist",
          value.getConfiguredTarget().getTarget().getLocation());
    }

    @Override
    public TargetCompletionValue createResult(ConfiguredTargetValue value) {
      return new TargetCompletionValue(value.getConfiguredTarget());
    }

    @Override
    public SkyValue createFailed(ConfiguredTargetValue value, NestedSet<Label> rootCauses) {
      return TargetCompleteEvent.createFailed(value.getConfiguredTarget(), rootCauses);
    }

    @Override
    public String extractTag(SkyKey skyKey) {
      return Label.print(
          ((TargetCompletionKey) skyKey.argument()).labelAndConfiguration().getLabel());
    }
  }

  /** Completion strategy for top-level aspects. */
  private static class AspectCompletor implements Completor<AspectValue, AspectCompletionValue> {
    @Override
    public AspectValue getValueFromSkyKey(SkyKey skyKey, Environment env) {
      AspectCompletionKey acKey = (AspectCompletionKey) skyKey.argument();
      AspectKey aspectKey = acKey.aspectKey();
      return (AspectValue) env.getValue(AspectValue.key(aspectKey));
    }

    @Override
    public TopLevelArtifactContext getTopLevelArtifactContext(SkyKey skyKey) {
      AspectCompletionKey acKey = (AspectCompletionKey) skyKey.argument();
      return acKey.topLevelArtifactContext();
    }

    @Override
    public ArtifactsToBuild getAllArtifactsToBuild(
        AspectValue value, TopLevelArtifactContext topLevelArtifactContext) {
      return TopLevelArtifactHelper.getAllArtifactsToBuild(value, topLevelArtifactContext);
    }

    @Override
    public Event getRootCauseError(AspectValue value, Label rootCause) {
      return Event.error(
          value.getLocation(),
          String.format(
              "%s, aspect %s: missing input file '%s'",
              value.getLabel(),
              value.getConfiguredAspect().getName(),
              rootCause));
    }

    @Override
    public MissingInputFileException getMissingFilesException(AspectValue value, int missingCount) {
      // BUG FIX: a separator was missing between the aspect name and the count, yielding
      // messages like "//foo, aspect MyAspect3 input file(s) do not exist". Mirror the
      // separator used by TargetCompletor.getMissingFilesException above.
      return new MissingInputFileException(
          value.getLabel()
              + ", aspect "
              + value.getConfiguredAspect().getName()
              + ", "
              + missingCount
              + " input file(s) do not exist",
          value.getLocation());
    }

    @Override
    public AspectCompletionValue createResult(AspectValue value) {
      return new AspectCompletionValue(value);
    }

    @Override
    public SkyValue createFailed(AspectValue value, NestedSet<Label> rootCauses) {
      return AspectCompleteEvent.createFailed(value, rootCauses);
    }

    @Override
    public String extractTag(SkyKey skyKey) {
      return Label.print(((AspectCompletionKey) skyKey.argument()).aspectKey().getLabel());
    }
  }

  /** Returns a CompletionFunction for top-level configured targets. */
  public static SkyFunction targetCompletionFunction(AtomicReference<EventBus> eventBusRef) {
    return new CompletionFunction<>(eventBusRef, new TargetCompletor());
  }

  /** Returns a CompletionFunction for top-level aspects. */
  public static SkyFunction aspectCompletionFunction(AtomicReference<EventBus> eventBusRef) {
    return new CompletionFunction<>(eventBusRef, new AspectCompletor());
  }

  // Event bus used to post success/failure completion events; read lazily on each compute.
  private final AtomicReference<EventBus> eventBusRef;
  // Target- vs. aspect-specific behavior.
  private final Completor<TValue, TResult> completor;

  private CompletionFunction(
      AtomicReference<EventBus> eventBusRef, Completor<TValue, TResult> completor) {
    this.eventBusRef = eventBusRef;
    this.completor = completor;
  }

  @Nullable
  @Override
  public SkyValue compute(SkyKey skyKey, Environment env) throws CompletionFunctionException {
    TValue value = completor.getValueFromSkyKey(skyKey, env);
    TopLevelArtifactContext topLevelContext = completor.getTopLevelArtifactContext(skyKey);
    if (env.valuesMissing()) {
      // The analysis value is not available yet; Skyframe will restart us later.
      return null;
    }
    // Request every mandatory artifact; each dep may fail with one of the two exception types.
    Map<SkyKey, ValueOrException2<MissingInputFileException, ActionExecutionException>> inputDeps =
        env.getValuesOrThrow(
            ArtifactValue.mandatoryKeys(
                completor.getAllArtifactsToBuild(value, topLevelContext).getAllArtifacts()),
            MissingInputFileException.class,
            ActionExecutionException.class);

    int missingCount = 0;
    ActionExecutionException firstActionExecutionException = null;
    MissingInputFileException missingInputException = null;
    NestedSetBuilder<Label> rootCausesBuilder = NestedSetBuilder.stableOrder();
    for (Map.Entry<SkyKey, ValueOrException2<MissingInputFileException, ActionExecutionException>>
        depsEntry : inputDeps.entrySet()) {
      Artifact input = ArtifactValue.artifact(depsEntry.getKey());
      try {
        depsEntry.getValue().get();
      } catch (MissingInputFileException e) {
        missingCount++;
        final Label inputOwner = input.getOwner();
        // NOTE(review): a missing input with no owner is counted but contributes no root
        // cause; if ALL missing inputs are ownerless, rootCauses stays empty and the build
        // completes successfully below — confirm this is intended.
        if (inputOwner != null) {
          rootCausesBuilder.add(inputOwner);
          env.getListener().handle(completor.getRootCauseError(value, inputOwner));
        }
      } catch (ActionExecutionException e) {
        rootCausesBuilder.addTransitive(e.getRootCauses());
        // Keep only the first action failure; it determines the thrown exception below.
        if (firstActionExecutionException == null) {
          firstActionExecutionException = e;
        }
      }
    }
    if (missingCount > 0) {
      missingInputException = completor.getMissingFilesException(value, missingCount);
    }
    NestedSet<Label> rootCauses = rootCausesBuilder.build();
    if (!rootCauses.isEmpty()) {
      eventBusRef.get().post(completor.createFailed(value, rootCauses));
      if (firstActionExecutionException != null) {
        throw new CompletionFunctionException(firstActionExecutionException);
      } else {
        // rootCauses non-empty without an action failure implies at least one owned missing
        // input, so missingInputException is non-null here.
        throw new CompletionFunctionException(missingInputException);
      }
    }
    return env.valuesMissing() ? null : completor.createResult(value);
  }

  @Override
  public String extractTag(SkyKey skyKey) {
    return completor.extractTag(skyKey);
  }

  /** Wraps the underlying failure; transient for missing files, persistent for action errors. */
  private static final class CompletionFunctionException extends SkyFunctionException {

    private final ActionExecutionException actionException;

    public CompletionFunctionException(ActionExecutionException e) {
      super(e, Transience.PERSISTENT);
      this.actionException = e;
    }

    public CompletionFunctionException(MissingInputFileException e) {
      super(e, Transience.TRANSIENT);
      this.actionException = null;
    }

    @Override
    public boolean isCatastrophic() {
      return actionException != null && actionException.isCatastrophe();
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nutch.crawl;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.Map;
import java.util.Set;
// Commons Logging imports
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.MD5Hash;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.ObjectWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.StringUtils;
import org.apache.nutch.protocol.ProtocolStatus;
/**
* A writable map, with a similar behavior as <code>java.util.HashMap</code>. In
* addition to the size of key and value writable tuple two additional bytes are
* stored to identify the Writable classes. This means that a maximum of 255
* different class types can be used for key and value objects. A binary-id to
* class mapping is defined in a static block of this class. However it is
* possible to use custom implementations of Writable. For these custom
* Writables we write the byte id - utf class name tuple into the header of each
* MapWritable that uses these types.
*
* @author Stefan Groschupf
* @deprecated Use org.apache.hadoop.io.MapWritable instead.
*/
@Deprecated
public class MapWritable implements Writable {

  public static final Logger LOG = LoggerFactory.getLogger(MapWritable.class);

  // Singly linked list of live entries, in insertion order.
  private KeyValueEntry fFirst;
  private KeyValueEntry fLast;
  // Recycling pool: entries detached by clear() and reused by getKeyValueEntry()
  // during readFields() to avoid re-allocating key/value Writables.
  private KeyValueEntry fOld;
  private int fSize = 0;

  // Count of custom (non-predefined) class-id entries written into the header.
  private int fIdCount = 0;
  // Singly linked list of custom byte-id <-> class mappings for this instance.
  private ClassIdEntry fIdLast;
  private ClassIdEntry fIdFirst;

  // Static, well-known class <-> byte-id mappings shared by all instances.
  private static Map<Class<?>, Byte> CLASS_ID_MAP = new HashMap<Class<?>, Byte>();
  private static Map<Byte, Class<?>> ID_CLASS_MAP = new HashMap<Byte, Class<?>>();
  static {
    addToMap(NullWritable.class, new Byte((byte) -127));
    addToMap(LongWritable.class, new Byte((byte) -126));
    addToMap(Text.class, new Byte((byte) -125));
    addToMap(MD5Hash.class, new Byte((byte) -124));
    addToMap(org.apache.nutch.protocol.Content.class, new Byte((byte) -122));
    addToMap(org.apache.nutch.parse.ParseText.class, new Byte((byte) -121));
    addToMap(org.apache.nutch.parse.ParseData.class, new Byte((byte) -120));
    addToMap(MapWritable.class, new Byte((byte) -119));
    addToMap(BytesWritable.class, new Byte((byte) -118));
    addToMap(FloatWritable.class, new Byte((byte) -117));
    addToMap(IntWritable.class, new Byte((byte) -116));
    addToMap(ObjectWritable.class, new Byte((byte) -115));
    addToMap(ProtocolStatus.class, new Byte((byte) -114));
  }

  // Registers a predefined class under the given byte id in both lookup directions.
  private static void addToMap(Class<?> clazz, Byte byteId) {
    CLASS_ID_MAP.put(clazz, byteId);
    ID_CLASS_MAP.put(byteId, clazz);
  }

  public MapWritable() {
  }

  /**
   * Copy constructor. This constructor makes a deep copy, using serialization /
   * deserialization to break any possible references to contained objects.
   *
   * @param map
   *          map to copy from
   */
  public MapWritable(MapWritable map) {
    if (map != null) {
      try {
        DataOutputBuffer dob = new DataOutputBuffer();
        map.write(dob);
        DataInputBuffer dib = new DataInputBuffer();
        dib.reset(dob.getData(), dob.getLength());
        readFields(dib);
      } catch (IOException e) {
        throw new IllegalArgumentException("this map cannot be copied: "
            + StringUtils.stringifyException(e));
      }
    }
  }

  /**
   * Empties the map. The old entry chain is kept in {@code fOld} so its
   * Writable instances can be recycled by a subsequent readFields().
   */
  public void clear() {
    fOld = fFirst;
    fFirst = fLast = null;
    fSize = 0;
  }

  /** Returns true if an entry with a key equal to {@code key} exists. */
  public boolean containsKey(Writable key) {
    return findEntryByKey(key) != null;
  }

  /** Returns true if any entry's value equals {@code value} (linear scan). */
  public boolean containsValue(Writable value) {
    KeyValueEntry entry = fFirst;
    while (entry != null) {
      if (entry.fValue.equals(value)) {
        return true;
      }
      entry = entry.fNextEntry;
    }
    return false;
  }

  /** Returns the value mapped to {@code key}, or null if absent (linear scan). */
  public Writable get(Writable key) {
    KeyValueEntry entry = findEntryByKey(key);
    if (entry != null) {
      return entry.fValue;
    }
    return null;
  }

  // Order-independent hash over all key/value pairs (sum of per-entry hashes).
  public int hashCode() {
    final int seed = 23;
    int hash = 0;
    KeyValueEntry entry = fFirst;
    while (entry != null) {
      hash += entry.fKey.hashCode() * seed;
      hash += entry.fValue.hashCode() * seed;
      entry = entry.fNextEntry;
    }
    return hash;
  }

  public boolean isEmpty() {
    return fFirst == null;
  }

  /** Returns a new HashSet containing the keys of all entries. */
  public Set<Writable> keySet() {
    HashSet<Writable> set = new HashSet<Writable>();
    if (isEmpty())
      return set;
    set.add(fFirst.fKey);
    KeyValueEntry entry = fFirst;
    while ((entry = entry.fNextEntry) != null) {
      set.add(entry.fKey);
    }
    return set;
  }

  /**
   * Associates {@code value} with {@code key}, appending a new entry if the key
   * is absent.
   *
   * @return the previous value for the key, or null if there was none
   */
  public Writable put(Writable key, Writable value) {
    KeyValueEntry entry = findEntryByKey(key);
    if (entry != null) {
      Writable oldValue = entry.fValue;
      entry.fValue = value;
      return oldValue;
    }
    KeyValueEntry newEntry = new KeyValueEntry(key, value);
    fSize++;
    if (fLast != null) {
      fLast = fLast.fNextEntry = newEntry;
      return null;
    }
    // First entry: head and tail coincide.
    fLast = fFirst = newEntry;
    return null;
  }

  /** Copies all entries of {@code map} into this map (null / empty maps are no-ops). */
  public void putAll(MapWritable map) {
    if (map == null || map.size() == 0) {
      return;
    }
    Iterator<Writable> iterator = map.keySet().iterator();
    while (iterator.hasNext()) {
      Writable key = iterator.next();
      Writable value = map.get(key);
      put(key, value);
    }
  }

  /**
   * Removes the entry for {@code key}, unlinking it from the chain.
   *
   * @return the removed value, or null if the key was absent
   */
  public Writable remove(Writable key) {
    Writable oldValue = null;
    KeyValueEntry entry = fFirst;
    KeyValueEntry predecessor = null;
    while (entry != null) {
      if (entry.fKey.equals(key)) {
        oldValue = entry.fValue;
        if (predecessor == null) {
          // Removing the head.
          fFirst = fFirst.fNextEntry;
        } else {
          predecessor.fNextEntry = entry.fNextEntry;
        }
        if (fLast.equals(entry)) {
          // Removing the tail; predecessor is null when the map becomes empty.
          fLast = predecessor;
        }
        fSize--;
        return oldValue;
      }
      predecessor = entry;
      entry = entry.fNextEntry;
    }
    return oldValue;
  }

  public int size() {
    return fSize;
  }

  /** Returns a new LinkedList of all values, in entry order. */
  public Collection<Writable> values() {
    LinkedList<Writable> list = new LinkedList<Writable>();
    KeyValueEntry entry = fFirst;
    while (entry != null) {
      list.add(entry.fValue);
      entry = entry.fNextEntry;
    }
    return list;
  }

  // Order-independent equality: both entry chains are compared as sets.
  public boolean equals(Object obj) {
    if (obj instanceof MapWritable) {
      MapWritable map = (MapWritable) obj;
      if (fSize != map.fSize)
        return false;
      HashSet<KeyValueEntry> set1 = new HashSet<KeyValueEntry>();
      KeyValueEntry e1 = fFirst;
      while (e1 != null) {
        set1.add(e1);
        e1 = e1.fNextEntry;
      }
      HashSet<KeyValueEntry> set2 = new HashSet<KeyValueEntry>();
      KeyValueEntry e2 = map.fFirst;
      while (e2 != null) {
        set2.add(e2);
        e2 = e2.fNextEntry;
      }
      return set1.equals(set2);
    }
    return false;
  }

  // NOTE(review): returns null (not "") for an empty map — callers must null-check.
  public String toString() {
    if (fFirst != null) {
      StringBuffer buffer = new StringBuffer();
      KeyValueEntry entry = fFirst;
      while (entry != null) {
        buffer.append(entry.toString());
        buffer.append(" ");
        entry = entry.fNextEntry;
      }
      return buffer.toString();
    }
    return null;
  }

  // Linear search of the entry chain by key equality; null if absent.
  private KeyValueEntry findEntryByKey(final Writable key) {
    KeyValueEntry entry = fFirst;
    while (entry != null && !entry.fKey.equals(key)) {
      entry = entry.fNextEntry;
    }
    return entry;
  }

  // serialization methods

  /**
   * Wire format: size (int); if non-empty: custom-id count (byte), then for each
   * custom id a (byte id, UTF class name) pair, then for each entry a
   * (key class id, value class id, key bytes, value bytes) tuple.
   */
  public void write(DataOutput out) throws IOException {
    out.writeInt(size());
    if (size() > 0) {
      // scan for unknown classes;
      createInternalIdClassEntries();
      // write internal map
      out.writeByte(fIdCount);
      if (fIdCount > 0) {
        ClassIdEntry entry = fIdFirst;
        while (entry != null) {
          out.writeByte(entry.fId);
          Text.writeString(out, entry.fclazz.getName());
          entry = entry.fNextIdEntry;
        }
      }
      // write meta data
      KeyValueEntry entry = fFirst;
      while (entry != null) {
        out.writeByte(entry.fKeyClassId);
        out.writeByte(entry.fValueClassId);
        entry.fKey.write(out);
        entry.fValue.write(out);
        entry = entry.fNextEntry;
      }
    }
  }

  /** Reads the map in the format produced by {@link #write(DataOutput)}. */
  public void readFields(DataInput in) throws IOException {
    clear();
    fSize = in.readInt();
    if (fSize > 0) {
      // read class-id map
      fIdCount = in.readByte();
      byte id;
      Class<?> clazz;
      for (int i = 0; i < fIdCount; i++) {
        try {
          id = in.readByte();
          clazz = Class.forName(Text.readString(in));
          addIdEntry(id, clazz);
        } catch (Exception e) {
          if (LOG.isWarnEnabled()) {
            LOG.warn("Unable to load internal map entry" + e.toString());
          }
          // NOTE(review): decrementing the loop bound here skips the remaining
          // id entries still sitting in the stream, likely desynchronizing all
          // subsequent reads — verify against the writer before changing.
          fIdCount--;
        }
      }
      KeyValueEntry entry;
      for (int i = 0; i < fSize; i++) {
        try {
          entry = getKeyValueEntry(in.readByte(), in.readByte());
          entry.fKey.readFields(in);
          entry.fValue.readFields(in);
          if (fFirst == null) {
            fFirst = fLast = entry;
          } else {
            fLast = fLast.fNextEntry = entry;
          }
        } catch (IOException e) {
          if (LOG.isWarnEnabled()) {
            LOG.warn("Unable to load meta data entry, ignoring.. : "
                + e.toString());
          }
          // NOTE(review): same concern as above — shrinking fSize mid-loop
          // leaves the failed entry's bytes unconsumed in the stream.
          fSize--;
        }
      }
    }
  }

  // Assigns a class id to every entry's key and value class, registering a new
  // custom id (starting just past the predefined range) for unknown classes.
  private void createInternalIdClassEntries() {
    KeyValueEntry entry = fFirst;
    byte id;
    while (entry != null) {
      id = getClassId(entry.fKey.getClass());
      if (id == -128) {
        // -128 is the "unknown class" sentinel from getClassId.
        id = addIdEntry((byte) (-128 + CLASS_ID_MAP.size() + ++fIdCount),
            entry.fKey.getClass());
      }
      entry.fKeyClassId = id;
      id = getClassId(entry.fValue.getClass());
      if (id == -128) {
        id = addIdEntry((byte) (-128 + CLASS_ID_MAP.size() + ++fIdCount),
            entry.fValue.getClass());
      }
      entry.fValueClassId = id;
      entry = entry.fNextEntry;
    }
  }

  // Appends a custom id<->class mapping to the instance-local id chain.
  private byte addIdEntry(byte id, Class<?> clazz) {
    if (fIdFirst == null) {
      fIdFirst = fIdLast = new ClassIdEntry(id, clazz);
    } else {
      fIdLast.fNextIdEntry = fIdLast = new ClassIdEntry(id, clazz);
    }
    return id;
  }

  // Looks up the id for a class: predefined map first, then the instance-local
  // chain; returns -128 when the class is unknown.
  private byte getClassId(Class<?> clazz) {
    Byte classId = CLASS_ID_MAP.get(clazz);
    if (classId != null) {
      return classId.byteValue();
    }
    ClassIdEntry entry = fIdFirst;
    while (entry != null) {
      if (entry.fclazz.equals(clazz)) {
        return entry.fId;
      }
      entry = entry.fNextIdEntry;
    }
    return -128;
  }

  /**
   * Returns an entry whose key/value classes match the given ids, recycling a
   * matching entry from the {@code fOld} pool when possible, otherwise
   * instantiating fresh Writables via their no-arg constructors.
   */
  private KeyValueEntry getKeyValueEntry(final byte keyId, final byte valueId)
      throws IOException {
    KeyValueEntry entry = fOld;
    KeyValueEntry last = null;
    byte entryKeyId;
    byte entryValueId;
    while (entry != null) {
      entryKeyId = getClassId(entry.fKey.getClass());
      entryValueId = getClassId(entry.fValue.getClass());
      if (entryKeyId == keyId && entryValueId == valueId) {
        // Unlink the recycled entry from the pool.
        if (last != null) {
          last.fNextEntry = entry.fNextEntry;
        } else {
          fOld = entry.fNextEntry;
        }
        entry.fNextEntry = null; // reset next entry
        return entry;
      }
      last = entry;
      entry = entry.fNextEntry;
    }
    Class<?> keyClass = getClass(keyId);
    Class<?> valueClass = getClass(valueId);
    try {
      return new KeyValueEntry((Writable) keyClass.newInstance(),
          (Writable) valueClass.newInstance());
    } catch (Exception e) {
      throw new IOException("unable to instantiate class: " + e.toString());
    }
  }

  // Resolves a byte id to a class (predefined map, then instance-local chain);
  // throws IOException for an unknown id.
  private Class<?> getClass(final byte id) throws IOException {
    Class<?> clazz = ID_CLASS_MAP.get(new Byte(id));
    if (clazz == null) {
      ClassIdEntry entry = fIdFirst;
      while (entry != null) {
        if (entry.fId == id) {
          return entry.fclazz;
        }
        entry = entry.fNextIdEntry;
      }
    } else {
      return clazz;
    }
    throw new IOException("unable to load class for id: " + id);
  }

  /** an entry holds writable key and value */
  private class KeyValueEntry {
    private byte fKeyClassId;
    private byte fValueClassId;
    private Writable fKey;
    private Writable fValue;
    private KeyValueEntry fNextEntry;

    public KeyValueEntry(Writable key, Writable value) {
      this.fKey = key;
      this.fValue = value;
    }

    public String toString() {
      return fKey.toString() + ":" + fValue.toString();
    }

    // Equality/hash by key and value so equals(MapWritable) can compare chains as sets.
    public boolean equals(Object obj) {
      if (obj instanceof KeyValueEntry) {
        KeyValueEntry entry = (KeyValueEntry) obj;
        return entry.fKey.equals(fKey) && entry.fValue.equals(fValue);
      }
      return false;
    }

    public int hashCode() {
      return toString().hashCode();
    }
  }

  /** container for Id class tuples */
  private class ClassIdEntry {
    public ClassIdEntry(byte id, Class<?> clazz) {
      fId = id;
      fclazz = clazz;
    }

    private byte fId;
    private Class<?> fclazz;
    private ClassIdEntry fNextIdEntry;
  }
}
| |
// Copyright 2022 The KeepTry Open Source Project
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
package dp.knapsack;
import java.util.Arrays;
public class Leetcode322CoinChange {
  /*
  Leetcode 322. Coin Change

  You are given an integer array coins representing coins of different denominations and
  an integer amount representing a total amount of money.
  Return the fewest number of coins that you need to make up that amount.
  If that amount of money cannot be made up by any combination of the coins, return -1.
  You may assume that you have an infinite number of each kind of coin.

  Input: coins = [1,2,5], amount = 11
  Output: 3
  Explanation: 11 = 5 + 5 + 1

  Input: coins = [2], amount = 3
  Output: -1

  Input: coins = [1], amount = 0
  Output: 0

  Constraints:
  1 <= coins.length <= 12
  1 <= coins[i] <= 2^31 - 1
  0 <= amount <= 10^4
  */
  /*
  infinite number of each kind of coin
  greedy does not work in this case.
  backtracking all possible way is polynomial time
  dp[t] = min{ dp[t-vi] | vi in given v[] and vi<=t}
  dp[0]=0, other default is MAX
  */

  /* --------------------------------------------------------------------------
  Time Limit Exceeded.
  Backtracking over "how many of coin i to take", shrinking the coin choice
  scope [i, length-1] at each level; no cache (a cache here would need two
  dimensions: target value and choice-scope index).
  */
  public int coinChange_____(int[] coins, int T) {
    return bt(0, coins, T);
  }

  /**
   * Fewest coins from coins[i..] summing exactly to T, or -1 if impossible.
   * Tries every count n of coins[i] (0..T/coins[i]) and recurses on the rest.
   */
  private int bt(int i, int[] coins, int T) {
    if (T == 0) return 0; // exact change made
    if (i >= coins.length) return -1; // no coin kinds left but T > 0 remains
    int maxn = T / coins[i]; // most coins of this kind that fit in T
    int minCost = Integer.MAX_VALUE;
    for (int n = 0; n <= maxn; n++) {
      // n <= T / coins[i] guarantees n * coins[i] <= T, so the remainder is never negative
      int res = bt(i + 1, coins, T - n * coins[i]);
      if (res != -1) minCost = Math.min(minCost, res + n);
    }
    return (minCost == Integer.MAX_VALUE) ? -1 : minCost;
  }

  /* --------------------------------------------------------------------------
  Time Limit Exceeded.
  Backtracking that reduces the target while keeping the full coin choice
  scope; no cache (a cache here would be one-dimensional: target value).
  */
  private int min; // best coin count found so far; T + 1 means "none yet"

  public int coinChange____(int[] coins, int T) {
    min = T + 1; // sentinel: any valid answer uses at most T coins
    if (T < 1) return 0;
    bt(coins, T, 0);
    return min == T + 1 ? -1 : min;
  }

  /** Explores every coin sequence whose values sum to the original target. */
  private void bt(int[] coins, int T, int count) {
    if (T == 0) {
      min = Math.min(min, count);
      return;
    }
    // continue with every coin that still fits
    for (int v : coins) {
      if (T - v >= 0) {
        bt(coins, T - v, count + 1);
      }
    }
  }

  /* --------------------------------------------------------------------------
  Top down + cache (memoization).
  Watch the above solution process https://imgur.com/3Jk18aZ
  Observer:
  There is repeated work when current target t changed in different layer.
  So, for keeping the repeated work: cache current target t and related min counts
  and the comparison will be moved from the end of backtracking into inner loop
  check all possible way top down layer by layer but with a cache
  */
  public int coinChange___(int[] coins, int T) {
    if (T < 1) return 0;
    return bt(coins, T, new int[T + 1]);
  }

  /**
   * Fewest coins summing to T, memoized in cache (0 = unvisited, -1 = impossible).
   */
  private int bt(int[] coins, int T, int[] cache) {
    if (T == 0) return 0;
    if (cache[T] != 0) return cache[T];
    //
    int min = Integer.MAX_VALUE;
    for (int v : coins) {
      if (T - v >= 0) {
        int count = bt(coins, T - v, cache);
        if (count != -1 && count + 1 < min) min = count + 1; // 1 is one coin, the current coin.
      }
    }
    // -1 means no solution, required by 'If that amount of money cannot be made up by any
    // combination of the coins, return -1.'
    cache[T] = (min == Integer.MAX_VALUE) ? -1 : min;
    //
    return cache[T];
  }

  /* --------------------------------------------------------------------------
  Runtime complexity O(T*N).
  dp[i] is the minimum count of coins with target value i, selected from all
  coin types, each with unlimited count; bottom-up, reusing earlier dp values.
  */
  public static int coinChange__(int[] coins, int T) {
    if (T < 1) return 0;
    int[] dp = new int[T + 1];
    dp[0] = 0; // dp[i]: min counts of coins with sum value as i
    // bottom up
    for (int t = 1; t <= T; t++) {
      dp[t] = Integer.MAX_VALUE;
      for (int v : coins) {
        // the MAX_VALUE guard keeps dp[t - v] + 1 from overflowing
        if (v <= t && dp[t - v] != Integer.MAX_VALUE) {
          dp[t] = Math.min(dp[t], dp[t - v] + 1);
        }
      }
    }
    return dp[T] == Integer.MAX_VALUE ? -1 : dp[T];
  }

  // Same DP but with MAX = T + 1 as the "unreachable" sentinel instead of
  // Integer.MAX_VALUE, so dp[t - v] + 1 can never overflow.
  public int coinChange_(int[] coins, int T) {
    int MAX = T + 1;
    int[] dp = new int[T + 1];
    Arrays.fill(dp, MAX);
    dp[0] = 0;
    for (int t = 1; t <= T; t++) {
      for (int v : coins) {
        if (t - v >= 0) {
          dp[t] = Math.min(dp[t], dp[t - v] + 1);
        }
      }
    }
    return dp[T] == MAX ? -1 : dp[T];
  }

  /* --------------------------------------------------------------------------
  dp[i] is the minimum count of coins with target value i, selected from all
  coin types, each with unlimited count.
  Bottom-up: extend forward from each currently reachable value.
  */
  public static int coinChange(int[] coins, int T) {
    if (T < 1) return 0;
    int MAX = T + 1; // sentinel for "unreachable"
    int[] dp = new int[MAX];
    for (int i = 1; i <= T; i++) {
      dp[i] = MAX;
    }
    // dp[0] is 0;
    Arrays.sort(coins); // when logN < T
    // bottom up from valid dp[t]
    for (int t = 0; t < T; t++) {
      if (dp[t] == MAX) continue; // t itself is unreachable
      for (int v : coins) {
        // '0 <= t + v' also guards against int overflow of t + v for huge coin values
        if (0 <= t + v && t + v <= T) {
          dp[t + v] = Math.min(dp[t + v], dp[t] + 1);
        } else break; // benefit from sorted coin values
      }
    }
    return dp[T] == MAX ? -1 : dp[T];
  }

  /* --------------------------------------------------------------------------
  Complete Knapsack => minimum count of coins with target value.
  dp[i] is the minimum count of coins with target value i, using coin types
  from coins[0] to coins[current index], each with unlimited count.
  Runtime O(T*N),
  Space O(T);
  N is coins type number.
  T is target value
  */
  public static int coinChangeCK(int[] coins, int T) {
    if (T < 1) return 0;
    int MAX = T + 1; // sentinel for "unreachable"
    int[] dp = new int[MAX];
    for (int i = 1; i <= T; i++) {
      dp[i] = MAX;
    }
    // dp[0] is 0;
    Arrays.sort(coins); // when logN < T
    for (int i = 0; i < coins.length; i++) {
      int v = coins[i];
      for (int t = 1; t <= T; t++) {
        // 't - v' cannot overflow: v >= 1 and t <= T, so t - v > Integer.MIN_VALUE
        if (0 <= t - v && dp[t - v] != MAX) {
          dp[t] = Math.min(dp[t], dp[t - v] + 1);
        }
      }
    }
    return dp[T] == MAX ? -1 : dp[T];
  }

  // -----------------------------------------------------------------------------
  public static void main(String[] args) {
    System.out.println("result: " + coinChange(new int[] {1}, 0));
    System.out.println("result: " + coinChange(new int[] {1, 2, 4, 5}, 8));
    System.out.println("result: " + coinChange(new int[] {2}, 3));
    System.out.println("result: " + coinChange(new int[] {1, 2, 5}, 11));
    System.out.println("result: " + coinChange(new int[] {1}, 2));
    System.out.println("result: " + coinChange(new int[] {470, 35, 120, 81, 121}, 9825));
    System.out.println("result: " + coinChange(new int[] {1, 2147483647}, 2));
  }
}
| |
package org.apache.maven.plugin.dependency;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.commons.io.FileUtils;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.factory.ArtifactFactory;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.repository.ArtifactRepositoryFactory;
import org.apache.maven.artifact.repository.ArtifactRepositoryPolicy;
import org.apache.maven.artifact.repository.layout.ArtifactRepositoryLayout;
import org.apache.maven.artifact.resolver.AbstractArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactResolver;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.codehaus.plexus.util.StringUtils;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* Resolves a single artifact, eventually transitively, from the specified remote repositories. Caveat: will always check the
* central repository defined in the super pom. You could use a mirror entry in your settings.xml
*/
@Mojo( name = "get", requiresProject = false, threadSafe = true )
public class GetMojo
    extends AbstractMojo
{
    /**
     * Matches the extended repository syntax {@code id::layout::url}; group 2 (the layout) may be empty.
     */
    private static final Pattern ALT_REPO_SYNTAX_PATTERN = Pattern.compile( "(.+)::(.*)::(.+)" );
    /**
     * Factory used to create the {@link Artifact} instance to resolve.
     */
    @Component
    private ArtifactFactory artifactFactory;
    /**
     * Resolver used to download the artifact and, optionally, its transitive dependencies.
     */
    @Component
    private ArtifactResolver artifactResolver;
    /**
     * Factory used to build {@link ArtifactRepository} instances for the configured remote repositories.
     */
    @Component
    private ArtifactRepositoryFactory artifactRepositoryFactory;
    /**
     * Map that contains the layouts, keyed by layout id (e.g. "default").
     */
    @Component( role = ArtifactRepositoryLayout.class )
    private Map<String, ArtifactRepositoryLayout> repositoryLayouts;
    /**
     * Metadata source consulted when resolving transitive dependencies.
     */
    @Component
    private ArtifactMetadataSource source;
    /**
     * The local repository that resolved artifacts are installed into.
     */
    @Parameter( defaultValue = "${localRepository}", readonly = true )
    private ArtifactRepository localRepository;
    /**
     * The groupId of the artifact to download. Ignored if {@link #artifact} is used.
     */
    @Parameter( property = "groupId" )
    private String groupId;
    /**
     * The artifactId of the artifact to download. Ignored if {@link #artifact} is used.
     */
    @Parameter( property = "artifactId" )
    private String artifactId;
    /**
     * The version of the artifact to download. Ignored if {@link #artifact} is used.
     */
    @Parameter( property = "version" )
    private String version;
    /**
     * The classifier of the artifact to download. Ignored if {@link #artifact} is used.
     *
     * @since 2.3
     */
    @Parameter( property = "classifier" )
    private String classifier;
    /**
     * The packaging of the artifact to download. Ignored if {@link #artifact} is used.
     */
    @Parameter( property = "packaging", defaultValue = "jar" )
    private String packaging = "jar";
    /**
     * The id of the repository from which we'll download the artifact
     *
     * @deprecated Use remoteRepositories
     */
    @Deprecated
    @Parameter( property = "repoId", defaultValue = "temp" )
    private String repositoryId = "temp";
    /**
     * The url of the repository from which we'll download the artifact. DEPRECATED Use remoteRepositories
     *
     * @deprecated Use remoteRepositories
     */
    @Deprecated
    @Parameter( property = "repoUrl" )
    private String repositoryUrl;
    /**
     * Repositories in the format id::[layout]::url or just url, separated by comma.
     * ie. central::default::http://repo1.maven.apache.org/maven2,myrepo::::http://repo.acme.com,http://repo.acme2.com
     */
    @Parameter( property = "remoteRepositories" )
    private String remoteRepositories;
    /**
     * A string of the form groupId:artifactId:version[:packaging][:classifier].
     */
    @Parameter( property = "artifact" )
    private String artifact;
    /**
     * The destination file or directory to copy the artifact to, if other than the local repository
     *
     * @since 2.4
     * @deprecated if you need to copy the resolved artifact, use dependency:copy
     */
    @Deprecated
    @Parameter( property = "dest" )
    private String destination;
    /**
     * The remote repositories declared in the current project's POM, if any.
     */
    @Parameter( defaultValue = "${project.remoteArtifactRepositories}", readonly = true, required = true )
    private List<ArtifactRepository> pomRemoteRepositories;
    /**
     * Download transitively, retrieving the specified artifact and all of its dependencies.
     */
    @Parameter( property = "transitive", defaultValue = "true" )
    private boolean transitive = true;
    /**
     * Skip plugin execution completely.
     *
     * @since 2.7
     */
    @Parameter( property = "mdep.skip", defaultValue = "false" )
    private boolean skip;
    /**
     * Resolves the requested artifact — either from the {@link #artifact} coordinate string or from
     * the individual groupId/artifactId/version parameters — and optionally copies it to
     * {@link #destination}.
     *
     * @throws MojoExecutionException if the artifact cannot be downloaded or copied
     * @throws MojoFailureException if the supplied coordinates or repository definitions are invalid
     */
    public void execute()
        throws MojoExecutionException, MojoFailureException
    {
        if ( isSkip() )
        {
            getLog().info( "Skipping plugin execution" );
            return;
        }
        if ( artifactId == null && artifact == null )
        {
            throw new MojoFailureException( "You must specify an artifact, "
                + "e.g. -Dartifact=org.apache.maven.plugins:maven-downloader-plugin:1.0" );
        }
        if ( artifact != null )
        {
            // Parse the compact groupId:artifactId:version[:packaging][:classifier] form.
            String[] tokens = StringUtils.split( artifact, ":" );
            if ( tokens.length < 3 || tokens.length > 5 )
            {
                throw new MojoFailureException(
                    "Invalid artifact, you must specify groupId:artifactId:version[:packaging][:classifier] "
                        + artifact );
            }
            groupId = tokens[0];
            artifactId = tokens[1];
            version = tokens[2];
            if ( tokens.length >= 4 )
            {
                packaging = tokens[3];
            }
            if ( tokens.length == 5 )
            {
                classifier = tokens[4];
            }
            else
            {
                // Reset so a stale -Dclassifier value never leaks into a 3/4-token coordinate.
                classifier = null;
            }
        }
        Artifact toDownload = classifier == null
            ? artifactFactory.createBuildArtifact( groupId, artifactId, version, packaging )
            : artifactFactory.createArtifactWithClassifier( groupId, artifactId, version, packaging, classifier );
        // The resolver API requires an "originating" artifact even for a standalone download.
        Artifact dummyOriginatingArtifact =
            artifactFactory.createBuildArtifact( "org.apache.maven.plugins", "maven-downloader-plugin", "1.0", "jar" );
        ArtifactRepositoryPolicy always =
            new ArtifactRepositoryPolicy( true, ArtifactRepositoryPolicy.UPDATE_POLICY_ALWAYS,
                                          ArtifactRepositoryPolicy.CHECKSUM_POLICY_WARN );
        List<ArtifactRepository> repoList = new ArrayList<ArtifactRepository>();
        if ( pomRemoteRepositories != null )
        {
            repoList.addAll( pomRemoteRepositories );
        }
        if ( remoteRepositories != null )
        {
            // Use the same format as in the deploy plugin id::layout::url
            List<String> repos = Arrays.asList( StringUtils.split( remoteRepositories, "," ) );
            for ( String repo : repos )
            {
                repoList.add( parseRepository( repo, always ) );
            }
        }
        if ( repositoryUrl != null )
        {
            getLog().warn( "repositoryUrl parameter is deprecated. Use remoteRepositories instead" );
            ArtifactRepository remoteRepo =
                artifactRepositoryFactory.createArtifactRepository( repositoryId, repositoryUrl, getLayout( "default" ),
                                                                    always, always );
            repoList.add( remoteRepo );
        }
        try
        {
            if ( transitive )
            {
                getLog().info( "Resolving " + toDownload + " with transitive dependencies" );
                artifactResolver.resolveTransitively( Collections.singleton( toDownload ), dummyOriginatingArtifact,
                                                      repoList, localRepository, source );
            }
            else
            {
                getLog().info( "Resolving " + toDownload );
                artifactResolver.resolve( toDownload, repoList, localRepository );
            }
        }
        catch ( AbstractArtifactResolutionException e )
        {
            throw new MojoExecutionException( "Couldn't download artifact: " + e.getMessage(), e );
        }
        if ( destination != null )
        {
            getLog().warn( "destination/dest parameter is deprecated: it will disappear in future version." );
            File src = toDownload.getFile();
            File dest = new File( destination );
            getLog().info( "Copying " + src.getAbsolutePath() + " to " + dest.getAbsolutePath() );
            if ( transitive )
            {
                getLog().warn( "Notice transitive dependencies won't be copied." );
            }
            try
            {
                if ( dest.isDirectory() )
                {
                    FileUtils.copyFileToDirectory( src, dest );
                }
                else
                {
                    FileUtils.copyFile( src, dest );
                }
            }
            catch ( IOException e )
            {
                throw new MojoExecutionException(
                    "Couldn't copy downloaded artifact from " + src.getAbsolutePath() + " to " + dest.getAbsolutePath()
                        + " : " + e.getMessage(), e );
            }
        }
    }
    /**
     * Parses a repository definition of the form {@code id::layout::url}, or a plain URL which
     * gets the default id and layout.
     *
     * @param repo the repository definition to parse
     * @param policy the release/snapshot policy to apply to the resulting repository
     * @return the corresponding {@link ArtifactRepository}
     * @throws MojoFailureException if the definition syntax or the layout id is invalid
     */
    ArtifactRepository parseRepository( String repo, ArtifactRepositoryPolicy policy )
        throws MojoFailureException
    {
        // if it's a simple url
        String id = repositoryId;
        ArtifactRepositoryLayout layout = getLayout( "default" );
        String url = repo;
        // if it's an extended repo URL of the form id::layout::url
        if ( repo.contains( "::" ) )
        {
            Matcher matcher = ALT_REPO_SYNTAX_PATTERN.matcher( repo );
            if ( !matcher.matches() )
            {
                throw new MojoFailureException( repo, "Invalid syntax for repository: " + repo,
                                                "Invalid syntax for repository. Use \"id::layout::url\" or \"URL\"." );
            }
            id = matcher.group( 1 ).trim();
            if ( !StringUtils.isEmpty( matcher.group( 2 ) ) )
            {
                layout = getLayout( matcher.group( 2 ).trim() );
            }
            url = matcher.group( 3 ).trim();
        }
        return artifactRepositoryFactory.createArtifactRepository( id, url, layout, policy, policy );
    }
    /**
     * Looks up a repository layout by id.
     *
     * @param id the layout id, e.g. "default"
     * @return the matching layout
     * @throws MojoFailureException if no layout with that id is registered
     */
    private ArtifactRepositoryLayout getLayout( String id )
        throws MojoFailureException
    {
        ArtifactRepositoryLayout layout = repositoryLayouts.get( id );
        if ( layout == null )
        {
            throw new MojoFailureException( id, "Invalid repository layout", "Invalid repository layout: " + id );
        }
        return layout;
    }
    /**
     * @return whether plugin execution should be skipped entirely
     */
    public boolean isSkip()
    {
        return skip;
    }
    /**
     * @param skip whether plugin execution should be skipped entirely
     */
    public void setSkip( boolean skip )
    {
        this.skip = skip;
    }
}
| |
/***
* ASM: a very small and fast Java bytecode manipulation framework
* Copyright (c) 2000-2005 INRIA, France Telecom
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. Neither the name of the copyright holders nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
* THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.mvel2.asm;
/**
 * A label represents a position in the bytecode of a method. Labels are used
 * for jump, goto, and switch instructions, and for try catch blocks.
 *
 * @author Eric Bruneton
 */
public class Label {
    /**
     * Indicates if this label is only used for debug attributes. Such a label
     * is not the start of a basic block, the target of a jump instruction, or
     * an exception handler. It can be safely ignored in control flow graph
     * analysis algorithms (for optimization purposes).
     */
    static final int DEBUG = 1;
    /**
     * Indicates if the position of this label is known.
     */
    static final int RESOLVED = 2;
    /**
     * Indicates if this label has been updated, after instruction resizing.
     */
    static final int RESIZED = 4;
    /**
     * Indicates if this basic block has been pushed in the basic block stack.
     * See {@link MethodWriter#visitMaxs visitMaxs}.
     */
    static final int PUSHED = 8;
    /**
     * Indicates if this label is the target of a jump instruction, or the start
     * of an exception handler.
     */
    static final int TARGET = 16;
    /**
     * Indicates if a stack map frame must be stored for this label.
     */
    static final int STORE = 32;
    /**
     * Indicates if this label corresponds to a reachable basic block.
     */
    static final int REACHABLE = 64;
    /**
     * Indicates if this basic block ends with a JSR instruction.
     */
    static final int JSR = 128;
    /**
     * Indicates if this basic block ends with a RET instruction.
     */
    static final int RET = 256;
    /**
     * Field used to associate user information to a label.
     */
    public Object info;
    /**
     * Flags that indicate the status of this label.
     *
     * @see #DEBUG
     * @see #RESOLVED
     * @see #RESIZED
     * @see #PUSHED
     * @see #TARGET
     * @see #STORE
     * @see #REACHABLE
     * @see #JSR
     * @see #RET
     */
    int status;
    /**
     * The line number corresponding to this label, if known.
     */
    int line;
    /**
     * The position of this label in the code, if known.
     */
    int position;
    /**
     * Number of forward references to this label, times two.
     */
    private int referenceCount;
    /**
     * Information about forward references. Each forward reference is
     * described by two consecutive integers in this array: the first one is the
     * position of the first byte of the bytecode instruction that contains the
     * forward reference, while the second is the position of the first byte of
     * the forward reference itself. In fact the sign of the first integer
     * indicates if this reference uses 2 or 4 bytes, and its absolute value
     * gives the position of the bytecode instruction.
     */
    private int[] srcAndRefPositions;
    // ------------------------------------------------------------------------
    /*
     * Fields for the control flow and data flow graph analysis algorithms (used
     * to compute the maximum stack size or the stack map frames). A control
     * flow graph contains one node per "basic block", and one edge per "jump"
     * from one basic block to another. Each node (i.e., each basic block) is
     * represented by the Label object that corresponds to the first instruction
     * of this basic block. Each node also stores the list of its successors in
     * the graph, as a linked list of Edge objects.
     *
     * The control flow analysis algorithms used to compute the maximum stack
     * size or the stack map frames are similar and use two steps. The first
     * step, during the visit of each instruction, builds information about the
     * state of the local variables and the operand stack at the end of each
     * basic block, called the "output frame", <i>relatively</i> to the frame
     * state at the beginning of the basic block, which is called the "input
     * frame", and which is <i>unknown</i> during this step. The second step,
     * in {@link MethodWriter#visitMaxs}, is a fix point algorithm that
     * computes information about the input frame of each basic block, from the
     * input state of the first basic block (known from the method signature),
     * and by the using the previously computed relative output frames.
     *
     * The algorithm used to compute the maximum stack size only computes the
     * relative output and absolute input stack heights, while the algorithm
     * used to compute stack map frames computes relative output frames and
     * absolute input frames.
     */
    /**
     * Start of the output stack relatively to the input stack. The exact
     * semantics of this field depends on the algorithm that is used.
     * <p/>
     * When only the maximum stack size is computed, this field is the number of
     * elements in the input stack.
     * <p/>
     * When the stack map frames are completely computed, this field is the
     * offset of the first output stack element relatively to the top of the
     * input stack. This offset is always negative or null. A null offset means
     * that the output stack must be appended to the input stack. A -n offset
     * means that the first n output stack elements must replace the top n input
     * stack elements, and that the other elements must be appended to the input
     * stack.
     */
    int inputStackTop;
    /**
     * Maximum height reached by the output stack, relatively to the top of the
     * input stack. This maximum is always positive or null.
     */
    int outputStackMax;
    /**
     * Information about the input and output stack map frames of this basic
     * block. This field is only used when {@link ClassWriter#COMPUTE_FRAMES}
     * option is used.
     */
    Frame frame;
    /**
     * The successor of this label, in the order they are visited. This linked
     * list does not include labels used for debug info only. If
     * {@link ClassWriter#COMPUTE_FRAMES} option is used then, in addition, it
     * does not contain successive labels that denote the same bytecode position
     * (in this case only the first label appears in this list).
     */
    Label successor;
    /**
     * The successors of this node in the control flow graph. These successors
     * are stored in a linked list of {@link Edge Edge} objects, linked to each
     * other by their {@link Edge#next} field.
     */
    Edge successors;
    /**
     * The next basic block in the basic block stack. This stack is used in the
     * main loop of the fix point algorithm used in the second step of the
     * control flow analysis algorithms.
     *
     * @see MethodWriter#visitMaxs
     */
    Label next;
    // ------------------------------------------------------------------------
    // Constructor
    // ------------------------------------------------------------------------
    /**
     * Constructs a new label.
     */
    public Label() {
    }
    /**
     * Constructs a new label.
     *
     * @param debug if this label is only used for debug attributes.
     */
    Label(final boolean debug) {
        this.status = debug ? DEBUG : 0;
    }
    // ------------------------------------------------------------------------
    // Methods to compute offsets and to manage forward references
    // ------------------------------------------------------------------------
    /**
     * Returns the offset corresponding to this label. This offset is computed
     * from the start of the method's bytecode. <i>This method is intended for
     * {@link Attribute} sub classes, and is normally not needed by class
     * generators or adapters.</i>
     *
     * @return the offset corresponding to this label.
     * @throws IllegalStateException if this label is not resolved yet.
     */
    public int getOffset() {
        if ((status & RESOLVED) == 0) {
            throw new IllegalStateException("Label offset position has not been resolved yet");
        }
        return position;
    }
    /**
     * Puts a reference to this label in the bytecode of a method. If the
     * position of the label is known, the offset is computed and written
     * directly. Otherwise, a null offset is written and a new forward reference
     * is declared for this label.
     *
     * @param owner the code writer that calls this method.
     * @param out the bytecode of the method.
     * @param source the position of first byte of the bytecode instruction that
     *        contains this label.
     * @param wideOffset <tt>true</tt> if the reference must be stored in 4
     *        bytes, or <tt>false</tt> if it must be stored with 2 bytes.
     * @throws IllegalArgumentException if this label has not been created by
     *         the given code writer.
     */
    void put(
        final MethodWriter owner,
        final ByteVector out,
        final int source,
        final boolean wideOffset) {
        if ((status & RESOLVED) != 0) {
            if (wideOffset) {
                out.putInt(position - source);
            }
            else {
                out.putShort(position - source);
            }
        }
        else {
            if (wideOffset) {
                addReference(-1 - source, out.length);
                out.putInt(-1);
            }
            else {
                addReference(source, out.length);
                out.putShort(-1);
            }
        }
    }
    /**
     * Adds a forward reference to this label. This method must be called only
     * for a true forward reference, i.e. only if this label is not resolved
     * yet. For backward references, the offset of the reference can be, and
     * must be, computed and stored directly.
     *
     * @param sourcePosition the position of the referencing instruction. This
     *        position will be used to compute the offset of this forward
     *        reference.
     * @param referencePosition the position where the offset for this forward
     *        reference must be stored.
     */
    private void addReference(
        final int sourcePosition,
        final int referencePosition) {
        if (srcAndRefPositions == null) {
            srcAndRefPositions = new int[6];
        }
        if (referenceCount >= srcAndRefPositions.length) {
            int[] a = new int[srcAndRefPositions.length + 6];
            System.arraycopy(srcAndRefPositions,
                    0,
                    a,
                    0,
                    srcAndRefPositions.length);
            srcAndRefPositions = a;
        }
        srcAndRefPositions[referenceCount++] = sourcePosition;
        srcAndRefPositions[referenceCount++] = referencePosition;
    }
    /**
     * Resolves all forward references to this label. This method must be called
     * when this label is added to the bytecode of the method, i.e. when its
     * position becomes known. This method fills in the blanks that were left
     * in the bytecode by each forward reference previously added to this label.
     *
     * @param owner the code writer that calls this method.
     * @param position the position of this label in the bytecode.
     * @param data the bytecode of the method.
     * @return <tt>true</tt> if a blank that was left for this label was too
     *         small to store the offset. In such a case the corresponding jump
     *         instruction is replaced with a pseudo instruction (using unused
     *         opcodes) using an unsigned two bytes offset. These pseudo
     *         instructions will need to be replaced with true instructions with
     *         wider offsets (4 bytes instead of 2). This is done in
     *         {@link MethodWriter#resizeInstructions}.
     * @throws IllegalArgumentException if this label has already been resolved,
     *         or if it has not been created by the given code writer.
     */
    boolean resolve(
        final MethodWriter owner,
        final int position,
        final byte[] data) {
        boolean needUpdate = false;
        this.status |= RESOLVED;
        this.position = position;
        int i = 0;
        while (i < referenceCount) {
            int source = srcAndRefPositions[i++];
            int reference = srcAndRefPositions[i++];
            int offset;
            if (source >= 0) {
                offset = position - source;
                if (offset < Short.MIN_VALUE || offset > Short.MAX_VALUE) {
                    /*
                     * changes the opcode of the jump instruction, in order to
                     * be able to find it later (see resizeInstructions in
                     * MethodWriter). These temporary opcodes are similar to
                     * jump instruction opcodes, except that the 2 bytes offset
                     * is unsigned (and can therefore represent values from 0 to
                     * 65535, which is sufficient since the size of a method is
                     * limited to 65535 bytes).
                     */
                    int opcode = data[reference - 1] & 0xFF;
                    if (opcode <= Opcodes.JSR) {
                        // changes IFEQ ... JSR to opcodes 202 to 217
                        data[reference - 1] = (byte) (opcode + 49);
                    }
                    else {
                        // changes IFNULL and IFNONNULL to opcodes 218 and 219
                        data[reference - 1] = (byte) (opcode + 20);
                    }
                    needUpdate = true;
                }
                data[reference++] = (byte) (offset >>> 8);
                data[reference] = (byte) offset;
            }
            else {
                offset = position + source + 1;
                data[reference++] = (byte) (offset >>> 24);
                data[reference++] = (byte) (offset >>> 16);
                data[reference++] = (byte) (offset >>> 8);
                data[reference] = (byte) offset;
            }
        }
        return needUpdate;
    }
    /**
     * Returns the first label of the series to which this label belongs. For an
     * isolated label or for the first label in a series of successive labels,
     * this method returns the label itself. For other labels it returns the
     * first label of the series.
     *
     * @return the first label of the series to which this label belongs.
     */
    Label getFirst() {
        return frame == null ? this : frame.owner;
    }
    // ------------------------------------------------------------------------
    // Overridden Object methods
    // ------------------------------------------------------------------------
    /**
     * Returns a string representation of this label.
     *
     * @return a string representation of this label.
     */
    @Override
    public String toString() {
        return "L" + System.identityHashCode(this);
    }
}
| |
/**
* MOTECH PLATFORM OPENSOURCE LICENSE AGREEMENT
*
* Copyright (c) 2010-11 The Trustees of Columbia University in the City of
* New York and Grameen Foundation USA. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. Neither the name of Grameen Foundation USA, Columbia University, or
* their respective contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY GRAMEEN FOUNDATION USA, COLUMBIA UNIVERSITY
* AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
* BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL GRAMEEN FOUNDATION
* USA, COLUMBIA UNIVERSITY OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.motechproject.mobile.omp.manager.clickatell;
import org.motechproject.mobile.core.model.GatewayRequest;
import org.motechproject.mobile.core.model.GatewayResponse;
import org.motechproject.mobile.core.model.MStatus;
import org.motechproject.mobile.omp.manager.GatewayManager;
import org.motechproject.mobile.omp.manager.GatewayMessageHandler;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.OutputStreamWriter;
import java.io.UnsupportedEncodingException;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLConnection;
import java.net.URLEncoder;
import java.util.Date;
import java.util.Set;
import org.apache.log4j.Logger;
/**
 * Handles all interactions with the Clickatell HTTP message gateway.
 *
 * @author Kofi A. Asamoah (yoofi@dreamoval.com)
 * @date Sep 11, 2009
 */
public class ClickatellGatewayManagerImpl implements GatewayManager{
    private String baseUrl = "https://api.clickatell.com/http/";
    private String apiId;
    private String user;
    private String password;
    private String sender;
    private String deliveryAcknowledge;
    private String callback;
    // NOTE(review): holding the request body in an instance field makes this class
    // non-thread-safe if a single instance is shared; kept for backward compatibility.
    private String postData;
    private GatewayMessageHandler messageHandler;
    private static Logger logger = Logger.getLogger(ClickatellGatewayManagerImpl.class);
    public ClickatellGatewayManagerImpl(){
    }
    /**
     * Sends a message via the gateway's "sendmsg" command and parses the response.
     * On a read/write failure the failure text is handed to the message handler
     * as the gateway response (best-effort, matching historical behavior).
     *
     * @param messageDetails the message to send
     * @return the parsed gateway responses
     */
    public Set<GatewayResponse> sendMessage(GatewayRequest messageDetails) {
        try {
            StringBuilder params = new StringBuilder();
            params.append("api_id=").append(URLEncoder.encode(apiId, "UTF-8"));
            params.append("&user=").append(URLEncoder.encode(user, "UTF-8"));
            params.append("&password=").append(URLEncoder.encode(password, "UTF-8"));
            params.append("&to=").append(URLEncoder.encode(messageDetails.getRecipientsNumber(), "UTF-8"));
            params.append("&text=").append(URLEncoder.encode(messageDetails.getMessage(), "UTF-8"));
            params.append("&from=").append(URLEncoder.encode(sender, "UTF-8"));
            params.append("&concat=").append(URLEncoder.encode(String.valueOf(messageDetails.getGatewayRequestDetails().getNumberOfPages()), "UTF-8"));
            params.append("&deliv_ack=").append(URLEncoder.encode(deliveryAcknowledge, "UTF-8"));
            params.append("&callback=").append(URLEncoder.encode(callback, "UTF-8"));
            postData = params.toString();
        }
        catch (UnsupportedEncodingException ex) {
            logger.fatal("Error constructing request: parameter encoding failed", ex);
            throw new RuntimeException("Error constructing message", ex);
        }
        URLConnection conn = openGatewayConnection("sendmsg");
        String gatewayResponse;
        try {
            gatewayResponse = postAndRead(conn, postData);
        }
        catch (IOException ex) {
            // Best-effort: surface the failure text as the gateway response.
            logger.error("Error processing gateway request", ex);
            gatewayResponse = ex.getMessage();
        }
        messageDetails.setDateSent(new Date());
        //Convert the response to a standard format
        return messageHandler.parseMessageResponse(messageDetails, gatewayResponse);
    }
    /**
     * Queries the delivery status of a previously sent message via "querymsg".
     *
     * @param response the earlier gateway response identifying the message
     * @return the raw status text returned by the gateway
     */
    public String getMessageStatus(GatewayResponse response) {
        try {
            StringBuilder params = new StringBuilder();
            params.append("api_id=").append(URLEncoder.encode(apiId, "UTF-8"));
            params.append("&user=").append(URLEncoder.encode(user, "UTF-8"));
            params.append("&password=").append(URLEncoder.encode(password, "UTF-8"));
            params.append("&apimsgid=").append(URLEncoder.encode(response.getGatewayMessageId(), "UTF-8"));
            postData = params.toString();
        }
        catch (UnsupportedEncodingException ex) {
            logger.fatal("Error constructing request: parameter encoding failed", ex);
            throw new RuntimeException("Error constructing message", ex);
        }
        URLConnection conn = openGatewayConnection("querymsg");
        try {
            return postAndRead(conn, postData);
        }
        catch (IOException ex) {
            logger.fatal("Error processing gateway request", ex);
            throw new RuntimeException("Unable to communicate with gateway", ex);
        }
    }
    /**
     * Opens a connection to the gateway for the given HTTP API command.
     * Wraps setup failures in RuntimeException, preserving the original
     * per-exception log levels and messages.
     */
    private URLConnection openGatewayConnection(String command) {
        try {
            URL url = new URL(baseUrl + command);
            URLConnection conn = url.openConnection();
            conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
            return conn;
        }
        catch (MalformedURLException ex) {
            logger.fatal("Error initializing Clickatell Gateway: invalid url", ex);
            throw new RuntimeException("Invalid gateway URL", ex);
        }
        catch (IOException ex) {
            logger.fatal("Error initializing Clickatell Gateway: unable to open URL connection", ex);
            throw new RuntimeException("Could not connect to gateway", ex);
        }
    }
    /**
     * Writes the POST body to the connection and reads back the full response,
     * one line per "\n". Streams are always closed, even on failure (the
     * original code leaked them when an IOException occurred mid-transfer).
     */
    private String postAndRead(URLConnection conn, String data) throws IOException {
        conn.setDoOutput(true);
        OutputStreamWriter out = null;
        BufferedReader in = null;
        try {
            out = new OutputStreamWriter(conn.getOutputStream());
            out.write(data);
            out.flush();
            in = new BufferedReader(new InputStreamReader(conn.getInputStream()));
            StringBuilder response = new StringBuilder();
            String line;
            while ((line = in.readLine()) != null) {
                response.append(line).append('\n');
            }
            return response.toString();
        }
        finally {
            if (out != null) {
                try {
                    out.close();
                }
                catch (IOException ignored) {
                    // best-effort close
                }
            }
            if (in != null) {
                try {
                    in.close();
                }
                catch (IOException ignored) {
                    // best-effort close
                }
            }
        }
    }
    public MStatus mapMessageStatus(GatewayResponse response) {
        return messageHandler.parseMessageStatus(response.getResponseText());
    }
    public GatewayMessageHandler getMessageHandler() {
        return messageHandler;
    }
    public void setMessageHandler(GatewayMessageHandler messageHandler) {
        logger.debug("Setting ClickatellGatewayManagerImpl.messageHandler");
        logger.debug(messageHandler);
        this.messageHandler = messageHandler;
    }
    public String getApiId() {
        return apiId;
    }
    public void setApiId(String apiId) {
        this.apiId = apiId;
    }
    public String getUser() {
        return user;
    }
    public void setUser(String user) {
        this.user = user;
    }
    public String getPassword() {
        return password;
    }
    public void setPassword(String password) {
        this.password = password;
    }
    public String getSender() {
        return sender;
    }
    public void setSender(String sender) {
        this.sender = sender;
    }
    public String getDeliveryAcknowledge() {
        return deliveryAcknowledge;
    }
    public void setDeliveryAcknowledge(String deliveryAcknowledge) {
        this.deliveryAcknowledge = deliveryAcknowledge;
    }
    public String getCallback() {
        return callback;
    }
    public void setCallback(String callback) {
        this.callback = callback;
    }
    /**
     * @param baseUrl the baseUrl to set
     */
    public void setBaseUrl(String baseUrl) {
        this.baseUrl = baseUrl;
    }
}
| |
// Stanford Parser -- a probabilistic lexicalized NL CFG parser
// Copyright (c) 2002, 2003, 2004, 2005, 2008 The Board of Trustees of
// The Leland Stanford Junior University. All Rights Reserved.
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of the GNU General Public License
// as published by the Free Software Foundation; either version 2
// of the License, or (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
//
// For more information, bug reports, fixes, contact:
// Christopher Manning
// Dept of Computer Science, Gates 1A
// Stanford CA 94305-9010
// USA
// parser-support@lists.stanford.edu
// http://nlp.stanford.edu/software/lex-parser.shtml
package edu.stanford.nlp.parser.lexparser;
import edu.stanford.nlp.stats.ClassicCounter;
import edu.stanford.nlp.util.Index;
/**
* This is a basic unknown word model for English. It supports 5 different
* types of feature modeling; see {@link #getSignature(String, int)}.
*
 * <i>Implementation note: the contents of this class tend to overlap somewhat
 * with {@link ArabicUnknownWordModel} and were originally included in {@link BaseLexicon}.</i>
*
* @author Dan Klein
* @author Galen Andrew
* @author Christopher Manning
* @author Anna Rafferty
*/
public class EnglishUnknownWordModel extends BaseUnknownWordModel {
private static final long serialVersionUID = 4825624957364628770L;
private static final boolean DEBUG_UWM = false;
protected final boolean smartMutation;
protected final int unknownSuffixSize;
protected final int unknownPrefixSize;
private static final int MIN_UNKNOWN = 0;
private static final int MAX_UNKNOWN = 7;
public EnglishUnknownWordModel(Options op, Lexicon lex,
Index<String> wordIndex,
Index<String> tagIndex,
ClassicCounter<IntTaggedWord> unSeenCounter) {
super(op, lex, wordIndex, tagIndex, unSeenCounter, null, null, null);
unknownLevel = op.lexOptions.useUnknownWordSignatures;
if (unknownLevel < MIN_UNKNOWN || unknownLevel > MAX_UNKNOWN) {
System.err.println("Invalid value for useUnknownWordSignatures: " + unknownLevel);
if (unknownLevel < MIN_UNKNOWN) {
unknownLevel = MIN_UNKNOWN;
} else if (unknownLevel > MAX_UNKNOWN) {
unknownLevel = MAX_UNKNOWN;
}
}
this.smartMutation = op.lexOptions.smartMutation;
this.unknownSuffixSize = op.lexOptions.unknownSuffixSize;
this.unknownPrefixSize = op.lexOptions.unknownPrefixSize;
}
  /**
   * This constructor creates an UWM with empty data structures. Only
   * use if loading in the data separately, such as by reading in text
   * lines containing the data.
   * Delegates to the main constructor with a fresh, empty unseen-word counter.
   */
  public EnglishUnknownWordModel(Options op, Lexicon lex,
                                Index<String> wordIndex,
                                Index<String> tagIndex) {
    this(op, lex, wordIndex, tagIndex, new ClassicCounter<IntTaggedWord>());
  }
@Override
public float score(IntTaggedWord iTW, int loc, double c_Tseen, double total, double smooth, String word) {
double pb_T_S = scoreProbTagGivenWordSignature(iTW, loc, smooth, word);
double p_T = (c_Tseen / total);
double p_W = 1.0 / total;
double pb_W_T = Math.log(pb_T_S * p_W / p_T);
if (pb_W_T > -100.0) {
if (DEBUG_UWM) {
System.err.println(iTW + " tagging has probability " + pb_W_T);
}
return (float) pb_W_T;
}
if (DEBUG_UWM) {
System.err.println(iTW + " tagging is impossible.");
}
return Float.NEGATIVE_INFINITY;
} // end score()
/** Calculate P(Tag|Signature) with Bayesian smoothing via just P(Tag|Unknown) */
@Override
public double scoreProbTagGivenWordSignature(IntTaggedWord iTW, int loc, double smooth, String word) {
  // iTW.tag = nullTag;
  // double c_W = ((BaseLexicon) l).getCount(iTW);
  // iTW.tag = tag;
  // unknown word model for P(T|S)
  // NOTE(review): getSignatureIndex() appears to add the signature to
  // wordIndex as a side effect (indexOf with add=true), so it is invoked
  // even at unknownLevel == 0, where its counts are zeroed out below.
  int wordSig = getSignatureIndex(iTW.word, loc, word);
  IntTaggedWord temp = new IntTaggedWord(wordSig, iTW.tag);
  // c(T,S): count of this tag occurring with this signature among unseen words
  double c_TS = unSeenCounter.getCount(temp);
  temp = new IntTaggedWord(wordSig, nullTag);
  // c(S): total count of this signature
  double c_S = unSeenCounter.getCount(temp);
  // c(U): total mass of all unseen-word events
  double c_U = unSeenCounter.getCount(NULL_ITW);
  temp = new IntTaggedWord(nullWord, iTW.tag);
  // c(T): count of this tag over all unseen words
  double c_T = unSeenCounter.getCount(temp);
  double p_T_U = c_T / c_U; // P(T|unknown): the smoothing prior
  if (unknownLevel == 0) {
    // Level 0 uses no signature information: fall back to pure P(T|unknown).
    c_TS = 0;
    c_S = 0;
  }
  return (c_TS + smooth * p_T_U) / (c_S + smooth);
}
/**
 * Returns the index of the signature of the word numbered wordIndex, where
 * the signature is the String representation of unknown word features.
 *
 * @param index The word's own index (used only in the debug message here)
 * @param sentencePosition The word's position in the sentence
 * @param word The word itself
 * @return The index of the word's signature in wordIndex
 */
@Override
public int getSignatureIndex(int index, int sentencePosition, String word) {
  String uwSig = getSignature(word, sentencePosition);
  // NOTE(review): the 'true' argument presumably makes indexOf add the
  // signature when absent, i.e. this call can mutate wordIndex -- confirm
  // against the Index.indexOf contract.
  int sig = wordIndex.indexOf(uwSig, true);
  if (DEBUG_UWM) {
    System.err.println("Signature (" + unknownLevel + "): mapped " + word +
        " (" + index + ") to " + uwSig + " (" + sig + ")");
  }
  return sig;
}
/**
 * Returns the "signature" (equivalence class) of a word: a String of the
 * form UNK(-.+)? summarizing its unknown-word features, assumed not to
 * collide with any real word. Which features are used is controlled by the
 * configured unknownLevel (the -uwm flag): levels 2-4 look for broadly
 * language-neutral features (digits, dashes, capitalization), level 5 is
 * fairly English-specific, level 1 uses the last two characters plus a
 * simple capitalization class, and level 0 returns just "UNK".
 *
 * @param word The word to make a signature for
 * @param loc Its position in the sentence (mainly so sentence-initial
 *          capitalized words can be treated differently)
 * @return A String that is its signature (equivalence class)
 */
@Override
public String getSignature(String word, int loc) {
  StringBuilder sb = new StringBuilder("UNK");
  switch (unknownLevel) {
    case 1:
      getSignature1(word, loc, sb);
      break;
    case 2:
      getSignature2(word, loc, sb);
      break;
    case 3:
      getSignature3(word, loc, sb);
      break;
    case 4:
      getSignature4(word, loc, sb);
      break;
    case 5:
      getSignature5(word, loc, sb);
      break;
    case 6:
      getSignature6(word, loc, sb);
      break;
    case 7:
      getSignature7(word, loc, sb);
      break;
    default:
      // Level 0 (or anything unexpected): leave the signature as plain "UNK".
      break;
  }
  return sb.toString();
} // end getSignature()
/**
 * Level 7 signature (Sep 2008): like level 2 but keys on capitals anywhere
 * in the word rather than only an initial capital. Appends, in order:
 * {-ALLC, -INIT, -UC, -LC, nothing} + {-DASH, nothing} +
 * {-NUM, -DIG, nothing} + {lowercased last char for words longer than 3}.
 */
@SuppressWarnings({"MethodMayBeStatic"})
private void getSignature7(String word, int loc, StringBuilder sb) {
  boolean sawDigit = false;
  boolean sawNonDigit = false;
  boolean sawLower = false;
  boolean sawUpper = false;
  boolean sawDash = false;
  int len = word.length();
  for (int i = 0; i < len; i++) {
    char c = word.charAt(i);
    if (Character.isDigit(c)) {
      sawDigit = true;
      continue;
    }
    sawNonDigit = true;
    if (c == '-') {
      sawDash = true;
    } else if (Character.isLetter(c)) {
      // Title-case characters are grouped with lowercase here.
      if (Character.isLowerCase(c) || Character.isTitleCase(c)) {
        sawLower = true;
      } else {
        sawUpper = true;
      }
    }
  }
  // Capitalization class.
  if (len > 0 && sawUpper) {
    if (!sawLower) {
      sb.append("-ALLC");
    } else if (loc == 0) {
      sb.append("-INIT");
    } else {
      sb.append("-UC");
    }
  } else if (sawLower) {
    sb.append("-LC");
  }
  // No capitalization suffix means the word has no letters at all.
  if (sawDash) {
    sb.append("-DASH");
  }
  if (sawDigit) {
    sb.append(sawNonDigit ? "-DIG" : "-NUM");
  } else if (len > 3) {
    // Skip very short words: "yes" isn't an "-es" word.
    sb.append(Character.toLowerCase(word.charAt(len - 1)));
  }
  // No trailing suffix: a short non-number, non-alphabetic token.
}
/**
 * Level 6 signature (Sep 2008): like level 5 but keys on capitals anywhere
 * in the word rather than only an initial capital. Appends, in order:
 * { -CAPS, -INITC [-KNOWNLC], -CAP, -LC, nothing } + { -NUM, nothing } +
 * { -DASH, nothing } + { a known discriminating suffix, nothing }.
 */
private void getSignature6(String word, int loc, StringBuilder sb) {
  // New Sep 2008. Like 5 but rely more on Caps somewhere than initial Caps
  // { -INITC, -CAPS, (has) -CAP, -LC lowercase, 0 } +
  // { -KNOWNLC, 0 } + [only for INITC]
  // { -NUM, 0 } +
  // { -DASH, 0 } +
  // { -last lowered char(s) if known discriminating suffix, 0}
  int wlen = word.length();
  int numCaps = 0;
  boolean hasDigit = false;
  boolean hasDash = false;
  boolean hasLower = false;
  for (int i = 0; i < wlen; i++) {
    char ch = word.charAt(i);
    if (Character.isDigit(ch)) {
      hasDigit = true;
    } else if (ch == '-') {
      hasDash = true;
    } else if (Character.isLetter(ch)) {
      if (Character.isLowerCase(ch)) {
        hasLower = true;
      } else if (Character.isTitleCase(ch)) {
        // A title-case character counts as both lowercase and a capital.
        hasLower = true;
        numCaps++;
      } else {
        numCaps++;
      }
    }
  }
  String lowered = word.toLowerCase();
  if (numCaps > 1) {
    sb.append("-CAPS");
  } else if (numCaps > 0) {
    if (loc == 0) {
      sb.append("-INITC");
      // Sentence-initial capitalized word whose lowercased form the lexicon knows.
      if (getLexicon().isKnown(lowered)) {
        sb.append("-KNOWNLC");
      }
    } else {
      sb.append("-CAP");
    }
  } else if (hasLower) { // (Character.isLowerCase(ch0)) {
    sb.append("-LC");
  }
  if (hasDigit) {
    sb.append("-NUM");
  }
  if (hasDash) {
    sb.append("-DASH");
  }
  if (lowered.endsWith("s") && wlen >= 3) {
    // here length 3, so you don't miss out on ones like 80s
    char ch2 = lowered.charAt(wlen - 2);
    // not -ess suffixes or greek/latin -us, -is
    if (ch2 != 's' && ch2 != 'i' && ch2 != 'u') {
      sb.append("-s");
    }
  } else if (word.length() >= 5 && !hasDash && !(hasDigit && numCaps > 0)) {
    // don't do for very short words;
    // Implement common discriminating suffixes
    // NOTE: the order of these endsWith checks matters (e.g. "-ly" must be
    // tested before "-y"); do not reorder.
    if (lowered.endsWith("ed")) {
      sb.append("-ed");
    } else if (lowered.endsWith("ing")) {
      sb.append("-ing");
    } else if (lowered.endsWith("ion")) {
      sb.append("-ion");
    } else if (lowered.endsWith("er")) {
      sb.append("-er");
    } else if (lowered.endsWith("est")) {
      sb.append("-est");
    } else if (lowered.endsWith("ly")) {
      sb.append("-ly");
    } else if (lowered.endsWith("ity")) {
      sb.append("-ity");
    } else if (lowered.endsWith("y")) {
      sb.append("-y");
    } else if (lowered.endsWith("al")) {
      sb.append("-al");
      // } else if (lowered.endsWith("ble")) {
      // sb.append("-ble");
      // } else if (lowered.endsWith("e")) {
      // sb.append("-e");
    }
  }
}
/**
 * Level 5 signature (reformed Mar 2004): the most English-specific level.
 * Appends, in order: { -CAPS, -INITC [-KNOWNLC], -LC, nothing } +
 * { -NUM, nothing } + { -DASH, nothing } +
 * { a known discriminating suffix, nothing }.
 */
private void getSignature5(String word, int loc, StringBuilder sb) {
  // Reformed Mar 2004 (cdm); hopefully better now.
  // { -CAPS, -INITC ap, -LC lowercase, 0 } +
  // { -KNOWNLC, 0 } + [only for INITC]
  // { -NUM, 0 } +
  // { -DASH, 0 } +
  // { -last lowered char(s) if known discriminating suffix, 0}
  int wlen = word.length();
  int numCaps = 0;
  boolean hasDigit = false;
  boolean hasDash = false;
  boolean hasLower = false;
  for (int i = 0; i < wlen; i++) {
    char ch = word.charAt(i);
    if (Character.isDigit(ch)) {
      hasDigit = true;
    } else if (ch == '-') {
      hasDash = true;
    } else if (Character.isLetter(ch)) {
      if (Character.isLowerCase(ch)) {
        hasLower = true;
      } else if (Character.isTitleCase(ch)) {
        // A title-case character counts as both lowercase and a capital.
        hasLower = true;
        numCaps++;
      } else {
        numCaps++;
      }
    }
  }
  // NOTE(review): charAt(0) throws on an empty word; unlike levels 2 and 7
  // there is no wlen > 0 guard here -- confirm callers never pass "".
  char ch0 = word.charAt(0);
  String lowered = word.toLowerCase();
  if (Character.isUpperCase(ch0) || Character.isTitleCase(ch0)) {
    if (loc == 0 && numCaps == 1) {
      sb.append("-INITC");
      // Sentence-initial capitalized word whose lowercased form the lexicon knows.
      if (getLexicon().isKnown(lowered)) {
        sb.append("-KNOWNLC");
      }
    } else {
      sb.append("-CAPS");
    }
  } else if (!Character.isLetter(ch0) && numCaps > 0) {
    sb.append("-CAPS");
  } else if (hasLower) { // (Character.isLowerCase(ch0)) {
    sb.append("-LC");
  }
  if (hasDigit) {
    sb.append("-NUM");
  }
  if (hasDash) {
    sb.append("-DASH");
  }
  if (lowered.endsWith("s") && wlen >= 3) {
    // here length 3, so you don't miss out on ones like 80s
    char ch2 = lowered.charAt(wlen - 2);
    // not -ess suffixes or greek/latin -us, -is
    if (ch2 != 's' && ch2 != 'i' && ch2 != 'u') {
      sb.append("-s");
    }
  } else if (word.length() >= 5 && !hasDash && !(hasDigit && numCaps > 0)) {
    // don't do for very short words;
    // Implement common discriminating suffixes
    // NOTE: the order of these endsWith checks matters (e.g. "-ly" must be
    // tested before "-y"); do not reorder.
    if (lowered.endsWith("ed")) {
      sb.append("-ed");
    } else if (lowered.endsWith("ing")) {
      sb.append("-ing");
    } else if (lowered.endsWith("ion")) {
      sb.append("-ion");
    } else if (lowered.endsWith("er")) {
      sb.append("-er");
    } else if (lowered.endsWith("est")) {
      sb.append("-est");
    } else if (lowered.endsWith("ly")) {
      sb.append("-ly");
    } else if (lowered.endsWith("ity")) {
      sb.append("-ity");
    } else if (lowered.endsWith("y")) {
      sb.append("-y");
    } else if (lowered.endsWith("al")) {
      sb.append("-al");
      // } else if (lowered.endsWith("ble")) {
      // sb.append("-ble");
      // } else if (lowered.endsWith("e")) {
      // sb.append("-e");
    }
  }
}
/**
 * Level 4 signature: a 6-way letter/capitalization class, a 3-way digit
 * class, binary flags for dash/period/comma, and the lowercased final
 * letter for words longer than 3 characters.
 */
@SuppressWarnings({"MethodMayBeStatic"})
private void getSignature4(String word, int loc, StringBuilder sb) {
  boolean hasDigit = false;
  boolean hasNonDigit = false;
  boolean hasLetter = false;
  boolean hasLower = false;
  boolean hasDash = false;
  boolean hasPeriod = false;
  boolean hasComma = false;
  for (int i = 0; i < word.length(); i++) {
    char ch = word.charAt(i);
    if (Character.isDigit(ch)) {
      hasDigit = true;
    } else {
      hasNonDigit = true;
      if (Character.isLetter(ch)) {
        hasLetter = true;
        // Title-case characters are grouped with lowercase here.
        if (Character.isLowerCase(ch) || Character.isTitleCase(ch)) {
          hasLower = true;
        }
      } else {
        if (ch == '-') {
          hasDash = true;
        } else if (ch == '.') {
          hasPeriod = true;
        } else if (ch == ',') {
          hasComma = true;
        }
      }
    }
  }
  // 6 way on letters
  // NOTE(review): charAt(0) throws on an empty word -- confirm callers
  // never pass one.
  if (Character.isUpperCase(word.charAt(0)) || Character.isTitleCase(word.charAt(0))) {
    if (!hasLower) {
      sb.append("-AC"); // all caps
    } else if (loc == 0) {
      sb.append("-SC"); // sentence-initial capital
    } else {
      sb.append("-C");  // capitalized mid-sentence
    }
  } else if (hasLower) {
    sb.append("-L");
  } else if (hasLetter) {
    sb.append("-U");
  } else {
    // no letter
    sb.append("-S");
  }
  // 3 way on number
  if (hasDigit && !hasNonDigit) {
    sb.append("-N"); // all digits
  } else if (hasDigit) {
    sb.append("-n"); // some digits
  }
  // binary on period, dash, comma
  if (hasDash) {
    sb.append("-H");
  }
  if (hasPeriod) {
    sb.append("-P");
  }
  if (hasComma) {
    // NOTE(review): "-C" is also used above for capitalized words; the two
    // are distinguishable only by their position within the signature.
    sb.append("-C");
  }
  if (word.length() > 3) {
    // don't do for very short words: "yes" isn't an "-es" word
    // try doing to lower for further densening and skipping digits
    char ch = word.charAt(word.length() - 1);
    if (Character.isLetter(ch)) {
      sb.append('-');
      sb.append(Character.toLowerCase(ch));
    }
  }
}
/**
 * Level 3 signature: compresses the word into a sequence of character-class
 * codes (S/L = capital, l = lowercase letter, d = digit, h = dash,
 * p = period, s = other symbol), run-length-limited with '+' marking a
 * repeated class, plus the lowercased final character for words longer than
 * 3 characters. Note that 'S' is applied to all capitalized letters in the
 * first word of the sentence, not just the first character.
 */
@SuppressWarnings({"MethodMayBeStatic"})
private void getSignature3(String word, int loc, StringBuilder sb) {
  sb.append('-');
  char prevClass = '-'; // sentinel that matches no real class
  int runLength = 0;
  int len = word.length();
  for (int i = 0; i < len; i++) {
    char c = word.charAt(i);
    char cls;
    if (Character.isUpperCase(c) || Character.isTitleCase(c)) {
      cls = (loc == 0) ? 'S' : 'L';
    } else if (Character.isLetter(c)) {
      cls = 'l';
    } else if (Character.isDigit(c)) {
      cls = 'd';
    } else if (c == '-') {
      cls = 'h';
    } else if (c == '.') {
      cls = 'p';
    } else {
      cls = 's';
    }
    if (cls != prevClass) {
      prevClass = cls;
      sb.append(cls);
      runLength = 1;
    } else {
      // Same class as before: emit at most one '+' per run.
      if (runLength < 2) {
        sb.append('+');
      }
      runLength++;
    }
  }
  if (len > 3) {
    // Skip very short words: "yes" isn't an "-es" word.
    sb.append('-');
    sb.append(Character.toLowerCase(word.charAt(len - 1)));
  }
}
/**
 * Level 2 signature: {-ALLC, -INIT, -UC, -LC, nothing} + {-DASH, nothing} +
 * {-NUM, -DIG, nothing} + {lowercased last char for words longer than 3}.
 * Unlike level 7, the capitalization class looks only at the first character.
 */
@SuppressWarnings({"MethodMayBeStatic"})
private void getSignature2(String word, int loc, StringBuilder sb) {
  boolean sawDigit = false;
  boolean sawNonDigit = false;
  boolean sawLower = false;
  int len = word.length();
  for (int i = 0; i < len; i++) {
    char c = word.charAt(i);
    if (Character.isDigit(c)) {
      sawDigit = true;
    } else {
      sawNonDigit = true;
      // Title-case characters are grouped with lowercase here.
      if (Character.isLetter(c)
          && (Character.isLowerCase(c) || Character.isTitleCase(c))) {
        sawLower = true;
      }
    }
  }
  if (len > 0
      && (Character.isUpperCase(word.charAt(0)) || Character.isTitleCase(word.charAt(0)))) {
    if (!sawLower) {
      sb.append("-ALLC");
    } else if (loc == 0) {
      sb.append("-INIT");
    } else {
      sb.append("-UC");
    }
  } else if (sawLower) {
    sb.append("-LC");
  }
  // No capitalization suffix means the word has no (lowercase) letters.
  if (word.indexOf('-') >= 0) {
    sb.append("-DASH");
  }
  if (sawDigit) {
    sb.append(sawNonDigit ? "-DIG" : "-NUM");
  } else if (len > 3) {
    // Skip very short words: "yes" isn't an "-es" word.
    sb.append(Character.toLowerCase(word.charAt(len - 1)));
  }
  // No trailing suffix: a short non-number, non-alphabetic token.
}
/**
 * Level 1 signature: the last two characters of the word plus a coarse
 * capitalization class (LOWER / INIT / UPPER / OTHER) of its first char.
 */
@SuppressWarnings({"MethodMayBeStatic"})
private void getSignature1(String word, int loc, StringBuilder sb) {
  sb.append('-');
  // One-argument substring runs to the end of the string.
  sb.append(word.substring(Math.max(word.length() - 2, 0)));
  sb.append('-');
  char first = word.charAt(0);
  if (Character.isLowerCase(first)) {
    sb.append("LOWER");
  } else if (Character.isUpperCase(first)) {
    sb.append(loc == 0 ? "INIT" : "UPPER");
  } else {
    sb.append("OTHER");
  }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.scheduler.strategy;
import org.apache.flink.runtime.execution.ExecutionState;
import org.apache.flink.runtime.executiongraph.failover.flip1.PipelinedRegionComputeUtil;
import org.apache.flink.runtime.io.network.partition.ResultPartitionType;
import org.apache.flink.runtime.jobgraph.DistributionPattern;
import org.apache.flink.runtime.jobgraph.IntermediateDataSetID;
import org.apache.flink.runtime.jobgraph.IntermediateResultPartitionID;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.stream.Collectors;
import static org.apache.flink.util.Preconditions.checkState;
/** A simple scheduling topology for testing purposes. */
public class TestingSchedulingTopology implements SchedulingTopology {

    // Use a linked map here so that we can get the values in insertion order.
    private final Map<ExecutionVertexID, TestingSchedulingExecutionVertex>
            schedulingExecutionVertices = new LinkedHashMap<>();

    private final Map<IntermediateResultPartitionID, TestingSchedulingResultPartition>
            schedulingResultPartitions = new HashMap<>();

    // Lazily computed cache mapping each vertex to its pipelined region; reset
    // to null (stale) whenever the topology changes and rebuilt on next access.
    private Map<ExecutionVertexID, TestingSchedulingPipelinedRegion> vertexRegions;

    /** Returns all vertices, unmodifiable, in insertion order. */
    @Override
    public Iterable<TestingSchedulingExecutionVertex> getVertices() {
        return Collections.unmodifiableCollection(schedulingExecutionVertices.values());
    }

    /** Looks up a vertex by id; throws IllegalArgumentException if unknown. */
    @Override
    public TestingSchedulingExecutionVertex getVertex(final ExecutionVertexID executionVertexId) {
        final TestingSchedulingExecutionVertex executionVertex =
                schedulingExecutionVertices.get(executionVertexId);
        if (executionVertex == null) {
            throw new IllegalArgumentException("can not find vertex: " + executionVertexId);
        }
        return executionVertex;
    }

    /** Looks up a result partition by id; throws IllegalArgumentException if unknown. */
    @Override
    public TestingSchedulingResultPartition getResultPartition(
            final IntermediateResultPartitionID intermediateResultPartitionId) {
        final TestingSchedulingResultPartition resultPartition =
                schedulingResultPartitions.get(intermediateResultPartitionId);
        if (resultPartition == null) {
            throw new IllegalArgumentException(
                    "can not find partition: " + intermediateResultPartitionId);
        }
        return resultPartition;
    }

    /** Returns the distinct pipelined regions (deduplicated via a fresh set). */
    @Override
    public Iterable<SchedulingPipelinedRegion> getAllPipelinedRegions() {
        return new HashSet<>(getVertexRegions().values());
    }

    @Override
    public SchedulingPipelinedRegion getPipelinedRegionOfVertex(ExecutionVertexID vertexId) {
        return getVertexRegions().get(vertexId);
    }

    // Returns the region cache, recomputing it if it was invalidated.
    private Map<ExecutionVertexID, TestingSchedulingPipelinedRegion> getVertexRegions() {
        if (vertexRegions == null) {
            generatePipelinedRegions();
        }
        return vertexRegions;
    }

    // Recomputes all pipelined regions from the current topology and fills the
    // vertex -> region cache.
    private void generatePipelinedRegions() {
        vertexRegions = new HashMap<>();
        final Set<Set<SchedulingExecutionVertex>> rawRegions =
                PipelinedRegionComputeUtil.computePipelinedRegions(getVertices());
        for (Set<SchedulingExecutionVertex> rawRegion : rawRegions) {
            final Set<TestingSchedulingExecutionVertex> vertices =
                    rawRegion.stream()
                            .map(vertex -> schedulingExecutionVertices.get(vertex.getId()))
                            .collect(Collectors.toSet());
            final TestingSchedulingPipelinedRegion region =
                    new TestingSchedulingPipelinedRegion(vertices);
            for (TestingSchedulingExecutionVertex vertex : vertices) {
                vertexRegions.put(vertex.getId(), region);
            }
        }
    }

    // Invalidates the region cache; called after every topology mutation.
    private void resetPipelinedRegions() {
        vertexRegions = null;
    }

    /** Registers a new vertex (and its partitions); the vertex id must be new. */
    void addSchedulingExecutionVertex(TestingSchedulingExecutionVertex schedulingExecutionVertex) {
        checkState(!schedulingExecutionVertices.containsKey(schedulingExecutionVertex.getId()));
        schedulingExecutionVertices.put(
                schedulingExecutionVertex.getId(), schedulingExecutionVertex);
        updateVertexResultPartitions(schedulingExecutionVertex);
        resetPipelinedRegions();
    }

    // Indexes both the consumed and produced partitions of the given vertex.
    private void updateVertexResultPartitions(
            final TestingSchedulingExecutionVertex schedulingExecutionVertex) {
        addSchedulingResultPartitions(schedulingExecutionVertex.getConsumedResults());
        addSchedulingResultPartitions(schedulingExecutionVertex.getProducedResults());
    }

    private void addSchedulingResultPartitions(
            final Iterable<TestingSchedulingResultPartition> resultPartitions) {
        for (TestingSchedulingResultPartition schedulingResultPartition : resultPartitions) {
            schedulingResultPartitions.put(
                    schedulingResultPartition.getId(), schedulingResultPartition);
        }
    }

    void addSchedulingExecutionVertices(List<TestingSchedulingExecutionVertex> vertices) {
        for (TestingSchedulingExecutionVertex vertex : vertices) {
            addSchedulingExecutionVertex(vertex);
        }
    }

    /** Starts a builder that adds several vertices of one job vertex. */
    public SchedulingExecutionVerticesBuilder addExecutionVertices() {
        return new SchedulingExecutionVerticesBuilder();
    }

    /** Creates and registers a vertex with a fresh JobVertexID and subtask index 0. */
    public TestingSchedulingExecutionVertex newExecutionVertex() {
        return newExecutionVertex(new JobVertexID(), 0);
    }

    /** Creates and registers a vertex in the given execution state. */
    public TestingSchedulingExecutionVertex newExecutionVertex(ExecutionState executionState) {
        final TestingSchedulingExecutionVertex newVertex =
                TestingSchedulingExecutionVertex.newBuilder()
                        .withExecutionState(executionState)
                        .build();
        addSchedulingExecutionVertex(newVertex);
        return newVertex;
    }

    /** Creates and registers a vertex for the given job vertex and subtask index. */
    public TestingSchedulingExecutionVertex newExecutionVertex(
            final JobVertexID jobVertexId, final int subtaskIndex) {
        final TestingSchedulingExecutionVertex newVertex =
                TestingSchedulingExecutionVertex.withExecutionVertexID(jobVertexId, subtaskIndex);
        addSchedulingExecutionVertex(newVertex);
        return newVertex;
    }

    /** Connects producer to consumer with a PIPELINED result partition. */
    public TestingSchedulingTopology connect(
            final TestingSchedulingExecutionVertex producer,
            final TestingSchedulingExecutionVertex consumer) {
        return connect(producer, consumer, ResultPartitionType.PIPELINED);
    }

    /** Connects producer to consumer via a new result partition of the given type. */
    public TestingSchedulingTopology connect(
            TestingSchedulingExecutionVertex producer,
            TestingSchedulingExecutionVertex consumer,
            ResultPartitionType resultPartitionType) {
        final TestingSchedulingResultPartition resultPartition =
                new TestingSchedulingResultPartition.Builder()
                        .withResultPartitionType(resultPartitionType)
                        .build();

        resultPartition.addConsumer(consumer);
        resultPartition.setProducer(producer);

        producer.addProducedPartition(resultPartition);
        consumer.addConsumedPartition(resultPartition);

        updateVertexResultPartitions(producer);
        updateVertexResultPartitions(consumer);

        resetPipelinedRegions();

        return this;
    }

    /** Starts a POINTWISE (one-to-one) connection between the two vertex lists. */
    public ProducerConsumerConnectionBuilder connectPointwise(
            final List<TestingSchedulingExecutionVertex> producers,
            final List<TestingSchedulingExecutionVertex> consumers) {
        return new ProducerConsumerPointwiseConnectionBuilder(producers, consumers);
    }

    /** Starts an ALL_TO_ALL connection between the two vertex lists. */
    public ProducerConsumerConnectionBuilder connectAllToAll(
            final List<TestingSchedulingExecutionVertex> producers,
            final List<TestingSchedulingExecutionVertex> consumers) {
        return new ProducerConsumerAllToAllConnectionBuilder(producers, consumers);
    }

    /** Builder for {@link TestingSchedulingResultPartition}. */
    public abstract class ProducerConsumerConnectionBuilder {

        protected final List<TestingSchedulingExecutionVertex> producers;

        protected final List<TestingSchedulingExecutionVertex> consumers;

        protected ResultPartitionType resultPartitionType = ResultPartitionType.BLOCKING;

        protected ResultPartitionState resultPartitionState = ResultPartitionState.CONSUMABLE;

        protected ProducerConsumerConnectionBuilder(
                final List<TestingSchedulingExecutionVertex> producers,
                final List<TestingSchedulingExecutionVertex> consumers) {
            this.producers = producers;
            this.consumers = consumers;
        }

        public ProducerConsumerConnectionBuilder withResultPartitionType(
                final ResultPartitionType resultPartitionType) {
            this.resultPartitionType = resultPartitionType;
            return this;
        }

        public ProducerConsumerConnectionBuilder withResultPartitionState(
                final ResultPartitionState state) {
            this.resultPartitionState = state;
            return this;
        }

        /** Creates the partitions and re-indexes all affected vertices. */
        public List<TestingSchedulingResultPartition> finish() {
            final List<TestingSchedulingResultPartition> resultPartitions = connect();

            // NOTE(review): stream() is unnecessary here; List.forEach would do.
            producers.stream()
                    .forEach(TestingSchedulingTopology.this::updateVertexResultPartitions);
            consumers.stream()
                    .forEach(TestingSchedulingTopology.this::updateVertexResultPartitions);

            return resultPartitions;
        }

        TestingSchedulingResultPartition.Builder initTestingSchedulingResultPartitionBuilder() {
            return new TestingSchedulingResultPartition.Builder()
                    .withResultPartitionType(resultPartitionType);
        }

        // Wires up producers, consumers and the new partitions; returns the partitions.
        protected abstract List<TestingSchedulingResultPartition> connect();
    }

    /**
     * Builder for {@link TestingSchedulingResultPartition} of {@link
     * DistributionPattern#POINTWISE}.
     */
    private class ProducerConsumerPointwiseConnectionBuilder
            extends ProducerConsumerConnectionBuilder {

        private ProducerConsumerPointwiseConnectionBuilder(
                final List<TestingSchedulingExecutionVertex> producers,
                final List<TestingSchedulingExecutionVertex> consumers) {
            super(producers, consumers);
            // currently we only support one to one
            checkState(producers.size() == consumers.size());
        }

        @Override
        protected List<TestingSchedulingResultPartition> connect() {
            final List<TestingSchedulingResultPartition> resultPartitions = new ArrayList<>();
            // All partitions of one connect() call share a single dataset id.
            final IntermediateDataSetID intermediateDataSetId = new IntermediateDataSetID();

            for (int idx = 0; idx < producers.size(); idx++) {
                final TestingSchedulingExecutionVertex producer = producers.get(idx);
                final TestingSchedulingExecutionVertex consumer = consumers.get(idx);

                final TestingSchedulingResultPartition resultPartition =
                        initTestingSchedulingResultPartitionBuilder()
                                .withIntermediateDataSetID(intermediateDataSetId)
                                .withResultPartitionState(resultPartitionState)
                                .build();
                resultPartition.setProducer(producer);
                producer.addProducedPartition(resultPartition);
                consumer.addConsumedPartition(resultPartition);
                resultPartition.addConsumer(consumer);
                resultPartitions.add(resultPartition);
            }

            return resultPartitions;
        }
    }

    /**
     * Builder for {@link TestingSchedulingResultPartition} of {@link
     * DistributionPattern#ALL_TO_ALL}.
     */
    private class ProducerConsumerAllToAllConnectionBuilder
            extends ProducerConsumerConnectionBuilder {

        private ProducerConsumerAllToAllConnectionBuilder(
                final List<TestingSchedulingExecutionVertex> producers,
                final List<TestingSchedulingExecutionVertex> consumers) {
            super(producers, consumers);
        }

        @Override
        protected List<TestingSchedulingResultPartition> connect() {
            final List<TestingSchedulingResultPartition> resultPartitions = new ArrayList<>();
            // All partitions of one connect() call share a single dataset id.
            final IntermediateDataSetID intermediateDataSetId = new IntermediateDataSetID();

            // One partition per producer, consumed by every consumer.
            for (TestingSchedulingExecutionVertex producer : producers) {

                final TestingSchedulingResultPartition resultPartition =
                        initTestingSchedulingResultPartitionBuilder()
                                .withIntermediateDataSetID(intermediateDataSetId)
                                .withResultPartitionState(resultPartitionState)
                                .build();
                resultPartition.setProducer(producer);
                producer.addProducedPartition(resultPartition);

                for (TestingSchedulingExecutionVertex consumer : consumers) {
                    consumer.addConsumedPartition(resultPartition);
                    resultPartition.addConsumer(consumer);
                }
                resultPartitions.add(resultPartition);
            }

            return resultPartitions;
        }
    }

    /** Builder for {@link TestingSchedulingExecutionVertex}. */
    public class SchedulingExecutionVerticesBuilder {

        // All vertices built by this builder belong to the same job vertex.
        private final JobVertexID jobVertexId = new JobVertexID();

        private int parallelism = 1;

        public SchedulingExecutionVerticesBuilder withParallelism(final int parallelism) {
            this.parallelism = parallelism;
            return this;
        }

        /** Builds and registers one vertex per subtask index [0, parallelism). */
        public List<TestingSchedulingExecutionVertex> finish() {
            final List<TestingSchedulingExecutionVertex> vertices = new ArrayList<>();
            for (int subtaskIndex = 0; subtaskIndex < parallelism; subtaskIndex++) {
                vertices.add(createTestingSchedulingExecutionVertex(subtaskIndex));
            }

            TestingSchedulingTopology.this.addSchedulingExecutionVertices(vertices);

            return vertices;
        }

        private TestingSchedulingExecutionVertex createTestingSchedulingExecutionVertex(
                final int subtaskIndex) {
            return TestingSchedulingExecutionVertex.newBuilder()
                    .withExecutionVertexID(jobVertexId, subtaskIndex)
                    .build();
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.schema.types;
import java.math.BigDecimal;
import java.sql.Types;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.schema.SortOrder;
import com.google.common.base.Preconditions;
import com.google.common.primitives.Doubles;
public class PDouble extends PRealNumber<Double> {
// Shared singleton; PDataType implementations hold no per-value state.
public static final PDouble INSTANCE = new PDouble();

private PDouble() {
  // SQL type name, JDBC type code, boxed Java class, serialization codec,
  // and a numeric ordinal (7) whose meaning is defined by PDataType.
  super("DOUBLE", Types.DOUBLE, Double.class, new DoubleCodec(), 7);
}
/**
 * Compares two numeric values. When the right-hand side is a DECIMAL, the
 * comparison is done in BigDecimal space (to avoid precision loss on the
 * BigDecimal side); otherwise both sides are widened to double.
 */
@Override
public int compareTo(Object lhs, Object rhs, PDataType rhsType) {
  if (rhsType == PDecimal.INSTANCE) {
    // Equivalent to negating rhs.compareTo(lhs): BigDecimal.compareTo
    // returns only -1, 0, or 1.
    BigDecimal lhsAsDecimal = BigDecimal.valueOf(((Number) lhs).doubleValue());
    return lhsAsDecimal.compareTo((BigDecimal) rhs);
  }
  return Doubles.compare(((Number) lhs).doubleValue(), ((Number) rhs).doubleValue());
}
/** DOUBLE is a fixed-width type: every value serializes to the same size. */
@Override
public boolean isFixedWidth() {
  return true;
}

/** @return the serialized width in bytes (8, the size of a long/double) */
@Override
public Integer getByteSize() {
  return Bytes.SIZEOF_DOUBLE;
}
/**
 * Returns the decimal scale of the value, or null when the value is null
 * or has no fractional digits (scale 0).
 */
@Override
public Integer getScale(Object o) {
  if (o == null) {
    return null;
  }
  int scale = BigDecimal.valueOf((Double) o).scale();
  return scale == 0 ? null : scale;
}
/**
 * Returns the decimal precision (number of significant digits) of the
 * value, or null for a null value.
 */
@Override
public Integer getMaxLength(Object o) {
  if (o == null) {
    return null;
  }
  return BigDecimal.valueOf((Double) o).precision();
}
/** Serializes the value into a freshly allocated 8-byte array. */
@Override
public byte[] toBytes(Object object) {
  byte[] buffer = new byte[Bytes.SIZEOF_DOUBLE];
  toBytes(object, buffer, 0);
  return buffer;
}
/**
 * Serializes the value into {@code bytes} at {@code offset}; throws (via
 * newIllegalDataException) for a null value since DOUBLE is fixed-width.
 *
 * @return the number of bytes written
 */
@Override
public int toBytes(Object object, byte[] bytes, int offset) {
  if (object == null) {
    throw newIllegalDataException(this + " may not be null");
  }
  double value = ((Number) object).doubleValue();
  return getCodec().encodeDouble(value, bytes, offset);
}
/**
 * Parses a SQL string literal into a Double. A null or empty string yields
 * null; an unparseable string is rethrown via newIllegalDataException.
 */
@Override
public Object toObject(String value) {
  if (value == null || value.isEmpty()) {
    return null;
  }
  try {
    return Double.parseDouble(value);
  } catch (NumberFormatException e) {
    throw newIllegalDataException(e);
  }
}
/**
 * Converts an in-memory value of {@code actualType} to this type's object
 * form (a Double), widening from any numeric type or narrowing from
 * DECIMAL; anything else is rejected via throwConstraintViolationException.
 */
@Override
public Object toObject(Object object, PDataType actualType) {
  if (object == null) {
    return null;
  }
  if (equalsAny(actualType, PDouble.INSTANCE, PUnsignedDouble.INSTANCE)) {
    return object; // already a Double
  }
  if (equalsAny(actualType, PFloat.INSTANCE, PUnsignedFloat.INSTANCE)) {
    return (double) (Float) object;
  }
  if (equalsAny(actualType, PLong.INSTANCE, PUnsignedLong.INSTANCE)) {
    return (double) (Long) object;
  }
  if (equalsAny(actualType, PInteger.INSTANCE, PUnsignedInt.INSTANCE)) {
    return (double) (Integer) object;
  }
  if (equalsAny(actualType, PTinyint.INSTANCE, PUnsignedTinyint.INSTANCE)) {
    return (double) (Byte) object;
  }
  if (equalsAny(actualType, PSmallint.INSTANCE, PUnsignedSmallint.INSTANCE)) {
    return (double) (Short) object;
  }
  if (actualType == PDecimal.INSTANCE) {
    return ((BigDecimal) object).doubleValue();
  }
  return throwConstraintViolationException(actualType, this);
}
/**
 * Decodes a Double from serialized bytes of any numeric type (via that
 * type's own codec) or from a serialized DECIMAL (decoded then narrowed
 * with doubleValue()). Returns null for a non-positive length.
 */
@Override
public Double toObject(byte[] b, int o, int l, PDataType actualType,
    SortOrder sortOrder, Integer maxLength, Integer scale) {
  if (l <= 0) {
    return null;
  }
  if (equalsAny(actualType, PDouble.INSTANCE, PUnsignedDouble.INSTANCE, PFloat.INSTANCE,
      PUnsignedFloat.INSTANCE, PLong.INSTANCE, PUnsignedLong.INSTANCE, PInteger.INSTANCE,
      PUnsignedInt.INSTANCE, PSmallint.INSTANCE, PUnsignedSmallint.INSTANCE, PTinyint.INSTANCE,
      PUnsignedTinyint.INSTANCE)) {
    // Each source type's codec knows how to widen its own encoding to double.
    return actualType.getCodec().decodeDouble(b, o, sortOrder);
  } else if (actualType == PDecimal.INSTANCE) {
    BigDecimal bd = (BigDecimal) actualType.toObject(b, o, l, actualType, sortOrder);
    return bd.doubleValue();
  }
  throwConstraintViolationException(actualType, this);
  return null; // presumably unreachable: the call above is expected to throw
}
/**
 * Value-sensitive coercibility check: a concrete double may be coerced to a
 * narrower numeric type only when it lies inside that type's range (and is
 * non-negative for the unsigned types). When no value is supplied, or the
 * target is not one of the listed types, defers to the superclass rule.
 */
@Override
public boolean isCoercibleTo(PDataType targetType, Object value) {
  if (value != null) {
    double d = (Double) value;
    if (targetType.equals(PUnsignedDouble.INSTANCE)) {
      return d >= 0;
    } else if (targetType.equals(PFloat.INSTANCE)) {
      // NaN and the infinities are representable as float, so they pass.
      return Double.isNaN(d)
          || d == Double.POSITIVE_INFINITY
          || d == Double.NEGATIVE_INFINITY
          || (d >= -Float.MAX_VALUE && d <= Float.MAX_VALUE);
    } else if (targetType.equals(PUnsignedFloat.INSTANCE)) {
      return Double.isNaN(d) || d == Double.POSITIVE_INFINITY
          || (d >= 0 && d <= Float.MAX_VALUE);
    } else if (targetType.equals(PUnsignedLong.INSTANCE)) {
      return (d >= 0 && d <= Long.MAX_VALUE);
    } else if (targetType.equals(PLong.INSTANCE)) {
      return (d >= Long.MIN_VALUE && d <= Long.MAX_VALUE);
    } else if (targetType.equals(PUnsignedInt.INSTANCE)) {
      return (d >= 0 && d <= Integer.MAX_VALUE);
    } else if (targetType.equals(PInteger.INSTANCE)) {
      return (d >= Integer.MIN_VALUE && d <= Integer.MAX_VALUE);
    } else if (targetType.equals(PUnsignedSmallint.INSTANCE)) {
      return (d >= 0 && d <= Short.MAX_VALUE);
    } else if (targetType.equals(PSmallint.INSTANCE)) {
      return (d >= Short.MIN_VALUE && d <= Short.MAX_VALUE);
    } else if (targetType.equals(PTinyint.INSTANCE)) {
      return (d >= Byte.MIN_VALUE && d <= Byte.MAX_VALUE);
    } else if (targetType.equals(PUnsignedTinyint.INSTANCE)) {
      return (d >= 0 && d <= Byte.MAX_VALUE);
    }
  }
  return super.isCoercibleTo(targetType, value);
}
/** Without a value, DOUBLE coerces only to itself, DECIMAL, or the binary types. */
@Override
public boolean isCoercibleTo(PDataType targetType) {
  return equalsAny(targetType, this, PDecimal.INSTANCE, PVarbinary.INSTANCE, PBinary.INSTANCE);
}

/** @return a random double in [0, 1) for test-data generation; both lengths are ignored */
@Override
public Object getSampleValue(Integer maxLength, Integer arrayLength) {
  return RANDOM.get().nextDouble();
}
static class DoubleCodec extends BaseCodec {
// The decodeXxx narrowing methods all decode the full 8-byte double first
// and then reject values outside the target integral range.
// NOTE(review): a decoded NaN satisfies neither range comparison, so it
// falls through and is cast (yielding 0) rather than rejected -- confirm
// whether NaN can actually occur in stored data here.

/** Decodes as double and narrows to long, rejecting out-of-range values. */
@Override
public long decodeLong(byte[] b, int o, SortOrder sortOrder) {
  double v = decodeDouble(b, o, sortOrder);
  if (v < Long.MIN_VALUE || v > Long.MAX_VALUE) {
    throw newIllegalDataException(
        "Value " + v + " cannot be cast to Long without changing its value");
  }
  return (long) v;
}

/** Decodes as double and narrows to int, rejecting out-of-range values. */
@Override
public int decodeInt(byte[] b, int o, SortOrder sortOrder) {
  double v = decodeDouble(b, o, sortOrder);
  if (v < Integer.MIN_VALUE || v > Integer.MAX_VALUE) {
    throw newIllegalDataException(
        "Value " + v + " cannot be cast to Integer without changing its value");
  }
  return (int) v;
}

/** Decodes as double and narrows to byte, rejecting out-of-range values. */
@Override
public byte decodeByte(byte[] b, int o, SortOrder sortOrder) {
  double v = decodeDouble(b, o, sortOrder);
  if (v < Byte.MIN_VALUE || v > Byte.MAX_VALUE) {
    throw newIllegalDataException(
        "Value " + v + " cannot be cast to Byte without changing its value");
  }
  return (byte) v;
}

/** Decodes as double and narrows to short, rejecting out-of-range values. */
@Override
public short decodeShort(byte[] b, int o, SortOrder sortOrder) {
  double v = decodeDouble(b, o, sortOrder);
  if (v < Short.MIN_VALUE || v > Short.MAX_VALUE) {
    throw newIllegalDataException(
        "Value " + v + " cannot be cast to Short without changing its value");
  }
  return (short) v;
}
@Override
public double decodeDouble(byte[] bytes, int o, SortOrder sortOrder) {
Preconditions.checkNotNull(sortOrder);
checkForSufficientLength(bytes, o, Bytes.SIZEOF_LONG);
long l;
if (sortOrder == SortOrder.DESC) {
// Copied from Bytes.toLong(), but without using the toLongUnsafe
// TODO: would it be possible to use the toLongUnsafe?
l = 0;
for(int i = o; i < o + Bytes.SIZEOF_LONG; i++) {
l <<= 8;
l ^= (bytes[i] ^ 0xff) & 0xFF;
}
} else {
l = Bytes.toLong(bytes, o);
}
l--;
l ^= (~l >> Long.SIZE - 1) | Long.MIN_VALUE;
return Double.longBitsToDouble(l);
}
@Override
public float decodeFloat(byte[] b, int o, SortOrder sortOrder) {
double v = decodeDouble(b, o, sortOrder);
if (Double.isNaN(v) || v == Double.NEGATIVE_INFINITY
|| v == Double.POSITIVE_INFINITY
|| (v >= -Float.MAX_VALUE && v <= Float.MAX_VALUE)) {
return (float) v;
} else {
throw newIllegalDataException(
"Value " + v + " cannot be cast to Float without changing its value");
}
}
@Override
public int encodeShort(short v, byte[] b, int o) {
return encodeDouble(v, b, o);
}
@Override
public int encodeLong(long v, byte[] b, int o) {
return encodeDouble(v, b, o);
}
@Override
public int encodeInt(int v, byte[] b, int o) {
return encodeDouble(v, b, o);
}
@Override
public int encodeByte(byte v, byte[] b, int o) {
return encodeDouble(v, b, o);
}
@Override
public int encodeDouble(double v, byte[] b, int o) {
checkForSufficientLength(b, o, Bytes.SIZEOF_LONG);
long l = Double.doubleToLongBits(v);
l = (l ^ ((l >> Long.SIZE - 1) | Long.MIN_VALUE)) + 1;
Bytes.putLong(b, o, l);
return Bytes.SIZEOF_LONG;
}
@Override
public int encodeFloat(float v, byte[] b, int o) {
return encodeDouble(v, b, o);
}
@Override
public PhoenixArrayFactory getPhoenixArrayFactory() {
return new PhoenixArrayFactory() {
@Override
public PhoenixArray newArray(PDataType type, Object[] elements) {
return new PhoenixArray.PrimitiveDoublePhoenixArray(type, elements);
}
};
}
}
}
| |
/* Generated file, do not modify. See jython/src/templates/gderived.py. */
package org.python.antlr.ast;
import java.io.Serializable;
import org.python.core.*;
import org.python.core.finalization.FinalizeTrigger;
import org.python.core.finalization.FinalizablePyObjectDerived;
public class ListDerived extends List implements Slotted,FinalizablePyObjectDerived,TraverseprocDerived {
public PyObject getSlot(int index) {
return slots[index];
}
public void setSlot(int index,PyObject value) {
slots[index]=value;
}
private PyObject[]slots;
public void __del_derived__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__del__");
if (impl!=null) {
impl.__get__(this,self_type).__call__();
}
}
public void __ensure_finalizer__() {
FinalizeTrigger.ensureFinalizer(this);
}
/* TraverseprocDerived implementation */
// GC traversal of the references held by the derived part of this object:
// every non-null __slots__ entry, then the type object, then (via
// traverseDictIfAny) the instance __dict__. Per the traverseproc contract,
// traversal stops and propagates as soon as the visitor returns non-zero.
public int traverseDerived(Visitproc visit,Object arg) {
int retVal;
for(int i=0;i<slots.length;++i) {
if (slots[i]!=null) {
retVal=visit.visit(slots[i],arg);
if (retVal!=0) {
return retVal;
}
}
}
retVal=visit.visit(objtype,arg);
return retVal!=0?retVal:traverseDictIfAny(visit,arg);
}
/* end of TraverseprocDerived implementation */
private PyObject dict;
public PyObject fastGetDict() {
return dict;
}
public PyObject getDict() {
return dict;
}
// Replaces the instance __dict__. Only PyStringMap or PyDictionary are
// accepted, mirroring CPython's requirement that __dict__ be an actual dict.
// If the new dict supplies a __del__ and no finalize trigger is attached to
// this instance yet, one is installed so the finalizer will actually run.
public void setDict(PyObject newDict) {
if (newDict instanceof PyStringMap||newDict instanceof PyDictionary) {
dict=newDict;
if (dict.__finditem__(PyString.fromInterned("__del__"))!=null&&!JyAttribute.hasAttr(this,JyAttribute.FINALIZE_TRIGGER_ATTR)) {
FinalizeTrigger.ensureFinalizer(this);
}
} else {
throw Py.TypeError("__dict__ must be set to a Dictionary "+newDict.getClass().getName());
}
}
public void delDict() {
// deleting an object's instance dict makes it grow a new one
dict=new PyStringMap();
}
// Constructs an instance of a Python subclass of List: allocates the
// __slots__ storage sized by the subtype, installs the instance dict
// (presumably null when the type uses only slots -- confirm instDict()
// contract), and registers a finalize trigger when the type defines __del__.
public ListDerived(PyType subtype) {
super(subtype);
// One slot per name declared via __slots__ anywhere in the subtype's MRO.
slots=new PyObject[subtype.getNumSlots()];
dict=subtype.instDict();
if (subtype.needsFinalizer()) {
// Types with __del__ need a per-instance finalization trigger.
FinalizeTrigger.ensureFinalizer(this);
}
}
public int traverseDictIfAny(Visitproc visit,Object arg) {
return visit.visit(dict,arg);
}
public PyString __str__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__str__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__str__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__str__();
}
public PyString __repr__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__repr__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__repr__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__repr__();
}
public PyString __hex__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__hex__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__hex__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__hex__();
}
public PyString __oct__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__oct__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyString)
return(PyString)res;
throw Py.TypeError("__oct__"+" returned non-"+"string"+" (type "+res.getType().fastGetName()+")");
}
return super.__oct__();
}
public PyFloat __float__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__float__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyFloat)
return(PyFloat)res;
throw Py.TypeError("__float__"+" returned non-"+"float"+" (type "+res.getType().fastGetName()+")");
}
return super.__float__();
}
public PyComplex __complex__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__complex__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyComplex)
return(PyComplex)res;
throw Py.TypeError("__complex__"+" returned non-"+"complex"+" (type "+res.getType().fastGetName()+")");
}
return super.__complex__();
}
public PyObject __pos__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__pos__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__pos__();
}
public PyObject __neg__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__neg__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__neg__();
}
public PyObject __abs__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__abs__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__abs__();
}
public PyObject __invert__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__invert__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__invert__();
}
public PyObject __reduce__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__reduce__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__reduce__();
}
public PyObject __dir__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__dir__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
return super.__dir__();
}
public PyObject __add__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__add__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__add__(other);
}
public PyObject __radd__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__radd__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__radd__(other);
}
public PyObject __sub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__sub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__sub__(other);
}
public PyObject __rsub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rsub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rsub__(other);
}
public PyObject __mul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__mul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__mul__(other);
}
public PyObject __rmul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rmul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rmul__(other);
}
public PyObject __div__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__div__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__div__(other);
}
public PyObject __rdiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rdiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rdiv__(other);
}
public PyObject __floordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__floordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__floordiv__(other);
}
public PyObject __rfloordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rfloordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rfloordiv__(other);
}
public PyObject __truediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__truediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__truediv__(other);
}
public PyObject __rtruediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rtruediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rtruediv__(other);
}
public PyObject __mod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__mod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__mod__(other);
}
public PyObject __rmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rmod__(other);
}
public PyObject __divmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__divmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__divmod__(other);
}
public PyObject __rdivmod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rdivmod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rdivmod__(other);
}
public PyObject __rpow__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rpow__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rpow__(other);
}
public PyObject __lshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__lshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__lshift__(other);
}
public PyObject __rlshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rlshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rlshift__(other);
}
public PyObject __rshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rshift__(other);
}
public PyObject __rrshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rrshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rrshift__(other);
}
public PyObject __and__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__and__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__and__(other);
}
public PyObject __rand__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rand__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rand__(other);
}
public PyObject __or__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__or__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__or__(other);
}
public PyObject __ror__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ror__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ror__(other);
}
public PyObject __xor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__xor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__xor__(other);
}
public PyObject __rxor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__rxor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__rxor__(other);
}
public PyObject __lt__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__lt__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__lt__(other);
}
public PyObject __le__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__le__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__le__(other);
}
public PyObject __gt__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__gt__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__gt__(other);
}
public PyObject __ge__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ge__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ge__(other);
}
public PyObject __eq__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__eq__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__eq__(other);
}
public PyObject __ne__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ne__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ne__(other);
}
public PyObject __format__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__format__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__format__(other);
}
public PyObject __iadd__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iadd__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__iadd__(other);
}
public PyObject __isub__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__isub__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__isub__(other);
}
public PyObject __imul__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__imul__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__imul__(other);
}
public PyObject __idiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__idiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__idiv__(other);
}
public PyObject __ifloordiv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ifloordiv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ifloordiv__(other);
}
public PyObject __itruediv__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__itruediv__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__itruediv__(other);
}
public PyObject __imod__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__imod__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__imod__(other);
}
public PyObject __ipow__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ipow__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ipow__(other);
}
public PyObject __ilshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ilshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ilshift__(other);
}
public PyObject __irshift__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__irshift__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__irshift__(other);
}
public PyObject __iand__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iand__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__iand__(other);
}
public PyObject __ior__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ior__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ior__(other);
}
public PyObject __ixor__(PyObject other) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__ixor__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__ixor__(other);
}
public PyObject __int__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__int__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger||res instanceof PyLong)
return res;
throw Py.TypeError("__int__"+" should return an integer");
}
return super.__int__();
}
public PyObject __long__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__long__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyLong||res instanceof PyInteger)
return res;
throw Py.TypeError("__long__"+" returned non-"+"long"+" (type "+res.getType().fastGetName()+")");
}
return super.__long__();
}
// Delegates hashing to a Python-level __hash__ when the type defines one;
// the result may be an int or a long (truncated to its low 32 bits, since
// Java hashCode is 32-bit). Types that define __eq__ or __cmp__ without
// __hash__ are unhashable, mirroring CPython semantics.
public int hashCode() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__hash__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger) {
return((PyInteger)res).getValue();
} else
if (res instanceof PyLong) {
// Truncation to 32 bits is intentional.
return((PyLong)res).getValue().intValue();
}
throw Py.TypeError("__hash__ should return a int");
}
if (self_type.lookup("__eq__")!=null||self_type.lookup("__cmp__")!=null) {
throw Py.TypeError(String.format("unhashable type: '%.200s'",getType().fastGetName()));
}
return super.hashCode();
}
public PyUnicode __unicode__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__unicode__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyUnicode)
return(PyUnicode)res;
if (res instanceof PyString)
return new PyUnicode((PyString)res);
throw Py.TypeError("__unicode__"+" should return a "+"unicode");
}
return super.__unicode__();
}
// Three-way comparison. Returns -1/0/1, or -2 for "not comparable"
// (NotImplemented), matching the internal compare-slot convention.
public int __cmp__(PyObject other) {
PyType self_type=getType();
PyObject[]where_type=new PyObject[1];
PyObject impl=self_type.lookup_where("__cmp__",where_type);
// Full compatibility with CPython __cmp__:
// If the derived type doesn't override __cmp__, the
// *internal* super().__cmp__ should be called, not the
// exposed one. The difference is that the exposed __cmp__
// throws a TypeError if the argument is an instance of the same type.
if (impl==null||where_type[0]==TYPE||Py.isSubClass(TYPE,where_type[0])) {
return super.__cmp__(other);
}
PyObject res=impl.__get__(this,self_type).__call__(other);
if (res==Py.NotImplemented) {
return-2;
}
// Normalize whatever integer the Python __cmp__ returned to -1/0/1.
int c=res.asInt();
return c<0?-1:c>0?1:0;
}
// Truth-value test: prefers a Python-level __nonzero__, falls back to
// __len__ (non-zero length means true), then to the built-in List behavior.
// As in CPython, the returned object must be a bool or an int.
public boolean __nonzero__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__nonzero__");
if (impl==null) {
impl=self_type.lookup("__len__");
if (impl==null)
return super.__nonzero__();
}
PyObject o=impl.__get__(this,self_type).__call__();
Class c=o.getClass();
if (c!=PyInteger.class&&c!=PyBoolean.class) {
// NOTE(review): the message interpolates the name of self's type rather
// than the type of the returned object -- looks like a template bug in
// gderived.py; confirm and fix in the generator, not here.
throw Py.TypeError(String.format("__nonzero__ should return bool or int, returned %s",self_type.getName()));
}
return o.__nonzero__();
}
public boolean __contains__(PyObject o) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__contains__");
if (impl==null)
return super.__contains__(o);
return impl.__get__(this,self_type).__call__(o).__nonzero__();
}
public int __len__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__len__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
return res.asInt();
}
return super.__len__();
}
public PyObject __iter__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__iter__");
if (impl!=null)
return impl.__get__(this,self_type).__call__();
impl=self_type.lookup("__getitem__");
if (impl==null)
return super.__iter__();
return new PySequenceIter(this);
}
public PyObject __iternext__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("next");
if (impl!=null) {
try {
return impl.__get__(this,self_type).__call__();
} catch (PyException exc) {
if (exc.match(Py.StopIteration))
return null;
throw exc;
}
}
return super.__iternext__(); // ???
}
public PyObject __finditem__(PyObject key) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
try {
return impl.__get__(this,self_type).__call__(key);
} catch (PyException exc) {
if (exc.match(Py.LookupError))
return null;
throw exc;
}
return super.__finditem__(key);
}
public PyObject __finditem__(int key) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
try {
return impl.__get__(this,self_type).__call__(new PyInteger(key));
} catch (PyException exc) {
if (exc.match(Py.LookupError))
return null;
throw exc;
}
return super.__finditem__(key);
}
public PyObject __getitem__(PyObject key) {
// Same as __finditem__, without swallowing LookupErrors. This allows
// __getitem__ implementations written in Python to raise custom
// exceptions (such as subclasses of KeyError).
//
// We are forced to duplicate the code, instead of defining __finditem__
// in terms of __getitem__. That's because PyObject defines __getitem__
// in terms of __finditem__. Therefore, we would end with an infinite
// loop when self_type.lookup("__getitem__") returns null:
//
// __getitem__ -> super.__getitem__ -> __finditem__ -> __getitem__
//
// By duplicating the (short) lookup and call code, we are safe, because
// the call chains will be:
//
// __finditem__ -> super.__finditem__
//
// __getitem__ -> super.__getitem__ -> __finditem__ -> super.__finditem__
PyType self_type=getType();
PyObject impl=self_type.lookup("__getitem__");
if (impl!=null)
return impl.__get__(this,self_type).__call__(key);
return super.__getitem__(key);
}
public void __setitem__(PyObject key,PyObject value) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__setitem__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(key,value);
return;
}
super.__setitem__(key,value);
}
public PyObject __getslice__(PyObject start,PyObject stop,PyObject step) { // ???
if (step!=null) {
return __getitem__(new PySlice(start,stop,step));
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__getslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
return impl.__get__(this,self_type).__call__(indices[0],indices[1]);
}
return super.__getslice__(start,stop,step);
}
public void __setslice__(PyObject start,PyObject stop,PyObject step,PyObject value) {
if (step!=null) {
__setitem__(new PySlice(start,stop,step),value);
return;
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__setslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
impl.__get__(this,self_type).__call__(indices[0],indices[1],value);
return;
}
super.__setslice__(start,stop,step,value);
}
public void __delslice__(PyObject start,PyObject stop,PyObject step) {
if (step!=null) {
__delitem__(new PySlice(start,stop,step));
return;
}
PyType self_type=getType();
PyObject impl=self_type.lookup("__delslice__");
if (impl!=null) {
PyObject[]indices=PySlice.indices2(this,start,stop);
impl.__get__(this,self_type).__call__(indices[0],indices[1]);
return;
}
super.__delslice__(start,stop,step);
}
public void __delitem__(PyObject key) { // ???
PyType self_type=getType();
PyObject impl=self_type.lookup("__delitem__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(key);
return;
}
super.__delitem__(key);
}
public PyObject __call__(PyObject args[],String keywords[]) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__call__");
if (impl!=null) {
return impl.__get__(this,self_type).__call__(args,keywords);
}
return super.__call__(args,keywords);
}
public PyObject __findattr_ex__(String name) {
return Deriveds.__findattr_ex__(this,name);
}
public void __setattr__(String name,PyObject value) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__setattr__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(PyString.fromInterned(name),value);
//CPython does not support instance-acquired finalizers.
//So we don't check for __del__ here.
return;
}
super.__setattr__(name,value);
}
public void __delattr__(String name) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__delattr__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(PyString.fromInterned(name));
return;
}
super.__delattr__(name);
}
public PyObject __get__(PyObject obj,PyObject type) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__get__");
if (impl!=null) {
if (obj==null)
obj=Py.None;
if (type==null)
type=Py.None;
return impl.__get__(this,self_type).__call__(obj,type);
}
return super.__get__(obj,type);
}
public void __set__(PyObject obj,PyObject value) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__set__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(obj,value);
return;
}
super.__set__(obj,value);
}
public void __delete__(PyObject obj) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__delete__");
if (impl!=null) {
impl.__get__(this,self_type).__call__(obj);
return;
}
super.__delete__(obj);
}
public PyObject __pow__(PyObject other,PyObject modulo) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__pow__");
if (impl!=null) {
PyObject res;
if (modulo==null) {
res=impl.__get__(this,self_type).__call__(other);
} else {
res=impl.__get__(this,self_type).__call__(other,modulo);
}
if (res==Py.NotImplemented)
return null;
return res;
}
return super.__pow__(other,modulo);
}
public void dispatch__init__(PyObject[]args,String[]keywords) {
Deriveds.dispatch__init__(this,args,keywords);
}
public PyObject __index__() {
PyType self_type=getType();
PyObject impl=self_type.lookup("__index__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__();
if (res instanceof PyInteger||res instanceof PyLong) {
return res;
}
throw Py.TypeError(String.format("__index__ returned non-(int,long) (type %s)",res.getType().fastGetName()));
}
return super.__index__();
}
// Converts this object to a Java object of class c, optionally delegating
// to a Python-level __tojava__ definition.
public Object __tojava__(Class c) {
// If we are not being asked by the "default" conversion to java, then
// we can provide this as the result, as long as it is an instance of the
// specified class. Without this, derived.__tojava__(PyObject.class)
// would break. (And that's not pure speculation: PyReflectedFunction's
// ReflectedArgs asks for things like that).
if ((c!=Object.class)&&(c!=Serializable.class)&&(c.isInstance(this))) {
return this;
}
// Otherwise, we call the derived __tojava__, if it exists:
PyType self_type=getType();
PyObject impl=self_type.lookup("__tojava__");
if (impl!=null) {
PyObject delegate=impl.__get__(this,self_type).__call__(Py.java2py(c));
if (delegate!=this)
// Re-enter conversion on the delegate with the permissive Object target.
return delegate.__tojava__(Object.class);
}
return super.__tojava__(c);
}
public Object __coerce_ex__(PyObject o) {
PyType self_type=getType();
PyObject impl=self_type.lookup("__coerce__");
if (impl!=null) {
PyObject res=impl.__get__(this,self_type).__call__(o);
if (res==Py.NotImplemented)
return Py.None;
if (!(res instanceof PyTuple))
throw Py.TypeError("__coerce__ didn't return a 2-tuple");
return((PyTuple)res).getArray();
}
return super.__coerce_ex__(o);
}
/**
 * Uses a Python-level {@code __repr__} defined on this object's type for the Java
 * string form, enforcing that it returns a string, or falls back to the default.
 *
 * @return this object's string representation
 */
public String toString() {
    PyType self_type=getType();
    PyObject impl=self_type.lookup("__repr__");
    if (impl==null) {
        // no Python override - use the built-in representation
        return super.toString();
    }
    PyObject res=impl.__get__(this,self_type).__call__();
    if (res instanceof PyString) {
        return((PyString)res).toString();
    }
    // mirror CPython: __repr__ must produce a string
    throw Py.TypeError("__repr__ returned non-string (type "+res.getType().fastGetName()+")");
}
}
| |
/*
* Copyright 2014-2021 Lukas Krejci
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.revapi.java;
import java.io.StringWriter;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.function.Consumer;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import org.revapi.API;
import org.revapi.ApiAnalyzer;
import org.revapi.ArchiveAnalyzer;
import org.revapi.ElementForest;
import org.revapi.TreeFilter;
import org.revapi.java.compilation.CompilationFuture;
import org.revapi.java.compilation.CompilationValve;
import org.revapi.java.compilation.Compiler;
import org.revapi.java.compilation.ProbingEnvironment;
import org.revapi.java.model.JavaElementForest;
import org.revapi.java.model.TypeElement;
import org.revapi.java.spi.JarExtractor;
import org.revapi.java.spi.JavaElement;
import org.revapi.java.spi.UseSite;
/**
* @author Lukas Krejci
*
* @since 0.1
*/
public final class JavaArchiveAnalyzer implements ArchiveAnalyzer<JavaElement> {
    private final JavaApiAnalyzer apiAnalyzer;
    private final API api;
    // executor on which the javac-based probing compilation runs
    private final ExecutorService executor;
    // environment that the probing compilation gradually fills with discovered elements
    private final ProbingEnvironment probingEnvironment;
    private final AnalysisConfiguration.MissingClassReporting missingClassReporting;
    private final boolean ignoreMissingAnnotations;
    private final Iterable<JarExtractor> jarExtractors;
    // handle for shutting down the (possibly still running) compilation; set by analyze()
    private CompilationValve compilationValve;
    /**
     * @deprecated only to support the obsolete package and class filtering
     */
    @Deprecated
    private final @Nullable TreeFilter<JavaElement> implicitFilter;
    public JavaArchiveAnalyzer(JavaApiAnalyzer apiAnalyzer, API api, Iterable<JarExtractor> jarExtractors,
            ExecutorService compilationExecutor, AnalysisConfiguration.MissingClassReporting missingClassReporting,
            boolean ignoreMissingAnnotations, @Nullable TreeFilter<JavaElement> implicitFilter) {
        this.apiAnalyzer = apiAnalyzer;
        this.api = api;
        this.jarExtractors = jarExtractors;
        this.executor = compilationExecutor;
        this.missingClassReporting = missingClassReporting;
        this.ignoreMissingAnnotations = ignoreMissingAnnotations;
        this.probingEnvironment = new ProbingEnvironment(api);
        this.implicitFilter = implicitFilter;
    }
    @Override
    public ApiAnalyzer<JavaElement> getApiAnalyzer() {
        return apiAnalyzer;
    }
    @Override
    public API getApi() {
        return api;
    }
    /**
     * Kicks off the probing compilation of the API's archives and returns the preliminary element forest.
     * The forest completes lazily: its CompilationFuture blocks on the compilation started here.
     * The deprecated implicit filter, when present, is intersected with the caller's filter.
     */
    @Nonnull
    @Override
    public JavaElementForest analyze(TreeFilter<JavaElement> filter) {
        if (Timing.LOG.isDebugEnabled()) {
            Timing.LOG.debug("Starting analysis of " + api);
        }
        TreeFilter<JavaElement> finalFilter = implicitFilter == null ? filter
                : TreeFilter.intersection(filter, implicitFilter);
        // the compiler reports its diagnostics into this writer; the text surfaces through CompilationFuture
        StringWriter output = new StringWriter();
        Compiler compiler = new Compiler(executor, output, jarExtractors, api.getArchives(),
                api.getSupplementaryArchives(), finalFilter);
        try {
            compilationValve = compiler.compile(probingEnvironment, missingClassReporting, ignoreMissingAnnotations);
            probingEnvironment.getTree().setCompilationFuture(new CompilationFuture(compilationValve, output));
            if (Timing.LOG.isDebugEnabled()) {
                Timing.LOG.debug("Preliminary API tree produced for " + api);
            }
            return probingEnvironment.getTree();
        } catch (Exception e) {
            throw new IllegalStateException("Failed to analyze archives in api " + api, e);
        }
    }
    /**
     * Prunes types from the forest that are no longer used by anything in the API and then
     * re-initializes the use-site references of the surviving types.
     */
    @Override
    public void prune(ElementForest<JavaElement> forest) {
        if (!(forest instanceof JavaElementForest)) {
            return;
        }
        doPrune(forest);
        forest.stream(TypeElement.class, true, null).forEach(TypeElement::initReferences);
    }
    public ProbingEnvironment getProbingEnvironment() {
        return probingEnvironment;
    }
    public CompilationValve getCompilationValve() {
        return compilationValve;
    }
    // Iteratively removes types that are only in the API through use-sites that no longer exist.
    // Repeats until a fixed point because removing one type can orphan the use sites of another.
    private void doPrune(ElementForest<JavaElement> forest) {
        boolean changed;
        Set<TypeElement> toRemove = new HashSet<>();
        do {
            Iterator<TypeElement> it = forest.stream(TypeElement.class, true, null).iterator();
            toRemove.clear();
            while (it.hasNext()) {
                TypeElement type = it.next();
                boolean remove = true;
                Iterator<UseSite> usit = type.getUseSites().iterator();
                while (usit.hasNext()) {
                    UseSite useSite = usit.next();
                    JavaElement usingElement = useSite.getElement();
                    if (isInForest(forest, usingElement)) {
                        // a live use site that drags the type into the API keeps the type alive
                        if (useSite.isMovingToApi()) {
                            remove = false;
                        }
                    } else {
                        // the using element was already pruned away - drop the stale use site
                        usit.remove();
                    }
                }
                // keep the types that are in the API because they are, not just because something dragged them into it.
                if (isInApi(type)) {
                    continue;
                }
                if (remove) {
                    toRemove.add(type);
                }
            }
            changed = !toRemove.isEmpty();
            for (TypeElement t : toRemove) {
                // the inner classes of the removed type might be used, so we can't just remove them from the tree
                // classpath scanner just puts grandchild classes under a parent if the child is excluded from the tree
                // so we'll do the same here... add all child types of the removed element to the children of the
                // parent of the removed element
                Consumer<JavaElement> readd;
                if (t.getParent() == null) {
                    forest.getRoots().remove(t);
                    readd = c -> {
                        forest.getRoots().add(c);
                        c.setParent(null);
                    };
                } else {
                    JavaElement parent = t.getParent();
                    parent.getChildren().remove(t);
                    readd = c -> parent.getChildren().add(c);
                }
                t.getChildren().stream().filter(c -> c instanceof TypeElement).forEach(readd);
                // scrub the removed type out of the use/used-by bookkeeping of the types it referenced
                t.getUsedTypes().entrySet().removeIf(e -> {
                    UseSite.Type useType = e.getKey();
                    e.getValue().entrySet().removeIf(e2 -> {
                        TypeElement usedType = e2.getKey();
                        usedType.getUseSites().removeIf(us -> {
                            // noinspection SuspiciousMethodCalls
                            return us.getType() == useType && e2.getValue().contains(us.getElement());
                        });
                        return usedType.getUseSites().isEmpty();
                    });
                    return e.getValue().isEmpty();
                });
            }
        } while (changed);
        // now go through all types again and modify their API status if they no longer are used
        forest.stream(TypeElement.class, true, null).forEach(type -> {
            if (!type.isInApiThroughUse()) {
                return;
            }
            boolean stillInApi = type.getUseSites().stream().anyMatch(UseSite::isMovingToApi);
            if (!stillInApi) {
                type.setInApi(false);
                type.setInApiThroughUse(false);
            }
        });
    }
    // True when the element's top-most ancestor is one of the forest roots, i.e. the element is still in the tree.
    private static boolean isInForest(ElementForest<JavaElement> forest, JavaElement element) {
        JavaElement parent = element.getParent();
        while (parent != null) {
            element = parent;
            parent = parent.getParent();
        }
        return forest.getRoots().contains(element);
    }
    // True when the type, or any enclosing type, is in the API in its own right (not merely dragged in by use).
    private static boolean isInApi(TypeElement element) {
        while (element != null) {
            if (element.isInAPI() && !element.isInApiThroughUse()) {
                return true;
            }
            if (element.getParent() instanceof TypeElement) {
                element = (TypeElement) element.getParent();
            } else {
                element = null;
            }
        }
        return false;
    }
}
| |
package org.fax4j.spi.vbs;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.io.Reader;
import java.text.MessageFormat;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import org.fax4j.FaxException;
import org.fax4j.FaxJob;
import org.fax4j.FaxJob.FaxJobPriority;
import org.fax4j.FaxJobStatus;
import org.fax4j.common.FaxActionType;
import org.fax4j.common.Logger;
import org.fax4j.spi.AbstractFax4JClientSpi;
import org.fax4j.spi.process.ProcessOutputHandler;
import org.fax4j.spi.process.ProcessOutputValidator;
import org.fax4j.util.IOHelper;
import org.fax4j.util.ProcessExecutorHelper;
import org.fax4j.util.ProcessExecutorHelper.ProcessOutput;
/**
* This class implements the fax client service provider interface.<br>
* This implementation will proxy the requests to VB scripts that will invoke the windows fax API.<br>
 * <b>It is highly recommended that the windows SPI be used instead of this SPI.</b><br>
* It is possible to see all faxes submitted from this SPI in the windows fax window available from the
* Start→Settings→Printers and Faxes→Fax<br>
* The executable used to invoke the VBS is defaulted to cscript.
* It is possible to set a different executable by setting the <b>org.fax4j.spi.vbs.exe.path property</b>.<br>
* By default the windows 2000 vbs API will be used on all platforms.<br>
 * In order for other platforms (other than windows 2000) to invoke the windows XP API,
* the <b>org.fax4j.spi.vbs.always.use.win2000.api</b> property must be set to false.<br>
* <br>
* The configuration of the fax4j framework is made up of 3 layers.<br>
* The configuration is based on simple properties.<br>
* Each layer overrides the lower layers by adding/changing the property values.<br>
* The first layer is the internal fax4j.properties file located in the fax4j jar.<br>
* This layer contains the preconfigured values for the fax4j framework and can be changed
* by updating these properties in the higher layers.<br>
* The second layer is the external fax4j.properties file that is located on the classpath.<br>
* This file is optional and provides the ability to override the internal configuration for the
* entire fax4j framework.<br>
* The top most layer is the optional java.util.Properties object provided by the external classes
* when creating a new fax client.<br>
* These properties enable to override the configuration of the lower 2 layers.<br>
* <br>
* <b>SPI Status (Draft, Beta, Stable): </b>Stable<br>
* <br>
* Below table describes the configuration values relevant for this class.<br>
* <b>Configuration:</b>
* <table summary="" border="1">
* <tr>
* <td>Name</td>
* <td>Description</td>
* <td>Preconfigured Value</td>
* <td>Default Value</td>
* <td>Mandatory</td>
* </tr>
* <tr>
* <td>org.fax4j.spi.vbs.server.name</td>
* <td>The fax server host name.</td>
* <td>none</td>
* <td>none</td>
* <td>false</td>
* </tr>
* <tr>
* <td>org.fax4j.spi.vbs.exe.path</td>
* <td>The VBS executable.</td>
* <td>cscript.exe</td>
* <td>cscript.exe</td>
* <td>false</td>
* </tr>
* <tr>
* <td>org.fax4j.spi.vbs.always.use.win2000.api</td>
* <td>By default the windows 2000 vbs API will be used on all platforms.<br>
 * In order for other platforms (other than windows 2000) to invoke the windows XP API,
* this property must be set to false.
* </td>
* <td>true</td>
* <td>true</td>
* <td>false</td>
* </tr>
* </table>
* <br>
* <b>Limitations:</b><br>
* <ul>
* <li>This SPI will only work on Windows OS (windows 2000 and up).
* <li>See windows fax/tapi limitations.
* </ul>
* <br>
* <b>Dependencies:</b><br>
* <ul>
* <li>Required native executable: cscript.exe
* <li>Windows OS (windows 2000 and up) - see http://msdn.microsoft.com/en-us/library/ms692855(VS.85).aspx for more info.
* <li>Windows fax component installed.
* </ul>
* <br>
*
* @author Sagie Gur-Ari
* @version 1.26
* @since 0.1
*/
public class VBSFaxClientSpi extends AbstractFax4JClientSpi
{
    /**The fax server name (empty string when not configured)*/
    protected String faxServerName;
    /**The VBS exe path*/
    protected String vbsExePath;
    /**The use windows 2000 API flag*/
    protected boolean useWin2kAPI;
    /**The process output validator*/
    private ProcessOutputValidator processOutputValidator;
    /**The process output handler*/
    private ProcessOutputHandler processOutputHandler;
    /**The fax VBS scripts, keyed by script name; populated once by the static initializer*/
    private static final Map<String,String> VBS_SCRIPTS=new HashMap<String,String>();
    /**The fax vbs script (common helper code prepended to every generated script)*/
    private static final String VBS_SCRIPT="fax.vbs";
    /**The windows XP submit fax vbs script*/
    private static final String VBS_WIN_XP_SUBMIT_SCRIPT="winxp_submit_fax.vbs";
    /**The windows 2000 submit fax vbs script*/
    private static final String VBS_WIN_2K_SUBMIT_SCRIPT="win2k_submit_fax.vbs";
    /**The windows XP suspend fax vbs script*/
    private static final String VBS_WIN_XP_SUSPEND_SCRIPT="winxp_suspend_fax.vbs";
    /**The windows 2000 suspend fax vbs script*/
    private static final String VBS_WIN_2K_SUSPEND_SCRIPT="win2k_suspend_fax.vbs";
    /**The windows XP resume fax vbs script*/
    private static final String VBS_WIN_XP_RESUME_SCRIPT="winxp_resume_fax.vbs";
    /**The windows 2000 resume fax vbs script*/
    private static final String VBS_WIN_2K_RESUME_SCRIPT="win2k_resume_fax.vbs";
    /**The windows XP cancel fax vbs script*/
    private static final String VBS_WIN_XP_CANCEL_SCRIPT="winxp_cancel_fax.vbs";
    /**The windows 2000 cancel fax vbs script*/
    private static final String VBS_WIN_2K_CANCEL_SCRIPT="win2k_cancel_fax.vbs";
    /**The windows XP get fax job status vbs script*/
    private static final String VBS_WIN_XP_GET_FAX_JOB_STATUS_SCRIPT="winxp_get_fax_job_status.vbs";
    /**The windows 2000 get fax job status vbs script*/
    private static final String VBS_WIN_2K_GET_FAX_JOB_STATUS_SCRIPT="win2k_get_fax_job_status.vbs";
    /**The VBS script names*/
    // NOTE(review): "VSB" in the constant name looks like a typo for "VBS" - kept as-is since it is referenced below
    private static final String[] VSB_SCRIPT_NAMES=new String[]{VBSFaxClientSpi.VBS_SCRIPT,
        VBSFaxClientSpi.VBS_WIN_XP_SUBMIT_SCRIPT,VBSFaxClientSpi.VBS_WIN_2K_SUBMIT_SCRIPT,
        VBSFaxClientSpi.VBS_WIN_XP_SUSPEND_SCRIPT,VBSFaxClientSpi.VBS_WIN_2K_SUSPEND_SCRIPT,
        VBSFaxClientSpi.VBS_WIN_XP_RESUME_SCRIPT,VBSFaxClientSpi.VBS_WIN_2K_RESUME_SCRIPT,
        VBSFaxClientSpi.VBS_WIN_XP_CANCEL_SCRIPT,VBSFaxClientSpi.VBS_WIN_2K_CANCEL_SCRIPT,
        VBSFaxClientSpi.VBS_WIN_XP_GET_FAX_JOB_STATUS_SCRIPT,VBSFaxClientSpi.VBS_WIN_2K_GET_FAX_JOB_STATUS_SCRIPT};
    /**The windows 2000 OS name*/
    public static final String WIN_2K_OS_NAME="Windows 2000";
    /**
     * This class holds the SPI configuration constants.
     *
     * @author Sagie Gur-Ari
     * @version 1.03
     * @since 0.1
     */
    public enum FaxClientSpiConfigurationConstants
    {
        /**The fax server name property key*/
        FAX_SERVER_NAME_PROPERTY_KEY("org.fax4j.spi.vbs.server.name"),
        /**The VBS exe path property key*/
        VBS_EXE_PATH_PROPERTY_KEY("org.fax4j.spi.vbs.exe.path"),
        /**The always use win2k API property key*/
        VBS_ALWAYS_USE_WIN2K_API_PROPERTY_KEY("org.fax4j.spi.vbs.always.use.win2000.api"),
        /**The VBS exe path default value*/
        VBS_EXE_PATH_DEFAULT_VALUE("cscript");
        /**The string value*/
        private String value;
        /**
         * This is the class constructor.
         *
         * @param value
         *      The string value
         */
        private FaxClientSpiConfigurationConstants(String value)
        {
            this.value=value;
        }
        /**
         * This function returns the string value.
         *
         * @return The string value
         */
        @Override
        public final String toString()
        {
            return this.value;
        }
    }
    /**
     * This class holds the fax job extended properties.
     *
     * @author Sagie Gur-Ari
     * @version 1.02
     * @since 0.1
     */
    public enum FaxJobExtendedPropertyConstants
    {
        /**The document name fax job input*/
        DOCUMENT_NAME_FAX_JOB_PROPERTY_KEY("document.name");
        /**The string value*/
        private String value;
        /**
         * This is the class constructor.
         *
         * @param value
         *      The string value
         */
        private FaxJobExtendedPropertyConstants(String value)
        {
            this.value=value;
        }
        /**
         * This function returns the string value.
         *
         * @return The string value
         */
        @Override
        public final String toString()
        {
            return this.value;
        }
    }
    static
    {
        //load scripts (done once at class-load time; a read failure fails the class initialization)
        int amount=VBSFaxClientSpi.VSB_SCRIPT_NAMES.length;
        String name=null;
        InputStream inputStream=null;
        String content=null;
        for(int index=0;index<amount;index++)
        {
            //get next element
            name=VBSFaxClientSpi.VSB_SCRIPT_NAMES[index];
            //get input stream (script is bundled as a classpath resource next to this class)
            inputStream=VBSFaxClientSpi.class.getResourceAsStream(name);
            try
            {
                //read next script
                Reader reader=IOHelper.createReader(inputStream,null);
                content=IOHelper.readTextStream(reader);
                IOHelper.closeResource(inputStream);
            }
            catch(Exception exception)
            {
                IOHelper.closeResource(inputStream);
                throw new FaxException("Unable to read internal script: "+name,exception);
            }
            //put in map
            VBSFaxClientSpi.VBS_SCRIPTS.put(name,content);
        }
    }
    /**
     * This is the default constructor.
     */
    public VBSFaxClientSpi()
    {
        super();
    }
    /**
     * This function initializes the fax client SPI, reading the fax server name,
     * the VBS executable path and the windows 2000 API flag from the configuration.
     */
    @Override
    protected void initializeImpl()
    {
        //get logger
        Logger logger=this.getLogger();
        //get fax server name (empty string when not configured)
        this.faxServerName=this.getConfigurationValue(FaxClientSpiConfigurationConstants.FAX_SERVER_NAME_PROPERTY_KEY);
        if(this.faxServerName==null)
        {
            this.faxServerName="";
        }
        logger.logDebug(new Object[]{"Fax server name: ",this.faxServerName},null);
        //get vbs exe path (defaults to "cscript" when not configured)
        this.vbsExePath=this.getConfigurationValue(FaxClientSpiConfigurationConstants.VBS_EXE_PATH_PROPERTY_KEY);
        if(this.vbsExePath==null)
        {
            this.vbsExePath=FaxClientSpiConfigurationConstants.VBS_EXE_PATH_DEFAULT_VALUE.toString();
        }
        logger.logDebug(new Object[]{"VBS exe path: ",this.vbsExePath},null);
        //get the win2k flag (defaults to true; forced to true when actually running on windows 2000)
        String valueStr=this.getConfigurationValue(FaxClientSpiConfigurationConstants.VBS_ALWAYS_USE_WIN2K_API_PROPERTY_KEY);
        if(valueStr==null)
        {
            valueStr=Boolean.toString(true);
        }
        boolean win2kAPI=Boolean.parseBoolean(valueStr);
        if(!win2kAPI)
        {
            //get OS name
            String osName=System.getProperty("os.name");
            if(osName.equalsIgnoreCase(VBSFaxClientSpi.WIN_2K_OS_NAME))
            {
                win2kAPI=true;
            }
        }
        this.useWin2kAPI=win2kAPI;
        logger.logDebug(new Object[]{"Use windows 2000 API: ",Boolean.toString(this.useWin2kAPI)},null);
        //create process handler and validator
        this.processOutputValidator=new VBSProcessOutputValidator();
        this.processOutputHandler=new VBSProcessOutputHandler();
    }
    /**
     * This function returns true if the fax monitor events are supported by this SPI.
     *
     * @return True if the fax monitor events are supported by this SPI
     */
    @Override
    public boolean isFaxMonitorEventsSupported()
    {
        return true;
    }
    /**
     * This function returns the VBS exe path.
     *
     * @return The VBS exe path
     */
    protected String getVBSExePath()
    {
        return this.vbsExePath;
    }
    /**
     * This function will submit a new fax job.<br>
     * The fax job ID may be populated by this method in the provided
     * fax job object.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     */
    @Override
    protected void submitFaxJobImpl(FaxJob faxJob)
    {
        //pick the XP or 2000 flavor of the script based on the configured API
        String name=VBSFaxClientSpi.VBS_WIN_XP_SUBMIT_SCRIPT;
        if(this.useWin2kAPI)
        {
            name=VBSFaxClientSpi.VBS_WIN_2K_SUBMIT_SCRIPT;
        }
        //setup input data
        Object[] input=setupSubmitFaxJobInput(faxJob);
        //invoke script
        this.invokeScript(faxJob,name,input,FaxActionType.SUBMIT_FAX_JOB);
    }
    /**
     * This function will suspend an existing fax job.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     */
    @Override
    protected void suspendFaxJobImpl(FaxJob faxJob)
    {
        String name=VBSFaxClientSpi.VBS_WIN_XP_SUSPEND_SCRIPT;
        if(this.useWin2kAPI)
        {
            name=VBSFaxClientSpi.VBS_WIN_2K_SUSPEND_SCRIPT;
        }
        //invoke script
        this.invokeExistingFaxJobAction(name,faxJob,FaxActionType.SUSPEND_FAX_JOB);
    }
    /**
     * This function will resume an existing fax job.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     */
    @Override
    protected void resumeFaxJobImpl(FaxJob faxJob)
    {
        String name=VBSFaxClientSpi.VBS_WIN_XP_RESUME_SCRIPT;
        if(this.useWin2kAPI)
        {
            name=VBSFaxClientSpi.VBS_WIN_2K_RESUME_SCRIPT;
        }
        //invoke script
        this.invokeExistingFaxJobAction(name,faxJob,FaxActionType.RESUME_FAX_JOB);
    }
    /**
     * This function will cancel an existing fax job.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     */
    @Override
    protected void cancelFaxJobImpl(FaxJob faxJob)
    {
        String name=VBSFaxClientSpi.VBS_WIN_XP_CANCEL_SCRIPT;
        if(this.useWin2kAPI)
        {
            name=VBSFaxClientSpi.VBS_WIN_2K_CANCEL_SCRIPT;
        }
        //invoke script
        this.invokeExistingFaxJobAction(name,faxJob,FaxActionType.CANCEL_FAX_JOB);
    }
    /**
     * This function returns the fax job status.<br>
     * Not all SPIs support extraction of the fax job status.<br>
     * In case the SPI is unable to extract or does not support extracting
     * of the fax job status, it will return the UNKNOWN status.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     * @return The fax job status
     */
    @Override
    protected FaxJobStatus getFaxJobStatusImpl(FaxJob faxJob)
    {
        String name=VBSFaxClientSpi.VBS_WIN_XP_GET_FAX_JOB_STATUS_SCRIPT;
        if(this.useWin2kAPI)
        {
            name=VBSFaxClientSpi.VBS_WIN_2K_GET_FAX_JOB_STATUS_SCRIPT;
        }
        //invoke script
        FaxJobStatus faxJobStatus=this.invokeExistingFaxJobAction(name,faxJob,FaxActionType.GET_FAX_JOB_STATUS);
        return faxJobStatus;
    }
    /**
     * This function creates an input array with the needed info
     * to submit a new fax job based on the provided data.<br>
     * The element order must match the placeholder order in the submit script template;
     * the windows 2000 and XP scripts expect different argument lists.
     *
     * @param faxJob
     *      The fax job object containing the needed information
     * @return The submit fax job script input
     */
    protected Object[] setupSubmitFaxJobInput(FaxJob faxJob)
    {
        //init list
        List<Object> inputList=new LinkedList<Object>();
        //set fax server information
        inputList.add(this.faxServerName);
        //set fax values
        File file=faxJob.getFile();
        inputList.add(file);
        //document name falls back to the file name when not provided as an extended property
        String documentName=faxJob.getProperty(FaxJobExtendedPropertyConstants.DOCUMENT_NAME_FAX_JOB_PROPERTY_KEY.toString(),null);
        if((documentName==null)||(documentName.length()==0))
        {
            documentName=file.getName();
        }
        inputList.add(documentName);
        if(this.useWin2kAPI)
        {
            //set target information
            inputList.add(faxJob.getTargetAddress());
            inputList.add(faxJob.getTargetName());
            //set sender information
            inputList.add(faxJob.getSenderName());
            inputList.add(faxJob.getSenderFaxNumber());
        }
        else
        {
            //map the fax4j priority to the windows XP API priority constant (fptNORMAL when unset)
            FaxJobPriority priority=faxJob.getPriority();
            String valueStr="fptNORMAL";
            if(priority!=null)
            {
                switch(priority)
                {
                    case LOW_PRIORITY:
                        valueStr="fptLOW";
                        break;
                    case MEDIUM_PRIORITY:
                        valueStr="fptNORMAL";
                        break;
                    case HIGH_PRIORITY:
                        valueStr="fptHIGH";
                        break;
                }
            }
            inputList.add(valueStr);
            //set target information
            inputList.add(faxJob.getTargetAddress());
            inputList.add(faxJob.getTargetName());
            //set sender information
            inputList.add(faxJob.getSenderName());
            inputList.add(faxJob.getSenderFaxNumber());
            inputList.add(faxJob.getSenderEmail());
        }
        //convert to array
        int size=inputList.size();
        Object[] input=inputList.toArray(new Object[size]);
        return input;
    }
    /**
     * This function formats the provided object to enable embedding
     * in VBS code.<br>
     * Nulls become empty strings, control characters and quote characters are
     * stripped from strings so the value cannot break out of a VBS string literal,
     * and files are converted to canonical paths with escaped backslashes.
     *
     * @param object
     *      The object to format
     * @return The formatted object
     */
    protected Object formatObject(Object object)
    {
        Object formattedObject=object;
        if(object==null)
        {
            formattedObject="";
        }
        else if(object instanceof String)
        {
            //get string
            String string=(String)object;
            //remove characters that would terminate or escape the VBS string literal
            string=string.replaceAll("\n","");
            string=string.replaceAll("\r","");
            string=string.replaceAll("\t","");
            string=string.replaceAll("\f","");
            string=string.replaceAll("\b","");
            string=string.replaceAll("'","");
            string=string.replaceAll("\"","");
            //get reference
            formattedObject=string;
        }
        else if(object instanceof File)
        {
            //get file
            File file=(File)object;
            String filePath=null;
            try
            {
                filePath=file.getCanonicalPath();
            }
            catch(IOException exception)
            {
                throw new FaxException("Unable to get file path.",exception);
            }
            //double the backslashes so the path survives the MessageFormat/VBS embedding
            filePath=filePath.replaceAll("\\\\","\\\\\\\\");
            //get reference
            formattedObject=filePath;
        }
        return formattedObject;
    }
    /**
     * Invokes a basic fax action that operates on an already submitted fax job,
     * passing the fax server name and the fax job ID as the script input.
     *
     * @param scriptName
     *      The script name
     * @param faxJob
     *      The fax job object containing the needed information
     * @param faxActionType
     *      The fax action type
     * @return The fax job status (only for get fax job status action, for others null will be returned)
     */
    protected FaxJobStatus invokeExistingFaxJobAction(String scriptName,FaxJob faxJob,FaxActionType faxActionType)
    {
        //initialize array
        Object[] input=new String[2];
        //set fax server information
        input[0]=this.faxServerName;
        //set fax job ID
        input[1]=faxJob.getID();
        //invoke script
        return this.invokeScript(faxJob,scriptName,input,faxActionType);
    }
    /**
     * Invokes the VB script and returns its output.<br>
     * The raw process output is first validated and then routed to the output
     * handler according to the action type (submit updates the fax job in place,
     * status extraction returns a FaxJobStatus).
     *
     * @param faxJob
     *      The fax job object containing the needed information
     * @param name
     *      The script name
     * @param input
     *      The script input
     * @param faxActionType
     *      The fax action type
     * @return The fax job status (only for get fax job status action, for others null will be returned)
     */
    protected FaxJobStatus invokeScript(FaxJob faxJob,String name,Object[] input,FaxActionType faxActionType)
    {
        //generate script
        String script=this.generateScript(name,input);
        //invoke script
        ProcessOutput processOutput=this.invokeScript(script);
        //validate output
        this.processOutputValidator.validateProcessOutput(this,processOutput,faxActionType);
        //handle output
        FaxJobStatus output=null;
        switch(faxActionType)
        {
            case SUBMIT_FAX_JOB:
                this.processOutputHandler.updateFaxJob(this,faxJob,processOutput,faxActionType);
                break;
            case GET_FAX_JOB_STATUS:
                output=this.processOutputHandler.getFaxJobStatus(this,processOutput);
                break;
            default:
                //do nothing
                break;
        }
        return output;
    }
    /**
     * Invokes the VB script and returns the output.<br>
     * The script is written to a temporary file, executed via the configured VBS
     * executable and the temporary file is deleted afterwards (with deleteOnExit
     * as a fallback). A non-zero exit code raises a FaxException.
     *
     * @param script
     *      The script to invoke
     * @return The script output
     */
    protected ProcessOutput invokeScript(String script)
    {
        File file=null;
        try
        {
            //create temporary file
            file=File.createTempFile("fax4j_",".vbs");
        }
        catch(IOException exception)
        {
            throw new FaxException("Unable to create temporary vbscript file.",exception);
        }
        file.deleteOnExit();
        //generate command string (script path is quoted to survive spaces in the temp path)
        StringBuilder buffer=new StringBuilder();
        buffer.append(this.getVBSExePath());
        buffer.append(" \"");
        buffer.append(file.getAbsolutePath());
        buffer.append("\"");
        String command=buffer.toString();
        try
        {
            //write script to file
            IOHelper.writeTextFile(script,file);
        }
        catch(IOException exception)
        {
            throw new FaxException("Unable to write vbscript to temporary file.",exception);
        }
        //get logger
        Logger logger=this.getLogger();
        logger.logDebug(new Object[]{"Invoking command: ",command," script:",Logger.SYSTEM_EOL,script},null);
        //execute command
        ProcessOutput vbsOutput=ProcessExecutorHelper.executeProcess(this,command);
        //get exit code
        int exitCode=vbsOutput.getExitCode();
        //delete temp file
        boolean fileDeleted=file.delete();
        logger.logDebug(new Object[]{"Temp script file deleted: ",String.valueOf(fileDeleted)},null);
        if(exitCode!=0)
        {
            throw new FaxException("Error while invoking script, exit code: "+exitCode+" script output:\n"+vbsOutput.getOutputText()+"\nScript error:\n"+vbsOutput.getErrorText());
        }
        return vbsOutput;
    }
    /**
     * This function generates the script and returns it.<br>
     * The named template is filled with the (formatted) input values via
     * MessageFormat and the common helper script is prepended to the result.
     *
     * @param name
     *      The script name
     * @param input
     *      The script input
     * @return The formatted script
     */
    protected String generateScript(String name,Object[] input)
    {
        //get template
        String template=VBSFaxClientSpi.VBS_SCRIPTS.get(name);
        if((template==null)||(template.length()==0))
        {
            this.throwUnsupportedException();
        }
        //get common script
        String commonScript=VBSFaxClientSpi.VBS_SCRIPTS.get(VBSFaxClientSpi.VBS_SCRIPT);
        //format input (sanitize each value so it can be embedded in the VBS source)
        Object[] formattedInput=null;
        if(input!=null)
        {
            //get size
            int size=input.length;
            //create array
            formattedInput=new Object[size];
            Object object=null;
            for(int index=0;index<size;index++)
            {
                //get next element
                object=input[index];
                //format object
                object=this.formatObject(object);
                //push to array
                formattedInput[index]=object;
            }
        }
        //push input to template
        String updatedScript=MessageFormat.format(template,formattedInput);
        //merge scripts
        StringBuilder buffer=new StringBuilder(commonScript.length()+updatedScript.length());
        buffer.append(commonScript);
        buffer.append(updatedScript);
        String script=buffer.toString();
        return script;
    }
}
| |
package com.daedafusion.knowledge.query.framework.providers;
import com.daedafusion.configuration.Configuration;
import com.daedafusion.sf.AbstractProvider;
import com.daedafusion.sf.LifecycleListener;
import com.daedafusion.knowledge.query.framework.QueryResult;
import com.daedafusion.knowledge.trinity.Query;
import com.daedafusion.knowledge.trinity.QueryException;
import com.daedafusion.knowledge.trinity.dictionary.Dictionary;
import com.daedafusion.knowledge.trinity.dictionary.DictionaryPool;
import com.daedafusion.knowledge.trinity.triples.query.MultiQueryEngine;
import com.daedafusion.knowledge.trinity.triples.query.MultiQueryEnginePool;
import com.daedafusion.knowledge.trinity.triples.query.QueryEngine;
import com.daedafusion.knowledge.trinity.triples.query.QueryEnginePool;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.rdf.model.Model;
import org.apache.commons.pool2.ObjectPool;
import org.apache.log4j.Logger;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Executors;
/**
* Created by mphilpot on 9/5/14.
*/
public class TrinityQueryManager extends AbstractProvider implements QueryManagerProvider
{
private static final Logger log = Logger.getLogger(TrinityQueryManager.class);
private final ObjectMapper mapper;
private final ObjectPool<QueryEngine> qePool;
private final ObjectPool<MultiQueryEngine> mqePool;
private final ObjectPool<Dictionary> dictPool;
private final ListeningExecutorService service;
public TrinityQueryManager()
{
mapper = new ObjectMapper();
qePool = QueryEnginePool.getInstance().getPool();
mqePool = MultiQueryEnginePool.getInstance().getPool();
dictPool = DictionaryPool.getInstance().getPool();
service = MoreExecutors.listeningDecorator(Executors.newFixedThreadPool(Configuration.getInstance().getInteger("trinityQueryManager.literalPrefixLookup.threads", 20)));
addLifecycleListener(new LifecycleListener()
{
@Override
public void init()
{
}
@Override
public void start()
{
}
@Override
public void stop()
{
}
@Override
public void teardown()
{
qePool.close();
mqePool.close();
dictPool.close();
service.shutdown();
}
});
}
@Override
public QueryResult query(Query query)
{
QueryEngine qe = null;
try
{
qe = qePool.borrowObject();
QueryResult result = new QueryResult();
Model model = qe.graphQuery(query);
result.setCursor(mapper.writeValueAsString(qe.getContext().getCursorMap()));
result.getMetaData().putAll(qe.getContext().getMetadata());
result.setModel(model);
return result;
}
catch (Exception e)
{
log.error("", e);
throw new QueryException(e);
}
finally
{
if (qe != null)
{
try
{
qePool.returnObject(qe);
}
catch (Exception e)
{
log.error("", e);
}
}
}
}
@Override
public QueryResult query(List<Query> queryList)
{
MultiQueryEngine mqe = null;
try
{
mqe = mqePool.borrowObject();
QueryResult result = new QueryResult();
MultiQueryEngine.Result r = mqe.multiQuery(queryList);
result.setCursor(mapper.writeValueAsString(r.getCursorMap()));
result.getMetaData().putAll(r.getMetaData());
result.setModel(r.getModel());
return result;
}
catch (Exception e)
{
log.error("", e);
throw new QueryException(e);
}
finally
{
if (mqe != null)
{
try
{
mqePool.returnObject(mqe);
}
catch (Exception e)
{
log.error("", e);
}
}
}
}
@Override
public QueryResult select(Query query)
{
QueryEngine qe = null;
try
{
qe = qePool.borrowObject();
QueryResult result = new QueryResult();
ResultSet rs = qe.execSelect(query);
result.setCursor(mapper.writeValueAsString(qe.getContext().getCursorMap()));
result.getMetaData().putAll(qe.getContext().getMetadata());
result.setResultSets(Collections.singletonList(rs));
return result;
}
catch (Exception e)
{
log.error("", e);
throw new QueryException(e);
}
finally
{
if (qe != null)
{
try
{
qePool.returnObject(qe);
}
catch (Exception e)
{
log.error("", e);
}
}
}
}
@Override
public QueryResult select(List<Query> queryList)
{
MultiQueryEngine mqe = null;
try
{
mqe = mqePool.borrowObject();
QueryResult result = new QueryResult();
MultiQueryEngine.Result r = mqe.multiSelect(queryList);
result.setCursor(mapper.writeValueAsString(r.getCursorMap()));
result.getMetaData().putAll(r.getMetaData());
result.setResultSets(r.getResultSet());
return result;
}
catch (Exception e)
{
log.error("", e);
throw new QueryException(e);
}
finally
{
if (mqe != null)
{
try
{
mqePool.returnObject(mqe);
}
catch (Exception e)
{
log.error("", e);
}
}
}
}
/**
 * Task that resolves, for a single predicate, the set of literal values
 * returned by {@code Dictionary.getPredicateLiterals(predicate, prefix)}.
 * A {@link Dictionary} is borrowed from {@code dictPool} for the duration
 * of the call and always returned in the finally block.
 */
private class LiteralPrefixCallable implements Callable<Set<String>>
{
    // Predicate whose literals are looked up.
    private final String predicate;
    // Prefix passed to the dictionary lookup.
    private final String prefix;
    public LiteralPrefixCallable(String predicate, String prefix)
    {
        this.predicate = predicate;
        this.prefix = prefix;
    }
    @Override
    public Set<String> call() throws Exception
    {
        Dictionary dict = null;
        try
        {
            dict = dictPool.borrowObject();
            // NOTE(review): assumed to mean "literals of this predicate that
            // match the prefix" — confirm against the Dictionary API.
            return dict.getPredicateLiterals(predicate, prefix);
        }
        finally
        {
            // Only return instances that were actually borrowed.
            if(dict != null)
            {
                dictPool.returnObject(dict);
            }
        }
    }
}
@Override
public List<String> literalPrefixLookup(List<String> predicates, String literalPrefix) throws IOException
{
    // Fan out one dictionary lookup per predicate onto the executor.
    List<ListenableFuture<Set<String>>> futures = new ArrayList<>();
    for (String p : predicates)
    {
        futures.add(service.submit(new LiteralPrefixCallable(p, literalPrefix)));
    }
    // Fan in: a single future that completes when all lookups have finished.
    ListenableFuture<List<Set<String>>> fanIn = Futures.allAsList(futures);
    // Union the per-predicate sets to de-duplicate literals across predicates.
    Set<String> result = new HashSet<>();
    try
    {
        List<Set<String>> list = fanIn.get();
        list.forEach(result::addAll);
        return new ArrayList<>(result);
    }
    catch (InterruptedException e)
    {
        // Bug fix: the previous blanket catch swallowed the interrupt.
        // Restore the thread's interrupt status so callers can observe it,
        // then keep the existing best-effort behavior of an empty result.
        Thread.currentThread().interrupt();
        log.error("", e);
        return new ArrayList<>();
    }
    catch (Exception e)
    {
        // Lookup failure (e.g. ExecutionException): best-effort empty result.
        log.error("", e);
        return new ArrayList<>();
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.data.appconfiguration;
import com.azure.core.http.HttpClient;
import com.azure.core.http.HttpHeaders;
import com.azure.core.http.ProxyOptions;
import com.azure.core.http.netty.NettyAsyncHttpClientBuilder;
import com.azure.core.http.policy.AddHeadersFromContextPolicy;
import com.azure.core.http.rest.PagedIterable;
import com.azure.core.http.rest.Response;
import com.azure.core.util.Context;
import com.azure.data.appconfiguration.models.ConfigurationSetting;
import com.azure.data.appconfiguration.models.SettingSelector;
import com.azure.identity.DefaultAzureCredential;
import com.azure.identity.DefaultAzureCredentialBuilder;
import java.net.InetSocketAddress;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.SQLException;
/**
* WARNING: MODIFYING THIS FILE WILL REQUIRE CORRESPONDING UPDATES TO README.md FILE. LINE NUMBERS
* ARE USED TO EXTRACT APPROPRIATE CODE SEGMENTS FROM THIS FILE. ADD NEW CODE AT THE BOTTOM TO AVOID CHANGING
* LINE NUMBERS OF EXISTING CODE SAMPLES.
*
* Code samples for the README.md
*/
@SuppressWarnings("unused")
public class ReadmeSamples {
    // Placeholder values; readers substitute real configuration when copying samples.
    private String endpoint = "endpoint";
    private String connectionString = "connection string";
    private String urlKey = "url key";
    private String urlLabel = "url label";
    private String periodicUpdateLabel = "periodic update label";
    // Shared client used by the per-operation samples below.
    private ConfigurationClient configurationClient = new ConfigurationClientBuilder().buildClient();

    /** Sample: build a synchronous client from a connection string. */
    public void createClient() {
        // BEGIN: readme-sample-createClient
        ConfigurationClient configurationClient = new ConfigurationClientBuilder()
            .connectionString(connectionString)
            .buildClient();
        // END: readme-sample-createClient
    }

    /** Sample: build an asynchronous client from a connection string. */
    public void createAsyncClient() {
        // BEGIN: readme-sample-createAsyncClient
        ConfigurationAsyncClient configurationClient = new ConfigurationClientBuilder()
            .connectionString(connectionString)
            .buildAsyncClient();
        // END: readme-sample-createAsyncClient
    }

    /** Sample: authenticate with Azure Active Directory via DefaultAzureCredential. */
    public void aadAuthentication() {
        // BEGIN: readme-sample-aadAuthentication
        DefaultAzureCredential credential = new DefaultAzureCredentialBuilder().build();
        ConfigurationClient configurationClient = new ConfigurationClientBuilder()
            .credential(credential)
            .endpoint(endpoint)
            .buildClient();
        // END: readme-sample-aadAuthentication
    }

    /** Sample: read a JDBC URL from App Configuration and open a connection with it. */
    public void sqlExample() {
        // BEGIN: readme-sample-sqlExample
        ConfigurationClient configurationClient = new ConfigurationClientBuilder()
            .connectionString(connectionString)
            .buildClient();

        // urlLabel is optional
        String url = configurationClient.getConfigurationSetting(urlKey, urlLabel).getValue();
        Connection conn = null;
        try {
            conn = DriverManager.getConnection(url);
        } catch (SQLException ex) {
            System.out.printf("Failed to get connection using url %s", url);
        } finally {
            if (conn != null) {
                try {
                    conn.close();
                } catch (SQLException ex) {
                    System.out.printf("Failed to close connection, url %s", url);
                }
            }
        }
        // END: readme-sample-sqlExample
    }

    /** Sample: stream settings matching a label filter and react to each one. */
    public void listConfigurationsExample() {
        // BEGIN: readme-sample-listConfigurationsExample
        ConfigurationAsyncClient configurationClient = new ConfigurationClientBuilder()
            .connectionString(connectionString)
            .buildAsyncClient();
        configurationClient.listConfigurationSettings(new SettingSelector().setLabelFilter(periodicUpdateLabel))
            .subscribe(setting -> updateConfiguration(setting));
        // END: readme-sample-listConfigurationsExample
    }

    /** Sample: create a new setting (fails if the key/label already exists). */
    public void addConfigurationSetting() {
        // BEGIN: readme-sample-addConfigurationSetting
        ConfigurationSetting setting = configurationClient.addConfigurationSetting("new_key", "new_label", "new_value");
        // END: readme-sample-addConfigurationSetting
    }

    /** Sample: create or overwrite a setting. */
    public void setConfigurationSetting() {
        // BEGIN: readme-sample-setConfigurationSetting
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        // END: readme-sample-setConfigurationSetting
    }

    /** Sample: store a setting, then read it back by key and label. */
    public void getConfigurationSetting() {
        // BEGIN: readme-sample-getConfigurationSetting
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        ConfigurationSetting retrievedSetting = configurationClient.getConfigurationSetting("some_key", "some_label");
        // END: readme-sample-getConfigurationSetting
    }

    /** Sample: conditional GET (only retrieve when the setting has changed). */
    public void getConfigurationSettingConditionally() {
        // BEGIN: readme-sample-getConfigurationSettingConditionally
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        Response<ConfigurationSetting> settingResponse = configurationClient.getConfigurationSettingWithResponse(setting, null, true, Context.NONE);
        // END: readme-sample-getConfigurationSettingConditionally
    }

    /** Sample: update the value of an existing setting. */
    public void updateConfigurationSetting() {
        // BEGIN: readme-sample-updateConfigurationSetting
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        ConfigurationSetting updatedSetting = configurationClient.setConfigurationSetting("some_key", "some_label", "new_value");
        // END: readme-sample-updateConfigurationSetting
    }

    /** Sample: conditional update (only set when the stored setting is unchanged). */
    public void updateConfigurationSettingConditionally() {
        // BEGIN: readme-sample-updateConfigurationSettingConditionally
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        Response<ConfigurationSetting> settingResponse = configurationClient.setConfigurationSettingWithResponse(setting, true, Context.NONE);
        // END: readme-sample-updateConfigurationSettingConditionally
    }

    /** Sample: delete a setting by key and label. */
    public void deleteConfigurationSetting() {
        // BEGIN: readme-sample-deleteConfigurationSetting
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        ConfigurationSetting deletedSetting = configurationClient.deleteConfigurationSetting("some_key", "some_label");
        // END: readme-sample-deleteConfigurationSetting
    }

    /** Sample: conditional delete (only delete when the stored setting is unchanged). */
    public void deleteConfigurationSettingConditionally() {
        // BEGIN: readme-sample-deleteConfigurationSettingConditionally
        ConfigurationSetting setting = configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        Response<ConfigurationSetting> settingResponse = configurationClient.deleteConfigurationSettingWithResponse(setting, true, Context.NONE);
        // END: readme-sample-deleteConfigurationSettingConditionally
    }

    /** Sample: list settings for multiple keys via a key filter. */
    public void listConfigurationSetting() {
        // BEGIN: readme-sample-listConfigurationSetting
        String key = "some_key";
        String key2 = "new_key";
        configurationClient.setConfigurationSetting(key, "some_label", "some_value");
        configurationClient.setConfigurationSetting(key2, "new_label", "new_value");
        SettingSelector selector = new SettingSelector().setKeyFilter(key + "," + key2);
        PagedIterable<ConfigurationSetting> settings = configurationClient.listConfigurationSettings(selector);
        // END: readme-sample-listConfigurationSetting
    }

    /** Sample: list the revision history of a key. */
    public void listRevisions() {
        // BEGIN: readme-sample-listRevisions
        String key = "revisionKey";
        configurationClient.setConfigurationSetting(key, "some_label", "some_value");
        configurationClient.setConfigurationSetting(key, "new_label", "new_value");
        SettingSelector selector = new SettingSelector().setKeyFilter(key);
        PagedIterable<ConfigurationSetting> settings = configurationClient.listRevisions(selector);
        // END: readme-sample-listRevisions
    }

    /** Sample: mark a setting read-only. */
    public void setReadOnly() {
        // BEGIN: readme-sample-setReadOnly
        configurationClient.setConfigurationSetting("some_key", "some_label", "some_value");
        ConfigurationSetting setting = configurationClient.setReadOnly("some_key", "some_label", true);
        // END: readme-sample-setReadOnly
    }

    /** Sample: clear the read-only flag on a setting. */
    public void clearReadOnly() {
        // BEGIN: readme-sample-clearReadOnly
        ConfigurationSetting setting = configurationClient.setReadOnly("some_key", "some_label", false);
        // END: readme-sample-clearReadOnly
    }

    /** Sample: attach custom HTTP headers to a request via Context. */
    public void customHeaders() {
        // BEGIN: readme-sample-customHeaders
        // Add your headers
        HttpHeaders headers = new HttpHeaders();
        headers.set("my-header1", "my-header1-value");
        headers.set("my-header2", "my-header2-value");
        headers.set("my-header3", "my-header3-value");
        // Call API by passing headers in Context.
        configurationClient.addConfigurationSettingWithResponse(
            new ConfigurationSetting().setKey("key").setValue("value"),
            new Context(AddHeadersFromContextPolicy.AZURE_REQUEST_HTTP_HEADERS_KEY, headers));
        // Above three HttpHeader will be added in outgoing HttpRequest.
        // END: readme-sample-customHeaders
    }

    /** Sample: route client traffic through an HTTP proxy. */
    public void createClientWithProxyOption() {
        // BEGIN: readme-sample-createClientWithProxyOption
        // Proxy options
        final String hostname = "{your-host-name}";
        final int port = 447; // your port number
        ProxyOptions proxyOptions = new ProxyOptions(ProxyOptions.Type.HTTP,
            new InetSocketAddress(hostname, port));
        HttpClient httpClient = new NettyAsyncHttpClientBuilder()
            .proxy(proxyOptions)
            .build();
        ConfigurationAsyncClient configurationAsyncClient = new ConfigurationClientBuilder()
            .connectionString("{your_connection_string}")
            .httpClient(httpClient)
            .buildAsyncClient();
        // END: readme-sample-createClientWithProxyOption
    }

    // Callback used by listConfigurationsExample; intentionally a no-op placeholder.
    private void updateConfiguration(ConfigurationSetting setting) {
        // do something on the given setting.
    }
}
| |
package com.xtremelabs.robolectric.shadows;
import android.accounts.Account;
import android.accounts.AccountManager;
import android.accounts.AccountManagerCallback;
import android.accounts.AccountManagerFuture;
import android.accounts.AuthenticatorException;
import android.accounts.OperationCanceledException;
import android.app.Activity;
import android.content.Context;
import android.os.Bundle;
import android.os.Handler;
import com.xtremelabs.robolectric.Robolectric;
import com.xtremelabs.robolectric.internal.Implementation;
import com.xtremelabs.robolectric.internal.Implements;
import com.xtremelabs.robolectric.internal.RealObject;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.TimeUnit;
import static com.xtremelabs.robolectric.Robolectric.newInstanceOf;
import static com.xtremelabs.robolectric.Robolectric.shadowOf;
/**
* Shadows the {@code android.accounts.AccountManager} class.
*/
/**
 * Shadows the {@code android.accounts.AccountManager} class for tests.
 *
 * <p>Auth-token futures always produce {@link #AUTH_TOKEN_VALUE}; accounts are
 * whatever the test installs via {@link #setAccounts(Account[])}.
 */
@SuppressWarnings({"UnusedDeclaration"})
@Implements(AccountManager.class)
public class ShadowAccountManager {
    /** The auth token value every future produced by this shadow returns. */
    public static final String AUTH_TOKEN_VALUE = "authToken";
    // Process-wide singleton mirroring AccountManager.get(Context) semantics.
    private static AccountManager singleton;
    // Accounts installed by the test, or null if none were set.
    private Account[] accounts;
    // account -> (authTokenType -> token) cache backing peekAuthToken().
    private HashMap<Account, HashMap<String, String>> cachedAuthTokenValues =
            new HashMap<Account, HashMap<String, String>>();

    @Implementation
    public static AccountManager get(Context context) {
        if (singleton == null) {
            singleton = Robolectric.newInstanceOf(AccountManager.class);
        }
        return singleton;
    }

    @Implementation
    public AccountManagerFuture<Bundle> getAuthToken(Account account, String authTokenType, Bundle options, Activity activity, AccountManagerCallback<Bundle> callback, Handler handler) {
        //TODO: Add complete activity to perform the account intent dance.
        return newAuthTokenFuture(account.type, account.name);
    }

    @Implementation
    public AccountManagerFuture<Bundle> getAuthTokenByFeatures(String accountType, String authTokenType, String[] features, Activity activity, Bundle addAccountOptions, Bundle getAuthTokenOptions, AccountManagerCallback<Bundle> callback, Handler handler) {
        //TODO: Add complete activity to perform the account intent dance.
        // No concrete account is involved here, so a fixed placeholder name is used.
        return newAuthTokenFuture(accountType, "accountName");
    }

    /**
     * Builds the stub future shared by both getAuthToken variants (previously
     * two near-identical anonymous classes). The result bundle carries the
     * fixed {@link #AUTH_TOKEN_VALUE} plus the given account type and name.
     */
    private AccountManagerFuture<Bundle> newAuthTokenFuture(final String accountType, final String accountName) {
        return new AccountManagerFuture<Bundle>() {
            private boolean isFutureCancelled;
            private boolean isFutureDone;

            @Override
            public boolean cancel(boolean mayInterruptIfRunning) {
                // A completed future can no longer be cancelled.
                if (isFutureDone) {
                    return false;
                }
                isFutureCancelled = true;
                return isCancelled();
            }

            @Override
            public Bundle getResult(long timeout, TimeUnit unit) throws OperationCanceledException,
                    AuthenticatorException, IOException {
                // The stub never blocks, so the timed variant equals the untimed one.
                return getResult();
            }

            @Override
            public Bundle getResult() throws OperationCanceledException,
                    AuthenticatorException, IOException {
                Bundle result = new Bundle();
                if (!isCancelled()) {
                    result.putString(AccountManager.KEY_AUTHTOKEN, AUTH_TOKEN_VALUE);
                    result.putString(AccountManager.KEY_ACCOUNT_TYPE, accountType);
                    result.putString(AccountManager.KEY_ACCOUNT_NAME, accountName);
                    isFutureDone = true;
                }
                return result;
            }

            @Override
            public boolean isCancelled() {
                return isFutureCancelled;
            }

            @Override
            public boolean isDone() {
                return isFutureDone || isFutureCancelled;
            }
        };
    }

    @Implementation
    public void invalidateAuthToken(String accountType, String authToken) {}

    @Implementation
    public Account[] getAccounts() {
        return getAccountsByType(null);
    }

    @Implementation
    public Account[] getAccountsByType(String accountType) {
        if (accountType == null) {
            // Bug fix: never return a null array — the real AccountManager
            // returns an empty array when no accounts exist.
            return (accounts != null) ? accounts : new Account[0];
        }
        ArrayList<Account> accountList = new ArrayList<Account>();
        if (accounts != null) {
            for (Account account : accounts) {
                if (accountType.equals(account.type)) {
                    accountList.add(account);
                }
            }
        }
        return accountList.toArray(new Account[accountList.size()]);
    }

    @Implementation
    public String peekAuthToken(Account account, String authTokenType) {
        HashMap<String, String> tokens = cachedAuthTokenValues.get(account);
        return (tokens != null) ? tokens.get(authTokenType) : null;
    }

    /** Test hook: pre-populates the token cache consulted by peekAuthToken(). */
    public void setCachedAuthToken(Account account, String authTokenType, String authTokenValue) {
        if (!cachedAuthTokenValues.containsKey(account)) {
            cachedAuthTokenValues.put(account, new HashMap<String, String>());
        }
        cachedAuthTokenValues.get(account).put(authTokenType, authTokenValue);
    }

    /** Test hook: installs the accounts reported by getAccounts()/getAccountsByType(). */
    public void setAccounts(Account[] accounts) {
        this.accounts = accounts;
    }
}
| |
/*
* ModeShape (http://www.modeshape.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.modeshape.jcr;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.notNullValue;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import javax.jcr.Node;
import javax.jcr.PropertyType;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.NodeType;
import javax.jcr.nodetype.NodeTypeExistsException;
import org.junit.Before;
import org.junit.Test;
import org.modeshape.jcr.api.nodetype.NodeTypeManager;
/**
* Test of CND-based type definitions. These test cases focus on ensuring that an import of a type from a CND file registers the
* expected type rather than attempting to validate all of the type registration functionality already tested in
* {@link TypeRegistrationTest}.
*/
public class NodeTypeRegistrationTest extends SingleUseAbstractTest {
    // Fresh node type manager obtained from the session before each test.
    private NodeTypeManager nodeTypeManager;

    @Override
    @Before
    public void beforeEach() throws Exception {
        super.beforeEach();
        this.nodeTypeManager = session.getWorkspace().getNodeTypeManager();
    }

    /** Resolves a classpath resource as a stream, or null if absent. */
    protected InputStream resourceAsStream( String path ) {
        return getClass().getClassLoader().getResourceAsStream(path);
    }

    /** Resolves a classpath resource as a URL, or null if absent. */
    protected URL resourceAsUrl( String path ) {
        return getClass().getClassLoader().getResource(path);
    }

    /** Asserts the named node type is registered and returns it. */
    protected NodeType assertNodeType( String name ) throws RepositoryException {
        NodeType type = nodeTypeManager.getNodeType(name);
        assertThat(type, is(notNullValue()));
        return type;
    }

    @Test
    public void shouldAccessCustomNodeTypeManagerViaCasting() throws Exception {
        NodeTypeManager nodeTypeMgr = session.getWorkspace().getNodeTypeManager();
        assertThat(nodeTypeMgr, is(notNullValue()));
    }

    @Test
    public void shouldAccessCustomNodeTypeManagerViaProtectedMethods() throws Exception {
        NodeTypeManager nodeTypeMgr = session.workspace().nodeTypeManager();
        assertThat(nodeTypeMgr, is(notNullValue()));
    }

    // Registration from a nonexistent File must surface as an IOException.
    @Test( expected = IOException.class )
    public void shouldFailIfResourceFileCouldNotBeFoundAsRelativeFile() throws Exception {
        File file = new File("/this/resource/file/does/not/exist");
        assertThat(file.exists(), is(false));
        nodeTypeManager.registerNodeTypes(file, true);
    }

    // Same expectation when the missing file is addressed via a URL.
    @Test( expected = IOException.class )
    public void shouldFailIfResourceFileCouldNotBeFoundAsUrl() throws Exception {
        File file = new File("/this/resource/file/does/not/exist");
        assertThat(file.exists(), is(false));
        URL url = file.toURI().toURL();
        nodeTypeManager.registerNodeTypes(url, true);
    }

    @Test
    public void shouldLoadNodeTypesFromCndResourceFileFoundOnClasspath() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsStream("cnd/cars.cnd"), true);
        assertNodeType("car:Car");
    }

    // Guarded by file.exists() because the relative path only resolves when the
    // tests run from the module directory.
    @Test
    public void shouldLoadNodeTypesFromCndResourceFileFoundWithRelativePathOnFileSystem() throws Exception {
        File file = new File("src/test/resources/cnd/cars.cnd");
        if (file.exists()) {
            nodeTypeManager.registerNodeTypes(file, true);
            assertNodeType("car:Car");
        }
    }

    @Test
    public void shouldLoadNodeTypesFromCndResourceFileFoundWithAbsolutePathOnFileSystem() throws Exception {
        File file = new File("src/test/resources/cnd/cars.cnd");
        if (file.exists()) {
            nodeTypeManager.registerNodeTypes(file.getAbsoluteFile(), true);
            assertNodeType("car:Car");
        }
    }

    @Test
    public void shouldLoadNodeTypesFromUrlToCndFile() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsUrl("cnd/cars.cnd"), true);
        assertNodeType("car:Car");
    }

    // With allowUpdate=false, re-registering an existing type must be rejected.
    @Test( expected = NodeTypeExistsException.class )
    public void shouldNotAllowRedefinitionOfExistingTypesFromCndFile() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsUrl("cnd/existingType.cnd"), false);
        // assertNodeType("nt:folder");
    }

    @Test
    public void shouldLoadMagnoliaTypesFromCndFile() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsUrl("cnd/magnolia.cnd"), true);
        assertNodeType("mgnl:contentNode");
    }

    // Spot-checks the full shape of a registered type: mixin flag, supertypes,
    // child node definition, and the residual property with its constraints.
    @Test
    public void shouldRegisterValidTypesFromCndFile() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsUrl("cnd/validType.cnd"), true);
        NodeType nodeType = assertNodeType("modetest:testType");
        assertThat(nodeType, is(notNullValue()));
        assertThat(nodeType.isMixin(), is(true));
        assertThat(nodeType.hasOrderableChildNodes(), is(true));
        assertThat(nodeType.getDeclaredSupertypes().length, is(2));
        assertThat(nodeType.getDeclaredChildNodeDefinitions().length, is(1));
        JcrNodeDefinition childNode = (JcrNodeDefinition)nodeType.getDeclaredChildNodeDefinitions()[0];
        assertThat(childNode.getName(), is("modetest:namespace"));
        assertThat(childNode.getDefaultPrimaryType().getName(), is("mode:namespace"));
        assertThat(childNode.getRequiredPrimaryTypes().length, is(1));
        assertThat(childNode.getRequiredPrimaryTypes()[0].getName(), is("mode:namespace"));
        assertThat(childNode.allowsSameNameSiblings(), is(false));
        assertThat(childNode.isMandatory(), is(false));
        assertThat(nodeType.getDeclaredPropertyDefinitions().length, is(1));
        JcrPropertyDefinition property = (JcrPropertyDefinition)nodeType.getDeclaredPropertyDefinitions()[0];
        assertThat(property.getName(), is("*"));
        assertThat(property.getRequiredType(), is(PropertyType.STRING));
        assertThat(property.getValueConstraints().length, is(3));
        assertThat(property.getValueConstraints()[0], is("foo"));
        assertThat(property.getValueConstraints()[1], is("bar"));
        assertThat(property.getValueConstraints()[2], is("baz"));
        assertThat(property.getDefaultValues().length, is(1));
        assertThat(property.getDefaultValues()[0].getString(), is("foo"));
    }

    @Test
    public void shouldLoadNodeTypesFromXmlResourceFileFoundOnClasspath() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsStream("xml/magnolia_forum_nodetypes.xml"), true);
        assertNodeType("mgnl:forum");
    }

    @Test
    public void shouldLoadNodeTypesFromXmlResourceFileFoundWithRelativePathOnFileSystem() throws Exception {
        File file = new File("src/test/resources/xml/magnolia_forum_nodetypes.xml");
        if (file.exists()) {
            nodeTypeManager.registerNodeTypes(file, true);
            assertNodeType("mgnl:forum");
        }
    }

    @Test
    public void shouldLoadNodeTypesFromXmlResourceFileFoundWithAbsolutePathOnFileSystem() throws Exception {
        File file = new File("src/test/resources/xml/magnolia_forum_nodetypes.xml");
        if (file.exists()) {
            nodeTypeManager.registerNodeTypes(file.getAbsoluteFile(), true);
            assertNodeType("mgnl:forum");
        }
    }

    @Test
    public void shouldLoadNodeTypesFromUrl() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsUrl("xml/magnolia_forum_nodetypes.xml"), true);
        assertNodeType("mgnl:forum");
    }

    @Test
    public void shouldLoadMagnoliaNodeTypesFromXml() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsStream("xml/magnolia_forum_nodetypes.xml"), true);
        assertNodeType("mgnl:forum");
    }

    @Test
    public void shouldLoadOwfeNodeTypesFromXml() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsStream("xml/owfe_nodetypes.xml"), true);
        assertNodeType("expression");
    }

    @Test
    public void shouldLoadCustomNodeTypesFromXml() throws Exception {
        nodeTypeManager.registerNodeTypes(resourceAsStream("xml/custom_nodetypes.xml"), true);
        assertNodeType("mgnl:reserve");
    }

    @Test
    public void shouldAllowDisjunctiveResidualChildNodeDefinitions() throws Exception {
        // This is an extended test of the MODE-698 fix
        nodeTypeManager.registerNodeTypes(resourceAsStream("cnd/magnolia.cnd"), true);
        assertNodeType("mgnl:contentNode");
        Node rootNode = session.getRootNode();
        Node branchNode = rootNode.addNode("disjunctiveTest", "nt:unstructured");
        Node testNode = branchNode.addNode("testNode", "mgnl:content");
        assertTrue(testNode.hasNode("MetaData"));
        session.save();
        // This residual definition comes from the ancestor - nt:hierarchyNode
        testNode.addNode("hierarchyNode", "nt:folder");
        // This residual definition comes from mgnl:content
        testNode.addNode("baseNode", "nt:unstructured");
        session.save();
    }
}
| |
/*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.codeInspection.bytecodeAnalysis;
import com.intellij.util.ArrayUtil;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.org.objectweb.asm.tree.analysis.AnalyzerException;
import java.util.*;
/**
* For lattice, equations and solver description, see http://pat.keldysh.ru/~ilya/faba.pdf (in Russian)
*/
/**
 * A flat lattice over the constants of an enum: a bottom element, a top
 * element, and everything else pairwise incomparable in between.
 */
final class ELattice<T extends Enum<T>> {
  final T bot;
  final T top;

  ELattice(T bot, T top) {
    this.bot = bot;
    this.top = top;
  }

  /** Least upper bound: bot is neutral, distinct non-bot values collapse to top. */
  final T join(T x, T y) {
    if (x == y || y == bot) return x;
    if (x == bot) return y;
    return top;
  }

  /** Greatest lower bound: top is neutral, distinct non-top values collapse to bot. */
  final T meet(T x, T y) {
    if (x == y || y == top) return x;
    if (x == top) return y;
    return bot;
  }
}
/**
 * Joins two analysis {@link Result}s over the value lattice.
 * Final results join pointwise; a final joined with a pending result is folded
 * into the pending sum as a constant product; two pending results union their
 * sums (with a size guard against equation blow-up).
 */
class ResultUtil {
  private final ELattice<Value> lattice;
  final Value top;

  ResultUtil(ELattice<Value> lattice) {
    this.lattice = lattice;
    top = lattice.top;
  }

  /**
   * Joins {@code r1} and {@code r2}.
   *
   * @throws AnalyzerException if the combined pending sum exceeds the equation size limit
   */
  Result join(Result r1, Result r2) throws AnalyzerException {
    // top absorbs everything: top ∨ x = top, so short-circuit.
    if (r1 instanceof Final && ((Final) r1).value == top) {
      return r1;
    }
    if (r2 instanceof Final && ((Final) r2).value == top) {
      return r2;
    }
    if (r1 instanceof Final && r2 instanceof Final) {
      return new Final(lattice.join(((Final) r1).value, ((Final) r2).value));
    }
    // Final + Pending (either order): fold the final value into the sum.
    if (r1 instanceof Final && r2 instanceof Pending) {
      return addFinalToPending((Final) r1, (Pending) r2);
    }
    if (r1 instanceof Pending && r2 instanceof Final) {
      return addFinalToPending((Final) r2, (Pending) r1);
    }
    // Both pending: union the sums and guard against blow-up.
    Pending pending1 = (Pending) r1;
    Pending pending2 = (Pending) r2;
    Set<Product> sum = new HashSet<>();
    sum.addAll(pending1.sum);
    sum.addAll(pending2.sum);
    checkLimit(sum);
    return new Pending(sum);
  }

  // Represents a final value inside a pending sum as a product over no keys
  // (shared by both Final/Pending branches, which were previously duplicated).
  private static Pending addFinalToPending(Final f, Pending pending) {
    Set<Product> sum = new HashSet<>(pending.sum);
    sum.add(new Product(f.value, Collections.emptySet()));
    return new Pending(sum);
  }

  // Rejects sums whose total id count exceeds the analysis limit.
  private static void checkLimit(Set<Product> sum) throws AnalyzerException {
    int size = sum.stream().mapToInt(prod -> prod.ids.size()).sum();
    if (size > Analysis.EQUATION_SIZE_LIMIT) {
      throw new AnalyzerException(null, "Equation size is too big");
    }
  }
}
/**
 * Joins two hashed analysis results ({@link HResult}) over the value lattice.
 * Mirrors {@link ResultUtil} but operates on array-based {@link HPending} deltas.
 */
class HResultUtil {
  private static final HKey[] EMPTY_PRODUCT = new HKey[0];
  private final ELattice<Value> lattice;
  final Value top;

  HResultUtil(ELattice<Value> lattice) {
    this.lattice = lattice;
    top = lattice.top;
  }

  /** Joins {@code r1} and {@code r2}; top absorbs, final+pending folds, pending+pending concatenates. */
  HResult join(HResult r1, HResult r2) {
    // top absorbs everything: top ∨ x = top, so short-circuit.
    if (r1 instanceof HFinal && ((HFinal) r1).value == top) {
      return r1;
    }
    if (r2 instanceof HFinal && ((HFinal) r2).value == top) {
      return r2;
    }
    if (r1 instanceof HFinal && r2 instanceof HFinal) {
      return new HFinal(lattice.join(((HFinal) r1).value, ((HFinal) r2).value));
    }
    // Final + Pending (either order): prepend the final value as a constant component.
    if (r1 instanceof HFinal && r2 instanceof HPending) {
      return prependFinal((HFinal) r1, (HPending) r2);
    }
    if (r1 instanceof HPending && r2 instanceof HFinal) {
      return prependFinal((HFinal) r2, (HPending) r1);
    }
    // Both pending: concatenate the deltas.
    HPending pending1 = (HPending) r1;
    HPending pending2 = (HPending) r2;
    return new HPending(ArrayUtil.mergeArrays(pending1.delta, pending2.delta, HComponent.ARRAY_FACTORY));
  }

  // Represents a final value as a component over the empty product and puts it
  // in front of the pending delta (shared by both previously duplicated branches).
  private static HPending prependFinal(HFinal f, HPending pending) {
    HComponent[] delta = new HComponent[pending.delta.length + 1];
    delta[0] = new HComponent(f.value, EMPTY_PRODUCT);
    System.arraycopy(pending.delta, 0, delta, 1, pending.delta.length);
    return new HPending(delta);
  }
}
/**
 * A product term of a pending equation: a lattice {@link Value} guarded by the
 * set of keys whose solutions it depends on. Immutable value object; equality
 * and hashing cover both fields.
 */
final class Product {
  @NotNull final Value value;
  @NotNull final Set<Key> ids;

  Product(@NotNull Value value, @NotNull Set<Key> ids) {
    this.value = value;
    this.ids = ids;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    Product that = (Product)o;
    return value.equals(that.value) && ids.equals(that.ids);
  }

  @Override
  public int hashCode() {
    return 31 * value.hashCode() + ids.hashCode();
  }
}
/** Marker for the right-hand side of an equation; implemented by Final, Pending and Effects. */
interface Result {}
/** A fully computed result: a single lattice {@link Value}. */
final class Final implements Result {
  final Value value;
  Final(Value value) {
    this.value = value;
  }
  @Override
  public String toString() {
    return "Final{" + "value=" + value + '}';
  }
}
/** A partially computed result: a sum of {@link Product} terms awaiting other keys' solutions. */
final class Pending implements Result {
  final Set<Product> sum;
  Pending(Set<Product> sum) {
    this.sum = sum;
  }
  @Override
  public String toString() {
    // Added for consistency with Final.toString()/Equation.toString(),
    // so equations containing pending results print readably.
    return "Pending{" + "sum=" + sum + '}';
  }
}
/** A result carrying the set of {@code EffectQuantum} side effects inferred for a method. */
final class Effects implements Result {
  final Set<EffectQuantum> effects;
  Effects(Set<EffectQuantum> effects) {
    this.effects = effects;
  }
}
/** An equation of the analysis: a {@link Key} bound to its right-hand side {@link Result}. */
final class Equation {
  final Key id;
  final Result rhs;
  Equation(Key id, Result rhs) {
    this.id = id;
    this.rhs = rhs;
  }
  @Override
  public String toString() {
    return "Equation{" + "id=" + id + ", rhs=" + rhs + '}';
  }
}
/**
 * The stability-independent core of an {@code HKey}: the hashed method key
 * plus the direction key. Used as a map key when merging equations whose
 * {@code HKey}s differ only in stability, so equality and hashing cover
 * exactly the byte array contents and the direction.
 */
final class CoreHKey {
  @NotNull
  final byte[] key;
  final int dirKey;

  CoreHKey(@NotNull byte[] key, int dirKey) {
    this.key = key;
    this.dirKey = dirKey;
  }

  @Override
  public boolean equals(Object o) {
    if (this == o) return true;
    // The class is final, so instanceof is equivalent to an exact class check
    // and handles null in one step.
    if (!(o instanceof CoreHKey)) return false;
    CoreHKey that = (CoreHKey)o;
    return dirKey == that.dirKey && Arrays.equals(key, that.key);
  }

  @Override
  public int hashCode() {
    // Content-based hash over the byte array, combined with the direction key.
    return 31 * Arrays.hashCode(key) + dirKey;
  }
}
/**
 * Iterative fixed-point solver over a value lattice: takes equations
 * (key -> result), repeatedly substitutes freshly solved values into
 * pending equations, and returns a map from keys to final values.
 */
final class Solver {
  // supplies join/meet and the top/bot elements for Value
  private final ELattice<Value> lattice;
  // trigger key -> keys of pending equations to revisit when the trigger is solved
  private final HashMap<HKey, HashSet<HKey>> dependencies = new HashMap<>();
  // equations whose right-hand side still references unsolved keys
  private final HashMap<HKey, HPending> pending = new HashMap<>();
  // keys that have reached a final value
  private final HashMap<HKey, Value> solved = new HashMap<>();
  // work list of freshly solved keys whose dependents must be re-examined
  private final Stack<HKey> moving = new Stack<>();
  private final HResultUtil resultUtil;
  // equations indexed by core key (key bytes + direction), stability ignored
  private final HashMap<CoreHKey, HEquation> equations = new HashMap<>();
  // value assumed for the unstable variant of a solved stable key
  private final Value unstableValue;
  Solver(ELattice<Value> lattice, Value unstableValue) {
    this.lattice = lattice;
    this.unstableValue = unstableValue;
    resultUtil = new HResultUtil(lattice);
  }
  // Registers an equation. If one already exists for the same core key, the two
  // results are joined; the merged key is stable only when both inputs were stable.
  void addEquation(HEquation equation) {
    HKey key = equation.key;
    CoreHKey coreKey = new CoreHKey(key.key, key.dirKey);
    HEquation previousEquation = equations.get(coreKey);
    if (previousEquation == null) {
      equations.put(coreKey, equation);
    } else {
      HKey joinKey = new HKey(coreKey.key, coreKey.dirKey, equation.key.stable && previousEquation.key.stable, true);
      HResult joinResult = resultUtil.join(equation.result, previousEquation.result);
      HEquation joinEquation = new HEquation(joinKey, joinResult);
      equations.put(coreKey, joinEquation);
    }
  }
  // Classifies an equation: final (or normalizable to final) results are solved
  // immediately; otherwise a dependency edge is recorded for every key the
  // pending result mentions, and the equation is parked in `pending`.
  void queueEquation(HEquation equation) {
    HResult rhs = equation.result;
    if (rhs instanceof HFinal) {
      solved.put(equation.key, ((HFinal) rhs).value);
      moving.push(equation.key);
    } else if (rhs instanceof HPending) {
      HPending pendResult = ((HPending)rhs).copy();
      HResult norm = normalize(pendResult.delta);
      if (norm instanceof HFinal) {
        solved.put(equation.key, ((HFinal) norm).value);
        moving.push(equation.key);
      }
      else {
        // NOTE(review): rhs is copied a second time here; normalize() above does
        // not appear to mutate its argument, so one copy may suffice — confirm.
        HPending pendResult1 = ((HPending)rhs).copy();
        for (HComponent component : pendResult1.delta) {
          for (HKey trigger : component.ids) {
            HashSet<HKey> set = dependencies.get(trigger);
            if (set == null) {
              set = new HashSet<>();
              dependencies.put(trigger, set);
            }
            set.add(equation.key);
          }
          // NOTE(review): this put is repeated once per component (idempotent);
          // it could live after the loop — confirm before moving.
          pending.put(equation.key, pendResult1);
        }
      }
    }
  }
  // Boolean negation on lattice values; any value other than True/False maps to itself.
  Value negate(Value value) {
    switch (value) {
      case True:
        return Value.False;
      case False:
        return Value.True;
      default:
        return value;
    }
  }
  // Runs the propagation loop to a fixed point and returns the solved map.
  Map<HKey, Value> solve() {
    for (HEquation hEquation : equations.values()) {
      queueEquation(hEquation);
    }
    while (!moving.empty()) {
      HKey id = moving.pop();
      Value value = solved.get(id);
      // Each solved key yields four derived facts: its stable and unstable
      // variants (unstable falls back to unstableValue when id was unstable),
      // plus the negated counterparts of both.
      HKey[] initialPIds = id.stable ? new HKey[]{id, id.invertStability()} : new HKey[]{id.invertStability(), id};
      Value[] initialPVals = id.stable ? new Value[]{value, value} : new Value[]{value, unstableValue};
      HKey[] pIds = new HKey[]{initialPIds[0], initialPIds[1], initialPIds[0].negate(), initialPIds[1].negate()};
      Value[] pVals = new Value[]{initialPVals[0], initialPVals[1], negate(initialPVals[0]), negate(initialPVals[1])};
      for (int i = 0; i < pIds.length; i++) {
        HKey pId = pIds[i];
        Value pVal = pVals[i];
        HashSet<HKey> dIds = dependencies.get(pId);
        if (dIds == null) {
          continue;
        }
        for (HKey dId : dIds) {
          HPending pend = pending.remove(dId);
          if (pend != null) {
            // plug the freshly known value into the dependent equation
            HResult pend1 = substitute(pend, pId, pVal);
            if (pend1 instanceof HFinal) {
              HFinal fi = (HFinal)pend1;
              solved.put(dId, fi.value);
              moving.push(dId);
            }
            else {
              pending.put(dId, (HPending)pend1);
            }
          }
        }
      }
    }
    pending.clear();
    return solved;
  }
  // substitute id -> value into pending; mutates the pending components in place,
  // then re-normalizes the sum.
  HResult substitute(@NotNull HPending pending, @NotNull HKey id, @NotNull Value value) {
    HComponent[] sum = pending.delta;
    for (HComponent intIdComponent : sum) {
      if (intIdComponent.remove(id)) {
        intIdComponent.value = lattice.meet(intIdComponent.value, value);
      }
    }
    return normalize(sum);
  }
  // Joins all fully resolved components; the result is final when every component
  // is resolved, or as soon as the accumulator reaches lattice top (short-circuit).
  @NotNull HResult normalize(@NotNull HComponent[] sum) {
    Value acc = lattice.bot;
    boolean computableNow = true;
    for (HComponent prod : sum) {
      if (prod.isEmpty() || prod.value == lattice.bot) {
        acc = lattice.join(acc, prod.value);
      } else {
        computableNow = false;
      }
    }
    return (acc == lattice.top || computableNow) ? new HFinal(acc) : new HPending(sum);
  }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysml.runtime.matrix;
import java.util.ArrayList;
import java.util.HashSet;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.Counters.Group;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.sysml.conf.ConfigurationManager;
import org.apache.sysml.conf.DMLConfig;
import org.apache.sysml.lops.Lop;
import org.apache.sysml.parser.Expression.DataType;
import org.apache.sysml.runtime.DMLRuntimeException;
import org.apache.sysml.runtime.controlprogram.ParForProgramBlock.PDataPartitionFormat;
import org.apache.sysml.runtime.controlprogram.parfor.stat.InfrastructureAnalyzer;
import org.apache.sysml.runtime.instructions.Instruction;
import org.apache.sysml.runtime.instructions.InstructionUtils;
import org.apache.sysml.runtime.instructions.MRInstructionParser;
import org.apache.sysml.runtime.instructions.MRJobInstruction;
import org.apache.sysml.runtime.instructions.mr.IDistributedCacheConsumer;
import org.apache.sysml.runtime.instructions.mr.MRInstruction;
import org.apache.sysml.runtime.instructions.mr.PickByCountInstruction;
import org.apache.sysml.runtime.matrix.data.InputInfo;
import org.apache.sysml.runtime.matrix.data.MatrixIndexes;
import org.apache.sysml.runtime.matrix.data.NumItemsByEachReducerMetaData;
import org.apache.sysml.runtime.matrix.data.OutputInfo;
import org.apache.sysml.runtime.matrix.data.TaggedMatrixBlock;
import org.apache.sysml.runtime.matrix.data.TaggedMatrixPackedCell;
import org.apache.sysml.runtime.matrix.mapred.GMRCombiner;
import org.apache.sysml.runtime.matrix.mapred.GMRMapper;
import org.apache.sysml.runtime.matrix.mapred.GMRReducer;
import org.apache.sysml.runtime.matrix.mapred.MRBaseForCommonInstructions;
import org.apache.sysml.runtime.matrix.mapred.MRConfigurationNames;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration.ConvertTarget;
import org.apache.sysml.runtime.matrix.mapred.MRJobConfiguration.MatrixChar_N_ReducerGroups;
import org.apache.sysml.runtime.matrix.sort.PickFromCompactInputFormat;
import org.apache.sysml.runtime.util.MapReduceTool;
import org.apache.sysml.runtime.util.UtilFunctions;
import org.apache.sysml.yarn.DMLAppMasterUtils;
/**
 * Runner for the generic MapReduce (G-MR) job: configures a Hadoop job from the
 * given instruction strings (mapper, combiner/aggregate, reducer), wires inputs,
 * outputs and distributed-cache matrices, submits the job, and collects the
 * resulting matrix statistics.
 */
public class GMR
{
	private static final Log LOG = LogFactory.getLog(GMR.class.getName());
	private GMR() {
		//prevent instantiation via private constructor
	}
	/**
	 * Execute job.
	 *
	 * @param inst MR job instruction
	 * @param inputs input matrices, the inputs are indexed by 0, 1, 2, .. based on the position in this string
	 * @param inputInfos the input format information for the input matrices
	 * @param rlens array of number of rows
	 * @param clens array of number of columns
	 * @param brlens array of number of rows in block
	 * @param bclens array of number of columns in block
	 * @param partitioned boolean array of partitioned status
	 * @param pformats array of data partition formats
	 * @param psizes unused by this job
	 * @param recordReaderInstruction record reader instruction
	 * @param instructionsInMapper in Mapper, the set of unary operations that need to be performed on each input matrix
	 * @param aggInstructionsInReducer in Reducer, right after sorting, the set of aggreagte operations
	 *        that need to be performed on each input matrix
	 * @param otherInstructionsInReducer the mixed operations that need to be performed on matrices after the aggregate operations
	 * @param numReducers the number of reducers
	 * @param replication the replication factor for the output
	 * @param jvmReuse if true, reuse JVM
	 * @param resultIndexes the indexes of the result matrices that needs to be outputted
	 * @param dimsUnknownFilePrefix file path prefix when dimensions unknown
	 * @param outputs the names for the output directories, one for each result index
	 * @param outputInfos output format information for the output matrices
	 * @return job return object
	 * @throws Exception if Exception occurs
	 */
	@SuppressWarnings({ "unchecked", "rawtypes" })
	public static JobReturn runJob(MRJobInstruction inst, String[] inputs, InputInfo[] inputInfos, long[] rlens, long[] clens,
			int[] brlens, int[] bclens,
			boolean[] partitioned, PDataPartitionFormat[] pformats, int[] psizes,
			String recordReaderInstruction, String instructionsInMapper, String aggInstructionsInReducer,
			String otherInstructionsInReducer, int numReducers, int replication, boolean jvmReuse, byte[] resultIndexes, String dimsUnknownFilePrefix,
			String[] outputs, OutputInfo[] outputInfos)
		throws Exception
	{
		JobConf job = new JobConf(GMR.class);
		job.setJobName("G-MR");
		boolean inBlockRepresentation=MRJobConfiguration.deriveRepresentation(inputInfos);
		//whether use block representation or cell representation
		MRJobConfiguration.setMatrixValueClass(job, inBlockRepresentation);
		//added for handling recordreader instruction
		//"real*" views start as aliases of the raw inputs and are replaced below
		//when the record-reader instruction removes/remaps inputs
		String[] realinputs=inputs;
		InputInfo[] realinputInfos=inputInfos;
		long[] realrlens=rlens;
		long[] realclens=clens;
		int[] realbrlens=brlens;
		int[] realbclens=bclens;
		byte[] realIndexes=new byte[inputs.length];
		for(byte b=0; b<realIndexes.length; b++)
			realIndexes[b]=b;
		if(recordReaderInstruction!=null && !recordReaderInstruction.isEmpty())
		{
			assert(inputs.length<=2);
			PickByCountInstruction ins=(PickByCountInstruction) PickByCountInstruction.parseInstruction(recordReaderInstruction);
			PickFromCompactInputFormat.setKeyValueClasses(job, (Class<? extends WritableComparable>) inputInfos[ins.input1].inputKeyClass,
					inputInfos[ins.input1].inputValueClass);
			job.setInputFormat(PickFromCompactInputFormat.class);
			PickFromCompactInputFormat.setZeroValues(job, (NumItemsByEachReducerMetaData)inputInfos[ins.input1].metadata);
			if(ins.isValuePick)
			{
				//value-pick: input2 supplies the pick probabilities and is dropped
				//from the job inputs; remaining inputs are remapped to new indexes
				double[] probs=MapReduceTool.readColumnVectorFromHDFS(inputs[ins.input2], inputInfos[ins.input2], rlens[ins.input2],
						clens[ins.input2], brlens[ins.input2], bclens[ins.input2]);
				PickFromCompactInputFormat.setPickRecordsInEachPartFile(job, (NumItemsByEachReducerMetaData) inputInfos[ins.input1].metadata, probs);
				realinputs=new String[inputs.length-1];
				realinputInfos=new InputInfo[inputs.length-1];
				realrlens=new long[inputs.length-1];
				realclens=new long[inputs.length-1];
				realbrlens=new int[inputs.length-1];
				realbclens=new int[inputs.length-1];
				realIndexes=new byte[inputs.length-1];
				byte realIndex=0;
				for(byte i=0; i<inputs.length; i++)
				{
					if(i==ins.input2)
						continue;
					realinputs[realIndex]=inputs[i];
					realinputInfos[realIndex]=inputInfos[i];
					if(i==ins.input1)
					{
						//picked input: dimensions follow the probe vector, cell-wise (1x1 blocks)
						realrlens[realIndex]=rlens[ins.input2];
						realclens[realIndex]=clens[ins.input2];
						realbrlens[realIndex]=1;
						realbclens[realIndex]=1;
						realIndexes[realIndex]=ins.output;
					}else
					{
						realrlens[realIndex]=rlens[i];
						realclens[realIndex]=clens[i];
						realbrlens[realIndex]=brlens[i];
						realbclens[realIndex]=bclens[i];
						realIndexes[realIndex]=i;
					}
					realIndex++;
				}
			}else
			{
				//range-pick: restrict input1 to the inter-quantile range [cst, 1-cst]
				//PickFromCompactInputFormat.setPickRecordsInEachPartFile(job, (NumItemsByEachReducerMetaData) inputInfos[ins.input1].metadata, ins.cst, 1-ins.cst);
				PickFromCompactInputFormat.setRangePickPartFiles(job, (NumItemsByEachReducerMetaData) inputInfos[ins.input1].metadata, ins.cst, 1-ins.cst);
				realrlens[ins.input1]=UtilFunctions.getLengthForInterQuantile((NumItemsByEachReducerMetaData)inputInfos[ins.input1].metadata, ins.cst);
				realclens[ins.input1]=clens[ins.input1];
				realbrlens[ins.input1]=1;
				realbclens[ins.input1]=1;
				realIndexes[ins.input1]=ins.output;
			}
		}
		boolean resetDistCache = setupDistributedCache(job, instructionsInMapper,
			otherInstructionsInReducer, realinputs, realrlens, realclens);
		//set up the input files and their format information
		boolean[] distCacheOnly = getDistCacheOnlyInputs(realIndexes, recordReaderInstruction, instructionsInMapper, aggInstructionsInReducer, otherInstructionsInReducer);
		MRJobConfiguration.setUpMultipleInputs(job, realIndexes, realinputs, realinputInfos, realbrlens, realbclens, distCacheOnly,
				true, inBlockRepresentation? ConvertTarget.BLOCK: ConvertTarget.CELL);
		MRJobConfiguration.setInputPartitioningInfo(job, pformats);
		//set up the dimensions of input matrices
		MRJobConfiguration.setMatricesDimensions(job, realIndexes, realrlens, realclens);
		MRJobConfiguration.setDimsUnknownFilePrefix(job, dimsUnknownFilePrefix);
		//set up the block size
		MRJobConfiguration.setBlocksSizes(job, realIndexes, realbrlens, realbclens);
		//set up unary instructions that will perform in the mapper
		MRJobConfiguration.setInstructionsInMapper(job, instructionsInMapper);
		//set up the aggregate instructions that will happen in the combiner and reducer
		MRJobConfiguration.setAggregateInstructions(job, aggInstructionsInReducer);
		//set up the instructions that will happen in the reducer, after the aggregation instructions
		MRJobConfiguration.setInstructionsInReducer(job, otherInstructionsInReducer);
		//set up the replication factor for the results
		job.setInt(MRConfigurationNames.DFS_REPLICATION, replication);
		//set up preferred custom serialization framework for binary block format
		if( MRJobConfiguration.USE_BINARYBLOCK_SERIALIZATION )
			MRJobConfiguration.addBinaryBlockSerializationFramework( job );
		//set up map/reduce memory configurations (if in AM context)
		DMLConfig config = ConfigurationManager.getDMLConfig();
		DMLAppMasterUtils.setupMRJobRemoteMaxMemory(job, config);
		//set up custom map/reduce configurations
		MRJobConfiguration.setupCustomMRConfigurations(job, config);
		//set up jvm reuse (incl. reuse of loaded dist cache matrices)
		if( jvmReuse )
			job.setNumTasksToExecutePerJvm(-1);
		//set up what matrices are needed to pass from the mapper to reducer
		HashSet<Byte> mapoutputIndexes=MRJobConfiguration.setUpOutputIndexesForMapper(job, realIndexes, instructionsInMapper, aggInstructionsInReducer,
				otherInstructionsInReducer, resultIndexes);
		MatrixChar_N_ReducerGroups ret=MRJobConfiguration.computeMatrixCharacteristics(job, realIndexes,
				instructionsInMapper, aggInstructionsInReducer, null, otherInstructionsInReducer, resultIndexes, mapoutputIndexes, false);
		MatrixCharacteristics[] stats=ret.stats;
		//set up the number of reducers
		MRJobConfiguration.setNumReducers(job, ret.numReducerGroups, numReducers);
		// Print the complete instruction
		if (LOG.isTraceEnabled())
			inst.printCompleteMRJobInstruction(stats);
		// Update resultDimsUnknown based on computed "stats"
		byte[] dimsUnknown = new byte[resultIndexes.length];
		for ( int i=0; i < resultIndexes.length; i++ ) {
			if ( stats[i].getRows() == -1 || stats[i].getCols() == -1 ) {
				dimsUnknown[i] = (byte)1;
			}
			else {
				dimsUnknown[i] = (byte) 0;
			}
		}
		//MRJobConfiguration.updateResultDimsUnknown(job,resultDimsUnknown);
		//set up the multiple output files, and their format information
		MRJobConfiguration.setUpMultipleOutputs(job, resultIndexes, dimsUnknown, outputs, outputInfos, inBlockRepresentation, true);
		// configure mapper and the mapper output key value pairs
		job.setMapperClass(GMRMapper.class);
		if(numReducers==0)
		{
			//map-only job: mapper writes final output directly
			job.setMapOutputKeyClass(Writable.class);
			job.setMapOutputValueClass(Writable.class);
		}else
		{
			job.setMapOutputKeyClass(MatrixIndexes.class);
			if(inBlockRepresentation)
				job.setMapOutputValueClass(TaggedMatrixBlock.class);
			else
				job.setMapOutputValueClass(TaggedMatrixPackedCell.class);
		}
		//set up combiner
		if(numReducers!=0 && aggInstructionsInReducer!=null
				&& !aggInstructionsInReducer.isEmpty())
		{
			job.setCombinerClass(GMRCombiner.class);
		}
		//configure reducer
		job.setReducerClass(GMRReducer.class);
		//job.setReducerClass(PassThroughReducer.class);
		// By default, the job executes in "cluster" mode.
		// Determine if we can optimize and run it in "local" mode.
		// NOTE(review): inputStats is computed but never used below — presumably
		// left over from a removed local-mode optimization; confirm before removal.
		MatrixCharacteristics[] inputStats = new MatrixCharacteristics[inputs.length];
		for ( int i=0; i < inputs.length; i++ ) {
			inputStats[i] = new MatrixCharacteristics(rlens[i], clens[i], brlens[i], bclens[i]);
		}
		//set unique working dir
		MRJobConfiguration.setUniqueWorkingDir(job);
		RunningJob runjob=JobClient.runJob(job);
		Group group=runjob.getCounters().getGroup(MRJobConfiguration.NUM_NONZERO_CELLS);
		for(int i=0; i<resultIndexes.length; i++)
			stats[i].setNonZeros(group.getCounter(Integer.toString(i)));
		//cleanups
		String dir = dimsUnknownFilePrefix + "/" + runjob.getID().toString() + "_dimsFile";
		stats = MapReduceTool.processDimsFiles(dir, stats);
		MapReduceTool.deleteFileIfExistOnHDFS(dir);
		if( resetDistCache )
			MRBaseForCommonInstructions.resetDistCache();
		return new JobReturn(stats, outputInfos, runjob.isSuccessful());
	}
	/**
	 * Configures the distributed cache with all inputs referenced as broadcast
	 * matrices by the mapper/reducer instructions.
	 *
	 * @param job job configuration to populate
	 * @param instMap mapper instruction string (may be null/empty)
	 * @param instRed reducer instruction string (may be null/empty)
	 * @param inputs input paths, indexed by input byte index
	 * @param rlens row counts per input (unused here)
	 * @param clens column counts per input (unused here)
	 * @return true if the in-memory dist cache was reset (local mode) and the
	 *         caller must reset it again after the job
	 * @throws DMLRuntimeException if instruction parsing fails
	 */
	private static boolean setupDistributedCache(JobConf job, String instMap, String instRed, String[] inputs, long[] rlens, long[] clens)
		throws DMLRuntimeException
	{
		//concatenate mapper and reducer instructions
		String allInsts = (instMap!=null && !instMap.trim().isEmpty() ) ? instMap : null;
		if( allInsts==null )
			allInsts = instRed;
		else if( instRed!=null && !instRed.trim().isEmpty() )
			allInsts = allInsts + Instruction.INSTRUCTION_DELIM + instRed;
		//setup distributed cache inputs (at least one)
		if( allInsts != null && !allInsts.trim().isEmpty()
			&& InstructionUtils.isDistributedCacheUsed(allInsts) )
		{
			//get all indexes of distributed cache inputs
			ArrayList<Byte> indexList = new ArrayList<Byte>();
			String[] inst = allInsts.split(Instruction.INSTRUCTION_DELIM);
			for( String tmp : inst ) {
				if( InstructionUtils.isDistributedCacheUsed(tmp) )
				{
					ArrayList<Byte> tmpindexList = new ArrayList<Byte>();
					MRInstruction mrinst = MRInstructionParser.parseSingleInstruction(tmp);
					if( mrinst instanceof IDistributedCacheConsumer )
						((IDistributedCacheConsumer)mrinst).addDistCacheIndex(tmp, tmpindexList);
					//copy distinct indexes only (prevent redundant add to distcache)
					for( Byte tmpix : tmpindexList )
						if( !indexList.contains(tmpix) )
							indexList.add(tmpix);
				}
			}
			//construct index and path strings
			ArrayList<String> pathList = new ArrayList<String>(); // list of paths to be placed in Distributed cache
			StringBuilder indexString = new StringBuilder(); // input indices to be placed in Distributed Cache (concatenated)
			StringBuilder pathString = new StringBuilder(); // input paths to be placed in Distributed Cache (concatenated)
			for( byte index : indexList )
			{
				if( pathList.size()>0 ) {
					indexString.append(Instruction.INSTRUCTION_DELIM);
					pathString.append(Instruction.INSTRUCTION_DELIM);
				}
				pathList.add( inputs[index] );
				indexString.append(index);
				pathString.append(inputs[index]);
			}
			//configure mr job with distcache indexes
			MRJobConfiguration.setupDistCacheInputs(job, indexString.toString(), pathString.toString(), pathList);
			//clean in-memory cache (prevent job interference in local mode)
			if( InfrastructureAnalyzer.isLocalMode(job) ) {
				MRBaseForCommonInstructions.resetDistCache();
				return true;
			}
		}
		return false;
	}
	/**
	 * Determine which indices are only used as inputs through distributed cache and hence would
	 * be redundant job inputs.
	 *
	 * @param realIndexes array of byte indexes
	 * @param inst1 instruction 1
	 * @param inst2 instruction 2
	 * @param inst3 instruction 3
	 * @param inst4 instruction 4
	 * @return boolean array, one flag per real index: true if that input is
	 *         consumed exclusively via the distributed cache
	 * @throws DMLRuntimeException if DMLRuntimeException occurs
	 */
	private static boolean[] getDistCacheOnlyInputs(byte[] realIndexes, String inst1, String inst2, String inst3, String inst4)
		throws DMLRuntimeException
	{
		boolean[] ret = new boolean[realIndexes.length];
		String[] inst = new String[]{inst1, inst2, inst3, inst4};
		//for all result indexes
		for( int i=0; i<ret.length; i++ )
		{
			byte index = realIndexes[i];
			String indexStr = index+Lop.DATATYPE_PREFIX+DataType.MATRIX.toString();
			boolean distCacheOnly = true;
			boolean use = false;
			for( String linst : inst ){ //for all instruction categories
				if(linst!=null && !linst.trim().isEmpty()){
					String[] alinst = linst.split(Lop.INSTRUCTION_DELIMITOR);
					for( String tmp : alinst ) //for each individual instruction
					{
						boolean lcache = false;
						if( InstructionUtils.isDistributedCacheUsed(tmp) ) {
							MRInstruction mrinst = MRInstructionParser.parseSingleInstruction(tmp);
							if( mrinst instanceof IDistributedCacheConsumer )
								lcache = ((IDistributedCacheConsumer)mrinst).isDistCacheOnlyIndex(tmp, index);
						}
						distCacheOnly &= (lcache || !tmp.contains(indexStr));
						use |= tmp.contains(indexStr);
					}
				}
			}
			//probe for use in order to account for write only jobs
			ret[i] = distCacheOnly && use;
		}
		return ret;
	}
}
| |
package org.osgl.util;
/*-
* #%L
* Java Tool
* %%
* Copyright (C) 2014 - 2017 OSGL (Open Source General Library)
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.osgl.$;
import org.osgl.exception.InvalidArgException;
import org.osgl.exception.NotAppliedException;
import java.io.IOException;
import java.io.InvalidObjectException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.util.Comparator;
import java.util.Iterator;
/**
* Implement {@link C.Range} using {@link LazySeq}.
*/
/**
 * Implement {@link C.Range} using {@link LazySeq}: a lazily evaluated range
 * [from, to) driven by an order comparator and a step function.
 * toString() renders it as "[from,to)", i.e. `to` is exclusive.
 */
public class LazyRange<ELEMENT> extends LazySeq<ELEMENT>
        implements C.Range<ELEMENT>, Serializable {
    // exclusive upper endpoint of the range
    private final ELEMENT to;
    private final Comparator<ELEMENT> order;
    // step(element, n): move n units along the range direction
    private final $.Func2<ELEMENT, Integer, ELEMENT> step;
    // sign of order.compare(from, to); fixes the traversal direction
    protected final int ordering;
    // element count when from/to are Numbers; -1 means "unknown" (size() throws)
    private final int size;
    // step curried to move one unit toward `to`
    protected final $.F1<ELEMENT, ELEMENT> next;
    // step curried to move one unit away from `to`
    protected final $.F1<ELEMENT, ELEMENT> prev;
    public LazyRange(final ELEMENT from, final ELEMENT to, final $.Func2<ELEMENT, Integer, ELEMENT> step) {
        this(from, to, $.F.NATURAL_ORDER, step);
    }
    public LazyRange(final ELEMENT from, final ELEMENT to, final Comparator<ELEMENT> order,
                     final $.Func2<ELEMENT, Integer, ELEMENT> step
    ) {
        E.NPE(from, to, order, step);
        ordering = N.sign(order.compare(from, to));
        boolean eq = $.eq(from, to);
        E.invalidArgIf(eq, "[from] shall not be equals to [to]");
        // check if step align with order: one step toward `to` must move in the
        // same direction the comparator reports
        ELEMENT next = step.apply(from, -ordering);
        int ordering2 = order.compare(from, next);
        if (N.sign(ordering2) != N.sign(ordering)) {
            E.invalidArg("step function doesn't align to the direction between [from] and [to]");
        }
        // find out the size of the range (only computable for numeric endpoints)
        if (from instanceof Number) {
            int n0 = ((Number)from).intValue();
            int n1 = ((Number)to).intValue();
            int n2 = ((Number)next).intValue();
            int distance = n1 - n0;
            int unit = n2 - n0;
            int mod = distance%unit;
            if (mod > 0) {
                // NOTE(review): for distance not divisible by unit this yields
                // (distance + mod)/unit - 1, which looks one short of
                // ceil(distance/unit) — confirm intended semantics for
                // non-aligned steps (e.g. [0,5) step 2).
                size = (distance + mod) / unit - 1;
            } else {
                size = distance / unit;
            }
        } else {
            size = -1;
        }
        this.to = to;
        this.head = from;
        this.order = order;
        this.step = step;
        $.F2<ELEMENT, Integer, ELEMENT> f2 = $.f2(step());
        this.next = f2.curry(-ordering);
        this.prev = f2.curry(ordering);
        // lazily materialize the tail: advance `from` one step, or end at Nil
        this.tail = new $.F0<C.Sequence<ELEMENT>>() {
            @Override
            public C.Sequence<ELEMENT> apply() throws NotAppliedException, $.Break {
                if ($.eq(from, to)) {
                    return Nil.seq();
                } else {
                    return of(LazyRange.this.next.apply(from), to);
                }
            }
        };
        this.setFeature(C.Feature.LIMITED);
    }
    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        // two ranges are equal when endpoints, order and step all match
        if (obj instanceof C.Range) {
            C.Range<ELEMENT> that = (C.Range<ELEMENT>) obj;
            return $.eq(that.from(), from()) && $.eq(that.to(), to()) && $.eq(that.order(), order()) && $.eq(that.step(), step());
        }
        return false;
    }
    @Override
    public int hashCode() {
        return $.hc(from(), to(), order(), step());
    }
    @Override
    public String toString() {
        return new StringBuilder("[").append(from()).append(",").append(to()).append(")").toString();
    }
    @Override
    public int size() throws UnsupportedOperationException {
        // size is only known for numeric ranges; -1 marks "unknown"
        if (size < 0) {
            throw new UnsupportedOperationException();
        } else {
            return size;
        }
    }
    // factory used by all derived ranges so subclasses keep order/step
    protected LazyRange<ELEMENT> of(ELEMENT from, ELEMENT to) {
        return new LazyRange<ELEMENT>(from, to, order, step);
    }
    public final ELEMENT from() {
        return head();
    }
    @Override
    public final ELEMENT to() {
        return to;
    }
    @Override
    public Comparator<ELEMENT> order() {
        return order;
    }
    @Override
    public $.Func2<ELEMENT, Integer, ELEMENT> step() {
        return step;
    }
    @Override
    public C.Range<ELEMENT> merge(C.Range<ELEMENT> r2) throws InvalidArgException {
        // merging requires identical step/order and the same direction
        if ($.ne(step(), r2.step()) || $.ne(order(), r2.order())) {
            throw E.invalidArg("r2 and this range does not have the same step or order operator");
        }
        int ordering2 = N.sign(order.compare(r2.from(), r2.to()));
        if (ordering2 != ordering) {
            throw E.invalidArg("r2 and this range doesn't have the same ordering direction");
        }
        // compare using inclusive last elements (to1/to2 are one step before `to`)
        ELEMENT from1 = from(), to1 = step().apply(to, -1), from2 = r2.from(), to2 = r2.step().apply(r2.to(), -1);
        boolean fromInThis = contains(from2), toInThis = contains(to2);
        if (fromInThis && toInThis) {
            return this;
        }
        boolean fromInThat = r2.contains(from1), toInThat = r2.contains(to1);
        if (fromInThat && toInThat) {
            return r2;
        }
        // partially overlapping or exactly adjacent ranges are joined end to end
        if ((fromInThis && toInThat) || ($.eq(to(), from2))) {
            return of(from1, r2.to());
        }
        if ((toInThis && fromInThat) || ($.eq(from1, r2.to()))) {
            return of(from2, to);
        }
        throw E.invalidArg("r2 and this range cannot be merged together");
    }
    @Override
    public ELEMENT last() throws UnsupportedOperationException {
        // `to` is exclusive, so the last element is one step back from it
        return prev.apply(to);
    }
    @Override
    public C.Range<ELEMENT> tail() throws UnsupportedOperationException {
        ELEMENT from = next.apply(from());
        if ($.eq(from, to)) {
            return Nil.range();
        }
        return of(next.apply(from()), to);
    }
    @Override
    public C.Range<ELEMENT> head(int n) {
        return take(n);
    }
    @Override
    public C.Range<ELEMENT> tail(int n) throws UnsupportedOperationException {
        E.illegalArgumentIf(n <= 0, "n must be a positive int");
        return of(step().apply(to, -n), to);
    }
    @Override
    public C.Range<ELEMENT> take(int n) {
        E.invalidArgIf(n <= 0, "n must be a positive int");
        ELEMENT from = from();
        return of(from, step().apply(from, n));
    }
    @Override
    public C.Range<ELEMENT> drop(int n) {
        E.invalidArgIf(n <= 0, "n must be a positive int");
        ELEMENT from = from();
        return of(step().apply(from, n), to);
    }
    @Override
    public C.Range<ELEMENT> reverse() throws UnsupportedOperationException {
        // walk backward: endpoints shift one step since `to` is exclusive
        return of(prev.apply(to), prev.apply(from()));
    }
    @Override
    public Iterator<ELEMENT> reverseIterator() {
        return reverse().iterator();
    }
    @Override
    public <R> R reduceRight(R identity, $.Func2<R, ELEMENT, R> accumulator) {
        return reverse().reduceLeft(identity, accumulator);
    }
    @Override
    public LazyRange<ELEMENT> accept($.Visitor<? super ELEMENT> visitor) {
        super.accept(visitor);
        return this;
    }
    @Override
    public LazyRange<ELEMENT> forEach($.Visitor<? super ELEMENT> visitor) {
        return accept(visitor);
    }
    @Override
    public LazyRange<ELEMENT> each($.Visitor<? super ELEMENT> visitor) {
        return accept(visitor);
    }
    @Override
    public LazyRange<ELEMENT> acceptLeft($.Visitor<? super ELEMENT> visitor) {
        super.acceptLeft(visitor);
        return this;
    }
    @Override
    public LazyRange<ELEMENT> acceptRight($.Visitor<? super ELEMENT> visitor) {
        reverse().acceptLeft(visitor);
        return this;
    }
    @Override
    public $.Option<ELEMENT> reduceRight($.Func2<ELEMENT, ELEMENT, ELEMENT> accumulator) {
        return reverse().reduceLeft(accumulator);
    }
    @Override
    public $.Option<ELEMENT> findLast($.Function<? super ELEMENT, Boolean> predicate) {
        return reverse().findFirst(predicate);
    }
    @Override
    public boolean contains(ELEMENT t) {
        E.NPE(t);
        if (0 == ordering) {
            return $.eq(to, t);
        }
        ELEMENT from = from();
        if ($.eq(from, t)) {
            return true;
        }
        int withFrom = order.compare(t, from);
        if (ordering < 0 && withFrom < 0) {
            return false;
        }
        // strictly between the endpoints: signs relative to from/to must differ.
        // NOTE(review): assumes every element between from and to (per the
        // comparator) is reachable by the step function — confirm for sparse steps.
        int withTo = order.compare(t, to);
        return withFrom * withTo < 0;
    }
    @Override
    public boolean containsAll(C.Range<ELEMENT> range) {
        E.NPE(range);
        // check the other range's first and (inclusive) last elements
        return contains(range.from()) && contains(prev.apply(range.to()));
    }
    // serialization-proxy pattern: direct deserialization is rejected
    private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
        throw new InvalidObjectException("Proxy required");
    }
    private static class SerializationProxy<ELEMENT> implements Serializable {
        ELEMENT from;
        ELEMENT to;
        Comparator<ELEMENT> order;
        $.Func2<ELEMENT, Integer, ELEMENT> step;
        SerializationProxy(LazyRange<ELEMENT> r) {
            from = r.from();
            to = r.to();
            order = r.order;
            step = r.step;
        }
        // rebuild the real range from its four defining parts on deserialization
        private Object readResolve() {
            return new LazyRange<ELEMENT>(from, to, order, step);
        }
        private static final long serialVersionUID = 21864874113505L;
    }
    private Object writeReplace() {
        return new SerializationProxy<ELEMENT>(this);
    }
}
| |
package org.goodev.discourse;
import android.content.ContentValues;
import android.database.Cursor;
import android.os.Bundle;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentActivity;
import android.support.v4.app.FragmentManager;
import android.support.v4.app.NavUtils;
import android.text.TextUtils;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.Toast;
import org.goodev.discourse.contentprovider.Provider;
import org.goodev.discourse.database.tables.SiteTable;
import org.goodev.discourse.database.tables.UserInfoTable;
import org.goodev.discourse.ui.AddSiteFragment;
import org.goodev.discourse.ui.AddSiteFragment.AddSiteListener;
import org.goodev.discourse.ui.AddUserFragment.AddUserListener;
import org.goodev.discourse.ui.CheckLoginInfoFragment;
import org.goodev.discourse.ui.CheckLoginInfoFragment.CheckLoginInfoCallback;
import org.goodev.discourse.ui.CheckUrlFragment;
import org.goodev.discourse.ui.CheckUrlFragment.CheckUrlCallback;
import org.goodev.discourse.ui.ProgressFragment;
import org.goodev.discourse.ui.SettingsFragment;
import org.goodev.discourse.utils.L;
import org.goodev.discourse.utils.MCrypt;
import org.goodev.discourse.utils.Utils;
import java.net.MalformedURLException;
import java.net.URL;
public class SettingsActivity extends FragmentActivity implements AddSiteListener, AddUserListener, CheckUrlCallback, CheckLoginInfoCallback {
private static final String FRAG_TAG = "fragment_tag";
private Fragment mFragment;
private String mCurrentSiteUrl;
private String mName;
private String mPassword;
private boolean mRememberLoginInfo;
private CheckUrlFragment mCheckUrlFragment;
private String mSiteName;
private String mSiteUrl;
private ProgressFragment mProgressFragment;
private CheckLoginInfoFragment mCheckLoginFragment;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
// setContentView(R.layout.activity_settings);
setupActionBar();
if (savedInstanceState == null) {
mFragment = new SettingsFragment();
getSupportFragmentManager().beginTransaction().add(android.R.id.content, mFragment, FRAG_TAG).commit();
} else {
mFragment = getSupportFragmentManager().findFragmentByTag(FRAG_TAG);
}
}
private void setupActionBar() {
getActionBar().setDisplayHomeAsUpEnabled(true);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
getMenuInflater().inflate(R.menu.settings, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case android.R.id.home:
// This ID represents the Home or Up button. In the case of this
// activity, the Up button is shown. Use NavUtils to allow users
// to navigate up one level in the application structure. For
// more details, see the Navigation pattern on Android Design:
//
// http://developer.android.com/design/patterns/navigation.html#up-vs-back
//
NavUtils.navigateUpFromSameTask(this);
return true;
case R.id.action_add_website:
showAddSiteDialog();
return true;
}
return super.onOptionsItemSelected(item);
}
private void showAddSiteDialog() {
new AddSiteFragment().show(getSupportFragmentManager(), "add_site_tag");
}
@Override
public void add(String name, String url) {
if (TextUtils.isEmpty(url)) {
Toast.makeText(this, R.string.add_site_url_is_empty, Toast.LENGTH_SHORT).show();
return;
}
if (TextUtils.isEmpty(name)) {
name = url;
}
if (url.startsWith(Utils.HTTP_PREFIX) || url.startsWith(Utils.HTTPS_PREFIX)) {
// nothing
} else {
url = Utils.HTTP_PREFIX + url;
}
if (!url.endsWith(Utils.SLASH)) {
url = url + Utils.SLASH;
}
mSiteName = name;
mSiteUrl = url;
try {
URL urlTest = new URL(url);
} catch (MalformedURLException e) {
Toast.makeText(this, R.string.add_site_url_error, Toast.LENGTH_LONG).show();
return;
}
FragmentManager manager = getSupportFragmentManager();
if (mCheckUrlFragment != null) {
manager.beginTransaction().remove(mCheckUrlFragment).commit();
}
mCheckUrlFragment = new CheckUrlFragment();
Bundle args = new Bundle();
args.putString(Utils.EXTRA_URL, url);
mCheckUrlFragment.setArguments(args);
manager.beginTransaction().add(mCheckUrlFragment, "check_url").commit();
}
    /**
     * Called before the background URL check starts: shows an indeterminate
     * progress dialog that onResult() later removes.
     */
    @Override
    public void onPreExecute() {
        mProgressFragment = new ProgressFragment();
        mProgressFragment.show(getSupportFragmentManager(), "progress");
    }
@Override
public void onResult(boolean ok) {
getSupportFragmentManager().beginTransaction().remove(mProgressFragment).commit();
if (ok) {
saveSite();
} else {
Toast.makeText(this, getString(R.string.add_site_url_error, mSiteUrl), Toast.LENGTH_LONG).show();
}
}
private void saveSite() {
Cursor cursor = getContentResolver().query(Provider.SITE_CONTENT_URI, SiteTable.ALL_COLUMNS, SiteTable.URL + " = \"" + mSiteUrl + "\"", null, null);
if (cursor.getCount() > 0) {
Toast.makeText(this, getString(R.string.add_site_is_exist, mSiteUrl), Toast.LENGTH_LONG).show();
return;
} else {
ContentValues values = new ContentValues();
values.put(SiteTable.TITLE, mSiteName);
values.put(SiteTable.URL, mSiteUrl);
getContentResolver().insert(Provider.SITE_CONTENT_URI, values);
}
}
    /**
     * Records the site the user is currently interacting with; used as the
     * target of subsequent login checks.
     *
     * @param url the active site's URL
     */
    public void setCurrentSite(String url) {
        mCurrentSiteUrl = url;
    }
@Override
public void add(String name, String password, boolean remember) {
if (TextUtils.isEmpty(name) || TextUtils.isEmpty(password)) {
Toast.makeText(this, R.string.name_and_password_is_empty, Toast.LENGTH_LONG).show();
return;
}
if (TextUtils.isEmpty(mCurrentSiteUrl)) {
return;
}
mRememberLoginInfo = remember;
mName = name;
mPassword = password;
FragmentManager manager = getSupportFragmentManager();
if (mCheckLoginFragment != null) {
manager.beginTransaction().remove(mCheckLoginFragment).commit();
}
mCheckLoginFragment = new CheckLoginInfoFragment();
Bundle args = new Bundle();
args.putString(Utils.EXTRA_URL, mCurrentSiteUrl);
args.putString(Utils.EXTRA_NAME, name);
args.putString(Utils.EXTRA_PASSWORD, password);
mCheckLoginFragment.setArguments(args);
manager.beginTransaction().add(mCheckLoginFragment, "check_user").commit();
}
private void saveLoginInfo(String name, String password) {
App.setUserInfo(name, password);
if (mRememberLoginInfo) {
Cursor cursor = getContentResolver().query(Provider.SITE_CONTENT_URI, SiteTable.ALL_COLUMNS, SiteTable.URL + " = \"" + mCurrentSiteUrl + "\"", null, null);
if (cursor.moveToFirst()) {
long id = cursor.getLong(cursor.getColumnIndex(SiteTable.ID));
ContentValues values = new ContentValues();
values.put(UserInfoTable.NAME, name);
String crypto;
try {
crypto = MCrypt.bytesToHex(new MCrypt().encrypt(password));
L.i("p: '%s' c: '%s'", password, crypto);
} catch (Exception e) {
crypto = password;
}
values.put(UserInfoTable.PASSWORD, crypto);
values.put(UserInfoTable.SITEID, id);
values.put(UserInfoTable.SITEURL, mCurrentSiteUrl);
getContentResolver().insert(Provider.USERINFO_CONTENT_URI, values);
}
}
}
    /**
     * Called before the background credential check starts: shows a progress
     * dialog that onCheckResult() later removes.
     */
    @Override
    public void onPreCheckExecute() {
        mProgressFragment = new ProgressFragment();
        mProgressFragment.show(getSupportFragmentManager(), "progress");
    }
@Override
public void onCheckResult(boolean ok) {
getSupportFragmentManager().beginTransaction().remove(mProgressFragment).commit();
if (ok) {
saveLoginInfo(mName, mPassword);
} else {
Toast.makeText(this, R.string.name_and_password_is_error, Toast.LENGTH_LONG).show();
}
}
}
| |
package it.unibz.krdb.obda.owlrefplatform.questdb;
/*
* #%L
* ontop-quest-db
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.krdb.obda.model.OBDAException;
import it.unibz.krdb.obda.model.TupleResultSet;
import it.unibz.krdb.obda.owlrefplatform.core.QuestDBStatement;
import it.unibz.krdb.obda.owlrefplatform.questdb.QuestDB.StoreStatus;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URI;
import java.sql.SQLException;
import java.text.DecimalFormat;
import java.util.List;
import java.util.Properties;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Interactive command-line terminal ("questcmd") for a local QuestDB
 * instance. A background thread reads input from stdin in a loop: input
 * terminated by ';' is executed as a store-management command or a query
 * against the active store, while backslash commands ("\q", "\?",
 * "\c storename") control the shell itself.
 */
public class QuestDBCMD {

    QuestDB dbInstance = null;

    /** Name of the active store, or null when no store is connected. */
    String currentstore = null;

    BufferedReader in = new BufferedReader(new InputStreamReader(System.in));

    private static final Logger log = LoggerFactory.getLogger(QuestDBCMD.class);

    /* Human-readable command templates (informational; not parsed). */
    private static final String CMD_DEFINE_PREFIX = "DEFINE PREFIX prefix URI";
    private static final String CMD_DELETE_PREFIX = "DROP PREFIX prefix";
    private static final String CMD_DELETE_ALL_PREFIXES = "DROP ALL PREFIXES";
    private static final String CMD_DEFINE_BASE = "DEFINE BASE URI";
    private static final String CMD_SET = "SET key=value";
    private static final String CMD_SHOW_PARAMETERS = "SHOW PARAMETERS";
    private static final String CMD_SHOW = "SHOW";
    private static final String CMD_SHUTDOWN = "SHUTDOWN";

    /* Case-insensitive keyword fragments used to build the command regexes. */
    private static final String RX_DROP = "[Dd][Rr][Oo][Pp]";
    private static final String RX_START = "[Ss][Tt][Aa][Rr][Tt]";
    private static final String RX_STOP = "[Ss][Tt][Oo][Pp]";
    private static final String RX_LIST = "[Ll][Ii][Ss][Tt]";
    private static final String RX_FAST = "[Ff][Aa][Ss][Tt]";
    private static final String RX_LOAD = "[Ll][Oo][Aa][Dd]";
    private static final String RX_SET = "[Ss][Ee][Tt]";
    private static final String RX_STORES = "[Ss][Tt][Oo][Rr][Ee][Ss]";
    private static final String RX_CREATE = "[Cc][Rr][Ee][Aa][Tt][Ee]";
    private static final String RX_VIRTUAL = "[Vv][Ii][Rr][Tt][Uu][Aa][Ll]";
    private static final String RX_INDEX = "[Ii][Nn][Dd][Ee][Xx]";
    private static final String RX_TBOX = "[Tt][Bb][Oo][Xx]";
    private static final String RX_WITH = "[Ww][Ii][Tt][Hh]";
    private static final String RX_PARAMETERS = "[Pp][Aa][Rr][Aa][Mm][Ss]";
    private static final String RX_MAP = "[Mm][Aa][Pp]";
    private static final String RX_STORE = "[Ss][Tt][Oo][Rr][Ee]";
    private static final String RX_WS = "[\\s]+";
    private static final String RX_QUOTED_PARAMETER = "\"([^\"\\r\\n\\t]*)\"";
    private static final String RX_NAME_PARAMETER = "([\\w]+)";

    /* CREATE STORE name WITH TBOX "tboxfile" PARAMS "paramsfile" */
    private static final String CMD_CREATE_STORE = RX_CREATE + RX_WS + RX_STORE + RX_WS + RX_NAME_PARAMETER + RX_WS + RX_WITH + RX_WS
            + RX_TBOX + RX_WS + RX_QUOTED_PARAMETER + RX_WS + RX_PARAMETERS + RX_WS + RX_QUOTED_PARAMETER;
    /* CREATE VIRTUAL STORE name WITH TBOX "tboxfile" MAP "paramsfile" */
    private static final String CMD_CREATE_VIRTUAL_STORE = RX_CREATE + RX_WS + RX_VIRTUAL + RX_WS + RX_STORE + RX_WS + RX_NAME_PARAMETER
            + RX_WS + RX_WITH + RX_WS + RX_TBOX + RX_WS + RX_QUOTED_PARAMETER + RX_WS + RX_MAP + RX_WS + RX_QUOTED_PARAMETER;
    private static final Pattern PTR_CREATE_STORE = Pattern.compile(CMD_CREATE_STORE);
    private static final Pattern PTR_CREATE_VIRTUAL_STORE = Pattern.compile(CMD_CREATE_VIRTUAL_STORE);
    /* DROP STORE name */
    private static final String CMD_DROP_STORE = RX_DROP + RX_WS + RX_STORE + RX_WS + RX_NAME_PARAMETER;
    private static final Pattern PTR_DROP_STORE = Pattern.compile(CMD_DROP_STORE);
    /* START STORE name */
    private static final String CMD_START_STORE = RX_START + RX_WS + RX_STORE + RX_WS + RX_NAME_PARAMETER;
    private static final Pattern PTR_START_STORE = Pattern.compile(CMD_START_STORE);
    /* STOP STORE name */
    private static final String CMD_STOP_STORE = RX_STOP + RX_WS + RX_STORE + RX_WS + RX_NAME_PARAMETER;
    private static final Pattern PTR_STOP_STORE = Pattern.compile(CMD_STOP_STORE);
    /* LIST STORES */
    private static final String CMD_LIST_STORES = RX_LIST + RX_WS + RX_STORES;
    private static final Pattern PTR_LIST_STORES = Pattern.compile(CMD_LIST_STORES);
    /* LOAD "datafile" */
    private static final String CMD_LOAD = RX_LOAD + RX_WS + RX_QUOTED_PARAMETER;
    private static final Pattern PTR_LOAD = Pattern.compile(CMD_LOAD);
    /* LOAD "datafile" FAST */
    private static final String CMD_LOAD_FAST = RX_LOAD + RX_WS + RX_QUOTED_PARAMETER + RX_WS + RX_FAST;
    private static final Pattern PTR_LOAD_FAST = Pattern.compile(CMD_LOAD_FAST);
    /* CREATE INDEX */
    private static final String CMD_CREATE_INDEX = RX_CREATE + RX_WS + RX_INDEX;
    private static final Pattern PTR_CREATE_INDEX = Pattern.compile(CMD_CREATE_INDEX);
    /* DROP INDEX */
    private static final String CMD_DROP_INDEX = RX_DROP + RX_WS + RX_INDEX;
    private static final Pattern PTR_DROP_INDEX = Pattern.compile(CMD_DROP_INDEX);

    /* FOR CLASSIC STORES */
    public static String NEWLINE = System.getProperty("line.separator");

    /**
     * Boots the shell: initializes the local QuestDB instance, prints the
     * banner and help, and starts the interactive command loop on its own
     * thread.
     */
    public QuestDBCMD() {
        initialize();
        printhelp();
        Thread cmdloop = new Thread() {
            public void run() {
                processCommands();
            }
        };
        cmdloop.start();
    }

    /** Infinite read-eval loop; individual command failures are logged. */
    private void processCommands() {
        while (true) {
            printprompt1();
            String cmd = "";
            try {
                cmd = readCommand();
                processCommand(cmd);
            } catch (Exception e) {
                log.error(e.getMessage(), e);
            }
        }
    }

    /**
     * Dispatches one raw input string: ';'-terminated input is executed as a
     * query/command (and timed), otherwise the backslash shell commands are
     * handled.
     */
    private void processCommand(String cmd) throws Exception {
        cmd = cmd.trim();
        if (cmd.charAt(cmd.length() - 1) == ';') {
            startTimer();
            processQuery(cmd.substring(0, cmd.length() - 1));
            stopTimer();
        } else if (cmd.equals("\\q")) {
            System.exit(0);
        } else if (cmd.equals("\\?") || cmd.equals("help")) {
            printhelp();
        } else if (cmd.startsWith("\\c")) {
            String[] s = cmd.split(" ");
            if (s.length < 2) {
                // Fix: return here; the original fell through to s[1] and
                // threw ArrayIndexOutOfBoundsException.
                printInvalidCommand();
                return;
            }
            String store = s[1].trim();
            if (store == null || store.length() == 0 || !dbInstance.exists(store)) {
                // Fix: do not connect to a store that does not exist (the
                // original printed the error and connected anyway).
                System.out.println("\nCannot connect to the provided store. Does it exist?.");
                return;
            }
            connect(store);
        } else {
            printInvalidCommand();
        }
    }

    private void printInvalidCommand() {
        System.out.println("\nInvalid command. Try \\? for help.");
    }

    /**
     * Matches the query text against the known store-management commands and
     * executes the first match; anything unrecognized is sent to the active
     * store as a query.
     */
    private void processQuery(String query) throws Exception {
        if (PTR_CREATE_STORE.matcher(query).matches()) {
            processCreateStore(query);
        } else if (PTR_CREATE_VIRTUAL_STORE.matcher(query).matches()) {
            processCreateVirtualStore(query);
        } else if (PTR_DROP_STORE.matcher(query).matches()) {
            processDropStore(query);
        } else if (PTR_START_STORE.matcher(query).matches()) {
            processStartStore(query);
        } else if (PTR_STOP_STORE.matcher(query).matches()) {
            // Fix: this branch previously called processStartStore, so
            // "STOP STORE x" (re)started the store and processStopStore was
            // dead code.
            processStopStore(query);
        } else if (PTR_LIST_STORES.matcher(query).matches()) {
            processListStores();
        } else if (PTR_LOAD_FAST.matcher(query).matches()) {
            processLoad(query, true);
        } else if (PTR_LOAD.matcher(query).matches()) {
            processLoad(query, false);
        } else if (PTR_CREATE_INDEX.matcher(query).matches()) {
            processCreateIndexes();
        } else if (PTR_DROP_INDEX.matcher(query).matches()) {
            processDropIndexes();
        } else if (query.startsWith("GET SQL ")) {
            if (!checkStoreIsSet()) {
                System.out.println("\nYou must set an active store first using the command \"\\c storename\"");
                return;
            }
            QuestDBStatement st = dbInstance.getStatement(currentstore);
            String sql = st.getSQL(query.substring("GET SQL ".length()));
            st.close();
            System.out.println("SQL:");
            System.out.println(sql);
        } else if (query.startsWith("GET REF ")) {
            if (!checkStoreIsSet()) {
                System.out.println("\nYou must set an active store first using the command \"\\c storename\"");
                return;
            }
            QuestDBStatement st = dbInstance.getStatement(currentstore);
            // Fix: strip the full "GET REF " prefix (the original used
            // "GET REF".length() and passed a leading space) and close the
            // statement like the GET SQL branch does.
            String rew = st.getRewriting(query.substring("GET REF ".length()));
            st.close();
            System.out.println("Reformulation:");
            System.out.println(rew);
        } else {
            if (!checkStoreIsSet()) {
                System.out.println("\nYou must set an active store first using the command \"\\c storename\"");
                return;
            }
            try {
                QuestDBStatement st = dbInstance.getStatement(currentstore);
                TupleResultSet result = (TupleResultSet) st.execute(query);
                int count = printResultSet(result);
                System.out.println(count + " rows.");
                result.close();
                st.close();
            } catch (Exception e) {
                System.out.println("\nError executing query: " + e.getMessage());
            }
        }
    }

    /**
     * Prints the result set header and all rows to stdout.
     *
     * @return the number of rows printed
     */
    private int printResultSet(TupleResultSet result) throws OBDAException {
        int cols = result.getColumnCount();
        List<String> signature = result.getSignature();
        for (int i = 0; i < signature.size(); i++) {
            if (i > 0)
                System.out.print(" | ");
            System.out.print(signature.get(i));
        }
        int count = 0;
        System.out.println("\n---------------------------------");
        while (result.nextRow()) {
            // Result columns are 1-based.
            for (int i = 1; i < cols + 1; i++) {
                if (i > 1)
                    System.out.print(" | ");
                System.out.print(result.getConstant(i));
            }
            System.out.println();
            count += 1;
        }
        return count;
    }

    /** Drops the indexes of the active store, reporting failures to stdout. */
    private void processDropIndexes() {
        try {
            dbInstance.dropIndexes(currentstore);
            // Fix: typo "droped" -> "dropped".
            System.out.println("\nIndex dropped.");
        } catch (Exception e) {
            System.out.println("\nUnable to drop indexes.");
            log.error(e.getMessage());
        }
    }

    /** Creates the indexes of the active store, reporting failures to stdout. */
    private void processCreateIndexes() {
        try {
            dbInstance.createIndexes(currentstore);
            System.out.println("\nIndex created.");
        } catch (Exception e) {
            System.out.println("\nUnable to create indexes.");
            log.error(e.getMessage());
        }
    }

    /* Simple wall-clock timer for reporting query duration. */
    private long start = 0;
    DecimalFormat df = new DecimalFormat("##.###");

    private void startTimer() {
        start = System.nanoTime();
    }

    private void stopTimer() {
        long stop = System.nanoTime();
        double seconds = (double) (stop - start) / 1000000000.0;
        System.out.println(String.format("(%s s)", df.format(seconds)));
    }

    /**
     * Handles LOAD "datafile" [FAST]: extracts the quoted file name and bulk
     * loads it into the active store.
     *
     * @param cmd  the full LOAD command text
     * @param fast true when the FAST variant was requested
     */
    private void processLoad(String cmd, boolean fast) {
        if (!checkStoreIsSet()) {
            System.out.println("\nYou must set an active store first using the command \"\\c storename\"");
            return;
        }
        String data = null;
        Matcher m = PTR_LOAD.matcher(cmd);
        if (m.find()) {
            data = m.group(1);
        } else {
            Matcher m2 = PTR_LOAD_FAST.matcher(cmd);
            // Fix: find() must be called before group(1); the original threw
            // IllegalStateException on this path. (A failed match leaves data
            // null, which the catch below reports as a load failure.)
            if (m2.find()) {
                data = m2.group(1);
            }
        }
        try {
            URI dataURI = getFileURI(data);
            int tuples = dbInstance.load(currentstore, dataURI, fast);
            System.out.println(String.format("\n%s tuples inserted.", tuples));
        } catch (Exception e) {
            System.out.println("\nUnable to load data.");
            log.error(e.getMessage(), e);
            if (e instanceof SQLException) {
                SQLException ex = ((SQLException) e).getNextException();
                while (ex != null) {
                    log.error(ex.getMessage());
                    ex = ex.getNextException();
                }
            }
        }
    }

    /** @return true when a store is currently connected. */
    private boolean checkStoreIsSet() {
        return currentstore != null;
    }

    /*
     * Tests the format of the path string and finds the correct
     * interpretation to generate a working URI. Returns null if the path is
     * not valid or there is no accessible file at the path.
     * NOTE(review): URI.create may throw IllegalArgumentException for
     * non-URI paths; callers catch Exception, so this surfaces as a
     * user-visible failure message.
     */
    private URI getFileURI(String path) {
        File file = new File(path);
        if (file.exists())
            return file.toURI();
        file = new File(URI.create(path));
        if (file.canRead())
            return file.toURI();
        return null;
    }

    /** Prints a table of all known stores and their online status. */
    private void processListStores() {
        List<StoreStatus> stores = dbInstance.listStores();
        String format = "%s | %s";
        System.out.println(String.format(format, "name", "online"));
        System.out.println("-------------------");
        for (StoreStatus status : stores) {
            System.out.println(String.format(format, status.name, status.isOnline));
        }
    }

    /** Handles STOP STORE name. */
    private void processStopStore(String cmd) {
        Matcher m = PTR_STOP_STORE.matcher(cmd);
        m.find();
        String name = m.group(1);
        try {
            dbInstance.stopStore(name);
        } catch (Exception e) {
            System.out.println("\nUnable to stop store.");
            log.error(e.getMessage());
        }
    }

    /** Handles START STORE name. */
    private void processStartStore(String cmd) {
        Matcher m = PTR_START_STORE.matcher(cmd);
        m.find();
        String name = m.group(1);
        try {
            dbInstance.startStore(name);
        } catch (Exception e) {
            System.out.println("\nUnable to start store.");
            log.error(e.getMessage());
        }
    }

    /** Handles DROP STORE name. */
    private void processDropStore(String cmd) {
        Matcher m = PTR_DROP_STORE.matcher(cmd);
        m.find();
        String name = m.group(1);
        try {
            dbInstance.dropStore(name);
        } catch (Exception e) {
            System.out.println("\nUnable to drop store.");
            log.error(e.getMessage());
        }
    }

    /** Handles CREATE STORE name WITH TBOX "tboxfile" PARAMS "paramsfile". */
    private void processCreateStore(String cmd) {
        Matcher m = PTR_CREATE_STORE.matcher(cmd);
        m.find();
        String name = m.group(1);
        String tboxfile = m.group(2);
        String paramfile = m.group(3);
        URI tboxURI = getFileURI(tboxfile);
        Properties prop = new Properties();
        try {
            prop.load(new FileReader(new File(getFileURI(paramfile))));
            dbInstance.createClassicStore(name, tboxURI, prop);
            System.out.println("\nStore has been created.");
        } catch (Exception e) {
            System.out.println("\nUnable to create store.");
            log.error(e.getMessage(), e);
        }
    }

    /** Handles CREATE VIRTUAL STORE name WITH TBOX "tboxfile" MAP "mapfile". */
    private void processCreateVirtualStore(String cmd) {
        Matcher m = PTR_CREATE_VIRTUAL_STORE.matcher(cmd);
        m.find();
        String name = m.group(1);
        String tboxfile = m.group(2);
        String obdaModel = m.group(3);
        URI tboxURI = getFileURI(tboxfile);
        URI obdaModelURI = getFileURI(obdaModel);
        try {
            dbInstance.createVirtualStore(name, tboxURI, obdaModelURI);
            System.out.println("\nStore has been created.");
        } catch (Exception e) {
            System.out.println("\nUnable to create store.");
            log.error(e.getMessage(), e);
        }
    }

    /** Makes the given (existing) repository the active store. */
    private void connect(String repository) {
        if (!dbInstance.exists(repository))
            System.out.println("\nThere is no repository by this name.");
        currentstore = repository;
        System.out.println("\n" + repository + " is now the active store.");
    }

    /** Prints the banner, creates the DB instance and registers a shutdown hook. */
    private void initialize() {
        System.out.println("Welcome to questcmd 1.0, the QuestDB interactive terminal.");
        System.out.println("questcmd is working in localdb mode");
        dbInstance = new QuestDB();
        /*
         * Called when System.exit() is called or Control+C happens.
         */
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                System.out.println("\nGood bye.");
                /*
                 * This cleans all resources and saves the current state of each
                 * store
                 */
                dbInstance.shutdown();
            }
        });
    }

    /** Prints the shell's built-in help text. */
    private void printhelp() {
        System.out.println("");
        System.out.println("Type: \\h for Quest commands");
        System.out.println(" ");
        System.out.println(" \\? for help with questcmd commands");
        System.out.println(" \\q to quit");
        System.out.println("Type \"help\" for help.");
        System.out.println("Terminate with semicolon to execute a query.");
        System.out.println("");
    }

    /** Primary prompt, shown before a new command. */
    private void printprompt1() {
        System.out.print("quest");
        if (currentstore != null) {
            System.out.print(":");
            System.out.print(currentstore);
        }
        System.out.print("=# ");
    }

    /** Continuation prompt, shown for multi-line commands. */
    private void printprompt2() {
        System.out.print("quest");
        if (currentstore != null) {
            System.out.print(":");
            System.out.print(currentstore);
        }
        System.out.print("-# ");
    }

    /**
     * Reads one logical command: accumulates non-blank lines until one ends
     * with ';' or the input starts with a backslash command.
     */
    private String readCommand() throws Exception {
        StringBuffer bf = new StringBuffer();
        while (true) {
            String line = in.readLine();
            if (line.trim().equals(""))
                continue;
            line = line.replaceAll("\\s+$", "");
            bf.append(line);
            bf.append(NEWLINE);
            if (line.charAt(line.length() - 1) == ';') {
                /* Terminated the command */
                return bf.toString();
            } else if (line.charAt(0) == '\\') {
                return bf.toString();
            }
            printprompt2();
        }
    }

    public static void main(String[] args) throws IOException {
        new QuestDBCMD();
    }
}
| |
package dateadog.dateadog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Color;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.support.annotation.RequiresApi;
import android.support.design.widget.Snackbar;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.widget.Toolbar;
import android.text.format.DateUtils;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.ImageView;
import android.widget.RelativeLayout;
import android.widget.TextView;
import com.android.volley.VolleyError;
import com.android.volley.toolbox.ImageLoader;
import java.net.URLEncoder;
import java.util.Calendar;
import java.util.Date;
import java.util.Set;
/**
 * Shows a single dog's profile (photo, age, sex, breeds, size, location) and
 * lets the user request a date with that dog. The request flow is:
 * complete-profile check -> date picker -> time picker ->
 * server.requestDate(...). The dog to display is passed in the launching
 * Intent under the "Dog" extra.
 */
public class DogProfileActivity extends AppCompatActivity implements DatePickerFragment.DateDialogListener, TimePickerFragment.TimeDialogListener {

    /** The max number of date requests a user can have pending at one time. */
    private static final int MAX_PENDING_REQUESTS = 1;

    /**
     * The dog that this profile displays information for. Passed via an intent when starting
     * this activity.
     */
    private Dog dog;
    private DADServer server;
    private Button requestDateButton;
    private TextView feedbackTitle;
    private TextView feedback;

    /** Accumulates the date/time chosen across the two picker dialogs. */
    Calendar calendar = Calendar.getInstance();

    /**
     * Counts how many of the given date requests are still pending.
     */
    private int countPendingRequests(Set<DateRequest> dates) {
        int count = 0;
        for (DateRequest date : dates) {
            if (date.getStatus() == DateRequest.Status.PENDING) {
                count++;
            }
        }
        return count;
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Fix: access the singleton through the class, not through the still-null
        // instance field (only worked because getInstance is static).
        server = DADServer.getInstance(getApplicationContext());
        setContentView(R.layout.activity_dog_profile);
        Toolbar toolbar = (Toolbar) findViewById(R.id.toolbar);
        setSupportActionBar(toolbar);
        getSupportActionBar().setDisplayHomeAsUpEnabled(true);
        getSupportActionBar().setHomeButtonEnabled(true);
        requestDateButton = (Button) findViewById(R.id.requestDateButton);
        // Tapping the location row opens the dog's city in a maps app.
        RelativeLayout locationRelativeLayout = (RelativeLayout) findViewById(R.id.locationRelativeLayout);
        locationRelativeLayout.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                Intent intent = new Intent(Intent.ACTION_VIEW);
                intent.setData(Uri.parse("geo:0,0?q=" + URLEncoder.encode(dog.getCity())));
                if (intent.resolveActivity(getPackageManager()) != null) {
                    startActivity(intent);
                }
            }
        });
        feedbackTitle = (TextView) findViewById(R.id.title_feedback);
        feedback = (TextView) findViewById(R.id.feedback);
        requestDateButton.setOnClickListener(new View.OnClickListener() {
            @RequiresApi(api = Build.VERSION_CODES.N)
            @Override
            public void onClick(View v) {
                server.getUser(new DADServer.UserProfileDataListener() {
                    @Override
                    public void onGotUserProfile(final UserProfile userProfile) {
                        if (userProfile.isComplete()) {
                            server.getDateRequests(new DADServer.DateRequestsDataListener() {
                                @Override
                                public void onGotDateRequests(Set<DateRequest> dateRequests) {
                                    int pendingRequests = countPendingRequests(dateRequests);
                                    // Fix: use >= so the documented limit of
                                    // MAX_PENDING_REQUESTS pending requests is
                                    // enforced (the original '>' allowed one extra).
                                    if (pendingRequests >= MAX_PENDING_REQUESTS) {
                                        AlertDialog alertDialog = new AlertDialog.Builder(DogProfileActivity.this).create();
                                        alertDialog.setTitle(R.string.no_more_dates);
                                        alertDialog.setMessage(getString(R.string.no_more_dates));
                                        alertDialog.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
                                                new DialogInterface.OnClickListener() {
                                                    public void onClick(DialogInterface dialog, int which) {
                                                        dialog.dismiss();
                                                    }
                                                });
                                        alertDialog.show();
                                    } else {
                                        DatePickerFragment dateDialog = new DatePickerFragment();
                                        dateDialog.show(getSupportFragmentManager(), "DateDialog");
                                    }
                                }
                            });
                        } else {
                            // Profile incomplete: prompt the user to finish it first.
                            Snackbar.make(findViewById(android.R.id.content), R.string.complete_profile_message, Snackbar.LENGTH_LONG)
                                    .setAction("Edit Profile", new View.OnClickListener() {
                                        @Override
                                        public void onClick(View view) {
                                            UserProfileDialogFragment dialog = UserProfileDialogFragment.newInstance(userProfile);
                                            dialog.show(getSupportFragmentManager(), "dialog");
                                        }
                                    })
                                    .setActionTextColor(Color.RED)
                                    .show();
                        }
                    }
                });
            }
        });
        dog = (Dog) getIntent().getExtras().get("Dog");
        updateUI();
    }

    /**
     * Time-picker callback: merges the chosen time into the previously picked
     * date and submits the request to the server.
     * NOTE(review): Calendar.HOUR is the 12-hour field — if the picker returns
     * a 24-hour value this should be HOUR_OF_DAY; confirm against
     * TimePickerFragment before changing.
     */
    @Override
    public void onFinishDialog(int hour, int minute, String description) {
        calendar.set(Calendar.HOUR, hour);
        calendar.set(Calendar.MINUTE, minute);
        server.requestDate(dog.getDogId(), calendar.getTimeInMillis(), description);
        findViewById(R.id.requestDateButton).setEnabled(false);
        ((TextView) findViewById(R.id.requestDateButton)).setText(R.string.request_sent);
    }

    /**
     * Date-picker callback: rejects dates in the past, otherwise continues to
     * the time picker.
     */
    @Override
    public void onFinishDialog(Date date) {
        calendar.setTime(date);
        Date today = Calendar.getInstance().getTime();
        if (date.before(today)) {
            // The user is attempting to set a date for today or earlier.
            AlertDialog alertDialog = new AlertDialog.Builder(DogProfileActivity.this).create();
            alertDialog.setTitle(R.string.past_date_error_title);
            alertDialog.setMessage(getString(R.string.past_date_error_message));
            alertDialog.setButton(AlertDialog.BUTTON_NEUTRAL, "OK",
                    new DialogInterface.OnClickListener() {
                        public void onClick(DialogInterface dialog, int which) {
                            dialog.dismiss();
                        }
                    });
            alertDialog.show();
        } else {
            TimePickerFragment timeDialog = new TimePickerFragment();
            timeDialog.show(getSupportFragmentManager(), "TimeDialog");
        }
    }

    /** Handles the action-bar Up button as a back press. */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
            case android.R.id.home:
                onBackPressed();
                return true;
        }
        return super.onOptionsItemSelected(item);
    }

    @Override
    protected void onResume() {
        // Fix: must call super.onResume() here; the original called
        // super.onStart(), which breaks the Activity lifecycle contract
        // (Android throws SuperNotCalledException for a missing super.onResume()).
        super.onResume();
        updateUI();
    }

    /**
     * Populates the profile views from the Dog object and refreshes the
     * request button's state from the user's existing date requests.
     */
    private void updateUI() {
        VolleySingleton.getInstance(getApplicationContext()).getImageLoader()
                .get(dog.getImageURL(), new ImageLoader.ImageListener() {
                    @Override
                    public void onResponse(ImageLoader.ImageContainer response, boolean isImmediate) {
                        ImageView profileImage = (ImageView) findViewById(R.id.profile_image_view);
                        profileImage.setImageBitmap(response.getBitmap());
                    }
                    @Override
                    public void onErrorResponse(VolleyError error) {
                        error.printStackTrace();
                    }
                });
        setTitle(dog.getName());
        ((TextView) findViewById(R.id.ageTextView)).setText(dog.getAge());
        ((TextView) findViewById(R.id.sexTextView)).setText(dog.getSex());
        ((TextView) findViewById(R.id.breedsTextView)).setText(dog.getBreedsString());
        ((TextView) findViewById(R.id.sizeTextView)).setText(dog.getSize());
        ((TextView) findViewById(R.id.locationTextView)).setText(dog.getCity());
        // Get and display the request status for this dog; the button stays
        // disabled while the fetch is in flight.
        requestDateButton.setEnabled(false);
        server.getDateRequests(new DADServer.DateRequestsDataListener() {
            @Override
            public void onGotDateRequests(Set<DateRequest> dateRequests) {
                boolean existingDateRequest = false;
                for (DateRequest request : dateRequests) {
                    if (request.getDogId() == dog.getDogId()) {
                        existingDateRequest = true;
                        DateRequest.Status status = request.getStatus();
                        CharSequence dateString = DateUtils.getRelativeDateTimeString(DogProfileActivity.this, request.getDate().getTime(), DateUtils.MINUTE_IN_MILLIS, DateUtils.WEEK_IN_MILLIS, 0);
                        if (status == DateRequest.Status.APPROVED) {
                            requestDateButton.setText(getString(R.string.request_approved)
                                    + " for " + dateString);
                        } else if (status == DateRequest.Status.REJECTED) {
                            requestDateButton.setText(getString(R.string.request_rejected));
                            feedbackTitle.setText(R.string.feedback_title);
                            feedback.setText(request.getFeedback());
                        } else if (status == DateRequest.Status.PENDING) {
                            requestDateButton.setText(getString(R.string.request_pending)
                                    + " for " + dateString);
                        }
                    }
                }
                // Only allow a new request when none exists for this dog yet.
                if (!existingDateRequest) {
                    requestDateButton.setEnabled(true);
                }
            }
        });
    }
}
| |
/**
* Derby - Class org.apache.derbyTesting.functionTests.tests.derbynet.DerbyNetAutoStartTest
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.derbyTesting.functionTests.tests.derbynet;
import java.io.BufferedReader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.PrintStream;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.sql.DatabaseMetaData;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Locale;
import junit.framework.Test;
import junit.framework.TestSuite;
import org.apache.derby.drda.NetworkServerControl;
import org.apache.derbyTesting.functionTests.util.PrivilegedFileOpsForTests;
import org.apache.derbyTesting.junit.BaseJDBCTestCase;
import org.apache.derbyTesting.junit.Derby;
import org.apache.derbyTesting.junit.NetworkServerTestSetup;
import org.apache.derbyTesting.junit.TestConfiguration;
/**
* Test the network server derby.drda.startNetworkServer property.
*
* Test that:
* <ul>
* <li> 1 ) The network server is not started when the property value is false.
* <li> 2 ) The network server is started when the property value is true, and
* <li> a) uses the default port when the port property is not specified.
* <li> b) uses a non-default port when a port number is specified
* <li> c) uses an invalid port number (-1)
* <li> 3 ) A message is printed to derby.log when the server is
* already started.
* </ul>
*/
public class DerbyNetAutoStartTest extends BaseJDBCTestCase {
private Locale oldLocale = Locale.getDefault();
    /**
     * Creates a test case instance for the named fixture.
     *
     * @param name the name of the test fixture to run
     */
    public DerbyNetAutoStartTest(String name) {
        super(name);
    }
//args to helper method. With or without port
private static boolean WITHOUTPORT = false;
private static boolean WITHPORT = true;
    /**
     * Prepares for a test: shuts down the embedded engine so each fixture
     * boots it fresh and the auto-start properties are re-read.
     */
    public void setUp() {
        // make sure no network server is running
        TestConfiguration.getCurrent().shutdownEngine();
    }
/**
* Test case 1
* Test that if derby.drda.startNetworkServer property is false
* that server does not come up.
*
* @throws Exception
*/
public void testStartNetworkServerFalse() throws Exception {
setSystemProperty("derby.drda.startNetworkServer", "false");
// Boot with an embedded connection
// Should not start network server
getConnection();
NetworkServerControl ns =
NetworkServerTestSetup.getNetworkServerControl();
// Verify the server is not up
assertFalse(NetworkServerTestSetup.pingForServerUp(ns,null, false));
}
    /**
     * Test case 2a.
     * Test setting derby.drda.startNetworkServer property without
     * specifying anything in the port number property.
     * Should start, using the default port.
     *
     * To avoid possible conflict with other tests running concurrently,
     * this test may only run if baseport is not set and we are
     * using the default 1527 port.
     *
     * NOTE: deliberately disabled — the extra leading 't' ("ttest...")
     * keeps JUnit's reflection-based runner from picking it up.
     *
     * @throws Exception
     */
    public void ttestStartNetworkServerTrueNoPort() throws Exception {
        startNetworkServerTrueHelper(WITHOUTPORT);
    }
    /**
     * Test case 2b.
     * Test setting derby.drda.startNetworkServer property
     * and specifying a port number; the server should come up on that port.
     *
     * @throws Exception
     */
    public void testStartNetworkServerTrueWithPort() throws Exception {
        startNetworkServerTrueHelper(WITHPORT);
    }
/**
* Test case 2c.
* Test setting derby.drda.startNetworkServer property
* and specifying an invalid port number
* Should fail to start network server
*
* @throws Exception
*/
public void testStartNetworkServerTrueWithInvalidPort() throws Exception {
setSystemProperty("derby.drda.startNetworkServer", "true");
// Note that internally, portNumber -1 means 'no port number provided'
setSystemProperty("derby.drda.portNumber", "-1");
// Boot with an embedded connection
// Should not start network server
// But it still appears to find an embedded connection.
// Check by attempting something on a connection
// This will currently print an InvocationException to the console
// Is it a bug that it will not print to derby.log?
// But, for now, capture the output and throw it away
final PrintStream realSystemOut = System.out;
final PrintStream realSystemErr = System.err;
ByteArrayOutputStream serverOutputBOS = new ByteArrayOutputStream();
final PrintStream serverOutputOut = new PrintStream( serverOutputBOS);
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
System.setOut(new PrintStream(serverOutputOut));
System.setErr(new PrintStream(serverOutputOut));
return null;
}
});
try {
try
{
// Network start fails, but we get an Embedded connection
DatabaseMetaData dbmd = getConnection().getMetaData();
ResultSet rs = dbmd.getSchemas();
assertNotNull(rs);
rs.close();
}
catch( SQLException e)
{
fail();
}
} finally {
// Restore the original out streams
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
System.setOut(realSystemOut);
System.setErr(realSystemErr);
return null;
}
});
}
// Verify the server - use default port - is not up
NetworkServerControl ns =
NetworkServerTestSetup.getNetworkServerControl();
assertFalse(NetworkServerTestSetup.pingForServerUp(ns,null, false));
}
/**
 * Helper that boots the engine with derby.drda.startNetworkServer=true
 * (optionally fixing derby.drda.portNumber), verifies the network server
 * came up as a side effect of the embedded boot, then shuts it down and
 * confirms it is gone.
 *
 * @param withport true to pin an explicit port; false to use the base port
 * @throws Exception
 */
private void startNetworkServerTrueHelper(boolean withport)
        throws Exception {
    final int serverPort;
    if (withport) {
        serverPort = TestConfiguration.getCurrent().getNextAvailablePort();
    } else {
        serverPort = TestConfiguration.getCurrent().getBasePort();
    }

    setSystemProperty("derby.drda.startNetworkServer", "true");
    if (withport) {
        setSystemProperty("derby.drda.portNumber",
                          Integer.toString(serverPort));
    }

    // Booting with an embedded connection should auto-start the server.
    getConnection();

    // Check that the server is up, then bring it back down.
    NetworkServerControl control = NetworkServerTestSetup
        .getNetworkServerControl(serverPort);
    NetworkServerTestSetup.waitForServerStart(control);
    control.shutdown();
    assertFalse(
        NetworkServerTestSetup.pingForServerUp(control, null, false));
}
/**
 * Test case 3
 * Test that if a network server is already running on
 * a certain port, starting the server after setting
 * derby.drda.startNetworkServer reflects an error message
 * indicating the server is already in use.
 *
 * To avoid possible conflict with other tests running concurrently,
 * this test will also set derby.drda.portNumber.
 *
 * @throws Exception
 */
public void testStartNetworkServerLogMessageOnDualStart()
throws Exception {
    final Locale newLocale = Locale.ENGLISH;
    // first force English locale, so the expected (English) message text
    // can be matched in derby.log regardless of the machine's locale.
    // tearDown() restores the saved oldLocale.
    AccessController.doPrivileged
    (new java.security.PrivilegedAction() {
        public Object run() {
            Locale.setDefault(newLocale);
            return null;
        }
    }
    );
    int doubleport = TestConfiguration.getCurrent().getPort();
    // start a network server
    NetworkServerControl ns =
        NetworkServerTestSetup.getNetworkServerControl(doubleport);
    ns.start(null);
    NetworkServerTestSetup.waitForServerStart(ns);
    // shutdown to ensure getConnection reads the properties
    TestConfiguration.getCurrent().shutdownEngine();
    setSystemProperty("derby.drda.startNetworkServer", "true");
    setSystemProperty("derby.drda.portNumber",
            Integer.toString(doubleport));
    // Boot with an embedded connection
    // Should attempt to start network server (and fail, since the
    // port is already occupied by the server started above)
    getConnection();
    // Check the server is still up
    assertTrue(NetworkServerTestSetup.pingForServerUp(ns, null, true));
    String logFileName =
        getSystemProperty("derby.system.home") +
        File.separator + "derby.log";
    // Give it a little time to write the message
    // There should be a warning in the derby.log file.
    // With some JVMS there will be a java.net.BindException
    // But always there will be the more generic message.
    // Note that by checking on the generic message, we cannot
    // distinguish the expected from any other exception.
    String expectedString =
        "An exception was thrown during network server startup";
    final long startTime = System.currentTimeMillis();
    final long waitTime = NetworkServerTestSetup.getWaitTime();
    // Poll derby.log once a second until the message appears or the
    // maximum wait time is exceeded.
    while (true)
    {
        Thread.sleep(1000);
        if (checkLog( logFileName, new String[] {expectedString})){
            break;
        }
        long elapsed = System.currentTimeMillis() - startTime;
        if (elapsed > waitTime) {
            fail("did not find the expected string: " + expectedString
                    + " within the maximum wait time " + waitTime);
        }
    }
    assertTrue(checkLog( logFileName, new String[] {expectedString}));
    ns.shutdown();
}
/**
 * Scans the given log file for a set of expected substrings.
 *
 * @param logFileName path of the log file to scan
 * @param expected substrings that must all occur somewhere in the file
 * @return true only if every expected string was found
 * @throws IOException if the file cannot be read
 */
private static boolean checkLog( String logFileName, String[] expected)
    throws IOException
{
    boolean[] found = new boolean[ expected.length];
    FileInputStream is =
            PrivilegedFileOpsForTests
            .getFileInputStream(new File(logFileName));
    BufferedReader br = new BufferedReader(new InputStreamReader(is));
    try {
        String logLine;
        while((logLine = br.readLine()) != null)
        {
            // to print out derby.log, uncomment this line:
            // System.out.println(logLine);
            for( int i = 0; i < expected.length; i++)
            {
                if( (! found[i]) && logLine.indexOf( expected[i]) >= 0)
                    found[i] = true;
            }
        }
    } finally {
        // The reader (and its underlying FileInputStream) was previously
        // leaked; callers poll this method in a loop, so always close it.
        br.close();
    }
    for( int i = 0; i < expected.length; i++)
    {
        if( ! found[i])
        {
            return false;
        }
    }
    return true;
} // end of checkLog
/**
 * Returns the top-level suite for this test class, currently just the
 * embedded flavor built by {@link #baseSuite(String)}.
 */
public static Test suite() {
    TestSuite suite = new TestSuite("DerbyNetAutoStartTest");
    suite.addTest(baseSuite("DerbyNetAutoStartTest:embedded"));
    return suite;
}
/**
 * Builds the suite of tests requiring network server support. Tests that
 * rely on the default port (1527) are only added when the
 * derby.tests.basePort property is unset, to avoid clashes when suites
 * run concurrently.
 *
 * @param name name for the returned suite
 * @return the suite; empty when no network server support is available
 */
private static Test baseSuite(String name) {
    TestSuite suite = new TestSuite(name);
    // Need derbynet.jar in the classpath, and cannot run with ME/JSR169/cdc profile
    if (!Derby.hasServer())
        return suite;
    // Adds all tests that can run with baseport set or not.
    suite.addTestSuite(DerbyNetAutoStartTest.class);
    if (getSystemProperty("derby.tests.basePort") != null )
    {
        return suite;
    }
    // We assume, that if baseport is set, then the intention is that
    // tests are run concurrently, so we cannot use the default port
    // 1527. Lists tests that rely on/test the usage of that port here:
    // FIX: this previously read "ttestStartNetworkServerTrueNoPort"
    // (double 't'); JUnit 3 fails a TestCase constructed with a
    // nonexistent method name, so the suite could never pass.
    // TODO(review): confirm the extra 't' was not a deliberate disable.
    suite.addTest
        (new DerbyNetAutoStartTest("testStartNetworkServerTrueNoPort"));
    return suite;
}
/**
 * Cleans up after each test: removes the auto-start system properties
 * and restores the default locale saved in oldLocale (changed by
 * testStartNetworkServerLogMessageOnDualStart).
 */
protected void tearDown() throws Exception {
    // unset the system properties
    removeSystemProperty("derby.drda.startNetworkServer");
    removeSystemProperty("derby.drda.portNumber");
    // set the old locale back to the original
    AccessController.doPrivileged
    (new java.security.PrivilegedAction() {
        public Object run() {
            Locale.setDefault(oldLocale);
            return null;
        }
    }
    );
    oldLocale=null;
    super.tearDown();
}
}
| |
/**
* Appcelerator Titanium Mobile
* Copyright (c) 2009-2012 by Appcelerator, Inc. All Rights Reserved.
* Licensed under the terms of the Apache Public License
* Please see the LICENSE included with this distribution for details.
*/
package org.appcelerator.titanium.util;
import java.util.HashMap;
import org.appcelerator.kroll.KrollDict;
import org.appcelerator.kroll.KrollFunction;
import org.appcelerator.kroll.common.Log;
import org.appcelerator.titanium.TiC;
import org.appcelerator.titanium.TiDimension;
import org.appcelerator.titanium.proxy.TiViewProxy;
import org.appcelerator.titanium.view.Ti2DMatrix;
import org.appcelerator.titanium.view.TiAnimation;
import org.appcelerator.titanium.view.TiCompositeLayout;
import org.appcelerator.titanium.view.TiCompositeLayout.LayoutParams;
import org.appcelerator.titanium.view.TiUIView;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.graphics.drawable.TransitionDrawable;
import android.os.Build;
import android.os.Looper;
import android.os.MessageQueue;
import android.util.FloatMath;
import android.util.TypedValue;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewParent;
import android.view.animation.AlphaAnimation;
import android.view.animation.Animation;
import android.view.animation.AnimationSet;
import android.view.animation.LinearInterpolator;
import android.view.animation.Transformation;
import android.view.animation.TranslateAnimation;
public class TiAnimationBuilder
{
private static final String TAG = "TiAnimationBuilder";

// Anchor point used for matrix transforms.
protected float anchorX;
protected float anchorY;

// Parsed animation options; null means the option was not supplied.
protected Ti2DMatrix tdm = null;
protected Double delay = null;
protected Double duration = null;
protected Double toOpacity = null;
protected Double repeat = null;
protected Boolean autoreverse = null;
// Layout targets kept as strings so TiDimension can parse their units.
protected String top = null, bottom = null, left = null, right = null;
protected String centerX = null, centerY = null;
protected String width = null, height = null;
protected Integer backgroundColor = null;
protected TiAnimation animationProxy;
protected KrollFunction callback;
// Flags consumed by AnimationListener when the animation ends.
protected boolean relayoutChild = false, applyOpacity = false;
// Raw options map, retained so the listener can push values back to the proxy.
@SuppressWarnings("rawtypes")
protected HashMap options;
protected View view;
protected TiViewProxy viewProxy;
/** Creates a builder with the framework's default anchor values. */
public TiAnimationBuilder()
{
    anchorX = Ti2DMatrix.DEFAULT_ANCHOR_VALUE;
    anchorY = Ti2DMatrix.DEFAULT_ANCHOR_VALUE;
}
/**
 * Reads animation options out of the given property map into this
 * builder's fields. Unrecognized keys are ignored; the map itself is
 * retained in {@link #options} for later use by the animation listener.
 *
 * @param options animation properties; null is a no-op
 */
@SuppressWarnings({"unchecked", "rawtypes"})
public void applyOptions(HashMap options)
{
    if (options == null) {
        return;
    }

    if (options.containsKey(TiC.PROPERTY_ANCHOR_POINT)) {
        Object anchorPoint = options.get(TiC.PROPERTY_ANCHOR_POINT);
        if (anchorPoint instanceof HashMap) {
            HashMap point = (HashMap) anchorPoint;
            anchorX = TiConvert.toFloat(point, TiC.PROPERTY_X);
            anchorY = TiConvert.toFloat(point, TiC.PROPERTY_Y);
        } else {
            Log.e(TAG, "Invalid argument type for anchorPoint property. Ignoring");
        }
    }

    if (options.containsKey(TiC.PROPERTY_TRANSFORM)) {
        tdm = (Ti2DMatrix) options.get(TiC.PROPERTY_TRANSFORM);
    }
    if (options.containsKey(TiC.PROPERTY_DELAY)) {
        delay = TiConvert.toDouble(options, TiC.PROPERTY_DELAY);
    }
    if (options.containsKey(TiC.PROPERTY_DURATION)) {
        duration = TiConvert.toDouble(options, TiC.PROPERTY_DURATION);
    }
    if (options.containsKey(TiC.PROPERTY_OPACITY)) {
        toOpacity = TiConvert.toDouble(options, TiC.PROPERTY_OPACITY);
    }
    if (options.containsKey(TiC.PROPERTY_REPEAT)) {
        repeat = TiConvert.toDouble(options, TiC.PROPERTY_REPEAT);

        if (repeat == 0d) {
            // A repeat of 0 is probably non-sensical. Titanium iOS
            // treats it as 1 and so should we.
            repeat = 1d;
        }
    } else {
        repeat = 1d; // Default as indicated in our documentation.
    }
    if (options.containsKey(TiC.PROPERTY_AUTOREVERSE)) {
        autoreverse = TiConvert.toBoolean(options, TiC.PROPERTY_AUTOREVERSE);
    }
    if (options.containsKey(TiC.PROPERTY_TOP)) {
        top = TiConvert.toString(options, TiC.PROPERTY_TOP);
    }
    if (options.containsKey(TiC.PROPERTY_BOTTOM)) {
        bottom = TiConvert.toString(options, TiC.PROPERTY_BOTTOM);
    }
    if (options.containsKey(TiC.PROPERTY_LEFT)) {
        left = TiConvert.toString(options, TiC.PROPERTY_LEFT);
    }
    if (options.containsKey(TiC.PROPERTY_RIGHT)) {
        right = TiConvert.toString(options, TiC.PROPERTY_RIGHT);
    }
    if (options.containsKey(TiC.PROPERTY_CENTER)) {
        Object centerPoint = options.get(TiC.PROPERTY_CENTER);
        if (centerPoint instanceof HashMap) {
            HashMap center = (HashMap) centerPoint;
            centerX = TiConvert.toString(center, TiC.PROPERTY_X);
            centerY = TiConvert.toString(center, TiC.PROPERTY_Y);
        } else {
            Log.e(TAG, "Invalid argument type for center property. Ignoring");
        }
    }
    if (options.containsKey(TiC.PROPERTY_WIDTH)) {
        width = TiConvert.toString(options, TiC.PROPERTY_WIDTH);
    }
    if (options.containsKey(TiC.PROPERTY_HEIGHT)) {
        height = TiConvert.toString(options, TiC.PROPERTY_HEIGHT);
    }
    if (options.containsKey(TiC.PROPERTY_BACKGROUND_COLOR)) {
        backgroundColor = TiConvert.toColor(options, TiC.PROPERTY_BACKGROUND_COLOR);
    }

    this.options = options;
}
/**
 * Applies a TiAnimation proxy: remembers it (so its events fire from the
 * listener) and reads its properties via {@link #applyOptions}.
 */
public void applyAnimation(TiAnimation anim)
{
    this.animationProxy = anim;
    applyOptions(anim.getProperties());
}
/** Sets the JS callback invoked (async) when the animation set completes. */
public void setCallback(KrollFunction callback)
{
    this.callback = callback;
}
/**
 * Builds the AnimationSet using the view's current position and measured
 * size, measured against its parent (when the parent is a ViewGroup;
 * otherwise the parent dimensions default to 0).
 */
public AnimationSet render(TiViewProxy viewProxy, View view)
{
    int containerWidth = 0;
    int containerHeight = 0;

    ViewParent container = view.getParent();
    if (container instanceof ViewGroup) {
        ViewGroup containerGroup = (ViewGroup) container;
        containerWidth = containerGroup.getMeasuredWidth();
        containerHeight = containerGroup.getMeasuredHeight();
    }

    return render(viewProxy, view,
        view.getLeft(), view.getTop(),
        view.getMeasuredWidth(), view.getMeasuredHeight(),
        containerWidth, containerHeight);
}
/**
 * Adds a child animation to the set, translating the Titanium repeat
 * count into Android's per-child repeat count.
 */
private void addAnimation(AnimationSet animationSet, Animation animation)
{
    // repeatCount is ignored at the AnimationSet level, so it needs to
    // be set for each child animation manually.

    // We need to reduce the repeat count by 1, since for native Android
    // 1 would mean repeating it once.
    int repeatCount = (repeat == null ? 0 : repeat.intValue() - 1);

    // In Android (native), the repeat count includes reverses. So we
    // need to double-up and add one to the repeat count if we're reversing.
    if (autoreverse != null && autoreverse.booleanValue()) {
        repeatCount = repeatCount * 2 + 1;
    }

    animation.setRepeatCount(repeatCount);

    animationSet.addAnimation(animation);
}
/** Creates a matrix animation for the given 2D matrix using this builder's anchor. */
public TiMatrixAnimation createMatrixAnimation(Ti2DMatrix matrix)
{
    return new TiMatrixAnimation(matrix, anchorX, anchorY);
}
/**
 * Builds the Android AnimationSet for the applied options: opacity,
 * background color, 2D-matrix transform, position (top/bottom/left/
 * right/center) and size (width/height).
 *
 * @param viewProxy proxy of the animated view
 * @param view the native view to animate
 * @param x current left position, in pixels
 * @param y current top position, in pixels
 * @param w current width, in pixels
 * @param h current height, in pixels
 * @param parentWidth parent's measured width, in pixels
 * @param parentHeight parent's measured height, in pixels
 * @return the configured AnimationSet
 */
public AnimationSet render(TiViewProxy viewProxy, View view, int x, int y, int w, int h, int parentWidth,
    int parentHeight)
{
    this.view = view;
    this.viewProxy = viewProxy;

    AnimationSet as = new AnimationSet(false);
    AnimationListener animationListener = new AnimationListener();
    if (callback != null || animationProxy != null) {
        as.setAnimationListener(animationListener);
    }

    TiUIView tiView = viewProxy.peekView();

    if (toOpacity != null) {
        // Determine which value to use for "from" value, in this order:
        // 1.) If we previously performed an alpha animation on the view,
        // use that as the from value.
        // 2.) Else, if we have set an opacity property on the view, use
        // that as the from value.
        // 3.) Else, use 1.0f as the from value.
        float fromOpacity;
        float currentAnimatedAlpha =
            tiView == null ? Float.MIN_VALUE : tiView.getAnimatedAlpha();
        if (currentAnimatedAlpha != Float.MIN_VALUE) {
            // MIN_VALUE is used as a signal that no value has been set.
            fromOpacity = currentAnimatedAlpha;
        } else if (viewProxy.hasProperty(TiC.PROPERTY_OPACITY)) {
            fromOpacity = TiConvert.toFloat(viewProxy.getProperty(TiC.PROPERTY_OPACITY));
        } else {
            fromOpacity = 1.0f;
        }

        Animation animation = new AlphaAnimation(fromOpacity, toOpacity.floatValue());

        // Remember the toOpacity value for next time, since we have no way
        // of looking up animated alpha values on the Android native view itself.
        if (tiView != null) {
            tiView.setAnimatedAlpha(toOpacity.floatValue());
        }

        applyOpacity = true; // Used in the animation listener
        addAnimation(as, animation);
        animation.setAnimationListener(animationListener);

        // toOpacity is known to be non-null inside this branch, so the
        // old extra "toOpacity != null" sub-check was redundant.
        if (viewProxy.hasProperty(TiC.PROPERTY_OPACITY) && tiView != null) {
            // Initialize the opacity to 1 when we are going to change it in
            // the animation. If the opacity of the view was initialized to
            // 0, the animation doesn't work at all. If it was initialized to
            // something less than 1.0, then it "works" but doesn't give the
            // expected results. The reason seems to be partially explained
            // here:
            // http://stackoverflow.com/a/11387049/67842
            // Basically, the AlphaAnimation is transforming the
            // *existing* alpha value of the view. So to do what we want it
            // to do, we need to start with a base of 1. Surprisingly, this
            // does not seem to show a blip if the opacity was less than
            // 1.0 to begin with.
            tiView.setOpacity(1.0f);
        }
    }

    if (backgroundColor != null) {
        int fromBackgroundColor = 0;

        if (viewProxy.hasProperty(TiC.PROPERTY_BACKGROUND_COLOR)) {
            fromBackgroundColor = TiConvert.toColor(TiConvert.toString(viewProxy
                .getProperty(TiC.PROPERTY_BACKGROUND_COLOR)));
        } else {
            Log.w(TAG, "Cannot animate view without a backgroundColor. View doesn't have that property. Using #00000000");
            fromBackgroundColor = Color.argb(0, 0, 0, 0);
        }

        Animation a = new TiColorAnimation(view, fromBackgroundColor, backgroundColor);
        addAnimation(as, a);
    }

    if (tdm != null) {
        Animation anim;
        // Record the matrix's scale/rotation end values on the view so a
        // subsequent animation can pick them up as its "from" values.
        if (tdm.hasScaleOperation() && tiView != null) {
            tiView.setAnimatedScaleValues(tdm.verifyScaleValues(tiView,
                (autoreverse != null && autoreverse.booleanValue())));
        }

        if (tdm.hasRotateOperation() && tiView != null) {
            tiView.setAnimatedRotationDegrees(tdm.verifyRotationValues(tiView,
                (autoreverse != null && autoreverse.booleanValue())));
        }

        anim = new TiMatrixAnimation(tdm, anchorX, anchorY);

        addAnimation(as, anim);
    }

    if (top != null || bottom != null || left != null || right != null || centerX != null || centerY != null) {
        TiDimension optionTop = null, optionBottom = null;
        TiDimension optionLeft = null, optionRight = null;
        TiDimension optionCenterX = null, optionCenterY = null;

        // Note that we're stringifying the values to make sure we
        // use the correct TiDimension constructor, except when
        // we know the values are expressed for certain in pixels.
        if (top != null) {
            optionTop = new TiDimension(top, TiDimension.TYPE_TOP);
        } else if (bottom == null && centerY == null) {
            // Fix a top value since no other y-axis value is being set.
            optionTop = new TiDimension(view.getTop(), TiDimension.TYPE_TOP);
            optionTop.setUnits(TypedValue.COMPLEX_UNIT_PX);
        }

        if (bottom != null) {
            optionBottom = new TiDimension(bottom, TiDimension.TYPE_BOTTOM);
        }

        if (left != null) {
            optionLeft = new TiDimension(left, TiDimension.TYPE_LEFT);
        } else if (right == null && centerX == null) {
            // Fix a left value since no other x-axis value is being set.
            optionLeft = new TiDimension(view.getLeft(), TiDimension.TYPE_LEFT);
            optionLeft.setUnits(TypedValue.COMPLEX_UNIT_PX);
        }

        if (right != null) {
            optionRight = new TiDimension(right, TiDimension.TYPE_RIGHT);
        }

        if (centerX != null) {
            optionCenterX = new TiDimension(centerX, TiDimension.TYPE_CENTER_X);
        }

        if (centerY != null) {
            optionCenterY = new TiDimension(centerY, TiDimension.TYPE_CENTER_Y);
        }

        int horizontal[] = new int[2];
        int vertical[] = new int[2];
        ViewParent parent = view.getParent();
        View parentView = null;

        if (parent instanceof View) {
            parentView = (View) parent;
        }

        TiCompositeLayout.computePosition(parentView, optionLeft, optionCenterX, optionRight, w, 0, parentWidth,
            horizontal);
        TiCompositeLayout.computePosition(parentView, optionTop, optionCenterY, optionBottom, h, 0, parentHeight,
            vertical);

        Animation animation = new TranslateAnimation(Animation.ABSOLUTE, 0, Animation.ABSOLUTE,
            horizontal[0] - x, Animation.ABSOLUTE, 0, Animation.ABSOLUTE, vertical[0] - y);

        animation.setAnimationListener(animationListener);
        addAnimation(as, animation);

        // Will need to update layout params at end of animation
        // so that touch events will be recognized at new location,
        // and so that view will stay at new location after changes in
        // orientation. But if autoreversing to original layout, no
        // need to re-layout.
        relayoutChild = (autoreverse == null || !autoreverse.booleanValue());

        Log.d(TAG, "animate " + viewProxy + " relative to self: " + (horizontal[0] - x) + ", " + (vertical[0] - y),
            Log.DEBUG_MODE);
    }

    if (tdm == null && (width != null || height != null)) {
        TiDimension optionWidth, optionHeight;

        if (width != null) {
            optionWidth = new TiDimension(width, TiDimension.TYPE_WIDTH);
        } else {
            optionWidth = new TiDimension(w, TiDimension.TYPE_WIDTH);
            optionWidth.setUnits(TypedValue.COMPLEX_UNIT_PX);
        }

        if (height != null) {
            optionHeight = new TiDimension(height, TiDimension.TYPE_HEIGHT);
        } else {
            // FIX: fall back to the view's current height (h). This
            // previously used the width (w), a copy-paste mistake that
            // snapped the height to the width when only width animated.
            optionHeight = new TiDimension(h, TiDimension.TYPE_HEIGHT);
            optionHeight.setUnits(TypedValue.COMPLEX_UNIT_PX);
        }

        int toWidth = optionWidth.getAsPixels(view);
        int toHeight = optionHeight.getAsPixels(view);

        SizeAnimation sizeAnimation = new SizeAnimation(view, w, h, toWidth, toHeight);

        if (duration != null) {
            sizeAnimation.setDuration(duration.longValue());
        }

        sizeAnimation.setInterpolator(new LinearInterpolator());
        sizeAnimation.setAnimationListener(animationListener);
        addAnimation(as, sizeAnimation);

        // Will need to update layout params at end of animation
        // so that touch events will be recognized within new
        // size rectangle, and so that new size will survive
        // any changes in orientation. But if autoreversing
        // to original layout, no need to re-layout.
        relayoutChild = (autoreverse == null || !autoreverse.booleanValue());
    }

    // Set duration, repeatMode and fillAfter only after adding children.
    // The values are pushed down to the child animations.
    as.setFillAfter(true);

    if (duration != null) {
        as.setDuration(duration.longValue());
    }

    if (autoreverse != null && autoreverse.booleanValue()) {
        as.setRepeatMode(Animation.REVERSE);
    } else {
        as.setRepeatMode(Animation.RESTART);
    }

    // startOffset is relevant to the animation set and thus
    // not also set on the child animations.
    if (delay != null) {
        as.setStartOffset(delay.longValue());
    }

    return as;
}
/**
 * Animation that linearly interpolates a view's LayoutParams width and
 * height between a from- and a to-size, re-applying the params on every
 * frame.
 */
protected class SizeAnimation extends Animation
{
    protected View view;
    protected float fromWidth, fromHeight, toWidth, toHeight;
    protected static final String TAG = "TiSizeAnimation";

    /**
     * @param view the view whose layout params are resized
     * @param fromWidth starting width, px
     * @param fromHeight starting height, px
     * @param toWidth target width, px
     * @param toHeight target height, px
     */
    public SizeAnimation(View view, float fromWidth, float fromHeight, float toWidth, float toHeight)
    {
        this.view = view;
        this.fromWidth = fromWidth;
        this.fromHeight = fromHeight;
        this.toWidth = toWidth;
        this.toHeight = toHeight;

        Log.d(TAG, "animate view from (" + fromWidth + "x" + fromHeight + ") to (" + toWidth + "x" + toHeight + ")",
            Log.DEBUG_MODE);
    }

    @Override
    protected void applyTransformation(float interpolatedTime, Transformation transformation)
    {
        super.applyTransformation(interpolatedTime, transformation);

        // NOTE(review): android.util.FloatMath was deprecated in later
        // API levels; Math.floor would be the modern equivalent.
        int width = 0;
        if (fromWidth == toWidth) {
            width = (int) fromWidth;
        } else {
            width = (int) FloatMath.floor(fromWidth + ((toWidth - fromWidth) * interpolatedTime));
        }

        int height = 0;
        if (fromHeight == toHeight) {
            height = (int) fromHeight;
        } else {
            height = (int) FloatMath.floor(fromHeight + ((toHeight - fromHeight) * interpolatedTime));
        }

        ViewGroup.LayoutParams params = view.getLayoutParams();
        params.width = width;
        params.height = height;

        // Keep Titanium's own layout bookkeeping in sync (pixel units).
        if (params instanceof TiCompositeLayout.LayoutParams) {
            TiCompositeLayout.LayoutParams tiParams = (TiCompositeLayout.LayoutParams) params;
            tiParams.optionHeight = new TiDimension(height, TiDimension.TYPE_HEIGHT);
            tiParams.optionHeight.setUnits(TypedValue.COMPLEX_UNIT_PX);
            tiParams.optionWidth = new TiDimension(width, TiDimension.TYPE_WIDTH);
            tiParams.optionWidth.setUnits(TypedValue.COMPLEX_UNIT_PX);
        }

        view.setLayoutParams(params);
    }
}
/**
 * Animation that applies a Ti2DMatrix transform to the view, either
 * interpolated over time or jumped straight to the final matrix.
 */
public static class TiMatrixAnimation extends Animation
{
    protected Ti2DMatrix matrix;
    protected int childWidth, childHeight;
    protected float anchorX = -1, anchorY = -1;

    // When false, every frame uses the final matrix instead of an
    // interpolated one.
    public boolean interpolate = true;

    public TiMatrixAnimation(Ti2DMatrix matrix, float anchorX, float anchorY)
    {
        this.matrix = matrix;
        this.anchorX = anchorX;
        this.anchorY = anchorY;
    }

    @Override
    public void initialize(int width, int height, int parentWidth, int parentHeight)
    {
        super.initialize(width, height, parentWidth, parentHeight);
        // Remember the animated view's dimensions for matrix interpolation.
        this.childWidth = width;
        this.childHeight = height;
    }

    @Override
    protected void applyTransformation(float interpolatedTime, Transformation transformation)
    {
        super.applyTransformation(interpolatedTime, transformation);
        if (interpolate) {
            Matrix m = matrix.interpolate(interpolatedTime, childWidth, childHeight, anchorX, anchorY);
            transformation.getMatrix().set(m);

        } else {
            transformation.getMatrix().set(getFinalMatrix(childWidth, childHeight));
        }
    }

    /** Returns the matrix at interpolation time 1.0 (the end state). */
    public Matrix getFinalMatrix(int childWidth, int childHeight)
    {
        return matrix.interpolate(1.0f, childWidth, childHeight, anchorX, anchorY);
    }

    /**
     * Invalidates the parent region the final matrix maps the view into,
     * so the transformed area is redrawn.
     */
    public void invalidateWithMatrix(View view)
    {
        int width = view.getWidth();
        int height = view.getHeight();
        Matrix m = getFinalMatrix(width, height);
        RectF rectF = new RectF(0, 0, width, height);
        m.mapRect(rectF);
        // Expand by one pixel to cover rounding at the edges.
        rectF.inset(-1.0f, -1.0f);
        Rect rect = new Rect();
        rectF.round(rect);

        if (view.getParent() instanceof ViewGroup) {
            int left = view.getLeft();
            int top = view.getTop();

            ((ViewGroup) view.getParent()).invalidate(left + rect.left, top + rect.top, left + rect.width(),
                top + rect.height());
        }
    }
}
/**
 * Animates a view's background color by installing a TransitionDrawable
 * between the from- and to-color and (re)starting the transition from
 * this Animation's own start/repeat callbacks.
 */
public static class TiColorAnimation extends Animation
{
    View view;
    TransitionDrawable transitionDrawable;
    // Tracks direction while repeating in REVERSE mode.
    boolean reversing = false;
    int duration = 0;

    public TiColorAnimation(View view, int fromColor, int toColor)
    {
        this.view = view;

        ColorDrawable fromColorDrawable = new ColorDrawable(fromColor);
        ColorDrawable toColorDrawable = new ColorDrawable(toColor);
        transitionDrawable = new TransitionDrawable(new Drawable[] { fromColorDrawable, toColorDrawable });

        this.setAnimationListener(new android.view.animation.Animation.AnimationListener() {

            public void onAnimationStart(Animation animation)
            {
                // Install the drawable and kick off the first transition,
                // matching the animation's duration.
                TiColorAnimation.this.view.setBackgroundDrawable(transitionDrawable);
                TiColorAnimation.this.duration = Long.valueOf(animation.getDuration()).intValue();
                transitionDrawable.startTransition(TiColorAnimation.this.duration);
            }

            public void onAnimationRepeat(Animation animation)
            {
                if (animation.getRepeatMode() == Animation.REVERSE) {
                    reversing = !reversing;
                }
                if (reversing) {
                    transitionDrawable.reverseTransition(TiColorAnimation.this.duration);
                } else {
                    transitionDrawable.startTransition(TiColorAnimation.this.duration);
                }
            }

            public void onAnimationEnd(Animation animation)
            {
            }
        });
    }
}
/**
 * Listener attached to the AnimationSet (and to selected child
 * animations) that finalizes layout/opacity state at the end and fires
 * the proxy's start/complete events plus the JS callback.
 */
protected class AnimationListener implements Animation.AnimationListener
{
    public void onAnimationEnd(Animation a)
    {
        if (relayoutChild) {
            // Persist the animated position/size into the layout params
            // so touch handling and orientation changes see the new
            // geometry.
            LayoutParams params = (LayoutParams) view.getLayoutParams();
            TiConvert.fillLayout(options, params);
            view.setLayoutParams(params);
            view.clearAnimation();
            relayoutChild = false;
            // TIMOB-11298 Propagate layout property changes to proxy
            for (Object key : options.keySet()) {
                if (TiC.PROPERTY_TOP.equals(key) || TiC.PROPERTY_BOTTOM.equals(key) || TiC.PROPERTY_LEFT.equals(key)
                    || TiC.PROPERTY_RIGHT.equals(key) || TiC.PROPERTY_CENTER.equals(key)
                    || TiC.PROPERTY_WIDTH.equals(key) || TiC.PROPERTY_HEIGHT.equals(key)
                    || TiC.PROPERTY_BACKGROUND_COLOR.equals(key)) {
                    viewProxy.setProperty((String) key, options.get(key));
                }
            }
        }

        // applyOpacity is only set when toOpacity != null, so the
        // floatValue() calls below are safe.
        if (applyOpacity && (autoreverse == null || !autoreverse.booleanValue())) {
            // There is an android bug where animations still occur after
            // this method. We clear it from the view to
            // correct this.
            view.clearAnimation();

            if (toOpacity.floatValue() == 0) {
                view.setVisibility(View.INVISIBLE);

            } else {
                if (view.getVisibility() == View.INVISIBLE) {
                    view.setVisibility(View.VISIBLE);
                }
                // this is apparently the only way to apply an opacity to
                // the entire view and have it stick
                AlphaAnimation aa = new AlphaAnimation(toOpacity.floatValue(), toOpacity.floatValue());
                aa.setDuration(1);
                aa.setFillAfter(true);
                view.setLayoutParams(view.getLayoutParams());
                view.startAnimation(aa);
            }

            applyOpacity = false;
        }

        // Only the outer AnimationSet (not child animations sharing this
        // listener) triggers the completion callback/event.
        if (a instanceof AnimationSet) {
            if (callback != null) {
                callback.callAsync(viewProxy.getKrollObject(), new Object[] { new KrollDict() });
            }

            if (animationProxy != null) {
                // In versions prior to Honeycomb, don't fire the event
                // until the message queue is empty. There appears to be
                // a bug in versions before Honeycomb where this
                // onAnimationEnd listener can be called even before the
                // animation is really complete.
                if (Build.VERSION.SDK_INT >= TiC.API_LEVEL_HONEYCOMB) {
                    animationProxy.fireEvent(TiC.EVENT_COMPLETE, null);
                } else {
                    Looper.myQueue().addIdleHandler(new MessageQueue.IdleHandler() {
                        public boolean queueIdle()
                        {
                            animationProxy.fireEvent(TiC.EVENT_COMPLETE, null);
                            return false;
                        }
                    });
                }
            }
        }
    }

    public void onAnimationRepeat(Animation a)
    {
    }

    public void onAnimationStart(Animation a)
    {
        if (animationProxy != null) {
            animationProxy.fireEvent(TiC.EVENT_START, null);
        }
    }
}
}
| |
package org.openapitools.model;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.util.ArrayList;
import java.util.List;
import org.openapitools.model.FreeStyleBuild;
import org.openapitools.model.FreeStyleProjectactions;
import org.openapitools.model.FreeStyleProjecthealthReport;
import org.openapitools.model.NullSCM;
import javax.validation.constraints.*;
import javax.validation.Valid;
import io.swagger.annotations.*;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonValue;
import com.fasterxml.jackson.annotation.JsonTypeName;
@JsonTypeName("FreeStyleProject")
@javax.annotation.Generated(value = "org.openapitools.codegen.languages.JavaJAXRSSpecServerCodegen", date = "2022-02-13T02:22:19.792787Z[Etc/UTC]")public class FreeStyleProject {
// Generated model fields (OpenAPI codegen; appears to model a Jenkins
// free-style project — see the @Generated annotation on the class).
// All fields are optional; list-valued fields default to empty lists.
private @Valid String propertyClass;
private @Valid String name;
private @Valid String url;
private @Valid String color;
private @Valid List<FreeStyleProjectactions> actions = new ArrayList<FreeStyleProjectactions>();
private @Valid String description;
private @Valid String displayName;
private @Valid String displayNameOrNull;
private @Valid String fullDisplayName;
private @Valid String fullName;
private @Valid Boolean buildable;
private @Valid List<FreeStyleBuild> builds = new ArrayList<FreeStyleBuild>();
private @Valid FreeStyleBuild firstBuild;
private @Valid List<FreeStyleProjecthealthReport> healthReport = new ArrayList<FreeStyleProjecthealthReport>();
private @Valid Boolean inQueue;
private @Valid Boolean keepDependencies;
private @Valid FreeStyleBuild lastBuild;
private @Valid FreeStyleBuild lastCompletedBuild;
private @Valid String lastFailedBuild;
private @Valid FreeStyleBuild lastStableBuild;
private @Valid FreeStyleBuild lastSuccessfulBuild;
private @Valid String lastUnstableBuild;
private @Valid String lastUnsuccessfulBuild;
private @Valid Integer nextBuildNumber;
private @Valid String queueItem;
private @Valid Boolean concurrentBuild;
private @Valid NullSCM scm;
// Generated accessors for _class, name, url, color and actions.
// Each property has a fluent setter (returns this), a getter and a
// setter, all mapped via @JsonProperty.

/**
 * Fluent setter for the JSON "_class" discriminator; returns this for chaining.
 **/
public FreeStyleProject propertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("_class")
public String getPropertyClass() {
    return propertyClass;
}

@JsonProperty("_class")
public void setPropertyClass(String propertyClass) {
    this.propertyClass = propertyClass;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject name(String name) {
    this.name = name;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("name")
public String getName() {
    return name;
}

@JsonProperty("name")
public void setName(String name) {
    this.name = name;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject url(String url) {
    this.url = url;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("url")
public String getUrl() {
    return url;
}

@JsonProperty("url")
public void setUrl(String url) {
    this.url = url;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject color(String color) {
    this.color = color;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("color")
public String getColor() {
    return color;
}

@JsonProperty("color")
public void setColor(String color) {
    this.color = color;
}

/**
 * Fluent setter replacing the whole actions list; returns this for chaining.
 **/
public FreeStyleProject actions(List<FreeStyleProjectactions> actions) {
    this.actions = actions;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("actions")
public List<FreeStyleProjectactions> getActions() {
    return actions;
}

@JsonProperty("actions")
public void setActions(List<FreeStyleProjectactions> actions) {
    this.actions = actions;
}

// Adds a single item, lazily creating the list if a setter nulled it.
public FreeStyleProject addActionsItem(FreeStyleProjectactions actionsItem) {
    if (this.actions == null) {
        this.actions = new ArrayList<FreeStyleProjectactions>();
    }

    this.actions.add(actionsItem);
    return this;
}

// Removes a single item; no-op when the item or list is null.
public FreeStyleProject removeActionsItem(FreeStyleProjectactions actionsItem) {
    if (actionsItem != null && this.actions != null) {
        this.actions.remove(actionsItem);
    }

    return this;
}
// Generated accessors for description and the display-name variants.

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject description(String description) {
    this.description = description;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("description")
public String getDescription() {
    return description;
}

@JsonProperty("description")
public void setDescription(String description) {
    this.description = description;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject displayName(String displayName) {
    this.displayName = displayName;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("displayName")
public String getDisplayName() {
    return displayName;
}

@JsonProperty("displayName")
public void setDisplayName(String displayName) {
    this.displayName = displayName;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject displayNameOrNull(String displayNameOrNull) {
    this.displayNameOrNull = displayNameOrNull;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("displayNameOrNull")
public String getDisplayNameOrNull() {
    return displayNameOrNull;
}

@JsonProperty("displayNameOrNull")
public void setDisplayNameOrNull(String displayNameOrNull) {
    this.displayNameOrNull = displayNameOrNull;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject fullDisplayName(String fullDisplayName) {
    this.fullDisplayName = fullDisplayName;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("fullDisplayName")
public String getFullDisplayName() {
    return fullDisplayName;
}

@JsonProperty("fullDisplayName")
public void setFullDisplayName(String fullDisplayName) {
    this.fullDisplayName = fullDisplayName;
}
// Generated accessors for fullName, buildable and the builds list.

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject fullName(String fullName) {
    this.fullName = fullName;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("fullName")
public String getFullName() {
    return fullName;
}

@JsonProperty("fullName")
public void setFullName(String fullName) {
    this.fullName = fullName;
}

/**
 * Fluent setter; returns this for chaining.
 **/
public FreeStyleProject buildable(Boolean buildable) {
    this.buildable = buildable;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("buildable")
public Boolean getBuildable() {
    return buildable;
}

@JsonProperty("buildable")
public void setBuildable(Boolean buildable) {
    this.buildable = buildable;
}

/**
 * Fluent setter replacing the whole builds list; returns this for chaining.
 **/
public FreeStyleProject builds(List<FreeStyleBuild> builds) {
    this.builds = builds;
    return this;
}

@ApiModelProperty(value = "")
@JsonProperty("builds")
public List<FreeStyleBuild> getBuilds() {
    return builds;
}

@JsonProperty("builds")
public void setBuilds(List<FreeStyleBuild> builds) {
    this.builds = builds;
}

// Adds a single item, lazily creating the list if a setter nulled it.
public FreeStyleProject addBuildsItem(FreeStyleBuild buildsItem) {
    if (this.builds == null) {
        this.builds = new ArrayList<FreeStyleBuild>();
    }

    this.builds.add(buildsItem);
    return this;
}

// Removes a single item; no-op when the item or list is null.
public FreeStyleProject removeBuildsItem(FreeStyleBuild buildsItem) {
    if (buildsItem != null && this.builds != null) {
        this.builds.remove(buildsItem);
    }

    return this;
}
/**
**/
public FreeStyleProject firstBuild(FreeStyleBuild firstBuild) {
this.firstBuild = firstBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("firstBuild")
public FreeStyleBuild getFirstBuild() {
return firstBuild;
}
@JsonProperty("firstBuild")
public void setFirstBuild(FreeStyleBuild firstBuild) {
this.firstBuild = firstBuild;
}
/**
**/
public FreeStyleProject healthReport(List<FreeStyleProjecthealthReport> healthReport) {
this.healthReport = healthReport;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("healthReport")
public List<FreeStyleProjecthealthReport> getHealthReport() {
return healthReport;
}
@JsonProperty("healthReport")
public void setHealthReport(List<FreeStyleProjecthealthReport> healthReport) {
this.healthReport = healthReport;
}
public FreeStyleProject addHealthReportItem(FreeStyleProjecthealthReport healthReportItem) {
if (this.healthReport == null) {
this.healthReport = new ArrayList<FreeStyleProjecthealthReport>();
}
this.healthReport.add(healthReportItem);
return this;
}
public FreeStyleProject removeHealthReportItem(FreeStyleProjecthealthReport healthReportItem) {
if (healthReportItem != null && this.healthReport != null) {
this.healthReport.remove(healthReportItem);
}
return this;
}
/**
**/
public FreeStyleProject inQueue(Boolean inQueue) {
this.inQueue = inQueue;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("inQueue")
public Boolean getInQueue() {
return inQueue;
}
@JsonProperty("inQueue")
public void setInQueue(Boolean inQueue) {
this.inQueue = inQueue;
}
/**
**/
public FreeStyleProject keepDependencies(Boolean keepDependencies) {
this.keepDependencies = keepDependencies;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("keepDependencies")
public Boolean getKeepDependencies() {
return keepDependencies;
}
@JsonProperty("keepDependencies")
public void setKeepDependencies(Boolean keepDependencies) {
this.keepDependencies = keepDependencies;
}
/**
**/
public FreeStyleProject lastBuild(FreeStyleBuild lastBuild) {
this.lastBuild = lastBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastBuild")
public FreeStyleBuild getLastBuild() {
return lastBuild;
}
@JsonProperty("lastBuild")
public void setLastBuild(FreeStyleBuild lastBuild) {
this.lastBuild = lastBuild;
}
/**
**/
public FreeStyleProject lastCompletedBuild(FreeStyleBuild lastCompletedBuild) {
this.lastCompletedBuild = lastCompletedBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastCompletedBuild")
public FreeStyleBuild getLastCompletedBuild() {
return lastCompletedBuild;
}
@JsonProperty("lastCompletedBuild")
public void setLastCompletedBuild(FreeStyleBuild lastCompletedBuild) {
this.lastCompletedBuild = lastCompletedBuild;
}
/**
**/
public FreeStyleProject lastFailedBuild(String lastFailedBuild) {
this.lastFailedBuild = lastFailedBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastFailedBuild")
public String getLastFailedBuild() {
return lastFailedBuild;
}
@JsonProperty("lastFailedBuild")
public void setLastFailedBuild(String lastFailedBuild) {
this.lastFailedBuild = lastFailedBuild;
}
/**
**/
public FreeStyleProject lastStableBuild(FreeStyleBuild lastStableBuild) {
this.lastStableBuild = lastStableBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastStableBuild")
public FreeStyleBuild getLastStableBuild() {
return lastStableBuild;
}
@JsonProperty("lastStableBuild")
public void setLastStableBuild(FreeStyleBuild lastStableBuild) {
this.lastStableBuild = lastStableBuild;
}
/**
**/
public FreeStyleProject lastSuccessfulBuild(FreeStyleBuild lastSuccessfulBuild) {
this.lastSuccessfulBuild = lastSuccessfulBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastSuccessfulBuild")
public FreeStyleBuild getLastSuccessfulBuild() {
return lastSuccessfulBuild;
}
@JsonProperty("lastSuccessfulBuild")
public void setLastSuccessfulBuild(FreeStyleBuild lastSuccessfulBuild) {
this.lastSuccessfulBuild = lastSuccessfulBuild;
}
/**
**/
public FreeStyleProject lastUnstableBuild(String lastUnstableBuild) {
this.lastUnstableBuild = lastUnstableBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastUnstableBuild")
public String getLastUnstableBuild() {
return lastUnstableBuild;
}
@JsonProperty("lastUnstableBuild")
public void setLastUnstableBuild(String lastUnstableBuild) {
this.lastUnstableBuild = lastUnstableBuild;
}
/**
**/
public FreeStyleProject lastUnsuccessfulBuild(String lastUnsuccessfulBuild) {
this.lastUnsuccessfulBuild = lastUnsuccessfulBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("lastUnsuccessfulBuild")
public String getLastUnsuccessfulBuild() {
return lastUnsuccessfulBuild;
}
@JsonProperty("lastUnsuccessfulBuild")
public void setLastUnsuccessfulBuild(String lastUnsuccessfulBuild) {
this.lastUnsuccessfulBuild = lastUnsuccessfulBuild;
}
/**
**/
public FreeStyleProject nextBuildNumber(Integer nextBuildNumber) {
this.nextBuildNumber = nextBuildNumber;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("nextBuildNumber")
public Integer getNextBuildNumber() {
return nextBuildNumber;
}
@JsonProperty("nextBuildNumber")
public void setNextBuildNumber(Integer nextBuildNumber) {
this.nextBuildNumber = nextBuildNumber;
}
/**
**/
public FreeStyleProject queueItem(String queueItem) {
this.queueItem = queueItem;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("queueItem")
public String getQueueItem() {
return queueItem;
}
@JsonProperty("queueItem")
public void setQueueItem(String queueItem) {
this.queueItem = queueItem;
}
/**
**/
public FreeStyleProject concurrentBuild(Boolean concurrentBuild) {
this.concurrentBuild = concurrentBuild;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("concurrentBuild")
public Boolean getConcurrentBuild() {
return concurrentBuild;
}
@JsonProperty("concurrentBuild")
public void setConcurrentBuild(Boolean concurrentBuild) {
this.concurrentBuild = concurrentBuild;
}
/**
**/
public FreeStyleProject scm(NullSCM scm) {
this.scm = scm;
return this;
}
@ApiModelProperty(value = "")
@JsonProperty("scm")
public NullSCM getScm() {
return scm;
}
@JsonProperty("scm")
public void setScm(NullSCM scm) {
this.scm = scm;
}
/**
 * Field-by-field equality over every generated property; the field list and order
 * match {@link #hashCode()} exactly, as the equals/hashCode contract requires.
 */
@Override
public boolean equals(Object o) {
    if (this == o) {
        return true;
    }
    // Strict class match (not instanceof): subclasses never compare equal.
    if (o == null || getClass() != o.getClass()) {
        return false;
    }
    FreeStyleProject freeStyleProject = (FreeStyleProject) o;
    return Objects.equals(this.propertyClass, freeStyleProject.propertyClass) &&
        Objects.equals(this.name, freeStyleProject.name) &&
        Objects.equals(this.url, freeStyleProject.url) &&
        Objects.equals(this.color, freeStyleProject.color) &&
        Objects.equals(this.actions, freeStyleProject.actions) &&
        Objects.equals(this.description, freeStyleProject.description) &&
        Objects.equals(this.displayName, freeStyleProject.displayName) &&
        Objects.equals(this.displayNameOrNull, freeStyleProject.displayNameOrNull) &&
        Objects.equals(this.fullDisplayName, freeStyleProject.fullDisplayName) &&
        Objects.equals(this.fullName, freeStyleProject.fullName) &&
        Objects.equals(this.buildable, freeStyleProject.buildable) &&
        Objects.equals(this.builds, freeStyleProject.builds) &&
        Objects.equals(this.firstBuild, freeStyleProject.firstBuild) &&
        Objects.equals(this.healthReport, freeStyleProject.healthReport) &&
        Objects.equals(this.inQueue, freeStyleProject.inQueue) &&
        Objects.equals(this.keepDependencies, freeStyleProject.keepDependencies) &&
        Objects.equals(this.lastBuild, freeStyleProject.lastBuild) &&
        Objects.equals(this.lastCompletedBuild, freeStyleProject.lastCompletedBuild) &&
        Objects.equals(this.lastFailedBuild, freeStyleProject.lastFailedBuild) &&
        Objects.equals(this.lastStableBuild, freeStyleProject.lastStableBuild) &&
        Objects.equals(this.lastSuccessfulBuild, freeStyleProject.lastSuccessfulBuild) &&
        Objects.equals(this.lastUnstableBuild, freeStyleProject.lastUnstableBuild) &&
        Objects.equals(this.lastUnsuccessfulBuild, freeStyleProject.lastUnsuccessfulBuild) &&
        Objects.equals(this.nextBuildNumber, freeStyleProject.nextBuildNumber) &&
        Objects.equals(this.queueItem, freeStyleProject.queueItem) &&
        Objects.equals(this.concurrentBuild, freeStyleProject.concurrentBuild) &&
        Objects.equals(this.scm, freeStyleProject.scm);
}
/** Hashes exactly the fields compared in {@link #equals(Object)}, in the same order. */
@Override
public int hashCode() {
    return Objects.hash(propertyClass, name, url, color, actions, description, displayName, displayNameOrNull, fullDisplayName, fullName, buildable, builds, firstBuild, healthReport, inQueue, keepDependencies, lastBuild, lastCompletedBuild, lastFailedBuild, lastStableBuild, lastSuccessfulBuild, lastUnstableBuild, lastUnsuccessfulBuild, nextBuildNumber, queueItem, concurrentBuild, scm);
}
/**
 * Multi-line debug representation listing every property, one per line,
 * with nested values indented via {@link #toIndentedString(Object)}.
 */
@Override
public String toString() {
    StringBuilder sb = new StringBuilder();
    sb.append("class FreeStyleProject {\n");
    sb.append(" propertyClass: ").append(toIndentedString(propertyClass)).append("\n");
    sb.append(" name: ").append(toIndentedString(name)).append("\n");
    sb.append(" url: ").append(toIndentedString(url)).append("\n");
    sb.append(" color: ").append(toIndentedString(color)).append("\n");
    sb.append(" actions: ").append(toIndentedString(actions)).append("\n");
    sb.append(" description: ").append(toIndentedString(description)).append("\n");
    sb.append(" displayName: ").append(toIndentedString(displayName)).append("\n");
    sb.append(" displayNameOrNull: ").append(toIndentedString(displayNameOrNull)).append("\n");
    sb.append(" fullDisplayName: ").append(toIndentedString(fullDisplayName)).append("\n");
    sb.append(" fullName: ").append(toIndentedString(fullName)).append("\n");
    sb.append(" buildable: ").append(toIndentedString(buildable)).append("\n");
    sb.append(" builds: ").append(toIndentedString(builds)).append("\n");
    sb.append(" firstBuild: ").append(toIndentedString(firstBuild)).append("\n");
    sb.append(" healthReport: ").append(toIndentedString(healthReport)).append("\n");
    sb.append(" inQueue: ").append(toIndentedString(inQueue)).append("\n");
    sb.append(" keepDependencies: ").append(toIndentedString(keepDependencies)).append("\n");
    sb.append(" lastBuild: ").append(toIndentedString(lastBuild)).append("\n");
    sb.append(" lastCompletedBuild: ").append(toIndentedString(lastCompletedBuild)).append("\n");
    sb.append(" lastFailedBuild: ").append(toIndentedString(lastFailedBuild)).append("\n");
    sb.append(" lastStableBuild: ").append(toIndentedString(lastStableBuild)).append("\n");
    sb.append(" lastSuccessfulBuild: ").append(toIndentedString(lastSuccessfulBuild)).append("\n");
    sb.append(" lastUnstableBuild: ").append(toIndentedString(lastUnstableBuild)).append("\n");
    sb.append(" lastUnsuccessfulBuild: ").append(toIndentedString(lastUnsuccessfulBuild)).append("\n");
    sb.append(" nextBuildNumber: ").append(toIndentedString(nextBuildNumber)).append("\n");
    sb.append(" queueItem: ").append(toIndentedString(queueItem)).append("\n");
    sb.append(" concurrentBuild: ").append(toIndentedString(concurrentBuild)).append("\n");
    sb.append(" scm: ").append(toIndentedString(scm)).append("\n");
    sb.append("}");
    return sb.toString();
}
/**
 * Convert the given object to string, indenting every line after
 * the first so nested objects line up in {@link #toString()}.
 */
private String toIndentedString(Object o) {
    return (o == null) ? "null" : o.toString().replace("\n", "\n ");
}
}
| |
// Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.cloud.hypervisor.vmware.util;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.apache.log4j.Logger;
import com.cloud.hypervisor.vmware.mo.DatacenterMO;
import com.cloud.hypervisor.vmware.mo.DatastoreMO;
import com.cloud.hypervisor.vmware.mo.HostMO;
import com.cloud.hypervisor.vmware.mo.VirtualEthernetCardType;
import com.cloud.hypervisor.vmware.mo.VirtualMachineMO;
import com.cloud.utils.Pair;
import com.cloud.utils.Ternary;
import com.cloud.utils.exception.ExceptionUtil;
import com.vmware.vim25.DistributedVirtualSwitchPortConnection;
import com.vmware.vim25.DynamicProperty;
import com.vmware.vim25.ManagedObjectReference;
import com.vmware.vim25.MethodFault;
import com.vmware.vim25.ObjectContent;
import com.vmware.vim25.OptionValue;
import com.vmware.vim25.ResourceAllocationInfo;
import com.vmware.vim25.VirtualCdrom;
import com.vmware.vim25.VirtualCdromIsoBackingInfo;
import com.vmware.vim25.VirtualCdromRemotePassthroughBackingInfo;
import com.vmware.vim25.VirtualDevice;
import com.vmware.vim25.VirtualDeviceBackingInfo;
import com.vmware.vim25.VirtualDeviceConnectInfo;
import com.vmware.vim25.VirtualDisk;
import com.vmware.vim25.VirtualDiskFlatVer1BackingInfo;
import com.vmware.vim25.VirtualDiskFlatVer2BackingInfo;
import com.vmware.vim25.VirtualDiskMode;
import com.vmware.vim25.VirtualDiskRawDiskMappingVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer1BackingInfo;
import com.vmware.vim25.VirtualDiskSparseVer2BackingInfo;
import com.vmware.vim25.VirtualE1000;
import com.vmware.vim25.VirtualEthernetCard;
import com.vmware.vim25.VirtualEthernetCardDistributedVirtualPortBackingInfo;
import com.vmware.vim25.VirtualEthernetCardNetworkBackingInfo;
import com.vmware.vim25.VirtualMachineConfigSpec;
import com.vmware.vim25.VirtualMachineSnapshotTree;
import com.vmware.vim25.VirtualPCNet32;
import com.vmware.vim25.VirtualVmxnet2;
import com.vmware.vim25.VirtualVmxnet3;
public class VmwareHelper {
private static final Logger s_logger = Logger.getLogger(VmwareHelper.class);
public static VirtualDevice prepareNicDevice(VirtualMachineMO vmMo, ManagedObjectReference morNetwork, VirtualEthernetCardType deviceType,
String portGroupName, String macAddress, int deviceNumber, int contextNumber, boolean conntected, boolean connectOnStart) throws Exception {
VirtualEthernetCard nic;
switch(deviceType) {
case E1000 :
nic = new VirtualE1000();
break;
case PCNet32 :
nic = new VirtualPCNet32();
break;
case Vmxnet2 :
nic = new VirtualVmxnet2();
break;
case Vmxnet3 :
nic = new VirtualVmxnet3();
break;
default :
assert(false);
nic = new VirtualE1000();
}
VirtualEthernetCardNetworkBackingInfo nicBacking = new VirtualEthernetCardNetworkBackingInfo();
nicBacking.setDeviceName(portGroupName);
nicBacking.setNetwork(morNetwork);
nic.setBacking(nicBacking);
VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
connectInfo.setAllowGuestControl(true);
connectInfo.setConnected(conntected);
connectInfo.setStartConnected(connectOnStart);
nic.setAddressType("Manual");
nic.setConnectable(connectInfo);
nic.setMacAddress(macAddress);
nic.setUnitNumber(deviceNumber);
nic.setKey(-contextNumber);
return nic;
}
public static VirtualDevice prepareDvNicDevice(VirtualMachineMO vmMo, ManagedObjectReference morNetwork, VirtualEthernetCardType deviceType,
String dvPortGroupName, String dvSwitchUuid, String macAddress, int deviceNumber, int contextNumber, boolean conntected, boolean connectOnStart) throws Exception {
VirtualEthernetCard nic;
switch (deviceType) {
case E1000:
nic = new VirtualE1000();
break;
case PCNet32:
nic = new VirtualPCNet32();
break;
case Vmxnet2:
nic = new VirtualVmxnet2();
break;
case Vmxnet3:
nic = new VirtualVmxnet3();
break;
default:
assert (false);
nic = new VirtualE1000();
}
final VirtualEthernetCardDistributedVirtualPortBackingInfo dvPortBacking = new VirtualEthernetCardDistributedVirtualPortBackingInfo();
final DistributedVirtualSwitchPortConnection dvPortConnection = new DistributedVirtualSwitchPortConnection();
final VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
dvPortConnection.setSwitchUuid(dvSwitchUuid);
dvPortConnection.setPortgroupKey(morNetwork.get_value());
dvPortBacking.setPort(dvPortConnection);
nic.setBacking(dvPortBacking);
nic.setKey(30);
connectInfo.setAllowGuestControl(true);
connectInfo.setConnected(conntected);
connectInfo.setStartConnected(connectOnStart);
nic.setAddressType("Manual");
nic.setConnectable(connectInfo);
nic.setMacAddress(macAddress);
nic.setUnitNumber(deviceNumber);
nic.setKey(-contextNumber);
return nic;
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
public static VirtualDevice prepareDiskDevice(VirtualMachineMO vmMo, int controllerKey, String vmdkDatastorePath,
int sizeInMb, ManagedObjectReference morDs, int deviceNumber, int contextNumber) throws Exception {
VirtualDisk disk = new VirtualDisk();
VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
backingInfo.setDiskMode(VirtualDiskMode.persistent.toString());
backingInfo.setThinProvisioned(true);
backingInfo.setEagerlyScrub(false);
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
disk.setBacking(backingInfo);
if(controllerKey < 0)
controllerKey = vmMo.getIDEDeviceControllerKey();
if(deviceNumber < 0)
deviceNumber = vmMo.getNextDeviceNumber(controllerKey);
disk.setControllerKey(controllerKey);
disk.setKey(-contextNumber);
disk.setUnitNumber(deviceNumber);
disk.setCapacityInKB(sizeInMb*1024);
VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
connectInfo.setConnected(true);
connectInfo.setStartConnected(true);
disk.setConnectable(connectInfo);
return disk;
}
// vmdkDatastorePath: [datastore name] vmdkFilePath, create delta disk based on disk from template
public static VirtualDevice prepareDiskDevice(VirtualMachineMO vmMo, int controllerKey, String vmdkDatastorePath,
int sizeInMb, ManagedObjectReference morDs, VirtualDisk templateDisk, int deviceNumber, int contextNumber) throws Exception {
assert(templateDisk != null);
VirtualDeviceBackingInfo parentBacking = templateDisk.getBacking();
assert(parentBacking != null);
// TODO Not sure if we need to check if the disk in template and the new disk needs to share the
// same datastore
VirtualDisk disk = new VirtualDisk();
if(parentBacking instanceof VirtualDiskFlatVer1BackingInfo) {
VirtualDiskFlatVer1BackingInfo backingInfo = new VirtualDiskFlatVer1BackingInfo();
backingInfo.setDiskMode(((VirtualDiskFlatVer1BackingInfo)parentBacking).getDiskMode());
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
backingInfo.setParent((VirtualDiskFlatVer1BackingInfo)parentBacking);
disk.setBacking(backingInfo);
} else if(parentBacking instanceof VirtualDiskFlatVer2BackingInfo) {
VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
backingInfo.setDiskMode(((VirtualDiskFlatVer2BackingInfo)parentBacking).getDiskMode());
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
backingInfo.setParent((VirtualDiskFlatVer2BackingInfo)parentBacking);
disk.setBacking(backingInfo);
} else if(parentBacking instanceof VirtualDiskRawDiskMappingVer1BackingInfo) {
VirtualDiskRawDiskMappingVer1BackingInfo backingInfo = new VirtualDiskRawDiskMappingVer1BackingInfo();
backingInfo.setDiskMode(((VirtualDiskRawDiskMappingVer1BackingInfo)parentBacking).getDiskMode());
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
backingInfo.setParent((VirtualDiskRawDiskMappingVer1BackingInfo)parentBacking);
disk.setBacking(backingInfo);
} else if(parentBacking instanceof VirtualDiskSparseVer1BackingInfo) {
VirtualDiskSparseVer1BackingInfo backingInfo = new VirtualDiskSparseVer1BackingInfo();
backingInfo.setDiskMode(((VirtualDiskSparseVer1BackingInfo)parentBacking).getDiskMode());
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
backingInfo.setParent((VirtualDiskSparseVer1BackingInfo)parentBacking);
disk.setBacking(backingInfo);
} else if(parentBacking instanceof VirtualDiskSparseVer2BackingInfo) {
VirtualDiskSparseVer2BackingInfo backingInfo = new VirtualDiskSparseVer2BackingInfo();
backingInfo.setDiskMode(((VirtualDiskSparseVer2BackingInfo)parentBacking).getDiskMode());
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePath);
backingInfo.setParent((VirtualDiskSparseVer2BackingInfo)parentBacking);
disk.setBacking(backingInfo);
} else {
throw new Exception("Unsupported disk backing: " + parentBacking.getClass().getCanonicalName());
}
if(controllerKey < 0)
controllerKey = vmMo.getIDEDeviceControllerKey();
disk.setControllerKey(controllerKey);
if(deviceNumber < 0)
deviceNumber = vmMo.getNextDeviceNumber(controllerKey);
disk.setKey(-contextNumber);
disk.setUnitNumber(deviceNumber);
disk.setCapacityInKB(sizeInMb*1024);
VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
connectInfo.setConnected(true);
connectInfo.setStartConnected(true);
disk.setConnectable(connectInfo);
return disk;
}
// vmdkDatastorePath: [datastore name] vmdkFilePath
public static VirtualDevice prepareDiskDevice(VirtualMachineMO vmMo, int controllerKey, String vmdkDatastorePathChain[],
ManagedObjectReference morDs, int deviceNumber, int contextNumber) throws Exception {
assert(vmdkDatastorePathChain != null);
assert(vmdkDatastorePathChain.length >= 1);
VirtualDisk disk = new VirtualDisk();
VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
backingInfo.setDatastore(morDs);
backingInfo.setFileName(vmdkDatastorePathChain[0]);
backingInfo.setDiskMode(VirtualDiskMode.persistent.toString());
if(vmdkDatastorePathChain.length > 1) {
String[] parentDisks = new String[vmdkDatastorePathChain.length - 1];
for(int i = 0; i < vmdkDatastorePathChain.length - 1; i++)
parentDisks[i] = vmdkDatastorePathChain[i + 1];
setParentBackingInfo(backingInfo, morDs, parentDisks);
}
disk.setBacking(backingInfo);
if(controllerKey < 0)
controllerKey = vmMo.getIDEDeviceControllerKey();
if(deviceNumber < 0)
deviceNumber = vmMo.getNextDeviceNumber(controllerKey);
disk.setControllerKey(controllerKey);
disk.setKey(-contextNumber);
disk.setUnitNumber(deviceNumber);
VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
connectInfo.setConnected(true);
connectInfo.setStartConnected(true);
disk.setConnectable(connectInfo);
return disk;
}
public static VirtualDevice prepareDiskDevice(VirtualMachineMO vmMo, int controllerKey,
Pair<String, ManagedObjectReference>[] vmdkDatastorePathChain,
int deviceNumber, int contextNumber) throws Exception {
assert(vmdkDatastorePathChain != null);
assert(vmdkDatastorePathChain.length >= 1);
VirtualDisk disk = new VirtualDisk();
VirtualDiskFlatVer2BackingInfo backingInfo = new VirtualDiskFlatVer2BackingInfo();
backingInfo.setDatastore(vmdkDatastorePathChain[0].second());
backingInfo.setFileName(vmdkDatastorePathChain[0].first());
backingInfo.setDiskMode(VirtualDiskMode.persistent.toString());
if(vmdkDatastorePathChain.length > 1) {
Pair<String, ManagedObjectReference>[] parentDisks = new Pair[vmdkDatastorePathChain.length - 1];
for(int i = 0; i < vmdkDatastorePathChain.length - 1; i++)
parentDisks[i] = vmdkDatastorePathChain[i + 1];
setParentBackingInfo(backingInfo, parentDisks);
}
disk.setBacking(backingInfo);
if(controllerKey < 0)
controllerKey = vmMo.getIDEDeviceControllerKey();
if(deviceNumber < 0)
deviceNumber = vmMo.getNextDeviceNumber(controllerKey);
disk.setControllerKey(controllerKey);
disk.setKey(-contextNumber);
disk.setUnitNumber(deviceNumber);
VirtualDeviceConnectInfo connectInfo = new VirtualDeviceConnectInfo();
connectInfo.setConnected(true);
connectInfo.setStartConnected(true);
disk.setConnectable(connectInfo);
return disk;
}
private static void setParentBackingInfo(VirtualDiskFlatVer2BackingInfo backingInfo,
ManagedObjectReference morDs, String[] parentDatastorePathList) {
VirtualDiskFlatVer2BackingInfo parentBacking = new VirtualDiskFlatVer2BackingInfo();
parentBacking.setDatastore(morDs);
parentBacking.setDiskMode(VirtualDiskMode.persistent.toString());
if(parentDatastorePathList.length > 1) {
String[] nextDatastorePathList = new String[parentDatastorePathList.length -1];
for(int i = 0; i < parentDatastorePathList.length -1; i++)
nextDatastorePathList[i] = parentDatastorePathList[i + 1];
setParentBackingInfo(parentBacking, morDs, nextDatastorePathList);
}
parentBacking.setFileName(parentDatastorePathList[0]);
backingInfo.setParent(parentBacking);
}
private static void setParentBackingInfo(VirtualDiskFlatVer2BackingInfo backingInfo,
Pair<String, ManagedObjectReference>[] parentDatastorePathList) {
VirtualDiskFlatVer2BackingInfo parentBacking = new VirtualDiskFlatVer2BackingInfo();
parentBacking.setDatastore(parentDatastorePathList[0].second());
parentBacking.setDiskMode(VirtualDiskMode.persistent.toString());
if(parentDatastorePathList.length > 1) {
Pair<String, ManagedObjectReference>[] nextDatastorePathList = new Pair[parentDatastorePathList.length -1];
for(int i = 0; i < parentDatastorePathList.length -1; i++)
nextDatastorePathList[i] = parentDatastorePathList[i + 1];
setParentBackingInfo(parentBacking, nextDatastorePathList);
}
parentBacking.setFileName(parentDatastorePathList[0].first());
backingInfo.setParent(parentBacking);
}
public static Pair<VirtualDevice, Boolean> prepareIsoDevice(VirtualMachineMO vmMo, String isoDatastorePath, ManagedObjectReference morDs,
boolean connect, boolean connectAtBoot, int deviceNumber, int contextNumber) throws Exception {
boolean newCdRom = false;
VirtualCdrom cdRom = (VirtualCdrom )vmMo.getIsoDevice();
if(cdRom == null) {
newCdRom = true;
cdRom = new VirtualCdrom();
assert(vmMo.getIDEDeviceControllerKey() >= 0);
cdRom.setControllerKey(vmMo.getIDEDeviceControllerKey());
if(deviceNumber < 0)
deviceNumber = vmMo.getNextIDEDeviceNumber();
cdRom.setUnitNumber(deviceNumber);
cdRom.setKey(-contextNumber);
}
VirtualDeviceConnectInfo cInfo = new VirtualDeviceConnectInfo();
cInfo.setConnected(connect);
cInfo.setStartConnected(connectAtBoot);
cdRom.setConnectable(cInfo);
if(isoDatastorePath != null) {
VirtualCdromIsoBackingInfo backingInfo = new VirtualCdromIsoBackingInfo();
backingInfo.setFileName(isoDatastorePath);
backingInfo.setDatastore(morDs);
cdRom.setBacking(backingInfo);
} else {
VirtualCdromRemotePassthroughBackingInfo backingInfo = new VirtualCdromRemotePassthroughBackingInfo();
backingInfo.setDeviceName("");
cdRom.setBacking(backingInfo);
}
return new Pair<VirtualDevice, Boolean>(cdRom, newCdRom);
}
public static VirtualDisk getRootDisk(VirtualDisk[] disks) {
if(disks.length == 1)
return disks[0];
// TODO : for now, always return the first disk as root disk
return disks[0];
}
public static ManagedObjectReference findSnapshotInTree(VirtualMachineSnapshotTree[] snapTree, String findName) {
assert(findName != null);
ManagedObjectReference snapMor = null;
if (snapTree == null)
return snapMor;
for (int i = 0; i < snapTree.length && snapMor == null; i++) {
VirtualMachineSnapshotTree node = snapTree[i];
if (node.getName().equals(findName)) {
snapMor = node.getSnapshot();
} else {
VirtualMachineSnapshotTree[] childTree = node.getChildSnapshotList();
snapMor = findSnapshotInTree(childTree, findName);
}
}
return snapMor;
}
public static byte[] composeDiskInfo(List<Ternary<String, String, String>> diskInfo, int disksInChain, boolean includeBase) throws IOException {
BufferedWriter out = null;
ByteArrayOutputStream bos = new ByteArrayOutputStream();
try {
out = new BufferedWriter(new OutputStreamWriter(bos));
out.write("disksInChain=" + disksInChain);
out.newLine();
out.write("disksInBackup=" + diskInfo.size());
out.newLine();
out.write("baseDiskIncluded=" + includeBase);
out.newLine();
int seq = disksInChain - 1;
for(Ternary<String, String, String> item : diskInfo) {
out.write(String.format("disk%d.fileName=%s", seq, item.first()));
out.newLine();
out.write(String.format("disk%d.baseFileName=%s", seq, item.second()));
out.newLine();
if(item.third() != null) {
out.write(String.format("disk%d.parentFileName=%s", seq, item.third()));
out.newLine();
}
seq--;
}
out.newLine();
} finally {
if(out != null)
out.close();
}
return bos.toByteArray();
}
public static OptionValue[] composeVncOptions(OptionValue[] optionsToMerge,
boolean enableVnc, String vncPassword, int vncPort, String keyboardLayout) {
int numOptions = 3;
boolean needKeyboardSetup = false;
if(keyboardLayout != null && !keyboardLayout.isEmpty()) {
numOptions++;
needKeyboardSetup = true;
}
if(optionsToMerge != null)
numOptions += optionsToMerge.length;
OptionValue[] options = new OptionValue[numOptions];
int i = 0;
if(optionsToMerge != null) {
for(int j = 0; j < optionsToMerge.length; j++)
options[i++] = optionsToMerge[j];
}
options[i] = new OptionValue();
options[i].setKey("RemoteDisplay.vnc.enabled");
options[i++].setValue(enableVnc ? "true" : "false");
options[i] = new OptionValue();
options[i].setKey("RemoteDisplay.vnc.password");
options[i++].setValue(vncPassword);
options[i] = new OptionValue();
options[i].setKey("RemoteDisplay.vnc.port");
options[i++].setValue("" + vncPort);
if(needKeyboardSetup) {
options[i] = new OptionValue();
options[i].setKey("RemoteDisplay.vnc.keymap");
options[i++].setValue(keyboardLayout);
}
return options;
}
public static void setBasicVmConfig(VirtualMachineConfigSpec vmConfig, int cpuCount, int cpuSpeedMHz, int cpuReservedMhz,
int memoryMB, int memoryReserveMB, String guestOsIdentifier, boolean limitCpuUse) {
// VM config basics
vmConfig.setMemoryMB((long)memoryMB);
vmConfig.setNumCPUs(cpuCount);
ResourceAllocationInfo cpuInfo = new ResourceAllocationInfo();
if (limitCpuUse) {
cpuInfo.setLimit((long)(cpuSpeedMHz * cpuCount));
} else {
cpuInfo.setLimit(-1L);
}
cpuInfo.setReservation((long)cpuReservedMhz);
vmConfig.setCpuAllocation(cpuInfo);
ResourceAllocationInfo memInfo = new ResourceAllocationInfo();
memInfo.setLimit((long)memoryMB);
memInfo.setReservation((long)memoryReserveMB);
vmConfig.setMemoryAllocation(memInfo);
vmConfig.setGuestId(guestOsIdentifier);
}
public static ManagedObjectReference getDiskDeviceDatastore(VirtualDisk diskDevice) throws Exception {
VirtualDeviceBackingInfo backingInfo = diskDevice.getBacking();
assert(backingInfo instanceof VirtualDiskFlatVer2BackingInfo);
return ((VirtualDiskFlatVer2BackingInfo)backingInfo).getDatastore();
}
public static Object getPropValue(ObjectContent oc, String name) {
DynamicProperty[] props = oc.getPropSet();
for(DynamicProperty prop : props) {
if(prop.getName().equalsIgnoreCase(name))
return prop.getVal();
}
return null;
}
public static String getFileExtension(String fileName, String defaultExtension) {
int pos = fileName.lastIndexOf('.');
if(pos < 0)
return defaultExtension;
return fileName.substring(pos);
}
public static boolean isSameHost(String ipAddress, String destName) {
// TODO : may need to do DNS lookup to compare IP address exactly
return ipAddress.equals(destName);
}
public static void deleteVolumeVmdkFiles(DatastoreMO dsMo, String volumeName, DatacenterMO dcMo) throws Exception {
String volumeDatastorePath = String.format("[%s] %s.vmdk", dsMo.getName(), volumeName);
dsMo.deleteFile(volumeDatastorePath, dcMo.getMor(), true);
volumeDatastorePath = String.format("[%s] %s-flat.vmdk", dsMo.getName(), volumeName);
dsMo.deleteFile(volumeDatastorePath, dcMo.getMor(), true);
volumeDatastorePath = String.format("[%s] %s-delta.vmdk", dsMo.getName(), volumeName);
dsMo.deleteFile(volumeDatastorePath, dcMo.getMor(), true);
}
    /**
     * Returns a human-readable message for {@code e} without a stack trace.
     * Convenience overload of {@link #getExceptionMessage(Throwable, boolean)}.
     */
    public static String getExceptionMessage(Throwable e) {
        return getExceptionMessage(e, false);
    }
public static String getExceptionMessage(Throwable e, boolean printStack) {
if(e instanceof MethodFault) {
final StringWriter writer = new StringWriter();
writer.append("Exception: " + e.getClass().getName() + "\n");
writer.append("message: " + ((MethodFault)e).getFaultString() + "\n");
if(printStack) {
writer.append("stack: ");
e.printStackTrace(new PrintWriter(writer));
}
return writer.toString();
}
return ExceptionUtil.toString(e, printStack);
}
public static VirtualMachineMO pickOneVmOnRunningHost(List<VirtualMachineMO> vmList, boolean bFirstFit) throws Exception {
List<VirtualMachineMO> candidates = new ArrayList<VirtualMachineMO>();
for(VirtualMachineMO vmMo : vmList) {
HostMO hostMo = vmMo.getRunningHost();
if(hostMo.isHyperHostConnected())
candidates.add(vmMo);
}
if(candidates.size() == 0)
return null;
if(bFirstFit)
return candidates.get(0);
Random random = new Random();
return candidates.get(random.nextInt(candidates.size()));
}
}
| |
/**
* Copyright 2007 The Apache Software Foundation
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.regionserver.wal;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.regionserver.wal.HLog.Reader;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSHDFSUtils;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
import org.apache.hadoop.hbase.coprocessor.SampleRegionWALObserver;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.io.SequenceFile;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
/** JUnit test case for HLog */
@Category(LargeTests.class)
public class TestHLog  {
  private static final Log LOG = LogFactory.getLog(TestHLog.class);
  // Instance initializer: crank HDFS/WAL logging to ALL so failures are diagnosable.
  {
    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.server.namenode.FSNamesystem"))
      .getLogger().setLevel(Level.ALL);
    ((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
    ((Log4JLogger)HLog.LOG).getLogger().setLevel(Level.ALL);
  }
  private static Configuration conf;
  private static FileSystem fs;
  private static Path dir;
  private static MiniDFSCluster cluster;
  private final static HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
  private static Path hbaseDir;
  private static Path oldLogDir;
  // Wipe everything under the DFS root so each test starts from a clean slate.
  @Before
  public void setUp() throws Exception {
    FileStatus[] entries = fs.listStatus(new Path("/"));
    for (FileStatus dir : entries) {
      fs.delete(dir.getPath(), true);
    }
  }
  @After
  public void tearDown() throws Exception {
  }
  // One-time cluster setup shared by all tests: a 3-datanode MiniDFSCluster with
  // small blocks, append support, fast heartbeats/timeouts and a sample WAL
  // coprocessor registered.
  @BeforeClass
  public static void setUpBeforeClass() throws Exception {
    // Make block sizes small.
    TEST_UTIL.getConfiguration().setInt("dfs.blocksize", 1024 * 1024);
    // needed for testAppendClose()
    TEST_UTIL.getConfiguration().setBoolean("dfs.support.broken.append", true);
    TEST_UTIL.getConfiguration().setBoolean("dfs.support.append", true);
    // quicker heartbeat interval for faster DN death notification
    TEST_UTIL.getConfiguration().setInt("heartbeat.recheck.interval", 5000);
    TEST_UTIL.getConfiguration().setInt("dfs.heartbeat.interval", 1);
    TEST_UTIL.getConfiguration().setInt("dfs.socket.timeout", 5000);
    // faster failover with cluster.shutdown();fs.close() idiom
    TEST_UTIL.getConfiguration()
        .setInt("ipc.client.connect.max.retries", 1);
    TEST_UTIL.getConfiguration().setInt(
        "dfs.client.block.recovery.retries", 1);
    TEST_UTIL.getConfiguration().setInt(
      "ipc.client.connection.maxidletime", 500);
    TEST_UTIL.getConfiguration().set(CoprocessorHost.WAL_COPROCESSOR_CONF_KEY,
        SampleRegionWALObserver.class.getName());
    TEST_UTIL.startMiniDFSCluster(3);
    conf = TEST_UTIL.getConfiguration();
    cluster = TEST_UTIL.getDFSCluster();
    fs = cluster.getFileSystem();
    hbaseDir = TEST_UTIL.createRootDir();
    oldLogDir = new Path(hbaseDir, ".oldlogs");
    dir = new Path(hbaseDir, getName());
  }
  @AfterClass
  public static void tearDownAfterClass() throws Exception {
    TEST_UTIL.shutdownMiniCluster();
  }
  // Fixed test name used for table/dir naming throughout this class.
  private static String getName() {
    // TODO Auto-generated method stub
    return "TestHLog";
  }
  /**
   * Test that with three concurrent threads we still write edits in sequence
   * edit id order.
   * @throws Exception
   */
  @Test
  public void testMaintainOrderWithConcurrentWrites() throws Exception {
    // Run the HPE tool with three threads writing 3000 edits each concurrently.
    // When done, verify that all edits were written and that the order in the
    // WALs is of ascending edit sequence ids.
    int errCode =
      HLogPerformanceEvaluation.innerMain(new String [] {"-threads", "3", "-verify", "-iterations", "3000"});
    assertEquals(0, errCode);
  }
  /**
   * Just write multiple logs then split.  Before fix for HADOOP-2283, this
   * would fail.
   * @throws IOException
   */
  @Test
  public void testSplit() throws IOException {
    final byte [] tableName = Bytes.toBytes(getName());
    final byte [] rowName = tableName;
    Path logdir = new Path(hbaseDir, HConstants.HREGION_LOGDIR_NAME);
    HLog log = new HLog(fs, logdir, oldLogDir, conf);
    final int howmany = 3;
    HRegionInfo[] infos = new HRegionInfo[3];
    Path tabledir = new Path(hbaseDir, getName());
    fs.mkdirs(tabledir);
    // Create three regions covering rows "0".."3" with their own dirs.
    for(int i = 0; i < howmany; i++) {
      infos[i] = new HRegionInfo(tableName,
                Bytes.toBytes("" + i), Bytes.toBytes("" + (i+1)), false);
      fs.mkdirs(new Path(tabledir, infos[i].getEncodedName()));
      LOG.info("allo " + new Path(tabledir, infos[i].getEncodedName()).toString());
    }
    HTableDescriptor htd = new HTableDescriptor(tableName);
    htd.addFamily(new HColumnDescriptor("column"));
    // Add edits for three regions.
    try {
      for (int ii = 0; ii < howmany; ii++) {
        for (int i = 0; i < howmany; i++) {
          for (int j = 0; j < howmany; j++) {
            WALEdit edit = new WALEdit();
            byte [] family = Bytes.toBytes("column");
            byte [] qualifier = Bytes.toBytes(Integer.toString(j));
            byte [] column = Bytes.toBytes("column:" + Integer.toString(j));
            edit.add(new KeyValue(rowName, family, qualifier,
                System.currentTimeMillis(), column));
            LOG.info("Region " + i + ": " + edit);
            log.append(infos[i], tableName, edit,
              System.currentTimeMillis(), htd);
          }
        }
        // Roll once per outer pass so the split has several log files to process.
        log.rollWriter();
      }
      log.close();
      HLogSplitter logSplitter = HLogSplitter.createLogSplitter(conf,
          hbaseDir, logdir, this.oldLogDir, this.fs);
      List<Path> splits =
        logSplitter.splitLog();
      verifySplits(splits, howmany);
      log = null;
    } finally {
      if (log != null) {
        log.closeAndDelete();
      }
    }
  }
  /**
   * Test new HDFS-265 sync.
   * @throws Exception
   */
  @Test
  public void Broken_testSync() throws Exception {
    byte [] bytes = Bytes.toBytes(getName());
    // First verify that using streams all works.
    Path p = new Path(dir, getName() + ".fsdos");
    FSDataOutputStream out = fs.create(p);
    out.write(bytes);
    // Resolve hflush()/sync() reflectively so the test runs on both old and
    // new Hadoop Syncable APIs.
    Method syncMethod = null;
    try {
      syncMethod = out.getClass().getMethod("hflush", new Class<?> []{});
    } catch (NoSuchMethodException e) {
      try {
        syncMethod = out.getClass().getMethod("sync", new Class<?> []{});
      } catch (NoSuchMethodException ex) {
        fail("This version of Hadoop supports neither Syncable.sync() " +
            "nor Syncable.hflush().");
      }
    }
    syncMethod.invoke(out, new Object[]{});
    FSDataInputStream in = fs.open(p);
    assertTrue(in.available() > 0);
    byte [] buffer = new byte [1024];
    int read = in.read(buffer);
    assertEquals(bytes.length, read);
    out.close();
    in.close();
    Path subdir = new Path(dir, "hlogdir");
    HLog wal = new HLog(fs, subdir, oldLogDir, conf);
    final int total = 20;
    HLog.Reader reader = null;
    try {
      HRegionInfo info = new HRegionInfo(bytes,
                  null,null, false);
      HTableDescriptor htd = new HTableDescriptor();
      htd.addFamily(new HColumnDescriptor(bytes));
      for (int i = 0; i < total; i++) {
        WALEdit kvs = new WALEdit();
        kvs.add(new KeyValue(Bytes.toBytes(i), bytes, bytes));
        wal.append(info, bytes, kvs, System.currentTimeMillis(), htd);
      }
      // Now call sync and try reading.  Opening a Reader before you sync just
      // gives you EOFE.
      wal.sync();
      // Open a Reader.
      Path walPath = wal.computeFilename();
      reader = HLog.getReader(fs, walPath, conf);
      int count = 0;
      HLog.Entry entry = new HLog.Entry();
      while ((entry = reader.next(entry)) != null) count++;
      assertEquals(total, count);
      reader.close();
      // Add test that checks to see that an open of a Reader works on a file
      // that has had a sync done on it.
      for (int i = 0; i < total; i++) {
        WALEdit kvs = new WALEdit();
        kvs.add(new KeyValue(Bytes.toBytes(i), bytes, bytes));
        wal.append(info, bytes, kvs, System.currentTimeMillis(), htd);
      }
      reader = HLog.getReader(fs, walPath, conf);
      count = 0;
      while((entry = reader.next(entry)) != null) count++;
      assertTrue(count >= total);
      reader.close();
      // If I sync, should see double the edits.
      wal.sync();
      reader = HLog.getReader(fs, walPath, conf);
      count = 0;
      while((entry = reader.next(entry)) != null) count++;
      assertEquals(total * 2, count);
      // Now do a test that ensures stuff works when we go over block boundary,
      // especially that we return good length on file.
      final byte [] value = new byte[1025 * 1024];  // Make a 1M value.
      for (int i = 0; i < total; i++) {
        WALEdit kvs = new WALEdit();
        kvs.add(new KeyValue(Bytes.toBytes(i), bytes, value));
        wal.append(info, bytes, kvs, System.currentTimeMillis(), htd);
      }
      // Now I should have written out lots of blocks.  Sync then read.
      wal.sync();
      reader = HLog.getReader(fs, walPath, conf);
      count = 0;
      while((entry = reader.next(entry)) != null) count++;
      assertEquals(total * 3, count);
      reader.close();
      // Close it and ensure that closed, Reader gets right length also.
      wal.close();
      reader = HLog.getReader(fs, walPath, conf);
      count = 0;
      while((entry = reader.next(entry)) != null) count++;
      assertEquals(total * 3, count);
      reader.close();
    } finally {
      if (wal != null) wal.closeAndDelete();
      if (reader != null) reader.close();
    }
  }
  /**
   * Test the findMemstoresWithEditsEqualOrOlderThan method.
   * @throws IOException
   */
  @Test
  public void testFindMemstoresWithEditsEqualOrOlderThan() throws IOException {
    Map<byte [], Long> regionsToSeqids = new HashMap<byte [], Long>();
    for (int i = 0; i < 10; i++) {
      Long l = Long.valueOf(i);
      regionsToSeqids.put(l.toString().getBytes(), l);
    }
    byte [][] regions =
      HLog.findMemstoresWithEditsEqualOrOlderThan(1, regionsToSeqids);
    assertEquals(2, regions.length);
    assertTrue(Bytes.equals(regions[0], "0".getBytes()) ||
        Bytes.equals(regions[0], "1".getBytes()));
    regions = HLog.findMemstoresWithEditsEqualOrOlderThan(3, regionsToSeqids);
    int count = 4;
    assertEquals(count, regions.length);
    // Regions returned are not ordered.
    for (int i = 0; i < count; i++) {
      assertTrue(Bytes.equals(regions[i], "0".getBytes()) ||
        Bytes.equals(regions[i], "1".getBytes()) ||
        Bytes.equals(regions[i], "2".getBytes()) ||
        Bytes.equals(regions[i], "3".getBytes()));
    }
  }
  // Checks that each split file contains only edits for a single region and
  // that sequence numbers are strictly ascending within each file.
  private void verifySplits(List<Path> splits, final int howmany)
  throws IOException {
    assertEquals(howmany, splits.size());
    for (int i = 0; i < splits.size(); i++) {
      LOG.info("Verifying=" + splits.get(i));
      HLog.Reader reader = HLog.getReader(fs, splits.get(i), conf);
      try {
        int count = 0;
        String previousRegion = null;
        long seqno = -1;
        HLog.Entry entry = new HLog.Entry();
        while((entry = reader.next(entry)) != null) {
          HLogKey key = entry.getKey();
          String region = Bytes.toString(key.getEncodedRegionName());
          // Assert that all edits are for same region.
          if (previousRegion != null) {
            assertEquals(previousRegion, region);
          }
          LOG.info("oldseqno=" + seqno + ", newseqno=" + key.getLogSeqNum());
          assertTrue(seqno < key.getLogSeqNum());
          seqno = key.getLogSeqNum();
          previousRegion = region;
          count++;
        }
        assertEquals(howmany * howmany, count);
      } finally {
        reader.close();
      }
    }
  }
  /*
   * We pass different values to recoverFileLease() so that different code paths are covered
   *
   * For this test to pass, requires:
   * 1. HDFS-200 (append support)
   * 2. HDFS-988 (SafeMode should freeze file operations
   *              [FSNamesystem.nextGenerationStampForBlock])
   * 3. HDFS-142 (on restart, maintain pendingCreates)
   */
  @Test
  public void testAppendClose() throws Exception {
    testAppendClose(true);
    testAppendClose(false);
  }
  /*
   * @param triggerDirectAppend whether to trigger direct call of fs.append()
   */
  public void testAppendClose(final boolean triggerDirectAppend) throws Exception {
    byte [] tableName = Bytes.toBytes(getName());
    HRegionInfo regioninfo = new HRegionInfo(tableName,
             HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW, false);
    Path subdir = new Path(dir, "hlogdir" + triggerDirectAppend);
    Path archdir = new Path(dir, "hlogdir_archive");
    HLog wal = new HLog(fs, subdir, archdir, conf);
    final int total = 20;
    HTableDescriptor htd = new HTableDescriptor();
    htd.addFamily(new HColumnDescriptor(tableName));
    for (int i = 0; i < total; i++) {
      WALEdit kvs = new WALEdit();
      kvs.add(new KeyValue(Bytes.toBytes(i), tableName, tableName));
      wal.append(regioninfo, tableName, kvs, System.currentTimeMillis(), htd);
    }
    // Now call sync to send the data to HDFS datanodes
    wal.sync();
    int namenodePort = cluster.getNameNodePort();
    final Path walPath = wal.computeFilename();
    // Stop the cluster.  (ensure restart since we're sharing MiniDFSCluster)
    try {
      DistributedFileSystem dfs = (DistributedFileSystem) cluster.getFileSystem();
      dfs.setSafeMode(FSConstants.SafeModeAction.SAFEMODE_ENTER);
      cluster.shutdown();
      try {
        // wal.writer.close() will throw an exception,
        // but still call this since it closes the LogSyncer thread first
        wal.close();
      } catch (IOException e) {
        LOG.info(e);
      }
      fs.close(); // closing FS last so DFSOutputStream can't call close
      LOG.info("STOPPED first instance of the cluster");
    } finally {
      // Restart the cluster
      while (cluster.isClusterUp()){
        LOG.error("Waiting for cluster to go down");
        Thread.sleep(1000);
      }
      // Workaround a strange issue with Hadoop's RPC system - if we don't
      // sleep here, the new datanodes will pick up a cached IPC connection to
      // the old (dead) NN and fail to start. Sleeping 2 seconds goes past
      // the idle time threshold configured in the conf above
      Thread.sleep(2000);
      cluster = new MiniDFSCluster(namenodePort, conf, 5, false, true, true, null, null, null, null);
      TEST_UTIL.setDFSCluster(cluster);
      cluster.waitActive();
      fs = cluster.getFileSystem();
      LOG.info("START second instance.");
    }
    // set the lease period to be 1 second so that the
    // namenode triggers lease recovery upon append request
    Method setLeasePeriod = cluster.getClass()
      .getDeclaredMethod("setLeasePeriod", new Class[]{Long.TYPE, Long.TYPE});
    setLeasePeriod.setAccessible(true);
    setLeasePeriod.invoke(cluster,
                          new Object[]{new Long(1000), new Long(1000)});
    try {
      Thread.sleep(1000);
    } catch (InterruptedException e) {
      LOG.info(e);
    }
    // Now try recovering the log, like the HMaster would do
    final FileSystem recoveredFs = fs;
    final Configuration rlConf = conf;
    class RecoverLogThread extends Thread {
      public Exception exception = null;
      public void run() {
          try {
            rlConf.setBoolean(FSHDFSUtils.TEST_TRIGGER_DFS_APPEND, triggerDirectAppend);
            FSUtils.getInstance(fs, rlConf)
              .recoverFileLease(recoveredFs, walPath, rlConf);
          } catch (IOException e) {
            exception = e;
          }
      }
    }
    RecoverLogThread t = new RecoverLogThread();
    t.start();
    // Timeout after 60 sec. Without correct patches, would be an infinite loop
    t.join(60 * 1000);
    if(t.isAlive()) {
      t.interrupt();
      throw new Exception("Timed out waiting for HLog.recoverLog()");
    }
    if (t.exception != null)
      throw t.exception;
    // Make sure you can read all the content
    HLog.Reader reader = HLog.getReader(this.fs, walPath, this.conf);
    int count = 0;
    HLog.Entry entry = new HLog.Entry();
    while (reader.next(entry) != null) {
      count++;
      assertTrue("Should be one KeyValue per WALEdit",
                 entry.getEdit().getKeyValues().size() == 1);
    }
    assertEquals(total, count);
    reader.close();
    // Reset the lease period
    setLeasePeriod.invoke(cluster, new Object[]{new Long(60000), new Long(3600000)});
  }
  /**
   * Tests that we can write out an edit, close, and then read it back in again.
   * @throws IOException
   */
  @Test
  public void testEditAdd() throws IOException {
    final int COL_COUNT = 10;
    final byte [] tableName = Bytes.toBytes("tablename");
    final byte [] row = Bytes.toBytes("row");
    HLog.Reader reader = null;
    HLog log = null;
    try {
      log = new HLog(fs, dir, oldLogDir, conf);
      // Write columns named 1, 2, 3, etc. and then values of single byte
      // 1, 2, 3...
      long timestamp = System.currentTimeMillis();
      WALEdit cols = new WALEdit();
      for (int i = 0; i < COL_COUNT; i++) {
        cols.add(new KeyValue(row, Bytes.toBytes("column"),
          Bytes.toBytes(Integer.toString(i)),
          timestamp, new byte[] { (byte)(i + '0') }));
      }
      HRegionInfo info = new HRegionInfo(tableName,
        row,Bytes.toBytes(Bytes.toString(row) + "1"), false);
      HTableDescriptor htd = new HTableDescriptor();
      htd.addFamily(new HColumnDescriptor("column"));
      log.append(info, tableName, cols, System.currentTimeMillis(), htd);
      long logSeqId = log.startCacheFlush(info.getEncodedNameAsBytes());
      log.completeCacheFlush(info.getEncodedNameAsBytes(), tableName, logSeqId,
          info.isMetaRegion());
      log.close();
      Path filename = log.computeFilename();
      log = null;
      // Now open a reader on the log and assert append worked.
      reader = HLog.getReader(fs, filename, conf);
      // Above we added all columns on a single row so we only read one
      // entry in the below... thats why we have '1'.
      for (int i = 0; i < 1; i++) {
        HLog.Entry entry = reader.next(null);
        if (entry == null) break;
        HLogKey key = entry.getKey();
        WALEdit val = entry.getEdit();
        assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
        assertTrue(Bytes.equals(tableName, key.getTablename()));
        KeyValue kv = val.getKeyValues().get(0);
        assertTrue(Bytes.equals(row, kv.getRow()));
        assertEquals((byte)(i + '0'), kv.getValue()[0]);
        System.out.println(key + " " + val);
      }
      HLog.Entry entry = null;
      while ((entry = reader.next(null)) != null) {
        HLogKey key = entry.getKey();
        WALEdit val = entry.getEdit();
        // Assert only one more row... the meta flushed row.
        assertTrue(Bytes.equals(info.getEncodedNameAsBytes(), key.getEncodedRegionName()));
        assertTrue(Bytes.equals(tableName, key.getTablename()));
        KeyValue kv = val.getKeyValues().get(0);
        assertTrue(Bytes.equals(HLog.METAROW, kv.getRow()));
        assertTrue(Bytes.equals(HLog.METAFAMILY, kv.getFamily()));
        assertEquals(0, Bytes.compareTo(HLog.COMPLETE_CACHE_FLUSH,
          val.getKeyValues().get(0).getValue()));
        System.out.println(key + " " + val);
      }
    } finally {
      if (log != null) {
        log.closeAndDelete();
      }
      if (reader != null) {
        reader.close();
      }
    }
  }
  /**
   * @throws IOException
   */
  @Test
  public void testAppend() throws IOException {
    final int COL_COUNT = 10;
    final byte [] tableName = Bytes.toBytes("tablename");
    final byte [] row = Bytes.toBytes("row");
    Reader reader = null;
    HLog log = new HLog(fs, dir, oldLogDir, conf);
    try {
      // Write columns named 1, 2, 3, etc. and then values of single byte
      // 1, 2, 3...
      long timestamp = System.currentTimeMillis();
      WALEdit cols = new WALEdit();
      for (int i = 0; i < COL_COUNT; i++) {
        cols.add(new KeyValue(row, Bytes.toBytes("column"),
          Bytes.toBytes(Integer.toString(i)),
          timestamp, new byte[] { (byte)(i + '0') }));
      }
      HRegionInfo hri = new HRegionInfo(tableName,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
      HTableDescriptor htd = new HTableDescriptor();
      htd.addFamily(new HColumnDescriptor("column"));
      log.append(hri, tableName, cols, System.currentTimeMillis(), htd);
      long logSeqId = log.startCacheFlush(hri.getEncodedNameAsBytes());
      log.completeCacheFlush(hri.getEncodedNameAsBytes(), tableName, logSeqId, false);
      log.close();
      Path filename = log.computeFilename();
      log = null;
      // Now open a reader on the log and assert append worked.
      reader = HLog.getReader(fs, filename, conf);
      HLog.Entry entry = reader.next();
      assertEquals(COL_COUNT, entry.getEdit().size());
      int idx = 0;
      for (KeyValue val : entry.getEdit().getKeyValues()) {
        assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
          entry.getKey().getEncodedRegionName()));
        assertTrue(Bytes.equals(tableName, entry.getKey().getTablename()));
        assertTrue(Bytes.equals(row, val.getRow()));
        assertEquals((byte)(idx + '0'), val.getValue()[0]);
        System.out.println(entry.getKey() + " " + val);
        idx++;
      }
      // Get next row... the meta flushed row.
      entry = reader.next();
      assertEquals(1, entry.getEdit().size());
      for (KeyValue val : entry.getEdit().getKeyValues()) {
        assertTrue(Bytes.equals(hri.getEncodedNameAsBytes(),
          entry.getKey().getEncodedRegionName()));
        assertTrue(Bytes.equals(tableName, entry.getKey().getTablename()));
        assertTrue(Bytes.equals(HLog.METAROW, val.getRow()));
        assertTrue(Bytes.equals(HLog.METAFAMILY, val.getFamily()));
        assertEquals(0, Bytes.compareTo(HLog.COMPLETE_CACHE_FLUSH,
          val.getValue()));
        System.out.println(entry.getKey() + " " + val);
      }
    } finally {
      if (log != null) {
        log.closeAndDelete();
      }
      if (reader != null) {
        reader.close();
      }
    }
  }
  /**
   * Test that we can visit entries before they are appended
   * @throws Exception
   */
  @Test
  public void testVisitors() throws Exception {
    final int COL_COUNT = 10;
    final byte [] tableName = Bytes.toBytes("tablename");
    final byte [] row = Bytes.toBytes("row");
    HLog log = new HLog(fs, dir, oldLogDir, conf);
    try {
      DumbWALActionsListener visitor = new DumbWALActionsListener();
      log.registerWALActionsListener(visitor);
      long timestamp = System.currentTimeMillis();
      HTableDescriptor htd = new HTableDescriptor();
      htd.addFamily(new HColumnDescriptor("column"));
      HRegionInfo hri = new HRegionInfo(tableName,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
      for (int i = 0; i < COL_COUNT; i++) {
        WALEdit cols = new WALEdit();
        cols.add(new KeyValue(row, Bytes.toBytes("column"),
            Bytes.toBytes(Integer.toString(i)),
            timestamp, new byte[]{(byte) (i + '0')}));
        log.append(hri, tableName, cols, System.currentTimeMillis(), htd);
      }
      // Registered listener must have seen every append.
      assertEquals(COL_COUNT, visitor.increments);
      log.unregisterWALActionsListener(visitor);
      WALEdit cols = new WALEdit();
      cols.add(new KeyValue(row, Bytes.toBytes("column"),
          Bytes.toBytes(Integer.toString(11)),
          timestamp, new byte[]{(byte) (11 + '0')}));
      log.append(hri, tableName, cols, System.currentTimeMillis(), htd);
      // After unregistering, the count must not grow.
      assertEquals(COL_COUNT, visitor.increments);
    } finally {
      if (log != null) log.closeAndDelete();
    }
  }
  @Test
  public void testLogCleaning() throws Exception {
    LOG.info("testLogCleaning");
    final byte [] tableName = Bytes.toBytes("testLogCleaning");
    final byte [] tableName2 = Bytes.toBytes("testLogCleaning2");
    HLog log = new HLog(fs, dir, oldLogDir, conf);
    try {
      HRegionInfo hri = new HRegionInfo(tableName,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
      HRegionInfo hri2 = new HRegionInfo(tableName2,
          HConstants.EMPTY_START_ROW, HConstants.EMPTY_END_ROW);
      // Add a single edit and make sure that rolling won't remove the file
      // Before HBASE-3198 it used to delete it
      addEdits(log, hri, tableName, 1);
      log.rollWriter();
      assertEquals(1, log.getNumLogFiles());
      // See if there's anything wrong with more than 1 edit
      addEdits(log, hri, tableName, 2);
      log.rollWriter();
      assertEquals(2, log.getNumLogFiles());
      // Now mix edits from 2 regions, still no flushing
      addEdits(log, hri, tableName, 1);
      addEdits(log, hri2, tableName2, 1);
      addEdits(log, hri, tableName, 1);
      addEdits(log, hri2, tableName2, 1);
      log.rollWriter();
      assertEquals(3, log.getNumLogFiles());
      // Flush the first region, we expect to see the first two files getting
      // archived
      long seqId = log.startCacheFlush(hri.getEncodedNameAsBytes());
      log.completeCacheFlush(hri.getEncodedNameAsBytes(), tableName, seqId, false);
      log.rollWriter();
      assertEquals(2, log.getNumLogFiles());
      // Flush the second region, which removes all the remaining output files
      // since the oldest was completely flushed and the two others only contain
      // flush information
      seqId = log.startCacheFlush(hri2.getEncodedNameAsBytes());
      log.completeCacheFlush(hri2.getEncodedNameAsBytes(), tableName2, seqId, false);
      log.rollWriter();
      assertEquals(0, log.getNumLogFiles());
    } finally {
      if (log != null) log.closeAndDelete();
    }
  }
  /**
   * A loaded WAL coprocessor won't break existing HLog test cases.
   */
  @Test
  public void testWALCoprocessorLoaded() throws Exception {
    // test to see whether the coprocessor is loaded or not.
    HLog log = new HLog(fs, dir, oldLogDir, conf);
    try {
      WALCoprocessorHost host = log.getCoprocessorHost();
      Coprocessor c = host.findCoprocessor(SampleRegionWALObserver.class.getName());
      assertNotNull(c);
    } finally {
      if (log != null) log.closeAndDelete();
    }
  }
  // Appends `times` single-KeyValue edits for the given region to the log.
  private void addEdits(HLog log, HRegionInfo hri, byte [] tableName,
                        int times) throws IOException {
    HTableDescriptor htd = new HTableDescriptor();
    htd.addFamily(new HColumnDescriptor("row"));
    final byte [] row = Bytes.toBytes("row");
    for (int i = 0; i < times; i++) {
      long timestamp = System.currentTimeMillis();
      WALEdit cols = new WALEdit();
      cols.add(new KeyValue(row, row, row, timestamp, row));
      log.append(hri, tableName, cols, timestamp, htd);
    }
  }
  // Listener that only counts how many log entries it was shown; all other
  // callbacks are intentionally no-ops.
  static class DumbWALActionsListener implements WALActionsListener {
    int increments = 0;
    @Override
    public void visitLogEntryBeforeWrite(HRegionInfo info, HLogKey logKey,
                                         WALEdit logEdit) {
      increments++;
    }
    @Override
    public void visitLogEntryBeforeWrite(HTableDescriptor htd, HLogKey logKey, WALEdit logEdit) {
      //To change body of implemented methods use File | Settings | File Templates.
      increments++;
    }
    @Override
    public void preLogRoll(Path oldFile, Path newFile) {
      // TODO Auto-generated method stub
    }
    @Override
    public void postLogRoll(Path oldFile, Path newFile) {
      // TODO Auto-generated method stub
    }
    @Override
    public void preLogArchive(Path oldFile, Path newFile) {
      // TODO Auto-generated method stub
    }
    @Override
    public void postLogArchive(Path oldFile, Path newFile) {
      // TODO Auto-generated method stub
    }
    @Override
    public void logRollRequested() {
      // TODO Auto-generated method stub
    }
    @Override
    public void logCloseRequested() {
      // not interested
    }
  }
  // Fails the test if it leaks threads/file handles (resource accounting rule).
  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
| |
/*
* Copyright 2000-2010 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package git4idea.history;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.openapi.vcs.FilePath;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.Change;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.util.ArrayUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.vcsUtil.VcsUtil;
import git4idea.GitUtil;
import git4idea.commands.GitHandler;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.*;
import static git4idea.history.GitLogParser.GitLogOption.*;
/**
* One record (commit information) returned by git log output.
* The access methods try heavily to return some default value if real is unavailable, for example, blank string is better than null.
* BUT if one tries to get an option which was not specified to the GitLogParser, one will get null.
*
* @see git4idea.history.GitLogParser
*/
class GitLogRecord {
private static final Logger LOG = Logger.getInstance(GitLogRecord.class);
@NotNull private final Map<GitLogParser.GitLogOption, String> myOptions;
@NotNull private final List<GitLogStatusInfo> myStatusInfo;
private final boolean mySupportsRawBody;
private GitHandler myHandler;
  /**
   * @param options         option name/value pairs parsed from the git log output
   * @param statusInfo      per-path status entries for this commit
   * @param supportsRawBody whether the git version supplied a raw body (%B)
   */
  GitLogRecord(@NotNull Map<GitLogParser.GitLogOption, String> options,
               @NotNull List<GitLogStatusInfo> statusInfo,
               boolean supportsRawBody) {
    myOptions = options;
    myStatusInfo = statusInfo;
    mySupportsRawBody = supportsRawBody;
  }
@NotNull
private Collection<String> getPaths() {
LinkedHashSet<String> result = ContainerUtil.newLinkedHashSet();
for (GitLogStatusInfo info : myStatusInfo) {
result.add(info.getFirstPath());
if (info.getSecondPath() != null) result.add(info.getSecondPath());
}
return result;
}
  /** Per-path status entries (added/modified/deleted/renamed, ...) for this commit. */
  @NotNull
  List<GitLogStatusInfo> getStatusInfos() {
    return myStatusInfo;
  }
@NotNull
public List<FilePath> getFilePaths(@NotNull VirtualFile root) throws VcsException {
List<FilePath> res = new ArrayList<>();
String prefix = root.getPath() + "/";
for (String strPath : getPaths()) {
final String subPath = GitUtil.unescapePath(strPath);
final FilePath revisionPath = VcsUtil.getFilePath(prefix + subPath, false);
res.add(revisionPath);
}
return res;
}
  /**
   * Returns the recorded value for {@code key}, interned via {@code shortBuffer}.
   * Logs an error and returns an empty string when the option was requested
   * from the parser but no value was recorded.
   */
  @NotNull
  private String lookup(@NotNull GitLogParser.GitLogOption key) {
    String value = myOptions.get(key);
    if (value == null) {
      LOG.error("Missing value for option " + key);
      return "";
    }
    return shortBuffer(value);
  }
// trivial access methods
  /** Commit hash. */
  @NotNull
  String getHash() {
    return lookup(HASH);
  }
  /** Hash of the tree object this commit points to. */
  @NotNull
  String getTreeHash() {
    return lookup(TREE);
  }
  /** Author name. */
  @NotNull
  String getAuthorName() {
    return lookup(AUTHOR_NAME);
  }
  /** Author e-mail address. */
  @NotNull
  String getAuthorEmail() {
    return lookup(AUTHOR_EMAIL);
  }
  /** Committer name. */
  @NotNull
  String getCommitterName() {
    return lookup(COMMITTER_NAME);
  }
  /** Committer e-mail address. */
  @NotNull
  String getCommitterEmail() {
    return lookup(COMMITTER_EMAIL);
  }
  /** Commit message subject (first line). */
  @NotNull
  String getSubject() {
    return lookup(SUBJECT);
  }
  /** Commit message body (without the subject). */
  @NotNull
  String getBody() {
    return lookup(BODY);
  }
  /** Raw commit message (subject + body) as git recorded it. */
  @NotNull
  String getRawBody() {
    return lookup(RAW_BODY);
  }
  /** Shortened reflog selector (e.g. "HEAD@{1}"). */
  @NotNull
  String getShortenedRefLog() {
    return lookup(SHORT_REF_LOG_SELECTOR);
  }
// access methods with some formatting or conversion
/** Commit time wrapped in a {@link Date}; see {@link #getCommitTime()}. */
@NotNull
Date getDate() {
return new Date(getCommitTime());
}
/**
 * Commit time in milliseconds since the epoch (git reports seconds, so the
 * parsed value is multiplied by 1000). Returns 0 and logs an error if the
 * option is missing or unparseable.
 */
long getCommitTime() {
  String time = myOptions.get(COMMIT_TIME);
  if (time == null) {
    // Previously this fell through to an uncaught NullPointerException.
    LOG.error("Missing commit time in " + toString() + ", while executing " + myHandler);
    return 0;
  }
  try {
    return Long.parseLong(time.trim()) * 1000;
  }
  catch (NumberFormatException e) {
    LOG.error("Couldn't get commit time from " + toString() + ", while executing " + myHandler, e);
    return 0;
  }
}
/**
 * Author time in milliseconds since the epoch (git reports seconds, so the
 * parsed value is multiplied by 1000). Returns 0 and logs an error if the
 * option is missing or unparseable.
 */
long getAuthorTimeStamp() {
  String time = myOptions.get(AUTHOR_TIME);
  if (time == null) {
    // Previously this fell through to an uncaught NullPointerException.
    LOG.error("Missing author time in " + toString() + ", while executing " + myHandler);
    return 0;
  }
  try {
    return Long.parseLong(time.trim()) * 1000;
  }
  catch (NumberFormatException e) {
    LOG.error("Couldn't get author time from " + toString() + ", while executing " + myHandler, e);
    return 0;
  }
}
/** Full commit message: the raw body when available, otherwise subject + blank line + body; trimmed. */
String getFullMessage() {
return mySupportsRawBody ? getRawBody().trim() : ((getSubject() + "\n\n" + getBody()).trim());
}
/**
 * Hashes of this commit's parents, split from the space-separated PARENTS
 * option. An empty array is returned for a parentless (root) commit.
 */
@NotNull
String[] getParentsHashes() {
  final String parents = lookup(PARENTS);
  if (parents.trim().isEmpty()) {
    return ArrayUtil.EMPTY_STRING_ARRAY;
  }
  return parents.split(" ");
}
/**
 * Ref names (branches, tags, HEAD) decorating this commit, parsed from the
 * REF_NAMES option. Empty if git produced no decoration for the commit.
 */
@NotNull
public Collection<String> getRefs() {
final String decorate = myOptions.get(REF_NAMES);
return parseRefNames(decorate);
}
/**
 * Raw log-option -> value map parsed for this commit.
 * NOTE(review): this is the internal map, not a copy — callers should treat it as read-only.
 */
@NotNull
public Map<GitLogParser.GitLogOption, String> getOptions() {
return myOptions;
}
/** Whether the raw commit body was requested from git (see {@link #getFullMessage()}). */
public boolean isSupportsRawBody() {
return mySupportsRawBody;
}
/**
 * Parses git's decoration string, e.g. {@code "(HEAD -> refs/heads/master, tag: v1.0, origin/master)"},
 * into individual ref names. Returns an empty list when there is no decoration
 * or when the parentheses are missing/unbalanced.
 */
@NotNull
private static List<String> parseRefNames(@Nullable final String decoration) {
if (decoration == null) {
return ContainerUtil.emptyList();
}
// Ref names are enclosed in the first "(...)" pair; anything outside is ignored.
final int startParentheses = decoration.indexOf("(");
final int endParentheses = decoration.indexOf(")");
if ((startParentheses == -1) || (endParentheses == -1)) return Collections.emptyList();
String refs = decoration.substring(startParentheses + 1, endParentheses);
String[] names = refs.split(", ");
List<String> result = ContainerUtil.newArrayList();
for (String item : names) {
final String POINTER = " -> "; // HEAD -> refs/heads/master in Git 2.4.3+
if (item.contains(POINTER)) {
// Both sides of the arrow are refs; keep them both.
List<String> parts = StringUtil.split(item, POINTER);
result.addAll(ContainerUtil.map(parts, s -> shortBuffer(s.trim())));
}
else {
int colon = item.indexOf(':'); // tags have the "tag:" prefix.
result.add(shortBuffer(colon > 0 ? item.substring(colon + 1).trim() : item));
}
}
return result;
}
@NotNull
private static String shortBuffer(@NotNull String raw) {
// Copies the string's contents. Presumably this detaches the value from the parser's
// large backing buffer (the pre-Java-7u6 substring-sharing behavior) —
// TODO(review): confirm this copy is still needed on current JDKs.
return new String(raw);
}
/**
 * Converts this record's status entries into VCS {@link Change}s.
 * Only the first parent is used as the base revision; for a root commit the base is null.
 *
 * @param project the project the changes belong to
 * @param vcsRoot the VCS root of the repository
 * @throws VcsException if the status entries cannot be converted
 */
@NotNull
public List<Change> parseChanges(@NotNull Project project, @NotNull VirtualFile vcsRoot) throws VcsException {
String[] hashes = getParentsHashes();
return GitChangesParser.parse(project, vcsRoot, myStatusInfo, getHash(), getDate(), hashes.length == 0 ? null : hashes[0]);
}
/**
 * for debugging purposes - see {@link GitUtil#parseTimestampWithNFEReport(String, git4idea.commands.GitHandler, String)}.
 *
 * @param handler the handler whose output produced this record; included in error logs only
 */
public void setUsedHandler(GitHandler handler) {
myHandler = handler;
}
/** Debug representation including all parsed options, status entries and the originating handler. */
@Override
public String toString() {
return String.format("GitLogRecord{myOptions=%s, myStatusInfo=%s, mySupportsRawBody=%s, myHandler=%s}",
myOptions, myStatusInfo, mySupportsRawBody, myHandler);
}
}
| |
/*
* Copyright 2016, 2017 DTCC, Fujitsu Australia Software Technology, IBM - All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hyperledger.fabric.sdk;
import java.util.Objects;
import java.util.Properties;
import com.google.common.util.concurrent.ListenableFuture;
import io.netty.util.internal.StringUtil;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.hyperledger.fabric.protos.peer.FabricProposal;
import org.hyperledger.fabric.protos.peer.FabricProposalResponse;
import org.hyperledger.fabric.sdk.exception.InvalidArgumentException;
import org.hyperledger.fabric.sdk.exception.PeerException;
import static java.lang.String.format;
import static org.hyperledger.fabric.sdk.helper.SDKUtil.checkGrpcUrl;
/**
* The Peer class represents a peer to which SDK sends deploy, or query proposals requests.
*/
/**
 * The Peer class represents a peer to which SDK sends deploy, or query proposals requests.
 * <p>
 * A peer lazily creates its {@link EndorserClient}; after any send failure the client is
 * discarded so the next request starts with a fresh connection.
 */
public class Peer {
    private static final Log logger = LogFactory.getLog(Peer.class);
    // volatile: written on failure/shutdown and read by concurrent senders.
    private volatile EndorserClient endorserClient;
    private final Properties properties;
    private final String name;
    private final String url;
    private boolean shutdown = false;
    private Chain chain;

    Peer(String name, String grpcURL, Properties properties) throws InvalidArgumentException {
        Exception e = checkGrpcUrl(grpcURL);
        if (e != null) {
            throw new InvalidArgumentException("Bad peer url.", e);
        }
        if (StringUtil.isNullOrEmpty(name)) {
            throw new InvalidArgumentException("Invalid name for peer");
        }
        this.url = grpcURL;
        this.name = name;
        this.properties = properties == null ? null : (Properties) properties.clone(); //keep our own copy.
    }

    /**
     * Peer's name
     *
     * @return return the peer's name.
     */
    public String getName() {
        return name;
    }

    /** @return a defensive copy of the peer's properties, or null if none were given. */
    public Properties getProperties() {
        return properties == null ? null : (Properties) properties.clone();
    }

    /**
     * Set the chain the peer is on.
     *
     * @param chain the chain to associate this peer with
     * @throws InvalidArgumentException if the peer already belongs to a chain
     */
    void setChain(Chain chain) throws InvalidArgumentException {
        if (null != this.chain) {
            throw new InvalidArgumentException(format("Can not add peer %s to chain %s because it already belongs to chain %s.",
                    name, chain.getName(), this.chain.getName()));
        }
        this.chain = chain;
    }

    /**
     * The chain the peer is set on.
     *
     * @return the associated chain, or null if none has been set
     */
    Chain getChain() {
        return chain;
    }

    /**
     * Get the URL of the peer.
     *
     * @return {string} Get the URL associated with the peer.
     */
    public String getUrl() {
        return this.url;
    }

    /**
     * for use in list of peers comparisons , e.g. list.contains() calls
     *
     * @param otherPeer the peer instance to compare against
     * @return true if both peer instances have the same name and url
     */
    @Override
    public boolean equals(Object otherPeer) {
        if (this == otherPeer) {
            return true;
        }
        if (!(otherPeer instanceof Peer)) {
            // also covers otherPeer == null
            return false;
        }
        Peer p = (Peer) otherPeer;
        return Objects.equals(getName(), p.getName()) && Objects.equals(getUrl(), p.getUrl());
    }

    /**
     * Consistent with {@link #equals(Object)}: based on name and url only.
     * (equals() without hashCode() breaks hash-based collections.)
     */
    @Override
    public int hashCode() {
        return Objects.hash(getName(), getUrl());
    }

    /**
     * Returns a live endorser client, creating a fresh one if none exists yet
     * or the previous channel went inactive.
     */
    private EndorserClient getOrCreateEndorserClient() {
        EndorserClient localEndorserClient = endorserClient; //work off thread local copy.
        if (null == localEndorserClient || !localEndorserClient.isChannelActive()) {
            endorserClient = localEndorserClient = new EndorserClient(new Endpoint(url, properties).getChannelBuilder());
        }
        return localEndorserClient;
    }

    /**
     * Sends the signed proposal to this peer asynchronously.
     *
     * @param proposal the signed proposal to endorse
     * @return a future completing with the peer's proposal response
     * @throws PeerException            if the peer is shut down or the proposal is null
     * @throws InvalidArgumentException if the peer url is invalid
     */
    ListenableFuture<FabricProposalResponse.ProposalResponse> sendProposalAsync(FabricProposal.SignedProposal proposal)
            throws PeerException, InvalidArgumentException {
        checkSendProposal(proposal);
        logger.debug(format("peer.sendProposalAsync name: %s, url: %s", name, url));
        try {
            return getOrCreateEndorserClient().sendProposalAsync(proposal);
        } catch (Throwable t) { //Any error: start the next attempt with a clean connection.
            endorserClient = null;
            throw t;
        }
    }

    /**
     * Sends the signed proposal to this peer and waits for the response.
     *
     * @param proposal the signed proposal to endorse
     * @return the peer's proposal response
     * @throws PeerException            if the peer is shut down or the proposal is null
     * @throws InvalidArgumentException if the peer url is invalid
     */
    FabricProposalResponse.ProposalResponse sendProposal(FabricProposal.SignedProposal proposal)
            throws PeerException, InvalidArgumentException {
        checkSendProposal(proposal);
        logger.debug(format("peer.sendProposalAsync name: %s, url: %s", name, url));
        try {
            return getOrCreateEndorserClient().sendProposal(proposal);
        } catch (Throwable t) { //Any error: start the next attempt with a clean connection.
            endorserClient = null;
            throw t;
        }
    }

    /** Validates preconditions shared by both send methods. */
    private void checkSendProposal(FabricProposal.SignedProposal proposal) throws PeerException, InvalidArgumentException {
        if (shutdown) {
            throw new PeerException(format("Peer %s was shutdown.", name));
        }
        if (proposal == null) {
            throw new PeerException("Proposal is null");
        }
        Exception e = checkGrpcUrl(url);
        if (e != null) {
            throw new InvalidArgumentException("Bad peer url.", e);
        }
    }

    /** Factory method used by the SDK to create peers. */
    static Peer createNewInstance(String name, String grpcURL, Properties properties) throws InvalidArgumentException {
        return new Peer(name, grpcURL, properties);
    }

    /**
     * Shuts the peer down, releasing its endorser client. Idempotent.
     *
     * @param force whether to force-close the underlying channel
     */
    synchronized void shutdown(boolean force) {
        if (shutdown) {
            return;
        }
        shutdown = true;
        chain = null;
        EndorserClient localEndorserClient = endorserClient;
        //allow resources to finalize
        endorserClient = null;
        if (localEndorserClient == null) {
            return;
        }
        localEndorserClient.shutdown(force);
    }

    @Override
    protected void finalize() throws Throwable {
        shutdown(true);
        super.finalize();
    }
} // end Peer
| |
package pb.parse;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import pb.board.Board;
import pb.board.BoardConstants;
import pb.board.Gizmo;
import pb.board.StyleRegistry;
import pb.board.StyleRegistryBuilder;
import pb.gizmos.Absorber;
import pb.gizmos.Ball;
import pb.gizmos.CircleBumper;
import pb.gizmos.Flipper;
import pb.gizmos.Portal;
import pb.gizmos.SquareBumper;
import pb.gizmos.TriangleBumper;
import pb.gizmos.Flipper.Type;
/**
* Parses board definition into built boards.
*/
/**
 * Parses board definition into built boards.
 */
public class BoardBuilder {
    /**
     * Creates a board from the description.
     *
     * @param source the path to the board file
     * @return the board obtained by parsing the file
     * @throws IOException if the file cannot be read
     * @throws IllegalArgumentException if the file contains no board definition,
     *         defines it twice, references it before defining it, or references
     *         unknown elements
     */
    public static Board buildBoard(File source) throws IOException {
        List<ElementDescription> elements = parse(source);
        StyleRegistryBuilder styleBuilder = new StyleRegistryBuilder();
        Board board = null;
        for (ElementDescription element : elements) {
            String type = element.getType();
            if (type.equals("board")) {
                if (board != null)
                    throw new IllegalArgumentException(
                            "Duplicate board definition");
                board = buildBoard(element);
            } else if (type.equals("style")) {
                // Styles are collected independently of the board, so they may
                // legally appear before the board line.
                String styleClass = element.getString("class");
                for (Iterator<String> iterator = element.getPropertyNames();
                        iterator.hasNext(); ) {
                    String propertyName = iterator.next();
                    String value = element.getString(propertyName);
                    styleBuilder.setProperty(styleClass, propertyName, value);
                }
            } else {
                // Every remaining statement type attaches to the board, so the
                // board must already exist (previously this path could NPE).
                if (board == null)
                    throw new IllegalArgumentException(
                            "Board not defined first");
                if (type.equals("fire")) {
                    Gizmo trigger = requireGizmo(board,
                            element.getString("trigger"), "Trigger not found");
                    Gizmo action = requireGizmo(board,
                            element.getString("action"), "Action not found");
                    trigger.addListener(action);
                } else if (type.equals("keyup") || type.equals("keydown")) {
                    boolean press = type.equals("keydown");
                    String keyName = element.getString("key");
                    Gizmo action = requireGizmo(board,
                            element.getString("action"), "Action not found");
                    board.getKeyBindings().addListener(keyName, press, action);
                } else {
                    Gizmo gizmo = buildGizmo(element);
                    if (board.findByName(gizmo.name()) != null) {
                        throw new IllegalArgumentException(
                                "Duplicate element name");
                    }
                    board.add(gizmo);
                }
            }
        }
        if (board == null)
            throw new IllegalArgumentException("Board not defined first");
        board.setStyleRegistry(styleBuilder.build());
        return board;
    }

    /**
     * Looks up a named gizmo on the board, failing with the given message if absent.
     */
    private static Gizmo requireGizmo(Board board, String name,
            String errorMessage) {
        Gizmo gizmo = board.findByName(name);
        if (gizmo == null)
            throw new IllegalArgumentException(errorMessage);
        return gizmo;
    }

    /**
     * Creates a board element from a line description in a board file.
     *
     * @param element a parsed line describing a board element
     * @return a board element; the element is not added to any board
     * @throws IllegalArgumentException if the statement type is not supported
     */
    static Gizmo buildGizmo(ElementDescription element) {
        String type = element.getType();
        String name = element.getString("name");
        Gizmo gizmo = null;
        if (type.equals("ball")) {
            double x = element.getFloat("x");
            double y = element.getFloat("y");
            double r = element.getFloat("radius", 0.25);
            double vx = element.getFloat("xVelocity");
            double vy = element.getFloat("yVelocity");
            gizmo = new Ball(name, x, y, r, vx, vy);
        } else if (type.equals("absorber")) {
            int x = element.getInteger("x");
            int y = element.getInteger("y");
            int width = element.getInteger("width");
            int height = element.getInteger("height");
            gizmo = new Absorber(name, x, y, width, height);
        } else if (type.equals("portal")) {
            double x = element.getFloat("x");
            double y = element.getFloat("y");
            String otherBoard = element.getString("otherBoard", null);
            String otherPortal = element.getString("otherPortal");
            gizmo = new Portal(name, x, y, otherBoard, otherPortal);
        } else if (type.contains("Bumper")) {
            int x = element.getInteger("x");
            int y = element.getInteger("y");
            // Any value other than the literal "false" turns the bumper exploding.
            boolean isExploding =
                    !element.getString("explode", "false").equals("false");
            if (type.equals("squareBumper")) {
                gizmo = new SquareBumper(name, x, y, isExploding);
            } else if (type.equals("circleBumper")) {
                gizmo = new CircleBumper(name, x, y, isExploding);
            } else if (type.equals("triangleBumper")) {
                int orientation = element.getInteger("orientation");
                gizmo = new TriangleBumper(name, x, y, orientation,
                        isExploding);
            }
        } else if (type.contains("Flipper")) {
            int x = element.getInteger("x");
            int y = element.getInteger("y");
            int orientation = element.getInteger("orientation");
            if (type.equals("leftFlipper")) {
                gizmo = new Flipper(name, Type.LEFT, x, y, orientation);
            } else if (type.equals("rightFlipper")) {
                gizmo = new Flipper(name, Type.RIGHT, x, y, orientation);
            }
        }
        if (gizmo == null) {
            throw new IllegalArgumentException("Unsupported statement " +
                    element.getType());
        }
        String styleClass = element.getString("class",
                StyleRegistry.DEFAULT_CLASS);
        gizmo.setStyleClass(styleClass);
        return gizmo;
    }

    /**
     * Creates a board from a line description in a board file.
     *
     * @param element a parsed line describing a board
     * @return an empty board
     * @throws IllegalArgumentException if the element is not a board statement
     */
    static Board buildBoard(ElementDescription element) {
        if (!element.getType().equals("board"))
            throw new IllegalArgumentException("The element is not a board");
        String name = element.getString("name", null);
        int xSize = element.getInteger("xSize", 20);
        int ySize = element.getInteger("ySize", 20);
        double gravity = element.getFloat("gravity", 25.0);
        double friction1 = element.getFloat("friction1", 0.025);
        double friction2 = element.getFloat("friction2", 0.025);
        BoardConstants constants = new BoardConstants(name, xSize, ySize,
                gravity, friction1, friction2);
        return new Board(constants);
    }

    /**
     * Reads a board file and parses it into element descriptions.
     *
     * @param file path to the board file
     * @return a list of board element descriptions
     * @throws IOException if the file cannot be read
     */
    static List<ElementDescription> parse(File file) throws IOException {
        List<ElementDescription> elements = new ArrayList<ElementDescription>();
        // try-with-resources: the reader was previously leaked if a line
        // failed to parse or an I/O error occurred mid-file.
        try (BufferedReader reader = new BufferedReader(new FileReader(file))) {
            String line;
            while ((line = reader.readLine()) != null) {
                ElementDescription element = ElementDescription.fromLine(line);
                if (element != null)
                    elements.add(element);
            }
        }
        return elements;
    }
}
| |
package com.martindisch.weather;
import android.graphics.Color;
import android.os.Bundle;
import android.support.v4.content.ContextCompat;
import android.support.v4.widget.SwipeRefreshLayout;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuInflater;
import android.view.MenuItem;
import android.view.View;
import android.widget.Button;
import android.widget.RadioGroup;
import android.widget.TextView;
import android.widget.Toast;
import com.github.mikephil.charting.charts.LineChart;
import com.github.mikephil.charting.components.AxisBase;
import com.github.mikephil.charting.components.XAxis;
import com.github.mikephil.charting.data.Entry;
import com.github.mikephil.charting.data.LineData;
import com.github.mikephil.charting.data.LineDataSet;
import com.github.mikephil.charting.formatter.IAxisValueFormatter;
import com.loopj.android.http.AsyncHttpClient;
import com.loopj.android.http.AsyncHttpResponseHandler;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import cz.msebera.android.httpclient.Header;
public class MainActivity extends AppCompatActivity implements View.OnClickListener {
private TextView mLatestTemp, mLatestHum;
private SwipeRefreshLayout mSwipeContainer;
private LineChart mChart;
private RadioGroup mTimeframe;
private Button mLoadGraph;
private byte[] mLastResponse = null;
private int mTimeFrameSelection = ALL;
private static final int ALL = 0, WEEK = 1, DAY = 2;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
mLatestTemp = (TextView) findViewById(R.id.tvLatestTemp);
mLatestHum = (TextView) findViewById(R.id.tvLatestHum);
mSwipeContainer = (SwipeRefreshLayout) findViewById(R.id.swipeContainer);
mChart = (LineChart) findViewById(R.id.chart);
mLoadGraph = (Button) findViewById(R.id.bLoadGraph);
mLoadGraph.setOnClickListener(this);
mTimeframe = (RadioGroup) findViewById(R.id.rgTimeframe);
mTimeframe.check(R.id.rbAll);
mTimeframe.setOnCheckedChangeListener(new RadioGroup.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(RadioGroup radioGroup, int i) {
switch (i) {
case R.id.rbAll:
mTimeFrameSelection = ALL;
break;
case R.id.rbWeek:
mTimeFrameSelection = WEEK;
break;
case R.id.rbDay:
mTimeFrameSelection = DAY;
break;
}
if (mLastResponse != null) updateAll(mLastResponse, mTimeFrameSelection);
}
});
mChart.setDescription(null);
mChart.setHighlightPerDragEnabled(false);
mChart.setHighlightPerTapEnabled(false);
mChart.setPinchZoom(true);
mChart.getLegend().setDrawInside(true);
mChart.setExtraTopOffset(10);
XAxis xAxis = mChart.getXAxis();
xAxis.setLabelRotationAngle(-90);
xAxis.setLabelCount(10);
mSwipeContainer.setColorSchemeColors(ContextCompat.getColor(this, R.color.colorAccent));
mSwipeContainer.setEnabled(false);
}
@Override
protected void onResume() {
super.onResume();
fetchAndUpdate(false);
}
/**
* Gets data from the server (either whole history or only latest data depending on parameter
* and calls the respective update method to display it.
*
* @param all whether to get whole history (true) or only latest data (false)
*/
private void fetchAndUpdate(final boolean all) {
mSwipeContainer.setRefreshing(true);
AsyncHttpClient client = new AsyncHttpClient();
client.setMaxRetriesAndTimeout(1, 500);
client.get("http://" + getString(R.string.IP) + (all ? "/history" : "/latest"), new AsyncHttpResponseHandler() {
@Override
public void onSuccess(int statusCode, Header[] headers, byte[] responseBody) {
if (all) {
mLastResponse = responseBody;
updateAll(responseBody, mTimeFrameSelection);
} else {
updateLatest(responseBody);
}
}
@Override
public void onFailure(int statusCode, Header[] headers, byte[] responseBody, Throwable error) {
Toast.makeText(getApplicationContext(), getString(R.string.error_connecting), Toast.LENGTH_SHORT).show();
mSwipeContainer.setRefreshing(false);
}
});
}
/**
* Receives the server's response (full history), parses it, and displays the latest data as
* well as the graph.
*
* @param responseBody the server's response
* @param timeframe the timeframe to show in the graph, one of<br/>
* ALL = 0<br/>
* WEEK = 1<br/>
* DAY = 2
*/
private void updateAll(final byte[] responseBody, final int timeframe) {
new Thread(new Runnable() {
@Override
public void run() {
try {
ArrayList<String[]> history = Util.parseHistory(responseBody);
List<String[]> cutHistory = history;
if (timeframe == WEEK && history.size() >= 10080) {
cutHistory = history.subList(history.size() - 10080, history.size());
} else if (timeframe == DAY && history.size() >= 1440) {
cutHistory = history.subList(history.size() - 1440, history.size());
}
final List<String[]> fCutHistory = cutHistory;
final String[] latest = history.get(history.size() - 1);
ArrayList<Entry> temperature = new ArrayList<>(fCutHistory.size());
ArrayList<Entry> humidity = new ArrayList<>(fCutHistory.size());
float counter = 0;
for (String[] current : fCutHistory) {
temperature.add(new Entry(counter++, Float.parseFloat(current[1])));
humidity.add(new Entry(counter, Float.parseFloat(current[2])));
}
LineDataSet tempSet = new LineDataSet(temperature, getString(R.string.temperature));
tempSet.setDrawCircles(false);
tempSet.setColor(Color.RED);
LineDataSet humSet = new LineDataSet(humidity, getString(R.string.humidity));
humSet.setDrawCircles(false);
humSet.setColor(Color.BLUE);
final LineData lineData = new LineData(tempSet, humSet);
runOnUiThread(new Runnable() {
@Override
public void run() {
mLatestTemp.setText(String.format(getString(R.string.format_temp), latest[1]));
mLatestHum.setText(String.format(getString(R.string.format_hum), latest[2]));
mChart.fitScreen();
IAxisValueFormatter formatter = new IAxisValueFormatter() {
@Override
public String getFormattedValue(float value, AxisBase axis) {
int xValue = value >= fCutHistory.size() ? fCutHistory.size() - 1 : (int) value;
return Util.shortenTime(fCutHistory.get(xValue)[0]);
}
};
mChart.getXAxis().setValueFormatter(formatter);
mChart.setData(lineData);
mChart.invalidate();
}
});
} catch (IOException e) {
runOnUiThread(new Runnable() {
@Override
public void run() {
Toast.makeText(getApplicationContext(), getString(R.string.error_parsing), Toast.LENGTH_SHORT).show();
}
});
} finally {
runOnUiThread(new Runnable() {
@Override
public void run() {
mSwipeContainer.setRefreshing(false);
}
});
}
}
}).start();
}
/**
* Receives the server's response (latest data), parses and displays it.
*
* @param responseBody the server's response
*/
private void updateLatest(final byte[] responseBody) {
try {
String[] latest = Util.parseEntry(responseBody);
mLatestTemp.setText(String.format(getString(R.string.format_temp), latest[1]));
mLatestHum.setText(String.format(getString(R.string.format_hum), latest[2]));
} catch (IOException e) {
Toast.makeText(getApplicationContext(), getString(R.string.error_parsing), Toast.LENGTH_SHORT).show();
}
mSwipeContainer.setRefreshing(false);
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
switch (item.getItemId()) {
case R.id.action_refresh:
fetchAndUpdate(false);
break;
}
return super.onOptionsItemSelected(item);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
MenuInflater inflater = getMenuInflater();
inflater.inflate(R.menu.menu_main, menu);
return super.onCreateOptionsMenu(menu);
}
@Override
public void onClick(View view) {
if (view.getId() == R.id.bLoadGraph) {
fetchAndUpdate(true);
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package myservice.mynamespace.service;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind;
import org.apache.olingo.commons.api.edm.FullQualifiedName;
import org.apache.olingo.commons.api.edm.provider.CsdlAbstractEdmProvider;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainer;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityContainerInfo;
import org.apache.olingo.commons.api.edm.provider.CsdlEntitySet;
import org.apache.olingo.commons.api.edm.provider.CsdlEntityType;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlNavigationPropertyBinding;
import org.apache.olingo.commons.api.edm.provider.CsdlProperty;
import org.apache.olingo.commons.api.edm.provider.CsdlPropertyRef;
import org.apache.olingo.commons.api.edm.provider.CsdlSchema;
/**
 * EDM provider declaring the demo model: the Product and Category entity
 * types, their entity sets, and the bidirectional navigation between them.
 */
public class DemoEdmProvider extends CsdlAbstractEdmProvider {
  // Service Namespace
  public static final String NAMESPACE = "OData.Demo";
  // EDM Container
  public static final String CONTAINER_NAME = "Container";
  public static final FullQualifiedName CONTAINER = new FullQualifiedName(NAMESPACE, CONTAINER_NAME);
  // Entity Types Names
  public static final String ET_PRODUCT_NAME = "Product";
  public static final FullQualifiedName ET_PRODUCT_FQN = new FullQualifiedName(NAMESPACE, ET_PRODUCT_NAME);
  public static final String ET_CATEGORY_NAME = "Category";
  public static final FullQualifiedName ET_CATEGORY_FQN = new FullQualifiedName(NAMESPACE, ET_CATEGORY_NAME);
  // Entity Set Names
  public static final String ES_PRODUCTS_NAME = "Products";
  public static final String ES_CATEGORIES_NAME = "Categories";

  @Override
  public CsdlEntityType getEntityType(FullQualifiedName entityTypeName) {
    // Called once per EntityType configured in the Schema.
    if (entityTypeName.equals(ET_PRODUCT_FQN)) {
      return buildProductType();
    }
    if (entityTypeName.equals(ET_CATEGORY_FQN)) {
      return buildCategoryType();
    }
    return null;
  }

  /** Product: ID (key), Name, Description, and a required many-to-one link to its Category. */
  private CsdlEntityType buildProductType() {
    CsdlProperty idProperty = new CsdlProperty().setName("ID")
        .setType(EdmPrimitiveTypeKind.Int32.getFullQualifiedName());
    CsdlProperty nameProperty = new CsdlProperty().setName("Name")
        .setType(EdmPrimitiveTypeKind.String.getFullQualifiedName());
    CsdlProperty descriptionProperty = new CsdlProperty().setName("Description")
        .setType(EdmPrimitiveTypeKind.String.getFullQualifiedName());

    CsdlPropertyRef keyRef = new CsdlPropertyRef();
    keyRef.setName("ID");

    // many-to-one, null not allowed (product must have a category)
    CsdlNavigationProperty toCategory = new CsdlNavigationProperty().setName("Category")
        .setType(ET_CATEGORY_FQN).setNullable(false).setPartner("Products");
    List<CsdlNavigationProperty> navigationProperties = new ArrayList<CsdlNavigationProperty>();
    navigationProperties.add(toCategory);

    CsdlEntityType productType = new CsdlEntityType();
    productType.setName(ET_PRODUCT_NAME);
    productType.setProperties(Arrays.asList(idProperty, nameProperty, descriptionProperty));
    productType.setKey(Arrays.asList(keyRef));
    productType.setNavigationProperties(navigationProperties);
    return productType;
  }

  /** Category: ID (key), Name, and a one-to-many link back to its Products. */
  private CsdlEntityType buildCategoryType() {
    CsdlProperty idProperty = new CsdlProperty().setName("ID")
        .setType(EdmPrimitiveTypeKind.Int32.getFullQualifiedName());
    CsdlProperty nameProperty = new CsdlProperty().setName("Name")
        .setType(EdmPrimitiveTypeKind.String.getFullQualifiedName());

    CsdlPropertyRef keyRef = new CsdlPropertyRef();
    keyRef.setName("ID");

    // one-to-many
    CsdlNavigationProperty toProducts = new CsdlNavigationProperty().setName("Products")
        .setType(ET_PRODUCT_FQN).setCollection(true).setPartner("Category");
    List<CsdlNavigationProperty> navigationProperties = new ArrayList<CsdlNavigationProperty>();
    navigationProperties.add(toProducts);

    CsdlEntityType categoryType = new CsdlEntityType();
    categoryType.setName(ET_CATEGORY_NAME);
    categoryType.setProperties(Arrays.asList(idProperty, nameProperty));
    categoryType.setKey(Arrays.asList(keyRef));
    categoryType.setNavigationProperties(navigationProperties);
    return categoryType;
  }

  @Override
  public CsdlEntitySet getEntitySet(FullQualifiedName entityContainer, String entitySetName) {
    if (!entityContainer.equals(CONTAINER)) {
      return null;
    }
    if (entitySetName.equals(ES_PRODUCTS_NAME)) {
      return createEntitySet(ES_PRODUCTS_NAME, ET_PRODUCT_FQN, "Category", "Categories");
    }
    if (entitySetName.equals(ES_CATEGORIES_NAME)) {
      return createEntitySet(ES_CATEGORIES_NAME, ET_CATEGORY_FQN, "Products", "Products");
    }
    return null;
  }

  /**
   * Builds an entity set carrying a single navigation-property binding.
   *
   * @param setName   the entity set name
   * @param typeFqn   the entity type contained in the set
   * @param navPath   the path from entity type to navigation property
   * @param navTarget the target entity set, where the navigation property points to
   */
  private static CsdlEntitySet createEntitySet(String setName, FullQualifiedName typeFqn,
                                               String navPath, String navTarget) {
    CsdlNavigationPropertyBinding binding = new CsdlNavigationPropertyBinding();
    binding.setTarget(navTarget);
    binding.setPath(navPath);
    List<CsdlNavigationPropertyBinding> bindings = new ArrayList<CsdlNavigationPropertyBinding>();
    bindings.add(binding);

    CsdlEntitySet entitySet = new CsdlEntitySet();
    entitySet.setName(setName);
    entitySet.setType(typeFqn);
    entitySet.setNavigationPropertyBindings(bindings);
    return entitySet;
  }

  @Override
  public CsdlEntityContainerInfo getEntityContainerInfo(FullQualifiedName entityContainerName) {
    // Invoked when displaying the service document, e.g.
    // http://localhost:8080/DemoService/DemoService.svc
    if (entityContainerName == null || entityContainerName.equals(CONTAINER)) {
      CsdlEntityContainerInfo containerInfo = new CsdlEntityContainerInfo();
      containerInfo.setContainerName(CONTAINER);
      return containerInfo;
    }
    return null;
  }

  @Override
  public List<CsdlSchema> getSchemas() {
    CsdlSchema schema = new CsdlSchema();
    schema.setNamespace(NAMESPACE);

    // EntityTypes
    List<CsdlEntityType> entityTypes = new ArrayList<CsdlEntityType>();
    entityTypes.add(getEntityType(ET_PRODUCT_FQN));
    entityTypes.add(getEntityType(ET_CATEGORY_FQN));
    schema.setEntityTypes(entityTypes);

    // EntityContainer
    schema.setEntityContainer(getEntityContainer());

    List<CsdlSchema> schemas = new ArrayList<CsdlSchema>();
    schemas.add(schema);
    return schemas;
  }

  @Override
  public CsdlEntityContainer getEntityContainer() {
    List<CsdlEntitySet> entitySets = new ArrayList<CsdlEntitySet>();
    entitySets.add(getEntitySet(CONTAINER, ES_PRODUCTS_NAME));
    entitySets.add(getEntitySet(CONTAINER, ES_CATEGORIES_NAME));

    CsdlEntityContainer container = new CsdlEntityContainer();
    container.setName(CONTAINER_NAME);
    container.setEntitySets(entitySets);
    return container;
  }
}
| |
/*
* Licensed to Crate under one or more contributor license agreements.
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Crate licenses this file
* to you under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*
* However, if you have executed another commercial license agreement
* with Crate these terms will supersede the license and you may use the
* software solely pursuant to the terms of the relevant commercial
* agreement.
*/
package io.crate.statistics;
import io.crate.Streamer;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
public final class MostCommonValues {
// Shared empty instance used when no value qualifies as most-common.
public static final MostCommonValues EMPTY = new MostCommonValues(new Object[0], new double[0]);
// Upper bound on how many most-common values are kept (cf. the PostgreSQL-derived
// selection logic described in fromCandidates()).
static final int MCV_TARGET = 100;
// Parallel arrays: values[i] occurs with relative frequency frequencies[i].
private final Object[] values;
private final double[] frequencies;
/**
 * Builds the most-common-values (MCV) list from candidate counts gathered on a sample.
 *
 * @param nullFraction   fraction of sampled rows that were null
 * @param numTracked     number of candidate values tracked
 * @param distinctValues number of distinct values observed in the sample
 * @param approxDistinct estimated number of distinct values in the whole table
 * @param samples        sampled values; candidates index into this list via {@code first}
 * @param numTotalRows   total number of rows in the table
 * @param candidates     candidate values with their sample counts
 *                       (NOTE(review): assumed ordered most-frequent first — confirm at call site)
 * @return the MCV list, or {@link #EMPTY} if no value is worth keeping
 */
public static <T> MostCommonValues fromCandidates(double nullFraction,
int numTracked,
int distinctValues,
double approxDistinct,
List<T> samples,
long numTotalRows,
MVCCandidate[] candidates) {
/* From PostgreSQL:
*
* Decide how many values are worth storing as most-common values. If
* we are able to generate a complete MCV list (all the values in the
* sample will fit, and we think these are all the ones in the table),
* then do so. Otherwise, store only those values that are
* significantly more common than the values not in the list.
*
* Note: the first of these cases is meant to address columns with
* small, fixed sets of possible values, such as boolean or enum
* columns. If we can *completely* represent the column population by
* an MCV list that will fit into the stats target, then we should do
* so and thus provide the planner with complete information. But if
* the MCV list is not complete, it's generally worth being more
* selective, and not just filling it all the way up to the stats
* target.
*/
int numMcv = MCV_TARGET;
// Complete list: every distinct value was tracked and fits within the target.
if (numTracked == distinctValues && approxDistinct > 0 && numTracked <= numMcv) {
numMcv = numTracked;
} else {
if (numMcv > numTracked) {
numMcv = numTracked;
}
if (numMcv > 0) {
// Ask the selectivity heuristic how many of the leading candidates to keep.
int[] mcvCounts = new int[numMcv];
for (int i = 0; i < numMcv; i++) {
mcvCounts[i] = candidates[i].count;
}
numMcv = decodeHowManyCommonValuesToKeep(
mcvCounts, numMcv, approxDistinct, nullFraction, samples.size(), numTotalRows);
}
}
if (numMcv == 0) {
return EMPTY;
}
// Materialize the kept candidates; frequency is relative to the sample size.
Object[] values = new Object[numMcv];
double[] frequencies = new double[numMcv];
for (int i = 0; i < numMcv; i++) {
values[i] = samples.get(candidates[i].first);
frequencies[i] = (double) candidates[i].count / (double) samples.size();
}
return new MostCommonValues(values, frequencies);
}
private static int decodeHowManyCommonValuesToKeep(int[] mcvCounts,
int numMcv,
double approxDistinct,
double nullFraction,
int numSampleRows,
long numTotalRows) {
assert mcvCounts.length == numMcv : "mcvCounts.length must be equal to numMcv";
if (numSampleRows == numTotalRows || numTotalRows <= 1) {
// Entire table was sampled, keep all candidates
return numMcv;
}
/* From PostgreSQL:
*
* Exclude the least common values from the MCV list, if they are not
* significantly more common than the estimated selectivity they would
* have if they weren't in the list. All non-MCV values are assumed to be
* equally common, after taking into account the frequencies of all the
* values in the MCV list and the number of nulls (c.f. eqsel()).
*
* Here sumcount tracks the total count of all but the last (least common)
* value in the MCV list, allowing us to determine the effect of excluding
* that value from the list.
*
* Note that we deliberately do this by removing values from the full
* list, rather than starting with an empty list and adding values,
* because the latter approach can fail to add any values if all the most
* common values have around the same frequency and make up the majority
* of the table, so that the overall average frequency of all values is
* roughly the same as that of the common values. This would lead to any
* uncommon values being significantly overestimated.
*/
int sumCount = 0;
for (int i = 0; i < numMcv - 1; i++) {
sumCount += mcvCounts[i];
}
while (numMcv > 0) {
/*
* Estimated selectivity the least common value would have if it
* wasn't in the MCV list (c.f. eqsel()).
*/
double selectivity = 1.0 - (double) sumCount / numSampleRows - nullFraction;
selectivity = Math.max(0.0, Math.min(1.0, selectivity));
double otherDistinct = approxDistinct - (numMcv - 1);
if (otherDistinct > 1) {
selectivity /= otherDistinct;
}
/*
* If the value is kept in the MCV list, its population frequency is
* assumed to equal its sample frequency. We use the lower end of a
* textbook continuity-corrected Wald-type confidence interval to
* determine if that is significantly more common than the non-MCV
* frequency --- specifically we assume the population frequency is
* highly likely to be within around 2 standard errors of the sample
* frequency, which equates to an interval of 2 standard deviations
* either side of the sample count, plus an additional 0.5 for the
* continuity correction. Since we are sampling without replacement,
* this is a hypergeometric distribution.
*
* XXX: Empirically, this approach seems to work quite well, but it
* may be worth considering more advanced techniques for estimating
* the confidence interval of the hypergeometric distribution.
*/
double N = numTotalRows;
double n = numSampleRows;
double K = N * mcvCounts[numMcv - 1] / n;
double variance = n * K * (N - K) * (N - n) / (N * N * (N - 1));
double stddev = Math.sqrt(variance);
if (mcvCounts[numMcv - 1] > selectivity * numSampleRows + 2 * stddev + 0.5) {
/*
* The value is significantly more common than the non-MCV
* selectivity would suggest. Keep it, and all the other more
* common values in the list.
*/
break;
} else {
/* Discard this value and consider the next least common value */
numMcv--;
if (numMcv == 0)
break;
sumCount -= mcvCounts[numMcv - 1];
}
}
return numMcv;
}
public MostCommonValues(Object[] values, double[] frequencies) {
assert values.length == frequencies.length : "values and frequencies must have the same number of items";
this.values = values;
this.frequencies = frequencies;
}
public MostCommonValues(Streamer valueStreamer, StreamInput in) throws IOException {
int numValues = in.readVInt();
values = new Object[numValues];
frequencies = new double[numValues];
for (int i = 0; i < numValues; i++) {
values[i] = valueStreamer.readValueFrom(in);
frequencies[i] = in.readDouble();
}
}
public void writeTo(Streamer valueStreamer, StreamOutput out) throws IOException {
out.writeVInt(values.length);
for (int i = 0; i < values.length; i++) {
valueStreamer.writeValueTo(out, values[i]);
out.writeDouble(frequencies[i]);
}
}
public Object[] values() {
return values;
}
public double[] frequencies() {
return frequencies;
}
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
MostCommonValues that = (MostCommonValues) o;
if (!Arrays.equals(values, that.values)) {
return false;
}
return Arrays.equals(frequencies, that.frequencies);
}
@Override
public int hashCode() {
int result = Arrays.hashCode(values);
result = 31 * result + Arrays.hashCode(frequencies);
return result;
}
static class MVCCandidate {
int first = 0;
int count = 0;
@Override
public String toString() {
return "MVCCandidate{" +
"first=" + first +
", count=" + count +
'}';
}
}
}
| |
/*
* Copyright 2016 Yahoo Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.yahoo.athenz.auth.token;
import java.util.Arrays;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.yahoo.athenz.auth.util.Crypto;
/**
 * Represents an Athenz role token of the form
 * {@code v=Z1;d=domain;r=role1,role2;...;s=signature}. Instances are either
 * assembled via {@link Builder} or parsed from a previously signed token
 * string (signature verification happens separately during authentication).
 */
public class RoleToken extends Token {

    protected List<String> roles;
    private String principal = null;
    private String proxyUser = null;
    private boolean domainCompleteRoleSet = false;

    private static final Logger LOG = LoggerFactory.getLogger(RoleToken.class);

    /**
     * Builder for {@link RoleToken}. Version, domain and roles are required;
     * everything else is optional with sensible defaults.
     */
    public static class Builder {

        // required attributes
        private String domain;
        private List<String> roles;
        private String version;
        private String principal = null;
        private String proxyUser = null;
        private boolean domainCompleteRoleSet = false;

        // optional attributes with default values
        private String salt = Crypto.randomSalt();
        private String host = null;
        private String ip = null;
        private String keyId = "0";
        private long expirationWindow = 3600;
        private long issueTime = 0;

        // Note that it is expected that the Strings in roles should already be lowercased.
        public Builder(String version, String domain, List<String> roles) {
            if (version == null || domain == null || roles == null) {
                throw new IllegalArgumentException("version, domain and roles parameters must not be null.");
            }
            if (version.isEmpty() || domain.isEmpty() || roles.isEmpty()) {
                throw new IllegalArgumentException("version, domain and roles parameters must have values.");
            }
            this.version = version;
            this.domain = domain;
            this.roles = roles;
        }

        public Builder principal(String value) {
            this.principal = value;
            return this;
        }

        public Builder host(String value) {
            this.host = value;
            return this;
        }

        public Builder salt(String value) {
            this.salt = value;
            return this;
        }

        public Builder ip(String value) {
            this.ip = value;
            return this;
        }

        public Builder keyId(String value) {
            this.keyId = value;
            return this;
        }

        public Builder proxyUser(String value) {
            this.proxyUser = value;
            return this;
        }

        public Builder issueTime(long value) {
            this.issueTime = value;
            return this;
        }

        public Builder expirationWindow(long value) {
            this.expirationWindow = value;
            return this;
        }

        public Builder domainCompleteRoleSet(boolean value) {
            this.domainCompleteRoleSet = value;
            return this;
        }

        public RoleToken build() {
            return new RoleToken(this);
        }
    }

    /**
     * Constructs the unsigned token string from the builder's attributes.
     * The field order produced here is part of the signed payload and must
     * not be changed.
     */
    private RoleToken(Builder builder) {
        this.version = builder.version;
        this.domain = builder.domain;
        this.roles = builder.roles;
        this.host = builder.host;
        this.salt = builder.salt;
        this.keyId = builder.keyId;
        this.ip = builder.ip;
        this.principal = builder.principal;
        this.proxyUser = builder.proxyUser;
        this.domainCompleteRoleSet = builder.domainCompleteRoleSet;
        super.setTimeStamp(builder.issueTime, builder.expirationWindow);

        StringBuilder strBuilder = new StringBuilder(defaultBuilderBufSize);
        strBuilder.append("v=");
        strBuilder.append(version);
        strBuilder.append(";d=");
        strBuilder.append(domain);
        strBuilder.append(";r=");
        // roles are comma-separated with no trailing separator
        int i = 0;
        for (String role : roles) {
            strBuilder.append(role);
            if (++i != roles.size()) {
                strBuilder.append(",");
            }
        }
        if (domainCompleteRoleSet) {
            strBuilder.append(";c=1");
        }
        if (principal != null && !principal.isEmpty()) {
            strBuilder.append(";p=");
            strBuilder.append(principal);
        }
        if (host != null && !host.isEmpty()) {
            strBuilder.append(";h=");
            strBuilder.append(host);
        }
        if (proxyUser != null && !proxyUser.isEmpty()) {
            strBuilder.append(";proxy=");
            strBuilder.append(proxyUser);
        }
        strBuilder.append(";a=");
        strBuilder.append(salt);
        strBuilder.append(";t=");
        strBuilder.append(timestamp);
        strBuilder.append(";e=");
        strBuilder.append(expiryTime);
        strBuilder.append(";k=");
        strBuilder.append(keyId);
        if (ip != null && !ip.isEmpty()) {
            strBuilder.append(";i=");
            strBuilder.append(ip);
        }
        unsignedToken = strBuilder.toString();
        // parameterized logging avoids building the message when debug is off
        LOG.debug("RoleToken created: {}", unsignedToken);
    }

    /**
     * Parses a signed role token string into its components.
     *
     * @param signedToken the full token, typically ending in {@code ;s=signature}
     * @throws IllegalArgumentException if the token is null/empty or missing
     *         the required domain or roles components
     */
    public RoleToken(String signedToken) {
        LOG.debug("Constructing RoleToken with input string: {}", signedToken);

        if (signedToken == null || signedToken.isEmpty()) {
            throw new IllegalArgumentException("Input String signedToken must not be empty");
        }

        /*
         * first we need to extract data and signature parts
         * the signature is always at the end of the token.
         * The format for the Token is as follows:
         *
         * v=Z1;d=sports;r=role1,role2;a=salt;t=tstamp;e=expiry;k=1;s=sig
         *
         * v: version number Z1 (string)
         * d: domain name where the roles are valid for
         * r: list of comma separated roles
         * c: the list of roles is complete in domain
         * p: principal that got the token issued for
         * a: random 8 byte salt value hex encoded
         * t: timestamp when the token was generated
         * h: host that issued this role token
         * e: expiry timestamp based on SIA configuration
         * k: identifier - either version or zone name
         * s: signature generated over the "v=Z1;a=salt;...;e=expiry" string
         *    using Service's private Key and y64 encoded
         * proxy: request was done by this authorized proxy user
         */
        int idx = signedToken.indexOf(";s=");
        if (idx != -1) {
            unsignedToken = signedToken.substring(0, idx);
            signature = signedToken.substring(idx + 3);
        }

        // if no signature was found, parse the whole input string
        final String parseToken = unsignedToken != null ? unsignedToken : signedToken;
        String roleNames = null;
        for (String item : parseToken.split(";")) {
            String [] kv = item.split("=");
            if (kv.length == 2) {
                switch (kv[0]) {
                    case "a":
                        salt = kv[1];
                        break;
                    case "c":
                        if (Integer.parseInt(kv[1]) == 1) {
                            domainCompleteRoleSet = true;
                        }
                        break;
                    case "d":
                        domain = kv[1];
                        break;
                    case "e":
                        expiryTime = Long.parseLong(kv[1]);
                        break;
                    case "h":
                        host = kv[1];
                        break;
                    case "i":
                        ip = kv[1];
                        break;
                    case "k":
                        keyId = kv[1];
                        break;
                    case "p":
                        principal = kv[1];
                        break;
                    case "r":
                        roleNames = kv[1];
                        break;
                    case "t":
                        timestamp = Long.parseLong(kv[1]);
                        break;
                    case "proxy":
                        proxyUser = kv[1];
                        break;
                    case "v":
                        version = kv[1];
                        break;
                }
            }
        }

        /* the required attributes for the token are
         * domain and roles. The signature will be verified
         * during the authenticate phase but now we'll make
         * sure that domain and roles are present
         */
        if (domain == null || domain.isEmpty()) {
            throw new IllegalArgumentException("SignedToken does not contain required domain component");
        }
        if (roleNames == null || roleNames.isEmpty()) {
            throw new IllegalArgumentException("SignedToken does not contain required roles component");
        }
        roles = Arrays.asList(roleNames.split(","));

        this.signedToken = signedToken;

        if (LOG.isDebugEnabled()) {
            // fixed: domainCompleteRoleSet was previously missing its ':' separator
            LOG.debug("Values extracted from token version: {} domain: {} roles: {} principal: {}"
                    + " host: {} salt: {} timestamp: {} expiryTime: {} domainCompleteRoleSet: {}"
                    + " keyId: {} ip: {} proxyUser: {} signature: {}",
                    version, domain, roleNames, principal, host, salt, timestamp, expiryTime,
                    domainCompleteRoleSet, keyId, ip, proxyUser, signature);
        }
    }

    public String getPrincipal() {
        return principal;
    }

    public List<String> getRoles() {
        return roles;
    }

    public String getProxyUser() {
        return proxyUser;
    }

    public boolean getDomainCompleteRoleSet() {
        return domainCompleteRoleSet;
    }
}
| |
/*
* Swift Parallel Scripting Language (http://swift-lang.org)
* Code from Java CoG Kit Project (see notice below) with modifications.
*
* Copyright 2005-2014 University of Chicago
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//----------------------------------------------------------------------
//This code is developed as part of the Java CoG Kit project
//The terms of the license can be found at http://www.cogkit.org/license
//This message may not be removed or altered.
//----------------------------------------------------------------------
/*
* Created on Jul 6, 2014
*/
package org.griphyn.vdl.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.TreeSet;
/**
 * A tree of configuration values keyed by dotted paths (e.g. {@code a.b.c}).
 * Supports wildcard lookups and expansion, and pretty-printing of the tree.
 *
 * @param <T> the type of values stored at the leaves
 */
public class ConfigTree<T> {

    /**
     * A single tree node: an optional value plus named child nodes.
     */
    public static class Node<T> {
        // lazily created; null means "no children yet"
        private Map<String, Node<T>> nodes;
        private T value;

        // Guard available to subclasses for validating non-empty keys.
        protected void checkEmpty(String k) {
            if (k.isEmpty()) {
                throw new IllegalArgumentException();
            }
        }

        /** Returns the first dotted segment of {@code k} (or all of it if no dot). */
        protected String first(String k) {
            int ix = k.indexOf('.');
            if (ix == -1) {
                return k;
            }
            else {
                return k.substring(0, ix);
            }
        }

        /** Returns everything after the first dot of {@code k} ("" if no dot). */
        protected String rest(String k) {
            int ix = k.indexOf('.');
            if (ix == -1) {
                return "";
            }
            else {
                return k.substring(ix + 1);
            }
        }

        /** Returns this node's value (may be null). */
        public T get() {
            return value;
        }

        public T get(String k) {
            return get(k, null);
        }

        /**
         * Looks up the value at path {@code k}, trying {@code wildcard} as a
         * fallback child name at each level.
         *
         * @throws NoSuchElementException if the path does not exist
         */
        public T get(String k, String wildcard) {
            if (k.isEmpty()) {
                return value;
            }
            if (nodes == null || nodes.isEmpty()) {
                throw new NoSuchElementException();
            }
            Node<T> t = nodes.get(first(k));
            if (t == null && wildcard != null) {
                t = nodes.get(wildcard);
            }
            if (t == null) {
                throw new NoSuchElementException();
            }
            return t.get(rest(k), wildcard);
        }

        /** Returns true iff path {@code k} exists and holds a non-null value. */
        public boolean hasKey(String k) {
            if (k.isEmpty()) {
                return value != null;
            }
            if (nodes == null || nodes.isEmpty()) {
                return false;
            }
            Node<T> t = nodes.get(first(k));
            if (t == null) {
                return false;
            }
            return t.hasKey(rest(k));
        }

        /**
         * Stores {@code v} at path {@code k}, creating intermediate nodes as
         * needed, and returns the previous value at that path (or null).
         */
        public T put(String k, T v) {
            if (k.isEmpty()) {
                return set(v);
            }
            if (nodes == null) {
                nodes = new HashMap<String, Node<T>>();
            }
            String first = first(k);
            String rest = rest(k);
            // fixed: previously compared 'k == first' (String reference
            // equality), which only worked because first() happened to return
            // the same object for dot-free keys. An empty rest is the real
            // "last segment" condition.
            if (rest.isEmpty()) {
                return getOrCreateTree(first).set(v);
            }
            return getOrCreateTree(first).put(rest, v);
        }

        // Returns the child named k, creating it if absent.
        private Node<T> getOrCreateTree(String k) {
            Node<T> t = nodes.get(k);
            if (t == null) {
                t = new Node<T>();
                nodes.put(k, t);
            }
            return t;
        }

        /**
         * Appends the dotted paths of all leaves under this node to {@code l}.
         * {@code partial} is the path prefix accumulated so far (null at root).
         */
        public void getLeafPaths(List<String> l, String partial) {
            if (nodes != null) {
                for (Map.Entry<String, Node<T>> e : nodes.entrySet()) {
                    if (partial == null) {
                        e.getValue().getLeafPaths(l, e.getKey());
                    }
                    else {
                        e.getValue().getLeafPaths(l, partial + "." + e.getKey());
                    }
                }
            }
            else {
                l.add(partial);
            }
        }

        /**
         * Expands {@code wildcard} segments of path {@code k} against the
         * children present in this tree, appending each concrete path to
         * {@code l}. Segments beyond the tree's depth are appended verbatim.
         */
        public void expandWildcards(List<String> l, String k, String wildcard, String partial) {
            if (nodes == null || nodes.isEmpty()) {
                if (k.isEmpty()) {
                    l.add(partial);
                }
                else {
                    throw new IllegalArgumentException("No such path: " + partial + "." + k);
                }
                return;
            }
            String mk = first(k);
            if (mk.equals(wildcard)) {
                // expand against every existing child
                for (Map.Entry<String, Node<T>> e : nodes.entrySet()) {
                    Node<T> n = e.getValue();
                    String rest = rest(k);
                    String p;
                    if (partial == null) {
                        p = e.getKey();
                    }
                    else {
                        p = partial + "." + e.getKey();
                    }
                    n.expandWildcards(l, rest, wildcard, p);
                }
            }
            else {
                Node<T> t = nodes.get(mk);
                if (t == null) {
                    if (first(rest(k)).equals(wildcard)) {
                        // x.* is allowed to not be there
                        return;
                    }
                    // the remainder of the path isn't in the tree; emit it as-is
                    if (partial == null || k.equals("")) {
                        l.add(k);
                    }
                    else {
                        l.add(partial + "." + k);
                    }
                    return;
                }
                String rest = rest(k);
                String p;
                if (partial == null) {
                    p = mk;
                }
                else {
                    p = partial + "." + mk;
                }
                t.expandWildcards(l, rest, wildcard, p);
            }
        }

        /** Sets this node's value and returns the previous one. */
        public T set(T v) {
            T old = value;
            value = v;
            return old;
        }

        public boolean isLeaf() {
            return nodes == null;
        }

        public Set<Map.Entry<String, Node<T>>> entrySet() {
            if (nodes == null) {
                Map<String, Node<T>> empty = Collections.emptyMap();
                return empty.entrySet();
            }
            else {
                return nodes.entrySet();
            }
        }

        // Renders this subtree; chains of single-child nodes are collapsed
        // into one dotted key.
        private void toString(StringBuilder sb, int level, String k, String full, boolean sort, ValueFormatter f) {
            if (nodes == null || nodes.isEmpty()) {
                f.format(k, full, value, level, sb);
            }
            else if (nodes.size() == 1) {
                String key = nodes.keySet().iterator().next();
                if (k == null) {
                    nodes.values().iterator().next().toString(sb, level, key, cat(full, key), sort, f);
                }
                else {
                    String nkey = cat(k, key);
                    nodes.values().iterator().next().toString(sb, level, nkey, cat(full, key), sort, f);
                }
            }
            else {
                for (int i = 0; i < level; i++) {
                    sb.append('\t');
                }
                if (k != null) {
                    sb.append(k);
                    sb.append(' ');
                }
                sb.append("{\n");
                Collection<String> keys;
                if (sort) {
                    keys = new TreeSet<String>(nodes.keySet());
                }
                else {
                    keys = nodes.keySet();
                }
                for (String key : keys) {
                    nodes.get(key).toString(sb, level + 1, key, cat(full, key), sort, f);
                }
                for (int i = 0; i < level; i++) {
                    sb.append('\t');
                }
                sb.append("}\n");
            }
        }

        // Joins a prefix and a key with a dot (prefix may be null).
        private String cat(String full, String key) {
            if (full == null) {
                return key;
            }
            else {
                return full + "." + key;
            }
        }

        public List<String> getLeafPaths() {
            List<String> l = new ArrayList<String>();
            getLeafPaths(l, null);
            return l;
        }

        @Override
        public String toString() {
            StringBuilder sb = new StringBuilder();
            toString(sb, 0, null, null, false, DEFAULT_VALUE_FORMATTER);
            return sb.toString();
        }
    }

    private final Node<T> root;

    public ConfigTree() {
        root = new Node<T>();
    }

    public T get(String k) {
        return get(k, null);
    }

    /** Stores {@code v} at path {@code k}; returns the previous value or null. */
    public T put(String k, T v) {
        return root.put(k, v);
    }

    /**
     * Looks up path {@code k}, falling back to {@code wildcard} children.
     * Returns null if the path does not exist.
     *
     * @throws NoSuchElementException if {@code k} addresses a non-leaf in a
     *         way the node code rejects as illegal
     */
    public T get(String k, String wildcard) {
        try {
            return root.get(k, wildcard);
        }
        catch (IllegalArgumentException e) {
            throw new NoSuchElementException("Not a leaf: " + k);
        }
        catch (NoSuchElementException e) {
            return null;
        }
    }

    public List<String> getLeafPaths() {
        return root.getLeafPaths();
    }

    /**
     * Find all paths matching the given path. Wildcards are expanded based
     * on what's in the tree, but the full paths do not need to exist in the tree.
     *
     * So if a.1.b.2 and a.1.b.3 were in the tree, a.*.b.*.c would generate
     * a.1.b.2.c and a.1.b.3.c
     *
     */
    public List<String> expandWildcards(String key, String wildcard) {
        List<String> l = new ArrayList<String>();
        root.expandWildcards(l, key, wildcard, null);
        return l;
    }

    public boolean hasKey(String k) {
        return root.hasKey(k);
    }

    public Set<Map.Entry<String, Node<T>>> entrySet() {
        return root.entrySet();
    }

    @Override
    public String toString() {
        return toString(false, DEFAULT_VALUE_FORMATTER);
    }

    public String toString(boolean sort, ValueFormatter f) {
        StringBuilder sb = new StringBuilder();
        root.toString(sb, 0, null, null, sort, f);
        return sb.toString();
    }

    /** Callback used when rendering leaf values in {@link #toString(boolean, ValueFormatter)}. */
    public interface ValueFormatter {
        void format(String key, String full, Object value, int indentationLevel, StringBuilder sb);
    }

    /** Default formatter: {@code key: value}, quoting String values. */
    public static class DefaultValueFormatter implements ValueFormatter {
        @Override
        public void format(String key, String full, Object value, int indentationLevel, StringBuilder sb) {
            for (int i = 0; i < indentationLevel; i++) {
                sb.append('\t');
            }
            if (value != null) {
                sb.append(key);
                sb.append(": ");
                if (value instanceof String) {
                    sb.append('\"');
                    sb.append(value);
                    sb.append('\"');
                }
                else {
                    sb.append(value);
                }
                sb.append('\n');
            }
        }
    }

    public static final ValueFormatter DEFAULT_VALUE_FORMATTER = new DefaultValueFormatter();
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hive.ql.io.parquet.read;
import java.time.DateTimeException;
import java.time.ZoneId;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.ListIterator;
import java.util.Map;
import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.io.IOConstants;
import org.apache.hadoop.hive.ql.io.parquet.convert.DataWritableRecordConverter;
import org.apache.hadoop.hive.ql.io.parquet.serde.ParquetHiveSerDe;
import org.apache.hadoop.hive.ql.io.parquet.write.DataWritableWriteSupport;
import org.apache.hadoop.hive.ql.metadata.VirtualColumn;
import org.apache.hadoop.hive.ql.optimizer.FieldNode;
import org.apache.hadoop.hive.ql.optimizer.NestedColumnFieldPruningUtils;
import org.apache.hadoop.hive.serde2.ColumnProjectionUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
import org.apache.hadoop.hive.serde2.typeinfo.ListTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.StructTypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoFactory;
import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
import org.apache.hadoop.io.ArrayWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.StringUtils;
import org.apache.parquet.hadoop.api.InitContext;
import org.apache.parquet.hadoop.api.ReadSupport;
import org.apache.parquet.io.api.RecordMaterializer;
import org.apache.parquet.schema.GroupType;
import org.apache.parquet.schema.MessageType;
import org.apache.parquet.schema.OriginalType;
import org.apache.parquet.schema.PrimitiveType.PrimitiveTypeName;
import org.apache.parquet.schema.Type;
import org.apache.parquet.schema.Type.Repetition;
import org.apache.parquet.schema.Types;
/**
*
* A MapWritableReadSupport
*
* Manages the translation between Hive and Parquet
*
*/
public class DataWritableReadSupport extends ReadSupport<ArrayWritable> {
public static final String HIVE_TABLE_AS_PARQUET_SCHEMA = "HIVE_TABLE_SCHEMA";
public static final String PARQUET_COLUMN_INDEX_ACCESS = "parquet.column.index.access";
private TypeInfo hiveTypeInfo;
/**
* From a string which columns names (including hive column), return a list
* of string columns
*
* @param columns comma separated list of columns
* @return list with virtual columns removed
*/
public static List<String> getColumnNames(final String columns) {
    // The unchecked cast assumes removeVirtualColumns returns the same List
    // implementation produced by StringUtils.getStringCollection.
    // NOTE(review): this relies on a Hadoop implementation detail — confirm
    // when upgrading the Hadoop dependency.
    return (List<String>) VirtualColumn.
        removeVirtualColumns(StringUtils.getStringCollection(columns));
}
/**
* Returns a list of TypeInfo objects from a string which contains column
* types strings.
*
* @param types Comma separated list of types
* @return A list of TypeInfo objects.
*/
public static List<TypeInfo> getColumnTypes(final String types) {
    // Delegates to the serde parser for Hive type strings
    // (e.g. "int,string,struct<a:int>").
    return TypeInfoUtils.getTypeInfosFromTypeString(types);
}
/**
* Searchs for a fieldName into a parquet GroupType by ignoring string case.
* GroupType#getType(String fieldName) is case sensitive, so we use this method.
*
* @param groupType Group of field types where to search for fieldName
* @param fieldName The field what we are searching
* @return The Type object of the field found; null otherwise.
*/
/**
 * Case-insensitive lookup of a field inside a Parquet group type
 * (GroupType#getType(String) itself is case sensitive).
 *
 * @param groupType group whose fields are searched
 * @param fieldName name to match, ignoring case
 * @return the matching field's Type, or null when no field matches
 */
private static Type getFieldTypeIgnoreCase(GroupType groupType, String fieldName) {
    for (Type candidate : groupType.getFields()) {
        if (candidate.getName().equalsIgnoreCase(fieldName)) {
            return candidate;
        }
    }
    return null;
}
/**
* Searchs column names by name on a given Parquet schema, and returns its corresponded
* Parquet schema types.
*
* @param schema Group schema where to search for column names.
* @param colNames List of column names.
* @param colTypes List of column types.
* @return List of GroupType objects of projected columns.
*/
/**
 * Resolves each requested column against the Parquet group schema and
 * returns the corresponding projected field types. Columns absent from the
 * file schema are represented by an optional BINARY placeholder so the
 * reader still produces a slot for them.
 *
 * @param schema   group schema to search
 * @param colNames requested column names (parallel to colTypes)
 * @param colTypes requested column types (parallel to colNames)
 * @return projected field types, one per requested column
 */
private static List<Type> getProjectedGroupFields(GroupType schema, List<String> colNames, List<TypeInfo> colTypes) {
    List<Type> projected = new ArrayList<Type>();
    for (int i = 0; i < colNames.size(); i++) {
        String colName = colNames.get(i);
        TypeInfo colType = colTypes.get(i);
        Type fieldType = getFieldTypeIgnoreCase(schema, colName);
        if (fieldType == null) {
            // column missing from the file: emit a nullable BINARY placeholder
            projected.add(Types.optional(PrimitiveTypeName.BINARY).named(colName));
        } else {
            projected.add(getProjectedType(colType, fieldType));
        }
    }
    return projected;
}
// Projects a single Parquet field according to the requested Hive type.
// STRUCTs are rebuilt with only the requested sub-fields; LISTs of structs
// are rebuilt around their projected element type. All other categories
// (and any case not handled below) fall through and return the original
// field type unchanged.
private static Type getProjectedType(TypeInfo colType, Type fieldType) {
    switch (colType.getCategory()) {
        case STRUCT:
            // Recursively project the struct's members by name.
            List<Type> groupFields = getProjectedGroupFields(
                fieldType.asGroupType(),
                ((StructTypeInfo) colType).getAllStructFieldNames(),
                ((StructTypeInfo) colType).getAllStructFieldTypeInfos()
            );
            Type[] typesArray = groupFields.toArray(new Type[0]);
            return Types.buildGroup(fieldType.getRepetition())
                .addFields(typesArray)
                .named(fieldType.getName());
        case LIST:
            TypeInfo elemType = ((ListTypeInfo) colType).getListElementTypeInfo();
            if (elemType.getCategory() == ObjectInspector.Category.STRUCT) {
                // Parquet lists wrap their element in an inner group; only
                // non-primitive wrappers need re-projection.
                Type subFieldType = fieldType.asGroupType().getType(0);
                if (!subFieldType.isPrimitive()) {
                    String subFieldName = subFieldType.getName();
                    Text name = new Text(subFieldName);
                    if (name.equals(ParquetHiveSerDe.ARRAY) || name.equals(ParquetHiveSerDe.LIST)) {
                        // standard list wrapper ("array"/"list"): project the
                        // element nested one level deeper, keep REPEATED wrapper
                        subFieldType = new GroupType(Repetition.REPEATED, subFieldName,
                            getProjectedType(elemType, subFieldType.asGroupType().getType(0)));
                    } else {
                        subFieldType = getProjectedType(elemType, subFieldType);
                    }
                    return Types.buildGroup(Repetition.OPTIONAL).as(OriginalType.LIST).addFields(
                        subFieldType).named(fieldType.getName());
                }
            }
            // primitive-element lists fall through: no projection needed
            break;
        default:
            // intentionally empty: non-STRUCT/LIST types pass through unchanged
    }
    return fieldType;
}
/**
* Searches column names by name on a given Parquet message schema, and returns its projected
* Parquet schema types.
*
* @param schema Message type schema where to search for column names.
* @param colNames List of column names.
* @param colTypes List of column types.
* @return A MessageType object of projected columns.
*/
/**
 * Projects a Parquet message schema down to the named columns with the
 * given Hive types.
 *
 * @param schema   message type schema to project
 * @param colNames requested column names
 * @param colTypes requested column types
 * @return a MessageType containing only the projected columns
 */
public static MessageType getSchemaByName(MessageType schema, List<String> colNames, List<TypeInfo> colTypes) {
    List<Type> fields = getProjectedGroupFields(schema, colNames, colTypes);
    return Types.buildMessage()
        .addFields(fields.toArray(new Type[0]))
        .named(schema.getName());
}
/**
* Searches column names by indexes on a given Parquet file schema, and returns its corresponded
* Parquet schema types.
*
* @param schema Message schema where to search for column names.
* @param colNames List of column names.
* @param colIndexes List of column indexes.
* @return A MessageType object of the column names found.
*/
/**
 * Projects a Parquet file schema by column indexes. Indexes beyond the file
 * schema's field count produce a '_mask_'-prefixed BINARY placeholder;
 * indexes beyond the column-name list are skipped entirely.
 *
 * @param schema     message schema to project
 * @param colNames   all table column names
 * @param colIndexes indexes of the requested columns
 * @return a MessageType with one entry per usable index
 */
public static MessageType getSchemaByIndex(MessageType schema, List<String> colNames, List<Integer> colIndexes) {
    List<Type> projected = new ArrayList<Type>();
    for (Integer index : colIndexes) {
        if (index >= colNames.size()) {
            continue;
        }
        if (index < schema.getFieldCount()) {
            projected.add(schema.getType(index));
        } else {
            // '_mask_' prefix ensures no conflict with named columns in the
            // file schema
            projected.add(
                Types.optional(PrimitiveTypeName.BINARY).named("_mask_" + colNames.get(index)));
        }
    }
    return new MessageType(schema.getName(), projected);
}
/**
* Generate the projected schema from colIndexes and nested column paths. If the column is
* contained by colIndex, it will be added directly, otherwise it will build a group type which
* contains all required sub types using nestedColumnPaths.
* @param schema original schema
* @param colNames
* @param colIndexes the index of needed columns
* @param nestedColumnPaths the paths for nested columns
* @return
*/
/**
 * Generate the projected schema from colIndexes and nested column paths. If the column is
 * contained by colIndex, it will be added directly, otherwise it will build a group type which
 * contains all required sub types using nestedColumnPaths.
 * @param schema original schema
 * @param colNames all table column names
 * @param colIndexes the index of needed columns
 * @param nestedColumnPaths the paths for nested columns
 * @return the projected message schema
 */
public static MessageType getProjectedSchema(
    MessageType schema,
    List<String> colNames,
    List<Integer> colIndexes,
    Set<String> nestedColumnPaths) {
    List<Type> schemaTypes = new ArrayList<Type>();
    // map from lower-cased top-level column name to its pruned field tree
    Map<String, FieldNode> prunedCols = getPrunedNestedColumns(nestedColumnPaths);
    for (Integer i : colIndexes) {
        if (i < colNames.size()) {
            if (i < schema.getFieldCount()) {
                Type t = schema.getType(i);
                String tn = t.getName().toLowerCase();
                if (!prunedCols.containsKey(tn)) {
                    // no nested pruning requested for this column: keep as-is
                    schemaTypes.add(schema.getType(i));
                } else {
                    if (t.isPrimitive()) {
                        // For primitive type, add directly.
                        schemaTypes.add(t);
                    } else {
                        // For group type, we need to build the projected group type with required leaves
                        List<Type> g =
                            projectLeafTypes(Arrays.asList(t), Arrays.asList(prunedCols.get(tn)));
                        if (!g.isEmpty()) {
                            schemaTypes.addAll(g);
                        }
                    }
                }
            } else {
                //prefixing with '_mask_' to ensure no conflict with named
                //columns in the file schema
                schemaTypes.add(Types.optional(PrimitiveTypeName.BINARY).named("_mask_" + colNames.get(i)));
            }
        }
    }
    return new MessageType(schema.getName(), schemaTypes);
}
/**
* Get a valid zoneId from some metadata, otherwise return null.
*/
/**
 * Get a valid zoneId from some metadata, otherwise return null.
 *
 * @param metadata file metadata map (may be null)
 * @return the writer's ZoneId, or null when absent
 * @throws RuntimeException if the stored zone string cannot be parsed
 */
public static ZoneId getWriterTimeZoneId(Map<String, String> metadata) {
    if (metadata == null) {
        return null;
    }
    String zone = metadata.get(DataWritableWriteSupport.WRITER_TIMEZONE);
    if (zone == null) {
        return null;
    }
    try {
        return ZoneId.of(zone);
    } catch (DateTimeException e) {
        throw new RuntimeException("Can't parse writer time zone stored in file metadata", e);
    }
}
/**
* Return the columns which contains required nested attribute level
* E.g., given struct a:<x:int, y:int> while 'x' is required and 'y' is not, the method will return
* a pruned struct for 'a' which only contains the attribute 'x'
*
* @param nestedColPaths the paths for required nested attribute
* @return a map from the column to its selected nested column paths, of which the keys are all lower-cased.
*/
/**
 * Return the columns which contain a required nested attribute level.
 * E.g., given struct a:&lt;x:int, y:int&gt; where 'x' is required and 'y' is not,
 * the result maps 'a' to a pruned field tree containing only 'x'.
 *
 * @param nestedColPaths the paths for required nested attributes
 * @return map from lower-cased top-level column name to its pruned field tree
 */
private static Map<String, FieldNode> getPrunedNestedColumns(Set<String> nestedColPaths) {
    Map<String, FieldNode> pruned = new HashMap<>();
    for (String path : nestedColPaths) {
        String col = StringUtils.split(path, '.')[0].toLowerCase();
        // addNodeByPath(null, path) creates a fresh tree, so the absent and
        // already-present cases collapse into one call.
        pruned.put(col, NestedColumnFieldPruningUtils.addNodeByPath(pruned.get(col), path));
    }
    return pruned;
}
/**
 * Wraps the surviving child types into a copy of the original group, or returns
 * null when nothing was selected so the caller can drop the group entirely.
 */
private static GroupType buildProjectedGroupType(
    GroupType originalType,
    List<Type> types) {
  boolean nothingSelected = types == null || types.isEmpty();
  if (nothingSelected) {
    return null;
  }
  return new GroupType(originalType.getRepetition(), originalType.getName(), types);
}
/**
 * Recursively projects the given types down to the leaves requested by the field nodes.
 * Types whose (lower-cased) name is not requested are dropped; requested group types are
 * rebuilt containing only their requested children.
 *
 * @param types the candidate types at the current nesting level
 * @param nodes the requested field nodes at the current nesting level
 * @return the pruned types, possibly empty, never null
 */
private static List<Type> projectLeafTypes(
    List<Type> types,
    List<FieldNode> nodes) {
  List<Type> res = new ArrayList<>();
  if (nodes.isEmpty()) {
    return res;
  }
  // Index the requested nodes by lower-cased field name for case-insensitive lookup.
  Map<String, FieldNode> fieldMap = new HashMap<>();
  for (FieldNode n : nodes) {
    fieldMap.put(n.getFieldName().toLowerCase(), n);
  }
  for (Type type : types) {
    String tn = type.getName().toLowerCase();
    if (fieldMap.containsKey(tn)) {
      FieldNode f = fieldMap.get(tn);
      if (f.getNodes().isEmpty()) {
        // no child, no need for pruning
        res.add(type);
      } else {
        if (type instanceof GroupType) {
          GroupType groupType = type.asGroupType();
          List<Type> ts = projectLeafTypes(groupType.getFields(), f.getNodes());
          GroupType g = buildProjectedGroupType(groupType, ts);
          if (g != null) {
            res.add(g);
          }
        } else {
          // Nested children were requested on a primitive column -- impossible projection.
          throw new RuntimeException(
              "Primitive type " + f.getFieldName() + " should not have nested fields: " + f);
        }
      }
    }
  }
  return res;
}
/**
 * It creates the readContext for Parquet side with the requested schema during the init phase.
 *
 * @param context the Parquet init context carrying the job configuration and the file schema
 * @return the parquet ReadContext
 */
@Override
public org.apache.parquet.hadoop.api.ReadSupport.ReadContext init(InitContext context) {
  Configuration configuration = context.getConfiguration();
  MessageType fileSchema = context.getFileSchema();
  // Hive passes the table's column names (and types, below) through the job configuration.
  String columnNames = configuration.get(IOConstants.COLUMNS);
  Map<String, String> contextMetadata = new HashMap<String, String>();
  boolean indexAccess = configuration.getBoolean(PARQUET_COLUMN_INDEX_ACCESS, false);
  if (columnNames != null) {
    List<String> columnNamesList = getColumnNames(columnNames);
    String columnTypes = configuration.get(IOConstants.COLUMNS_TYPES);
    List<TypeInfo> columnTypesList = getColumnTypes(columnTypes);
    // Resolve the table schema against the file schema, either positionally
    // (indexAccess) or by column name.
    MessageType tableSchema =
        getRequestedSchemaForIndexAccess(indexAccess, columnNamesList, columnTypesList, fileSchema);
    // Stash the resolved schema and access mode so prepareForRead / record
    // conversion can retrieve them later from the read-support metadata.
    contextMetadata.put(HIVE_TABLE_AS_PARQUET_SCHEMA, tableSchema.toString());
    contextMetadata.put(PARQUET_COLUMN_INDEX_ACCESS, String.valueOf(indexAccess));
    this.hiveTypeInfo = TypeInfoFactory.getStructTypeInfo(columnNamesList, columnTypesList);
    // Request only the projected (and nested-pruned) columns from Parquet.
    return new ReadContext(getRequestedPrunedSchema(columnNamesList, tableSchema, configuration),
        contextMetadata);
  } else {
    // No column information available: fall back to reading the full file schema.
    contextMetadata.put(HIVE_TABLE_AS_PARQUET_SCHEMA, fileSchema.toString());
    return new ReadContext(fileSchema, contextMetadata);
  }
}
/**
 * Resolves the schema requested by the vectorized reader code path: the table schema
 * restricted to the wanted column indexes, or the full table schema when all columns
 * are read.
 *
 * @param indexAccess whether columns are resolved by position rather than by name
 * @param columnNamesList the Hive column names
 * @param columnTypesList the Hive column types
 * @param fileSchema the Parquet schema of the file being read
 * @param configuration the job configuration carrying the column projection
 * @return the requested Parquet schema
 */
public static MessageType getRequestedSchema(
    boolean indexAccess,
    List<String> columnNamesList,
    List<TypeInfo> columnTypesList,
    MessageType fileSchema,
    Configuration configuration) {
  MessageType tableSchema =
      getRequestedSchemaForIndexAccess(indexAccess, columnNamesList, columnTypesList, fileSchema);
  List<Integer> indexColumnsWanted = ColumnProjectionUtils.getReadColumnIDs(configuration);
  //TODO Duplicated code for init method since vectorization reader path doesn't support Nested
  // column pruning so far. See HIVE-15156
  boolean projectByIndex =
      !ColumnProjectionUtils.isReadAllColumns(configuration) && !indexColumnsWanted.isEmpty();
  if (!projectByIndex) {
    return tableSchema;
  }
  return DataWritableReadSupport
      .getSchemaByIndex(tableSchema, columnNamesList, indexColumnsWanted);
}
/**
 * Resolves the table schema against the file schema either by column name or,
 * when index access is enabled, by column position.
 */
private static MessageType getRequestedSchemaForIndexAccess(
    boolean indexAccess,
    List<String> columnNamesList,
    List<TypeInfo> columnTypesList,
    MessageType fileSchema) {
  if (!indexAccess) {
    return getSchemaByName(fileSchema, columnNamesList, columnTypesList);
  }
  // Index access: select columns positionally, 0 .. n-1.
  List<Integer> indexSequence = new ArrayList<Integer>();
  for (int position = 0; position < columnNamesList.size(); position++) {
    indexSequence.add(position);
  }
  return getSchemaByIndex(fileSchema, columnNamesList, indexSequence);
}
/**
 * Restricts the file schema to the projected columns and prunes unselected nested
 * fields; returns the file schema unchanged when all columns are read.
 */
private static MessageType getRequestedPrunedSchema(
    List<String> columnNamesList,
    MessageType fileSchema,
    Configuration configuration) {
  Set<String> groupPaths = ColumnProjectionUtils.getNestedColumnPaths(configuration);
  List<Integer> indexColumnsWanted = ColumnProjectionUtils.getReadColumnIDs(configuration);
  if (ColumnProjectionUtils.isReadAllColumns(configuration) || indexColumnsWanted.isEmpty()) {
    return fileSchema;
  }
  return getProjectedSchema(fileSchema, columnNamesList, indexColumnsWanted, groupPaths);
}
/**
 *
 * It creates the hive read support to interpret data from parquet to hive
 *
 * @param configuration // unused
 * @param keyValueMetaData the file footer's key/value metadata
 * @param fileSchema // unused
 * @param readContext containing the requested schema and the schema of the hive table
 * @return Record Materialize for Hive
 */
@Override
public RecordMaterializer<ArrayWritable> prepareForRead(final Configuration configuration,
    final Map<String, String> keyValueMetaData, final MessageType fileSchema,
    final org.apache.parquet.hadoop.api.ReadSupport.ReadContext readContext) {
  final Map<String, String> metadata = readContext.getReadSupportMetadata();
  // Metadata is populated by init(); a null here means init() never ran.
  if (metadata == null) {
    throw new IllegalStateException("ReadContext not initialized properly. " +
        "Don't know the Hive Schema.");
  }
  // Propagate the timestamp-conversion setting from the job configuration only if
  // it has not already been decided for this read context.
  String key = HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION.varname;
  if (!metadata.containsKey(key)) {
    metadata.put(key, String.valueOf(HiveConf.getBoolVar(
        configuration, HiveConf.ConfVars.HIVE_PARQUET_TIMESTAMP_SKIP_CONVERSION)));
  }
  // Copy the writer time zone from the file footer; if one is already present in the
  // read-support metadata it must agree with the footer's value.
  String writerTimezone = DataWritableWriteSupport.WRITER_TIMEZONE;
  if (!metadata.containsKey(writerTimezone)) {
    if (keyValueMetaData.containsKey(writerTimezone)) {
      metadata.put(writerTimezone, keyValueMetaData.get(writerTimezone));
    }
  } else if (!metadata.get(writerTimezone).equals(keyValueMetaData.get(writerTimezone))) {
    throw new IllegalStateException("Metadata contains a writer time zone that does not match "
        + "file footer's writer time zone.");
  }
  return new DataWritableRecordConverter(readContext.getRequestedSchema(), metadata, hiveTypeInfo);
}
}
| |
package de.mineformers.core.asm.util;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Maps;
import com.google.common.collect.Sets;
import net.minecraft.launchwrapper.Launch;
import net.minecraftforge.fml.relauncher.ReflectionHelper;
import org.objectweb.asm.Type;
import org.objectweb.asm.tree.ClassNode;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.objectweb.asm.Opcodes.*;
/**
 * Some information about a class, obtain via {@link ClassInfo#of(org.objectweb.asm.tree.ClassNode)}
 * Licensed under LGPL v3
 *
 * @author diesieben07
 */
public abstract class ClassInfo
{
    // Cached result of superclass(), resolved lazily on first access.
    private ClassInfo zuper;

    // limit subclasses to this package
    ClassInfo()
    {
    }

    /**
     * <p>Create a {@code ClassInfo} representing the given class.</p>
     *
     * @param clazz the Class
     * @return a ClassInfo
     */
    public static ClassInfo of(Class<?> clazz)
    {
        return new ClassInfoReflect(clazz);
    }

    /**
     * <p>Create a {@code ClassInfo} representing the given ClassNode.</p>
     *
     * @param clazz the ClassNode
     * @return a ClassInfo
     */
    public static ClassInfo of(ClassNode clazz)
    {
        return new ClassInfoASM(clazz);
    }

    /**
     * <p>Create a {@code ClassInfo} representing the given Type.</p>
     * <p>This method will try to avoid loading actual classes into the JVM, but will instead use the ASM library
     * to analyze the raw class bytes if possible.</p>
     *
     * @param type a Type representing the class to load, must not be a method type
     * @return a ClassInfo
     */
    public static ClassInfo of(Type type)
    {
        switch (type.getSort())
        {
            case Type.ARRAY:
            case Type.OBJECT:
                // Type.getClassName incorrectly returns something like "java.lang.Object[][]" instead of "[[Ljava.lang.Object"
                // so we have to convert the internal name (which is correct) manually
                return create(ASMUtils.binaryName(type.getInternalName()));
            case Type.METHOD:
                throw new IllegalArgumentException("Invalid Type!");
            default:
                // primitives
                return of(type.getClassName());
        }
    }

    /**
     * <p>Create a {@code ClassInfo} representing the given class.</p>
     * <p>This method will try to avoid loading actual classes into the JVM, but will instead use the ASM library
     * to analyze the raw class bytes if possible.</p>
     *
     * @param className the internal or binary name representing the class
     * @return a ClassInfo
     */
    public static ClassInfo of(String className)
    {
        return create(ASMUtils.binaryName(className));
    }

    /**
     * Resolves a binary class name to a ClassInfo. Primitives map straight to their
     * Class objects; array classes are force-loaded (cheap, does not load the element
     * type); anything else goes through {@link #ofObject(String)}.
     */
    static ClassInfo create(String className)
    {
        switch (className)
        {
            case "boolean":
                return of(boolean.class);
            case "byte":
                return of(byte.class);
            case "short":
                return of(short.class);
            case "int":
                return of(int.class);
            case "long":
                return of(long.class);
            case "float":
                return of(float.class);
            case "double":
                return of(double.class);
            case "char":
                return of(char.class);
            default:
                if (className.indexOf('[') >= 0)
                {
                    try
                    {
                        // array classes should always be accessible via Class.forName
                        // without loading the element-type class (Object[].class doesn't load Object.class)
                        return forceLoad(className);
                    }
                    catch (ReflectiveOperationException e)
                    {
                        // NOTE(review): failure to resolve an array class yields null;
                        // callers must be prepared for that.
                        return null;
                    }
                }
                else
                {
                    return ofObject(className);
                }
        }
    }

    // Cached reflective handle to ClassLoader.findLoadedClass, obtained via the
    // launch wrapper's ReflectionHelper.
    private static Method findLoadedClass =
        ReflectionHelper.findMethod(ClassLoader.class, Launch.classLoader, new String[] {"findLoadedClass"}, String.class);

    /**
     * Resolves a non-array, non-primitive class name, preferring already-loaded classes,
     * then raw class bytes via ASM, and only force-loading the class as a last resort.
     */
    private static ClassInfo ofObject(String className)
    {
        Class<?> clazz;
        // NOTE(review): setAccessible is re-invoked on every call; it could be done once
        // at field initialization — confirm before changing.
        findLoadedClass.setAccessible(true);
        // first, try to get the class if it's already loaded
        try
        {
            if ((clazz = ((Class<?>) findLoadedClass.invoke(Launch.classLoader, className))) != null)
            {
                return new ClassInfoReflect(clazz);
            }
            else if ((clazz = ((Class<?>) findLoadedClass.invoke(Launch.classLoader, SevenASMUtils.transformName(className)))) != null)
            {
                // also check under the (deobfuscation-)transformed name
                return new ClassInfoReflect(clazz);
            }
            else
            {
                try
                {
                    // the class is definitely not loaded, get its bytes
                    byte[] bytes = Launch.classLoader.getClassBytes(SevenASMUtils.untransformName(className));
                    // somehow we can't access the class bytes (happens for JDK classes for example)
                    // we try and load the class now
                    if (bytes == null)
                    {
                        return forceLoad(className);
                    }
                    else
                    {
                        // we found the bytes, lets use them
                        return new ClassInfoASM(ASMUtils.getThinClassNode(bytes));
                    }
                }
                catch (IOException e)
                {
                    // something went wrong getting the class bytes. try and load it
                    return forceLoad(className);
                }
            }
        }
        catch (ReflectiveOperationException e)
        {
            // NOTE(review): reflection failure is printed and swallowed; the caller
            // receives null instead of an exception.
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Loads the class via Class.forName, retrying under the transformed name if the
     * plain name cannot be found.
     */
    private static ClassInfo forceLoad(String className) throws ReflectiveOperationException
    {
        try
        {
            return of(Class.forName(className));
        }
        catch (ClassNotFoundException e)
        {
            return of(Class.forName(SevenASMUtils.transformName(className)));
        }
    }

    /**
     * <p>Get all interfaces directly implemented by this class (equivalent to {@link Class#getInterfaces()}.</p>
     *
     * @return the interfaces implemented by this class
     */
    public abstract List<String> interfaces();

    /**
     * <p>Get the internal name of the superclass of this class.</p>
     *
     * @return the superclass, or null if this ClassInfo is an interface or represents {@code java/lang/Object}.
     */
    public abstract String superName();

    /**
     * @return true if this class has a superclass (see {@link #superName()})
     */
    public boolean hasSuper()
    {
        return superName() != null;
    }

    /**
     * <p>Get the internal name of this class (e.g. {@code java/lang/Object}.</p>
     *
     * @return the internal name
     */
    public abstract String internalName();

    /**
     * <p>Get a {@code ClassInfo} representing the superclass of this class.</p>
     *
     * @return the superclass, or null if this class has no superclass (see {@link #superName()}
     */
    public ClassInfo superclass()
    {
        // cached after the first resolution
        if (zuper != null)
        {
            return zuper;
        }
        if (superName() == null)
        {
            return null;
        }
        return (zuper = of(superName()));
    }

    /**
     * <p>Determine if the given class can be safely casted to this class (equivalent to {@link java.lang.Class#isAssignableFrom(Class)}.</p>
     * <p>Like {@link #of(String)} this method will try to avoid loading actual classes.</p>
     *
     * @param child the class to check for
     * @return true if the given class can be casted to this class
     */
    public final boolean isAssignableFrom(ClassInfo child)
    {
        // double dispatch: lets the child's concrete subclass pick the cheapest check
        return child.callRightAssignableFrom(this);
    }

    // Second leg of the double dispatch; subclasses may override to route to a
    // specialized check.
    boolean callRightAssignableFrom(ClassInfo parent)
    {
        return parent.isAssignableFromNormal(this);
    }

    // Generic assignability check over internal names, falling back to the full
    // supers closure only when the cheap tests are inconclusive.
    boolean isAssignableFromNormal(ClassInfo child)
    {
        // some cheap tests first
        String childName = child.internalName();
        String myName = internalName();
        if (childName.equals("java/lang/Object"))
        {
            // Object is only assignable to itself
            return myName.equals("java/lang/Object");
        }
        if (myName.equals("java/lang/Object") // everything is assignable to Object
            || childName.equals(myName) // we are the same
            || myName.equals(child.superName()) // we are the superclass of child
            || child.interfaces().contains(myName))
        { // we are an interface that child implements
            return true;
        }
        // if we are a class no interface can be cast to us
        if (!isInterface() && child.isInterface())
        {
            return false;
        }
        // need to compute supers now
        return child.getSupers().contains(myName);
    }

    // Reflection-backed children use the same generic algorithm by default.
    boolean isAssignableFromReflect(ClassInfoReflect child)
    {
        return isAssignableFromNormal(child);
    }

    /**
     * <p>Get all superclasses in the hierarchy chain of this class as well as all interfaces this class
     * implements directly or indirectly.</p>
     * <p>In other words return all classes that this class can be safely casted to.</p>
     *
     * @return an immutable Set containing all superclasses and interfaces
     */
    public Set<String> getSupers()
    {
        return getSupers(this);
    }

    // NOTE(review): plain HashMap shared across callers without synchronization —
    // confirm all access happens on a single thread before relying on this cache.
    public static Map<String, Set<String>> superCache = Maps.newHashMap();

    public static Set<String> getSupers(ClassInfo classInfo)
    {
        // grab a local var in case of concurrency
        Map<String, Set<String>> superCacheLocal = superCache;
        // the field is initialized non-null; this check is defensive in case the
        // cache is cleared/nulled externally
        if (superCacheLocal != null)
        {
            Set<String> supers = superCacheLocal.get(classInfo.internalName());
            if (supers == null)
            {
                superCacheLocal.put(classInfo.internalName(), (supers = buildSupers(classInfo)));
            }
            return supers;
        }
        else
        {
            return buildSupers(classInfo);
        }
    }

    // Computes the transitive closure of superclasses and interfaces by walking up
    // the superclass chain and recursing into every interface.
    private static Set<String> buildSupers(ClassInfo classInfo)
    {
        Set<String> set = Sets.newHashSet();
        String superName = classInfo.superName();
        if (superName != null)
        {
            set.add(superName);
            set.addAll(classInfo.superclass().getSupers());
        }
        for (String iface : classInfo.interfaces())
        {
            if (set.add(iface))
            {
                set.addAll(ClassInfo.of(iface).getSupers());
            }
        }
        // use immutable set to reduce memory footprint and potentially increase performance
        // cannot use builder because we need the boolean return from set.add
        return ImmutableSet.copyOf(set);
    }

    /**
     * <p>Get the number of dimensions of this array class, or 0 if this ClassInfo does not represent an array class.</p>
     *
     * @return the number of dimensions
     */
    public abstract int getDimensions();

    /**
     * <p>Determine if this class is an array class (equivalent to {@link Class#isArray()}</p>
     *
     * @return true if this class is an array class
     */
    public boolean isArray()
    {
        return getDimensions() > 0;
    }

    /**
     * <p>Get the component type of this array class.</p>
     * <p>The component type of {@code int[][]} is {@code int[]}.</p>
     *
     * @return the component type
     * @throws java.lang.IllegalStateException if this class is not an array
     */
    public abstract Type getComponentType();

    /**
     * <p>Get the root component type of this array class.</p>
     * <p>The root component type of {@code int[][]} is {@code int}.</p>
     *
     * @return the root component type
     * @throws java.lang.IllegalStateException if this class is not an array
     */
    public Type getRootComponentType()
    {
        Type t = getComponentType();
        if (t.getSort() == Type.ARRAY)
        {
            return t.getElementType();
        }
        else
        {
            return t;
        }
    }

    /**
     * <p>Determine if this class is an interface.</p>
     *
     * @return true if this class is an interface
     */
    public boolean isInterface()
    {
        return hasModifier(ACC_INTERFACE);
    }

    /**
     * <p>Determine if this class is abstract</p>
     *
     * @return true if this class is abstract
     */
    public boolean isAbstract()
    {
        return hasModifier(ACC_ABSTRACT);
    }

    /**
     * <p>Determine if this class is an annotation.</p>
     *
     * @return true if this class is an annotation
     */
    public boolean isAnnotation()
    {
        return hasModifier(ACC_ANNOTATION);
    }

    /**
     * <p>Determine if this ClassInfo represents an enum class (equivalent to {@link Class#isEnum()}.</p>
     * <p>Note: Like the JDK method this method will return false for the classes generated for specialized enum constants.
     * Use {@code hasModifier(ACC_ENUM)} to include those explicitly.</p>
     *
     * @return true if this ClassInfo represents an enum class
     */
    public boolean isEnum()
    {
        // constant-specific subclasses carry ACC_ENUM but extend the enum, not java/lang/Enum
        return hasModifier(ACC_ENUM) && superName().equals("java/lang/Enum");
    }

    /**
     * <p>Get all Java modifiers present on this element.</p>
     *
     * @return the modifiers
     * @see java.lang.reflect.Modifier
     */
    public abstract int modifiers();

    /**
     * <p>Determine if the given Java language modifier is set on this element.</p>
     *
     * @param mod the modifier to check
     * @return true if the given modifier is set
     * @see java.lang.reflect.Modifier
     */
    public boolean hasModifier(int mod)
    {
        return (modifiers() & mod) == mod;
    }

    /**
     * <p>Determine if this element has public visibility.</p>
     *
     * @return true if this element has public visibility
     */
    public boolean isPublic()
    {
        return hasModifier(ACC_PUBLIC);
    }

    /**
     * <p>Determine if this element has protected visibility.</p>
     *
     * @return true if this element has protected visibility
     */
    public boolean isProtected()
    {
        return hasModifier(ACC_PROTECTED);
    }

    /**
     * <p>Determine if this element has private visibility.</p>
     *
     * @return true if this element has private visibility
     */
    public boolean isPrivate()
    {
        return hasModifier(ACC_PRIVATE);
    }

    /**
     * <p>Determine if this element has package-private (default) visibility.</p>
     *
     * @return true if this element has package-private (default) visibility
     */
    public boolean isPackagePrivate()
    {
        return !isPrivate() && !isPublic() && !isProtected();
    }

    /**
     * <p>Determine if this element is final.</p>
     *
     * @return true if this element is final
     */
    public boolean isFinal()
    {
        return hasModifier(ACC_FINAL);
    }

    /**
     * <p>Determine if this element is a synthetic element generated by the compiler.</p>
     *
     * @return true if this element is synthetic
     */
    public boolean isSynthetic()
    {
        return hasModifier(ACC_SYNTHETIC);
    }

    @Override
    public boolean equals(Object o)
    {
        // identity is defined solely by the internal name
        return this == o || o instanceof ClassInfo && internalName().equals(((ClassInfo) o).internalName());
    }

    @Override
    public int hashCode()
    {
        return internalName().hashCode();
    }
}
| |
/*
* Copyright 2015 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.hawkular.inventory.base.spi;
import java.io.InputStream;
import java.util.Iterator;
import java.util.Map;
import java.util.Set;
import java.util.function.Function;
import org.hawkular.inventory.api.Relationships;
import org.hawkular.inventory.api.model.AbstractElement;
import org.hawkular.inventory.api.model.Blueprint;
import org.hawkular.inventory.api.model.CanonicalPath;
import org.hawkular.inventory.api.model.Entity;
import org.hawkular.inventory.api.model.RelativePath;
import org.hawkular.inventory.api.model.StructuredData;
import org.hawkular.inventory.api.paging.Page;
import org.hawkular.inventory.api.paging.Pager;
import org.hawkular.inventory.base.Query;
/**
* The backend for the base inventory that does all the "low level" stuff like querying the actual inventory store,
* its modifications, etc.
*
* @param <E> the type of the backend-specific objects representing the inventory entities and relationships. It is
* assumed that the backend is "untyped" and stores all different inventory entities using this single type.
* @author Lukas Krejci
* @since 0.1.0
*/
public interface InventoryBackend<E> extends AutoCloseable {

    /**
     * Starts a transaction in the backend.
     *
     * @param mutating whether there will be calls mutating the data or not
     * @return the newly started transaction
     */
    Transaction startTransaction(boolean mutating);

    /**
     * Tries to find an element at given canonical path.
     *
     * @param element the canonical path of the element to find
     * @return the element
     * @throws ElementNotFoundException if the element is not found
     */
    E find(CanonicalPath element) throws ElementNotFoundException;

    /**
     * Translates the query to the backend-specific representation and runs it, returning a correct page of results
     * as prescribed by the provided pager object.
     *
     * @param startingPoint the element which should be the starting point of the traversal
     * @param query the query to perform
     * @param pager pager to limit the number of results with
     * @return the page of results, possibly empty, never null
     */
    Page<E> traverse(E startingPoint, Query query, Pager pager);

    /**
     * Translates the query to the backend-specific representation and runs it, returning a correct page of results
     * as prescribed by the provided pager object.
     *
     * <p>The difference between this method and {@link #traverse(Object, Query, Pager)} is that this method performs
     * a graph-wide query, while traverse starts from a single element.
     *
     * @param query the query to execute
     * @param pager the page to return
     * @return a page of results corresponding to the parameters, possibly empty, never null.
     */
    Page<E> query(Query query, Pager pager);

    /**
     * A variant of the {@link #query(Query, Pager)} method which in addition to querying also converts the results
     * using the provided conversion function and, more importantly, filters the results using the provided (possibly
     * null) filter function PRIOR TO paging is applied.
     *
     * <p>Because the total count and the paging is dependent on the filtering it needs to be applied during the
     * querying process and not only after the fact by the caller.
     *
     * @param query the query to perform
     * @param pager the page to retrieve
     * @param conversion a conversion function to apply on the elements, never null
     * @param filter possibly null filter to filter the results with
     * @param <T> the type of the returned elements
     * @return the page of results according to the supplied parameters
     */
    <T> Page<T> query(Query query, Pager pager, Function<E, T> conversion, Function<T, Boolean> filter);

    /**
     * Going from the starting point, this will return an iterator over all elements that are connected to the starting
     * point using relationships with provided name and recursively down to the elements connected in the same way to
     * them.
     *
     * @param startingPoint the starting element
     * @param direction any of the valid directions including
     * {@link Relationships.Direction#both}.
     * @param relationshipNames the names of the relationships to follow when composing the transitive closure
     * @return an iterator over the transitive closure, may be "lazy" and evaluate the closure on demand.
     */
    Iterator<E> getTransitiveClosureOver(E startingPoint, Relationships.Direction direction,
        String... relationshipNames);

    /**
     * Checks whether there exists any relationship in given direction relative to the given entity with given name.
     *
     * @param entity the entity in question
     * @param direction the direction the relationship should have relative to the entity (
     * {@link org.hawkular.inventory.api.Relationships.Direction#both} means "any" in this
     * context).
     * @param relationshipName the name of the relationship to seek
     * @return true if there is such relationship, false otherwise
     * @see #getRelationships(Object, Relationships.Direction, String...)
     */
    boolean hasRelationship(E entity, Relationships.Direction direction, String relationshipName);

    /**
     * Checks whether there exists a relationship with given name between the provided entities.
     *
     * @param source the source of the relationship
     * @param target the target of the relationship
     * @param relationshipName the name of the relationship
     * @return true, if such relationship exists, false otherwise
     */
    boolean hasRelationship(E source, E target, String relationshipName);

    /**
     * Similar to {@link #hasRelationship(Object, Relationships.Direction, String)} but this method actually returns
     * the relationship objects.
     *
     * @param entity the entity in question
     * @param direction the direction in which the relationships should be going
     * @param names the names of the relationships to return
     * @return the possibly empty set of the relationships, never null
     * @see #hasRelationship(Object, Relationships.Direction, String)
     */
    Set<E> getRelationships(E entity, Relationships.Direction direction, String... names);

    /**
     * Get a single relationship with the provided name between the source and target.
     *
     * @param source the source of the relationship
     * @param target the target of the relationship
     * @param relationshipName the name of the relationship
     * @return the relationship
     * @throws ElementNotFoundException if the relationship is not found
     * @throws IllegalArgumentException if source or target are not entities or relationship name is null
     */
    E getRelationship(E source, E target, String relationshipName) throws ElementNotFoundException;

    /**
     * @param relationship the relationship in question
     * @return the source of the relationship
     */
    E getRelationshipSource(E relationship);

    /**
     * @param relationship the relationship in question
     * @return the target of the relationship
     */
    E getRelationshipTarget(E relationship);

    /**
     * @param relationship the relationship in question
     * @return the name of the relationship
     */
    String extractRelationshipName(E relationship);

    /**
     * The element type is opaque from the point of the caller. This method provides the caller with the ability to
     * extract the ID of the entity represented by the object.
     *
     * @param entityRepresentation the object representing an element
     * @return the ID
     */
    String extractId(E entityRepresentation);

    /**
     * Similar to {@link #extractId(Object)} but extracts the type of element from the representation.
     *
     * @param entityRepresentation the representation object.
     * @return the type of the object represented
     */
    Class<?> extractType(E entityRepresentation);

    /**
     * Each element (including relationships) stores the canonical path to it. This will extract that value from the
     * entity representation.
     *
     * @param entityRepresentation the representation object
     * @return the extracted canonical path
     */
    CanonicalPath extractCanonicalPath(E entityRepresentation);

    /**
     * Converts the provided representation object to an inventory element of provided type.
     *
     * <p>This must support all the concrete subclasses of {@link AbstractElement}, {@link StructuredData} <b>and</b>
     * {@link ShallowStructuredData}.
     *
     * @param entityRepresentation the object representing the element
     * @param entityType the desired type of the element
     * @param <T> the desired type of the element
     * @return the converted inventory element
     * @throws ClassCastException if the representation object doesn't correspond to the provided type
     */
    <T> T convert(E entityRepresentation, Class<T> entityType);

    /**
     * Given the representation of the data entity, this will return the representation of a structured data element
     * on the given path "inside" the data entity.
     *
     * @param dataEntityRepresentation the representation of the {@link org.hawkular.inventory.api.model.DataEntity}
     *                                 instance
     * @param dataPath the path in the data to descend to.
     * @return the representation of the structured data element on the given path
     * @see org.hawkular.inventory.api.Datas.Single#data(RelativePath)
     */
    E descendToData(E dataEntityRepresentation, RelativePath dataPath);

    /**
     * Creates a new relationship from source to target with given name and properties.
     *
     * @param sourceEntity the source of the relationship
     * @param targetEntity the target of the relationship
     * @param name the name of the relationship
     * @param properties the properties of the relationship, may be null
     * @return the representation of the newly created relationship
     * @throws IllegalArgumentException if source or target are relationships themselves or if name is null
     */
    E relate(E sourceEntity, E targetEntity, String name, Map<String, Object> properties);

    /**
     * Persists a new entity with the provided assigned path.
     *
     * @param path the canonical path to the entity
     * @param blueprint the blueprint of the entity
     * @return the representation object of the newly created entity
     */
    E persist(CanonicalPath path, Blueprint blueprint);

    /**
     * Persists the structured data and returns a reference to it. It is the responsibility of the caller to wire it up
     * to some other entity by some relationship.
     *
     * @param structuredData the structured data to persist
     * @return the representation of the newly persisted structured data
     */
    E persist(StructuredData structuredData);

    /**
     * Updates given entity with the data provided in the update object.
     *
     * @param entity the entity to update
     * @param update the update object
     * @throws IllegalArgumentException if the entity is of different type than the update
     */
    void update(E entity, AbstractElement.Update update);

    /**
     * Simply deletes the entity from the storage.
     *
     * @param entity the entity to delete
     */
    void delete(E entity);

    /**
     * Deletes the structured data represented by the provided object.
     *
     * @param dataRepresentation the backend-specific object representing the structured data to delete
     */
    void deleteStructuredData(E dataRepresentation);

    /**
     * Commits the transaction.
     *
     * @param transaction the transaction to commit
     * @throws CommitFailureException if committing the transaction fails
     */
    void commit(Transaction transaction) throws CommitFailureException;

    /**
     * Rolls back the transaction.
     * @param transaction the transaction to roll back
     */
    void rollback(Transaction transaction);

    /**
     * See the javadoc in {@link org.hawkular.inventory.api.Inventory#getGraphSON(String)}
     */
    InputStream getGraphSON(String tenantId);

    /**
     * Typed variant of {@link #getTransitiveClosureOver(Object, Relationships.Direction, String...)} that starts
     * from a canonical path and converts the results to the given entity class.
     */
    <T extends Entity<?, ?>> Iterator<T> getTransitiveClosureOver(CanonicalPath startingPoint,
                                                                  Relationships.Direction direction, Class<T> clazz,
                                                                  String... relationshipNames);

    /**
     * Represents a transaction being performed. Implementations of the {@link InventoryBackend} interface are
     * encouraged to inherit from this class and add additional information to it. The base inventory implementation
     * only needs and provides the information stored in this class though.
     */
    class Transaction {
        // whether this transaction was opened for mutating operations
        private final boolean mutating;

        public Transaction(boolean mutating) {
            this.mutating = mutating;
        }

        public boolean isMutating() {
            return mutating;
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.activemq.artemis.jms.client;
import javax.jms.IllegalStateException;
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.Queue;
import javax.jms.QueueReceiver;
import javax.jms.Session;
import javax.jms.Topic;
import javax.jms.TopicSubscriber;
import org.apache.activemq.artemis.api.core.ActiveMQException;
import org.apache.activemq.artemis.api.core.SimpleString;
import org.apache.activemq.artemis.api.core.client.ClientConsumer;
import org.apache.activemq.artemis.api.core.client.ClientMessage;
import org.apache.activemq.artemis.api.core.client.MessageHandler;
import org.apache.activemq.artemis.api.jms.ActiveMQJMSConstants;
/**
 * ActiveMQ Artemis implementation of a JMS MessageConsumer.
 * <p>
 * Adapts a core {@link ClientConsumer} to the JMS {@link QueueReceiver} and
 * {@link TopicSubscriber} contracts.
 */
public final class ActiveMQMessageConsumer implements QueueReceiver, TopicSubscriber {

   // Core consumer that all JMS operations delegate to.
   private final ClientConsumer consumer;

   // Application-supplied JMS listener; null when none is set.
   private MessageListener listener;

   // Core-level handler wrapping the JMS listener; null when no listener is set.
   private MessageHandler coreListener;

   private final ActiveMQConnection connection;

   private final ActiveMQSession session;

   // Acknowledge mode captured from the session at construction time.
   private final int ackMode;

   private final boolean noLocal;

   private final ActiveMQDestination destination;

   // JMS message selector, or null when none was specified.
   private final String selector;

   // Queue deleted on close() (non-durable subscriptions); null when nothing needs deleting.
   private final SimpleString autoDeleteQueueName;

   // Constructors --------------------------------------------------

   protected ActiveMQMessageConsumer(final ActiveMQConnection connection,
                                     final ActiveMQSession session,
                                     final ClientConsumer consumer,
                                     final boolean noLocal,
                                     final ActiveMQDestination destination,
                                     final String selector,
                                     final SimpleString autoDeleteQueueName) throws JMSException {
      this.connection = connection;
      this.session = session;
      this.consumer = consumer;
      ackMode = session.getAcknowledgeMode();
      this.noLocal = noLocal;
      this.destination = destination;
      this.selector = selector;
      this.autoDeleteQueueName = autoDeleteQueueName;
   }

   // MessageConsumer implementation --------------------------------

   @Override
   public String getMessageSelector() throws JMSException {
      checkClosed();
      return selector;
   }

   @Override
   public MessageListener getMessageListener() throws JMSException {
      checkClosed();
      return listener;
   }

   @Override
   public void setMessageListener(final MessageListener listener) throws JMSException {
      this.listener = listener;
      // Wrap the JMS listener in a core MessageHandler; a null listener clears the handler.
      coreListener = listener == null ? null : new JMSMessageListenerWrapper(connection, session, consumer, listener, ackMode);
      try {
         consumer.setMessageHandler(coreListener);
      }
      catch (ActiveMQException e) {
         throw JMSExceptionHelper.convertFromActiveMQException(e);
      }
   }

   @Override
   public Message receive() throws JMSException {
      // timeout 0 == block until a message arrives
      return getMessage(0, false);
   }

   @Override
   public Message receive(final long timeout) throws JMSException {
      return getMessage(timeout, false);
   }

   @Override
   public Message receiveNoWait() throws JMSException {
      return getMessage(0, true);
   }

   @Override
   public void close() throws JMSException {
      try {
         consumer.close();
         if (autoDeleteQueueName != null) {
            // If non durable subscriber need to delete subscription too
            session.deleteQueue(autoDeleteQueueName);
         }
         // Deregister from the session only after the core consumer was closed.
         session.removeConsumer(this);
      }
      catch (ActiveMQException e) {
         throw JMSExceptionHelper.convertFromActiveMQException(e);
      }
   }

   // QueueReceiver implementation ----------------------------------

   @Override
   public Queue getQueue() throws JMSException {
      checkClosed();
      return (Queue) destination;
   }

   // TopicSubscriber implementation --------------------------------

   @Override
   public Topic getTopic() throws JMSException {
      checkClosed();
      return (Topic) destination;
   }

   @Override
   public boolean getNoLocal() throws JMSException {
      checkClosed();
      return noLocal;
   }

   // Public --------------------------------------------------------

   @Override
   public String toString() {
      return "ActiveMQMessageConsumer[" + consumer + "]";
   }

   public boolean isClosed() {
      return consumer.isClosed();
   }

   // Package protected ---------------------------------------------

   // Protected -----------------------------------------------------

   // Private -------------------------------------------------------

   /**
    * @throws IllegalStateException (javax.jms) when this consumer or its underlying session is closed
    */
   private void checkClosed() throws JMSException {
      if (consumer.isClosed() || session.getCoreSession().isClosed()) {
         throw new IllegalStateException("Consumer is closed");
      }
   }

   /**
    * Receives one message from the core consumer and converts it to a JMS message.
    *
    * @param timeout wait time in milliseconds (ignored when {@code noWait} is true)
    * @param noWait  when true, uses receiveImmediate() and returns null if nothing is buffered
    * @return the converted JMS message, or null when nothing was received
    */
   private ActiveMQMessage getMessage(final long timeout, final boolean noWait) throws JMSException {
      try {
         ClientMessage coreMessage;
         if (noWait) {
            coreMessage = consumer.receiveImmediate();
         }
         else {
            coreMessage = consumer.receive(timeout);
         }
         ActiveMQMessage jmsMsg = null;
         if (coreMessage != null) {
            // CLIENT_ACKNOWLEDGE / INDIVIDUAL_ACKNOWLEDGE need the core session so the
            // application can acknowledge later.
            boolean needSession = ackMode == Session.CLIENT_ACKNOWLEDGE || ackMode == ActiveMQJMSConstants.INDIVIDUAL_ACKNOWLEDGE;
            jmsMsg = ActiveMQMessage.createMessage(coreMessage, needSession ? session.getCoreSession() : null);
            jmsMsg.doBeforeReceive();
            // We do the ack after doBeforeReceive, as in the case of large messages, this may fail so we don't want messages redelivered
            // https://issues.jboss.org/browse/JBPAPP-6110
            if (session.getAcknowledgeMode() == ActiveMQJMSConstants.INDIVIDUAL_ACKNOWLEDGE) {
               jmsMsg.setIndividualAcknowledge();
            }
            else {
               coreMessage.acknowledge();
            }
         }
         return jmsMsg;
      }
      catch (ActiveMQException e) {
         throw JMSExceptionHelper.convertFromActiveMQException(e);
      }
   }

   // Inner classes -------------------------------------------------

}
| |
package io.quarkus.maven;
import static java.util.function.Predicate.not;
import static org.twdata.maven.mojoexecutor.MojoExecutor.artifactId;
import static org.twdata.maven.mojoexecutor.MojoExecutor.configuration;
import static org.twdata.maven.mojoexecutor.MojoExecutor.executeMojo;
import static org.twdata.maven.mojoexecutor.MojoExecutor.executionEnvironment;
import static org.twdata.maven.mojoexecutor.MojoExecutor.goal;
import static org.twdata.maven.mojoexecutor.MojoExecutor.groupId;
import static org.twdata.maven.mojoexecutor.MojoExecutor.plugin;
import static org.twdata.maven.mojoexecutor.MojoExecutor.version;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Properties;
import java.util.Set;
import java.util.function.Consumer;
import java.util.stream.Collectors;
import org.aesh.readline.terminal.impl.ExecPty;
import org.aesh.readline.terminal.impl.Pty;
import org.aesh.terminal.Attributes;
import org.aesh.terminal.utils.ANSI;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.model.BuildBase;
import org.apache.maven.model.Dependency;
import org.apache.maven.model.Plugin;
import org.apache.maven.model.PluginExecution;
import org.apache.maven.model.Profile;
import org.apache.maven.model.Resource;
import org.apache.maven.plugin.AbstractMojo;
import org.apache.maven.plugin.BuildPluginManager;
import org.apache.maven.plugin.MojoExecutionException;
import org.apache.maven.plugin.MojoFailureException;
import org.apache.maven.plugin.descriptor.MojoDescriptor;
import org.apache.maven.plugin.descriptor.PluginDescriptor;
import org.apache.maven.plugin.logging.Log;
import org.apache.maven.plugins.annotations.Component;
import org.apache.maven.plugins.annotations.LifecyclePhase;
import org.apache.maven.plugins.annotations.Mojo;
import org.apache.maven.plugins.annotations.Parameter;
import org.apache.maven.plugins.annotations.ResolutionScope;
import org.apache.maven.project.MavenProject;
import org.apache.maven.shared.utils.cli.CommandLineUtils;
import org.apache.maven.toolchain.Toolchain;
import org.apache.maven.toolchain.ToolchainManager;
import org.codehaus.plexus.util.xml.Xpp3Dom;
import org.eclipse.aether.RepositorySystem;
import org.eclipse.aether.RepositorySystemSession;
import org.eclipse.aether.artifact.DefaultArtifact;
import org.eclipse.aether.collection.CollectRequest;
import org.eclipse.aether.impl.RemoteRepositoryManager;
import org.eclipse.aether.repository.RemoteRepository;
import org.eclipse.aether.repository.WorkspaceReader;
import org.eclipse.aether.resolution.ArtifactRequest;
import org.eclipse.aether.resolution.ArtifactResolutionException;
import org.eclipse.aether.resolution.ArtifactResult;
import org.eclipse.aether.resolution.DependencyRequest;
import org.eclipse.aether.resolution.DependencyResolutionException;
import org.eclipse.aether.resolution.DependencyResult;
import org.eclipse.aether.util.artifact.JavaScopes;
import org.fusesource.jansi.internal.Kernel32;
import org.fusesource.jansi.internal.WindowsSupport;
import io.quarkus.bootstrap.BootstrapConstants;
import io.quarkus.bootstrap.app.QuarkusBootstrap;
import io.quarkus.bootstrap.devmode.DependenciesFilter;
import io.quarkus.bootstrap.model.ApplicationModel;
import io.quarkus.bootstrap.model.PathsCollection;
import io.quarkus.bootstrap.resolver.BootstrapAppModelResolver;
import io.quarkus.bootstrap.resolver.maven.MavenArtifactResolver;
import io.quarkus.bootstrap.resolver.maven.options.BootstrapMavenOptions;
import io.quarkus.bootstrap.util.BootstrapUtils;
import io.quarkus.bootstrap.workspace.ArtifactSources;
import io.quarkus.bootstrap.workspace.SourceDir;
import io.quarkus.deployment.dev.DevModeContext;
import io.quarkus.deployment.dev.DevModeMain;
import io.quarkus.deployment.dev.QuarkusDevModeLauncher;
import io.quarkus.maven.MavenDevModeLauncher.Builder;
import io.quarkus.maven.components.MavenVersionEnforcer;
import io.quarkus.maven.dependency.ArtifactKey;
import io.quarkus.maven.dependency.GACT;
import io.quarkus.maven.dependency.GACTV;
import io.quarkus.maven.dependency.ResolvedDependency;
import io.quarkus.paths.PathList;
import io.quarkus.runtime.LaunchMode;
/**
* The dev mojo, that runs a quarkus app in a forked process. A background compilation process is launched and any changes are
* automatically reflected in your running application.
* <p>
* You can use this dev mode in a remote container environment with {@code remote-dev}.
*/
@Mojo(name = "dev", defaultPhase = LifecyclePhase.PREPARE_PACKAGE, requiresDependencyResolution = ResolutionScope.TEST, threadSafe = true)
public class DevMojo extends AbstractMojo {
private static final String EXT_PROPERTIES_PATH = "META-INF/quarkus-extension.properties";
private static final String KOTLIN_MAVEN_PLUGIN_GA = "org.jetbrains.kotlin:kotlin-maven-plugin";
/**
* running any one of these phases means the compile phase will have been run, if these have
* not been run we manually run compile.
*/
private static final List<String> POST_COMPILE_PHASES = List.of(
"compile",
"process-classes",
"generate-test-sources",
"process-test-sources",
"generate-test-resources",
"process-test-resources",
"test-compile",
"process-test-classes",
"test",
"prepare-package",
"package",
"pre-integration-test",
"integration-test",
"post-integration-test",
"verify",
"install",
"deploy");
/**
* running any one of these phases means the test-compile phase will have been run, if these have
* not been run we manually run test-compile
*/
private static final List<String> POST_TEST_COMPILE_PHASES = List.of(
"test-compile",
"process-test-classes",
"test",
"prepare-package",
"package",
"pre-integration-test",
"integration-test",
"post-integration-test",
"verify",
"install",
"deploy");
private static final String QUARKUS_GENERATE_CODE_GOAL = "generate-code";
private static final String QUARKUS_GENERATE_CODE_TESTS_GOAL = "generate-code-tests";
private static final String ORG_APACHE_MAVEN_PLUGINS = "org.apache.maven.plugins";
private static final String MAVEN_COMPILER_PLUGIN = "maven-compiler-plugin";
private static final String MAVEN_RESOURCES_PLUGIN = "maven-resources-plugin";
private static final String MAVEN_TOOLCHAINS_PLUGIN = "maven-toolchains-plugin";
private static final String ORG_JETBRAINS_KOTLIN = "org.jetbrains.kotlin";
private static final String KOTLIN_MAVEN_PLUGIN = "kotlin-maven-plugin";
private static final String ORG_JBOSS_JANDEX = "org.jboss.jandex";
private static final String JANDEX_MAVEN_PLUGIN = "jandex-maven-plugin";
/**
* The directory for compiled classes.
*/
@Parameter(readonly = true, required = true, defaultValue = "${project.build.outputDirectory}")
private File outputDirectory;
@Parameter(defaultValue = "${project}", readonly = true, required = true)
protected MavenProject project;
/**
* If this server should be started in debug mode. The default is to start in debug mode and listen on
* port 5005. Whether or not the JVM is suspended waiting for a debugger to be attached,
* depends on the value of {@link #suspend}.
* <p>
* {@code debug} supports the following options:
* <table>
* <tr>
* <td><b>Value</b></td>
* <td>Effect</td>
* </tr>
* <tr>
* <td><b>false</b></td>
* <td>The JVM is not started in debug mode</td>
* </tr>
* <tr>
* <td><b>true</b></td>
* <td>The JVM is started in debug mode and will be listening on {@code debugHost}:{@code debugPort}</td>
* </tr>
* <tr>
* <td><b>client</b></td>
* <td>The JVM is started in client mode, and will attempt to connect to {@code debugHost}:{@code debugPort}</td>
* </tr>
* <tr>
* <td><b>{port}</b></td>
* <td>The JVM is started in debug mode and will be listening on {@code debugHost}:{port}.</td>
* </tr>
* </table>
* By default, {@code debugHost} has the value "localhost", and {@code debugPort} is 5005.
*/
@Parameter(defaultValue = "${debug}")
private String debug;
/**
* Whether or not the JVM launch, in debug mode, should be suspended. This parameter is only
* relevant when the JVM is launched in {@link #debug debug mode}. This parameter supports the
* following values (all the allowed values are case insensitive):
* <table>
* <tr>
* <th>Value</th>
* <th>Effect</th>
* </tr>
* <tr>
* <td>y or true</td>
* <td>The debug mode JVM launch is suspended</td>
* </tr>
* <tr>
* <td>n or false</td>
* <td>The debug mode JVM is started without suspending</td>
* </tr>
* </table>
*/
@Parameter(defaultValue = "${suspend}")
private String suspend;
@Parameter(defaultValue = "${debugHost}")
private String debugHost;
@Parameter(defaultValue = "${debugPort}")
private String debugPort;
@Parameter(defaultValue = "${project.build.directory}")
private File buildDir;
@Parameter(defaultValue = "${project.build.sourceDirectory}")
private File sourceDir;
@Parameter
private File workingDir;
@Parameter(defaultValue = "${jvm.args}")
private String jvmArgs;
@Parameter(defaultValue = "${quarkus.args}")
private String argsString;
@Parameter
private Map<String, String> environmentVariables = Collections.emptyMap();
@Parameter
private Map<String, String> systemProperties = Collections.emptyMap();
@Parameter(defaultValue = "${session}")
private MavenSession session;
@Parameter(defaultValue = "TRUE")
private boolean deleteDevJar;
@Component
private MavenVersionEnforcer mavenVersionEnforcer;
@Component
private RepositorySystem repoSystem;
@Component
RemoteRepositoryManager remoteRepositoryManager;
@Parameter(defaultValue = "${repositorySystemSession}", readonly = true)
private RepositorySystemSession repoSession;
@Parameter(defaultValue = "${project.remoteProjectRepositories}", readonly = true, required = true)
private List<RemoteRepository> repos;
@Parameter(defaultValue = "${project.remotePluginRepositories}", readonly = true, required = true)
private List<RemoteRepository> pluginRepos;
/**
* This value should be set to true when some generated bytecode is erroneous,
* causing the JVM to crash when the verify:none option is set (which is on by default).
*/
@Parameter(defaultValue = "${preventnoverify}")
private boolean preventnoverify = false;
/**
* Whether changes in the projects that appear to be dependencies of the project containing the application to be launched
* should trigger hot-reload. By default they do.
*/
@Parameter(defaultValue = "${noDeps}")
private boolean noDeps = false;
/**
* Additional parameters to pass to javac when recompiling changed
* source files.
*/
@Parameter
private List<String> compilerArgs;
/**
* The --release argument to javac.
*/
@Parameter(defaultValue = "${maven.compiler.release}")
private String release;
/**
* The -source argument to javac.
*/
@Parameter(defaultValue = "${maven.compiler.source}")
private String source;
/**
* The -target argument to javac.
*/
@Parameter(defaultValue = "${maven.compiler.target}")
private String target;
/**
* Whether or not to enforce the quarkus-maven-plugin build goal to be configured.
* By default, a missing build goal is considered an inconsistency (although the build goal is not <i>required</i>
* technically).
* In this case a warning will be logged and the application will not be started.
*/
@Parameter(defaultValue = "${quarkus.enforceBuildGoal}")
private boolean enforceBuildGoal = true;
@Component
private WorkspaceReader wsReader;
@Component
private BuildPluginManager pluginManager;
@Component
private ToolchainManager toolchainManager;
private Map<GACT, Plugin> pluginMap;
@Component
protected QuarkusBootstrapProvider bootstrapProvider;
/**
* console attributes, used to restore the console state
*/
private Attributes attributes;
private int windowsAttributes;
private boolean windowsAttributesSet;
private Pty pty;
private boolean windowsColorSupport;
/**
 * Mirrors the injected Maven log into the static {@code MojoLogger.delegate}.
 */
@Override
public void setLog(Log log) {
    super.setLog(log);
    MojoLogger.delegate = log;
}
/**
 * Entry point of {@code quarkus:dev}: compiles what is needed, launches the forked
 * dev-mode JVM, then polls its liveness and the watched build files, restarting
 * the process whenever a build file changes.
 */
@Override
public void execute() throws MojoFailureException, MojoExecutionException {
    mavenVersionEnforcer.ensureMavenVersion(getLog(), session);
    initToolchain();
    // we always want to compile if needed, so if it is run from the parent it will compile dependent projects
    handleAutoCompile();
    if (enforceBuildGoal) {
        // Refuse to start dev mode for modules that do not configure the quarkus build goal
        // (assumed to be support libraries), unless enforcement was disabled.
        final PluginDescriptor pluginDescr = getPluginDescriptor();
        final Plugin pluginDef = getConfiguredPluginOrNull(pluginDescr.getGroupId(), pluginDescr.getArtifactId());
        if (pluginDef == null || !isGoalConfigured(pluginDef, "build")) {
            getLog().warn("The quarkus-maven-plugin build goal was not configured for this project," +
                    " skipping quarkus:dev as this is assumed to be a support library. If you want to run quarkus:dev" +
                    " on this project make sure the quarkus-maven-plugin is configured with the build goal" +
                    " or disable the enforceBuildGoal flag (via plugin configuration or via" +
                    " -Dquarkus.enforceBuildGoal=false).");
            return;
        }
    }
    // Remember the console/tty state so it can be restored if the forked process dies in raw mode.
    saveTerminalState();
    try {
        DevModeRunner runner = new DevModeRunner();
        Map<Path, Long> pomFiles = readPomFileTimestamps(runner);
        runner.run();
        long nextCheck = System.currentTimeMillis() + 100;
        // Poll roughly every 100ms: check the child process and the build files' timestamps.
        for (;;) {
            //we never suspend after the first run
            suspend = "n";
            long sleep = Math.max(0, nextCheck - System.currentTimeMillis()) + 1;
            Thread.sleep(sleep);
            if (System.currentTimeMillis() > nextCheck) {
                nextCheck = System.currentTimeMillis() + 100;
                if (!runner.alive()) {
                    // Dev-mode process exited on its own; restore the terminal and propagate failures.
                    restoreTerminalState();
                    if (!runner.isExpectedExitValue()) {
                        throw new MojoExecutionException("Dev mode process did not complete successfully");
                    }
                    return;
                }
                // Collect build files whose last-modified time advanced since the previous poll.
                final Set<Path> changed = new HashSet<>();
                for (Map.Entry<Path, Long> e : pomFiles.entrySet()) {
                    long t = Files.getLastModifiedTime(e.getKey()).toMillis();
                    if (t > e.getValue()) {
                        changed.add(e.getKey());
                        pomFiles.put(e.getKey(), t);
                    }
                }
                if (!changed.isEmpty()) {
                    getLog().info("Changes detected to " + changed + ", restarting dev mode");
                    final DevModeRunner newRunner;
                    try {
                        // Recompile main and test sources first; on failure keep the old process running.
                        triggerCompile(false, false);
                        triggerCompile(true, false);
                        newRunner = new DevModeRunner();
                    } catch (Exception e) {
                        getLog().info("Could not load changed pom.xml file, changes not applied", e);
                        continue;
                    }
                    // Stop the old process only once the replacement launcher was built successfully.
                    runner.stop();
                    newRunner.run();
                    runner = newRunner;
                }
            }
        }
    } catch (Exception e) {
        throw new MojoFailureException("Failed to run", e);
    }
}
/**
 * if the process is forcibly killed then the terminal may be left in raw mode, which
 * messes everything up. This attempts to fix that by saving the state so it can be restored
 */
private void saveTerminalState() {
    try {
        // Windows console path. NOTE(review): on non-Windows platforms the jansi/Kernel32
        // calls presumably fail, dropping into the catch block below — confirm.
        windowsAttributes = WindowsSupport.getConsoleMode();
        windowsAttributesSet = true;
        if (windowsAttributes > 0) {
            long hConsole = Kernel32.GetStdHandle(Kernel32.STD_INPUT_HANDLE);
            if (hConsole != (long) Kernel32.INVALID_HANDLE_VALUE) {
                final int VIRTUAL_TERMINAL_PROCESSING = 0x0004; //enable color on the windows console
                if (Kernel32.SetConsoleMode(hConsole, windowsAttributes | VIRTUAL_TERMINAL_PROCESSING) != 0) {
                    windowsColorSupport = true;
                }
            }
        }
    } catch (Throwable t) {
        //this only works with a proper PTY based terminal
        //Aesh creates an input pump thread, that will steal
        //input from the dev mode process
        try {
            // Unix path: capture the current PTY attributes for later restore.
            Pty pty = ExecPty.current();
            attributes = pty.getAttr();
            DevMojo.this.pty = pty;
        } catch (Exception e) {
            getLog().debug("Failed to get a local tty", e);
        }
    }
}
/**
 * Restores the console state captured by {@link #saveTerminalState()}: the Windows
 * console mode when it was saved there, otherwise the PTY attributes (plus an ANSI
 * sequence switching back to the main buffer, re-showing the cursor and resetting colors).
 */
private void restoreTerminalState() {
    if (windowsAttributesSet) {
        WindowsSupport.setConsoleMode(windowsAttributes);
    } else {
        if (attributes == null || pty == null) {
            // Nothing was saved — nothing to restore.
            return;
        }
        Pty finalPty = pty;
        try (finalPty) {
            finalPty.setAttr(attributes);
            int height = finalPty.getSize().getHeight();
            // Main buffer + show cursor + reset attributes + move cursor to the bottom row.
            String sb = ANSI.MAIN_BUFFER +
                    ANSI.CURSOR_SHOW +
                    "\u001B[0m" +
                    "\033[" + height + ";0H";
            finalPty.getSlaveOutput().write(sb.getBytes(StandardCharsets.UTF_8));
        } catch (IOException e) {
            getLog().error("Error restoring console state", e);
        }
    }
}
/**
 * Ensures the project is compiled (and, where configured, code-generated and
 * Jandex-indexed) before dev mode starts — but only for the build steps the user's
 * own command line has not already executed.
 */
private void handleAutoCompile() throws MojoExecutionException {
    //we check to see if there was a compile (or later) goal before this plugin
    boolean compileNeeded = true;
    boolean testCompileNeeded = true;
    boolean prepareNeeded = true;
    boolean prepareTestsNeeded = true;
    // Jandex indexing is only relevant when the jandex plugin is configured on the project.
    String jandexGoalPhase = getGoalPhaseOrNull(ORG_JBOSS_JANDEX, JANDEX_MAVEN_PLUGIN, "jandex", "process-classes");
    boolean indexClassNeeded = jandexGoalPhase != null;
    for (String goal : session.getGoals()) {
        if (goal.endsWith("quarkus:generate-code")) {
            prepareNeeded = false;
        }
        if (goal.endsWith("quarkus:generate-code-tests")) {
            prepareTestsNeeded = false;
        }
        if (POST_COMPILE_PHASES.contains(goal)) {
            compileNeeded = false;
        }
        // If the requested phase reaches at least the phase the jandex goal is bound to,
        // indexing has already happened as part of the user's build.
        if (jandexGoalPhase != null
                && POST_COMPILE_PHASES.indexOf(goal) >= POST_COMPILE_PHASES.indexOf(jandexGoalPhase)) {
            indexClassNeeded = false;
        }
        if (POST_TEST_COMPILE_PHASES.contains(goal)) {
            testCompileNeeded = false;
        }
        // Only goals appearing before quarkus:dev on the command line count.
        if (goal.endsWith("quarkus:dev")) {
            break;
        }
    }
    //if the user did not compile we run it for them
    if (compileNeeded) {
        triggerCompile(false, prepareNeeded);
    }
    if (indexClassNeeded) {
        initClassIndexes();
    }
    if (testCompileNeeded) {
        try {
            triggerCompile(true, prepareTestsNeeded);
        } catch (Throwable t) {
            // A broken test compilation must not prevent dev mode from starting;
            // continuous testing will surface the failure instead.
            getLog().error("Test compile failed, you will need to fix your tests before you can use continuous testing", t);
        }
    }
}
/** Runs the maven-toolchains-plugin "toolchain" goal, if the project configures it. */
private void initToolchain() throws MojoExecutionException {
    executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_TOOLCHAINS_PLUGIN, "toolchain", Collections.emptyMap());
}
/**
 * Runs this plugin's own code-generation goal (generate-code / generate-code-tests)
 * in DEVELOPMENT mode, if it is configured on the project.
 */
private void triggerPrepare(boolean test) throws MojoExecutionException {
    final PluginDescriptor pluginDescr = getPluginDescriptor();
    executeIfConfigured(pluginDescr.getGroupId(), pluginDescr.getArtifactId(),
            test ? QUARKUS_GENERATE_CODE_TESTS_GOAL : QUARKUS_GENERATE_CODE_GOAL,
            Collections.singletonMap("mode", LaunchMode.DEVELOPMENT.name()));
}
/** Runs the jandex-maven-plugin "jandex" goal, if the project configures it. */
private void initClassIndexes() throws MojoExecutionException {
    executeIfConfigured(ORG_JBOSS_JANDEX, JANDEX_MAVEN_PLUGIN, "jandex", Collections.emptyMap());
}
/** Returns this plugin's own descriptor from the mojo's plugin context. */
private PluginDescriptor getPluginDescriptor() {
    return (PluginDescriptor) getPluginContext().get("pluginDescriptor");
}
/**
 * Compiles the project's (test) sources in build order: resources first, then
 * optional code generation, then Kotlin, then Java. Each step only runs when
 * the corresponding plugin/goal is configured on the project.
 */
private void triggerCompile(boolean test, boolean prepareNeeded) throws MojoExecutionException {
    handleResources(test);
    if (prepareNeeded) {
        triggerPrepare(test);
    }
    // compile the Kotlin sources if needed
    executeIfConfigured(ORG_JETBRAINS_KOTLIN, KOTLIN_MAVEN_PLUGIN, test ? "test-compile" : "compile",
            Collections.emptyMap());
    // Compile the Java sources if needed
    executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_COMPILER_PLUGIN, test ? "testCompile" : "compile",
            Collections.emptyMap());
}
/**
 * Processes the project's (test) resources via the maven-resources-plugin, but only
 * when the project declares any resource directories at all.
 */
private void handleResources(boolean test) throws MojoExecutionException {
    final List<Resource> resources = project.getResources();
    if (!resources.isEmpty()) {
        executeIfConfigured(ORG_APACHE_MAVEN_PLUGINS, MAVEN_RESOURCES_PLUGIN, test ? "testResources" : "resources",
                Collections.emptyMap());
    }
}
/**
 * Executes the given goal of the named plugin, but only when the project actually
 * configures that plugin with that goal; otherwise this is a no-op.
 *
 * @param pluginGroupId    groupId of the plugin to invoke
 * @param pluginArtifactId artifactId of the plugin to invoke
 * @param goal             goal to execute
 * @param params           extra configuration parameters to pass to the goal
 * @throws MojoExecutionException if the goal execution fails
 */
private void executeIfConfigured(String pluginGroupId, String pluginArtifactId, String goal, Map<String, String> params)
        throws MojoExecutionException {
    final Plugin plugin = getConfiguredPluginOrNull(pluginGroupId, pluginArtifactId);
    if (!isGoalConfigured(plugin, goal)) {
        return;
    }
    // The message previously contained an unbalanced ')' before the '@'.
    getLog().info("Invoking " + plugin.getGroupId() + ":" + plugin.getArtifactId() + ":" + plugin.getVersion() + ":" + goal
            + " @ " + project.getArtifactId());
    executeMojo(
            plugin(
                    groupId(pluginGroupId),
                    artifactId(pluginArtifactId),
                    version(plugin.getVersion()),
                    plugin.getDependencies()),
            goal(goal),
            getPluginConfig(plugin, goal, params),
            executionEnvironment(
                    project,
                    session,
                    pluginManager));
}
/**
 * Returns the lifecycle phase the given plugin goal is bound to, {@code defaultPhase}
 * when the first matching execution declares no explicit phase, or null when the
 * plugin (or an execution of the goal) is not configured at all.
 */
private String getGoalPhaseOrNull(String groupId, String artifactId, String goal, String defaultPhase) {
    final Plugin plugin = getConfiguredPluginOrNull(groupId, artifactId);
    if (plugin == null) {
        return null;
    }
    return plugin.getExecutions().stream()
            .filter(execution -> execution.getGoals().contains(goal))
            .findFirst()
            .map(execution -> execution.getPhase() != null ? execution.getPhase() : defaultPhase)
            .orElse(null);
}
/**
 * Tells whether any execution of the given plugin declares the given goal.
 * A null plugin is treated as "not configured".
 */
public boolean isGoalConfigured(Plugin plugin, String goal) {
    return plugin != null
            && plugin.getExecutions().stream()
                    .anyMatch(execution -> execution.getGoals().contains(goal));
}
/**
 * Builds the effective configuration for invoking {@code goal} of {@code plugin}:
 * merges all execution-level configurations declaring the goal with the plugin-level
 * configuration, drops {@code test*} parameters and parameters the goal does not
 * support, then appends the explicitly requested {@code params}.
 *
 * @throws MojoExecutionException when the goal's descriptor cannot be resolved
 */
private Xpp3Dom getPluginConfig(Plugin plugin, String goal, Map<String, String> params) throws MojoExecutionException {
    Xpp3Dom mergedConfig = null;
    // The redundant isEmpty() guard around this loop was removed; iterating an empty list is a no-op.
    for (PluginExecution exec : plugin.getExecutions()) {
        if (exec.getConfiguration() != null && exec.getGoals().contains(goal)) {
            // The accumulated config is passed as the dominant side of the merge,
            // so earlier matching executions take precedence on conflicts.
            mergedConfig = mergedConfig == null ? (Xpp3Dom) exec.getConfiguration()
                    : Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) exec.getConfiguration(), true);
        }
    }
    // Plugin-level configuration is merged in last (recessive to execution-level config).
    if (plugin.getConfiguration() != null) {
        mergedConfig = mergedConfig == null ? (Xpp3Dom) plugin.getConfiguration()
                : Xpp3Dom.mergeXpp3Dom(mergedConfig, (Xpp3Dom) plugin.getConfiguration(), true);
    }
    final Xpp3Dom configuration = configuration();
    if (mergedConfig != null) {
        Set<String> supportedParams = null;
        // Filter out `test*` configurations
        for (Xpp3Dom child : mergedConfig.getChildren()) {
            if (child.getName().startsWith("test")) {
                continue;
            }
            if (supportedParams == null) {
                // Resolve the goal's parameter set lazily — only once a candidate survives filtering.
                supportedParams = getMojoDescriptor(plugin, goal).getParameterMap().keySet();
            }
            if (supportedParams.contains(child.getName())) {
                configuration.addChild(child);
            }
        }
    }
    for (Map.Entry<String, String> param : params.entrySet()) {
        final Xpp3Dom p = new Xpp3Dom(param.getKey());
        p.setValue(param.getValue());
        configuration.addChild(p);
    }
    return configuration;
}
/**
 * Resolves the descriptor of the given plugin goal (used to discover its supported parameters).
 *
 * @throws MojoExecutionException when the descriptor cannot be obtained
 */
private MojoDescriptor getMojoDescriptor(Plugin plugin, String goal) throws MojoExecutionException {
    try {
        return pluginManager.getMojoDescriptor(plugin, goal, pluginRepos, repoSession);
    } catch (Exception e) {
        throw new MojoExecutionException(
                "Failed to obtain descriptor for Maven plugin " + plugin.getId() + " goal " + goal, e);
    }
}
/**
 * Looks up a build plugin by groupId/artifactId, lazily indexing the project's
 * build plugins on first use. Returns null when the plugin is not configured.
 */
private Plugin getConfiguredPluginOrNull(String groupId, String artifactId) {
    if (pluginMap == null) {
        // the original plugin keys may include property expressions, so we can't rely on the exact groupId:artifactId keys
        final Map<GACT, Plugin> index = new HashMap<>();
        for (Plugin buildPlugin : project.getBuildPlugins()) {
            index.put(new GACT(buildPlugin.getGroupId(), buildPlugin.getArtifactId()), buildPlugin);
        }
        pluginMap = index;
    }
    return pluginMap.get(new GACT(groupId, artifactId));
}
/**
 * Snapshots the last-modified time (millis) of every build file the runner watches.
 *
 * @throws IOException when a file's timestamp cannot be read
 */
private Map<Path, Long> readPomFileTimestamps(DevModeRunner runner) throws IOException {
    final Map<Path, Long> timestamps = new HashMap<>();
    for (Path buildFile : runner.pomFiles()) {
        timestamps.put(buildFile, Files.getLastModifiedTime(buildFile).toMillis());
    }
    return timestamps;
}
/**
 * Returns the project's configured source encoding
 * ({@code project.build.sourceEncoding}), or null when it is not set.
 */
private String getSourceEncoding() {
    final Object encoding = project.getProperties().get("project.build.sourceEncoding");
    return encoding == null ? null : (String) encoding;
}
/**
 * Registers a single workspace module with the dev-mode launcher, collecting its
 * source, resource and output directories (main and test), plus any resources
 * contributed by active Maven profiles.
 *
 * @param builder launcher builder receiving the module info
 * @param module  resolved dependency describing the module
 * @param root    true for the application module itself, false for hot-reloadable dependencies
 */
private void addProject(MavenDevModeLauncher.Builder builder, ResolvedDependency module, boolean root) throws Exception {
    String projectDirectory;
    Set<Path> sourcePaths;
    String classesPath = null;
    Set<Path> resourcePaths;
    Set<Path> testSourcePaths;
    String testClassesPath;
    Set<Path> testResourcePaths;
    List<Profile> activeProfiles = Collections.emptyList();
    // Try to resolve the module to a MavenProject of the current session (only attempted
    // for artifacts without a classifier); fall back to the workspace model otherwise.
    final MavenProject mavenProject = module.getClassifier().isEmpty()
            ? session.getProjectMap()
                    .get(String.format("%s:%s:%s", module.getGroupId(), module.getArtifactId(), module.getVersion()))
            : null;
    final ArtifactSources sources = module.getSources();
    if (mavenProject == null) {
        // Not part of the session: read directories from the bootstrap workspace module.
        projectDirectory = module.getWorkspaceModule().getModuleDir().getAbsolutePath();
        sourcePaths = new LinkedHashSet<>();
        for (SourceDir src : sources.getSourceDirs()) {
            for (Path p : src.getSourceTree().getRoots()) {
                sourcePaths.add(p.toAbsolutePath());
            }
        }
        testSourcePaths = new LinkedHashSet<>();
        ArtifactSources testSources = module.getWorkspaceModule().getTestSources();
        if (testSources != null) {
            for (SourceDir src : testSources.getSourceDirs()) {
                for (Path p : src.getSourceTree().getRoots()) {
                    testSourcePaths.add(p.toAbsolutePath());
                }
            }
        }
    } else {
        // Session project available: use its compile roots and active profiles.
        projectDirectory = mavenProject.getBasedir().getPath();
        sourcePaths = mavenProject.getCompileSourceRoots().stream()
                .map(Paths::get)
                .map(Path::toAbsolutePath)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        testSourcePaths = mavenProject.getTestCompileSourceRoots().stream()
                .map(Paths::get)
                .map(Path::toAbsolutePath)
                .collect(Collectors.toCollection(LinkedHashSet::new));
        activeProfiles = mavenProject.getActiveProfiles();
    }
    // Parent directory of the first source (or, failing that, resource) directory.
    final Path sourceParent;
    if (sources.getSourceDirs() == null) {
        if (sources.getResourceDirs() == null) {
            throw new MojoExecutionException("The project does not appear to contain any sources or resources");
        }
        sourceParent = sources.getResourceDirs().iterator().next().getDir().toAbsolutePath().getParent();
    } else {
        sourceParent = sources.getSourceDirs().iterator().next().getDir().toAbsolutePath().getParent();
    }
    // NOTE(review): getSourceDirs() may be null per the branch above, yet is dereferenced here —
    // looks NPE-prone for a module with only resources; confirm upstream.
    Path classesDir = sources.getSourceDirs().iterator().next().getOutputDir().toAbsolutePath();
    if (Files.isDirectory(classesDir)) {
        classesPath = classesDir.toString();
    }
    // NOTE(review): getTestSources() is null-checked further below but not here — this
    // dereference looks like it would NPE for a module without test sources; confirm.
    Path testClassesDir = module.getWorkspaceModule().getTestSources().getSourceDirs().iterator().next().getOutputDir()
            .toAbsolutePath();
    testClassesPath = testClassesDir.toString();
    resourcePaths = new LinkedHashSet<>();
    for (SourceDir src : sources.getResourceDirs()) {
        for (Path p : src.getSourceTree().getRoots()) {
            resourcePaths.add(p.toAbsolutePath());
        }
    }
    testResourcePaths = new LinkedHashSet<>();
    ArtifactSources testSources = module.getWorkspaceModule().getTestSources();
    if (testSources != null) {
        for (SourceDir src : testSources.getResourceDirs()) {
            for (Path p : src.getSourceTree().getRoots()) {
                testResourcePaths.add(p.toAbsolutePath());
            }
        }
    }
    // Add the resources and test resources from the profiles
    for (Profile profile : activeProfiles) {
        final BuildBase build = profile.getBuild();
        if (build != null) {
            resourcePaths.addAll(
                    build.getResources().stream()
                            .map(Resource::getDirectory)
                            .map(Paths::get)
                            .map(Path::toAbsolutePath)
                            .collect(Collectors.toList()));
            testResourcePaths.addAll(
                    build.getTestResources().stream()
                            .map(Resource::getDirectory)
                            .map(Paths::get)
                            .map(Path::toAbsolutePath)
                            .collect(Collectors.toList()));
        }
    }
    // A module with sources/resources but no classes directory was never compiled — fail fast.
    if (classesPath == null && (!sourcePaths.isEmpty() || !resourcePaths.isEmpty())) {
        throw new MojoExecutionException("Hot reloadable dependency " + module.getWorkspaceModule().getId()
                + " has not been compiled yet (the classes directory " + classesDir + " does not exist)");
    }
    Path targetDir = Paths.get(project.getBuild().getDirectory());
    DevModeContext.ModuleInfo moduleInfo = new DevModeContext.ModuleInfo.Builder()
            .setArtifactKey(module.getKey())
            .setProjectDirectory(projectDirectory)
            .setSourcePaths(PathList.from(sourcePaths))
            .setClassesPath(classesPath)
            .setResourcesOutputPath(classesPath)
            .setResourcePaths(PathList.from(resourcePaths))
            .setSourceParents(PathList.of(sourceParent.toAbsolutePath()))
            .setPreBuildOutputDir(targetDir.resolve("generated-sources").toAbsolutePath().toString())
            .setTargetDir(targetDir.toAbsolutePath().toString())
            .setTestSourcePaths(PathList.from(testSourcePaths))
            .setTestClassesPath(testClassesPath)
            .setTestResourcesOutputPath(testClassesPath)
            .setTestResourcePaths(PathList.from(testResourcePaths))
            .build();
    if (root) {
        builder.mainModule(moduleInfo);
    } else {
        builder.dependency(moduleInfo);
    }
}
/**
 * Wraps the forked dev-mode JVM: builds the launcher (command line plus watched
 * build files) and manages the child process lifecycle.
 */
private class DevModeRunner {

    final QuarkusDevModeLauncher launcher;
    // The forked dev-mode JVM; null until run() is called.
    private Process process;

    private DevModeRunner() throws Exception {
        launcher = newLauncher();
    }

    /** Build files (pom.xml etc.) whose changes should trigger a dev-mode restart. */
    Collection<Path> pomFiles() {
        return launcher.watchedBuildFiles();
    }

    boolean alive() {
        return process != null && process.isAlive();
    }

    /** Exit value of the child process, or -1 when it was never started. */
    int exitValue() {
        return process == null ? -1 : process.exitValue();
    }

    boolean isExpectedExitValue() {
        // '130' is what the process exits with in remote-dev mode under bash
        return exitValue() == 0 || exitValue() == 130;
    }

    void run() throws Exception {
        // Display the launch command line in dev mode
        if (getLog().isDebugEnabled()) {
            getLog().debug("Launching JVM with command line: " + String.join(" ", launcher.args()));
        }
        final ProcessBuilder processBuilder = new ProcessBuilder(launcher.args())
                .redirectErrorStream(true)
                .inheritIO()
                .directory(workingDir == null ? project.getBasedir() : workingDir);
        if (!environmentVariables.isEmpty()) {
            processBuilder.environment().putAll(environmentVariables);
        }
        process = processBuilder.start();
        // Terminate the child JVM when the Maven process itself goes down.
        //https://github.com/quarkusio/quarkus/issues/232
        Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
            @Override
            public void run() {
                process.destroy();
                try {
                    process.waitFor();
                } catch (InterruptedException e) {
                    getLog().warn("Unable to properly wait for dev-mode end", e);
                }
            }
        }, "Development Mode Shutdown Hook"));
    }

    /** Destroys the child process and blocks until it has exited. */
    void stop() throws InterruptedException {
        process.destroy();
        process.waitFor();
    }
}
/**
 * Builds the dev-mode launcher: selects the JVM (toolchain-aware), wires JVM
 * args and compiler flags, resolves and serializes the application model, and
 * registers every reloadable module plus the dev-mode-only dependencies.
 */
private QuarkusDevModeLauncher newLauncher() throws Exception {
    String java = null;
    // See if a toolchain is configured
    if (toolchainManager != null) {
        Toolchain toolchain = toolchainManager.getToolchainFromBuildContext("jdk", session);
        if (toolchain != null) {
            java = toolchain.findTool("java");
            getLog().info("JVM from toolchain: " + java);
        }
    }
    final MavenDevModeLauncher.Builder builder = MavenDevModeLauncher.builder(java, getLog())
            .preventnoverify(preventnoverify)
            .buildDir(buildDir)
            .outputDir(outputDirectory)
            .suspend(suspend)
            .debug(debug)
            .debugHost(debugHost)
            .debugPort(debugPort)
            .deleteDevJar(deleteDevJar);

    setJvmArgs(builder);
    if (windowsColorSupport) {
        builder.jvmArgs("-Dio.quarkus.force-color-support=true");
    }

    builder.projectDir(project.getFile().getParentFile());
    builder.buildSystemProperties((Map) project.getProperties());
    builder.applicationName(project.getArtifactId());
    builder.applicationVersion(project.getVersion());
    builder.sourceEncoding(getSourceEncoding());

    // Set compilation flags. Try the explicitly given configuration first. Otherwise,
    // refer to the configuration of the Maven Compiler Plugin.
    final Optional<Xpp3Dom> compilerPluginConfiguration = findCompilerPluginConfiguration();
    if (compilerArgs != null) {
        builder.compilerOptions(compilerArgs);
    } else if (compilerPluginConfiguration.isPresent()) {
        final Xpp3Dom compilerPluginArgsConfiguration = compilerPluginConfiguration.get().getChild("compilerArgs");
        if (compilerPluginArgsConfiguration != null) {
            List<String> compilerPluginArgs = new ArrayList<>();
            for (Xpp3Dom argConfiguration : compilerPluginArgsConfiguration.getChildren()) {
                compilerPluginArgs.add(argConfiguration.getValue());
            }
            // compilerArgs can also take a value without using arg
            if (compilerPluginArgsConfiguration.getValue() != null
                    && !compilerPluginArgsConfiguration.getValue().isEmpty()) {
                compilerPluginArgs.add(compilerPluginArgsConfiguration.getValue().trim());
            }
            builder.compilerOptions(compilerPluginArgs);
        }
    }
    // release/source/target: an explicit mojo parameter wins; otherwise fall
    // back to whatever maven-compiler-plugin is configured with.
    if (release != null) {
        builder.releaseJavaVersion(release);
    } else if (compilerPluginConfiguration.isPresent()) {
        applyCompilerFlag(compilerPluginConfiguration, "release", builder::releaseJavaVersion);
    }
    if (source != null) {
        builder.sourceJavaVersion(source);
    } else if (compilerPluginConfiguration.isPresent()) {
        applyCompilerFlag(compilerPluginConfiguration, "source", builder::sourceJavaVersion);
    }
    if (target != null) {
        builder.targetJavaVersion(target);
    } else if (compilerPluginConfiguration.isPresent()) {
        applyCompilerFlag(compilerPluginConfiguration, "target", builder::targetJavaVersion);
    }

    setKotlinSpecificFlags(builder);

    // path to the serialized application model
    final Path appModelLocation = resolveSerializedModelLocation();

    ApplicationModel appModel = bootstrapProvider
            .getResolvedApplicationModel(QuarkusBootstrapProvider.getProjectId(project), LaunchMode.DEVELOPMENT);
    if (appModel != null) {
        bootstrapProvider.close();
    } else {
        final MavenArtifactResolver.Builder resolverBuilder = MavenArtifactResolver.builder()
                .setRepositorySystem(repoSystem)
                .setRemoteRepositories(repos)
                .setRemoteRepositoryManager(remoteRepositoryManager)
                .setWorkspaceDiscovery(true)
                .setPreferPomsFromWorkspace(true)
                .setCurrentProject(project.getFile().toString());
        // if it already exists, it may be a reload triggered by a change in a POM
        // in which case we should not be using the original Maven session
        boolean reinitializeMavenSession = Files.exists(appModelLocation);
        if (reinitializeMavenSession) {
            Files.delete(appModelLocation);
        } else {
            // we can re-use the original Maven session
            resolverBuilder.setRepositorySystemSession(repoSession);
        }
        appModel = new BootstrapAppModelResolver(resolverBuilder.build())
                .setDevMode(true)
                .setCollectReloadableDependencies(!noDeps)
                .resolveModel(new GACTV(project.getGroupId(), project.getArtifactId(), null, ArtifactCoords.TYPE_JAR,
                        project.getVersion()));
    }

    // serialize the app model to avoid re-resolving it in the dev process
    BootstrapUtils.serializeAppModel(appModel, appModelLocation);
    builder.jvmArgs("-D" + BootstrapConstants.SERIALIZED_APP_MODEL + "=" + appModelLocation);

    if (noDeps) {
        addProject(builder, appModel.getAppArtifact(), true);
        appModel.getApplicationModule().getBuildFiles().forEach(p -> builder.watchedBuildFile(p));
        builder.localArtifact(
                ArtifactKey.gact(project.getGroupId(), project.getArtifactId(), null, ArtifactCoords.TYPE_JAR));
    } else {
        // NOTE: the loop variable deliberately shadows the mojo's 'project' field
        for (ResolvedDependency project : DependenciesFilter.getReloadableModules(appModel)) {
            addProject(builder, project, project == appModel.getAppArtifact());
            project.getWorkspaceModule().getBuildFiles().forEach(p -> builder.watchedBuildFile(p));
            builder.localArtifact(project.getKey());
        }
    }

    addQuarkusDevModeDeps(builder);

    //look for an application.properties
    Set<Path> resourceDirs = new HashSet<>();
    for (Resource resource : project.getResources()) {
        String dir = resource.getDirectory();
        Path path = Paths.get(dir);
        resourceDirs.add(path);
    }

    Set<ArtifactKey> configuredParentFirst = QuarkusBootstrap.createClassLoadingConfig(PathsCollection.from(resourceDirs),
            QuarkusBootstrap.Mode.DEV, Collections.emptyList()).parentFirstArtifacts;

    //in most cases these are not used, however they need to be present for some
    //parent-first cases such as logging
    //first we go through and get all the parent first artifacts
    Set<ArtifactKey> parentFirstArtifacts = new HashSet<>(configuredParentFirst);
    parentFirstArtifacts.addAll(appModel.getParentFirst());

    for (Artifact appDep : project.getArtifacts()) {
        // only add the artifact if it's present in the dev mode context
        // we need this to avoid having jars on the classpath multiple times
        ArtifactKey key = ArtifactKey.gact(appDep.getGroupId(), appDep.getArtifactId(),
                appDep.getClassifier(), appDep.getArtifactHandler().getExtension());
        if (!builder.isLocal(key) && parentFirstArtifacts.contains(key)) {
            builder.classpathEntry(appDep.getFile());
        }
    }

    builder.baseName(project.getBuild().getFinalName());

    modifyDevModeContext(builder);

    if (argsString != null) {
        builder.applicationArgs(argsString);
    }
    propagateUserProperties(builder);

    return builder.build();
}
/**
 * Passes the configured jvmArgs plus every {@code systemProperties} entry
 * (as {@code -Dkey="value"}) to the launcher builder.
 */
private void setJvmArgs(Builder builder) throws Exception {
    String args = this.jvmArgs;
    if (!systemProperties.isEmpty()) {
        // append each configured system property as a quoted -D flag
        final StringBuilder sb = new StringBuilder(args == null ? "" : args);
        for (Map.Entry<String, String> prop : systemProperties.entrySet()) {
            sb.append(" -D").append(prop.getKey()).append("=\"").append(prop.getValue()).append("\"");
        }
        args = sb.toString();
    }
    if (args != null) {
        builder.jvmArgs(Arrays.asList(CommandLineUtils.translateCommandline(args)));
    }
}
/**
 * Forwards every -D property the user passed on the Maven command line to the
 * dev-mode JVM so it sees the same system properties.
 */
private void propagateUserProperties(MavenDevModeLauncher.Builder builder) {
    final Properties userProps = BootstrapMavenOptions.newInstance().getSystemProperties();
    if (userProps == null) {
        return;
    }
    for (Object key : userProps.keySet()) {
        final String propName = key.toString();
        final String propValue = userProps.getProperty(propName);
        final String arg = (propValue == null || propValue.isEmpty())
                ? "-D" + propName
                : "-D" + propName + "=" + propValue;
        builder.jvmArgs(arg);
    }
}
/**
 * Reads a single maven-compiler-plugin flag (e.g. "release") and, when present
 * and non-blank, feeds its trimmed value into the given builder setter.
 */
private void applyCompilerFlag(Optional<Xpp3Dom> compilerPluginConfiguration, String flagName,
        Consumer<String> builderCall) {
    if (!compilerPluginConfiguration.isPresent()) {
        return;
    }
    final Xpp3Dom flag = compilerPluginConfiguration.get().getChild(flagName);
    if (flag == null) {
        return;
    }
    final String value = flag.getValue();
    if (value == null) {
        return;
    }
    final String trimmed = value.trim();
    if (!trimmed.isEmpty()) {
        builderCall.accept(trimmed);
    }
}
/**
 * Adds the quarkus-core-deployment runtime dependency graph to the dev-mode
 * classpath. Its GAV is discovered from the pom.properties resource shipped
 * on the plugin's own classpath.
 *
 * @throws MojoExecutionException if the pom.properties resource is missing or unreadable
 * @throws DependencyResolutionException if the dependency graph cannot be resolved
 */
private void addQuarkusDevModeDeps(MavenDevModeLauncher.Builder builder)
        throws MojoExecutionException, DependencyResolutionException {
    final String pomPropsPath = "META-INF/maven/io.quarkus/quarkus-core-deployment/pom.properties";
    final InputStream devModePomPropsIs = DevModeMain.class.getClassLoader().getResourceAsStream(pomPropsPath);
    if (devModePomPropsIs == null) {
        throw new MojoExecutionException("Failed to locate " + pomPropsPath + " on the classpath");
    }
    final Properties devModeProps = new Properties();
    try (InputStream is = devModePomPropsIs) {
        devModeProps.load(is);
    } catch (IOException e) {
        throw new MojoExecutionException("Failed to load " + pomPropsPath + " from the classpath", e);
    }
    // read the GAV of quarkus-core-deployment from its pom.properties
    final String devModeGroupId = devModeProps.getProperty("groupId");
    if (devModeGroupId == null) {
        throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing groupId");
    }
    final String devModeArtifactId = devModeProps.getProperty("artifactId");
    if (devModeArtifactId == null) {
        throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing artifactId");
    }
    final String devModeVersion = devModeProps.getProperty("version");
    if (devModeVersion == null) {
        throw new MojoExecutionException("Classpath resource " + pomPropsPath + " is missing version");
    }
    // resolve the full RUNTIME-scope dependency graph of quarkus-core-deployment
    final DefaultArtifact devModeJar = new DefaultArtifact(devModeGroupId, devModeArtifactId, "jar", devModeVersion);
    final DependencyResult cpRes = repoSystem.resolveDependencies(repoSession,
            new DependencyRequest()
                    .setCollectRequest(
                            new CollectRequest()
                                    .setRoot(new org.eclipse.aether.graph.Dependency(devModeJar, JavaScopes.RUNTIME))
                                    .setRepositories(repos)));
    for (ArtifactResult appDep : cpRes.getArtifactResults()) {
        //we only use the launcher for launching from the IDE, we need to exclude it
        if (!(appDep.getArtifact().getGroupId().equals("io.quarkus")
                && appDep.getArtifact().getArtifactId().equals("quarkus-ide-launcher"))) {
            if (appDep.getArtifact().getGroupId().equals("io.quarkus")
                    && appDep.getArtifact().getArtifactId().equals("quarkus-class-change-agent")) {
                // the class-change agent is attached as a -javaagent, not a classpath entry
                builder.jvmArgs("-javaagent:" + appDep.getArtifact().getFile().getAbsolutePath());
            } else {
                builder.classpathEntry(appDep.getArtifact().getFile());
            }
        }
    }
}
/**
 * When the Kotlin Maven plugin is present, resolves its declared dependencies
 * (compiler plugins) and forwards them plus any {@code <pluginOptions>} to the
 * dev-mode launcher. Does nothing when the plugin is absent.
 */
private void setKotlinSpecificFlags(MavenDevModeLauncher.Builder builder) {
    Plugin kotlinPlugin = null;
    for (Plugin plugin : project.getBuildPlugins()) {
        if (plugin.getKey().equals(KOTLIN_MAVEN_PLUGIN_GA)) {
            kotlinPlugin = plugin;
            break;
        }
    }
    if (kotlinPlugin == null) {
        return;
    }
    getLog().debug("Kotlin Maven plugin detected");

    // Resolve every dependency declared on the Kotlin plugin so the dev-mode
    // compiler can load the matching compiler plugins.
    final List<String> compilerPluginArtifacts = new ArrayList<>();
    for (Dependency dependency : kotlinPlugin.getDependencies()) {
        try {
            final ArtifactResult resolved = repoSystem.resolveArtifact(repoSession,
                    new ArtifactRequest()
                            .setArtifact(new DefaultArtifact(dependency.getGroupId(), dependency.getArtifactId(),
                                    dependency.getClassifier(), dependency.getType(), dependency.getVersion()))
                            .setRepositories(repos));
            compilerPluginArtifacts.add(resolved.getArtifact().getFile().toPath().toAbsolutePath().toString());
        } catch (ArtifactResolutionException e) {
            // best-effort: warn and bail out without configuring Kotlin support
            getLog().warn("Unable to properly setup dev-mode for Kotlin", e);
            return;
        }
    }
    builder.compilerPluginArtifacts(compilerPluginArtifacts);

    // Forward any <pluginOptions> configured on the Kotlin plugin.
    final List<String> options = new ArrayList<>();
    final Xpp3Dom configuration = (Xpp3Dom) kotlinPlugin.getConfiguration();
    if (configuration != null) {
        final Xpp3Dom pluginOptions = configuration.getChild("pluginOptions");
        if (pluginOptions != null) {
            for (Xpp3Dom option : pluginOptions.getChildren()) {
                options.add(option.getValue());
            }
        }
    }
    builder.compilerPluginOptions(options);
}
/**
 * Extension hook: subclasses may override to customize the launcher
 * configuration just before the application args and user properties are
 * applied. The default implementation does nothing.
 */
protected void modifyDevModeContext(MavenDevModeLauncher.Builder builder) {
}
/**
 * Locates the maven-compiler-plugin among the project's build plugins and
 * returns its XML configuration, or {@link Optional#empty()} when the plugin
 * is absent or carries no configuration.
 */
private Optional<Xpp3Dom> findCompilerPluginConfiguration() {
    for (final Plugin plugin : project.getBuildPlugins()) {
        if (plugin.getKey().equals("org.apache.maven.plugins:maven-compiler-plugin")) {
            final Xpp3Dom configuration = (Xpp3Dom) plugin.getConfiguration();
            if (configuration != null) {
                return Optional.of(configuration);
            }
        }
    }
    return Optional.empty();
}
/**
 * Computes the path under the project's build directory where the serialized
 * application model is stored, scheduling it for deletion on JVM exit.
 */
private Path resolveSerializedModelLocation() {
    final Path targetDirectory = Paths.get(project.getBuild().getDirectory());
    final Path modelLocation = BootstrapUtils.resolveSerializedAppModelPath(targetDirectory);
    // best-effort cleanup: the serialized model is only useful for this run
    modelLocation.toFile().deleteOnExit();
    return modelLocation;
}
}
| |
// Copyright (c) 2017, Baidu.com, Inc. All Rights Reserved
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package com.baidu.palo.load;
import com.baidu.palo.analysis.BrokerDesc;
import com.baidu.palo.analysis.ExportStmt;
import com.baidu.palo.analysis.TableName;
import com.baidu.palo.catalog.Catalog;
import com.baidu.palo.catalog.Database;
import com.baidu.palo.common.Config;
import com.baidu.palo.common.util.ListComparator;
import com.baidu.palo.common.util.OrderByPair;
import com.baidu.palo.common.util.TimeUtils;
import com.baidu.palo.mysql.privilege.PrivPredicate;
import com.baidu.palo.qe.ConnectContext;
import com.google.common.base.Joiner;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * Keeps track of all export jobs: creates jobs from export statements, answers
 * SHOW EXPORT queries, replays edit-log records and expires old jobs.
 *
 * <p>Thread-safety: all access to {@link #idToJob} is guarded by {@link #lock}.
 * The lock is private and must be taken AFTER the db lock to avoid deadlocks.
 */
public class ExportMgr {
    // BUGFIX: the logger was created with ExportJob.class, which mislabeled
    // every log line from this class as coming from ExportJob.
    private static final Logger LOG = LogManager.getLogger(ExportMgr.class);

    // lock for export job
    // lock is private and must be used after the db lock
    private ReentrantReadWriteLock lock;

    private Map<Long, ExportJob> idToJob; // exportJobId to exportJob

    public ExportMgr() {
        idToJob = Maps.newHashMap();
        lock = new ReentrantReadWriteLock(true);
    }

    public void readLock() {
        lock.readLock().lock();
    }

    public void readUnlock() {
        lock.readLock().unlock();
    }

    private void writeLock() {
        lock.writeLock().lock();
    }

    private void writeUnlock() {
        lock.writeLock().unlock();
    }

    // NOTE: exposes the internal map; callers must hold readLock()/readUnlock()
    // around any iteration. Kept as-is for interface compatibility.
    public Map<Long, ExportJob> getIdToJob() {
        return idToJob;
    }

    /**
     * Creates a new export job from the statement, registers it and writes a
     * creation record to the edit log.
     */
    public void addExportJob(ExportStmt stmt) throws Exception {
        long jobId = Catalog.getInstance().getNextId();
        ExportJob job = createJob(jobId, stmt);
        writeLock();
        try {
            unprotectAddJob(job);
            Catalog.getInstance().getEditLog().logExportCreate(job);
        } finally {
            writeUnlock();
        }
        LOG.debug("debug: add export job. {}", job);
    }

    // "unprotect" = the caller must already hold the write lock
    public void unprotectAddJob(ExportJob job) {
        idToJob.put(job.getId(), job);
    }

    private ExportJob createJob(long jobId, ExportStmt stmt) throws Exception {
        ExportJob job = new ExportJob(jobId);
        job.setJob(stmt);
        return job;
    }

    /** Returns all jobs currently in the given state. */
    public List<ExportJob> getExportJobs(ExportJob.JobState state) {
        List<ExportJob> result = Lists.newArrayList();
        readLock();
        try {
            for (ExportJob job : idToJob.values()) {
                if (job.getState() == state) {
                    result.add(job);
                }
            }
        } finally {
            readUnlock();
        }
        return result;
    }

    // NOTE: jobid and states may both specified, or only one of them, or neither
    public LinkedList<List<Comparable>> getExportJobInfosByIdOrState(
            long dbId, long jobId, Set<ExportJob.JobState> states,
            ArrayList<OrderByPair> orderByPairs) {
        LinkedList<List<Comparable>> exportJobInfos = new LinkedList<List<Comparable>>();
        readLock();
        try {
            for (ExportJob job : idToJob.values()) {
                long id = job.getId();
                ExportJob.JobState state = job.getState();

                // filter by database
                if (job.getDbId() != dbId) {
                    continue;
                }
                // filter by job id when one is given (0 means "any")
                if (jobId != 0 && id != jobId) {
                    continue;
                }
                // check auth
                TableName tableName = job.getTableName();
                if (tableName == null || tableName.getTbl().equals("DUMMY")) {
                    // forward compatibility, no table name is saved before
                    Database db = Catalog.getCurrentCatalog().getDb(dbId);
                    if (db == null) {
                        continue;
                    }
                    if (!Catalog.getCurrentCatalog().getAuth().checkDbPriv(ConnectContext.get(),
                            db.getFullName(), PrivPredicate.SHOW)) {
                        continue;
                    }
                } else {
                    if (!Catalog.getCurrentCatalog().getAuth().checkTblPriv(ConnectContext.get(),
                            tableName.getDb(), tableName.getTbl(),
                            PrivPredicate.SHOW)) {
                        continue;
                    }
                }
                // filter by state when a state set is given
                if (states != null && !states.contains(state)) {
                    continue;
                }

                List<Comparable> jobInfo = new ArrayList<Comparable>();
                // add slot in order
                jobInfo.add(id);
                jobInfo.add(state.name());
                jobInfo.add(job.getProgress() + "%");

                // task infos
                StringBuilder sb = new StringBuilder();
                sb.append(" PARTITION:");
                List<String> partitions = job.getPartitions();
                if (partitions == null) {
                    sb.append("ALL");
                } else {
                    Joiner.on(",").appendTo(sb, partitions);
                }
                sb.append("; ");
                BrokerDesc brokerDesc = job.getBrokerDesc();
                if (brokerDesc != null) {
                    sb.append("BROKER:").append(brokerDesc.getName());
                }
                jobInfo.add(sb.toString());
                // BUGFIX: removed a dead sb.append("PATH:")... that ran AFTER
                // sb.toString() had already been consumed; the export path is
                // reported in its own column below.

                // error msg
                if (job.getState() == ExportJob.JobState.CANCELLED) {
                    ExportFailMsg failMsg = job.getFailMsg();
                    jobInfo.add("type:" + failMsg.getCancelType() + "; msg:" + failMsg.getMsg());
                } else {
                    jobInfo.add("N/A");
                }

                jobInfo.add(TimeUtils.longToTimeString(job.getCreateTimeMs()));
                jobInfo.add(TimeUtils.longToTimeString(job.getStartTimeMs()));
                jobInfo.add(TimeUtils.longToTimeString(job.getFinishTimeMs()));
                jobInfo.add(job.getExportPath());

                exportJobInfos.add(jobInfo);
            }
        } finally {
            readUnlock();
        }

        // order by the requested columns, or by id asc when none are given
        ListComparator<List<Comparable>> comparator = null;
        if (orderByPairs != null) {
            OrderByPair[] orderByPairArr = new OrderByPair[orderByPairs.size()];
            comparator = new ListComparator<List<Comparable>>(orderByPairs.toArray(orderByPairArr));
        } else {
            // sort by id asc
            comparator = new ListComparator<List<Comparable>>(0);
        }
        Collections.sort(exportJobInfos, comparator);

        return exportJobInfos;
    }

    /** Drops CANCELLED/FINISHED jobs older than Config.export_keep_max_second. */
    public void removeOldExportJobs() {
        long currentTimeMs = System.currentTimeMillis();
        writeLock();
        try {
            Iterator<Map.Entry<Long, ExportJob>> iter = idToJob.entrySet().iterator();
            while (iter.hasNext()) {
                ExportJob job = iter.next().getValue();
                if ((currentTimeMs - job.getCreateTimeMs()) / 1000 > Config.export_keep_max_second
                        && (job.getState() == ExportJob.JobState.CANCELLED
                                || job.getState() == ExportJob.JobState.FINISHED)) {
                    iter.remove();
                }
            }
        } finally {
            writeUnlock();
        }
    }

    /** Edit-log replay: re-registers a job created on the master. */
    public void replayCreateExportJob(ExportJob job) {
        writeLock();
        try {
            unprotectAddJob(job);
        } finally {
            writeUnlock();
        }
    }

    /** Edit-log replay: applies a state change recorded on the master. */
    public void replayUpdateJobState(long jobId, ExportJob.JobState newState) {
        writeLock();
        try {
            ExportJob job = idToJob.get(jobId);
            if (job == null) {
                // BUGFIX: previously this threw an NPE when the job id was
                // unknown, aborting edit-log replay.
                LOG.warn("export job {} not found when replaying state change to {}", jobId, newState);
                return;
            }
            job.updateState(newState, true);
        } finally {
            writeUnlock();
        }
    }

    /** Counts jobs of the given state in the given database. */
    public Integer getJobNum(ExportJob.JobState state, long dbId) {
        int size = 0;
        readLock();
        try {
            for (ExportJob job : idToJob.values()) {
                if (job.getState() == state && job.getDbId() == dbId) {
                    ++size;
                }
            }
        } finally {
            readUnlock();
        }
        return size;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.solr.core;
import org.apache.lucene.util.Version;
import org.apache.solr.common.SolrException;
import org.apache.solr.util.DOMUtil;
import org.apache.solr.util.SystemIdResolver;
import org.apache.solr.common.util.XMLErrorLogger;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.apache.commons.io.IOUtils;
import javax.xml.namespace.QName;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMResult;
import javax.xml.transform.dom.DOMSource;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import java.util.SortedSet;
import java.util.TreeMap;
import java.util.TreeSet;
import java.util.concurrent.atomic.AtomicBoolean;
/**
*
*/
public class Config {
// Shared logging and XML-error reporting for all Config instances.
public static final Logger log = LoggerFactory.getLogger(Config.class);
private static final XMLErrorLogger xmllog = new XMLErrorLogger(log);

// Shared factory; fresh XPath objects are created per query (see getXPath/evaluate).
static final XPathFactory xpathFactory = XPathFactory.newInstance();

private final Document doc;       // parsed config (property-substituted when requested)
private final Document origDoc;   // with unsubstituted properties
private final String prefix;      // optional xpath prefix, normalized to end with '/'
private final String name;        // resource name, used for logging/error reporting
private final SolrResourceLoader loader;
/**
 * Builds a config from a resource name with no xpath prefix; the resource is
 * opened through the given loader (see the five-argument constructor).
 */
public Config(SolrResourceLoader loader, String name) throws ParserConfigurationException, IOException, SAXException
{
    this( loader, name, null, null );
}
/**
 * Builds a config with property substitution enabled (delegates to the main
 * constructor with {@code subProps == true}).
 */
public Config(SolrResourceLoader loader, String name, InputSource is, String prefix) throws ParserConfigurationException, IOException, SAXException
{
    this(loader, name, is, prefix, true);
}
/**
 * Builds a config:
 * <p>
 * Note that the 'name' parameter is used to obtain a valid input stream if no valid one is provided through 'is'.
 * If no valid stream is provided, a valid SolrResourceLoader instance should be provided through 'loader' so
 * the resource can be opened (@see SolrResourceLoader#openResource); if no SolrResourceLoader instance is provided, a default one
 * will be created.
 * </p>
 * <p>
 * Consider passing a non-null 'name' parameter in all use-cases since it is used for logging &amp; exception reporting.
 * </p>
 * @param loader the resource loader used to obtain an input stream if 'is' is null
 * @param name the resource name used if the input stream 'is' is null
 * @param is the resource as a SAX InputSource
 * @param prefix an optional prefix that will be prepended to all non-absolute xpath expressions
 * @param subProps whether to substitute core properties into the parsed document
 */
public Config(SolrResourceLoader loader, String name, InputSource is, String prefix, boolean subProps) throws ParserConfigurationException, IOException, SAXException
{
    if( loader == null ) {
        loader = new SolrResourceLoader( null );
    }
    this.loader = loader;
    this.name = name;
    // normalize the prefix so it always ends with '/'
    this.prefix = (prefix != null && !prefix.endsWith("/"))? prefix + '/' : prefix;
    try {
        javax.xml.parsers.DocumentBuilderFactory dbf = DocumentBuilderFactory.newInstance();
        // NOTE(review): no explicit XXE hardening (DTD/external-entity features)
        // is configured here; entity resolution goes through SystemIdResolver —
        // confirm inputs are trusted.
        if (is == null) {
            is = new InputSource(loader.openConfig(name));
            is.setSystemId(SystemIdResolver.createSystemIdFromResourceName(name));
        }

        // only enable xinclude, if a SystemId is available
        if (is.getSystemId() != null) {
            try {
                dbf.setXIncludeAware(true);
                dbf.setNamespaceAware(true);
            } catch(UnsupportedOperationException e) {
                log.warn(name + " XML parser doesn't support XInclude option");
            }
        }

        final DocumentBuilder db = dbf.newDocumentBuilder();
        db.setEntityResolver(new SystemIdResolver(loader));
        db.setErrorHandler(xmllog);
        try {
            doc = db.parse(is);
            // keep an unsubstituted deep copy for getUnsubstitutedNode()
            origDoc = copyDoc(doc);
        } finally {
            // some XML parsers are broken and don't close the byte stream (but they should according to spec)
            IOUtils.closeQuietly(is.getByteStream());
        }
        if (subProps) {
            DOMUtil.substituteProperties(doc, loader.getCoreProperties());
        }
    } catch (ParserConfigurationException e) {
        SolrException.log(log, "Exception during parsing file: " + name, e);
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    } catch (SAXException e) {
        SolrException.log(log, "Exception during parsing file: " + name, e);
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    } catch (TransformerException e) {
        SolrException.log(log, "Exception during parsing file: " + name, e);
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
    }
}
public Config(SolrResourceLoader loader, String name, Document doc) {
this.prefix = null;
this.doc = doc;
try {
this.origDoc = copyDoc(doc);
} catch (TransformerException e) {
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, e);
}
this.name = name;
this.loader = loader;
}
/** Deep-copies a DOM document via an identity transform. */
private static Document copyDoc(Document doc) throws TransformerException {
    final Transformer identity = TransformerFactory.newInstance().newTransformer();
    final DOMResult result = new DOMResult();
    identity.transform(new DOMSource(doc), result);
    return (Document) result.getNode();
}
/**
 * The loader this config was constructed with.
 * @since solr 1.3
 */
public SolrResourceLoader getResourceLoader()
{
    return loader;
}

/**
 * The resource name this config was loaded from (used for logging).
 * @since solr 1.3
 */
public String getResourceName() {
    return name;
}

/** Same value as {@link #getResourceName()}. */
public String getName() {
    return name;
}

/** The parsed (and possibly property-substituted) DOM document. */
public Document getDocument() {
    return doc;
}

/** A fresh XPath instance per call; instances are not shared between callers. */
public XPath getXPath() {
    return xpathFactory.newXPath();
}
/** Prepends the configured prefix to relative xpath expressions. */
private String normalize(String path) {
    if (prefix == null || path.startsWith("/")) {
        return path;
    }
    return prefix + path;
}
/** Applies core property substitution to the live document in place. */
public void substituteProperties() {
    DOMUtil.substituteProperties(doc, loader.getCoreProperties());
}
/**
 * Evaluates the (prefix-normalized) xpath against the live document and
 * returns the result as the requested type.
 *
 * @throws SolrException when the xpath expression is invalid
 */
public Object evaluate(String path, QName type) {
    final XPath xp = xpathFactory.newXPath();
    // TODO: instead of prepending /prefix/, we could do the search rooted at /prefix...
    final String expression = normalize(path);
    try {
        return xp.evaluate(expression, doc, type);
    } catch (XPathExpressionException e) {
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + path +" for " + name,e);
    }
}
/** Looks up a single node in the (substituted) live document. */
public Node getNode(String path, boolean errifMissing) {
    return getNode(path, doc, errifMissing);
}

/** Looks up a single node in the original, property-unsubstituted document. */
public Node getUnsubstitutedNode(String path, boolean errIfMissing) {
    return getNode(path, origDoc, errIfMissing);
}
/**
 * Looks up a single node by (normalized) xpath in the given document.
 *
 * @param path xpath expression (prefix applied unless absolute)
 * @param doc document to search (live or unsubstituted original)
 * @param errIfMissing when true, a missing node is an error; otherwise null is returned
 * @return the matching node, or null when missing and errIfMissing is false
 * @throws SolrException if the xpath is invalid, more than one node matches,
 *         or the node is missing while errIfMissing is true (the internal
 *         RuntimeException is caught by the generic catch below and rewrapped)
 */
public Node getNode(String path, Document doc, boolean errIfMissing) {
    XPath xpath = xpathFactory.newXPath();
    String xstr = normalize(path);
    try {
        NodeList nodes = (NodeList)xpath.evaluate(xstr, doc,
                XPathConstants.NODESET);
        if (nodes==null || 0 == nodes.getLength() ) {
            if (errIfMissing) {
                // caught by catch (Exception e) below and surfaced as a SolrException
                throw new RuntimeException(name + " missing "+path);
            } else {
                log.debug(name + " missing optional " + path);
                return null;
            }
        }
        if ( 1 < nodes.getLength() ) {
            // ambiguous for a single-node lookup
            throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,
                    name + " contains more than one value for config path: " + path);
        }
        Node nd = nodes.item(0);
        log.trace(name + ":" + path + "=" + nd);
        return nd;
    } catch (XPathExpressionException e) {
        SolrException.log(log,"Error in xpath",e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr + " for " + name,e);
    } catch (SolrException e) {
        throw(e);
    } catch (Exception e) {
        SolrException.log(log,"Error in xpath",e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr+ " for " + name,e);
    }
}
/**
 * Evaluates the (normalized) xpath against the live document and returns all
 * matching nodes; when nothing matches, either raises (errIfMissing) or
 * returns null.
 */
public NodeList getNodeList(String path, boolean errIfMissing) {
    final String xstr = normalize(path);
    final XPath xpath = xpathFactory.newXPath();
    try {
        final NodeList nodeList = (NodeList) xpath.evaluate(xstr, doc, XPathConstants.NODESET);
        if (nodeList == null) {
            if (errIfMissing) {
                // rethrown below via the generic catch as a SolrException
                throw new RuntimeException(name + " missing "+path);
            }
            log.debug(name + " missing optional " + path);
            return null;
        }
        log.trace(name + ":" + path + "=" + nodeList);
        return nodeList;
    } catch (XPathExpressionException e) {
        SolrException.log(log,"Error in xpath",e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr + " for " + name,e);
    } catch (SolrException e) {
        throw e;
    } catch (Exception e) {
        SolrException.log(log,"Error in xpath",e);
        throw new SolrException( SolrException.ErrorCode.SERVER_ERROR,"Error in xpath:" + xstr+ " for " + name,e);
    }
}
/**
* Returns the set of attributes on the given element that are not among the given knownAttributes,
* or null if all attributes are known.
*/
public Set<String> getUnknownAttributes(Element element, String... knownAttributes) {
Set<String> knownAttributeSet = new HashSet<String>(Arrays.asList(knownAttributes));
Set<String> unknownAttributeSet = null;
NamedNodeMap attributes = element.getAttributes();
for (int i = 0 ; i < attributes.getLength() ; ++i) {
final String attributeName = attributes.item(i).getNodeName();
if ( ! knownAttributeSet.contains(attributeName)) {
if (null == unknownAttributeSet) {
unknownAttributeSet = new HashSet<String>();
}
unknownAttributeSet.add(attributeName);
}
}
return unknownAttributeSet;
}
/**
 * Logs an error and throws an exception if any of the element(s) at the given elementXpath
 * contains an attribute name that is not among knownAttributes.
 *
 * @param elementXpath xpath selecting the elements to check
 * @param knownAttributes the attribute names considered valid
 * @throws SolrException listing every offending element and attribute
 */
public void complainAboutUnknownAttributes(String elementXpath, String... knownAttributes) {
    // element name -> sorted set of its unknown attribute names
    SortedMap<String,SortedSet<String>> problems = new TreeMap<String,SortedSet<String>>();
    NodeList nodeList = getNodeList(elementXpath, false);
    for (int i = 0 ; i < nodeList.getLength() ; ++i) {
        Element element = (Element)nodeList.item(i);
        Set<String> unknownAttributes = getUnknownAttributes(element, knownAttributes);
        if (null != unknownAttributes) {
            String elementName = element.getNodeName();
            SortedSet<String> allUnknownAttributes = problems.get(elementName);
            if (null == allUnknownAttributes) {
                allUnknownAttributes = new TreeSet<String>();
                problems.put(elementName, allUnknownAttributes);
            }
            allUnknownAttributes.addAll(unknownAttributes);
        }
    }
    if (problems.size() > 0) {
        // build a message like: <elem attr="..." attr2="...">, <other attr="...">
        StringBuilder message = new StringBuilder();
        for (Map.Entry<String,SortedSet<String>> entry : problems.entrySet()) {
            if (message.length() > 0) {
                message.append(", ");
            }
            message.append('<');
            message.append(entry.getKey());
            for (String attributeName : entry.getValue()) {
                message.append(' ');
                message.append(attributeName);
                message.append("=\"...\"");
            }
            message.append('>');
        }
        message.insert(0, "Unknown attribute(s) on element(s): ");
        String msg = message.toString();
        SolrException.log(log, msg);
        throw new SolrException(SolrException.ErrorCode.SERVER_ERROR, msg);
    }
}
/**
 * Returns the text content of the node at the given path.
 *
 * @param errIfMissing when true, a missing node raises; when false, null is returned
 */
public String getVal(String path, boolean errIfMissing) {
    Node nd = getNode(path,errIfMissing);
    if (nd==null) return null;

    String txt = DOMUtil.getText(nd);

    log.debug(name + ' '+path+'='+txt);
    return txt;

    // historical alternative kept for reference:
    /******
    short typ = nd.getNodeType();
    if (typ==Node.ATTRIBUTE_NODE || typ==Node.TEXT_NODE) {
      return nd.getNodeValue();
    }
    return nd.getTextContent();
    ******/
}
/**
 * Returns the text value at the given path, delegating to
 * {@link #getVal(String, boolean)} with errIfMissing=true.
 */
public String get(String path) {
return getVal(path,true);
}
/**
 * Returns the text value at the given path, or {@code def} when the value
 * is absent or the empty string.
 */
public String get(String path, String def) {
  String found = getVal(path, false);
  return (found == null || found.length() == 0) ? def : found;
}
/** Returns the value at the given path parsed as an int; the value must be present. */
public int getInt(String path) {
  String raw = getVal(path, true);
  return Integer.parseInt(raw);
}
/** Returns the value at the given path parsed as an int, or {@code def} when absent. */
public int getInt(String path, int def) {
  String raw = getVal(path, false);
  if (raw == null) {
    return def;
  }
  return Integer.parseInt(raw);
}
/** Returns the value at the given path parsed as a boolean; the value must be present. */
public boolean getBool(String path) {
  String raw = getVal(path, true);
  return Boolean.parseBoolean(raw);
}
/** Returns the value at the given path parsed as a boolean, or {@code def} when absent. */
public boolean getBool(String path, boolean def) {
  String raw = getVal(path, false);
  if (raw == null) {
    return def;
  }
  return Boolean.parseBoolean(raw);
}
/** Returns the value at the given path parsed as a float; the value must be present. */
public float getFloat(String path) {
  String raw = getVal(path, true);
  return Float.parseFloat(raw);
}
/** Returns the value at the given path parsed as a float, or {@code def} when absent. */
public float getFloat(String path, float def) {
  String raw = getVal(path, false);
  if (raw == null) {
    return def;
  }
  return Float.parseFloat(raw);
}
/** Returns the value at the given path parsed as a double; the value must be present. */
public double getDouble(String path) {
  String raw = getVal(path, true);
  return Double.parseDouble(raw);
}
/** Returns the value at the given path parsed as a double, or {@code def} when absent. */
public double getDouble(String path, double def) {
  String raw = getVal(path, false);
  if (raw == null) {
    return def;
  }
  return Double.parseDouble(raw);
}
/** Returns the value at the given path parsed as a Lucene Version; the value must be present. */
public Version getLuceneVersion(String path) {
  String raw = getVal(path, true);
  return parseLuceneVersionString(raw);
}
/** Returns the value at the given path parsed as a Lucene Version, or {@code def} when absent. */
public Version getLuceneVersion(String path, Version def) {
  String raw = getVal(path, false);
  if (raw == null) {
    return def;
  }
  return parseLuceneVersionString(raw);
}
// Guards the LUCENE_CURRENT warning below so it is emitted at most once per JVM.
private static final AtomicBoolean versionWarningAlreadyLogged = new AtomicBoolean(false);
/**
 * Parses a luceneMatchVersion string leniently into a Lucene {@link Version}.
 * Throws a SERVER_ERROR SolrException (preserving the cause) when the string
 * is not a recognized version. Warns once per JVM if LUCENE_CURRENT is used.
 */
public static final Version parseLuceneVersionString(final String matchVersion) {
final Version version;
try {
version = Version.parseLeniently(matchVersion);
} catch (IllegalArgumentException iae) {
// Re-throw with the full list of valid values so the config error is actionable.
throw new SolrException(SolrException.ErrorCode.SERVER_ERROR,
"Invalid luceneMatchVersion '" + matchVersion +
"', valid values are: " + Arrays.toString(Version.values()) +
" or a string in format 'V.V'", iae);
}
// Short-circuit matters here: getAndSet(true) only runs (and flips the guard)
// when the parsed version is actually LUCENE_CURRENT.
if (version == Version.LUCENE_CURRENT && !versionWarningAlreadyLogged.getAndSet(true)) {
log.warn(
"You should not use LUCENE_CURRENT as luceneMatchVersion property: "+
"if you use this setting, and then Solr upgrades to a newer release of Lucene, "+
"sizable changes may happen. If precise back compatibility is important "+
"then you should instead explicitly specify an actual Lucene version."
);
}
return version;
}
/** Returns a fresh Config wrapping the original (pre-modification) document. */
public Config getOriginalConfig() {
return new Config(loader, null, origDoc);
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail;
import static org.junit.matchers.JUnitMatchers.containsString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.StringTokenizer;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.UnsupportedFileSystemException;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
import org.apache.hadoop.yarn.api.protocolrecords.GetContainerStatusesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainersRequest;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerExitStatus;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.LocalResource;
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.Token;
import org.apache.hadoop.yarn.api.records.URL;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.Event;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.security.ContainerTokenIdentifier;
import org.apache.hadoop.yarn.server.api.protocolrecords.NMContainerStatus;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor.ExitCode;
import org.apache.hadoop.yarn.server.nodemanager.DefaultContainerExecutor;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager.NMContext;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.BaseContainerManagerTest;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerEventType;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.ContainerExitEvent;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch.ShellScriptBuilder;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ContainerLocalizer;
import org.apache.hadoop.yarn.server.nodemanager.security.NMContainerTokenSecretManager;
import org.apache.hadoop.yarn.server.nodemanager.security.NMTokenSecretManagerInNM;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.nodemanager.recovery.NMNullStateStoreService;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.AuxiliaryServiceHelper;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.LinuxResourceCalculatorPlugin;
import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
public class TestContainerLaunch extends BaseContainerManagerTest {
// NM Context whose HTTP port and node id are overridden to fixed values
// (HTTP_PORT and "ahost":1234) so tests can assert against stable values.
protected Context distContext = new NMContext(new NMContainerTokenSecretManager(
conf), new NMTokenSecretManagerInNM(), null,
new ApplicationACLsManager(conf), new NMNullStateStoreService()) {
public int getHttpPort() {
return HTTP_PORT;
};
public NodeId getNodeId() {
return NodeId.newInstance("ahost", 1234);
};
};
// The superclass constructor declares UnsupportedFileSystemException; just propagate.
public TestContainerLaunch() throws UnsupportedFileSystemException {
super();
}
@Before
public void setup() throws IOException {
// Use the Linux resource calculator plugin for container monitoring in
// these tests, then run the base class setup.
conf.setClass(
YarnConfiguration.NM_CONTAINER_MON_RESOURCE_CALCULATOR,
LinuxResourceCalculatorPlugin.class, ResourceCalculatorPlugin.class);
super.setup();
}
/**
 * Verifies that a localized resource whose link name contains shell-special
 * characters can still be symlinked and executed via the generated launch
 * script, and that the script's output is produced as expected.
 */
@Test
public void testSpecialCharSymlinks() throws IOException {
  File shellFile = null;
  File tempFile = null;
  String badSymlink = Shell.WINDOWS ? "foo@zz_#!-+bar.cmd" :
    "foo@zz%_#*&!-+= bar()";
  File symLinkFile = null;
  try {
    shellFile = Shell.appendScriptExtension(tmpDir, "hello");
    tempFile = Shell.appendScriptExtension(tmpDir, "temp");
    String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" :
      "echo \"hello\"";
    PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
    FileUtil.setExecutable(shellFile, true);
    writer.println(timeoutCommand);
    writer.close();
    // Map the shell script to the awkwardly-named symlink.
    Map<Path, List<String>> resources =
      new HashMap<Path, List<String>>();
    Path path = new Path(shellFile.getAbsolutePath());
    resources.put(path, Arrays.asList(badSymlink));
    FileOutputStream fos = new FileOutputStream(tempFile);
    Map<String, String> env = new HashMap<String, String>();
    List<String> commands = new ArrayList<String>();
    if (Shell.WINDOWS) {
      commands.add("cmd");
      commands.add("/c");
      commands.add("\"" + badSymlink + "\"");
    } else {
      commands.add("/bin/sh ./\\\"" + badSymlink + "\\\"");
    }
    new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
    fos.flush();
    fos.close();
    FileUtil.setExecutable(tempFile, true);
    Shell.ShellCommandExecutor shexc
      = new Shell.ShellCommandExecutor(new String[]{tempFile.getAbsolutePath()}, tmpDir);
    shexc.execute();
    // FIX: the original used assertEquals(actual, expected) (swapped) and a
    // bare Java "assert", which is silently skipped unless the JVM runs with
    // -ea. Use JUnit assertions so the checks always run.
    assertEquals(0, shexc.getExitCode());
    Assert.assertTrue("script output should contain the echoed text",
      shexc.getOutput().contains("hello"));
    symLinkFile = new File(tmpDir, badSymlink);
  }
  finally {
    // cleanup
    if (shellFile != null
      && shellFile.exists()) {
      shellFile.delete();
    }
    if (tempFile != null
      && tempFile.exists()) {
      tempFile.delete();
    }
    if (symLinkFile != null
      && symLinkFile.exists()) {
      symLinkFile.delete();
    }
  }
}
// test the diagnostics are generated
// A resource whose source path does not exist should make the launch script
// fail, and the failure must surface a non-null diagnostics message.
@Test (timeout = 20000)
public void testInvalidSymlinkDiagnostics() throws IOException {
File shellFile = null;
File tempFile = null;
String symLink = Shell.WINDOWS ? "test.cmd" :
"test";
File symLinkFile = null;
try {
shellFile = Shell.appendScriptExtension(tmpDir, "hello");
tempFile = Shell.appendScriptExtension(tmpDir, "temp");
String timeoutCommand = Shell.WINDOWS ? "@echo \"hello\"" :
"echo \"hello\"";
PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
FileUtil.setExecutable(shellFile, true);
writer.println(timeoutCommand);
writer.close();
Map<Path, List<String>> resources =
new HashMap<Path, List<String>>();
//This is an invalid path and should throw exception because of No such file.
Path invalidPath = new Path(shellFile.getAbsolutePath()+"randomPath");
resources.put(invalidPath, Arrays.asList(symLink));
FileOutputStream fos = new FileOutputStream(tempFile);
Map<String, String> env = new HashMap<String, String>();
List<String> commands = new ArrayList<String>();
if (Shell.WINDOWS) {
commands.add("cmd");
commands.add("/c");
commands.add("\"" + symLink + "\"");
} else {
commands.add("/bin/sh ./\\\"" + symLink + "\\\"");
}
new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
fos.flush();
fos.close();
FileUtil.setExecutable(tempFile, true);
Shell.ShellCommandExecutor shexc
= new Shell.ShellCommandExecutor(new String[]{tempFile.getAbsolutePath()}, tmpDir);
String diagnostics = null;
try {
// Execution is expected to fail with a non-zero exit code.
shexc.execute();
Assert.fail("Should catch exception");
} catch(ExitCodeException e){
diagnostics = e.getMessage();
}
Assert.assertNotNull(diagnostics);
Assert.assertTrue(shexc.getExitCode() != 0);
symLinkFile = new File(tmpDir, symLink);
}
finally {
// cleanup
if (shellFile != null
&& shellFile.exists()) {
shellFile.delete();
}
if (tempFile != null
&& tempFile.exists()) {
tempFile.delete();
}
if (symLinkFile != null
&& symLinkFile.exists()) {
symLinkFile.delete();
}
}
}
// An env value containing raw newlines produces a syntactically broken launch
// script; executing it must fail with a shell "command not found"-style
// diagnostic (locale pinned to C so the message text is predictable).
@Test (timeout = 20000)
public void testInvalidEnvSyntaxDiagnostics() throws IOException {
File shellFile = null;
try {
shellFile = Shell.appendScriptExtension(tmpDir, "hello");
Map<Path, List<String>> resources =
new HashMap<Path, List<String>>();
FileOutputStream fos = new FileOutputStream(shellFile);
FileUtil.setExecutable(shellFile, true);
Map<String, String> env = new HashMap<String, String>();
// invalid env
env.put(
"APPLICATION_WORKFLOW_CONTEXT", "{\"workflowId\":\"609f91c5cd83\"," +
"\"workflowName\":\"\n\ninsert table " +
"\npartition (cd_education_status)\nselect cd_demo_sk, cd_gender, " );
List<String> commands = new ArrayList<String>();
new DefaultContainerExecutor().writeLaunchEnv(fos, env, resources, commands);
fos.flush();
fos.close();
// It is supposed that LANG is set as C.
Map<String, String> cmdEnv = new HashMap<String, String>();
cmdEnv.put("LANG", "C");
Shell.ShellCommandExecutor shexc
= new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()},
tmpDir, cmdEnv);
String diagnostics = null;
try {
shexc.execute();
Assert.fail("Should catch exception");
} catch(ExitCodeException e){
diagnostics = e.getMessage();
}
Assert.assertTrue(diagnostics.contains(Shell.WINDOWS ?
"is not recognized as an internal or external command" :
"command not found"));
Assert.assertTrue(shexc.getExitCode() != 0);
}
finally {
// cleanup
if (shellFile != null
&& shellFile.exists()) {
shellFile.delete();
}
}
}
/**
 * Verifies that cross-platform environment references and the log-dir
 * expansion marker are rewritten into the platform-native form
 * (%VAR%/";" on Windows, $VAR/":" elsewhere).
 */
@Test(timeout = 10000)
public void testEnvExpansion() throws IOException {
  Path logPath = new Path("/nm/container/logs");
  String input =
      Apps.crossPlatformify("HADOOP_HOME") + "/share/hadoop/common/*"
          + ApplicationConstants.CLASS_PATH_SEPARATOR
          + Apps.crossPlatformify("HADOOP_HOME") + "/share/hadoop/common/lib/*"
          + ApplicationConstants.CLASS_PATH_SEPARATOR
          + Apps.crossPlatformify("HADOOP_LOG_HOME")
          + ApplicationConstants.LOG_DIR_EXPANSION_VAR;
  String res = ContainerLaunch.expandEnvironment(input, logPath);
  if (Shell.WINDOWS) {
    Assert.assertEquals("%HADOOP_HOME%/share/hadoop/common/*;"
        + "%HADOOP_HOME%/share/hadoop/common/lib/*;"
        + "%HADOOP_LOG_HOME%/nm/container/logs", res);
  } else {
    Assert.assertEquals("$HADOOP_HOME/share/hadoop/common/*:"
        + "$HADOOP_HOME/share/hadoop/common/lib/*:"
        + "$HADOOP_LOG_HOME/nm/container/logs", res);
  }
  // Removed stray System.out.println(res) debug output; the assertions above
  // already pin the expected expansion.
}
// A command that writes to stdout and stderr and exits 2 must surface the
// stderr text in the diagnostics, the stdout text in the executor output,
// and the exact exit code.
@Test (timeout = 20000)
public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
File shellFile = null;
try {
shellFile = Shell.appendScriptExtension(tmpDir, "hello");
// echo "hello" to stdout and "error" to stderr and exit code with 2;
String command = Shell.WINDOWS ?
"@echo \"hello\" & @echo \"error\" 1>&2 & exit /b 2" :
"echo \"hello\"; echo \"error\" 1>&2; exit 2;";
PrintWriter writer = new PrintWriter(new FileOutputStream(shellFile));
FileUtil.setExecutable(shellFile, true);
writer.println(command);
writer.close();
Map<Path, List<String>> resources =
new HashMap<Path, List<String>>();
// Append the launch-env preamble to the same script file.
FileOutputStream fos = new FileOutputStream(shellFile, true);
Map<String, String> env = new HashMap<String, String>();
List<String> commands = new ArrayList<String>();
commands.add(command);
ContainerExecutor exec = new DefaultContainerExecutor();
exec.writeLaunchEnv(fos, env, resources, commands);
fos.flush();
fos.close();
Shell.ShellCommandExecutor shexc
= new Shell.ShellCommandExecutor(new String[]{shellFile.getAbsolutePath()}, tmpDir);
String diagnostics = null;
try {
shexc.execute();
Assert.fail("Should catch exception");
} catch(ExitCodeException e){
diagnostics = e.getMessage();
}
// test stderr
Assert.assertTrue(diagnostics.contains("error"));
// test stdout
Assert.assertTrue(shexc.getOutput().contains("hello"));
Assert.assertTrue(shexc.getExitCode() == 2);
}
finally {
// cleanup
if (shellFile != null
&& shellFile.exists()) {
shellFile.delete();
}
}
}
// Verifies the ordering of the sanitized CLASSPATH: by default the dist-cache
// jar link lands at the end; with CLASSPATH_PREPEND_DISTCACHE=true it must
// come first. Windows-only because the classpath is packed into a manifest jar.
@Test
public void testPrependDistcache() throws Exception {
// Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS);
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
// User-supplied values that sanitizeEnv is expected to override.
Map<String, String> userSetEnv = new HashMap<String, String>();
userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id");
userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST");
userSetEnv.put(Environment.NM_PORT.name(), "user_set_NM_PORT");
userSetEnv.put(Environment.NM_HTTP_PORT.name(), "user_set_NM_HTTP_PORT");
userSetEnv.put(Environment.LOCAL_DIRS.name(), "user_set_LOCAL_DIR");
userSetEnv.put(Environment.USER.key(), "user_set_" +
Environment.USER.key());
userSetEnv.put(Environment.LOGNAME.name(), "user_set_LOGNAME");
userSetEnv.put(Environment.PWD.name(), "user_set_PWD");
userSetEnv.put(Environment.HOME.name(), "user_set_HOME");
userSetEnv.put(Environment.CLASSPATH.name(), "APATH");
containerLaunchContext.setEnvironment(userSetEnv);
Container container = mock(Container.class);
when(container.getContainerId()).thenReturn(cId);
when(container.getLaunchContext()).thenReturn(containerLaunchContext);
when(container.getLocalizedResources()).thenReturn(null);
Dispatcher dispatcher = mock(Dispatcher.class);
EventHandler eventHandler = new EventHandler() {
public void handle(Event event) {
Assert.assertTrue(event instanceof ContainerExitEvent);
ContainerExitEvent exitEvent = (ContainerExitEvent) event;
Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
exitEvent.getType());
}
};
when(dispatcher.getEventHandler()).thenReturn(eventHandler);
Configuration conf = new Configuration();
ContainerLaunch launch = new ContainerLaunch(distContext, conf,
dispatcher, exec, null, container, dirsHandler, containerManager);
String testDir = System.getProperty("test.build.data",
"target/test-dir");
Path pwd = new Path(testDir);
List<Path> appDirs = new ArrayList<Path>();
List<String> containerLogs = new ArrayList<String>();
// One dist-cache jar localized under the link name "userjarlink.jar".
Map<Path, List<String>> resources = new HashMap<Path, List<String>>();
Path userjar = new Path("user.jar");
List<String> lpaths = new ArrayList<String>();
lpaths.add("userjarlink.jar");
resources.put(userjar, lpaths);
Path nmp = new Path(testDir);
// Default ordering: dist-cache entry is appended last.
launch.sanitizeEnv(
userSetEnv, pwd, appDirs, containerLogs, resources, nmp);
List<String> result =
getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
Assert.assertTrue(result.size() > 1);
Assert.assertTrue(
result.get(result.size() - 1).endsWith("userjarlink.jar"));
//Then, with user classpath first
userSetEnv.put(Environment.CLASSPATH_PREPEND_DISTCACHE.name(), "true");
cId = ContainerId.newContainerId(appAttemptId, 1);
when(container.getContainerId()).thenReturn(cId);
launch = new ContainerLaunch(distContext, conf,
dispatcher, exec, null, container, dirsHandler, containerManager);
launch.sanitizeEnv(
userSetEnv, pwd, appDirs, containerLogs, resources, nmp);
result =
getJarManifestClasspath(userSetEnv.get(Environment.CLASSPATH.name()));
Assert.assertTrue(result.size() > 1);
Assert.assertTrue(
result.get(0).endsWith("userjarlink.jar"));
}
/**
 * Reads the Class-Path attribute from the main manifest of the jar at the
 * given path and returns its whitespace-separated entries in order.
 *
 * @param path filesystem path of the jar file to inspect
 * @return the Class-Path entries of the jar's manifest
 * @throws Exception if the jar cannot be opened or read
 */
private static List<String> getJarManifestClasspath(String path)
    throws Exception {
  List<String> classpath = new ArrayList<String>();
  JarFile jarFile = new JarFile(path);
  try {
    Manifest manifest = jarFile.getManifest();
    String cps = manifest.getMainAttributes().getValue("Class-Path");
    StringTokenizer cptok = new StringTokenizer(cps);
    while (cptok.hasMoreTokens()) {
      String cpentry = cptok.nextToken();
      classpath.add(cpentry);
    }
  } finally {
    // FIX: the original never closed the JarFile, leaking a file handle on
    // every call (and keeping the jar locked on Windows).
    jarFile.close();
  }
  return classpath;
}
/**
 * Launches a real container whose script echoes the standard environment
 * variables (CONTAINER_ID, NM_HOST, NM_PORT, ...) into a file, then verifies
 * that sanitizeEnv forwarded the NM-controlled values (overriding the
 * user-supplied ones), that the launch context carries the same values, and
 * that stopping the container kills the process.
 * @throws Exception
 */
@Test (timeout = 60000)
public void testContainerEnvVariables() throws Exception {
containerManager.start();
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
// ////// Construct the Container-id
ApplicationId appId = ApplicationId.newInstance(0, 0);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
// User-supplied values that the NM is expected to override.
Map<String, String> userSetEnv = new HashMap<String, String>();
userSetEnv.put(Environment.CONTAINER_ID.name(), "user_set_container_id");
userSetEnv.put(Environment.NM_HOST.name(), "user_set_NM_HOST");
userSetEnv.put(Environment.NM_PORT.name(), "user_set_NM_PORT");
userSetEnv.put(Environment.NM_HTTP_PORT.name(), "user_set_NM_HTTP_PORT");
userSetEnv.put(Environment.LOCAL_DIRS.name(), "user_set_LOCAL_DIR");
userSetEnv.put(Environment.USER.key(), "user_set_" +
Environment.USER.key());
userSetEnv.put(Environment.LOGNAME.name(), "user_set_LOGNAME");
userSetEnv.put(Environment.PWD.name(), "user_set_PWD");
userSetEnv.put(Environment.HOME.name(), "user_set_HOME");
containerLaunchContext.setEnvironment(userSetEnv);
// Script that echoes each forwarded env var (and finally its own pid) into
// processStartFile, then sleeps so the container stays alive.
File scriptFile = Shell.appendScriptExtension(tmpDir, "scriptFile");
PrintWriter fileWriter = new PrintWriter(scriptFile);
File processStartFile =
new File(tmpDir, "env_vars.txt").getAbsoluteFile();
if (Shell.WINDOWS) {
fileWriter.println("@echo " + Environment.CONTAINER_ID.$() + "> "
+ processStartFile);
fileWriter.println("@echo " + Environment.NM_HOST.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.NM_PORT.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.NM_HTTP_PORT.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.LOCAL_DIRS.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.USER.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.LOGNAME.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.PWD.$() + ">> "
+ processStartFile);
fileWriter.println("@echo " + Environment.HOME.$() + ">> "
+ processStartFile);
for (String serviceName : containerManager.getAuxServiceMetaData()
.keySet()) {
fileWriter.println("@echo %" + AuxiliaryServiceHelper.NM_AUX_SERVICE
+ serviceName + "%>> "
+ processStartFile);
}
fileWriter.println("@echo " + cId + ">> " + processStartFile);
fileWriter.println("@ping -n 100 127.0.0.1 >nul");
} else {
fileWriter.write("\numask 0"); // So that start file is readable by the test
fileWriter.write("\necho $" + Environment.CONTAINER_ID.name() + " > "
+ processStartFile);
fileWriter.write("\necho $" + Environment.NM_HOST.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.NM_PORT.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.NM_HTTP_PORT.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.LOCAL_DIRS.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.USER.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.LOGNAME.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.PWD.name() + " >> "
+ processStartFile);
fileWriter.write("\necho $" + Environment.HOME.name() + " >> "
+ processStartFile);
for (String serviceName : containerManager.getAuxServiceMetaData()
.keySet()) {
fileWriter.write("\necho $" + AuxiliaryServiceHelper.NM_AUX_SERVICE
+ serviceName + " >> "
+ processStartFile);
}
fileWriter.write("\necho $$ >> " + processStartFile);
fileWriter.write("\nexec sleep 100");
}
fileWriter.close();
// upload the script file so that the container can run it
URL resource_alpha =
ConverterUtils.getYarnUrlFromPath(localFS
.makeQualified(new Path(scriptFile.getAbsolutePath())));
LocalResource rsrc_alpha =
recordFactory.newRecordInstance(LocalResource.class);
rsrc_alpha.setResource(resource_alpha);
rsrc_alpha.setSize(-1);
rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
rsrc_alpha.setType(LocalResourceType.FILE);
rsrc_alpha.setTimestamp(scriptFile.lastModified());
String destinationFile = "dest_file";
Map<String, LocalResource> localResources =
new HashMap<String, LocalResource>();
localResources.put(destinationFile, rsrc_alpha);
containerLaunchContext.setLocalResources(localResources);
// set up the rest of the container
List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
containerLaunchContext.setCommands(commands);
StartContainerRequest scRequest =
StartContainerRequest.newInstance(containerLaunchContext,
createContainerToken(cId, Priority.newInstance(0), 0));
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
containerManager.startContainers(allRequests);
// Poll (up to ~20s) for the script to create its start file.
int timeoutSecs = 0;
while (!processStartFile.exists() && timeoutSecs++ < 20) {
Thread.sleep(1000);
LOG.info("Waiting for process start-file to be created");
}
Assert.assertTrue("ProcessStartFile doesn't exist!",
processStartFile.exists());
// Now verify the contents of the file
List<String> localDirs = dirsHandler.getLocalDirs();
List<String> logDirs = dirsHandler.getLogDirs();
List<Path> appDirs = new ArrayList<Path>(localDirs.size());
for (String localDir : localDirs) {
Path usersdir = new Path(localDir, ContainerLocalizer.USERCACHE);
Path userdir = new Path(usersdir, user);
Path appsdir = new Path(userdir, ContainerLocalizer.APPCACHE);
appDirs.add(new Path(appsdir, appId.toString()));
}
List<String> containerLogDirs = new ArrayList<String>();
String relativeContainerLogDir = ContainerLaunch
.getRelativeContainerLogDir(appId.toString(), cId.toString());
for(String logDir : logDirs){
containerLogDirs.add(logDir + Path.SEPARATOR + relativeContainerLogDir);
}
// NOTE(review): this reader is never closed; consider a try/finally.
// Lines below are read back in the exact order the script wrote them.
BufferedReader reader =
new BufferedReader(new FileReader(processStartFile));
Assert.assertEquals(cId.toString(), reader.readLine());
Assert.assertEquals(context.getNodeId().getHost(), reader.readLine());
Assert.assertEquals(String.valueOf(context.getNodeId().getPort()),
reader.readLine());
Assert.assertEquals(String.valueOf(HTTP_PORT), reader.readLine());
Assert.assertEquals(StringUtils.join(",", appDirs), reader.readLine());
Assert.assertEquals(user, reader.readLine());
Assert.assertEquals(user, reader.readLine());
String obtainedPWD = reader.readLine();
boolean found = false;
for (Path localDir : appDirs) {
if (new Path(localDir, cId.toString()).toString().equals(obtainedPWD)) {
found = true;
break;
}
}
Assert.assertTrue("Wrong local-dir found : " + obtainedPWD, found);
Assert.assertEquals(
conf.get(
YarnConfiguration.NM_USER_HOME_DIR,
YarnConfiguration.DEFAULT_NM_USER_HOME_DIR),
reader.readLine());
for (String serviceName : containerManager.getAuxServiceMetaData().keySet()) {
Assert.assertEquals(
containerManager.getAuxServiceMetaData().get(serviceName),
ByteBuffer.wrap(Base64.decodeBase64(reader.readLine().getBytes())));
}
// The sanitized launch context itself must also carry the NM-set values.
Assert.assertEquals(cId.toString(), containerLaunchContext
.getEnvironment().get(Environment.CONTAINER_ID.name()));
Assert.assertEquals(context.getNodeId().getHost(), containerLaunchContext
.getEnvironment().get(Environment.NM_HOST.name()));
Assert.assertEquals(String.valueOf(context.getNodeId().getPort()),
containerLaunchContext.getEnvironment().get(Environment.NM_PORT.name()));
Assert.assertEquals(String.valueOf(HTTP_PORT), containerLaunchContext
.getEnvironment().get(Environment.NM_HTTP_PORT.name()));
Assert.assertEquals(StringUtils.join(",", appDirs), containerLaunchContext
.getEnvironment().get(Environment.LOCAL_DIRS.name()));
Assert.assertEquals(StringUtils.join(",", containerLogDirs),
containerLaunchContext.getEnvironment().get(Environment.LOG_DIRS.name()));
Assert.assertEquals(user, containerLaunchContext.getEnvironment()
.get(Environment.USER.name()));
Assert.assertEquals(user, containerLaunchContext.getEnvironment()
.get(Environment.LOGNAME.name()));
found = false;
obtainedPWD =
containerLaunchContext.getEnvironment().get(Environment.PWD.name());
for (Path localDir : appDirs) {
if (new Path(localDir, cId.toString()).toString().equals(obtainedPWD)) {
found = true;
break;
}
}
Assert.assertTrue("Wrong local-dir found : " + obtainedPWD, found);
Assert.assertEquals(
conf.get(
YarnConfiguration.NM_USER_HOME_DIR,
YarnConfiguration.DEFAULT_NM_USER_HOME_DIR),
containerLaunchContext.getEnvironment()
.get(Environment.HOME.name()));
// Get the pid of the process
String pid = reader.readLine().trim();
// No more lines
Assert.assertEquals(null, reader.readLine());
// Now test the stop functionality.
// Assert that the process is alive
Assert.assertTrue("Process is not alive!",
DefaultContainerExecutor.containerIsAlive(pid));
// Once more
Assert.assertTrue("Process is not alive!",
DefaultContainerExecutor.containerIsAlive(pid));
// Now test the stop functionality.
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(cId);
StopContainersRequest stopRequest =
StopContainersRequest.newInstance(containerIds);
containerManager.stopContainers(stopRequest);
BaseContainerManagerTest.waitForContainerState(containerManager, cId,
ContainerState.COMPLETE);
GetContainerStatusesRequest gcsRequest =
GetContainerStatusesRequest.newInstance(containerIds);
ContainerStatus containerStatus =
containerManager.getContainerStatuses(gcsRequest).getContainerStatuses().get(0);
int expectedExitCode = ContainerExitStatus.KILLED_BY_APPMASTER;
Assert.assertEquals(expectedExitCode, containerStatus.getExitStatus());
// Assert that the process is not alive anymore
Assert.assertFalse("Process is still alive!",
DefaultContainerExecutor.containerIsAlive(pid));
}
// Round-trip: aux-service data written into an env map must be readable back
// unchanged through the same helper.
@Test (timeout = 5000)
public void testAuxiliaryServiceHelper() throws Exception {
  String serviceName = "testAuxiliaryService";
  ByteBuffer payload = ByteBuffer.wrap("testAuxiliaryService".getBytes());
  Map<String, String> env = new HashMap<String, String>();
  AuxiliaryServiceHelper.setServiceDataIntoEnv(serviceName, payload, env);
  ByteBuffer roundTripped =
      AuxiliaryServiceHelper.getServiceDataFromEnv(serviceName, env);
  Assert.assertEquals(payload, roundTripped);
}
private void internalKillTest(boolean delayed) throws Exception {
conf.setLong(YarnConfiguration.NM_SLEEP_DELAY_BEFORE_SIGKILL_MS,
delayed ? 1000 : 0);
containerManager.start();
// ////// Construct the Container-id
ApplicationId appId = ApplicationId.newInstance(1, 1);
ApplicationAttemptId appAttemptId =
ApplicationAttemptId.newInstance(appId, 1);
ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
File processStartFile =
new File(tmpDir, "pid.txt").getAbsoluteFile();
// setup a script that can handle sigterm gracefully
File scriptFile = Shell.appendScriptExtension(tmpDir, "testscript");
PrintWriter writer = new PrintWriter(new FileOutputStream(scriptFile));
if (Shell.WINDOWS) {
writer.println("@echo \"Running testscript for delayed kill\"");
writer.println("@echo \"Writing pid to start file\"");
writer.println("@echo " + cId + "> " + processStartFile);
writer.println("@ping -n 100 127.0.0.1 >nul");
} else {
writer.println("#!/bin/bash\n\n");
writer.println("echo \"Running testscript for delayed kill\"");
writer.println("hello=\"Got SIGTERM\"");
writer.println("umask 0");
writer.println("trap \"echo $hello >> " + processStartFile + "\" SIGTERM");
writer.println("echo \"Writing pid to start file\"");
writer.println("echo $$ >> " + processStartFile);
writer.println("while true; do\nsleep 1s;\ndone");
}
writer.close();
FileUtil.setExecutable(scriptFile, true);
ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
// upload the script file so that the container can run it
URL resource_alpha =
ConverterUtils.getYarnUrlFromPath(localFS
.makeQualified(new Path(scriptFile.getAbsolutePath())));
LocalResource rsrc_alpha =
recordFactory.newRecordInstance(LocalResource.class);
rsrc_alpha.setResource(resource_alpha);
rsrc_alpha.setSize(-1);
rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
rsrc_alpha.setType(LocalResourceType.FILE);
rsrc_alpha.setTimestamp(scriptFile.lastModified());
String destinationFile = "dest_file.sh";
Map<String, LocalResource> localResources =
new HashMap<String, LocalResource>();
localResources.put(destinationFile, rsrc_alpha);
containerLaunchContext.setLocalResources(localResources);
// set up the rest of the container
List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
containerLaunchContext.setCommands(commands);
Priority priority = Priority.newInstance(10);
long createTime = 1234;
Token containerToken = createContainerToken(cId, priority, createTime);
StartContainerRequest scRequest =
StartContainerRequest.newInstance(containerLaunchContext,
containerToken);
List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
list.add(scRequest);
StartContainersRequest allRequests =
StartContainersRequest.newInstance(list);
containerManager.startContainers(allRequests);
int timeoutSecs = 0;
while (!processStartFile.exists() && timeoutSecs++ < 20) {
Thread.sleep(1000);
LOG.info("Waiting for process start-file to be created");
}
Assert.assertTrue("ProcessStartFile doesn't exist!",
processStartFile.exists());
NMContainerStatus nmContainerStatus =
containerManager.getContext().getContainers().get(cId)
.getNMContainerStatus();
Assert.assertEquals(priority, nmContainerStatus.getPriority());
// Now test the stop functionality.
List<ContainerId> containerIds = new ArrayList<ContainerId>();
containerIds.add(cId);
StopContainersRequest stopRequest =
StopContainersRequest.newInstance(containerIds);
containerManager.stopContainers(stopRequest);
BaseContainerManagerTest.waitForContainerState(containerManager, cId,
ContainerState.COMPLETE);
// if delayed container stop sends a sigterm followed by a sigkill
// otherwise sigkill is sent immediately
GetContainerStatusesRequest gcsRequest =
GetContainerStatusesRequest.newInstance(containerIds);
ContainerStatus containerStatus =
containerManager.getContainerStatuses(gcsRequest)
.getContainerStatuses().get(0);
Assert.assertEquals(ContainerExitStatus.KILLED_BY_APPMASTER,
containerStatus.getExitStatus());
// Now verify the contents of the file. Script generates a message when it
// receives a sigterm so we look for that. We cannot perform this check on
// Windows, because the process is not notified when killed by winutils.
// There is no way for the process to trap and respond. Instead, we can
// verify that the job object with ID matching container ID no longer exists.
if (Shell.WINDOWS || !delayed) {
Assert.assertFalse("Process is still alive!",
DefaultContainerExecutor.containerIsAlive(cId.toString()));
} else {
BufferedReader reader =
new BufferedReader(new FileReader(processStartFile));
boolean foundSigTermMessage = false;
while (true) {
String line = reader.readLine();
if (line == null) {
break;
}
if (line.contains("SIGTERM")) {
foundSigTermMessage = true;
break;
}
}
Assert.assertTrue("Did not find sigterm message", foundSigTermMessage);
reader.close();
}
}
  /** Delayed stop: per internalKillTest, the container is first sent SIGTERM,
   *  followed by SIGKILL after the grace period. */
  @Test (timeout = 30000)
  public void testDelayedKill() throws Exception {
    internalKillTest(true);
  }
  /** Immediate stop: per internalKillTest, the container is sent SIGKILL
   *  right away with no SIGTERM grace period. */
  @Test (timeout = 30000)
  public void testImmediateKill() throws Exception {
    internalKillTest(false);
  }
  @SuppressWarnings("rawtypes")
  @Test (timeout = 10000)
  public void testCallFailureWithNullLocalizedResources() {
    // A container whose launch context reports null localized resources:
    // ContainerLaunch.call() must fail and report the failure as an event
    // rather than propagate an NPE.
    Container container = mock(Container.class);
    when(container.getContainerId()).thenReturn(ContainerId.newContainerId(
        ApplicationAttemptId.newInstance(ApplicationId.newInstance(
            System.currentTimeMillis(), 1), 1), 1));
    ContainerLaunchContext clc = mock(ContainerLaunchContext.class);
    when(clc.getCommands()).thenReturn(Collections.<String>emptyList());
    when(container.getLaunchContext()).thenReturn(clc);
    when(container.getLocalizedResources()).thenReturn(null);
    Dispatcher dispatcher = mock(Dispatcher.class);
    // Handler asserts the failure surfaces as CONTAINER_EXITED_WITH_FAILURE.
    EventHandler eventHandler = new EventHandler() {
      public void handle(Event event) {
        Assert.assertTrue(event instanceof ContainerExitEvent);
        ContainerExitEvent exitEvent = (ContainerExitEvent) event;
        Assert.assertEquals(ContainerEventType.CONTAINER_EXITED_WITH_FAILURE,
            exitEvent.getType());
      }
    };
    when(dispatcher.getEventHandler()).thenReturn(eventHandler);
    ContainerLaunch launch = new ContainerLaunch(context, new Configuration(),
        dispatcher, exec, null, container, dirsHandler, containerManager);
    launch.call();
  }
protected Token createContainerToken(ContainerId cId, Priority priority,
long createTime) throws InvalidToken {
Resource r = BuilderUtils.newResource(1024, 1);
ContainerTokenIdentifier containerTokenIdentifier =
new ContainerTokenIdentifier(cId, context.getNodeId().toString(), user,
r, System.currentTimeMillis() + 10000L, 123, DUMMY_RM_IDENTIFIER,
priority, createTime);
Token containerToken =
BuilderUtils.newContainerToken(
context.getNodeId(),
context.getContainerTokenSecretManager().retrievePassword(
containerTokenIdentifier), containerTokenIdentifier);
return containerToken;
}
/**
* Test that script exists with non-zero exit code when command fails.
* @throws IOException
*/
@Test (timeout = 10000)
public void testShellScriptBuilderNonZeroExitCode() throws IOException {
ShellScriptBuilder builder = ShellScriptBuilder.create();
builder.command(Arrays.asList(new String[] {"unknownCommand"}));
File shellFile = Shell.appendScriptExtension(tmpDir, "testShellScriptBuilderError");
PrintStream writer = new PrintStream(new FileOutputStream(shellFile));
builder.write(writer);
writer.close();
try {
FileUtil.setExecutable(shellFile, true);
Shell.ShellCommandExecutor shexc = new Shell.ShellCommandExecutor(
new String[]{shellFile.getAbsolutePath()}, tmpDir);
try {
shexc.execute();
fail("builder shell command was expected to throw");
}
catch(IOException e) {
// expected
System.out.println("Received an expected exception: " + e.getMessage());
}
}
finally {
FileUtil.fullyDelete(shellFile);
}
}
  /** Prefix of the error message ShellScriptBuilder raises when a generated
   *  command line exceeds the Windows shell length limit. */
  private static final String expectedMessage = "The command line has a length of";
  @Test (timeout = 10000)
  public void testWindowsShellScriptBuilderCommand() throws IOException {
    String callCmd = "@call ";
    // Test is only relevant on Windows
    Assume.assumeTrue(Shell.WINDOWS);
    // The tests are built on assuming 8191 max command line length
    assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGHT);
    ShellScriptBuilder builder = ShellScriptBuilder.create();
    // Basic tests: less length, exact length, max+1 length
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("A", 1024)));
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat(
            "E", Shell.WINDOWS_MAX_SHELL_LENGHT - callCmd.length())));
    try {
      // one character over the limit must be rejected
      builder.command(Arrays.asList(
          org.apache.commons.lang.StringUtils.repeat(
              "X", Shell.WINDOWS_MAX_SHELL_LENGHT -callCmd.length() + 1)));
      fail("longCommand was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }
    // Composite tests, from parts: less, exact and +
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("A", 1024),
        org.apache.commons.lang.StringUtils.repeat("A", 1024),
        org.apache.commons.lang.StringUtils.repeat("A", 1024)));
    // builder.command joins the command parts with an extra space
    builder.command(Arrays.asList(
        org.apache.commons.lang.StringUtils.repeat("E", 4095),
        org.apache.commons.lang.StringUtils.repeat("E", 2047),
        org.apache.commons.lang.StringUtils.repeat("E", 2047 - callCmd.length())));
    try {
      // joined parts one character over the limit must also be rejected
      builder.command(Arrays.asList(
          org.apache.commons.lang.StringUtils.repeat("X", 4095),
          org.apache.commons.lang.StringUtils.repeat("X", 2047),
          org.apache.commons.lang.StringUtils.repeat("X", 2048 - callCmd.length())));
      fail("long commands was expected to throw");
    } catch(IOException e) {
      assertThat(e.getMessage(), containsString(expectedMessage));
    }
  }
@Test (timeout = 10000)
public void testWindowsShellScriptBuilderEnv() throws IOException {
// Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS);
// The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGHT);
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test env
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat("A", 1024));
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
"A", Shell.WINDOWS_MAX_SHELL_LENGHT - ("@set somekey=").length()));
try {
builder.env("somekey", org.apache.commons.lang.StringUtils.repeat(
"A", Shell.WINDOWS_MAX_SHELL_LENGHT - ("@set somekey=").length()) + 1);
fail("long env was expected to throw");
} catch(IOException e) {
assertThat(e.getMessage(), containsString(expectedMessage));
}
}
@Test (timeout = 10000)
public void testWindowsShellScriptBuilderMkdir() throws IOException {
String mkDirCmd = "@if not exist \"\" mkdir \"\"";
// Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS);
// The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGHT);
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test mkdir
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)));
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat(
"E", (Shell.WINDOWS_MAX_SHELL_LENGHT - mkDirCmd.length())/2)));
try {
builder.mkdir(new Path(org.apache.commons.lang.StringUtils.repeat(
"X", (Shell.WINDOWS_MAX_SHELL_LENGHT - mkDirCmd.length())/2 +1)));
fail("long mkdir was expected to throw");
} catch(IOException e) {
assertThat(e.getMessage(), containsString(expectedMessage));
}
}
@Test (timeout = 10000)
public void testWindowsShellScriptBuilderLink() throws IOException {
// Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS);
String linkCmd = "@" +Shell.WINUTILS + " symlink \"\" \"\"";
// The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGHT);
ShellScriptBuilder builder = ShellScriptBuilder.create();
// test link
builder.link(new Path(org.apache.commons.lang.StringUtils.repeat("A", 1024)),
new Path(org.apache.commons.lang.StringUtils.repeat("B", 1024)));
builder.link(
new Path(org.apache.commons.lang.StringUtils.repeat(
"E", (Shell.WINDOWS_MAX_SHELL_LENGHT - linkCmd.length())/2)),
new Path(org.apache.commons.lang.StringUtils.repeat(
"F", (Shell.WINDOWS_MAX_SHELL_LENGHT - linkCmd.length())/2)));
try {
builder.link(
new Path(org.apache.commons.lang.StringUtils.repeat(
"X", (Shell.WINDOWS_MAX_SHELL_LENGHT - linkCmd.length())/2 + 1)),
new Path(org.apache.commons.lang.StringUtils.repeat(
"Y", (Shell.WINDOWS_MAX_SHELL_LENGHT - linkCmd.length())/2) + 1));
fail("long link was expected to throw");
} catch(IOException e) {
assertThat(e.getMessage(), containsString(expectedMessage));
}
}
  @Test
  public void testKillProcessGroup() throws Exception {
    // Needs setsid so the container is launched in its own session; the test
    // then kills only the parent pid and checks the whole group completes.
    Assume.assumeTrue(Shell.isSetsidAvailable);
    containerManager.start();
    // Construct the Container-id
    ApplicationId appId = ApplicationId.newInstance(2, 2);
    ApplicationAttemptId appAttemptId =
        ApplicationAttemptId.newInstance(appId, 1);
    ContainerId cId = ContainerId.newContainerId(appAttemptId, 0);
    // Files the script uses to publish its own pid and its child's pid.
    File processStartFile =
        new File(tmpDir, "pid.txt").getAbsoluteFile();
    File childProcessStartFile =
        new File(tmpDir, "child_pid.txt").getAbsoluteFile();
    // setup a script that can handle sigterm gracefully
    File scriptFile = Shell.appendScriptExtension(tmpDir, "testscript");
    PrintWriter writer = new PrintWriter(new FileOutputStream(scriptFile));
    writer.println("#!/bin/bash\n\n");
    writer.println("echo \"Running testscript for forked process\"");
    writer.println("umask 0");
    // parent records $$, forks a background sleep loop, records the child's
    // pid ($!), then blocks forever in the foreground
    writer.println("echo $$ >> " + processStartFile);
    writer.println("while true;\ndo sleep 1s;\ndone > /dev/null 2>&1 &");
    writer.println("echo $! >> " + childProcessStartFile);
    writer.println("while true;\ndo sleep 1s;\ndone");
    writer.close();
    FileUtil.setExecutable(scriptFile, true);
    ContainerLaunchContext containerLaunchContext =
        recordFactory.newRecordInstance(ContainerLaunchContext.class);
    // upload the script file so that the container can run it
    URL resource_alpha =
        ConverterUtils.getYarnUrlFromPath(localFS
            .makeQualified(new Path(scriptFile.getAbsolutePath())));
    LocalResource rsrc_alpha =
        recordFactory.newRecordInstance(LocalResource.class);
    rsrc_alpha.setResource(resource_alpha);
    rsrc_alpha.setSize(-1);
    rsrc_alpha.setVisibility(LocalResourceVisibility.APPLICATION);
    rsrc_alpha.setType(LocalResourceType.FILE);
    rsrc_alpha.setTimestamp(scriptFile.lastModified());
    String destinationFile = "dest_file.sh";
    Map<String, LocalResource> localResources =
        new HashMap<String, LocalResource>();
    localResources.put(destinationFile, rsrc_alpha);
    containerLaunchContext.setLocalResources(localResources);
    // set up the rest of the container
    List<String> commands = Arrays.asList(Shell.getRunScriptCommand(scriptFile));
    containerLaunchContext.setCommands(commands);
    Priority priority = Priority.newInstance(10);
    long createTime = 1234;
    Token containerToken = createContainerToken(cId, priority, createTime);
    StartContainerRequest scRequest =
        StartContainerRequest.newInstance(containerLaunchContext,
            containerToken);
    List<StartContainerRequest> list = new ArrayList<StartContainerRequest>();
    list.add(scRequest);
    StartContainersRequest allRequests =
        StartContainersRequest.newInstance(list);
    containerManager.startContainers(allRequests);
    // poll up to ~20s for the script to report its pid
    int timeoutSecs = 0;
    while (!processStartFile.exists() && timeoutSecs++ < 20) {
      Thread.sleep(1000);
      LOG.info("Waiting for process start-file to be created");
    }
    Assert.assertTrue("ProcessStartFile doesn't exist!",
        processStartFile.exists());
    BufferedReader reader =
        new BufferedReader(new FileReader(processStartFile));
    // Get the pid of the process
    String pid = reader.readLine().trim();
    // No more lines
    Assert.assertEquals(null, reader.readLine());
    reader.close();
    reader =
        new BufferedReader(new FileReader(childProcessStartFile));
    // Get the pid of the child process
    String child = reader.readLine().trim();
    // No more lines
    Assert.assertEquals(null, reader.readLine());
    reader.close();
    // kill only the parent; group cleanup should take down the child too
    LOG.info("Manually killing pid " + pid + ", but not child pid " + child);
    Shell.execCommand(new String[]{"kill", "-9", pid});
    BaseContainerManagerTest.waitForContainerState(containerManager, cId,
        ContainerState.COMPLETE);
    Assert.assertFalse("Process is still alive!",
        DefaultContainerExecutor.containerIsAlive(pid));
    List<ContainerId> containerIds = new ArrayList<ContainerId>();
    containerIds.add(cId);
    GetContainerStatusesRequest gcsRequest =
        GetContainerStatusesRequest.newInstance(containerIds);
    ContainerStatus containerStatus =
        containerManager.getContainerStatuses(gcsRequest)
            .getContainerStatuses().get(0);
    // externally killed container must be reported as FORCE_KILLED
    Assert.assertEquals(ExitCode.FORCE_KILLED.getExitCode(),
        containerStatus.getExitStatus());
  }
}
| |
/* Copyright (c) 2015 Mark Christopher Lauman
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
package ca.marklauman.tools;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;
import android.os.Build;
import android.support.v4.content.ContextCompat;
import android.util.TypedValue;
import android.widget.TextView;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;
@SuppressWarnings({"SameParameterValue", "WeakerAccess", "unused"})
public abstract class Utils {

    /** Retrieve an array of resource ids from the xml of the provided {@link Context}.
     * These resources are placed in an application's xml files as an array containing
     * individual resource ids (not an integer-array or a string-array, just an array).
     * @param c The {@code Context} to search for the array.
     * @param resourceId The resource id of an {@code <array>} containing a list of resources.
     * @return The resource ids of all the drawables in the array, in the order in which
     * they appear in the xml. Returns null if the array does not exist. */
    public static int[] getResourceArray(Context c, int resourceId) {
        TypedArray ta = c.getResources()
                         .obtainTypedArray(resourceId);
        if(ta == null) return null;
        int[] res = new int[ta.length()];
        try {
            for(int i=0; i<ta.length(); i++)
                res[i] = ta.getResourceId(i, 0);
        } finally {
            // Always return the TypedArray to the pool, even on failure
            // (consistent with getDrawableArray below).
            ta.recycle();
        }
        return res;
    }

    /** Retrieve an array of drawable resources from the xml of the provided {@link Context}.
     * These resources are placed in an application's xml files as an array containing
     * individual resource ids (not an integer-array or a string-array, just an array).
     * @param c The {@code Context} to search for the array.
     * @param resourceId The resource id of an {@code <array>} containing a list of resources.
     * @return The drawables in the array, in the order in which they appear in the xml.
     * Returns null if the array does not exist. */
    public static Drawable[] getDrawableArray(Context c, int resourceId) {
        TypedArray ta = c.getResources()
                         .obtainTypedArray(resourceId);
        if(ta == null) return null;
        Drawable[] res = new Drawable[ta.length()];
        try {
            for(int i=0; i<ta.length(); i++)
                res[i] = ContextCompat.getDrawable(c, ta.getResourceId(i, 0));
        } finally {
            ta.recycle();
        }
        return res;
    }

    /** Round a number to a given number of places
     * @param number The number to round
     * @param places The number of decimal places to keep
     * @return The rounded number */
    public static float round(float number, int places) {
        return (float)round((double)number, places);
    }

    /** Round a number to a given number of places
     * @param number The number to round
     * @param places The number of decimal places to keep
     * @return The rounded number */
    public static double round(double number, int places) {
        // shift the decimal point, round to an integer, shift back
        double trans = Math.pow(10, places);
        return Math.round(trans * number) / trans;
    }

    /** Join all the values together into one string.
     * @param separator The string used to separate the individual values.
     * @param values The values to place into the string. Values will be
     *               converted by their class' toString() method.
     * @return A String made of all the values joined together, with each
     *         item separated from its neighbours by the separator.
     *         The separator will not appear at the beginning or end
     *         of the result. */
    public static <T> String join(String separator, T[] values) {
        // sanitize inputs ("" + null renders as "null", preserved here)
        if(values == null) return "null";
        if(separator == null) separator = "null";
        // StringBuilder avoids the O(n^2) cost of += in a loop
        StringBuilder res = new StringBuilder();
        boolean first = true;
        for(T val : values) {
            if(!first) res.append(separator);
            res.append(val);
            first = false;
        }
        return res.toString();
    }

    /** Join all the values together into one string.
     * @param separator The string used to separate the individual values.
     * @param values The values to place into the string. Values will be
     *               converted by their class' toString() method.
     * @return A String made of all the values joined together, with each
     *         item separated from its neighbours by the separator.
     *         The separator will not appear at the beginning or end
     *         of the result. */
    public static String join(String separator, long[] values) {
        // sanitize inputs
        if(values == null) return "null";
        if(separator == null) separator = "null";
        // StringBuilder avoids the O(n^2) cost of += in a loop
        StringBuilder res = new StringBuilder();
        boolean first = true;
        for(long val : values) {
            if(!first) res.append(separator);
            res.append(val);
            first = false;
        }
        return res.toString();
    }

    /** Join all the values together into one string.
     * @param separator The string used to separate the individual values.
     * @param values The values to place into the string.
     * @return A String made of all the values joined together, with each
     *         item separated from its neighbours by the separator.
     *         The separator will not appear at the beginning or end
     *         of the result. */
    public static String join(String separator, String... values) {
        // sanitize inputs
        if(values == null) return "null";
        if(separator == null) separator = "null";
        // StringBuilder avoids the O(n^2) cost of += in a loop
        StringBuilder res = new StringBuilder();
        boolean first = true;
        for(String val : values) {
            if(!first) res.append(separator);
            res.append(val);
            first = false;
        }
        return res.toString();
    }

    /** Join all the values together into one string.
     * @param separator The string used to separate the individual values.
     * @param values The values to place into the string. Values will be
     *               converted by their class' toString() method.
     * @return A String made of all the values joined together, with each
     *         item separated from its neighbours by the separator.
     *         The separator will not appear at the beginning or end
     *         of the result. */
    public static <T> String join(String separator, Collection<T> values) {
        // sanitize inputs
        if(values == null) return "null";
        if(separator == null) separator = "null";
        // StringBuilder avoids the O(n^2) cost of += in a loop
        StringBuilder res = new StringBuilder();
        boolean first = true;
        for(T val : values) {
            if(!first) res.append(separator);
            res.append(val);
            first = false;
        }
        return res.toString();
    }

    /** Count the occurrences of the character in the sequence.
     * @param seq The sequence of characters to loop over.
     * @param c The character to find.
     * @return The total number of times c appears in seq. */
    public static int countChar(CharSequence seq, char c) {
        if(seq == null || seq.length() < 1) return 0;
        int count = 0;
        for(int i=0; i<seq.length(); i++) {
            if(c == seq.charAt(i)) count++;
        }
        return count;
    }

    /** Provide with a dp measurement, to get a pixel measurement.
     * @param c The activity context.
     * @param dp The measure in dp
     * @return The measure in px. */
    public static int dp(Context c, int dp) {
        // +0.5f rounds to the nearest whole pixel
        return (int) (TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dp,
                c.getResources().getDisplayMetrics()) + 0.5f);
    }

    /** Copy one file into another file.
     * Both streams are closed when this method returns, even if the copy fails.
     * @param in An input stream positioned at the start of the source file.
     * @param outs An output stream positioned at the start of the destination file.
     * @throws IOException if any errors occur while copying. */
    public static void copyFile(InputStream in, OutputStream outs) throws IOException {
        try {
            byte[] buffer = new byte[1024];
            int length;
            while(0 < (length = in.read(buffer)))
                outs.write(buffer, 0, length);
            outs.flush();
        } finally {
            // close both streams even on failure so handles never leak
            try { in.close(); }
            finally { outs.close(); }
        }
    }

    /** Set the drawables on a TextView using valid support methods.
     * @param view The TextView that the drawables should be applied to.
     * @param start Resource identifier of the start Drawable.
     * @param top Resource identifier of the top Drawable.
     * @param end Resource identifier of the end Drawable.
     * @param bottom Resource identifier of the bottom Drawable. */
    public static void setDrawables(TextView view, int start, int top, int end, int bottom) {
        // relative (start/end) drawables only exist from API 17 onward
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            view.setCompoundDrawablesRelativeWithIntrinsicBounds(start, top, end, bottom);
        } else view.setCompoundDrawablesWithIntrinsicBounds(start, top, end, bottom);
    }

    /** Set the drawables on a TextView using valid support methods.
     * @param view The TextView that the drawables should be applied to.
     * @param start Resource identifier of the start Drawable.
     * @param top Resource identifier of the top Drawable.
     * @param end Resource identifier of the end Drawable.
     * @param bottom Resource identifier of the bottom Drawable. */
    public static void setDrawables(TextView view, Drawable start, Drawable top,
                                    Drawable end, Drawable bottom) {
        // relative (start/end) drawables only exist from API 17 onward
        if(Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN_MR1) {
            view.setCompoundDrawablesRelativeWithIntrinsicBounds(start, top, end, bottom);
        } else view.setCompoundDrawablesWithIntrinsicBounds(start, top, end, bottom);
    }
}
| |
/*
* Copyright (c) 2021, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.apimgt.impl.dao;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.wso2.carbon.apimgt.api.APIManagementException;
import org.wso2.carbon.apimgt.api.model.ServiceEntry;
import org.wso2.carbon.apimgt.api.model.ServiceFilterParams;
import org.wso2.carbon.apimgt.impl.APIConstants;
import org.wso2.carbon.apimgt.impl.dao.constants.SQLConstants;
import org.wso2.carbon.apimgt.impl.factory.SQLConstantManagerFactory;
import org.wso2.carbon.apimgt.impl.utils.APIMgtDBUtil;
import org.wso2.carbon.apimgt.impl.utils.APIUtil;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
* This class represent the ServiceCatalogDAO.
*/
public class ServiceCatalogDAO {
    private static final Log log = LogFactory.getLog(ServiceCatalogDAO.class);
    // Lazily created singleton instance; see getInstance().
    private static ServiceCatalogDAO INSTANCE = null;
/**
* Method to get the instance of the ServiceCatalogDAO.
*
* @return {@link ServiceCatalogDAO} instance
*/
public static ServiceCatalogDAO getInstance() {
if (INSTANCE == null) {
INSTANCE = new ServiceCatalogDAO();
}
return INSTANCE;
}
    /**
     * Log the given error and rethrow it wrapped in an APIManagementException.
     *
     * @param msg message describing the failure (logged and used as the
     *            exception message)
     * @param t   underlying cause
     * @throws APIManagementException always
     */
    private void handleException(String msg, Throwable t) throws APIManagementException {
        log.error(msg, t);
        throw new APIManagementException(msg, t);
    }
/**
* Add a new serviceCatalog
*
* @param serviceEntry ServiceCatalogInfo
* @param tenantID ID of the owner's tenant
* @param username Logged in user name
* @return serviceCatalogId
* throws APIManagementException if failed to create service catalog
*/
public String addServiceEntry(ServiceEntry serviceEntry, int tenantID, String username)
throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps = connection
.prepareStatement(SQLConstants.ServiceCatalogConstants.ADD_SERVICE)) {
try {
connection.setAutoCommit(false);
setServiceParams(ps, serviceEntry, tenantID, username);
ps.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
handleException("Failed to rollback adding endpoint information", e);
}
} catch (SQLException e) {
handleException("Failed to add service catalog of tenant "
+ APIUtil.getTenantDomainFromTenantId(tenantID), e);
}
return null;
}
/**
* Add list of services to Service Catalog
* @param services List of Services that needs to be added
* @param tenantId Tenant ID of the logged-in user
* @param username Logged-in username
* @param connection DB Connection
*
*/
private void addServices(List<ServiceEntry> services, int tenantId, String username, Connection connection)
throws SQLException {
try (PreparedStatement preparedStatement = connection
.prepareStatement(SQLConstants.ServiceCatalogConstants.ADD_SERVICE)) {
for (ServiceEntry service : services) {
setServiceParams(preparedStatement, service, tenantId, username);
preparedStatement.addBatch();
}
preparedStatement.executeBatch();
}
}
/**
* Update list of services available in Service Catalog
* @param services List of Services that needs to be updated
* @param tenantId Tenant ID of the logged-in user
* @param username Logged-in username
* @param connection DB Connection
*
*/
private void updateServices(List<ServiceEntry> services, int tenantId, String username, Connection connection)
throws SQLException {
try (PreparedStatement ps = connection.prepareStatement(SQLConstants.ServiceCatalogConstants
.UPDATE_SERVICE_BY_KEY)) {
for (ServiceEntry service: services) {
setUpdateServiceParams(ps, service, tenantId, username);
ps.addBatch();
}
ps.executeBatch();
}
}
public List<ServiceEntry> importServices(List<ServiceEntry> services, int tenantId, String username)
throws APIManagementException {
List<ServiceEntry> serviceListToAdd = new ArrayList<>();
List<ServiceEntry> serviceListToUpdate = new ArrayList<>();
for (int i = 0; i < services.size(); i++) {
ServiceEntry service = services.get(i);
String md5 = getMd5HashByKey(service.getKey(), tenantId);
if (StringUtils.isNotEmpty(md5)) {
if (!md5.equals(service.getMd5())) {
serviceListToUpdate.add(service);
}
} else {
serviceListToAdd.add(service);
}
}
try (Connection connection = APIMgtDBUtil.getConnection()) {
try {
connection.setAutoCommit(false);
addServices(serviceListToAdd, tenantId, username, connection);
updateServices(serviceListToUpdate, tenantId, username, connection);
connection.commit();
} catch (SQLException e) {
connection.rollback();
handleException("Failed to import services to service catalog of tenant " + tenantId, e);
}
} catch (SQLException e) {
handleException("Failed to import services to service catalog of tenant "
+ APIUtil.getTenantDomainFromTenantId(tenantId), e);
}
List<ServiceEntry> importedServiceList = new ArrayList<>();
importedServiceList.addAll(serviceListToAdd);
importedServiceList.addAll(serviceListToUpdate);
return importedServiceList;
}
/**
* Update an existing serviceCatalog
*
* @param serviceEntry ServiceCatalogInfo
* @param tenantID ID of the owner's tenant
* @param userName Logged in user name
* @return serviceCatalogId
* throws APIManagementException if failed to create service catalog
*/
public String updateServiceCatalog(ServiceEntry serviceEntry, int tenantID, String userName)
throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps = connection
.prepareStatement(SQLConstants.ServiceCatalogConstants.UPDATE_SERVICE_BY_KEY)) {
boolean initialAutoCommit = connection.getAutoCommit();
try {
connection.setAutoCommit(false);
setUpdateServiceParams(ps, serviceEntry, tenantID, userName);
ps.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
handleException("Failed to rollback updating endpoint information", e);
} finally {
APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit);
}
} catch (SQLException e) {
handleException("Failed to update service catalog of tenant "
+ APIUtil.getTenantDomainFromTenantId(tenantID), e);
}
return serviceEntry.getKey();
}
/**
* Add a new end-point definition entry
*
* @param serviceEntry EndPoint related information
* @return uuid
* throws APIManagementException if failed to update service catalog
*/
public String addEndPointDefinition(ServiceEntry serviceEntry, String uuid) throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps = connection
.prepareStatement(SQLConstants.ServiceCatalogConstants.ADD_ENDPOINT_RESOURCES)) {
boolean initialAutoCommit = connection.getAutoCommit();
try {
connection.setAutoCommit(false);
ps.setString(1, uuid);
ps.setBinaryStream(2, serviceEntry.getEndpointDef());
ps.setBinaryStream(3, serviceEntry.getMetadata());
ps.executeUpdate();
connection.commit();
} catch (SQLException e) {
connection.rollback();
handleException("Failed to rollback adding endpoint definitions", e);
} finally {
APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit);
}
} catch (SQLException e) {
handleException("Failed to add end point definition for service catalog entry ID "
+ uuid, e);
}
return uuid;
}
    /**
     * Populate the stored MD5 hash of a service into the given ServiceEntry object.
     * The lookup is by service name and version; if no matching row exists the
     * entry is returned unchanged (MD5 left as-is).
     *
     * @param serviceInfo ServiceEntry whose name and version identify the service
     * @param tenantId ID of the owner's tenant
     * @return the same ServiceEntry instance, with its MD5 set when a match was found
     * @throws APIManagementException if the lookup fails
     */
    public ServiceEntry getMd5Hash(ServiceEntry serviceInfo, int tenantId) throws APIManagementException {
        try (Connection connection = APIMgtDBUtil.getConnection();
             PreparedStatement ps =
                     connection.prepareStatement(SQLConstants.ServiceCatalogConstants.GET_SERVICE_MD5_BY_NAME_AND_VERSION)) {
            ps.setString(1, serviceInfo.getName());
            ps.setString(2, serviceInfo.getVersion());
            ps.setInt(3, tenantId);
            try (ResultSet resultSet = ps.executeQuery()) {
                if (resultSet.next()) {
                    serviceInfo.setMd5(resultSet.getString("MD5"));
                }
            }
        } catch (SQLException e) {
            handleException("Error while executing SQL for getting User MD5 hash", e);
        }
        return serviceInfo;
    }
/**
* Get MD5 hash value of a service
*
* @param key Service key of service
* @param tenantId ID of the owner's tenant
* @return String key
* throws APIManagementException if failed
*/
public String getMd5HashByKey(String key, int tenantId) throws APIManagementException {
String md5 = null;
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps =
connection.prepareStatement(SQLConstants.ServiceCatalogConstants.GET_SERVICE_MD5_BY_SERVICE_KEY)) {
ps.setString(1, key);
ps.setInt(2, tenantId);
try (ResultSet resultSet = ps.executeQuery()) {
if (resultSet.next()) {
md5 = resultSet.getString("MD5");
}
}
} catch (SQLException e) {
handleException("Error while executing SQL for getting User MD5 hash", e);
}
return md5;
}
/**
* Get service resources by service key
*
* @param key Service key of service
* @param tenantId ID of the owner's tenant
* @return ServiceEntry
* throws APIManagementException if failed to retrieve
*/
public ServiceEntry getServiceResourcesByKey(String key, int tenantId) throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps =
connection.prepareStatement(SQLConstants.ServiceCatalogConstants.GET_ENDPOINT_RESOURCES_BY_KEY)) {
ps.setString(1, key);
ps.setInt(2, tenantId);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ServiceEntry serviceEntry = new ServiceEntry();
serviceEntry.setUuid(rs.getString("UUID"));
serviceEntry.setMetadata(rs.getBinaryStream("METADATA"));
serviceEntry.setEndpointDef(rs.getBinaryStream("SERVICE_DEFINITION"));
return serviceEntry;
}
}
} catch (SQLException e) {
handleException("Error while executing SQL for getting catalog entry resources", e);
}
return null;
}
/**
* Get service information by service key
*
* @param key Service key of service
* @param tenantId ID of the owner's tenant
* @return ServiceEntry
* throws APIManagementException if failed to retrieve
*/
public ServiceEntry getServiceByKey(String key, int tenantId) throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps =
connection.prepareStatement(SQLConstants.ServiceCatalogConstants.GET_SERVICE_BY_SERVICE_KEY)) {
ps.setString(1, key);
ps.setInt(2, tenantId);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ServiceEntry serviceEntry = getServiceParams(rs, false);
serviceEntry.setMetadata(rs.getBinaryStream(APIConstants.ServiceCatalogConstants.METADATA));
serviceEntry.setEndpointDef(rs.getBinaryStream(APIConstants.ServiceCatalogConstants
.SERVICE_DEFINITION));
return serviceEntry;
}
}
} catch (SQLException e) {
handleException("Error while executing SQL for getting service information", e);
}
return null;
}
/**
* Get service information by name and version
*
* @param name Service name
* @param version Service version
* @param tenantId ID of the owner's tenant
* @return ServiceEntry
* throws APIManagementException if failed to retrieve
*/
public ServiceEntry getServiceByNameAndVersion(String name, String version, int tenantId)
throws APIManagementException {
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps =
connection.prepareStatement(SQLConstants.ServiceCatalogConstants.GET_ENDPOINT_RESOURCES_BY_NAME_AND_VERSION)) {
ps.setString(1, name);
ps.setString(2, version);
ps.setInt(3, tenantId);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ServiceEntry serviceEntry = new ServiceEntry();
serviceEntry.setUuid(rs.getString("UUID"));
serviceEntry.setMetadata(rs.getBinaryStream("METADATA"));
serviceEntry.setEndpointDef(rs.getBinaryStream("SERVICE_DEFINITION"));
return serviceEntry;
}
}
} catch (SQLException e) {
handleException("Error while executing SQL for getting catalog entry resources", e);
}
return null;
}
    /**
     * Delete a service catalog entry by its service ID.
     *
     * @param serviceId service ID of the entry to delete
     * @param tenantId  ID of the owner's tenant
     * @throws APIManagementException if the delete fails
     */
    public void deleteService(String serviceId, int tenantId) throws APIManagementException {
        try (Connection connection = APIMgtDBUtil.getConnection();
             PreparedStatement statement = connection.prepareStatement(SQLConstants
                     .ServiceCatalogConstants.DELETE_SERVICE_BY_SERVICE_ID)) {
            // Remember the pooled connection's auto-commit mode so it can be restored after.
            boolean initialAutoCommit = connection.getAutoCommit();
            try {
                connection.setAutoCommit(false);
                statement.setString(1, serviceId);
                statement.setInt(2, tenantId);
                statement.executeUpdate();
                connection.commit();
            } catch (SQLException e) {
                // Undo the partial delete before surfacing the failure.
                connection.rollback();
                handleException("Failed to delete service : " + serviceId + " from service catalog: " + tenantId, e);
            } finally {
                APIMgtDBUtil.setAutoCommit(connection, initialAutoCommit);
            }
        } catch (SQLException e) {
            handleException("Failed to delete service : " + serviceId + " from service catalog: " + tenantId, e);
        }
    }
    /**
     * Get services matching the given filter parameters, sorted and paginated.
     *
     * @param filterParams Service filter parameters: name/version/definition-type/display-name/key
     *                     patterns (applied as SQL LIKE), sort column and order, offset and limit
     * @param tenantId Tenant ID of the logged in user
     * @param shrink Whether to shrink the response (omit detail columns) or not
     * @return List of Services
     * @throws APIManagementException if the retrieval fails
     */
    public List<ServiceEntry> getServices(ServiceFilterParams filterParams, int tenantId, boolean shrink)
            throws APIManagementException {
        List<ServiceEntry> serviceEntryList = new ArrayList<>();
        String query = SQLConstantManagerFactory.getSQlString("GET_ALL_SERVICES_BY_TENANT_ID");
        // NOTE(review): sortBy/sortOrder are spliced directly into the SQL text ($1/$2) because
        // JDBC placeholders cannot be used for ORDER BY clauses. Confirm both values are
        // validated against a whitelist upstream; otherwise this is an SQL-injection vector.
        query = query.replace("$1", filterParams.getSortBy());
        query = query.replace("$2", filterParams.getSortOrder());
        try (Connection connection = APIMgtDBUtil.getConnection();
             PreparedStatement ps = connection.prepareStatement(query)) {
            ps.setInt(1, tenantId);
            // Filter values are wrapped in % for substring (LIKE) matching.
            ps.setString(2, "%" + filterParams.getName() + "%");
            ps.setString(3, "%" + filterParams.getVersion() + "%");
            ps.setString(4, "%" + filterParams.getDefinitionType() + "%");
            ps.setString(5, "%" + filterParams.getDisplayName() + "%");
            ps.setString(6, "%" + filterParams.getKey() + "%");
            ps.setInt(7, filterParams.getOffset());
            ps.setInt(8, filterParams.getLimit());
            try(ResultSet resultSet = ps.executeQuery()) {
                while(resultSet.next()) {
                    ServiceEntry service = getServiceParams(resultSet, shrink);
                    serviceEntryList.add(service);
                }
            }
        } catch (SQLException e) {
            handleException("Error while retrieving the Services", e);
        }
        return serviceEntryList;
    }
public ServiceEntry getServiceByUUID(String serviceId, int tenantId) throws APIManagementException {
String query = SQLConstants.ServiceCatalogConstants.GET_SERVICE_BY_SERVICE_ID;
try (Connection connection = APIMgtDBUtil.getConnection();
PreparedStatement ps = connection.prepareStatement(query)) {
ps.setString(1, serviceId);
ps.setInt(2, tenantId);
try (ResultSet rs = ps.executeQuery()) {
if (rs.next()) {
ServiceEntry service = getServiceParams(rs, false);
return service;
}
}
} catch (SQLException e) {
handleException("Error while retrieving details of Service with Id: " + serviceId, e);
}
return null;
}
    /**
     * Bind the parameters of the UPDATE_SERVICE_BY_KEY statement from a ServiceEntry.
     * The positional indices must match the column order of that SQL exactly;
     * the final two parameters (key, tenantId) form the WHERE clause.
     *
     * @param ps       prepared statement for UPDATE_SERVICE_BY_KEY
     * @param service  source of the values to bind
     * @param tenantId ID of the owner's tenant
     * @param username user recorded as the updater
     * @throws SQLException if binding any parameter fails
     */
    private void setUpdateServiceParams(PreparedStatement ps, ServiceEntry service, int tenantId, String username)
            throws SQLException {
        ps.setString(1, service.getMd5());
        ps.setString(2, service.getName());
        ps.setString(3, service.getDisplayName());
        ps.setString(4, service.getVersion());
        ps.setInt(5, tenantId);
        ps.setString(6, service.getServiceUrl());
        ps.setString(7, service.getDefinitionType().name());
        ps.setString(8, service.getDefUrl());
        ps.setString(9, service.getDescription());
        ps.setString(10, service.getSecurityType().toString());
        ps.setBoolean(11, service.isMutualSSLEnabled());
        // Last-updated timestamp is the bind time, not a caller-supplied value.
        ps.setTimestamp(12, new Timestamp(System.currentTimeMillis()));
        ps.setString(13, username);
        ps.setBinaryStream(14, service.getEndpointDef());
        ps.setBinaryStream(15, service.getMetadata());
        // WHERE clause: service key + tenant.
        ps.setString(16, service.getKey());
        ps.setInt(17, tenantId);
    }
    /**
     * Bind the parameters of the service-insert statement from a ServiceEntry.
     * Generates a fresh UUID for the new row; created and last-updated timestamps
     * are both set to the bind time, and the same user is recorded as creator
     * and updater.
     *
     * @param ps       prepared statement for the service insert
     * @param service  source of the values to bind
     * @param tenantId ID of the owner's tenant
     * @param username user recorded as creator and updater
     * @throws SQLException if binding any parameter fails
     */
    private void setServiceParams(PreparedStatement ps, ServiceEntry service, int tenantId, String username)
            throws SQLException {
        // New primary key for the catalog entry.
        String uuid = UUID.randomUUID().toString();
        ps.setString(1, uuid);
        ps.setString(2, service.getKey());
        ps.setString(3, service.getMd5());
        ps.setString(4, service.getName());
        ps.setString(5, service.getDisplayName());
        ps.setString(6, service.getVersion());
        ps.setInt(7, tenantId);
        ps.setString(8, service.getServiceUrl());
        ps.setString(9, service.getDefinitionType().name());
        ps.setString(10, service.getDefUrl());
        ps.setString(11, service.getDescription());
        ps.setString(12, service.getSecurityType().toString());
        ps.setBoolean(13, service.isMutualSSLEnabled());
        // Created and last-updated times are both "now" for a fresh insert.
        ps.setTimestamp(14, new Timestamp(System.currentTimeMillis()));
        ps.setTimestamp(15, new Timestamp(System.currentTimeMillis()));
        ps.setString(16, username);
        ps.setString(17, username);
        ps.setBinaryStream(18, service.getEndpointDef());
        ps.setBinaryStream(19, service.getMetadata());
    }
    /**
     * Build a ServiceEntry from the current row of the given result set.
     * The identifying columns (UUID, name, key, MD5, version) are always read;
     * the detail columns are read only when {@code shrink} is false.
     *
     * @param resultSet result set positioned on a service row
     * @param shrink    when true, skip the detail columns and return a compact entry
     * @return the populated ServiceEntry, or null if reading the row failed
     *         (handleException is expected to throw, so the null return is
     *         presumably unreachable — TODO confirm)
     * @throws APIManagementException if reading any column fails
     */
    private ServiceEntry getServiceParams(ResultSet resultSet, boolean shrink) throws APIManagementException {
        ServiceEntry service = new ServiceEntry();
        try {
            service.setUuid(resultSet.getString(APIConstants.ServiceCatalogConstants.SERVICE_UUID));
            service.setName(resultSet.getString(APIConstants.ServiceCatalogConstants.SERVICE_NAME));
            service.setKey(resultSet.getString(APIConstants.ServiceCatalogConstants.SERVICE_KEY));
            service.setMd5(resultSet.getString(APIConstants.ServiceCatalogConstants.MD5));
            service.setVersion(resultSet.getString(APIConstants.ServiceCatalogConstants.SERVICE_VERSION));
            if (!shrink) {
                service.setDisplayName(resultSet.getString(APIConstants.ServiceCatalogConstants
                        .SERVICE_DISPLAY_NAME));
                service.setServiceUrl(resultSet.getString(APIConstants.ServiceCatalogConstants.SERVICE_URL));
                service.setDefinitionType(ServiceEntry.DefinitionType.valueOf(resultSet.getString(APIConstants
                        .ServiceCatalogConstants.DEFINITION_TYPE)));
                service.setDefUrl(resultSet.getString(APIConstants.ServiceCatalogConstants.DEFINITION_URL));
                service.setDescription(resultSet.getString(APIConstants.ServiceCatalogConstants.DESCRIPTION));
                service.setSecurityType(ServiceEntry.SecurityType.valueOf(resultSet
                        .getString(APIConstants.ServiceCatalogConstants.SECURITY_TYPE)));
                service.setMutualSSLEnabled(resultSet.getBoolean(APIConstants.ServiceCatalogConstants
                        .MUTUAL_SSL_ENABLED));
                service.setCreatedTime(resultSet.getTimestamp(APIConstants.ServiceCatalogConstants
                        .CREATED_TIME));
                service.setLastUpdatedTime(resultSet.getTimestamp(APIConstants.ServiceCatalogConstants
                        .LAST_UPDATED_TIME));
                service.setCreatedBy(resultSet.getString(APIConstants.ServiceCatalogConstants.CREATED_BY));
                service.setUpdatedBy(resultSet.getString(APIConstants.ServiceCatalogConstants.UPDATED_BY));
            }
            return service;
        } catch (SQLException e) {
            handleException("Error while setting service parameters", e);
            return null;
        }
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2017 Serge Rider (serge@jkiss.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ext.postgresql.model;
import org.jkiss.code.NotNull;
import org.jkiss.code.Nullable;
import org.jkiss.dbeaver.DBException;
import org.jkiss.dbeaver.Log;
import org.jkiss.dbeaver.ext.postgresql.PostgreUtils;
import org.jkiss.dbeaver.model.DBPDataKind;
import org.jkiss.dbeaver.model.DBPHiddenObject;
import org.jkiss.dbeaver.model.DBPNamedObject2;
import org.jkiss.dbeaver.model.DBUtils;
import org.jkiss.dbeaver.model.exec.jdbc.JDBCResultSet;
import org.jkiss.dbeaver.model.impl.DBPositiveNumberTransformer;
import org.jkiss.dbeaver.model.impl.jdbc.JDBCUtils;
import org.jkiss.dbeaver.model.impl.jdbc.struct.JDBCTableColumn;
import org.jkiss.dbeaver.model.meta.IPropertyValueListProvider;
import org.jkiss.dbeaver.model.meta.IPropertyValueTransformer;
import org.jkiss.dbeaver.model.meta.Property;
import org.jkiss.dbeaver.model.sql.SQLUtils;
import org.jkiss.dbeaver.model.struct.DBSEntity;
import org.jkiss.dbeaver.model.struct.DBSTypedObjectEx;
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;
/**
* PostgreAttribute
*/
public abstract class PostgreAttribute<OWNER extends DBSEntity & PostgreObject> extends JDBCTableColumn<OWNER> implements DBSTypedObjectEx, DBPNamedObject2, DBPHiddenObject
{
    private static final Log log = Log.getLog(PostgreAttribute.class);
    @NotNull
    private PostgreDataType dataType;
    // NOTE(review): 'comment' and 'charLength' are never read or written in this class —
    // possibly dead, or used by a subclass outside this file. Verify before removing.
    private String comment;
    private long charLength;
    // Number of array dimensions (pg_attribute.attndims).
    private int arrayDim;
    // Number of direct ancestors this column is inherited from (pg_attribute.attinhcount).
    private int inheritorsCount;
    private String description;
    // Constructor for a new (not yet persisted) attribute.
    protected PostgreAttribute(
        OWNER table)
    {
        super(table, false);
    }
    // Constructor that loads an existing attribute from a catalog query result row.
    public PostgreAttribute(
        OWNER table,
        JDBCResultSet dbResult)
        throws DBException
    {
        super(table, true);
        loadInfo(dbResult);
    }
    public PostgreDatabase getDatabase() {
        return getTable().getDatabase();
    }
    /**
     * Populates this attribute from a pg_attribute result row: name, position,
     * nullability, data type, default value, length/precision/scale and
     * inheritance info.
     */
    private void loadInfo(JDBCResultSet dbResult)
        throws DBException
    {
        setName(JDBCUtils.safeGetString(dbResult, "attname"));
        setOrdinalPosition(JDBCUtils.safeGetInt(dbResult, "attnum"));
        setRequired(JDBCUtils.safeGetBoolean(dbResult, "attnotnull"));
        final long typeId = JDBCUtils.safeGetLong(dbResult, "atttypid");
        dataType = getTable().getDatabase().getDataType(typeId);
        if (dataType == null) {
            throw new DBException("Attribute data type '" + typeId + "' not found");
        }
        setTypeName(dataType.getTypeName());
        setValueType(dataType.getTypeID());
        setDefaultValue(JDBCUtils.safeGetString(dbResult, "def_value"));
        // atttypmod packs type-specific modifiers (length, precision+scale, ...).
        int typeMod = JDBCUtils.safeGetInt(dbResult, "atttypmod");
        int maxLength = PostgreUtils.getAttributePrecision(typeId, typeMod);
        DBPDataKind dataKind = dataType.getDataKind();
        if (dataKind == DBPDataKind.NUMERIC || dataKind == DBPDataKind.DATETIME) {
            // Length is meaningless for numeric/datetime kinds.
            setMaxLength(0);
        } else {
            if (maxLength <= 0) {
                maxLength = PostgreUtils.getDisplaySize(typeId, typeMod);
            }
            if (maxLength >= 0) {
                setMaxLength(maxLength);
            } else {
                // TypeMod can be anything.
                // It is often used in packed format and has no numeric meaning at all
                //setMaxLength(typeMod);
            }
        }
        setPrecision(maxLength);
        setScale(PostgreUtils.getScale(typeId, typeMod));
        this.description = JDBCUtils.safeGetString(dbResult, "description");
        this.arrayDim = JDBCUtils.safeGetInt(dbResult, "attndims");
        this.inheritorsCount = JDBCUtils.safeGetInt(dbResult, "attinhcount");
        setPersisted(true);
    }
    @NotNull
    @Override
    public PostgreDataSource getDataSource()
    {
        return getTable().getDataSource();
    }
    @NotNull
    @Override
    @Property(viewable = true, editable = true, updatable = true, order = 20, listProvider = DataTypeListProvider.class, valueTransformer = DataTypeValueTransformer.class)
    public PostgreDataType getDataType() {
        return dataType;
    }
    // Keeps the cached type name/value-type in sync with the new data type.
    public void setDataType(@NotNull PostgreDataType dataType) {
        this.dataType = dataType;
        setTypeName(dataType.getTypeName());
        setValueType(dataType.getTypeID());
    }
    @Override
    public DBPDataKind getDataKind() {
        return dataType.getDataKind();
    }
    @Override
    @Property(viewable = true, editable = true, updatable = true, valueRenderer = DBPositiveNumberTransformer.class, order = 21)
    public long getMaxLength()
    {
        return super.getMaxLength();
    }
    @Override
    public String getTypeName()
    {
        // Delegates to the resolved data type rather than the cached name.
        return dataType.getTypeName();
    }
    @Override
    @Property(viewable = true, editable = true, updatable = true, valueRenderer = DBPositiveNumberTransformer.class, order = 22)
    public int getPrecision()
    {
        return super.getPrecision();
    }
    @Override
    @Property(viewable = true, editable = true, updatable = true, valueRenderer = DBPositiveNumberTransformer.class, order = 23)
    public int getScale()
    {
        return super.getScale();
    }
    @Override
    @Property(viewable = true, editable = true, updatable = true, order = 50)
    public boolean isRequired()
    {
        return super.isRequired();
    }
    @Override
    public boolean isAutoGenerated()
    {
        // Heuristic: a default calling nextval(...) means a serial/sequence column.
        final String def = getDefaultValue();
        return def != null && def.contains("nextval(");
    }
    @Override
    @Property(viewable = true, editable = true, updatable = true, order = 70)
    public String getDefaultValue()
    {
        return super.getDefaultValue();
    }
    @Nullable
    @Override
    @Property(viewable = true, editable = true, updatable = true, order = 100)
    public String getDescription() {
        return description;
    }
    public void setDescription(String description) {
        this.description = description;
    }
    @Override
    public boolean isHidden() {
        // System columns (ctid, xmin, ...) have negative attnum in PostgreSQL.
        return isPersisted() && getOrdinalPosition() < 0;
    }
    public String getFullTypeName() {
        String fqtn = dataType.getTypeName();
        if (dataType.getDataKind() != DBPDataKind.CONTENT) {
            // Non-LOB types get length/precision modifiers appended.
            return DBUtils.getFullTypeName(this);
        }
        return fqtn;
    }
    /** Supplies the list of data types offered in the column-type property editor. */
    public static class DataTypeListProvider implements IPropertyValueListProvider<PostgreAttribute> {
        @Override
        public boolean allowCustomValue()
        {
            return true;
        }
        @Override
        public Object[] getPossibleValues(PostgreAttribute column)
        {
            // TreeSet with a name comparator: de-duplicates and sorts by type name.
            Set<PostgreDataType> types = new TreeSet<>(new Comparator<PostgreDataType>() {
                @Override
                public int compare(PostgreDataType o1, PostgreDataType o2) {
                    return o1.getTypeName().compareTo(o2.getTypeName());
                }
            });
            for (PostgreDataType type : column.getDataSource().getLocalDataTypes()) {
                types.add(type);
            }
            return types.toArray(new PostgreDataType[types.size()]);
        }
    }
    /** Converts a user-entered value (type name or type object) into a PostgreDataType. */
    public static class DataTypeValueTransformer implements IPropertyValueTransformer<PostgreAttribute, Object> {
        @Override
        public PostgreDataType transform(PostgreAttribute object, Object value) {
            if (value instanceof String) {
                PostgreDataType dataType = object.getDataSource().getDefaultInstance().getDataType((String) value);
                if (dataType == null) {
                    throw new IllegalArgumentException("Bad data type name specified: " + value);
                }
                return dataType;
            } else if (value instanceof PostgreDataType) {
                return (PostgreDataType) value;
            } else {
                throw new IllegalArgumentException("Invalid type value: " + value);
            }
        }
    }
}
| |
/*
* Copyright (c) 2009, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.tools.classfile;
/**
* See JVMS, chapter 6.
*
* <p><b>This is NOT part of any supported API.
* If you write code that depends on this, you do so at your own risk.
* This code and its internal interfaces are subject to change or
* deletion without notice.</b>
*
* @see Code_attribute#getInstructions
*/
public class Instruction {
    /** The kind of an instruction, as determined by the position, size and
     *  types of its operands. */
    public static enum Kind {
        /** Opcode is not followed by any operands. */
        NO_OPERANDS(1),
        /** Opcode is followed by a byte indicating a type. */
        ATYPE(2),
        /** Opcode is followed by a 2-byte branch offset. */
        BRANCH(3),
        /** Opcode is followed by a 4-byte branch offset. */
        BRANCH_W(5),
        /** Opcode is followed by a signed byte value. */
        BYTE(2),
        /** Opcode is followed by a 1-byte index into the constant pool. */
        CPREF(2),
        /** Opcode is followed by a 2-byte index into the constant pool. */
        CPREF_W(3),
        /** Opcode is followed by a 2-byte index into the constant pool,
         *  an unsigned byte value. */
        CPREF_W_UBYTE(4),
        /** Opcode is followed by a 2-byte index into the constant pool,
         *  an unsigned byte value, and a zero byte. */
        CPREF_W_UBYTE_ZERO(5),
        /** Opcode is followed by variable number of operands, depending
         * on the instruction.*/
        DYNAMIC(-1),
        /** Opcode is followed by a 1-byte reference to a local variable. */
        LOCAL(2),
        /** Opcode is followed by a 1-byte reference to a local variable,
         *  and a signed byte value. */
        LOCAL_BYTE(3),
        /** Opcode is followed by a signed short value. */
        SHORT(3),
        /** Wide opcode is not followed by any operands. */
        WIDE_NO_OPERANDS(2),
        /** Wide opcode is followed by a 2-byte index into the constant pool. */
        WIDE_CPREF_W(4),
        /** Wide opcode is followed by a 2-byte index into the constant pool,
         *  and a signed short value. */
        WIDE_CPREF_W_SHORT(6),
        /** Opcode was not recognized. */
        UNKNOWN(1);
        Kind(int length) {
            this.length = length;
        }
        /** The length, in bytes, of this kind of instruction, or -1 if the
         *  length depends on the specific instruction. */
        public final int length;
    };
    /** A utility visitor to help decode the operands of an instruction.
     *  @see Instruction#accept */
    public interface KindVisitor<R,P> {
        /** See {@link Kind#NO_OPERANDS}, {@link Kind#WIDE_NO_OPERANDS}. */
        R visitNoOperands(Instruction instr, P p);
        /** See {@link Kind#ATYPE}. */
        R visitArrayType(Instruction instr, TypeKind kind, P p);
        /** See {@link Kind#BRANCH}, {@link Kind#BRANCH_W}. */
        R visitBranch(Instruction instr, int offset, P p);
        /** See {@link Kind#CPREF}, {@link Kind#CPREF_W}, {@link Kind#WIDE_CPREF_W}. */
        R visitConstantPoolRef(Instruction instr, int index, P p);
        /** See {@link Kind#CPREF_W_UBYTE}, {@link Kind#CPREF_W_UBYTE_ZERO}, {@link Kind#WIDE_CPREF_W_SHORT}. */
        R visitConstantPoolRefAndValue(Instruction instr, int index, int value, P p);
        /** See {@link Kind#LOCAL}. */
        R visitLocal(Instruction instr, int index, P p);
        /** See {@link Kind#LOCAL_BYTE}. */
        R visitLocalAndValue(Instruction instr, int index, int value, P p);
        /** See {@link Kind#DYNAMIC}. */
        R visitLookupSwitch(Instruction instr, int default_, int npairs, int[] matches, int[] offsets, P p);
        /** See {@link Kind#DYNAMIC}. */
        R visitTableSwitch(Instruction instr, int default_, int low, int high, int[] offsets, P p);
        /** See {@link Kind#BYTE}, {@link Kind#SHORT}. */
        R visitValue(Instruction instr, int value, P p);
        /** Instruction is unrecognized. */
        R visitUnknown(Instruction instr, P p);
    }
    /** The kind of primitive array type to create.
     *  See JVMS chapter 6, newarray. */
    public static enum TypeKind {
        T_BOOLEAN(4, "boolean"),
        T_CHAR(5, "char"),
        T_FLOAT(6, "float"),
        T_DOUBLE(7, "double"),
        T_BYTE(8, "byte"),
        T_SHORT(9, "short"),
        T_INT (10, "int"),
        T_LONG (11, "long");
        TypeKind(int value, String name) {
            this.value = value;
            this.name = name;
        }
        /** Map a newarray atype operand value to its TypeKind, or null if unrecognized. */
        public static TypeKind get(int value) {
            switch (value) {
                case 4: return T_BOOLEAN;
                case 5: return T_CHAR;
                case 6: return T_FLOAT;
                case 7: return T_DOUBLE;
                case 8: return T_BYTE;
                case 9: return T_SHORT;
                case 10: return T_INT;
                case 11: return T_LONG;
                default: return null;
            }
        }
        public final int value;
        public final String name;
    }
    /** An instruction is defined by its position in a bytecode array. */
    public Instruction(byte[] bytes, int pc) {
        this.bytes = bytes;
        this.pc = pc;
    }
    /** Get the position of the instruction within the bytecode array. */
    public int getPC() {
        return pc;
    }
    /** Get a byte value, relative to the start of this instruction.
     *  Note: sign-extended, since Java bytes are signed. */
    public int getByte(int offset) {
        return bytes[pc + offset];
    }
    /** Get an unsigned byte value, relative to the start of this instruction. */
    public int getUnsignedByte(int offset) {
        return getByte(offset) & 0xff;
    }
    /** Get a 2-byte value, relative to the start of this instruction.
     *  Sign-extended: the high byte is used as-is (signed). */
    public int getShort(int offset) {
        return (getByte(offset) << 8) | getUnsignedByte(offset + 1);
    }
    /** Get a unsigned 2-byte value, relative to the start of this instruction. */
    public int getUnsignedShort(int offset) {
        return getShort(offset) & 0xFFFF;
    }
    /** Get a 4-byte value, relative to the start of this instruction. */
    public int getInt(int offset) {
        return (getShort(offset) << 16) | (getUnsignedShort(offset + 2));
    }
    /** Get the Opcode for this instruction, or null if the instruction is
     *  unrecognized. */
    public Opcode getOpcode() {
        int b = getUnsignedByte(0);
        switch (b) {
            // Two-byte opcode prefixes: the real opcode is the following byte.
            case Opcode.NONPRIV:
            case Opcode.PRIV:
            case Opcode.WIDE:
                return Opcode.get(b, getUnsignedByte(1));
        }
        return Opcode.get(b);
    }
    /** Get the mnemonic for this instruction, or a default string if the
     *  instruction is unrecognized. */
    public String getMnemonic() {
        Opcode opcode = getOpcode();
        if (opcode == null)
            return "bytecode " + getUnsignedByte(0);
        else
            return opcode.toString().toLowerCase();
    }
    /** Get the length, in bytes, of this instruction, including the opcode
     *  and all its operands. */
    public int length() {
        Opcode opcode = getOpcode();
        if (opcode == null)
            return 1;
        switch (opcode) {
            // Switch instructions are variable-length: their operands start after
            // 0-3 padding bytes that align the default offset to a 4-byte boundary
            // relative to the start of the method's code (JVMS ch. 6).
            case TABLESWITCH: {
                int pad = align(pc + 1) - pc;
                int low = getInt(pad + 4);
                int high = getInt(pad + 8);
                return pad + 12 + 4 * (high - low + 1);
            }
            case LOOKUPSWITCH: {
                int pad = align(pc + 1) - pc;
                int npairs = getInt(pad + 4);
                return pad + 8 + 8 * npairs;
            }
            default:
                return opcode.kind.length;
        }
    }
    /** Get the {@link Kind} of this instruction. */
    public Kind getKind() {
        Opcode opcode = getOpcode();
        return (opcode != null ? opcode.kind : Kind.UNKNOWN);
    }
    /** Invoke a method on the visitor according to the kind of this
     *  instruction, passing in the decoded operands for the instruction. */
    public <R,P> R accept(KindVisitor<R,P> visitor, P p) {
        switch (getKind()) {
            case NO_OPERANDS:
                return visitor.visitNoOperands(this, p);
            case ATYPE:
                return visitor.visitArrayType(
                        this, TypeKind.get(getUnsignedByte(1)), p);
            case BRANCH:
                return visitor.visitBranch(this, getShort(1), p);
            case BRANCH_W:
                return visitor.visitBranch(this, getInt(1), p);
            case BYTE:
                return visitor.visitValue(this, getByte(1), p);
            case CPREF:
                return visitor.visitConstantPoolRef(this, getUnsignedByte(1), p);
            case CPREF_W:
                return visitor.visitConstantPoolRef(this, getUnsignedShort(1), p);
            case CPREF_W_UBYTE:
            case CPREF_W_UBYTE_ZERO:
                return visitor.visitConstantPoolRefAndValue(
                        this, getUnsignedShort(1), getUnsignedByte(3), p);
            case DYNAMIC: {
                // Only the switch instructions have DYNAMIC kind.
                switch (getOpcode()) {
                    case TABLESWITCH: {
                        int pad = align(pc + 1) - pc;
                        int default_ = getInt(pad);
                        int low = getInt(pad + 4);
                        int high = getInt(pad + 8);
                        int[] values = new int[high - low + 1];
                        for (int i = 0; i < values.length; i++)
                            values[i] = getInt(pad + 12 + 4 * i);
                        return visitor.visitTableSwitch(
                                this, default_, low, high, values, p);
                    }
                    case LOOKUPSWITCH: {
                        int pad = align(pc + 1) - pc;
                        int default_ = getInt(pad);
                        int npairs = getInt(pad + 4);
                        int[] matches = new int[npairs];
                        int[] offsets = new int[npairs];
                        for (int i = 0; i < npairs; i++) {
                            matches[i] = getInt(pad + 8 + i * 8);
                            offsets[i] = getInt(pad + 12 + i * 8);
                        }
                        return visitor.visitLookupSwitch(
                                this, default_, npairs, matches, offsets, p);
                    }
                    default:
                        throw new IllegalStateException();
                }
            }
            case LOCAL:
                return visitor.visitLocal(this, getUnsignedByte(1), p);
            case LOCAL_BYTE:
                return visitor.visitLocalAndValue(
                        this, getUnsignedByte(1), getByte(2), p);
            case SHORT:
                return visitor.visitValue(this, getShort(1), p);
            case WIDE_NO_OPERANDS:
                return visitor.visitNoOperands(this, p);
            case WIDE_CPREF_W:
                // Operand offsets skip the leading 'wide' prefix byte.
                return visitor.visitConstantPoolRef(this, getUnsignedShort(2), p);
            case WIDE_CPREF_W_SHORT:
                return visitor.visitConstantPoolRefAndValue(
                        this, getUnsignedShort(2), getUnsignedByte(4), p);
            case UNKNOWN:
                return visitor.visitUnknown(this, p);
            default:
                throw new IllegalStateException();
        }
    }
    /** Round n up to the next multiple of 4. */
    private static int align(int n) {
        return (n + 3) & ~3;
    }
    private byte[] bytes;
    private int pc;
}
| |
// Copyright 2019 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.feed.library.piet;
import static com.google.common.truth.Truth.assertThat;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.never;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import static org.mockito.MockitoAnnotations.initMocks;
import static org.chromium.chrome.browser.feed.library.api.host.imageloader.ImageLoaderApi.DIMENSION_UNKNOWN;
import static org.chromium.chrome.browser.feed.library.common.testing.RunnableSubject.assertThatRunnable;
import static org.chromium.chrome.browser.feed.library.piet.StyleProvider.DIMENSION_NOT_SET;
import android.app.Activity;
import android.content.Context;
import android.graphics.Color;
import android.graphics.drawable.ColorDrawable;
import android.graphics.drawable.Drawable;
import android.view.View;
import android.view.View.MeasureSpec;
import android.widget.ImageView;
import android.widget.ImageView.ScaleType;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.robolectric.Robolectric;
import org.robolectric.annotation.Config;
import org.chromium.chrome.browser.feed.library.common.functional.Suppliers;
import org.chromium.chrome.browser.feed.library.common.time.testing.FakeClock;
import org.chromium.chrome.browser.feed.library.common.ui.LayoutUtils;
import org.chromium.chrome.browser.feed.library.piet.PietStylesHelper.PietStylesHelperFactory;
import org.chromium.chrome.browser.feed.library.piet.host.AssetProvider;
import org.chromium.chrome.browser.feed.library.piet.ui.RoundedCornerMaskCache;
import org.chromium.chrome.browser.feed.library.piet.ui.RoundedCornerWrapperView;
import org.chromium.components.feed.core.proto.ui.piet.BindingRefsProto.ImageBindingRef;
import org.chromium.components.feed.core.proto.ui.piet.BindingRefsProto.StyleBindingRef;
import org.chromium.components.feed.core.proto.ui.piet.ElementsProto.BindingValue;
import org.chromium.components.feed.core.proto.ui.piet.ElementsProto.CustomElement;
import org.chromium.components.feed.core.proto.ui.piet.ElementsProto.Element;
import org.chromium.components.feed.core.proto.ui.piet.ElementsProto.ImageElement;
import org.chromium.components.feed.core.proto.ui.piet.ElementsProto.Visibility;
import org.chromium.components.feed.core.proto.ui.piet.ImagesProto.Image;
import org.chromium.components.feed.core.proto.ui.piet.ImagesProto.ImageSource;
import org.chromium.components.feed.core.proto.ui.piet.MediaQueriesProto.DarkLightCondition;
import org.chromium.components.feed.core.proto.ui.piet.MediaQueriesProto.DarkLightCondition.DarkLightMode;
import org.chromium.components.feed.core.proto.ui.piet.MediaQueriesProto.MediaQueryCondition;
import org.chromium.components.feed.core.proto.ui.piet.RoundedCornersProto.RoundedCorners;
import org.chromium.components.feed.core.proto.ui.piet.RoundedCornersProto.RoundedCorners.Corners;
import org.chromium.components.feed.core.proto.ui.piet.StylesProto.EdgeWidths;
import org.chromium.components.feed.core.proto.ui.piet.StylesProto.Style;
import org.chromium.components.feed.core.proto.ui.piet.StylesProto.StyleIdsStack;
import org.chromium.testing.local.LocalRobolectricTestRunner;
/** Tests of the {@link ImageElementAdapter}. */
@RunWith(LocalRobolectricTestRunner.class)
@Config(manifest = Config.NONE)
public class ImageElementAdapterTest {
    // Explicit style dimensions used by tests that exercise height/width handling.
    private static final int HEIGHT_DP = 123;
    private static final int WIDTH_DP = 321;
    // Non-trivial padding and rounded-corner styles returned by the mocked StyleProvider.
    private static final EdgeWidths PADDING =
            EdgeWidths.newBuilder().setBottom(1).setTop(2).setStart(3).setEnd(4).build();
    private static final RoundedCorners CORNERS = RoundedCorners.newBuilder()
            .setBitmask(Corners.BOTTOM_START_VALUE)
            .setRadiusDp(34)
            .build();
    // Single-source image and the element wrapping it; the default model bound in most tests.
    private static final Image DEFAULT_IMAGE =
            Image.newBuilder().addSources(ImageSource.newBuilder().setUrl("icanhas.chz")).build();
    private static final Element DEFAULT_MODEL =
            asElement(ImageElement.newBuilder().setImage(DEFAULT_IMAGE).build());
    // Rounded-corner implementation flags handed to AdapterParameters; both disabled here.
    private static final boolean LEGACY_CORNERS_FLAG = false;
    private static final boolean OUTLINE_CORNERS_FLAG = false;
    @Mock
    private ElementAdapterFactory mAdapterFactory;
    @Mock
    private TemplateBinder mTemplateBinder;
    @Mock
    private FrameContext mFrameContext;
    @Mock
    private AssetProvider mAssetProvider;
    @Mock
    private StyleProvider mStyleProvider;
    @Mock
    private HostProviders mHostProviders;
    @Mock
    private LoadImageCallback mLoadImageCallback;
    private Context mContext;
    // HEIGHT_DP / WIDTH_DP converted to pixels for the current (Robolectric) display density.
    private int mHeightPx;
    private int mWidthPx;
    private ImageView mImageView;
    private final FakeClock mClock = new FakeClock();
    private RoundedCornerMaskCache mMaskCache;
    // Subject under test; the ForTest subclass captures createLoadImageCallback() arguments.
    private ImageElementAdapterForTest mAdapter;
    @Before
    public void setUp() throws Exception {
        initMocks(this);
        mContext = Robolectric.buildActivity(Activity.class).get();
        mHeightPx = (int) LayoutUtils.dpToPx(HEIGHT_DP, mContext);
        mWidthPx = (int) LayoutUtils.dpToPx(WIDTH_DP, mContext);
        mMaskCache = new RoundedCornerMaskCache();
        AdapterParameters parameters = new AdapterParameters(mContext, null, mHostProviders, null,
                mAdapterFactory, mTemplateBinder, mClock, new PietStylesHelperFactory(), mMaskCache,
                LEGACY_CORNERS_FLAG, OUTLINE_CORNERS_FLAG);
        // Any style lookup resolves to the mocked StyleProvider, and media-query filtering is a
        // pass-through unless a test overrides it.
        when(mFrameContext.makeStyleFor(any(StyleIdsStack.class))).thenReturn(mStyleProvider);
        when(mFrameContext.filterImageSourcesByMediaQueryCondition(any(Image.class)))
                .thenAnswer(invocation -> invocation.getArguments()[0]);
        when(mHostProviders.getAssetProvider()).thenReturn(mAssetProvider);
        when(mStyleProvider.getPadding()).thenReturn(PADDING);
        when(mStyleProvider.hasRoundedCorners()).thenReturn(true);
        when(mStyleProvider.getRoundedCorners()).thenReturn(CORNERS);
        when(mStyleProvider.getScaleType()).thenReturn(ScaleType.FIT_CENTER);
        when(mStyleProvider.createWrapperView(
                     mContext, mMaskCache, LEGACY_CORNERS_FLAG, OUTLINE_CORNERS_FLAG))
                .thenReturn(new RoundedCornerWrapperView(mContext, CORNERS, mMaskCache,
                        Suppliers.of(false),
                        /*radiusOverride= */ 0,
                        /* borders= */ null,
                        /* allowClipPath= */ false,
                        /* allowOutlineRounding= */ false));
        // No explicit dimensions by default; individual tests call setStyle() to override.
        setStyle(null, null);
        mAdapter = new ImageElementAdapterForTest(mContext, parameters);
    }
    @Test
    public void testCreate() {
        assertThat(mAdapter).isNotNull();
    }
    @Test
    public void testCreateAdapter() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getModel()).isSameInstanceAs(DEFAULT_MODEL.getImageElement());
        assertThat(mAdapter.getView()).isNotNull();
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(mHeightPx);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(mWidthPx);
        assertThat(mAdapter.getBaseView().getCropToPadding()).isTrue();
        verify(mStyleProvider).applyElementStyles(mAdapter);
    }
    @Test
    public void testCreateAdapter_noDimensionsSet() {
        setStyle(null, null);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getModel()).isSameInstanceAs(DEFAULT_MODEL.getImageElement());
        assertThat(mAdapter.getView()).isNotNull();
        // Assert that width and height are set to the defaults
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(DIMENSION_NOT_SET);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(DIMENSION_NOT_SET);
    }
    @Test
    public void testCreateAdapter_heightOnly() {
        setStyle(HEIGHT_DP, null);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getModel()).isEqualTo(DEFAULT_MODEL.getImageElement());
        assertThat(mAdapter.getView()).isNotNull();
        // Width defaults to MATCH_PARENT
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(mHeightPx);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(DIMENSION_NOT_SET);
    }
    @Test
    public void testCreateAdapter_widthOnly() {
        setStyle(null, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getModel()).isEqualTo(DEFAULT_MODEL.getImageElement());
        assertThat(mAdapter.getView()).isNotNull();
        // Image defaults to a square.
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(mWidthPx);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(mWidthPx);
    }
    @Test
    public void testCreateAdapter_noContent() {
        // An ImageElement with neither image nor imageBinding cannot be bound.
        Element model = asElement(ImageElement.getDefaultInstance());
        mAdapter.createAdapter(model, mFrameContext);
        assertThatRunnable(() -> mAdapter.bindModel(model, mFrameContext))
                .throwsAnExceptionOfType(PietFatalException.class)
                .that()
                .hasMessageThat()
                .contains("Unsupported or missing content");
    }
    @Test
    public void testBindModel_image() {
        StyleIdsStack styles = StyleIdsStack.newBuilder().addStyleIds("stylecat").build();
        Element model = Element.newBuilder()
                                .setStyleReferences(styles)
                                .setImageElement(ImageElement.newBuilder().setImage(DEFAULT_IMAGE))
                                .build();
        mAdapter.createAdapter(model, mFrameContext);
        mAdapter.bindModel(model, mFrameContext);
        mImageView = mAdapter.getBaseView();
        verify(mAssetProvider)
                .getImage(DEFAULT_IMAGE, DIMENSION_UNKNOWN, DIMENSION_UNKNOWN, mLoadImageCallback);
        assertThat(mAdapter.getModel()).isSameInstanceAs(model.getImageElement());
        assertThat(mAdapter.getElementStyleIdsStack()).isEqualTo(styles);
    }
    @Test
    public void testBindModel_imageBinding() {
        // The image is supplied indirectly through a binding resolved by the FrameContext.
        ImageBindingRef imageBinding = ImageBindingRef.newBuilder().setBindingId("feline").build();
        Element model = asElement(ImageElement.newBuilder().setImageBinding(imageBinding).build());
        when(mFrameContext.getImageBindingValue(imageBinding))
                .thenReturn(BindingValue.newBuilder().setImage(DEFAULT_IMAGE).build());
        mAdapter.createAdapter(model, mFrameContext);
        mAdapter.bindModel(model, mFrameContext);
        verify(mAssetProvider)
                .getImage(DEFAULT_IMAGE, DIMENSION_UNKNOWN, DIMENSION_UNKNOWN, mLoadImageCallback);
        assertThat(mAdapter.getModel()).isSameInstanceAs(model.getImageElement());
    }
    @Test
    public void testBindModel_optionalAbsent() {
        // An optional binding that resolves to nothing hides the view instead of failing.
        String bindingRef = "foto";
        ImageBindingRef imageBindingRef =
                ImageBindingRef.newBuilder().setBindingId(bindingRef).setIsOptional(true).build();
        Element imageBindingElement =
                asElement(ImageElement.newBuilder().setImageBinding(imageBindingRef).build());
        mAdapter.createAdapter(
                asElement(ImageElement.newBuilder().setImage(Image.getDefaultInstance()).build()),
                mFrameContext);
        when(mFrameContext.getImageBindingValue(imageBindingRef))
                .thenReturn(BindingValue.getDefaultInstance());
        mAdapter.bindModel(imageBindingElement, mFrameContext);
        assertThat(mAdapter.getBaseView().getDrawable()).isNull();
        assertThat(mAdapter.getBaseView().getVisibility()).isEqualTo(View.GONE);
    }
    @Test
    public void testBindModel_noContentInBindingValue() {
        // A non-optional binding whose BindingValue carries no image is a fatal error.
        String bindingRef = "foto";
        ImageBindingRef imageBindingRef =
                ImageBindingRef.newBuilder().setBindingId(bindingRef).build();
        Element imageBindingElement =
                asElement(ImageElement.newBuilder().setImageBinding(imageBindingRef).build());
        mAdapter.createAdapter(
                asElement(ImageElement.newBuilder().setImage(Image.getDefaultInstance()).build()),
                mFrameContext);
        when(mFrameContext.getImageBindingValue(imageBindingRef))
                .thenReturn(BindingValue.newBuilder()
                                    .setBindingId(bindingRef)
                                    .setVisibility(Visibility.VISIBLE)
                                    .clearImage()
                                    .build());
        assertThatRunnable(() -> mAdapter.bindModel(imageBindingElement, mFrameContext))
                .throwsAnExceptionOfType(PietFatalException.class)
                .that()
                .hasMessageThat()
                .contains("Image binding foto had no content");
    }
    @Test
    public void testBindModel_setsScaleType() {
        StyleIdsStack styles = StyleIdsStack.newBuilder().addStyleIds("stylecat").build();
        when(mStyleProvider.getScaleType()).thenReturn(ImageView.ScaleType.CENTER_CROP);
        Element model = asElement(ImageElement.newBuilder()
                                          .setImage(DEFAULT_IMAGE)
                                          .setStyleReferences(styles)
                                          .build());
        mAdapter.createAdapter(model, mFrameContext);
        mAdapter.bindModel(model, mFrameContext);
        verify(mAssetProvider)
                .getImage(DEFAULT_IMAGE, DIMENSION_UNKNOWN, DIMENSION_UNKNOWN, mLoadImageCallback);
        // The scale type from the style is forwarded into the LoadImageCallback.
        assertThat(mAdapter.mScaleTypeForCallback).isEqualTo(ScaleType.CENTER_CROP);
    }
    @Test
    public void testBindModel_again() {
        // Bind a model, then unbind it.
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        mImageView = mAdapter.getBaseView();
        RecyclerKey key1 = mAdapter.getKey();
        mAdapter.unbindModel();
        // Bind a different model
        Element model2 =
                asElement(ImageElement.newBuilder().setImage(Image.getDefaultInstance()).build());
        mAdapter.bindModel(model2, mFrameContext);
        verify(mAssetProvider)
                .getImage(Image.getDefaultInstance(), WIDTH_DP, HEIGHT_DP, mLoadImageCallback);
        RecyclerKey key2 = mAdapter.getKey();
        // The adapter and its view are recycled across binds, not recreated.
        assertThat(key1).isSameInstanceAs(key2);
        assertThat(mAdapter.getModel()).isSameInstanceAs(model2.getImageElement());
        assertThat(mAdapter.getView()).isNotNull();
        ImageView imageView2 = mAdapter.getBaseView();
        assertThat(imageView2).isSameInstanceAs(mImageView);
    }
    @Test
    public void testBindModel_bindingTwiceThrowsException() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        assertThatRunnable(() -> mAdapter.bindModel(DEFAULT_MODEL, mFrameContext))
                .throwsAnExceptionOfType(IllegalStateException.class)
                .that()
                .hasMessageThat()
                .contains("An image loading callback exists");
    }
    @Test
    public void testBindModel_setsStylesOnlyIfBindingIsDefined() {
        // Create an adapter with a default style
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        verify(mStyleProvider).applyElementStyles(mAdapter);
        // Styles do not change when a different model is bound
        StyleIdsStack otherStyle = StyleIdsStack.newBuilder().addStyleIds("ignored").build();
        Element imageWithOtherStyle =
                DEFAULT_MODEL.toBuilder().setStyleReferences(otherStyle).build();
        mAdapter.bindModel(imageWithOtherStyle, mFrameContext);
        mAdapter.unbindModel();
        verify(mFrameContext, never()).makeStyleFor(otherStyle);
        // Styles do change when a model with a style binding is bound
        StyleIdsStack boundStyle =
                StyleIdsStack.newBuilder()
                        .setStyleBinding(StyleBindingRef.newBuilder().setBindingId("tuna"))
                        .build();
        Element imageWithBoundStyle =
                DEFAULT_MODEL.toBuilder().setStyleReferences(boundStyle).build();
        mAdapter.bindModel(imageWithBoundStyle, mFrameContext);
        verify(mFrameContext).makeStyleFor(boundStyle);
        verify(mStyleProvider, times(2)).applyElementStyles(mAdapter);
    }
    @Test
    public void testBindModel_preLoadFill() {
        Drawable preLoadFillDrawable = new ColorDrawable(Color.RED);
        // Set up the StyleProvider mock
        when(mStyleProvider.createPreLoadFill()).thenReturn(preLoadFillDrawable);
        when(mStyleProvider.hasPreLoadFill()).thenReturn(true);
        // Bind and expect the pre-load fill to be set
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getBaseView().getDrawable()).isSameInstanceAs(preLoadFillDrawable);
        // Load drawable and replace pre-load fill
        verify(mAssetProvider)
                .getImage(DEFAULT_IMAGE, DIMENSION_UNKNOWN, DIMENSION_UNKNOWN, mLoadImageCallback);
    }
    @Test
    public void testBindModel_color() {
        int red = 0xFFFF0000;
        ImageElement defaultImageElement =
                ImageElement.newBuilder().setImage(DEFAULT_IMAGE).build();
        // Use a real StyleProvider here so the color actually flows through to the callback.
        StyleProvider redTintStyleProvider = new StyleProvider(
                Style.newBuilder().setStyleId("red").setColor(red).build(), mAssetProvider);
        StyleIdsStack redTintStyle = StyleIdsStack.newBuilder().addStyleIds("red").build();
        when(mFrameContext.makeStyleFor(redTintStyle)).thenReturn(redTintStyleProvider);
        Element modelWithOverlayColor = Element.newBuilder()
                                                .setStyleReferences(redTintStyle)
                                                .setImageElement(defaultImageElement)
                                                .build();
        // Bind and expect tint to be set
        mAdapter.createAdapter(modelWithOverlayColor, mFrameContext);
        mAdapter.bindModel(modelWithOverlayColor, mFrameContext);
        verify(mFrameContext).makeStyleFor(redTintStyle);
        assertThat(mAdapter.mOverlayColorForCallback).isEqualTo(red);
    }
    @Test
    public void testBindModel_filtersImageSources() {
        // Only the source surviving media-query filtering should be requested from the host.
        ImageSource activeSource =
                ImageSource.newBuilder()
                        .addConditions(MediaQueryCondition.newBuilder().setDarkLight(
                                DarkLightCondition.newBuilder().setMode(DarkLightMode.DARK)))
                        .build();
        ImageSource inactiveSource =
                ImageSource.newBuilder()
                        .addConditions(MediaQueryCondition.newBuilder().setDarkLight(
                                DarkLightCondition.newBuilder().setMode(DarkLightMode.LIGHT)))
                        .build();
        Image image =
                Image.newBuilder().addSources(activeSource).addSources(inactiveSource).build();
        Image filteredImage = Image.newBuilder().addSources(activeSource).build();
        when(mFrameContext.filterImageSourcesByMediaQueryCondition(image))
                .thenReturn(filteredImage);
        Element model = asElement(ImageElement.newBuilder().setImage(image).build());
        mAdapter.createAdapter(model, mFrameContext);
        mAdapter.bindModel(model, mFrameContext);
        verify(mAssetProvider)
                .getImage(eq(filteredImage), anyInt(), anyInt(), any(LoadImageCallback.class));
    }
    @Test
    public void testBindModel_setsAspectRatio() {
        StyleIdsStack styles = StyleIdsStack.newBuilder().addStyleIds("stylecat").build();
        setStyle(null, null);
        Element model = asElement(ImageElement.newBuilder()
                                          .setImage(Image.newBuilder().addSources(
                                                  ImageSource.newBuilder()
                                                          .setWidthPx(100)
                                                          .setHeightPx(20) // Aspect ratio of 5.0
                                                          .setUrl("http://whatever")))
                                          .setStyleReferences(styles)
                                          .build());
        mAdapter.createAdapter(model, mFrameContext);
        mAdapter.bindModel(model, mFrameContext);
        mImageView = mAdapter.getBaseView();
        // With an exact width of 10 and an unconstrained height, the 5:1 ratio gives height 2.
        mImageView.measure(MeasureSpec.makeMeasureSpec(10, MeasureSpec.EXACTLY),
                MeasureSpec.makeMeasureSpec(0, MeasureSpec.UNSPECIFIED));
        assertThat(mImageView.getMeasuredWidth()).isEqualTo(10);
        assertThat(mImageView.getMeasuredHeight()).isEqualTo(2);
    }
    @Test
    public void testUnbind() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        mAdapter.unbindModel();
        // Unbinding clears the drawable but keeps the view and computed dimensions.
        assertThat(mAdapter.getView()).isNotNull();
        assertThat(mAdapter.getBaseView().getDrawable()).isNull();
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(mHeightPx);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(mWidthPx);
    }
    @Test
    public void testUnbind_cancelsCallback() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        mImageView = mAdapter.getBaseView();
        mAdapter.unbindModel();
        verify(mAssetProvider).getImage(DEFAULT_IMAGE, WIDTH_DP, HEIGHT_DP, mLoadImageCallback);
        verify(mLoadImageCallback).cancel();
    }
    @Test
    public void testReleaseAdapter_resetsDims() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        mAdapter.bindModel(DEFAULT_MODEL, mFrameContext);
        mAdapter.unbindModel();
        mAdapter.releaseAdapter();
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(DIMENSION_NOT_SET);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(DIMENSION_NOT_SET);
    }
    @Test
    public void testComputedDimensions_unbound() {
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(DIMENSION_NOT_SET);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(DIMENSION_NOT_SET);
    }
    @Test
    public void testComputedDimensions_bound() {
        setStyle(HEIGHT_DP, WIDTH_DP);
        mAdapter.createAdapter(DEFAULT_MODEL, mFrameContext);
        assertThat(mAdapter.getComputedHeightPx()).isEqualTo(mHeightPx);
        assertThat(mAdapter.getComputedWidthPx()).isEqualTo(mWidthPx);
    }
    @Test
    public void testGetAspectRatio_succeeds() {
        // The first source with both dimensions set wins (99x33 -> ratio 3.0).
        Image image = Image.newBuilder()
                              .addSources(ImageSource.getDefaultInstance())
                              .addSources(ImageSource.newBuilder().setHeightPx(123))
                              .addSources(ImageSource.newBuilder().setWidthPx(456))
                              .addSources(ImageSource.newBuilder().setWidthPx(99).setHeightPx(
                                      33)) // This one gets picked
                              .addSources(ImageSource.newBuilder().setWidthPx(100).setHeightPx(50))
                              .build();
        assertThat(ImageElementAdapter.getAspectRatio(image)).isWithin(0.01f).of(3.0f);
    }
    @Test
    public void testGetAspectRatio_fails() {
        // No source has both dimensions, so no aspect ratio can be derived.
        Image image = Image.newBuilder()
                              .addSources(ImageSource.getDefaultInstance())
                              .addSources(ImageSource.newBuilder().setHeightPx(123))
                              .addSources(ImageSource.newBuilder().setWidthPx(456))
                              .build();
        assertThat(ImageElementAdapter.getAspectRatio(image)).isZero();
        assertThat(ImageElementAdapter.getAspectRatio(Image.getDefaultInstance())).isZero();
    }
    @Test
    public void testGetModelFromElement() {
        ImageElement model =
                ImageElement.newBuilder()
                        .setStyleReferences(StyleIdsStack.newBuilder().addStyleIds("image"))
                        .build();
        Element elementWithModel = Element.newBuilder().setImageElement(model).build();
        assertThat(mAdapter.getModelFromElement(elementWithModel)).isSameInstanceAs(model);
        Element elementWithWrongModel =
                Element.newBuilder().setCustomElement(CustomElement.getDefaultInstance()).build();
        assertThatRunnable(() -> mAdapter.getModelFromElement(elementWithWrongModel))
                .throwsAnExceptionOfType(PietFatalException.class)
                .that()
                .hasMessageThat()
                .contains("Missing ImageElement");
        Element emptyElement = Element.getDefaultInstance();
        assertThatRunnable(() -> mAdapter.getModelFromElement(emptyElement))
                .throwsAnExceptionOfType(PietFatalException.class)
                .that()
                .hasMessageThat()
                .contains("Missing ImageElement");
    }
    /**
     * Stubs the mocked StyleProvider's height/width accessors; a null argument means that
     * dimension is not set by the style (has* returns false, get* returns DIMENSION_NOT_SET).
     */
    private void setStyle(/*@Nullable*/ Integer height, /*@Nullable*/ Integer width) {
        if (height != null) {
            when(mStyleProvider.hasHeight()).thenReturn(true);
            when(mStyleProvider.getHeightSpecPx(mContext)).thenReturn(height);
        } else {
            when(mStyleProvider.hasHeight()).thenReturn(false);
            when(mStyleProvider.getHeightSpecPx(mContext)).thenReturn(DIMENSION_NOT_SET);
        }
        if (width != null) {
            when(mStyleProvider.hasWidth()).thenReturn(true);
            when(mStyleProvider.getWidthSpecPx(mContext)).thenReturn(width);
        } else {
            when(mStyleProvider.hasWidth()).thenReturn(false);
            when(mStyleProvider.getWidthSpecPx(mContext)).thenReturn(DIMENSION_NOT_SET);
        }
    }
    /** Wraps an ImageElement in an Element, the type adapters actually consume. */
    private static Element asElement(ImageElement imageElement) {
        return Element.newBuilder().setImageElement(imageElement).build();
    }
    /**
     * Test subclass that records the arguments passed to createLoadImageCallback and returns
     * the mocked callback instead of constructing a real one, so tests can assert on the scale
     * type and overlay color the adapter would have used.
     */
    private class ImageElementAdapterForTest extends ImageElementAdapter {
        private ScaleType mScaleTypeForCallback;
        private Integer mOverlayColorForCallback;
        private ImageElementAdapterForTest(Context context, AdapterParameters parameters) {
            super(context, parameters);
        }
        @Override
        LoadImageCallback createLoadImageCallback(ScaleType scaleType,
                /*@Nullable*/ Integer overlayColor, FrameContext frameContext) {
            this.mScaleTypeForCallback = scaleType;
            this.mOverlayColorForCallback = overlayColor;
            return mLoadImageCallback;
        }
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.plugins.gradle.importing;
import com.intellij.compiler.server.BuildManager;
import com.intellij.openapi.application.PathManager;
import com.intellij.openapi.application.Result;
import com.intellij.openapi.application.WriteAction;
import com.intellij.openapi.externalSystem.model.ProjectSystemId;
import com.intellij.openapi.externalSystem.model.settings.ExternalSystemExecutionSettings;
import com.intellij.openapi.externalSystem.settings.ExternalSystemSettingsListenerAdapter;
import com.intellij.openapi.externalSystem.test.ExternalSystemImportingTestCase;
import com.intellij.openapi.externalSystem.util.ExternalSystemApiUtil;
import com.intellij.openapi.projectRoots.JavaSdk;
import com.intellij.openapi.projectRoots.ProjectJdkTable;
import com.intellij.openapi.projectRoots.Sdk;
import com.intellij.openapi.projectRoots.impl.SdkConfigurationUtil;
import com.intellij.openapi.ui.Messages;
import com.intellij.openapi.ui.TestDialog;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.LocalFileSystem;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.testFramework.IdeaTestUtil;
import com.intellij.util.PathUtil;
import com.intellij.util.containers.ContainerUtil;
import com.intellij.util.io.PathKt;
import org.gradle.StartParameter;
import org.gradle.util.GradleVersion;
import org.gradle.wrapper.GradleWrapperMain;
import org.gradle.wrapper.PathAssembler;
import org.gradle.wrapper.WrapperConfiguration;
import org.intellij.lang.annotations.Language;
import org.jetbrains.annotations.NonNls;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.plugins.gradle.settings.DistributionType;
import org.jetbrains.plugins.gradle.settings.GradleProjectSettings;
import org.jetbrains.plugins.gradle.settings.GradleSettings;
import org.jetbrains.plugins.gradle.tooling.VersionMatcherRule;
import org.jetbrains.plugins.gradle.tooling.builder.AbstractModelBuilderTest;
import org.jetbrains.plugins.gradle.util.GradleConstants;
import org.jetbrains.plugins.gradle.util.GradleUtil;
import org.junit.Rule;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.io.File;
import java.io.IOException;
import java.io.StringWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.Arrays;
import java.util.Collection;
import java.util.List;
import java.util.Properties;
import java.util.zip.ZipException;
import java.util.zip.ZipFile;
import static org.jetbrains.plugins.gradle.tooling.builder.AbstractModelBuilderTest.DistributionLocator;
import static org.jetbrains.plugins.gradle.tooling.builder.AbstractModelBuilderTest.SUPPORTED_GRADLE_VERSIONS;
import static org.junit.Assume.assumeThat;
@RunWith(value = Parameterized.class)
public abstract class GradleImportingTestCase extends ExternalSystemImportingTestCase {
  public static final String BASE_GRADLE_VERSION = AbstractModelBuilderTest.BASE_GRADLE_VERSION;
  protected static final String GRADLE_JDK_NAME = "Gradle JDK";
  // Short idle TTL so the external-system daemon processes spawned by tests die quickly.
  private static final int GRADLE_DAEMON_TTL_MS = 10000;

  @Rule public TestName name = new TestName();
  @Rule public VersionMatcherRule versionMatcherRule = new VersionMatcherRule();

  // Injected by the Parameterized runner; see data().
  @NotNull
  @org.junit.runners.Parameterized.Parameter(0)
  public String gradleVersion;
  private GradleProjectSettings myProjectSettings;
  // Null when setUp() did not run (e.g. no real JDK available); tearDown() checks this.
  private String myJdkHome;

  @Override
  public void setUp() throws Exception {
    myJdkHome = IdeaTestUtil.requireRealJdkHome();
    super.setUp();
    // Skip the whole test when this Gradle version is excluded by the matcher rule.
    assumeThat(gradleVersion, versionMatcherRule.getMatcher());
    new WriteAction() {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        // Recreate the test JDK from scratch so a stale table entry never leaks between runs.
        Sdk oldJdk = ProjectJdkTable.getInstance().findJdk(GRADLE_JDK_NAME);
        if (oldJdk != null) {
          ProjectJdkTable.getInstance().removeJdk(oldJdk);
        }
        VirtualFile jdkHomeDir = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(new File(myJdkHome));
        Sdk jdk = SdkConfigurationUtil.setupSdk(new Sdk[0], jdkHomeDir, JavaSdk.getInstance(), true, null, GRADLE_JDK_NAME);
        assertNotNull("Cannot create JDK for " + myJdkHome, jdk);
        ProjectJdkTable.getInstance().addJdk(jdk);
      }
    }.execute();
    myProjectSettings = new GradleProjectSettings();
    GradleSettings.getInstance(myProject).setGradleVmOptions("-Xmx128m -XX:MaxPermSize=64m");
    System.setProperty(ExternalSystemExecutionSettings.REMOTE_PROCESS_IDLE_TTL_IN_MS_KEY, String.valueOf(GRADLE_DAEMON_TTL_MS));
    configureWrapper();
  }

  @Override
  public void tearDown() throws Exception {
    if (myJdkHome == null) {
      //super.setUp() wasn't called
      return;
    }
    try {
      new WriteAction() {
        @Override
        protected void run(@NotNull Result result) throws Throwable {
          Sdk old = ProjectJdkTable.getInstance().findJdk(GRADLE_JDK_NAME);
          if (old != null) {
            SdkConfigurationUtil.removeSdk(old);
          }
        }
      }.execute();
      Messages.setTestDialog(TestDialog.DEFAULT);
      PathKt.delete(BuildManager.getInstance().getBuildSystemDirectory());
    }
    finally {
      super.tearDown();
    }
  }

  /** Registers the JDK home and the IDE config path as roots the test is allowed to touch. */
  @Override
  protected void collectAllowedRoots(final List<String> roots) throws IOException {
    roots.add(myJdkHome);
    roots.addAll(collectRootsInside(myJdkHome));
    roots.add(PathManager.getConfigPath());
  }

  @Override
  public String getName() {
    // The parameterized method name may contain characters illegal in file names; sanitize it.
    return name.getMethodName() == null ? super.getName() : FileUtil.sanitizeFileName(name.getMethodName());
  }

  /** Gradle versions to run every test against, one parameterized instance per version. */
  @Parameterized.Parameters(name = "{index}: with Gradle-{0}")
  public static Collection<Object[]> data() throws Throwable {
    return Arrays.asList(SUPPORTED_GRADLE_VERSIONS);
  }

  @Override
  protected String getTestsTempDir() {
    return "gradleImportTests";
  }

  @Override
  protected String getExternalSystemConfigFileName() {
    return "build.gradle";
  }

  /** Imports the project with one IDE module per Gradle project (source sets not split out). */
  protected void importProjectUsingSingeModulePerGradleProject() {
    getCurrentExternalProjectSettings().setResolveModulePerSourceSet(false);
    importProject();
  }

  @Override
  protected void importProject() {
    // Point freshly linked projects at the test JDK before the import actually runs.
    ExternalSystemApiUtil.subscribe(myProject, GradleConstants.SYSTEM_ID, new ExternalSystemSettingsListenerAdapter() {
      @Override
      public void onProjectsLinked(@NotNull Collection settings) {
        final Object item = ContainerUtil.getFirstItem(settings);
        if (item instanceof GradleProjectSettings) {
          ((GradleProjectSettings)item).setGradleJvm(GRADLE_JDK_NAME);
        }
      }
    });
    super.importProject();
  }

  protected void importProjectUsingSingeModulePerGradleProject(@NonNls @Language("Groovy") String config) throws IOException {
    getCurrentExternalProjectSettings().setResolveModulePerSourceSet(false);
    importProject(config);
  }

  @Override
  protected void importProject(@NonNls @Language("Groovy") String config) throws IOException {
    config = injectRepo(config);
    super.importProject(config);
  }

  /** Prefixes the build script with an internal Maven mirror so tests avoid the public repos. */
  @NotNull
  protected String injectRepo(@NonNls @Language("Groovy") String config) {
    config = "allprojects {\n" +
             "  repositories {\n" +
             "    maven {\n" +
             "      url 'http://maven.labs.intellij.net/repo1'\n" +
             "    }\n" +
             "  }" +
             "}\n" + config;
    return config;
  }

  @Override
  protected GradleProjectSettings getCurrentExternalProjectSettings() {
    return myProjectSettings;
  }

  @Override
  protected ProjectSystemId getExternalSystemId() {
    return GradleConstants.SYSTEM_ID;
  }

  protected VirtualFile createSettingsFile(@NonNls @Language("Groovy") String content) throws IOException {
    return createProjectSubFile("settings.gradle", content);
  }

  protected boolean isGradle40orNewer() {
    return GradleVersion.version(gradleVersion).compareTo(GradleVersion.version("4.0")) >= 0;
  }

  /**
   * Configures the project to use the Gradle wrapper for {@link #gradleVersion}: copies the
   * wrapper jar into the project, writes gradle-wrapper.properties pointing at the matching
   * distribution, and removes a previously downloaded distribution zip if it is corrupted.
   */
  private void configureWrapper() throws IOException, URISyntaxException {
    final URI distributionUri = new DistributionLocator().getDistributionFor(GradleVersion.version(gradleVersion));
    myProjectSettings.setDistributionType(DistributionType.DEFAULT_WRAPPED);
    final VirtualFile wrapperJarFrom = LocalFileSystem.getInstance().refreshAndFindFileByIoFile(wrapperJar());
    assert wrapperJarFrom != null;
    final VirtualFile wrapperJarFromTo = createProjectSubFile("gradle/wrapper/gradle-wrapper.jar");
    new WriteAction() {
      @Override
      protected void run(@NotNull Result result) throws Throwable {
        wrapperJarFromTo.setBinaryContent(wrapperJarFrom.contentsToByteArray());
      }
    }.execute().throwException();
    Properties properties = new Properties();
    properties.setProperty("distributionBase", "GRADLE_USER_HOME");
    properties.setProperty("distributionPath", "wrapper/dists");
    properties.setProperty("zipStoreBase", "GRADLE_USER_HOME");
    properties.setProperty("zipStorePath", "wrapper/dists");
    properties.setProperty("distributionUrl", distributionUri.toString());
    StringWriter writer = new StringWriter();
    properties.store(writer, null);
    createProjectSubFile("gradle/wrapper/gradle-wrapper.properties", writer.toString());
    WrapperConfiguration wrapperConfiguration = GradleUtil.getWrapperConfiguration(getProjectPath());
    PathAssembler.LocalDistribution localDistribution = new PathAssembler(
      StartParameter.DEFAULT_GRADLE_USER_HOME).getDistribution(wrapperConfiguration);
    File zip = localDistribution.getZipFile();
    try {
      if (zip.exists()) {
        // Opening the archive validates it; try-with-resources guarantees the handle is
        // closed even if validation fails partway (the previous manual close() leaked it).
        try (ZipFile ignored = new ZipFile(zip)) {
          // Constructor performs the integrity check; nothing else to do.
        }
      }
    }
    catch (ZipException e) {
      e.printStackTrace();
      System.out.println("Corrupted file will be removed: " + zip.getPath());
      FileUtil.delete(zip);
    }
    catch (IOException e) {
      e.printStackTrace();
    }
  }

  /** Locates the gradle-wrapper jar on the test classpath via the GradleWrapperMain class. */
  @NotNull
  private static File wrapperJar() {
    return new File(PathUtil.getJarPathForClass(GradleWrapperMain.class));
  }
}
| |
// Copyright (c) 2008 The Board of Trustees of The Leland Stanford Junior University
// Copyright (c) 2011, 2012 Open Networking Foundation
// Copyright (c) 2012, 2013 Big Switch Networks, Inc.
// This library was generated by the LoxiGen Compiler.
// See the file LICENSE.txt which should have been included in the source distribution
// Automatically generated by LOXI from template of_class.java
// Do not modify
package org.projectfloodlight.openflow.protocol.ver13;
import org.projectfloodlight.openflow.protocol.*;
import org.projectfloodlight.openflow.protocol.action.*;
import org.projectfloodlight.openflow.protocol.actionid.*;
import org.projectfloodlight.openflow.protocol.bsntlv.*;
import org.projectfloodlight.openflow.protocol.errormsg.*;
import org.projectfloodlight.openflow.protocol.meterband.*;
import org.projectfloodlight.openflow.protocol.instruction.*;
import org.projectfloodlight.openflow.protocol.instructionid.*;
import org.projectfloodlight.openflow.protocol.match.*;
import org.projectfloodlight.openflow.protocol.stat.*;
import org.projectfloodlight.openflow.protocol.oxm.*;
import org.projectfloodlight.openflow.protocol.oxs.*;
import org.projectfloodlight.openflow.protocol.queueprop.*;
import org.projectfloodlight.openflow.types.*;
import org.projectfloodlight.openflow.util.*;
import org.projectfloodlight.openflow.exceptions.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Set;
import io.netty.buffer.ByteBuf;
import com.google.common.hash.PrimitiveSink;
import com.google.common.hash.Funnel;
/**
 * Generated (loxigen-style) implementation of the BSN LACP convergence
 * notification message for OpenFlow 1.3 (wire version 4, fixed wire length 52).
 * Instances are immutable; obtain modified copies via {@link #createBuilder()}.
 */
class OFBsnLacpConvergenceNotifVer13 implements OFBsnLacpConvergenceNotif {
    private static final Logger logger = LoggerFactory.getLogger(OFBsnLacpConvergenceNotifVer13.class);
    // version: 1.3
    final static byte WIRE_VERSION = 4;
    final static int LENGTH = 52;

    // Default values used by Builder.build() when a field was never set.
    private final static long DEFAULT_XID = 0x0L;
    private final static short DEFAULT_CONVERGENCE_STATUS = (short) 0x0;
    private final static OFPort DEFAULT_PORT_NO = OFPort.ANY;
    private final static int DEFAULT_ACTOR_SYS_PRIORITY = 0x0;
    private final static MacAddress DEFAULT_ACTOR_SYS_MAC = MacAddress.NONE;
    private final static int DEFAULT_ACTOR_PORT_PRIORITY = 0x0;
    private final static int DEFAULT_ACTOR_PORT_NUM = 0x0;
    private final static int DEFAULT_ACTOR_KEY = 0x0;
    private final static int DEFAULT_PARTNER_SYS_PRIORITY = 0x0;
    private final static MacAddress DEFAULT_PARTNER_SYS_MAC = MacAddress.NONE;
    private final static int DEFAULT_PARTNER_PORT_PRIORITY = 0x0;
    private final static int DEFAULT_PARTNER_PORT_NUM = 0x0;
    private final static int DEFAULT_PARTNER_KEY = 0x0;

    // OF message fields (all immutable; object-typed fields are never null,
    // enforced by the constructor).
    private final long xid;
    private final short convergenceStatus;
    private final OFPort portNo;
    private final int actorSysPriority;
    private final MacAddress actorSysMac;
    private final int actorPortPriority;
    private final int actorPortNum;
    private final int actorKey;
    private final int partnerSysPriority;
    private final MacAddress partnerSysMac;
    private final int partnerPortPriority;
    private final int partnerPortNum;
    private final int partnerKey;
    //
    // Immutable default instance
    final static OFBsnLacpConvergenceNotifVer13 DEFAULT = new OFBsnLacpConvergenceNotifVer13(
        DEFAULT_XID, DEFAULT_CONVERGENCE_STATUS, DEFAULT_PORT_NO, DEFAULT_ACTOR_SYS_PRIORITY, DEFAULT_ACTOR_SYS_MAC, DEFAULT_ACTOR_PORT_PRIORITY, DEFAULT_ACTOR_PORT_NUM, DEFAULT_ACTOR_KEY, DEFAULT_PARTNER_SYS_PRIORITY, DEFAULT_PARTNER_SYS_MAC, DEFAULT_PARTNER_PORT_PRIORITY, DEFAULT_PARTNER_PORT_NUM, DEFAULT_PARTNER_KEY
    );

    // Package private constructor - used by readers, builders, and factory.
    // Rejects null object fields and normalizes numeric fields to their
    // unsigned wire widths (U32/U16/U8) so stored values match what is
    // serialized.
    OFBsnLacpConvergenceNotifVer13(long xid, short convergenceStatus, OFPort portNo, int actorSysPriority, MacAddress actorSysMac, int actorPortPriority, int actorPortNum, int actorKey, int partnerSysPriority, MacAddress partnerSysMac, int partnerPortPriority, int partnerPortNum, int partnerKey) {
        if(portNo == null) {
            throw new NullPointerException("OFBsnLacpConvergenceNotifVer13: property portNo cannot be null");
        }
        if(actorSysMac == null) {
            throw new NullPointerException("OFBsnLacpConvergenceNotifVer13: property actorSysMac cannot be null");
        }
        if(partnerSysMac == null) {
            throw new NullPointerException("OFBsnLacpConvergenceNotifVer13: property partnerSysMac cannot be null");
        }
        this.xid = U32.normalize(xid);
        this.convergenceStatus = U8.normalize(convergenceStatus);
        this.portNo = portNo;
        this.actorSysPriority = U16.normalize(actorSysPriority);
        this.actorSysMac = actorSysMac;
        this.actorPortPriority = U16.normalize(actorPortPriority);
        this.actorPortNum = U16.normalize(actorPortNum);
        this.actorKey = U16.normalize(actorKey);
        this.partnerSysPriority = U16.normalize(partnerSysPriority);
        this.partnerSysMac = partnerSysMac;
        this.partnerPortPriority = U16.normalize(partnerPortPriority);
        this.partnerPortNum = U16.normalize(partnerPortNum);
        this.partnerKey = U16.normalize(partnerKey);
    }
    // Accessors for OF message fields. All are simple reads of the immutable
    // state set in the constructor; getExperimenter()/getSubtype() return the
    // fixed identifiers for this experimenter message type.
    @Override
    public OFVersion getVersion() {
        return OFVersion.OF_13;
    }
    @Override
    public OFType getType() {
        return OFType.EXPERIMENTER;
    }
    @Override
    public long getXid() {
        return xid;
    }
    @Override
    public long getExperimenter() {
        // Fixed experimenter id for this message type (also validated by Reader).
        return 0x5c16c7L;
    }
    @Override
    public long getSubtype() {
        // Fixed subtype for this message type (also validated by Reader).
        return 0x2bL;
    }
    @Override
    public short getConvergenceStatus() {
        return convergenceStatus;
    }
    @Override
    public OFPort getPortNo() {
        return portNo;
    }
    @Override
    public int getActorSysPriority() {
        return actorSysPriority;
    }
    @Override
    public MacAddress getActorSysMac() {
        return actorSysMac;
    }
    @Override
    public int getActorPortPriority() {
        return actorPortPriority;
    }
    @Override
    public int getActorPortNum() {
        return actorPortNum;
    }
    @Override
    public int getActorKey() {
        return actorKey;
    }
    @Override
    public int getPartnerSysPriority() {
        return partnerSysPriority;
    }
    @Override
    public MacAddress getPartnerSysMac() {
        return partnerSysMac;
    }
    @Override
    public int getPartnerPortPriority() {
        return partnerPortPriority;
    }
    @Override
    public int getPartnerPortNum() {
        return partnerPortNum;
    }
    @Override
    public int getPartnerKey() {
        return partnerKey;
    }

    // Returns a builder pre-populated with this message's values.
    public OFBsnLacpConvergenceNotif.Builder createBuilder() {
        return new BuilderWithParent(this);
    }
    /**
     * Builder keyed to a parent message: any field not explicitly set via a
     * setter falls back to the corresponding value of {@code parentMessage}
     * in {@link #build()}.
     */
    static class BuilderWithParent implements OFBsnLacpConvergenceNotif.Builder {
        final OFBsnLacpConvergenceNotifVer13 parentMessage;

        // OF message fields: each value is paired with a "...Set" flag so that
        // build() can distinguish "explicitly set" from "inherit from parent".
        private boolean xidSet;
        private long xid;
        private boolean convergenceStatusSet;
        private short convergenceStatus;
        private boolean portNoSet;
        private OFPort portNo;
        private boolean actorSysPrioritySet;
        private int actorSysPriority;
        private boolean actorSysMacSet;
        private MacAddress actorSysMac;
        private boolean actorPortPrioritySet;
        private int actorPortPriority;
        private boolean actorPortNumSet;
        private int actorPortNum;
        private boolean actorKeySet;
        private int actorKey;
        private boolean partnerSysPrioritySet;
        private int partnerSysPriority;
        private boolean partnerSysMacSet;
        private MacAddress partnerSysMac;
        private boolean partnerPortPrioritySet;
        private int partnerPortPriority;
        private boolean partnerPortNumSet;
        private int partnerPortNum;
        private boolean partnerKeySet;
        private int partnerKey;

        BuilderWithParent(OFBsnLacpConvergenceNotifVer13 parentMessage) {
            this.parentMessage = parentMessage;
        }
        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }
        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }
        @Override
        public long getXid() {
            return xid;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }
        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }
        @Override
        public long getSubtype() {
            return 0x2bL;
        }
        @Override
        public short getConvergenceStatus() {
            return convergenceStatus;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setConvergenceStatus(short convergenceStatus) {
            this.convergenceStatus = convergenceStatus;
            this.convergenceStatusSet = true;
            return this;
        }
        @Override
        public OFPort getPortNo() {
            return portNo;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }
        @Override
        public int getActorSysPriority() {
            return actorSysPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorSysPriority(int actorSysPriority) {
            this.actorSysPriority = actorSysPriority;
            this.actorSysPrioritySet = true;
            return this;
        }
        @Override
        public MacAddress getActorSysMac() {
            return actorSysMac;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorSysMac(MacAddress actorSysMac) {
            this.actorSysMac = actorSysMac;
            this.actorSysMacSet = true;
            return this;
        }
        @Override
        public int getActorPortPriority() {
            return actorPortPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorPortPriority(int actorPortPriority) {
            this.actorPortPriority = actorPortPriority;
            this.actorPortPrioritySet = true;
            return this;
        }
        @Override
        public int getActorPortNum() {
            return actorPortNum;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorPortNum(int actorPortNum) {
            this.actorPortNum = actorPortNum;
            this.actorPortNumSet = true;
            return this;
        }
        @Override
        public int getActorKey() {
            return actorKey;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorKey(int actorKey) {
            this.actorKey = actorKey;
            this.actorKeySet = true;
            return this;
        }
        @Override
        public int getPartnerSysPriority() {
            return partnerSysPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerSysPriority(int partnerSysPriority) {
            this.partnerSysPriority = partnerSysPriority;
            this.partnerSysPrioritySet = true;
            return this;
        }
        @Override
        public MacAddress getPartnerSysMac() {
            return partnerSysMac;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerSysMac(MacAddress partnerSysMac) {
            this.partnerSysMac = partnerSysMac;
            this.partnerSysMacSet = true;
            return this;
        }
        @Override
        public int getPartnerPortPriority() {
            return partnerPortPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerPortPriority(int partnerPortPriority) {
            this.partnerPortPriority = partnerPortPriority;
            this.partnerPortPrioritySet = true;
            return this;
        }
        @Override
        public int getPartnerPortNum() {
            return partnerPortNum;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerPortNum(int partnerPortNum) {
            this.partnerPortNum = partnerPortNum;
            this.partnerPortNumSet = true;
            return this;
        }
        @Override
        public int getPartnerKey() {
            return partnerKey;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerKey(int partnerKey) {
            this.partnerKey = partnerKey;
            this.partnerKeySet = true;
            return this;
        }

        /**
         * Builds the message: each field uses the explicitly-set value if its
         * flag is set, otherwise the parent message's value. The null checks
         * mirror the message constructor's invariants.
         */
        @Override
        public OFBsnLacpConvergenceNotif build() {
                long xid = this.xidSet ? this.xid : parentMessage.xid;
                short convergenceStatus = this.convergenceStatusSet ? this.convergenceStatus : parentMessage.convergenceStatus;
                OFPort portNo = this.portNoSet ? this.portNo : parentMessage.portNo;
                if(portNo == null)
                    throw new NullPointerException("Property portNo must not be null");
                int actorSysPriority = this.actorSysPrioritySet ? this.actorSysPriority : parentMessage.actorSysPriority;
                MacAddress actorSysMac = this.actorSysMacSet ? this.actorSysMac : parentMessage.actorSysMac;
                if(actorSysMac == null)
                    throw new NullPointerException("Property actorSysMac must not be null");
                int actorPortPriority = this.actorPortPrioritySet ? this.actorPortPriority : parentMessage.actorPortPriority;
                int actorPortNum = this.actorPortNumSet ? this.actorPortNum : parentMessage.actorPortNum;
                int actorKey = this.actorKeySet ? this.actorKey : parentMessage.actorKey;
                int partnerSysPriority = this.partnerSysPrioritySet ? this.partnerSysPriority : parentMessage.partnerSysPriority;
                MacAddress partnerSysMac = this.partnerSysMacSet ? this.partnerSysMac : parentMessage.partnerSysMac;
                if(partnerSysMac == null)
                    throw new NullPointerException("Property partnerSysMac must not be null");
                int partnerPortPriority = this.partnerPortPrioritySet ? this.partnerPortPriority : parentMessage.partnerPortPriority;
                int partnerPortNum = this.partnerPortNumSet ? this.partnerPortNum : parentMessage.partnerPortNum;
                int partnerKey = this.partnerKeySet ? this.partnerKey : parentMessage.partnerKey;
                //
                return new OFBsnLacpConvergenceNotifVer13(
                    xid,
                    convergenceStatus,
                    portNo,
                    actorSysPriority,
                    actorSysMac,
                    actorPortPriority,
                    actorPortNum,
                    actorKey,
                    partnerSysPriority,
                    partnerSysMac,
                    partnerPortPriority,
                    partnerPortNum,
                    partnerKey
                );
        }
    }
    /**
     * Stand-alone builder: any field not explicitly set falls back to the
     * class's DEFAULT_* constants in {@link #build()}.
     */
    static class Builder implements OFBsnLacpConvergenceNotif.Builder {
        // OF message fields: each value is paired with a "...Set" flag so that
        // build() can distinguish "explicitly set" from "use default".
        private boolean xidSet;
        private long xid;
        private boolean convergenceStatusSet;
        private short convergenceStatus;
        private boolean portNoSet;
        private OFPort portNo;
        private boolean actorSysPrioritySet;
        private int actorSysPriority;
        private boolean actorSysMacSet;
        private MacAddress actorSysMac;
        private boolean actorPortPrioritySet;
        private int actorPortPriority;
        private boolean actorPortNumSet;
        private int actorPortNum;
        private boolean actorKeySet;
        private int actorKey;
        private boolean partnerSysPrioritySet;
        private int partnerSysPriority;
        private boolean partnerSysMacSet;
        private MacAddress partnerSysMac;
        private boolean partnerPortPrioritySet;
        private int partnerPortPriority;
        private boolean partnerPortNumSet;
        private int partnerPortNum;
        private boolean partnerKeySet;
        private int partnerKey;

        @Override
        public OFVersion getVersion() {
            return OFVersion.OF_13;
        }
        @Override
        public OFType getType() {
            return OFType.EXPERIMENTER;
        }
        @Override
        public long getXid() {
            return xid;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setXid(long xid) {
            this.xid = xid;
            this.xidSet = true;
            return this;
        }
        @Override
        public long getExperimenter() {
            return 0x5c16c7L;
        }
        @Override
        public long getSubtype() {
            return 0x2bL;
        }
        @Override
        public short getConvergenceStatus() {
            return convergenceStatus;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setConvergenceStatus(short convergenceStatus) {
            this.convergenceStatus = convergenceStatus;
            this.convergenceStatusSet = true;
            return this;
        }
        @Override
        public OFPort getPortNo() {
            return portNo;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPortNo(OFPort portNo) {
            this.portNo = portNo;
            this.portNoSet = true;
            return this;
        }
        @Override
        public int getActorSysPriority() {
            return actorSysPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorSysPriority(int actorSysPriority) {
            this.actorSysPriority = actorSysPriority;
            this.actorSysPrioritySet = true;
            return this;
        }
        @Override
        public MacAddress getActorSysMac() {
            return actorSysMac;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorSysMac(MacAddress actorSysMac) {
            this.actorSysMac = actorSysMac;
            this.actorSysMacSet = true;
            return this;
        }
        @Override
        public int getActorPortPriority() {
            return actorPortPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorPortPriority(int actorPortPriority) {
            this.actorPortPriority = actorPortPriority;
            this.actorPortPrioritySet = true;
            return this;
        }
        @Override
        public int getActorPortNum() {
            return actorPortNum;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorPortNum(int actorPortNum) {
            this.actorPortNum = actorPortNum;
            this.actorPortNumSet = true;
            return this;
        }
        @Override
        public int getActorKey() {
            return actorKey;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setActorKey(int actorKey) {
            this.actorKey = actorKey;
            this.actorKeySet = true;
            return this;
        }
        @Override
        public int getPartnerSysPriority() {
            return partnerSysPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerSysPriority(int partnerSysPriority) {
            this.partnerSysPriority = partnerSysPriority;
            this.partnerSysPrioritySet = true;
            return this;
        }
        @Override
        public MacAddress getPartnerSysMac() {
            return partnerSysMac;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerSysMac(MacAddress partnerSysMac) {
            this.partnerSysMac = partnerSysMac;
            this.partnerSysMacSet = true;
            return this;
        }
        @Override
        public int getPartnerPortPriority() {
            return partnerPortPriority;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerPortPriority(int partnerPortPriority) {
            this.partnerPortPriority = partnerPortPriority;
            this.partnerPortPrioritySet = true;
            return this;
        }
        @Override
        public int getPartnerPortNum() {
            return partnerPortNum;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerPortNum(int partnerPortNum) {
            this.partnerPortNum = partnerPortNum;
            this.partnerPortNumSet = true;
            return this;
        }
        @Override
        public int getPartnerKey() {
            return partnerKey;
        }
        @Override
        public OFBsnLacpConvergenceNotif.Builder setPartnerKey(int partnerKey) {
            this.partnerKey = partnerKey;
            this.partnerKeySet = true;
            return this;
        }
        //
        /**
         * Builds the message: each field uses the explicitly-set value if its
         * flag is set, otherwise the corresponding DEFAULT_* constant. The
         * null checks mirror the message constructor's invariants.
         */
        @Override
        public OFBsnLacpConvergenceNotif build() {
            long xid = this.xidSet ? this.xid : DEFAULT_XID;
            short convergenceStatus = this.convergenceStatusSet ? this.convergenceStatus : DEFAULT_CONVERGENCE_STATUS;
            OFPort portNo = this.portNoSet ? this.portNo : DEFAULT_PORT_NO;
            if(portNo == null)
                throw new NullPointerException("Property portNo must not be null");
            int actorSysPriority = this.actorSysPrioritySet ? this.actorSysPriority : DEFAULT_ACTOR_SYS_PRIORITY;
            MacAddress actorSysMac = this.actorSysMacSet ? this.actorSysMac : DEFAULT_ACTOR_SYS_MAC;
            if(actorSysMac == null)
                throw new NullPointerException("Property actorSysMac must not be null");
            int actorPortPriority = this.actorPortPrioritySet ? this.actorPortPriority : DEFAULT_ACTOR_PORT_PRIORITY;
            int actorPortNum = this.actorPortNumSet ? this.actorPortNum : DEFAULT_ACTOR_PORT_NUM;
            int actorKey = this.actorKeySet ? this.actorKey : DEFAULT_ACTOR_KEY;
            int partnerSysPriority = this.partnerSysPrioritySet ? this.partnerSysPriority : DEFAULT_PARTNER_SYS_PRIORITY;
            MacAddress partnerSysMac = this.partnerSysMacSet ? this.partnerSysMac : DEFAULT_PARTNER_SYS_MAC;
            if(partnerSysMac == null)
                throw new NullPointerException("Property partnerSysMac must not be null");
            int partnerPortPriority = this.partnerPortPrioritySet ? this.partnerPortPriority : DEFAULT_PARTNER_PORT_PRIORITY;
            int partnerPortNum = this.partnerPortNumSet ? this.partnerPortNum : DEFAULT_PARTNER_PORT_NUM;
            int partnerKey = this.partnerKeySet ? this.partnerKey : DEFAULT_PARTNER_KEY;
            return new OFBsnLacpConvergenceNotifVer13(
                xid,
                convergenceStatus,
                portNo,
                actorSysPriority,
                actorSysMac,
                actorPortPriority,
                actorPortNum,
                actorKey,
                partnerSysPriority,
                partnerSysMac,
                partnerPortPriority,
                partnerPortNum,
                partnerKey
            );
        }
    }
    final static Reader READER = new Reader();

    /**
     * Deserializer: parses one message from the buffer, validating the fixed
     * version/type/length/experimenter/subtype header fields. Returns null
     * (and resets the reader index) when the buffer does not yet contain the
     * whole message. Field read order must match Writer.write() exactly.
     */
    static class Reader implements OFMessageReader<OFBsnLacpConvergenceNotif> {
        @Override
        public OFBsnLacpConvergenceNotif readFrom(ByteBuf bb) throws OFParseError {
            int start = bb.readerIndex();
            // fixed value property version == 4
            byte version = bb.readByte();
            if(version != (byte) 0x4)
                throw new OFParseError("Wrong version: Expected=OFVersion.OF_13(4), got="+version);
            // fixed value property type == 4
            byte type = bb.readByte();
            if(type != (byte) 0x4)
                throw new OFParseError("Wrong type: Expected=OFType.EXPERIMENTER(4), got="+type);
            int length = U16.f(bb.readShort());
            if(length != 52)
                throw new OFParseError("Wrong length: Expected=52(52), got="+length);
            if(bb.readableBytes() + (bb.readerIndex() - start) < length) {
                // Buffer does not have all data yet
                bb.readerIndex(start);
                return null;
            }
            if(logger.isTraceEnabled())
                logger.trace("readFrom - length={}", length);
            long xid = U32.f(bb.readInt());
            // fixed value property experimenter == 0x5c16c7L
            int experimenter = bb.readInt();
            if(experimenter != 0x5c16c7)
                throw new OFParseError("Wrong experimenter: Expected=0x5c16c7L(0x5c16c7L), got="+experimenter);
            // fixed value property subtype == 0x2bL
            int subtype = bb.readInt();
            if(subtype != 0x2b)
                throw new OFParseError("Wrong subtype: Expected=0x2bL(0x2bL), got="+subtype);
            short convergenceStatus = U8.f(bb.readByte());
            // pad: 3 bytes
            bb.skipBytes(3);
            OFPort portNo = OFPort.read4Bytes(bb);
            int actorSysPriority = U16.f(bb.readShort());
            MacAddress actorSysMac = MacAddress.read6Bytes(bb);
            int actorPortPriority = U16.f(bb.readShort());
            int actorPortNum = U16.f(bb.readShort());
            int actorKey = U16.f(bb.readShort());
            int partnerSysPriority = U16.f(bb.readShort());
            MacAddress partnerSysMac = MacAddress.read6Bytes(bb);
            int partnerPortPriority = U16.f(bb.readShort());
            int partnerPortNum = U16.f(bb.readShort());
            int partnerKey = U16.f(bb.readShort());
            OFBsnLacpConvergenceNotifVer13 bsnLacpConvergenceNotifVer13 = new OFBsnLacpConvergenceNotifVer13(
                    xid,
                      convergenceStatus,
                      portNo,
                      actorSysPriority,
                      actorSysMac,
                      actorPortPriority,
                      actorPortNum,
                      actorKey,
                      partnerSysPriority,
                      partnerSysMac,
                      partnerPortPriority,
                      partnerPortNum,
                      partnerKey
                    );
            if(logger.isTraceEnabled())
                logger.trace("readFrom - read={}", bsnLacpConvergenceNotifVer13);
            return bsnLacpConvergenceNotifVer13;
        }
    }
    // Feeds this message's content into a hash sink (Guava hashing support).
    public void putTo(PrimitiveSink sink) {
        FUNNEL.funnel(this, sink);
    }
    final static OFBsnLacpConvergenceNotifVer13Funnel FUNNEL = new OFBsnLacpConvergenceNotifVer13Funnel();

    /**
     * Guava Funnel for hashing. NOTE(review): the primitive widths used here
     * differ from the wire encoding (e.g. xid goes in via putLong but is
     * written as a 4-byte int, priorities go in via putInt but are 2-byte
     * shorts on the wire, and the 3 pad bytes are skipped). That only affects
     * hash input layout, not serialization — but any change here changes
     * computed hashes.
     */
    static class OFBsnLacpConvergenceNotifVer13Funnel implements Funnel<OFBsnLacpConvergenceNotifVer13> {
        private static final long serialVersionUID = 1L;
        @Override
        public void funnel(OFBsnLacpConvergenceNotifVer13 message, PrimitiveSink sink) {
            // fixed value property version = 4
            sink.putByte((byte) 0x4);
            // fixed value property type = 4
            sink.putByte((byte) 0x4);
            // fixed value property length = 52
            sink.putShort((short) 0x34);
            sink.putLong(message.xid);
            // fixed value property experimenter = 0x5c16c7L
            sink.putInt(0x5c16c7);
            // fixed value property subtype = 0x2bL
            sink.putInt(0x2b);
            sink.putShort(message.convergenceStatus);
            // skip pad (3 bytes)
            message.portNo.putTo(sink);
            sink.putInt(message.actorSysPriority);
            message.actorSysMac.putTo(sink);
            sink.putInt(message.actorPortPriority);
            sink.putInt(message.actorPortNum);
            sink.putInt(message.actorKey);
            sink.putInt(message.partnerSysPriority);
            message.partnerSysMac.putTo(sink);
            sink.putInt(message.partnerPortPriority);
            sink.putInt(message.partnerPortNum);
            sink.putInt(message.partnerKey);
        }
    }
    // Serializes this message to the buffer in OpenFlow 1.3 wire format.
    public void writeTo(ByteBuf bb) {
        WRITER.write(bb, this);
    }
    final static Writer WRITER = new Writer();

    /**
     * Serializer: writes the fixed header (version 4, type EXPERIMENTER,
     * length 52, experimenter 0x5c16c7, subtype 0x2b) followed by the message
     * fields. Field write order must match Reader.readFrom() exactly.
     */
    static class Writer implements OFMessageWriter<OFBsnLacpConvergenceNotifVer13> {
        @Override
        public void write(ByteBuf bb, OFBsnLacpConvergenceNotifVer13 message) {
            // fixed value property version = 4
            bb.writeByte((byte) 0x4);
            // fixed value property type = 4
            bb.writeByte((byte) 0x4);
            // fixed value property length = 52
            bb.writeShort((short) 0x34);
            bb.writeInt(U32.t(message.xid));
            // fixed value property experimenter = 0x5c16c7L
            bb.writeInt(0x5c16c7);
            // fixed value property subtype = 0x2bL
            bb.writeInt(0x2b);
            bb.writeByte(U8.t(message.convergenceStatus));
            // pad: 3 bytes
            bb.writeZero(3);
            message.portNo.write4Bytes(bb);
            bb.writeShort(U16.t(message.actorSysPriority));
            message.actorSysMac.write6Bytes(bb);
            bb.writeShort(U16.t(message.actorPortPriority));
            bb.writeShort(U16.t(message.actorPortNum));
            bb.writeShort(U16.t(message.actorKey));
            bb.writeShort(U16.t(message.partnerSysPriority));
            message.partnerSysMac.write6Bytes(bb);
            bb.writeShort(U16.t(message.partnerPortPriority));
            bb.writeShort(U16.t(message.partnerPortNum));
            bb.writeShort(U16.t(message.partnerKey));
        }
    }
@Override
public String toString() {
StringBuilder b = new StringBuilder("OFBsnLacpConvergenceNotifVer13(");
b.append("xid=").append(xid);
b.append(", ");
b.append("convergenceStatus=").append(convergenceStatus);
b.append(", ");
b.append("portNo=").append(portNo);
b.append(", ");
b.append("actorSysPriority=").append(actorSysPriority);
b.append(", ");
b.append("actorSysMac=").append(actorSysMac);
b.append(", ");
b.append("actorPortPriority=").append(actorPortPriority);
b.append(", ");
b.append("actorPortNum=").append(actorPortNum);
b.append(", ");
b.append("actorKey=").append(actorKey);
b.append(", ");
b.append("partnerSysPriority=").append(partnerSysPriority);
b.append(", ");
b.append("partnerSysMac=").append(partnerSysMac);
b.append(", ");
b.append("partnerPortPriority=").append(partnerPortPriority);
b.append(", ");
b.append("partnerPortNum=").append(partnerPortNum);
b.append(", ");
b.append("partnerKey=").append(partnerKey);
b.append(")");
return b.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFBsnLacpConvergenceNotifVer13 other = (OFBsnLacpConvergenceNotifVer13) obj;
if( xid != other.xid)
return false;
if( convergenceStatus != other.convergenceStatus)
return false;
if (portNo == null) {
if (other.portNo != null)
return false;
} else if (!portNo.equals(other.portNo))
return false;
if( actorSysPriority != other.actorSysPriority)
return false;
if (actorSysMac == null) {
if (other.actorSysMac != null)
return false;
} else if (!actorSysMac.equals(other.actorSysMac))
return false;
if( actorPortPriority != other.actorPortPriority)
return false;
if( actorPortNum != other.actorPortNum)
return false;
if( actorKey != other.actorKey)
return false;
if( partnerSysPriority != other.partnerSysPriority)
return false;
if (partnerSysMac == null) {
if (other.partnerSysMac != null)
return false;
} else if (!partnerSysMac.equals(other.partnerSysMac))
return false;
if( partnerPortPriority != other.partnerPortPriority)
return false;
if( partnerPortNum != other.partnerPortNum)
return false;
if( partnerKey != other.partnerKey)
return false;
return true;
}
@Override
public boolean equalsIgnoreXid(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (getClass() != obj.getClass())
return false;
OFBsnLacpConvergenceNotifVer13 other = (OFBsnLacpConvergenceNotifVer13) obj;
// ignore XID
if( convergenceStatus != other.convergenceStatus)
return false;
if (portNo == null) {
if (other.portNo != null)
return false;
} else if (!portNo.equals(other.portNo))
return false;
if( actorSysPriority != other.actorSysPriority)
return false;
if (actorSysMac == null) {
if (other.actorSysMac != null)
return false;
} else if (!actorSysMac.equals(other.actorSysMac))
return false;
if( actorPortPriority != other.actorPortPriority)
return false;
if( actorPortNum != other.actorPortNum)
return false;
if( actorKey != other.actorKey)
return false;
if( partnerSysPriority != other.partnerSysPriority)
return false;
if (partnerSysMac == null) {
if (other.partnerSysMac != null)
return false;
} else if (!partnerSysMac.equals(other.partnerSysMac))
return false;
if( partnerPortPriority != other.partnerPortPriority)
return false;
if( partnerPortNum != other.partnerPortNum)
return false;
if( partnerKey != other.partnerKey)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * (int) (xid ^ (xid >>> 32));
result = prime * result + convergenceStatus;
result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
result = prime * result + actorSysPriority;
result = prime * result + ((actorSysMac == null) ? 0 : actorSysMac.hashCode());
result = prime * result + actorPortPriority;
result = prime * result + actorPortNum;
result = prime * result + actorKey;
result = prime * result + partnerSysPriority;
result = prime * result + ((partnerSysMac == null) ? 0 : partnerSysMac.hashCode());
result = prime * result + partnerPortPriority;
result = prime * result + partnerPortNum;
result = prime * result + partnerKey;
return result;
}
    /**
     * Hash code over every field EXCEPT the xid; consistent with
     * {@link #equalsIgnoreXid(Object)}.
     */
    @Override
    public int hashCodeIgnoreXid() {
        final int prime = 31;
        int result = 1;
        // ignore XID
        result = prime * result + convergenceStatus;
        result = prime * result + ((portNo == null) ? 0 : portNo.hashCode());
        result = prime * result + actorSysPriority;
        result = prime * result + ((actorSysMac == null) ? 0 : actorSysMac.hashCode());
        result = prime * result + actorPortPriority;
        result = prime * result + actorPortNum;
        result = prime * result + actorKey;
        result = prime * result + partnerSysPriority;
        result = prime * result + ((partnerSysMac == null) ? 0 : partnerSysMac.hashCode());
        result = prime * result + partnerPortPriority;
        result = prime * result + partnerPortNum;
        result = prime * result + partnerKey;
        return result;
    }
}
| |
package io.craigmiller160.school.service;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import java.time.LocalDate;
import java.util.List;
import javax.transaction.Transactional;
import org.junit.AfterClass;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ApplicationContext;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringJUnit4ClassRunner;
import io.craigmiller160.school.context.AppContext;
import io.craigmiller160.school.entity.Course;
import io.craigmiller160.school.entity.Gender;
import io.craigmiller160.school.entity.ScJoinHolder;
import io.craigmiller160.school.entity.Student;
import io.craigmiller160.school.util.HibernateTestUtil;
/**
* An integration test for the service layer
* of this application. Because the service
* class depends on the DAO classes to successfully
* perform its operations, so if the tests for the DAO
* classes fail, the tests in this class will fail as well.
* <p>
* This is also a LARGE test, as the service class
* joins together operations from all DAOs and as
* such all need to be tested.
*
* @author craig
* @version 1.0
*/
@RunWith(SpringJUnit4ClassRunner.class)
@ContextConfiguration({"classpath:/test-context.xml"})
public class SchoolDataServiceIT {

    /**
     * Output message for the insert operation failing.
     */
    private static final String INSERT_FAIL = "Insert Failed";

    /**
     * Output message for the update operation failing.
     */
    private static final String UPDATE_FAIL = "Update Failed";

    /**
     * Output message for the delete operation failing.
     */
    private static final String DELETE_FAIL = "Delete Failed";

    /**
     * Output message for the create operation failing.
     */
    private static final String CREATE_FAIL = "Create Failed";

    /**
     * The service class of this application, to be tested here.
     */
    @Autowired (required=true)
    private GenericEntityServiceBean schoolDataService;
    /**
     * Get the service class of this application, to be tested here.
     *
     * @return the service class of this application, to be tested here.
     */
    public GenericEntityServiceBean getSchoolDataService() {
        return schoolDataService;
    }
    /**
     * Set the service class of this application, to be tested here.
     * (Also used by the Spring test context for property injection.)
     *
     * @param schoolDataService the service class of this application,
     * to be tested here.
     */
    public void setSchoolDataService(GenericEntityServiceBean schoolDataService) {
        this.schoolDataService = schoolDataService;
    }
/**
* Test CRURD operations with <tt>Student</tt> entities.
*/
@Transactional
@Test
public void testStudentCRUD(){
//Create and insert test entity
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
int studentId = student.getStudentId();
//Get entity and test for successful insert
student = schoolDataService.getEntityById(Student.class, studentId);
assertNotNull(INSERT_FAIL, student);
assertEquals(INSERT_FAIL, student.getFirstName(), "First");
assertEquals(INSERT_FAIL, student.getLastName(), "Last");
assertEquals(INSERT_FAIL, student.getBirthDate(), LocalDate.of(1900, 1, 1));
assertEquals(INSERT_FAIL, student.getGender(), Gender.UNKNOWN);
assertEquals(INSERT_FAIL, student.getGrade(), 1);
//Change content and update
setStudent2(student);
schoolDataService.updateEntity(student);
//Get entity and test for successful update
student = schoolDataService.getEntityById(Student.class, studentId);
assertNotNull(INSERT_FAIL, student);
assertEquals(UPDATE_FAIL, student.getFirstName(), "First2");
assertEquals(UPDATE_FAIL, student.getLastName(), "Last2");
assertEquals(UPDATE_FAIL, student.getBirthDate(), LocalDate.of(1950, 1, 1));
assertEquals(UPDATE_FAIL, student.getGender(), Gender.MALE);
assertEquals(UPDATE_FAIL, student.getGrade(), 2);
//Delete entity
schoolDataService.deleteEntity(student);
//Try to get entity and test for delete
student = schoolDataService.getEntityById(Student.class, studentId);
assertNull(DELETE_FAIL, student);
}
/**
* Test CRURD operations with <tt>Course</tt> entities.
*/
@Transactional
@Test
public void testCourseCRUD(){
//Create and insert test entity
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
int courseId = course.getCourseId();
//Get entity and test for successful insert
course = schoolDataService.getEntityById(Course.class, courseId);
assertNotNull(INSERT_FAIL, course);
assertEquals(INSERT_FAIL, course.getCourseName(), "Name");
assertEquals(INSERT_FAIL, course.getSubject(), "Subject");
assertEquals(INSERT_FAIL, course.getTeacherLastName(), "LastName");
assertEquals(INSERT_FAIL, course.getPeriod(), 1);
//Change content and update
setCourse2(course);
schoolDataService.updateEntity(course);
//Get entity and test for successful update
course = schoolDataService.getEntityById(Course.class, courseId);
assertNotNull(INSERT_FAIL, course);
assertEquals(UPDATE_FAIL, course.getCourseName(), "Name2");
assertEquals(UPDATE_FAIL, course.getSubject(), "Subject2");
assertEquals(UPDATE_FAIL, course.getTeacherLastName(), "LastName2");
assertEquals(UPDATE_FAIL, course.getPeriod(), 2);
//Delete entity
schoolDataService.deleteEntity(course);
//Try to get entity and test for delete
course = schoolDataService.getEntityById(Course.class, courseId);
assertNull(DELETE_FAIL, course);
}
/**
* Test CRURD operations with <tt>JoinHolder</tt> entities.
*/
@Transactional
@Test
public void testScJoinHolderCRUD(){
//Create and insert test entities
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
int studentId = student.getStudentId();
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
int courseId = course.getCourseId();
//Create first joinHolder and insert
ScJoinHolder joinHolder = new ScJoinHolder(student, course);
schoolDataService.insertEntity(joinHolder);
int joinHolderId = joinHolder.getScId();
//Get joinHolder and test values
joinHolder = schoolDataService.getEntityById(ScJoinHolder.class, joinHolderId);
assertNotNull(INSERT_FAIL, joinHolder);
assertEquals(INSERT_FAIL, joinHolder.getCourse().getCourseName(), "Name");
assertEquals(INSERT_FAIL, joinHolder.getStudent().getFirstName(), "First");
//Change content and update
setStudent2(student);
setCourse2(course);
joinHolder.setStudent(student);
joinHolder.setCourse(course);
schoolDataService.updateEntity(joinHolder);
//Get entity and test for successful update
joinHolder = schoolDataService.getEntityById(ScJoinHolder.class, joinHolderId);
assertNotNull(INSERT_FAIL, joinHolder);
assertEquals(UPDATE_FAIL, joinHolder.getCourse().getCourseName(), "Name2");
assertEquals(UPDATE_FAIL, joinHolder.getStudent().getFirstName(), "First2");
//Delete entity
schoolDataService.deleteEntity(joinHolder);
//Try to get entity and test for delete
joinHolder = schoolDataService.getEntityById(ScJoinHolder.class, joinHolderId);
assertNull(DELETE_FAIL, joinHolder);
//Test to ensure that student/course entities haven't been deleted
student = schoolDataService.getEntityById(Student.class, studentId);
assertNotNull("Student was deleted with JoinHolder", student);
course = schoolDataService.getEntityById(Course.class, courseId);
assertNotNull("Course was deleted with JoinHolder", course);
}
/**
 * Test create operation for <tt>Student</tt> entities.
 */
@Transactional
@Test
public void testCreateStudent(){
    //Create entity and test content (expected value first, per JUnit convention).
    Student student = schoolDataService.createEntity(Student.class,
            "First", "Last", LocalDate.of(1988, 10, 26), Gender.MALE, 10);
    //Guard against NPE below, matching testCreateCourse
    assertNotNull(CREATE_FAIL, student);
    assertEquals(CREATE_FAIL, "First", student.getFirstName());
    assertEquals(CREATE_FAIL, "Last", student.getLastName());
    assertEquals(CREATE_FAIL, LocalDate.of(1988, 10, 26), student.getBirthDate());
    assertEquals(CREATE_FAIL, Gender.MALE, student.getGender());
    assertEquals(CREATE_FAIL, 10, student.getGrade());
    int studentId = student.getStudentId();
    //Test if it was inserted properly into the database.
    student = schoolDataService.getEntityById(Student.class, studentId);
    assertNotNull(INSERT_FAIL, student);
    assertEquals(INSERT_FAIL, "First", student.getFirstName());
}
/**
 * Test create operation for <tt>Course</tt> entities.
 */
@Transactional
@Test
public void testCreateCourse(){
    //Create entity and test content (expected value first, per JUnit convention).
    Course course = schoolDataService.createEntity(Course.class,
            "Name", "Subject", "LastName", 1);
    assertNotNull(CREATE_FAIL, course);
    assertEquals(CREATE_FAIL, "Name", course.getCourseName());
    assertEquals(CREATE_FAIL, "Subject", course.getSubject());
    assertEquals(CREATE_FAIL, "LastName", course.getTeacherLastName());
    assertEquals(CREATE_FAIL, 1, course.getPeriod());
    int courseId = course.getCourseId();
    //Test if it was inserted properly into the database.
    course = schoolDataService.getEntityById(Course.class, courseId);
    assertNotNull(INSERT_FAIL, course);
    assertEquals(INSERT_FAIL, "Name", course.getCourseName());
}
/**
 * Test count operation for <tt>Student</tt> entities.
 */
@Transactional
@Test
public void testStudentCount(){
    //Insert three throwaway students so the table holds at least three rows
    for(int inserted = 0; inserted < 3; inserted++){
        Student dummy = new Student();
        setStudent1(dummy);
        schoolDataService.insertEntity(dummy);
    }
    //Other tests may have added rows too, so only a lower bound is asserted
    long count = schoolDataService.getEntityCount(Student.class);
    assertTrue(count >= 3);
}
/**
 * Test count operation for <tt>Course</tt> entities.
 */
@Transactional
@Test
public void testCourseCount(){
    //Insert three throwaway courses so the table holds at least three rows
    for(int inserted = 0; inserted < 3; inserted++){
        Course dummy = new Course();
        setCourse1(dummy);
        schoolDataService.insertEntity(dummy);
    }
    //Other tests may have added rows too, so only a lower bound is asserted
    long count = schoolDataService.getEntityCount(Course.class);
    assertTrue(count >= 3);
}
/**
 * Test count operation for <tt>JoinHolder</tt> entities.
 */
@Transactional
@Test
public void testScJoinHolderCount(){
//Create dummy data: three student/course pairs, each linked by a join row
for(int i = 0; i < 3; i++){
Course course = new Course();
setCourse1(course);
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
schoolDataService.insertEntity(course);
ScJoinHolder joinHolder = new ScJoinHolder(student, course);
schoolDataService.insertEntity(joinHolder);
}
//Get count and test its value (lower bound only: other tests may add rows)
long count = schoolDataService.getEntityCount(ScJoinHolder.class);
assertTrue(count >= 3);
}
/**
 * Test get all operation for <tt>Student</tt> entities.
 */
@Transactional
@Test
public void testGetAllStudents(){
    //Seed the table with three students
    for(int inserted = 0; inserted < 3; inserted++){
        Student dummy = new Student();
        setStudent1(dummy);
        schoolDataService.insertEntity(dummy);
    }
    //The returned list must exist and contain at least the seeded rows
    List<Student> students = schoolDataService.getAllEntities(Student.class);
    assertNotNull(students);
    assertTrue(students.size() >= 3);
}
/**
 * Test get all operation for <tt>Course</tt> entities.
 */
@Transactional
@Test
public void testGetAllCourses(){
    //Seed the table with three courses
    for(int inserted = 0; inserted < 3; inserted++){
        Course dummy = new Course();
        setCourse1(dummy);
        schoolDataService.insertEntity(dummy);
    }
    //The returned list must exist and contain at least the seeded rows
    List<Course> courses = schoolDataService.getAllEntities(Course.class);
    assertNotNull(courses);
    assertTrue(courses.size() >= 3);
}
/**
 * Test get all operation for <tt>JoinHolder</tt> entities.
 */
@Transactional
@Test
public void testGetAllScJoinHolders(){
    //Seed three joined student/course pairs
    for(int inserted = 0; inserted < 3; inserted++){
        Student dummyStudent = new Student();
        setStudent1(dummyStudent);
        Course dummyCourse = new Course();
        setCourse1(dummyCourse);
        schoolDataService.insertEntity(dummyStudent);
        schoolDataService.insertEntity(dummyCourse);
        ScJoinHolder join = new ScJoinHolder(dummyStudent, dummyCourse);
        schoolDataService.insertEntity(join);
    }
    //The returned list must exist and contain at least the seeded rows
    List<ScJoinHolder> joinHolders = schoolDataService.getAllEntities(ScJoinHolder.class);
    assertNotNull(joinHolders);
    assertTrue(joinHolders.size() >= 3);
}
/**
 * Test getting <tt>Student</tt> entities in a
 * paginated way.
 */
@Transactional
@Test
public void testGetStudentsPaginated(){
//Create dummy data: 20 rows so that two pages of 5-6 always exist
for(int i = 0; i < 20; i++){
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
}
//Get page 2 (size 5) and test for content
List<Student> students1 = schoolDataService.getEntitiesByPage(
Student.class, 2, 5);
assertNotNull("Students list is null", students1);
assertTrue("List is wrong size", students1.size() == 5);
//Get another page and compare the two
//This list is deliberately one entity larger than the first one
//This allows testing to ensure that the pages are retrieving entities
//in the right order.
List<Student> students2 = schoolDataService.getEntitiesByPage(
Student.class, 1, 6);
assertNotNull("Students list is null", students2);
assertTrue("List is wrong size", students2.size() == 6);
//The uneven sizes are meant for the following test: If this is true,
//then the last entity in the second list matches the first in the first.
//That would prove that pages are being retrieved in order.
assertEquals("First entity in first list doesn't equal last entity in second",
students1.get(0), students2.get(students2.size() - 1));
//Test for overlap while skipping the first record in list one because
//that one should match, but the others should not.
for(int i = 1; i < students1.size(); i++){
assertFalse("Overlap between pages",
students2.contains(students1.get(i)));
}
}
/**
 * Test getting <tt>Course</tt> entities in a
 * paginated way.
 */
@Transactional
@Test
public void testGetCoursesPaginated(){
//Create dummy data: 20 rows so that two pages of 5-6 always exist
for(int i = 0; i < 20; i++){
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
}
//Get page 2 (size 5) and test for content
List<Course> courses1 = schoolDataService.getEntitiesByPage(
Course.class, 2, 5);
assertNotNull("Courses list is null", courses1);
assertTrue("List is wrong size", courses1.size() == 5);
//Get another page and compare the two
//This list is deliberately one entity larger than the first one
//This allows testing to ensure that the pages are retrieving entities
//in the right order.
List<Course> courses2 = schoolDataService.getEntitiesByPage(
Course.class, 1, 6);
assertNotNull("Courses list is null", courses2);
assertTrue("List is wrong size", courses2.size() == 6);
//The uneven sizes are meant for the following test: If this is true,
//then the last entity in the second list matches the first in the first.
//That would prove that pages are being retrieved in order.
assertEquals("First entity in first list doesn't equal last entity in second",
courses1.get(0), courses2.get(courses2.size() - 1));
//Test for overlap while skipping the first record in list one because
//that one should match, but the others should not.
for(int i = 1; i < courses1.size(); i++){
assertFalse("Overlap between pages",
courses2.contains(courses1.get(i)));
}
}
/**
 * Test getting <tt>JoinHolder</tt> entities in a
 * paginated way.
 */
@Transactional
@Test
public void testGetScJoinHoldersPaginated(){
    //Create dummy data: 20 rows so that two pages of 5-6 always exist
    for(int i = 0; i < 20; i++){
        Course course = new Course();
        setCourse1(course);
        Student student = new Student();
        setStudent1(student);
        schoolDataService.insertEntity(student);
        schoolDataService.insertEntity(course);
        ScJoinHolder joinHolder = new ScJoinHolder(student, course);
        schoolDataService.insertEntity(joinHolder);
    }
    //Get page 2 (size 5) and test for content
    List<ScJoinHolder> joinHolders1 = schoolDataService.getEntitiesByPage(
            ScJoinHolder.class, 2, 5);
    assertNotNull("JoinHolders list is null", joinHolders1);
    assertTrue("List is wrong size", joinHolders1.size() == 5);
    //Get another page and compare the two
    //This list is deliberately one entity larger than the first one
    //This allows testing to ensure that the pages are retrieving entities
    //in the right order.
    List<ScJoinHolder> joinHolders2 = schoolDataService.getEntitiesByPage(
            ScJoinHolder.class, 1, 6);
    assertNotNull("JoinHolders list is null", joinHolders2);
    //Bug fix: this previously re-checked joinHolders1.size() == 5; the
    //second page must contain 6 entries, matching the sibling tests.
    assertTrue("List is wrong size", joinHolders2.size() == 6);
    //The uneven sizes are meant for the following test: If this is true,
    //then the last entity in the second list matches the first in the first.
    //That would prove that pages are being retrieved in order.
    assertEquals("First entity in first list doesn't equal last entity in second",
            joinHolders1.get(0), joinHolders2.get(joinHolders2.size() - 1));
    //Test for overlap while skipping the first record in list one because
    //that one should match, but the others should not.
    for(int i = 1; i < joinHolders1.size(); i++){
        assertFalse("Overlap between pages",
                joinHolders2.contains(joinHolders1.get(i)));
    }
}
/**
 * Test get all joined entities with
 * specified <tt>Student</tt>.
 */
@Transactional
@Test
public void testGetAllForStudent(){
    //Create one student joined to two distinct courses.
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    int studentId = student.getStudentId();
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    course = new Course();
    setCourse2(course);
    schoolDataService.insertEntity(course);
    joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Retrieve list based on student and test (expected value first)
    List<ScJoinHolder> joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Student.class, studentId);
    assertNotNull("JoinHolders is null", joinHolders);
    assertEquals(2, joinHolders.size());
}
/**
 * Test get all joined entities with
 * specified <tt>Course</tt>.
 */
@Transactional
@Test
public void testGetAllForCourse(){
    //Create one course joined to two distinct students.
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    int courseId = course.getCourseId();
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    student = new Student();
    setStudent2(student);
    schoolDataService.insertEntity(student);
    joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Retrieve list based on course and test (expected value first)
    List<ScJoinHolder> joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Course.class, courseId);
    assertNotNull("JoinHolders is null", joinHolders);
    assertEquals(2, joinHolders.size());
}
/**
 * Get entities joined with specified
 * <tt>Student</tt> in a paginated way.
 */
@Transactional
@Test
public void testPaginatedForStudent(){
    //Create one student joined to 20 courses
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    int studentId = student.getStudentId();
    for(int i = 0; i < 20; i++){
        Course course = new Course();
        setCourse1(course);
        schoolDataService.insertEntity(course);
        ScJoinHolder joinHolder = new ScJoinHolder(student, course);
        schoolDataService.insertEntity(joinHolder);
    }
    //Get page and test content
    List<ScJoinHolder> joinHolders1 = schoolDataService.getEntitiesByPageFor(
            ScJoinHolder.class, Student.class, studentId,
            2, 5);
    assertNotNull("JoinHolders list is null", joinHolders1);
    //NOTE(review): the size assertion below was disabled in the original;
    //reason unknown (possibly flaky) - left disabled, confirm before enabling.
    //assertTrue("List is wrong size", joinHolders1.size() == 5);
    for(ScJoinHolder jh : joinHolders1){
        assertEquals("Wrong studentId", studentId, jh.getStudent().getStudentId());
    }
    //Get another page and compare
    //This list is deliberately one entity larger than the first one
    //This allows testing to ensure that the pages are retrieving entities
    //in the right order.
    List<ScJoinHolder> joinHolders2 = schoolDataService.getEntitiesByPageFor(
            ScJoinHolder.class, Student.class, studentId,
            1, 6);
    assertNotNull("JoinHolders list is null", joinHolders2);
    assertTrue("List is wrong size", joinHolders2.size() == 6);
    //The uneven sizes are meant for the following test: If this is true,
    //then the last entity in the second list matches the first in the first.
    //That would prove that pages are being retrieved in order.
    assertEquals("First entity in first list doesn't equal last entity in second",
            joinHolders1.get(0), joinHolders2.get(joinHolders2.size() - 1));
    //Test for overlap while skipping the last record in list two because
    //that one should match, but the others should not.
    for(int i = 0; i < joinHolders2.size(); i++){
        assertEquals("Wrong studentId",
                studentId, joinHolders2.get(i).getStudent().getStudentId());
        //Don't do this for the last entry in the list
        if(i != joinHolders2.size() - 1){
            assertFalse("Overlap between pages",
                    joinHolders1.contains(joinHolders2.get(i)));
        }
    }
}
/**
 * Get entities joined with specified
 * <tt>Course</tt> in a paginated way.
 */
@Transactional
@Test
public void testPaginatedForCourse(){
    //Create one course joined to 20 students
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    int courseId = course.getCourseId();
    for(int i = 0; i < 20; i++){
        Student student = new Student();
        setStudent1(student);
        schoolDataService.insertEntity(student);
        ScJoinHolder joinHolder = new ScJoinHolder(student, course);
        schoolDataService.insertEntity(joinHolder);
    }
    //Get page and test content (expected value first, per JUnit convention)
    List<ScJoinHolder> joinHolders1 = schoolDataService.getEntitiesByPageFor(
            ScJoinHolder.class, Course.class, courseId,
            2, 5);
    assertNotNull("JoinHolders list is null", joinHolders1);
    assertTrue("List is wrong size", joinHolders1.size() == 5);
    for(ScJoinHolder jh : joinHolders1){
        assertEquals("Wrong courseID", courseId, jh.getCourse().getCourseId());
    }
    //Get another page and compare
    //This list is deliberately one entity larger than the first one
    //This allows testing to ensure that the pages are retrieving entities
    //in the right order.
    List<ScJoinHolder> joinHolders2 = schoolDataService.getEntitiesByPageFor(
            ScJoinHolder.class, Course.class, courseId,
            1, 6);
    assertNotNull("JoinHolders list is null", joinHolders2);
    assertTrue("List is wrong size", joinHolders2.size() == 6);
    //The uneven sizes are meant for the following test: If this is true,
    //then the last entity in the second list matches the first in the first.
    //That would prove that pages are being retrieved in order.
    assertEquals("First entity in first list doesn't equal last entity in second",
            joinHolders1.get(0), joinHolders2.get(joinHolders2.size() - 1));
    //Test for overlap while skipping the last record in list two because
    //that one should match, but the others should not.
    for(int i = 0; i < joinHolders2.size(); i++){
        assertEquals("Wrong courseID",
                courseId, joinHolders2.get(i).getCourse().getCourseId());
        //Don't do this for the last entry in the list
        if(i != joinHolders2.size() - 1){
            assertFalse("Overlap between pages",
                    joinHolders1.contains(joinHolders2.get(i)));
        }
    }
}
/**
 * Test getting count of all entities joined
 * with specified <tt>Student</tt>.
 */
@Transactional
@Test
public void testGetCountForStudent(){
    //Create one student joined to two distinct courses.
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    int studentId = student.getStudentId();
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    course = new Course();
    setCourse2(course);
    schoolDataService.insertEntity(course);
    joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Get count based on student (expected value first, per JUnit convention)
    long count = schoolDataService.getJoinCountFor(ScJoinHolder.class,
            Student.class, studentId);
    assertEquals(2, count);
}
/**
 * Test getting count of all entities joined
 * with specified <tt>Course</tt>.
 */
@Transactional
@Test
public void testGetCountForCourse(){
    //Create one course joined to two distinct students.
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    int courseId = course.getCourseId();
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    student = new Student();
    setStudent2(student);
    schoolDataService.insertEntity(student);
    joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Get count based on course (expected value first, per JUnit convention)
    long count = schoolDataService.getJoinCountFor(ScJoinHolder.class,
            Course.class, courseId);
    assertEquals(2, count);
}
/**
 * Test operation to create a <tt>JoinHolder</tt>
 * by specifying the entities to be joined.
 * This test depends on the <tt>getJoinsFor</tt>
 * operation to be working, which is tested
 * separately here. If that test fails, this
 * one will too.
 */
@Transactional
@Test
public void testJoinEntities(){
    //Create entities and insert.
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    int studentId = student.getStudentId();
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    int courseId = course.getCourseId();
    //Use convenience method to join entities and insert
    schoolDataService.joinEntities(ScJoinHolder.class, student, course);
    //Retrieve joins for student and test (expected value first)
    List<ScJoinHolder> joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Student.class, studentId);
    assertNotNull("JoinHolders is null", joinHolders);
    assertEquals("JoinHolders wrong size", 1, joinHolders.size());
    assertEquals("Wrong JoinHolder retrieved",
            courseId, joinHolders.get(0).getCourse().getCourseId());
    //Retrieve joins for course and test
    joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Course.class, courseId);
    assertNotNull("JoinHolders is null", joinHolders);
    assertEquals("JoinHolders wrong size", 1, joinHolders.size());
    assertEquals("Wrong JoinHolder retrieved",
            studentId, joinHolders.get(0).getStudent().getStudentId());
}
/**
 * Test removing all entities joined with
 * a specified <tt>Student</tt>.
 */
@Transactional
@Test
public void testRemoveForStudent(){
    //Create dummy data
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    int studentId = student.getStudentId();
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Remove join via student
    schoolDataService.removeJoinsFor(ScJoinHolder.class, Student.class,
            studentId);
    //Retrieve JoinHolder by student and test it (expected value first)
    List<ScJoinHolder> joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Student.class, studentId);
    assertNotNull("JoinHolders list is null", joinHolders);
    assertEquals("Wrong size", 0, joinHolders.size());
}
/**
 * Test remove all entities joined with
 * a specified <tt>Course</tt>.
 */
@Transactional
@Test
public void testRemoveForCourse(){
    //Create dummy data
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    int courseId = course.getCourseId();
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    //Remove join via course
    schoolDataService.removeJoinsFor(ScJoinHolder.class, Course.class,
            courseId);
    //Retrieve JoinHolder by course and test it (expected value first)
    List<ScJoinHolder> joinHolders = schoolDataService.getAllJoinsFor(
            ScJoinHolder.class, Course.class, courseId);
    assertNotNull("JoinHolders list is null", joinHolders);
    assertEquals("Wrong size", 0, joinHolders.size());
}
/**
 * Test delete by ID for <tt>Student</tt> entities.
 * Relies on the basic CRUD operations; if those
 * are broken, this test fails as well.
 */
@Transactional
@Test
public void testStudentDeleteById(){
    //Insert a throwaway student to delete
    Student subject = new Student();
    setStudent1(subject);
    schoolDataService.insertEntity(subject);
    int id = subject.getStudentId();
    //Sanity-check the insert before exercising the delete
    subject = schoolDataService.getEntityById(Student.class, id);
    assertNotNull("Student insert failed", subject);
    //Delete by ID, then confirm the row can no longer be fetched
    schoolDataService.deleteEntityById(Student.class, id);
    subject = schoolDataService.getEntityById(Student.class, id);
    assertNull("Student not deleted", subject);
}
/**
 * Test delete by ID for <tt>Course</tt> entities.
 * Relies on the basic CRUD operations; if those
 * are broken, this test fails as well.
 */
@Transactional
@Test
public void testCourseDeleteById(){
    //Insert a throwaway course to delete
    Course subject = new Course();
    setCourse1(subject);
    schoolDataService.insertEntity(subject);
    int id = subject.getCourseId();
    //Sanity-check the insert before exercising the delete
    subject = schoolDataService.getEntityById(Course.class, id);
    assertNotNull("Course insert failed", subject);
    //Delete by ID, then confirm the row can no longer be fetched
    schoolDataService.deleteEntityById(Course.class, id);
    subject = schoolDataService.getEntityById(Course.class, id);
    assertNull("Course not deleted", subject);
}
/**
 * Test delete by ID for <tt>JoinHolder</tt> entities.
 * This test depends on the basic CRUD operations
 * to be working, if they do not then this test will fail too.
 */
@Transactional
@Test
public void testJoinHolderDeleteById(){
    //Create dummy data
    Course course = new Course();
    setCourse1(course);
    schoolDataService.insertEntity(course);
    Student student = new Student();
    setStudent1(student);
    schoolDataService.insertEntity(student);
    //Use the two-arg constructor, consistent with the other tests in this class
    ScJoinHolder joinHolder = new ScJoinHolder(student, course);
    schoolDataService.insertEntity(joinHolder);
    int jhId = joinHolder.getScId();
    //Test to ensure successful insert
    joinHolder = schoolDataService.getEntityById(ScJoinHolder.class, jhId);
    assertNotNull("JoinHolder insert failed", joinHolder);
    //Delete JoinHolder
    schoolDataService.deleteEntityById(ScJoinHolder.class, jhId);
    //Attempt to retrieve JoinHolder to test for deletion
    joinHolder = schoolDataService.getEntityById(ScJoinHolder.class, jhId);
    assertNull("JoinHolder not deleted", joinHolder);
}
/**
 * Test the hasPagesRemaining method in the service
 * class for <tt>Student</tt> entities. This is dependent
 * on the count methods, and
 * if they don't work, this won't
 * work either.
 */
@Transactional
@Test
public void testStudentHasPagesRemaining(){
//Create dummy data: ten rows, so page 1 of size 5 always has a successor
for(int i = 0; i < 10; i++){
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
}
//Get the current count to set up the comparison
long actualCount = schoolDataService.getEntityCount(Student.class);
//Integer division: number of full pages of 10 in the table
int pageCount = (int) actualCount / 10; //Works because of small data sets, might not work in larger application.
//Test with a value that should result in true
assertTrue("Should have pages remaining", schoolDataService.hasPagesRemaining(
Student.class, 1, 5));
//Test with a page index past the end, which should result in false
assertFalse("Should not have pages remaining", schoolDataService.hasPagesRemaining(
Student.class, pageCount + 1, 10));
}
/**
 * Test the hasPagesRemaining method in the service
 * class for <tt>Course</tt> entities. This is dependent
 * on the count methods, and
 * if they don't work, this won't
 * work either.
 */
@Transactional
@Test
public void testCourseHasPagesRemaining(){
//Create dummy data: ten rows, so page 1 of size 5 always has a successor
for(int i = 0; i < 10; i++){
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
}
//Get the current count to set up the comparison
long actualCount = schoolDataService.getEntityCount(Course.class);
//Integer division: number of full pages of 10 in the table
int pageCount = (int) actualCount / 10; //Works because of small data sets, might not work in larger application.
//Test with a value that should result in true
assertTrue("Should have pages remaining", schoolDataService.hasPagesRemaining(
Course.class, 1, 5));
//Test with a page index past the end, which should result in false
assertFalse("Should not have pages remaining", schoolDataService.hasPagesRemaining(
Course.class, pageCount + 1, 10));
}
/**
 * Test the hasPagesRemaining method in the service
 * class for <tt>ScJoinHolder</tt> entities. This is dependent
 * on the count methods, and
 * if they don't work, this won't
 * work either.
 */
@Transactional
@Test
public void testJoinHolderHasPagesRemaining(){
//Create dummy data: ten joined pairs, so page 1 of size 5 has a successor
for(int i = 0; i < 10; i++){
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
ScJoinHolder joinHolder = new ScJoinHolder(student, course);
schoolDataService.insertEntity(joinHolder);
}
//Get the current count to set up the comparison
long actualCount = schoolDataService.getEntityCount(ScJoinHolder.class);
//Integer division: number of full pages of 10 in the table
int pageCount = (int) actualCount / 10; //Works because of small data sets, might not work in larger application.
//Test with a value that should result in true
assertTrue("Should have pages remaining", schoolDataService.hasPagesRemaining(
ScJoinHolder.class, 1, 5));
//Test with a page index past the end, which should result in false
assertFalse("Should not have pages remaining", schoolDataService.hasPagesRemaining(
ScJoinHolder.class, pageCount + 1, 10));
}
/**
 * Test hasPagesRemainingFor method for
 * joins from a <tt>Student</tt> entity.
 * This is dependent
 * on the count methods, and
 * if they don't work, this won't
 * work either.
 */
@Transactional
@Test
public void testHasPagesRemainingForStudent(){
//Create dummy data: one student joined to ten courses
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
int studentId = student.getStudentId();
for(int i = 0; i < 10; i++){
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
ScJoinHolder joinHolder = new ScJoinHolder(student, course);
schoolDataService.insertEntity(joinHolder);
}
//Get the current count to set up the comparison
long actualCount = schoolDataService.getJoinCountFor(
ScJoinHolder.class, Student.class, studentId);
//Integer division: number of full pages of 10 for this student
int pageCount = (int) actualCount / 10; //Works because of small data sets, might not work in larger application.
//Test with a value that should result in true
assertTrue("Should have pages remaining", schoolDataService.hasPagesRemainingFor(
ScJoinHolder.class, Student.class, studentId, 1, 5));
//Test with a page index past the end, which should result in false
assertFalse("Should not have pages remaining", schoolDataService.hasPagesRemainingFor(
ScJoinHolder.class, Student.class, studentId, pageCount + 1, 10));
}
/**
 * Test hasPagesRemainingFor method for
 * joins from a <tt>Course</tt> entity.
 * This is dependent
 * on the count methods, and
 * if they don't work, this won't
 * work either.
 */
@Transactional
@Test
public void testHasPagesRemainingForCourse(){
//Create dummy data: one course joined to ten students
Course course = new Course();
setCourse1(course);
schoolDataService.insertEntity(course);
int courseId = course.getCourseId();
for(int i = 0; i < 10; i++){
Student student = new Student();
setStudent1(student);
schoolDataService.insertEntity(student);
ScJoinHolder joinHolder = new ScJoinHolder(student, course);
schoolDataService.insertEntity(joinHolder);
}
//Get the current count to set up the comparison
long actualCount = schoolDataService.getJoinCountFor(
ScJoinHolder.class, Course.class, courseId);
//Integer division: number of full pages of 10 for this course
int pageCount = (int) actualCount / 10; //Works because of small data sets, might not work in larger application.
//Test with a value that should result in true
assertTrue("Should have pages remaining", schoolDataService.hasPagesRemainingFor(
ScJoinHolder.class, Course.class, courseId, 1, 5));
//Test with a page index past the end, which should result in false
assertFalse("Should not have pages remaining", schoolDataService.hasPagesRemainingFor(
ScJoinHolder.class, Course.class, courseId, pageCount + 1, 10));
}
/**
 * Set the fields of the <tt>Student</tt> object
 * to the first set of values.
 *
 * @param student the <tt>Student</tt> object to set.
 */
private void setStudent1(Student student){
student.setFirstName("First");
student.setLastName("Last");
student.setBirthDate(LocalDate.of(1900, 1, 1));
student.setGender(Gender.UNKNOWN);
student.setGrade(1);
}
/**
 * Set the fields of the <tt>Student</tt> object
 * to the second set of values.
 *
 * @param student the <tt>Student</tt> object to set.
 */
private void setStudent2(Student student){
student.setFirstName("First2");
student.setLastName("Last2");
student.setBirthDate(LocalDate.of(1950, 1, 1));
student.setGender(Gender.MALE);
student.setGrade(2);
}
/**
 * Set the fields of the <tt>Course</tt> object
 * to the first set of values.
 *
 * @param course the <tt>Course</tt> object to set.
 */
private void setCourse1(Course course){
course.setCourseName("Name");
course.setSubject("Subject");
course.setTeacherLastName("LastName");
course.setPeriod(1);
}
/**
 * Set the fields of the <tt>Course</tt> object
 * to the second set of values.
 *
 * @param course the <tt>Course</tt> object to set.
 */
private void setCourse2(Course course){
course.setCourseName("Name2");
course.setSubject("Subject2");
course.setTeacherLastName("LastName2");
course.setPeriod(2);
}
/**
 * Reset the auto-increment counter of the table being tested
 * in the database. This method is invoked after all test
 * cases have completed.
 */
@AfterClass
public static void resetAutoIncrement(){
//Static context: the Spring container must be looked up manually here
ApplicationContext context = AppContext.getApplicationContext();
HibernateTestUtil testUtil = context.getBean(HibernateTestUtil.class, "hibernateTestUtil");
testUtil.resetCourseAutoIncrement();
testUtil.resetStudentAutoIncrement();
testUtil.resetStudentCourseAutoIncrement();
}
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp.parsing;
import com.google.common.base.Preconditions;
import com.google.javascript.jscomp.mozilla.rhino.ScriptRuntime;
/**
* This class implements the scanner for JsDoc strings.
*
* It is heavily based on Rhino's TokenStream.
*
*/
class JsDocTokenStream {
/*
 * For chars - because we need something out-of-range
 * to check. (And checking EOF by exception is annoying.)
 * Note distinction from EOF token type!
 */
private static final int EOF_CHAR = -1;
/** Creates a stream for the given comment text, starting at line 0, charno 0. */
JsDocTokenStream(String sourceString) {
this(sourceString, 0);
}
/** Creates a stream for the given comment text at the given line, charno 0. */
JsDocTokenStream(String sourceString, int lineno) {
this(sourceString, lineno, 0);
}
/**
 * Creates a stream for the given comment text.
 *
 * @param sourceString the JsDoc comment text to scan; must not be null
 * @param lineno line number of the comment within the larger source
 * @param initCharno character offset of the comment on its first line
 */
JsDocTokenStream(String sourceString, int lineno, int initCharno) {
Preconditions.checkNotNull(sourceString);
this.lineno = lineno;
this.sourceString = sourceString;
this.sourceEnd = sourceString.length();
this.sourceCursor = this.cursor = 0;
this.initLineno = lineno;
this.initCharno = initCharno;
}
/**
 * Tokenizes JSDoc comments.
 *
 * Returns the next token from the comment text. Identifier-like text
 * (annotation names and strings) is accumulated in the internal string
 * buffer and exposed through {@code this.string}.
 */
@SuppressWarnings("fallthrough")
final JsDocToken getJsDocToken() {
int c;
stringBufferTop = 0;
for (;;) {
// eat white spaces; newlines and end-of-input are tokens themselves
for (;;) {
charno = -1;
c = getChar();
if (c == EOF_CHAR) {
return JsDocToken.EOF;
} else if (c == '\n') {
return JsDocToken.EOL;
} else if (!isJSSpace(c)) {
break;
}
}
switch (c) {
// annotation, e.g. @type or @constructor
case '@':
do {
c = getChar();
if (isAlpha(c)) {
addToString(c);
} else {
// first non-alpha char ends the annotation name; push it back
ungetChar(c);
this.string = getStringFromBuffer();
stringBufferTop = 0;
return JsDocToken.ANNOTATION;
}
} while (true);
case '*':
// "*/" closes the comment; a lone '*' is a decoration star
if (matchChar('/')) {
return JsDocToken.EOC;
} else {
return JsDocToken.STAR;
}
case ',':
return JsDocToken.COMMA;
case '>':
return JsDocToken.GT;
case '(':
return JsDocToken.LP;
case ')':
return JsDocToken.RP;
case '{':
return JsDocToken.LC;
case '}':
return JsDocToken.RC;
case '[':
return JsDocToken.LB;
case ']':
return JsDocToken.RB;
case '?':
return JsDocToken.QMARK;
case '!':
return JsDocToken.BANG;
case ':':
return JsDocToken.COLON;
case '=':
return JsDocToken.EQUALS;
case '|':
// treat "||" the same as "|"
matchChar('|');
return JsDocToken.PIPE;
case '.':
// ".<" opens a type argument list; "..." is varargs
c = getChar();
if (c == '<') {
return JsDocToken.LT;
} else {
if (c == '.') {
c = getChar();
if (c == '.') {
return JsDocToken.ELLIPSIS;
} else {
addToString('.');
}
}
// we may backtrack across line boundary
ungetBuffer[ungetCursor++] = c;
c = '.';
}
// fall through
default: {
// recognize a jsdoc string but discard last . if it is followed by
// a non-jsdoc comment char, e.g. Array.<
int c1 = c;
addToString(c);
int c2 = getChar();
if (!isJSDocString(c2)) {
ungetChar(c2);
this.string = getStringFromBuffer();
stringBufferTop = 0;
return JsDocToken.STRING;
} else {
// scan the rest of the string one char ahead (c1 = current, c2 = lookahead)
do {
c1 = c2;
c2 = getChar();
if (c1 == '.' && c2 == '<') {
// ".<" belongs to the next token; push both back
ungetChar(c2);
ungetChar(c1);
this.string = getStringFromBuffer();
stringBufferTop = 0;
return JsDocToken.STRING;
} else {
if (isJSDocString(c2)) {
addToString(c1);
} else {
ungetChar(c2);
addToString(c1);
this.string = getStringFromBuffer();
stringBufferTop = 0;
return JsDocToken.STRING;
}
}
} while (true);
}
}
}
}
}
/**
* Gets the remaining JSDoc line without the {@link JsDocToken#EOL},
* {@link JsDocToken#EOF} or {@link JsDocToken#EOC}.
*/
@SuppressWarnings("fallthrough")
String getRemainingJSDocLine() {
int c;
for (;;) {
c = getChar();
switch (c) {
case '*':
if (peekChar() != '/') {
addToString(c);
break;
}
// fall through
case EOF_CHAR:
case '\n':
ungetChar(c);
this.string = getStringFromBuffer();
stringBufferTop = 0;
return this.string;
default:
addToString(c);
break;
}
}
}
final int getLineno() { return lineno; }
final int getCharno() {
return lineno == initLineno? initCharno + charno : charno;
}
final String getString() { return string; }
final boolean eof() { return hitEOF; }
private String getStringFromBuffer() {
tokenEnd = cursor;
return new String(stringBuffer, 0, stringBufferTop);
}
private void addToString(int c) {
int N = stringBufferTop;
if (N == stringBuffer.length) {
char[] tmp = new char[stringBuffer.length * 2];
System.arraycopy(stringBuffer, 0, tmp, 0, N);
stringBuffer = tmp;
}
stringBuffer[N] = (char)c;
stringBufferTop = N + 1;
}
void ungetChar(int c) {
// can not unread past across line boundary
assert(!(ungetCursor != 0 && ungetBuffer[ungetCursor - 1] == '\n'));
ungetBuffer[ungetCursor++] = c;
cursor--;
}
private boolean matchChar(int test) {
int c = getCharIgnoreLineEnd();
if (c == test) {
tokenEnd = cursor;
return true;
} else {
ungetCharIgnoreLineEnd(c);
return false;
}
}
private static boolean isAlpha(int c) {
// Use 'Z' < 'a'
if (c <= 'Z') {
return 'A' <= c;
} else {
return 'a' <= c && c <= 'z';
}
}
private boolean isJSDocString(int c) {
switch (c) {
case '@':
case '*':
case ',':
case '>':
case ':':
case '(':
case ')':
case '{':
case '}':
case '[':
case ']':
case '?':
case '!':
case '|':
case '=':
case EOF_CHAR:
case '\n':
return false;
default:
return !isJSSpace(c);
}
}
/* As defined in ECMA. jsscan.c uses C isspace() (which allows
* \v, I think.) note that code in getChar() implicitly accepts
* '\r' == \u000D as well.
*/
static boolean isJSSpace(int c) {
if (c <= 127) {
return c == 0x20 || c == 0x9 || c == 0xC || c == 0xB;
} else {
return c == 0xA0
|| Character.getType((char)c) == Character.SPACE_SEPARATOR;
}
}
private static boolean isJSFormatChar(int c) {
return c > 127 && Character.getType((char)c) == Character.FORMAT;
}
/**
* Allows the JSDocParser to update the character offset
* so that getCharno() returns a valid character position.
*/
void update() {
charno = getOffset();
}
private int peekChar() {
int c = getChar();
ungetChar(c);
return c;
}
protected int getChar() {
if (ungetCursor != 0) {
cursor++;
--ungetCursor;
if (charno == -1) {
charno = getOffset();
}
return ungetBuffer[ungetCursor];
}
for(;;) {
int c;
if (sourceCursor == sourceEnd) {
hitEOF = true;
if (charno == -1) {
charno = getOffset();
}
return EOF_CHAR;
}
cursor++;
c = sourceString.charAt(sourceCursor++);
if (lineEndChar >= 0) {
if (lineEndChar == '\r' && c == '\n') {
lineEndChar = '\n';
continue;
}
lineEndChar = -1;
lineStart = sourceCursor - 1;
lineno++;
}
if (c <= 127) {
if (c == '\n' || c == '\r') {
lineEndChar = c;
c = '\n';
}
} else {
if (isJSFormatChar(c)) {
continue;
}
if (ScriptRuntime.isJSLineTerminator(c)) {
lineEndChar = c;
c = '\n';
}
}
if (charno == -1) {
charno = getOffset();
}
return c;
}
}
private int getCharIgnoreLineEnd() {
if (ungetCursor != 0) {
cursor++;
--ungetCursor;
if (charno == -1) {
charno = getOffset();
}
return ungetBuffer[ungetCursor];
}
for(;;) {
int c;
if (sourceCursor == sourceEnd) {
hitEOF = true;
if (charno == -1) {
charno = getOffset();
}
return EOF_CHAR;
}
cursor++;
c = sourceString.charAt(sourceCursor++);
if (c <= 127) {
if (c == '\n' || c == '\r') {
lineEndChar = c;
c = '\n';
}
} else {
if (isJSFormatChar(c)) {
continue;
}
if (ScriptRuntime.isJSLineTerminator(c)) {
lineEndChar = c;
c = '\n';
}
}
if (charno == -1) {
charno = getOffset();
}
return c;
}
}
private void ungetCharIgnoreLineEnd(int c) {
ungetBuffer[ungetCursor++] = c;
cursor--;
}
/**
* Returns the offset into the current line.
*/
final int getOffset() {
return sourceCursor - lineStart - ungetCursor - 1;
}
// Set this to an initial non-null value so that the Parser has
// something to retrieve even if an error has occurred and no
// string is found. Fosters one class of error, but saves lots of
// code.
private String string = "";
private char[] stringBuffer = new char[128];
private int stringBufferTop;
// Room to backtrace from to < on failed match of the last - in <!--
private final int[] ungetBuffer = new int[3];
private int ungetCursor;
private boolean hitEOF = false;
private int lineStart = 0;
private int lineEndChar = -1;
int lineno;
private int charno = -1;
private int initCharno;
private int initLineno;
private String sourceString;
private int sourceEnd;
// sourceCursor is an index into a small buffer that keeps a
// sliding window of the source stream.
int sourceCursor;
// cursor is a monotonically increasing index into the original
// source stream, tracking exactly how far scanning has progressed.
// Its value is the index of the next character to be scanned.
int cursor;
// Record start and end positions of last scanned token.
int tokenBeg;
int tokenEnd;
}
| |
package it.andreacioni.sdrive.test;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import it.andreacioni.commons.utils.FileUtils;
public class FileUtilsTest {
	private static final String WIN_FILE_PATH_1 = "C:\\abs\\aer\\342r.txt";
	private static final String WIN_FILE_PATH_2 = "C:/abs/aer/342r.txt";
	private static final String WIN_FILE_PATH_3 = "C:/abs/aer/342r.bc.txt";
	private static final String WIN_FILE_PATH_4 = "C://abs//aer//342r.txt";
	private static final String UNX_FILE_PATH_1 = "abs/aer/342r.txt";
	private static final String UNX_FILE_PATH_2 = "/abs/aer/342r.txt";
	private static final String UNX_FILE_PATH_3 = "/abs/aer/342r.bc.txt";

	/**
	 * Shared scenario for the non-conflicting-name tests: creates {@code f1} on
	 * disk, asserts that {@link FileUtils#generateNonConflictFileName(File)}
	 * proposes exactly {@code f2}'s name, verifies both files can then coexist,
	 * and finally deletes both.
	 *
	 * @param f1 the file that will actually be created (the "conflicting" one)
	 * @param f2 the file whose name is the expected generated alternative
	 * @throws IOException if file creation fails
	 */
	private void assertNonConflictingName(File f1, File f2) throws IOException {
		f1.deleteOnExit();
		f2.deleteOnExit();
		Assert.assertTrue(!f1.exists() && !f2.exists());
		Assert.assertTrue(f1.createNewFile());
		Assert.assertEquals(f2.getName(), FileUtils.generateNonConflictFileName(f1).getName());
		// generateNonConflictFileName must only propose a name, not create it
		Assert.assertTrue(f1.exists() && !f2.exists());
		Assert.assertTrue(f2.createNewFile());
		Assert.assertTrue(f1.exists() && f2.exists());
		Assert.assertTrue(f1.delete() && f2.delete());
	}

	/**
	 * Creates the fixture tree used by the listing tests:
	 * rootDir/, rootDir's nested dir, two files in rootDir and one nested file.
	 * Asserts that nothing existed beforehand and every creation succeeded.
	 */
	private void createTestTree(File rootDir, File dir, File file1, File file2, File file3) throws IOException {
		rootDir.deleteOnExit();
		dir.deleteOnExit();
		file1.deleteOnExit();
		file2.deleteOnExit();
		file3.deleteOnExit();
		assertTrue(!rootDir.exists() && !dir.exists() && !file1.exists() && !file2.exists() && !file3.exists());
		assertTrue(rootDir.mkdir() && rootDir.exists() && rootDir.isDirectory());
		assertTrue(dir.mkdir() && dir.exists() && dir.isDirectory());
		assertTrue(file1.createNewFile() && file1.exists() && file1.isFile());
		assertTrue(file2.createNewFile() && file2.exists() && file2.isFile());
		assertTrue(file3.createNewFile() && file3.exists() && file3.isFile());
	}

	/** Removes the fixture tree created by {@link #createTestTree}, deepest entries first. */
	private void deleteTestTree(File rootDir, File dir, File file1, File file2, File file3) {
		assertTrue(file1.delete());
		assertTrue(file2.delete());
		assertTrue(file3.delete());
		assertTrue(dir.delete());
		assertTrue(rootDir.delete());
	}

	@Test
	public void testNonConflictingFileNames1() throws IOException {
		// no extension: "abc" -> "abc(1)"
		assertNonConflictingName(new File("abc"), new File("abc(1)"));
	}

	@Test
	public void testNonConflictingFileNames2() throws IOException {
		// single extension: counter goes before ".txt"
		assertNonConflictingName(new File("abc.txt"), new File("abc(1).txt"));
	}

	@Test
	public void testNonConflictingFileNames3() throws IOException {
		// double extension: counter goes before the full ".txt.bak" suffix
		assertNonConflictingName(new File("abc.txt.bak"), new File("abc(1).txt.bak"));
	}

	/** Base name / extension extraction for Windows-style paths (all separator flavors). */
	@Test
	public void testWindows() {
		Assert.assertEquals("342r", FileUtils.getBaseName(WIN_FILE_PATH_1));
		Assert.assertEquals("txt", FileUtils.getExtension(WIN_FILE_PATH_1));
		Assert.assertEquals("342r", FileUtils.getBaseName(WIN_FILE_PATH_2));
		Assert.assertEquals("txt", FileUtils.getExtension(WIN_FILE_PATH_2));
		Assert.assertEquals("342r", FileUtils.getBaseName(WIN_FILE_PATH_3));
		Assert.assertEquals("bc.txt", FileUtils.getExtension(WIN_FILE_PATH_3));
		Assert.assertEquals("342r", FileUtils.getBaseName(WIN_FILE_PATH_4));
		Assert.assertEquals("txt", FileUtils.getExtension(WIN_FILE_PATH_4));
	}

	/** Base name / extension extraction for Unix-style paths (relative and absolute). */
	@Test
	public void testUnix() {
		Assert.assertEquals("342r", FileUtils.getBaseName(UNX_FILE_PATH_1));
		Assert.assertEquals("txt", FileUtils.getExtension(UNX_FILE_PATH_1));
		Assert.assertEquals("342r", FileUtils.getBaseName(UNX_FILE_PATH_2));
		Assert.assertEquals("txt", FileUtils.getExtension(UNX_FILE_PATH_2));
		Assert.assertEquals("342r", FileUtils.getBaseName(UNX_FILE_PATH_3));
		Assert.assertEquals("bc.txt", FileUtils.getExtension(UNX_FILE_PATH_3));
	}

	/** splitFilePath on Unix paths: leading '/' produces no empty leading segment. */
	@Test
	public void testUnixPath() {
		String[] expected = new String[] { "abs", "aer", "342r.txt" };
		assertArrayEquals(expected, FileUtils.splitFilePath(UNX_FILE_PATH_1));
		assertArrayEquals(expected, FileUtils.splitFilePath(UNX_FILE_PATH_2));
		expected = new String[] { "abs", "aer", "342r.bc.txt" };
		assertArrayEquals(expected, FileUtils.splitFilePath(UNX_FILE_PATH_3));
	}

	/** splitFilePath on Windows paths: drive letter is kept as its own segment. */
	@Test
	public void testWindowsPath() {
		String[] expected = new String[] { "C:", "abs", "aer", "342r.txt" };
		assertArrayEquals(expected, FileUtils.splitFilePath(WIN_FILE_PATH_1));
		assertArrayEquals(expected, FileUtils.splitFilePath(WIN_FILE_PATH_2));
		expected = new String[] { "C:", "abs", "aer", "342r.bc.txt" };
		assertArrayEquals(expected, FileUtils.splitFilePath(WIN_FILE_PATH_3));
	}

	/** Sanity check: a File named with a wildcard does not accidentally resolve to anything. */
	@Test
	public void testFileCreation() {
		File f = new File("*");
		assertTrue(!f.exists());
		f = new File("./*");
		assertTrue(!f.exists());
	}

	/** Recursive listing returns only files, never the directories themselves. */
	@Test
	public void listFiles1() throws IOException {
		File rootDir = new File("test"), dir = new File("test/testNested"), file1 = new File("test/file1"),
				file2 = new File("test/file2"), file3 = new File("test/testNested/file3");
		createTestTree(rootDir, dir, file1, file2, file3);
		List<File> files = FileUtils.listFiles(rootDir, null, true);
		assertEquals(3, files.size());
		assertTrue(files.contains(file1));
		assertTrue(files.contains(file2));
		assertTrue(files.contains(file3));
		assertTrue(!files.contains(rootDir));
		assertTrue(!files.contains(dir));
		deleteTestTree(rootDir, dir, file1, file2, file3);
	}

	/** Listing a plain file yields just that file. */
	@Test
	public void listFiles2() throws IOException {
		File file = new File("test");
		file.deleteOnExit();
		assertTrue(!file.exists());
		assertTrue(file.createNewFile());
		List<File> files = FileUtils.listFiles(file, null, true);
		assertTrue(files.size() == 1 && files.contains(file));
		assertTrue(file.delete());
	}

	/** Recursive listing with directories includes the root and nested dirs too. */
	@Test
	public void listFilesAndDirs1() throws IOException {
		File rootDir = new File("test"), dir = new File("test/testNested"), file1 = new File("test/file1"),
				file2 = new File("test/file2"), file3 = new File("test/testNested/file3");
		createTestTree(rootDir, dir, file1, file2, file3);
		List<File> files = FileUtils.listFilesAndDirs(rootDir, null, true);
		assertEquals(5, files.size());
		assertTrue(files.contains(file1));
		assertTrue(files.contains(file2));
		assertTrue(files.contains(file3));
		assertTrue(files.contains(rootDir));
		assertTrue(files.contains(dir));
		deleteTestTree(rootDir, dir, file1, file2, file3);
	}
}
| |
package com.planet_ink.coffee_mud.Abilities.Prayers;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Prayer_AnimateGhost extends Prayer
{
	@Override
	public String ID()
	{
		return "Prayer_AnimateGhost";
	}
	private final static String localizedName = CMLib.lang().L("Animate Ghost");
	@Override
	public String name()
	{
		return localizedName;
	}
	@Override
	public int classificationCode()
	{
		return Ability.ACODE_PRAYER | Ability.DOMAIN_DEATHLORE;
	}
	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_INDIFFERENT;
	}
	@Override
	public int enchantQuality()
	{
		return Ability.QUALITY_INDIFFERENT;
	}
	@Override
	public long flags()
	{
		return Ability.FLAG_UNHOLY;
	}
	@Override
	protected int canTargetCode()
	{
		// targets corpses (items), not mobs
		return CAN_ITEMS;
	}
	private final static String localizedDiplayText = CMLib.lang().L("Newly animate dead");
	@Override
	public String displayText()
	{
		return localizedDiplayText;
	}
	/**
	 * When the animation effect expires, an unfollowed ghost "wanders off":
	 * it is destroyed after an optional room message. Ghosts that are
	 * following someone are left alone.
	 */
	@Override
	public void unInvoke()
	{
		final Physical P=affected;
		super.unInvoke();
		if((P instanceof MOB)&&(this.canBeUninvoked)&&(this.unInvoked))
		{
			if((!P.amDestroyed())&&(((MOB)P).amFollowing()==null))
			{
				final Room R=CMLib.map().roomLocation(P);
				if(R!=null)
					R.showHappens(CMMsg.MSG_OK_ACTION, P,L("<S-NAME> wander(s) off."));
				P.destroy();
			}
		}
	}
	/**
	 * Suspends the expiration countdown while the ghost is following someone:
	 * tickDown is captured before super.tick() decrements it, and restored
	 * when the affected MOB has a leader.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		int tickSet = super.tickDown;
		if(!super.tick(ticking, tickID))
			return false;
		if(ticking instanceof MOB)
		{
			final MOB mob=(MOB)ticking;
			if(mob.amFollowing() != null)
				super.tickDown = tickSet;
		}
		return true;
	}
	/**
	 * Builds a ghost MOB from a corpse and brings it to life in room R.
	 *
	 * @param R the room where the ghost appears
	 * @param body the corpse being animated; destroyed at the end
	 * @param mob the invoking caster, or null for a masterless "poltergeist"
	 *     (invisible, gets the Thiefness behavior, aggressive filter omitted)
	 * @param level base level for the ghost before caster level bonuses
	 */
	public void makeGhostFrom(Room R, DeadBody body, MOB mob, int level)
	{
		// name the ghost after the corpse's race, e.g. "an elf ghost"
		String race="a";
		if((body.charStats()!=null)&&(body.charStats().getMyRace()!=null))
			race=CMLib.english().startWithAorAn(body.charStats().getMyRace().name()).toLowerCase();
		String description=body.getMobDescription();
		if(description.trim().length()==0)
			description="It looks dead.";
		else
			description+="\n\rIt also looks dead.";
		final MOB newMOB=CMClass.getMOB("GenUndead");
		newMOB.setName(race+((mob==null)?" poltergeist":" ghost"));
		newMOB.setDescription(description);
		newMOB.setDisplayText(L("@x1 is here",newMOB.Name()));
		newMOB.basePhyStats().setLevel(level+(super.getX1Level(mob)*2)+super.getXLEVELLevel(mob));
		newMOB.baseCharStats().setStat(CharStats.STAT_GENDER,body.charStats().getStat(CharStats.STAT_GENDER));
		newMOB.baseCharStats().setMyRace(CMClass.getRace("Spirit"));
		newMOB.baseCharStats().setBodyPartsFromStringAfterRace(body.charStats().getBodyPartsAsString());
		// fix the ghost's stats to a frail-but-nimble incorporeal profile
		final Ability P=CMClass.getAbility("Prop_StatTrainer");
		if(P!=null)
		{
			P.setMiscText("NOTEACH STR=2 INT=10 WIS=10 CON=10 DEX=35 CHA=2");
			newMOB.addNonUninvokableEffect(P);
		}
		newMOB.recoverCharStats();
		newMOB.basePhyStats().setAttackAdjustment(10);
		// a masterless poltergeist starts invisible; a summoned ghost does not
		newMOB.basePhyStats().setDisposition(PhyStats.IS_FLYING|((mob==null)?PhyStats.IS_INVISIBLE:0));
		newMOB.basePhyStats().setSensesMask(PhyStats.CAN_SEE_DARK|PhyStats.CAN_SEE_INVISIBLE);
		newMOB.basePhyStats().setDamage(4);
		CMLib.factions().setAlignment(newMOB,Faction.Align.EVIL);
		newMOB.baseState().setHitPoints(10*newMOB.basePhyStats().level());
		newMOB.baseState().setMovement(CMLib.leveler().getLevelMove(newMOB));
		newMOB.basePhyStats().setArmor(CMLib.leveler().getLevelMOBArmor(newMOB));
		newMOB.baseState().setMana(100);
		newMOB.addNonUninvokableEffect(CMClass.getAbility("Prop_ModExperience"));
		final Ability A=CMClass.getAbility("Immunities");
		if(A!=null)
		{
			A.setMiscText("all");
			newMOB.addNonUninvokableEffect(A);
		}
		// aggressive toward everyone except the caster and higher-level MOBs
		Behavior B=CMClass.getBehavior("Aggressive");
		if((B!=null)&&(mob!=null))
		{
			B.setParms("+NAMES \"-"+mob.Name()+"\" -LEVEL +>"+newMOB.basePhyStats().level());
			newMOB.addBehavior(B);
		}
		newMOB.recoverCharStats();
		newMOB.recoverPhyStats();
		if(mob==null)
		{
			B=CMClass.getBehavior("Thiefness");
			if(B!=null)
				newMOB.addBehavior(B);
		}
		newMOB.recoverCharStats();
		newMOB.recoverPhyStats();
		newMOB.recoverMaxState();
		newMOB.resetToMaxState();
		newMOB.text();
		newMOB.bringToLife(R,true);
		CMLib.beanCounter().clearZeroMoney(newMOB,null);
		//R.showOthers(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> appears!"));
		// have the ghost loot and wear everything inside the corpse; the index
		// restarts from 0 whenever an item actually moved into the ghost's
		// inventory, since the room's item list shifted
		int it=0;
		while(it<newMOB.location().numItems())
		{
			final Item item=newMOB.location().getItem(it);
			if((item!=null)&&(item.container()==body))
			{
				final CMMsg msg2=CMClass.getMsg(newMOB,body,item,CMMsg.MSG_GET,null);
				newMOB.location().send(newMOB,msg2);
				final CMMsg msg4=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_GET,null);
				newMOB.location().send(newMOB,msg4);
				final CMMsg msg3=CMClass.getMsg(newMOB,item,null,CMMsg.MSG_WEAR,null);
				newMOB.location().send(newMOB,msg3);
				if(!newMOB.isMine(item))
					it++;
				else
					it=0;
			}
			else
				it++;
		}
		body.destroy();
		newMOB.setStartRoom(null);
		beneficialAffect(mob,newMOB,0,0);
		R.show(newMOB,null,CMMsg.MSG_OK_ACTION,L("<S-NAME> begin(s) to rise!"));
		R.recoverRoomStats();
	}
	/**
	 * Standard CoffeeMud ability entry point: validates the target is an
	 * animatable corpse (not a player corpse, not already undead, level >= 15),
	 * rolls proficiency, and on success animates it via makeGhostFrom().
	 */
	@Override
	public boolean invoke(MOB mob, List<String> commands, Physical givenTarget, boolean auto, int asLevel)
	{
		final Physical target=getAnyTarget(mob,commands,givenTarget,Wearable.FILTER_UNWORNONLY);
		if(target==null)
			return false;
		if(target==mob)
		{
			mob.tell(L("@x1 doesn't look dead yet.",target.name(mob)));
			return false;
		}
		if(!(target instanceof DeadBody))
		{
			mob.tell(L("You can't animate that."));
			return false;
		}
		final DeadBody body=(DeadBody)target;
		if(body.isPlayerCorpse()||(body.getMobName().length()==0)
		||((body.charStats()!=null)&&(body.charStats().getMyRace()!=null)&&(body.charStats().getMyRace().racialCategory().equalsIgnoreCase("Undead"))))
		{
			mob.tell(L("You can't animate that."));
			return false;
		}
		if(body.basePhyStats().level()<15)
		{
			mob.tell(L("This creature is too weak to create a ghost from."));
			return false;
		}
		if(!super.invoke(mob,commands,givenTarget,auto,asLevel))
			return false;
		final boolean success=proficiencyCheck(mob,0,auto);
		if(success)
		{
			final CMMsg msg=CMClass.getMsg(mob,target,this,verbalCastCode(mob,target,auto),auto?"":L("^S<S-NAME> @x1 to animate <T-NAMESELF> as a ghost.^?",prayForWord(mob)));
			if(mob.location().okMessage(mob,msg))
			{
				mob.location().send(mob,msg);
				makeGhostFrom(mob.location(),body,mob,14);
			}
		}
		else
			return beneficialWordsFizzle(mob,target,L("<S-NAME> @x1 to animate <T-NAMESELF>, but fail(s) miserably.",prayForWord(mob)));
		// return whether it worked
		return success;
	}
}
| |
/*
* Copyright (c) 2017, Apptentive, Inc. All Rights Reserved.
* Please refer to the LICENSE file for the terms and conditions
* under which redistribution and use of this file is permitted.
*/
package com.apptentive.android.sdk.model;
import com.apptentive.android.sdk.ApptentiveLog;
import com.apptentive.android.sdk.module.messagecenter.model.MessageCenterListItem;
import org.json.JSONException;
import org.json.JSONObject;
import java.util.Map;
import static com.apptentive.android.sdk.ApptentiveLogTag.MESSAGES;
import static com.apptentive.android.sdk.debug.ErrorMetrics.logException;
public abstract class ApptentiveMessage extends ConversationItem implements MessageCenterListItem {
public static final String KEY_ID = "id";
public static final String KEY_CREATED_AT = "created_at";
public static final String KEY_TYPE = "type";
public static final String KEY_HIDDEN = "hidden";
/** inbound here means inbound to the server. When this is true, the message is outgoing */
public static final String KEY_INBOUND = "inbound";
@SensitiveDataKey public static final String KEY_CUSTOM_DATA = "custom_data";
public static final String KEY_AUTOMATED = "automated";
public static final String KEY_SENDER = "sender";
public static final String KEY_SENDER_ID = "id";
@SensitiveDataKey private static final String KEY_SENDER_NAME = "name";
@SensitiveDataKey private static final String KEY_SENDER_PROFILE_PHOTO = "profile_photo";
// State and Read are not stored in JSON, only in DB.
private State state = State.unknown;
private boolean read = false;
// datestamp is only stored in memory, due to how we selectively apply date labeling in the view.
private String datestamp;
// this an abstract class so we don't need to register it's sensitive keys (subclasses will do)
protected ApptentiveMessage() {
super(PayloadType.message);
state = State.sending;
read = true; // This message originated here.
initType();
}
protected ApptentiveMessage(String json) throws JSONException {
super(PayloadType.message, json);
state = State.unknown;
initType();
}
protected abstract void initType();
public void setId(String id) {
put(KEY_ID, id);
}
public String getId() {
return optString(KEY_ID, null);
}
public Double getCreatedAt() {
return getDouble(KEY_CREATED_AT);
}
public void setCreatedAt(Double createdAt) {
put(KEY_CREATED_AT, createdAt);
}
public Type getMessageType() {
if (isNull(KEY_TYPE)) {
return Type.CompoundMessage;
}
String typeString = optString(KEY_TYPE, null);
return typeString == null ? Type.unknown : Type.parse(typeString);
}
protected void setType(Type type) {
put(KEY_TYPE, type.name());
}
public boolean isHidden() {
return getBoolean(KEY_HIDDEN);
}
public void setHidden(boolean hidden) {
put(KEY_HIDDEN, hidden);
}
public boolean isOutgoingMessage() {
// Default is true because this field is only set from the server.
return getBoolean(KEY_INBOUND, true);
}
public void setCustomData(Map<String, Object> customData) {
if (customData == null || customData.size() == 0) {
if (!isNull(KEY_CUSTOM_DATA)) {
remove(KEY_CUSTOM_DATA);
}
return;
}
try {
JSONObject customDataJson = new JSONObject();
for (String key : customData.keySet()) {
customDataJson.put(key, customData.get(key));
}
put(KEY_CUSTOM_DATA, customDataJson);
} catch (JSONException e) {
ApptentiveLog.e(e, "Exception setting ApptentiveMessage's %s field.", KEY_CUSTOM_DATA);
logException(e);
}
}
public State getState() {
if (state == null) {
return State.unknown;
}
return state;
}
public void setState(State state) {
this.state = state;
}
public boolean isRead() {
return read;
}
public void setRead(boolean read) {
this.read = read;
}
// For debugging only.
public void setSenderId(String senderId) {
try {
JSONObject sender;
if (!isNull((KEY_SENDER))) {
sender = getJSONObject(KEY_SENDER);
} else {
sender = new JSONObject();
put(KEY_SENDER, sender);
}
sender.put(KEY_SENDER_ID, senderId);
} catch (JSONException e) {
ApptentiveLog.e(e, "Exception setting ApptentiveMessage's %s field.", KEY_SENDER_ID);
logException(e);
}
}
public String getSenderUsername() {
try {
if (!isNull((KEY_SENDER))) {
JSONObject sender = getJSONObject(KEY_SENDER);
if (!sender.isNull((KEY_SENDER_NAME))) {
return sender.getString(KEY_SENDER_NAME);
}
}
} catch (JSONException e) {
logException(e);
}
return null;
}
public String getSenderProfilePhoto() {
try {
if (!isNull((KEY_SENDER))) {
JSONObject sender = getJSONObject(KEY_SENDER);
if (!sender.isNull((KEY_SENDER_PROFILE_PHOTO))) {
return sender.getString(KEY_SENDER_PROFILE_PHOTO);
}
}
} catch (JSONException e) {
logException(e);
}
return null;
}
public boolean getAutomated() {
return getBoolean(KEY_AUTOMATED);
}
public void setAutomated(boolean isAutomated) {
put(KEY_AUTOMATED, isAutomated);
}
public String getDatestamp() {
return datestamp;
}
/**
* Sets the datestamp for this message.
*
* @param datestamp A datestamp
* @return true if the datestamp was added or changed.
*/
public boolean setDatestamp(String datestamp) {
if (this.datestamp == null || !this.datestamp.equals(datestamp)) {
this.datestamp = datestamp;
return true;
} else {
return false;
}
}
/**
* Clears the datestamp from a message
*
* @return true If the datestamp existed and was cleared, false if it was already cleared.
*/
public boolean clearDatestamp() {
if (datestamp != null) {
this.datestamp = null;
return true;
} else {
return false;
}
}
public boolean isAutomatedMessage() {
return getAutomated();
}
public enum Type {
TextMessage,
FileMessage,
AutomatedMessage,
CompoundMessage,
// Unknown
unknown;
public static Type parse(String rawType) {
try {
return Type.valueOf(rawType);
} catch (IllegalArgumentException e) {
ApptentiveLog.v(MESSAGES, "Error parsing unknown ApptentiveMessage.Type: " + rawType);
logException(e);
}
return unknown;
}
}
public enum State {
sending, // The item is either being sent, or is queued for sending.
sent, // The item has been posted to the server successfully.
saved, // The item has been returned from the server during a fetch.
unknown;
public static State parse(String state) {
try {
return State.valueOf(state);
} catch (IllegalArgumentException e) {
ApptentiveLog.v(MESSAGES, "Error parsing unknown ApptentiveMessage.State: " + state);
logException(e);
}
return unknown;
}
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.plugin.raptor.legacy.metadata;
import com.google.common.collect.ImmutableSet;
import io.airlift.testing.TestingTicker;
import io.airlift.units.Duration;
import io.trino.plugin.raptor.legacy.backup.BackupStore;
import io.trino.plugin.raptor.legacy.backup.FileBackupStore;
import io.trino.plugin.raptor.legacy.storage.FileStorageService;
import io.trino.plugin.raptor.legacy.storage.StorageService;
import io.trino.plugin.raptor.legacy.util.DaoSupplier;
import io.trino.plugin.raptor.legacy.util.UuidUtil.UuidArgumentFactory;
import org.intellij.lang.annotations.Language;
import org.jdbi.v3.core.Handle;
import org.jdbi.v3.core.Jdbi;
import org.jdbi.v3.sqlobject.config.RegisterArgumentFactory;
import org.jdbi.v3.sqlobject.statement.GetGeneratedKeys;
import org.jdbi.v3.sqlobject.statement.SqlUpdate;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.sql.Timestamp;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import static com.google.common.io.Files.createTempDir;
import static com.google.common.io.MoreFiles.deleteRecursively;
import static com.google.common.io.RecursiveDeleteOption.ALLOW_INSECURE;
import static io.trino.plugin.raptor.legacy.DatabaseTesting.createTestingJdbi;
import static io.trino.plugin.raptor.legacy.metadata.SchemaDaoUtil.createTablesWithRetry;
import static io.trino.plugin.raptor.legacy.util.UuidUtil.uuidFromBytes;
import static io.trino.testing.QueryAssertions.assertEqualsIgnoreOrder;
import static java.util.Arrays.asList;
import static java.util.UUID.randomUUID;
import static java.util.concurrent.TimeUnit.HOURS;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
@Test(singleThreaded = true)
public class TestShardCleaner
{
private Jdbi dbi;
private Handle dummyHandle;
private File temporary;
private StorageService storageService;
private BackupStore backupStore;
private TestingTicker ticker;
private ShardCleaner cleaner;
    @BeforeMethod
    public void setup()
    {
        // Fresh in-memory metadata database per test; keep one handle open so
        // the database survives between on-demand DAO calls.
        dbi = createTestingJdbi();
        dummyHandle = dbi.open();
        createTablesWithRetry(dbi);
        // Local shard storage and file-based backup store under one temp dir.
        temporary = createTempDir();
        File directory = new File(temporary, "data");
        storageService = new FileStorageService(directory);
        storageService.start();
        File backupDirectory = new File(temporary, "backup");
        backupStore = new FileBackupStore(backupDirectory);
        ((FileBackupStore) backupStore).start();
        // TestingTicker lets tests advance "time" manually for age-based cleanup.
        ticker = new TestingTicker();
        ShardCleanerConfig config = new ShardCleanerConfig();
        cleaner = new ShardCleaner(
                new DaoSupplier<>(dbi, H2ShardDao.class),
                "node1",
                true, // act as coordinator
                ticker,
                storageService,
                Optional.of(backupStore),
                config.getMaxTransactionAge(),
                config.getTransactionCleanerInterval(),
                config.getLocalCleanerInterval(),
                config.getLocalCleanTime(),
                config.getBackupCleanerInterval(),
                config.getBackupCleanTime(),
                config.getBackupDeletionThreads(),
                config.getMaxCompletedTransactionAge());
    }
    @AfterMethod(alwaysRun = true)
    public void teardown()
            throws IOException
    {
        // Close the pinned handle (dropping the in-memory DB) and wipe temp storage.
        if (dummyHandle != null) {
            dummyHandle.close();
        }
        deleteRecursively(temporary.toPath(), ALLOW_INSECURE);
    }
    @Test
    public void testAbortOldTransactions()
    {
        TestingDao dao = dbi.onDemand(TestingDao.class);
        long now = System.currentTimeMillis();
        // txn1/txn2 are older than 24h (26h, 25h); txn3 is current
        long txn1 = dao.insertTransaction(new Timestamp(now - HOURS.toMillis(26)));
        long txn2 = dao.insertTransaction(new Timestamp(now - HOURS.toMillis(25)));
        long txn3 = dao.insertTransaction(new Timestamp(now));
        ShardDao shardDao = dbi.onDemand(ShardDao.class);
        // txn1 is already finalized (successful), so aging must not touch it
        assertEquals(shardDao.finalizeTransaction(txn1, true), 1);
        assertQuery("SELECT transaction_id, successful FROM transactions",
                row(txn1, true),
                row(txn2, null),
                row(txn3, null));
        cleaner.abortOldTransactions();
        // only the old, unfinalized txn2 gets aborted (successful = false);
        // the recent txn3 stays pending
        assertQuery("SELECT transaction_id, successful FROM transactions",
                row(txn1, true),
                row(txn2, false),
                row(txn3, null));
    }
@Test
public void testDeleteOldShards()
{
assertEquals(cleaner.getBackupShardsQueued().getTotalCount(), 0);
ShardDao dao = dbi.onDemand(ShardDao.class);
UUID shard1 = randomUUID();
UUID shard2 = randomUUID();
UUID shard3 = randomUUID();
// shards for failed transaction
long txn1 = dao.insertTransaction();
assertEquals(dao.finalizeTransaction(txn1, false), 1);
dao.insertCreatedShard(shard1, txn1);
dao.insertCreatedShard(shard2, txn1);
// shards for running transaction
long txn2 = dao.insertTransaction();
dao.insertCreatedShard(shard3, txn2);
// verify database
assertQuery("SELECT shard_uuid, transaction_id FROM created_shards",
row(shard1, txn1),
row(shard2, txn1),
row(shard3, txn2));
assertQuery("SELECT shard_uuid FROM deleted_shards");
// move shards for failed transaction to deleted
cleaner.deleteOldShards();
assertEquals(cleaner.getBackupShardsQueued().getTotalCount(), 2);
// verify database
assertQuery("SELECT shard_uuid, transaction_id FROM created_shards",
row(shard3, txn2));
assertQuery("SELECT shard_uuid FROM deleted_shards",
row(shard1),
row(shard2));
}
@Test
public void testCleanLocalShardsImmediately()
throws Exception
{
assertEquals(cleaner.getLocalShardsCleaned().getTotalCount(), 0);
TestingShardDao shardDao = dbi.onDemand(TestingShardDao.class);
MetadataDao metadataDao = dbi.onDemand(MetadataDao.class);
long tableId = metadataDao.insertTable("test", "test", false, false, null, 0);
UUID shard1 = randomUUID();
UUID shard2 = randomUUID();
UUID shard3 = randomUUID();
Set<UUID> shards = ImmutableSet.of(shard1, shard2, shard3);
for (UUID shard : shards) {
shardDao.insertShard(shard, tableId, null, 0, 0, 0, 0);
createShardFile(shard);
assertTrue(shardFileExists(shard));
}
int node1 = shardDao.insertNode("node1");
int node2 = shardDao.insertNode("node2");
// shard 1: referenced by this node
// shard 2: not referenced
// shard 3: referenced by other node
shardDao.insertShardNode(shard1, node1);
shardDao.insertShardNode(shard3, node2);
// clean shards immediately
Set<UUID> local = cleaner.getLocalShards();
cleaner.cleanLocalShardsImmediately(local);
assertEquals(cleaner.getLocalShardsCleaned().getTotalCount(), 2);
// shards 2 and 3 should be deleted
// shard 1 is referenced by this node
assertTrue(shardFileExists(shard1));
assertFalse(shardFileExists(shard2));
assertFalse(shardFileExists(shard3));
}
@Test
public void testCleanLocalShards()
throws Exception
{
assertEquals(cleaner.getLocalShardsCleaned().getTotalCount(), 0);
TestingShardDao shardDao = dbi.onDemand(TestingShardDao.class);
MetadataDao metadataDao = dbi.onDemand(MetadataDao.class);
long tableId = metadataDao.insertTable("test", "test", false, false, null, 0);
UUID shard1 = randomUUID();
UUID shard2 = randomUUID();
UUID shard3 = randomUUID();
UUID shard4 = randomUUID();
Set<UUID> shards = ImmutableSet.of(shard1, shard2, shard3, shard4);
for (UUID shard : shards) {
shardDao.insertShard(shard, tableId, null, 0, 0, 0, 0);
createShardFile(shard);
assertTrue(shardFileExists(shard));
}
int node1 = shardDao.insertNode("node1");
int node2 = shardDao.insertNode("node2");
// shard 1: referenced by this node
// shard 2: not referenced
// shard 3: not referenced
// shard 4: referenced by other node
shardDao.insertShardNode(shard1, node1);
shardDao.insertShardNode(shard4, node2);
// mark unreferenced shards
cleaner.cleanLocalShards();
assertEquals(cleaner.getLocalShardsCleaned().getTotalCount(), 0);
// make sure nothing is deleted
for (UUID shard : shards) {
assertTrue(shardFileExists(shard));
}
// add reference for shard 3
shardDao.insertShardNode(shard3, node1);
// advance time beyond clean time
Duration cleanTime = new ShardCleanerConfig().getLocalCleanTime();
ticker.increment(cleanTime.toMillis() + 1, MILLISECONDS);
// clean shards
cleaner.cleanLocalShards();
assertEquals(cleaner.getLocalShardsCleaned().getTotalCount(), 2);
// shards 2 and 4 should be deleted
// shards 1 and 3 are referenced by this node
assertTrue(shardFileExists(shard1));
assertFalse(shardFileExists(shard2));
assertTrue(shardFileExists(shard3));
assertFalse(shardFileExists(shard4));
}
@Test
public void testCleanBackupShards()
throws Exception
{
assertEquals(cleaner.getBackupShardsCleaned().getTotalCount(), 0);
TestingDao dao = dbi.onDemand(TestingDao.class);
UUID shard1 = randomUUID();
UUID shard2 = randomUUID();
UUID shard3 = randomUUID();
long now = System.currentTimeMillis();
Timestamp time1 = new Timestamp(now - HOURS.toMillis(25));
Timestamp time2 = new Timestamp(now - HOURS.toMillis(23));
// shard 1: should be cleaned
dao.insertDeletedShard(shard1, time1);
// shard 2: should be cleaned
dao.insertDeletedShard(shard2, time1);
// shard 3: deleted too recently
dao.insertDeletedShard(shard3, time2);
createShardBackups(shard1, shard2, shard3);
cleaner.cleanBackupShards();
assertEquals(cleaner.getBackupShardsCleaned().getTotalCount(), 2);
assertFalse(shardBackupExists(shard1));
assertFalse(shardBackupExists(shard2));
assertTrue(shardBackupExists(shard3));
assertQuery("SELECT shard_uuid FROM deleted_shards",
row(shard3));
}
@Test
public void testDeleteOldCompletedTransactions()
{
TestingDao dao = dbi.onDemand(TestingDao.class);
ShardDao shardDao = dbi.onDemand(ShardDao.class);
long now = System.currentTimeMillis();
Timestamp yesterdayStart = new Timestamp(now - HOURS.toMillis(27));
Timestamp yesterdayEnd = new Timestamp(now - HOURS.toMillis(26));
Timestamp todayEnd = new Timestamp(now - HOURS.toMillis(1));
long txn1 = dao.insertTransaction(yesterdayStart);
long txn2 = dao.insertTransaction(yesterdayStart);
long txn3 = dao.insertTransaction(yesterdayStart);
long txn4 = dao.insertTransaction(yesterdayStart);
long txn5 = dao.insertTransaction(new Timestamp(now));
long txn6 = dao.insertTransaction(new Timestamp(now));
assertEquals(shardDao.finalizeTransaction(txn1, true), 1);
assertEquals(shardDao.finalizeTransaction(txn2, false), 1);
assertEquals(shardDao.finalizeTransaction(txn3, false), 1);
assertEquals(shardDao.finalizeTransaction(txn5, true), 1);
assertEquals(shardDao.finalizeTransaction(txn6, false), 1);
assertEquals(dao.updateTransactionEndTime(txn1, yesterdayEnd), 1);
assertEquals(dao.updateTransactionEndTime(txn2, yesterdayEnd), 1);
assertEquals(dao.updateTransactionEndTime(txn3, yesterdayEnd), 1);
assertEquals(dao.updateTransactionEndTime(txn5, todayEnd), 1);
assertEquals(dao.updateTransactionEndTime(txn6, todayEnd), 1);
shardDao.insertCreatedShard(randomUUID(), txn2);
shardDao.insertCreatedShard(randomUUID(), txn2);
assertQuery("SELECT transaction_id, successful, end_time FROM transactions",
row(txn1, true, yesterdayEnd), // old successful
row(txn2, false, yesterdayEnd), // old failed, shards present
row(txn3, false, yesterdayEnd), // old failed, no referencing shards
row(txn4, null, null), // old not finished
row(txn5, true, todayEnd), // new successful
row(txn6, false, todayEnd)); // new failed, no referencing shards
cleaner.deleteOldCompletedTransactions();
assertQuery("SELECT transaction_id, successful, end_time FROM transactions",
row(txn2, false, yesterdayEnd),
row(txn4, null, null),
row(txn5, true, todayEnd),
row(txn6, false, todayEnd));
}
private boolean shardFileExists(UUID uuid)
{
return storageService.getStorageFile(uuid).exists();
}
private void createShardFile(UUID uuid)
throws IOException
{
File file = storageService.getStorageFile(uuid);
storageService.createParents(file);
assertTrue(file.createNewFile());
}
private boolean shardBackupExists(UUID uuid)
{
return backupStore.shardExists(uuid);
}
private void createShardBackups(UUID... uuids)
throws IOException
{
for (UUID uuid : uuids) {
File file = new File(temporary, "empty-" + randomUUID());
assertTrue(file.createNewFile());
backupStore.backupShard(uuid, file);
}
}
@SafeVarargs
private final void assertQuery(@Language("SQL") String sql, List<Object>... rows)
{
assertEqualsIgnoreOrder(select(sql), asList(rows));
}
private List<List<Object>> select(@Language("SQL") String sql)
{
return dbi.withHandle(handle -> handle.createQuery(sql)
.map((rs, index, context) -> {
int count = rs.getMetaData().getColumnCount();
List<Object> row = new ArrayList<>(count);
for (int i = 1; i <= count; i++) {
Object value = rs.getObject(i);
if (value instanceof byte[]) {
value = uuidFromBytes((byte[]) value);
}
row.add(value);
}
return row;
})
.list());
}
private static List<Object> row(Object... values)
{
return asList(values);
}
@RegisterArgumentFactory(UuidArgumentFactory.class)
private interface TestingDao
{
@SqlUpdate("INSERT INTO transactions (start_time) VALUES (:startTime)")
@GetGeneratedKeys
long insertTransaction(Timestamp startTime);
@SqlUpdate("INSERT INTO deleted_shards (shard_uuid, delete_time)\n" +
"VALUES (:shardUuid, :deleteTime)")
void insertDeletedShard(UUID shardUuid, Timestamp deleteTime);
@SqlUpdate("UPDATE transactions SET end_time = :endTime WHERE transaction_id = :transactionId")
int updateTransactionEndTime(long transactionId, Timestamp endTime);
}
}
| |
// Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package org.jetbrains.idea.svn.ignore;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.vcs.VcsException;
import com.intellij.openapi.vcs.changes.VcsDirtyScopeManager;
import com.intellij.openapi.vfs.VirtualFile;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.idea.svn.SvnPropertyKeys;
import org.jetbrains.idea.svn.SvnVcs;
import org.jetbrains.idea.svn.api.Depth;
import org.jetbrains.idea.svn.api.Revision;
import org.jetbrains.idea.svn.api.Target;
import org.jetbrains.idea.svn.properties.PropertyValue;
import java.io.File;
import java.util.*;
import static com.intellij.openapi.vfs.VfsUtilCore.virtualToIoFile;
/**
 * Static helpers for reading and editing the {@code svn:ignore} property on
 * folders: adding entries, removing entries, and checking whether given files
 * (or a common extension pattern) are already ignored.
 *
 * <p>The work is structured as a template-method hierarchy
 * ({@link IgnorePropertyWorkTemplate}) that iterates over the folders reported
 * by an {@link IgnoreInfoGetter} and processes each folder's property value.
 */
public class SvnPropertyService {
  // Utility class; not instantiable.
  private SvnPropertyService() {
  }

  /** Adds the given files (or their common extension pattern) to svn:ignore. */
  public static void doAddToIgnoreProperty(@NotNull SvnVcs vcs, boolean useCommonExtension, VirtualFile[] file, IgnoreInfoGetter getter)
    throws VcsException {
    final IgnorePropertyAdder adder = new IgnorePropertyAdder(vcs, useCommonExtension);
    adder.execute(file, getter);
  }

  /** Removes the given files (or their common extension pattern) from svn:ignore. */
  public static void doRemoveFromIgnoreProperty(@NotNull SvnVcs vcs,
                                                boolean useCommonExtension,
                                                VirtualFile[] file,
                                                IgnoreInfoGetter getter) throws VcsException {
    final IgnorePropertyRemover remover = new IgnorePropertyRemover(vcs, useCommonExtension);
    remover.execute(file, getter);
  }

  /**
   * Checks whether all given files, and separately the extension pattern, are
   * already covered by svn:ignore. Results are reported through the two Refs:
   * {@code filesOk} — every file name is ignored; {@code extensionOk} — the
   * extension pattern is ignored in every folder.
   */
  public static void doCheckIgnoreProperty(@NotNull SvnVcs vcs,
                                           VirtualFile[] file,
                                           IgnoreInfoGetter getter,
                                           String extensionPattern,
                                           Ref<? super Boolean> filesOk,
                                           Ref<? super Boolean> extensionOk) {
    final IgnorePropertyChecker checker = new IgnorePropertyChecker(vcs, extensionPattern);
    try {
      checker.execute(file, getter);
    } catch (VcsException e) {
      // ignore - actually never thrown inside
    }
    filesOk.set(checker.filesOk());
    extensionOk.set(checker.extensionOk());
  }

  /**
   * Template method: iterates the folders supplied by the getter, fetches each
   * folder's svn:ignore property value (optionally via the VCS cache), and
   * delegates per-folder work to {@link #processFolder}. Exceptions from a
   * single folder are routed to {@link #onSVNException} so iteration continues.
   */
  private static abstract class IgnorePropertyWorkTemplate {
    @NotNull protected final SvnVcs myVcs;
    // True when operating on a shared "*.ext"-style pattern rather than
    // individual file names.
    protected final boolean myUseCommonExtension;
    // True to read the property through the VCS cache (read-only checks);
    // writers must read fresh values.
    protected final boolean myCanUseCachedProperty;

    // Per-folder work: 'data' is the set of names/patterns relevant to the
    // folder, 'propertyValue' is the current svn:ignore value (may be null).
    protected abstract void processFolder(final VirtualFile folder, final File folderDir, final Set<String> data,
                                          final PropertyValue propertyValue) throws VcsException;

    // Called once after all folders were processed (or iteration stopped).
    protected abstract void onAfterProcessing(final VirtualFile[] file) throws VcsException;

    // Called for a folder whose property access or update failed.
    protected abstract void onSVNException(Exception e);

    // Checked before each folder; true aborts the remaining iteration early.
    protected abstract boolean stopIteration();

    private IgnorePropertyWorkTemplate(@NotNull SvnVcs vcs, boolean useCommonExtension, boolean canUseCachedProperty) {
      myVcs = vcs;
      myCanUseCachedProperty = canUseCachedProperty;
      myUseCommonExtension = useCommonExtension;
    }

    public void execute(final VirtualFile[] file, final IgnoreInfoGetter getter) throws VcsException {
      final Map<VirtualFile, Set<String>> foldersInfo = getter.getInfo(myUseCommonExtension);
      for (final Map.Entry<VirtualFile, Set<String>> entry : foldersInfo.entrySet()) {
        if (stopIteration()) {
          break;
        }
        final File dir = virtualToIoFile(entry.getKey());
        try {
          final PropertyValue value;
          if (myCanUseCachedProperty) {
            value = myVcs.getPropertyWithCaching(entry.getKey(), SvnPropertyKeys.SVN_IGNORE);
          } else {
            value = myVcs.getFactory(dir).createPropertyClient()
              .getProperty(Target.on(dir), SvnPropertyKeys.SVN_IGNORE, false, Revision.WORKING);
          }
          processFolder(entry.getKey(), dir, entry.getValue(), value);
        }
        catch (VcsException e) {
          onSVNException(e);
        }
      }
      onAfterProcessing(file);
    }
  }

  /**
   * Read-only check: determines whether all file names and/or the extension
   * pattern are present in every folder's svn:ignore. Uses cached property
   * values and stops early once both answers are known to be negative.
   */
  private static class IgnorePropertyChecker extends IgnorePropertyWorkTemplate {
    private final String myExtensionPattern;
    private boolean myFilesOk;
    private boolean myExtensionOk;

    private IgnorePropertyChecker(@NotNull SvnVcs vcs, String extensionPattern) {
      super(vcs, false, true);
      myExtensionPattern = extensionPattern;
      // Start optimistic; any folder missing an entry flips these to false.
      myExtensionOk = true;
      myFilesOk = true;
    }

    @Override
    protected boolean stopIteration() {
      // Nothing left to learn once both checks have failed.
      return (! myFilesOk) && (! myExtensionOk);
    }

    @Override
    protected void processFolder(final VirtualFile folder, final File folderDir, final Set<String> data, final PropertyValue propertyValue) {
      if (propertyValue == null) {
        // No svn:ignore property at all -> nothing can be ignored here.
        myFilesOk = false;
        myExtensionOk = false;
        return;
      }
      // NOTE(review): patterns are split on CR, LF and space; svn:ignore is
      // normally newline-separated — patterns containing spaces would be
      // split apart here. Preserving existing behavior.
      final Set<String> ignorePatterns = new HashSet<>();
      final StringTokenizer st = new StringTokenizer(PropertyValue.toString(propertyValue), "\r\n ");
      while (st.hasMoreElements()) {
        final String ignorePattern = (String)st.nextElement();
        ignorePatterns.add(ignorePattern);
      }
      myExtensionOk &= ignorePatterns.contains(myExtensionPattern);
      for (final String fileName : data) {
        if (!ignorePatterns.contains(fileName)) {
          myFilesOk = false;
        }
      }
    }

    @Override
    protected void onAfterProcessing(final VirtualFile[] file) {
      // Pure check: nothing to flush or mark dirty.
    }

    @Override
    protected void onSVNException(final Exception e) {
      // Treat any failure to read the property as "not ignored".
      myFilesOk = false;
      myExtensionOk = false;
    }

    public boolean filesOk() {
      return myFilesOk;
    }

    public boolean extensionOk() {
      return myExtensionOk;
    }
  }

  /**
   * Shared base for add/remove: computes a new property value per folder,
   * writes it back, marks affected files/folders dirty for the VCS, and
   * accumulates per-folder errors into a single VcsException thrown at the end.
   */
  private abstract static class IgnorePropertyAddRemoveTemplate extends IgnorePropertyWorkTemplate {
    // Collected error messages from individual folders; thrown together after
    // all folders were attempted.
    private final Collection<String> exceptions;
    private final VcsDirtyScopeManager dirtyScopeManager;

    private IgnorePropertyAddRemoveTemplate(@NotNull SvnVcs vcs, boolean useCommonExtension) {
      // Writers must not use cached property values (canUseCachedProperty = false).
      super(vcs, useCommonExtension, false);
      exceptions = new ArrayList<>();
      dirtyScopeManager = VcsDirtyScopeManager.getInstance(vcs.getProject());
    }

    @Override
    protected boolean stopIteration() {
      // Always process every folder, even after errors.
      return false;
    }

    // Computes the replacement svn:ignore value for one folder.
    protected abstract String getNewPropertyValue(final Set<String> data, final PropertyValue propertyValue);

    @Override
    protected void processFolder(final VirtualFile folder, final File folderDir, final Set<String> data, final PropertyValue propertyValue)
      throws VcsException {
      String newValue = getNewPropertyValue(data, propertyValue);
      // An empty result means "delete the property" (null value).
      newValue = (newValue.trim().isEmpty()) ? null : newValue;
      myVcs.getFactory(folderDir).createPropertyClient()
        .setProperty(folderDir, SvnPropertyKeys.SVN_IGNORE, PropertyValue.create(newValue), Depth.EMPTY, false);
      if (myUseCommonExtension) {
        // A pattern change can affect any file under the folder.
        dirtyScopeManager.dirDirtyRecursively(folder);
      }
    }

    @Override
    protected void onAfterProcessing(final VirtualFile[] file) throws VcsException {
      if (! myUseCommonExtension) {
        // Only the explicitly named files changed ignore status.
        for (VirtualFile virtualFile : file) {
          dirtyScopeManager.fileDirty(virtualFile);
        }
      }
      if (!exceptions.isEmpty()) {
        throw new VcsException(exceptions);
      }
    }

    @Override
    protected void onSVNException(final Exception e) {
      exceptions.add(e.getMessage());
    }
  }

  /** Removes the given names from a folder's svn:ignore value. */
  private static class IgnorePropertyRemover extends IgnorePropertyAddRemoveTemplate {
    private IgnorePropertyRemover(@NotNull SvnVcs vcs, boolean useCommonExtension) {
      super(vcs, useCommonExtension);
    }

    @Override
    protected String getNewPropertyValue(final Set<String> data, final PropertyValue propertyValue) {
      if (propertyValue != null) {
        return getNewPropertyValueForRemove(data, PropertyValue.toString(propertyValue));
      }
      // No property present -> nothing to remove; empty value deletes it.
      return "";
    }
  }

  /**
   * Rebuilds the property value keeping only patterns not present in 'data'.
   * Splits on CR/LF/space (same tokenization as the checker) and joins kept
   * patterns with '\n'.
   */
  private static String getNewPropertyValueForRemove(final Collection<String> data, @NotNull final String propertyValue) {
    final StringBuilder sb = new StringBuilder();
    final StringTokenizer st = new StringTokenizer(propertyValue, "\r\n ");
    while (st.hasMoreElements()) {
      final String ignorePattern = (String)st.nextElement();
      if (! data.contains(ignorePattern)) {
        sb.append(ignorePattern).append('\n');
      }
    }
    return sb.toString();
  }

  /** Appends the given names to a folder's svn:ignore value. */
  private static class IgnorePropertyAdder extends IgnorePropertyAddRemoveTemplate {
    private IgnorePropertyAdder(@NotNull SvnVcs vcs, boolean useCommonExtension) {
      super(vcs, useCommonExtension);
    }

    @Override
    protected String getNewPropertyValue(final Set<String> data, final PropertyValue propertyValue) {
      final String ignoreString;
      if (data.size() == 1) {
        // Single entry is appended without a trailing newline.
        ignoreString = data.iterator().next();
      } else {
        final StringBuilder sb = new StringBuilder();
        for (final String name : data) {
          sb.append(name).append('\n');
        }
        ignoreString = sb.toString();
      }
      // Append to the existing value, or start a fresh one if none exists.
      return (propertyValue == null) ? ignoreString : (PropertyValue.toString(propertyValue) + '\n' + ignoreString);
    }
  }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.