gt stringclasses 1
value | context stringlengths 2.05k 161k |
|---|---|
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.analytics.data;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import org.hisp.dhis.analytics.AggregationType;
import org.hisp.dhis.analytics.AnalyticsAggregationType;
import org.hisp.dhis.analytics.DataQueryParams;
import org.hisp.dhis.analytics.DataType;
import org.hisp.dhis.analytics.event.EventQueryParams;
import org.hisp.dhis.common.DimensionalItemObject;
import org.hisp.dhis.common.ListMap;
import org.hisp.dhis.common.QueryModifiers;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.util.ObjectUtils;
/**
* Utilities for analytics query planning.
*
* @author Lars Helge Overland
*/
public class QueryPlannerUtils
{
/**
* Creates a mapping between level and organisation units for the given
* organisation units.
*
* @param orgUnits list of organisation units.
*/
public static ListMap<Integer, DimensionalItemObject> getLevelOrgUnitMap( List<DimensionalItemObject> orgUnits )
{
ListMap<Integer, DimensionalItemObject> map = new ListMap<>();
for ( DimensionalItemObject orgUnit : orgUnits )
{
OrganisationUnit ou = (OrganisationUnit) orgUnit;
map.putValue( ou.getLevel(), orgUnit );
}
return map;
}
/**
* Creates a mapping between level and organisation units for the given
* organisation units.
*
* @param orgUnits list of organisation units.
*/
public static ListMap<Integer, OrganisationUnit> getLevelOrgUnitTypedMap( List<OrganisationUnit> orgUnits )
{
ListMap<Integer, OrganisationUnit> map = new ListMap<>();
orgUnits.stream().forEach( ou -> map.putValue( ou.getLevel(), ou ) );
return map;
}
/**
* Creates a mapping between data type and data elements for the given data
* elements.
*
* @param dataElements list of data elements.
*/
public static ListMap<DataType, DimensionalItemObject> getDataTypeDataElementMap(
List<DimensionalItemObject> dataElements )
{
ListMap<DataType, DimensionalItemObject> map = new ListMap<>();
for ( DimensionalItemObject element : dataElements )
{
DataElement dataElement = (DataElement) element;
ValueType valueType = dataElement.getValueType();
// Both text and date types are recognized as DataType.TEXT
DataType dataType = (valueType.isText() || valueType.isDate()) ? DataType.TEXT : DataType.NUMERIC;
map.putValue( dataType, dataElement );
}
return map;
}
/**
* Creates a mapping between minDate/maxDate query modifiers and data
* elements for the given data elements.
*
* @param dataElements list of data elements.
*/
public static ListMap<QueryModifiers, DimensionalItemObject> getMinMaxDateDateElementMap(
List<DimensionalItemObject> dataElements )
{
ListMap<QueryModifiers, DimensionalItemObject> map = new ListMap<>();
for ( DimensionalItemObject element : dataElements )
{
QueryModifiers queryMods = element.getQueryMods();
// Get QueryModifiers but only with min and max date, nothing else
QueryModifiers minMaxDateModifiers = (queryMods == null)
? QueryModifiers.builder().build()
: QueryModifiers.builder().minDate( queryMods.getMinDate() ).maxDate( queryMods.getMaxDate() ).build();
map.putValue( minMaxDateModifiers, element );
}
return map;
}
/**
* Creates a mapping between the aggregation type and data elements for the
* given data elements and period type.
*
* @param dataElements a List of {@see DimensionalItemObject}
* @param aggregationType an {@see AnalyticsAggregationType}
* @param periodType a String representing a Period Type (e.g. 201901)
*/
public static ListMap<AnalyticsAggregationType, DimensionalItemObject> getAggregationTypeDataElementMap(
List<DimensionalItemObject> dataElements, AnalyticsAggregationType aggregationType, String periodType )
{
PeriodType aggregationPeriodType = PeriodType.getPeriodTypeByName( periodType );
ListMap<AnalyticsAggregationType, DimensionalItemObject> map = new ListMap<>();
for ( DimensionalItemObject element : dataElements )
{
DataElement de = (DataElement) element;
AnalyticsAggregationType aggType = ObjectUtils.firstNonNull( aggregationType,
AnalyticsAggregationType.fromAggregationType( de.getAggregationType() ) );
AnalyticsAggregationType analyticsAggregationType = getAggregationType( aggType, de.getValueType(),
aggregationPeriodType, de.getPeriodType() );
map.putValue( analyticsAggregationType, de );
}
return map;
}
/**
* Creates a mapping between the number of days in the period interval and
* periods for the given periods.
*
* @param periods
*/
public static ListMap<Integer, DimensionalItemObject> getDaysPeriodMap( List<DimensionalItemObject> periods )
{
ListMap<Integer, DimensionalItemObject> map = new ListMap<>();
for ( DimensionalItemObject period : periods )
{
Period pe = (Period) period;
map.putValue( pe.getDaysInPeriod(), pe );
}
return map;
}
/**
* Returns the {@link AnalyticsAggregationType} according to the given value
* type, aggregation type, value type aggregation period type and data
* period type.
*
* @param aggregationType the aggregation type.
* @param valueType the value type.
* @param aggregationPeriodType the aggregation period type.
* @param dataPeriodType the data period type.
*/
public static AnalyticsAggregationType getAggregationType( AnalyticsAggregationType aggregationType,
ValueType valueType,
PeriodType aggregationPeriodType, PeriodType dataPeriodType )
{
DataType dataType = DataType.fromValueType( valueType );
boolean disaggregation = isDisaggregation( aggregationType, aggregationPeriodType, dataPeriodType );
return new AnalyticsAggregationType( aggregationType.getAggregationType(),
aggregationType.getPeriodAggregationType(), dataType, disaggregation );
}
/**
* Indicates whether disaggregation is allowed for the given input.
* Disaggregation implies that the frequency order of the aggregation period
* type is lower than the data period type.
*
* @param aggregationPeriodType the aggregation period type.
* @param dataPeriodType the data period type.
*/
public static boolean isDisaggregation( AnalyticsAggregationType aggregationType, PeriodType aggregationPeriodType,
PeriodType dataPeriodType )
{
if ( dataPeriodType == null || aggregationPeriodType == null )
{
return false;
}
if ( aggregationType == null || AggregationType.AVERAGE != aggregationType.getPeriodAggregationType() )
{
return false;
}
if ( aggregationPeriodType.getFrequencyOrder() < dataPeriodType.getFrequencyOrder() )
{
return true;
}
if ( aggregationPeriodType.getFrequencyOrder() == dataPeriodType.getFrequencyOrder() &&
!aggregationPeriodType.equals( dataPeriodType ) )
{
return true;
}
return false;
}
/**
* Creates a mapping between the period type and data elements for the given
* list of data elements.
*
* @param dataElements the list of data elements.
*/
public static ListMap<PeriodType, DimensionalItemObject> getPeriodTypeDataElementMap(
Collection<DimensionalItemObject> dataElements )
{
ListMap<PeriodType, DimensionalItemObject> map = new ListMap<>();
dataElements.forEach( de -> map.putValue( ((DataElement) de).getPeriodType(), de ) );
return map;
}
/**
* Converts a list of data query parameters to a list of event query
* parameters.
*
* @param params the list of data query parameters.
*/
public static List<EventQueryParams> convert( List<DataQueryParams> params )
{
List<EventQueryParams> eventParams = new ArrayList<>();
params.forEach( p -> eventParams.add( (EventQueryParams) p ) );
return eventParams;
}
}
| |
package ru.mail.polis;
import org.apache.http.HttpResponse;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.Timeout;
import java.io.File;
import java.io.IOException;
import java.util.Arrays;
import java.util.Collections;
import java.util.LinkedHashSet;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
/**
* Unit tests for a two node {@link KVService} cluster
*
* @author Vadim Tsesko <mail@incubos.org>
*/
public class TwoNodeTest extends ClusterTestBase {
    // Hard upper bound per test; replication round trips should finish well within it
    @Rule
    public final Timeout globalTimeout = Timeout.seconds(3);

    private int port0;
    private int port1;
    private File data0;
    private File data1;
    private KVService storage0;
    private KVService storage1;

    /**
     * Starts a fresh two node cluster on random ports with empty temp
     * directories before every test.
     */
    @Before
    public void beforeEach() throws IOException, InterruptedException {
        port0 = randomPort();
        port1 = randomPort();
        endpoints = new LinkedHashSet<>(Arrays.asList(endpoint(port0), endpoint(port1)));
        data0 = Files.createTempDirectory();
        data1 = Files.createTempDirectory();
        // NOTE(review): if creating or starting storage1 throws, storage0 keeps
        // running and data0 is not removed -- confirm the harness tolerates a
        // partially initialized fixture, or add cleanup on partial failure.
        storage0 = KVServiceFactory.create(port0, data0, endpoints);
        storage0.start();
        storage1 = KVServiceFactory.create(port1, data1, endpoints);
        storage1.start();
    }

    /**
     * Stops both nodes and removes both data directories. The original ran the
     * steps sequentially, so an exception from {@code storage0.stop()} leaked
     * the second node and both temp directories; the nested try/finally
     * guarantees every cleanup step is attempted.
     */
    @After
    public void afterEach() throws IOException {
        try {
            storage0.stop();
        } finally {
            try {
                storage1.stop();
            } finally {
                try {
                    Files.recursiveDelete(data0);
                } finally {
                    try {
                        Files.recursiveDelete(data1);
                    } finally {
                        endpoints = Collections.emptySet();
                    }
                }
            }
        }
    }

    /** A replication factor below 1 must be rejected with 400 Bad Request. */
    @Test
    public void tooSmallRF() throws Exception {
        assertEquals(400, get(0, randomKey(), 0, 2).getStatusLine().getStatusCode());
        assertEquals(400, upsert(0, randomKey(), randomValue(), 0, 2).getStatusLine().getStatusCode());
        assertEquals(400, delete(0, randomKey(), 0, 2).getStatusLine().getStatusCode());
    }

    /** ack greater than the cluster size must be rejected with 400 Bad Request. */
    @Test
    public void tooBigRF() throws Exception {
        assertEquals(400, get(0, randomKey(), 3, 2).getStatusLine().getStatusCode());
        assertEquals(400, upsert(0, randomKey(), randomValue(), 3, 2).getStatusLine().getStatusCode());
        assertEquals(400, delete(0, randomKey(), 3, 2).getStatusLine().getStatusCode());
    }

    /** With one node down, 2/2 operations cannot gather acks: 504 expected. */
    @Test
    public void unreachableRF() throws Exception {
        storage0.stop();
        assertEquals(504, get(1, randomKey(), 2, 2).getStatusLine().getStatusCode());
        assertEquals(504, upsert(1, randomKey(), randomValue(), 2, 2).getStatusLine().getStatusCode());
        assertEquals(504, delete(1, randomKey(), 2, 2).getStatusLine().getStatusCode());
    }

    /** A 1/2 write must be visible to a subsequent 2/2 read (quorum overlap). */
    @Test
    public void overlapRead() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Insert
        assertEquals(201, upsert(0, key, value, 1, 2).getStatusLine().getStatusCode());
        // Check
        final HttpResponse response = get(1, key, 2, 2);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertArrayEquals(value, payloadOf(response));
    }

    /** A 2/2 write must be visible to a subsequent 1/2 read (quorum overlap). */
    @Test
    public void overlapWrite() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Insert
        assertEquals(201, upsert(0, key, value, 2, 2).getStatusLine().getStatusCode());
        // Check
        final HttpResponse response = get(1, key, 1, 2);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertArrayEquals(value, payloadOf(response));
    }

    /** A 2/2 delete must be visible to a subsequent 1/2 read. */
    @Test
    public void overlapDelete() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Insert
        assertEquals(201, upsert(0, key, value, 2, 2).getStatusLine().getStatusCode());
        // Check
        HttpResponse response = get(1, key, 1, 2);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertArrayEquals(value, payloadOf(response));
        // Delete
        assertEquals(202, delete(0, key, 2, 2).getStatusLine().getStatusCode());
        // Check
        response = get(1, key, 1, 2);
        assertEquals(404, response.getStatusLine().getStatusCode());
    }

    /** A write missed by a downed node must still be readable at 2/2 after restart. */
    @Test
    public void missedWrite() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Stop node 1
        storage1.stop();
        // Insert
        assertEquals(201, upsert(0, key, value, 1, 2).getStatusLine().getStatusCode());
        // Start node 1
        storage1 = KVServiceFactory.create(port1, data1, endpoints);
        storage1.start();
        // Check
        final HttpResponse response = get(1, key, 2, 2);
        assertEquals(200, response.getStatusLine().getStatusCode());
        assertArrayEquals(value, payloadOf(response));
    }

    /** A delete missed by a downed node must win over the stale value at 2/2. */
    @Test
    public void missedDelete() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Insert
        assertEquals(201, upsert(0, key, value, 2, 2).getStatusLine().getStatusCode());
        // Stop node 0
        storage0.stop();
        // Delete
        assertEquals(202, delete(1, key, 1, 2).getStatusLine().getStatusCode());
        // Start node 0
        storage0 = KVServiceFactory.create(port0, data0, endpoints);
        storage0.start();
        // Check
        final HttpResponse response = get(0, key, 2, 2);
        assertEquals(404, response.getStatusLine().getStatusCode());
    }

    /** A 1/1 write must land on exactly one of the two nodes. */
    @Test
    public void respectRF() throws Exception {
        final String key = randomKey();
        final byte[] value = randomValue();
        // Insert
        assertEquals(201, upsert(0, key, value, 1, 1).getStatusLine().getStatusCode());
        int copies = 0;
        // Stop node 0
        storage0.stop();
        // Check
        if (get(1, key, 1, 1).getStatusLine().getStatusCode() == 200) {
            copies++;
        }
        // Start node 0
        storage0 = KVServiceFactory.create(port0, data0, endpoints);
        storage0.start();
        // Stop node 1
        storage1.stop();
        // Check
        if (get(0, key, 1, 1).getStatusLine().getStatusCode() == 200) {
            copies++;
        }
        // Start node 1
        storage1 = KVServiceFactory.create(port1, data1, endpoints);
        storage1.start();
        // Check
        assertEquals(1, copies);
    }
}
| |
/*
* Copyright (c) 2012-2014 EMC Corporation
* All Rights Reserved
*/
package com.emc.storageos.model.property;
import static com.emc.storageos.model.property.PropertyConstants.*;
import java.util.Arrays;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import org.codehaus.jackson.annotate.JsonIgnore;
import org.codehaus.jackson.annotate.JsonProperty;
@XmlRootElement
/**
 * Metadata describing a single configuration property: its display
 * information, value constraints, lifecycle flags and default value.
 * Serialized via JAXB and Jackson; bean shape must stay stable.
 */
@XmlRootElement
public class PropertyMetadata {
    private String label;
    private String description;
    private String type;
    private String tag;
    private Integer minLen = 0; // STRING type only; lowest valid value is 0
    private Integer maxLen = 65534; // STRING type only; highest valid value is 65534 (?)
    private String[] allowedValues = new String[0]; // STRING and INT types only
    private Boolean userConfigurable = false; // OVF only
    private Boolean userMutable = false; // wizard and syssvc API
    private Boolean advanced = false; // advanced wizard only
    private Boolean hidden = false; // hidden from wizard and syssvc API unless forced
    private Boolean reconfigRequired = false;
    private Boolean rebootRequired = false;
    private String[] notifiers = new String[0];
    private String value;
    private Boolean controlNodeOnly = false; // control node only property flag
    private Boolean siteSpecific = false; // site specific attribute

    public PropertyMetadata() {
    }

    public void setLabel(String label) {
        this.label = label;
    }

    @XmlElement(name = "label")
    @JsonProperty("label")
    public String getLabel() {
        return this.label;
    }

    public void setDescription(String description) {
        this.description = description;
    }

    @XmlElement(name = "description")
    @JsonProperty("description")
    public String getDescription() {
        return this.description;
    }

    public void setType(String type) {
        this.type = type;
    }

    @XmlElement(name = "type")
    @JsonProperty("type")
    public String getType() {
        return this.type;
    }

    public void setTag(String tag) {
        this.tag = tag;
    }

    @XmlElement(name = "tag")
    @JsonProperty("tag")
    public String getTag() {
        return this.tag;
    }

    public void setMinLen(Integer minLen) {
        this.minLen = minLen;
    }

    @XmlElement(name = "minLen")
    @JsonProperty("minLen")
    public Integer getMinLen() {
        return this.minLen;
    }

    public void setMaxLen(Integer maxLen) {
        this.maxLen = maxLen;
    }

    @XmlElement(name = "maxLen")
    @JsonProperty("maxLen")
    public Integer getMaxLen() {
        return this.maxLen;
    }

    public void setAllowedValues(String[] allowedValues) {
        // Defensive copy; null is normalized to an empty array
        this.allowedValues = (allowedValues == null) ? new String[0] : allowedValues.clone();
    }

    @XmlElement(name = "allowedValues")
    @JsonProperty("allowedValues")
    public String[] getAllowedValues() {
        // Defensive copy so callers cannot mutate internal state
        return Arrays.copyOf(allowedValues, allowedValues.length);
    }

    public void setUserConfigurable(Boolean userConfigurable) {
        this.userConfigurable = userConfigurable;
    }

    @XmlElement(name = "userConfigurable")
    @JsonProperty("userConfigurable")
    public Boolean getUserConfigurable() {
        return this.userConfigurable;
    }

    public void setUserMutable(Boolean userMutable) {
        this.userMutable = userMutable;
    }

    @XmlElement(name = "userMutable")
    @JsonProperty("userMutable")
    public Boolean getUserMutable() {
        // Null is reported as false
        return userMutable != null && userMutable;
    }

    public void setAdvanced(Boolean advanced) {
        this.advanced = advanced;
    }

    @XmlElement(name = "advanced")
    @JsonProperty("advanced")
    public Boolean getAdvanced() {
        return this.advanced;
    }

    public void setHidden(Boolean hidden) {
        this.hidden = hidden;
    }

    @XmlElement(name = "hidden")
    @JsonProperty("hidden")
    public Boolean getHidden() {
        return this.hidden;
    }

    public void setReconfigRequired(Boolean reconfigRequired) {
        this.reconfigRequired = reconfigRequired;
    }

    @XmlElement(name = "reconfigRequired")
    @JsonProperty("reconfigRequired")
    public Boolean getReconfigRequired() {
        return this.reconfigRequired;
    }

    public void setRebootRequired(Boolean rebootRequired) {
        this.rebootRequired = rebootRequired;
    }

    @XmlElement(name = "rebootRequired")
    @JsonProperty("rebootRequired")
    public Boolean getRebootRequired() {
        return this.rebootRequired;
    }

    public void setNotifiers(String[] notifiers) {
        // Defensive copy; null is normalized to an empty array
        this.notifiers = (notifiers == null) ? new String[0] : notifiers.clone();
    }

    @XmlElement(name = "notifiers")
    @JsonProperty("notifiers")
    public String[] getNotifiers() {
        // Defensive copy so callers cannot mutate internal state
        return Arrays.copyOf(notifiers, notifiers.length);
    }

    public void setValue(String value) {
        this.value = value;
    }

    @XmlElement(name = "value")
    @JsonProperty("value")
    public String getValue() {
        return this.value;
    }

    public void setControlNodeOnly(Boolean controlNodeOnly) {
        this.controlNodeOnly = controlNodeOnly;
    }

    @XmlElement(name = "controlNodeOnly")
    @JsonProperty("controlNodeOnly")
    public Boolean getControlNodeOnly() {
        return this.controlNodeOnly;
    }

    @XmlElement(name = "siteSpecific")
    @JsonProperty("siteSpecific")
    public Boolean getSiteSpecific() {
        return this.siteSpecific;
    }

    public void setSiteSpecific(Boolean siteSpecific) {
        this.siteSpecific = siteSpecific;
    }

    /**
     * Get default value.
     *
     * @return default value from metadata. Null when property is non-usermutable.
     */
    @JsonIgnore
    public String getDefaultValue() {
        // Only user-mutable properties expose a default
        return Boolean.TRUE.equals(userMutable) ? getDefaultValueMetaData() : null;
    }

    /**
     * Get default value meta data.
     *
     * For all types: if the configured value is not null, return it.
     *
     * For IP address: addresses are only set during deployment (user input or
     * default), so no default is synthesized here.
     *
     * For String (and URL, Email, License, Hostname, Iplist):
     *   - minLen null or 0: default is the empty string
     *   - minLen greater than 0: the value must have been set during deployment
     * For UINT64, UINT32 and PERCENT: default is "0".
     *
     * @return default value string. Null when no default value is needed.
     */
    @JsonIgnore
    public String getDefaultValueMetaData() {
        // Explicitly configured value always wins
        if (value != null) {
            return value;
        }
        // IP addresses are configured during deployment; never defaulted here
        if (IPADDR.equals(type)) {
            return null;
        }
        if (STRING.equals(type) || URL.equals(type) || EMAIL.equals(type) || LICENSE.equals(type)
                || HOSTNAME.equals(type) || IPLIST.equals(type)) {
            // No minimum length means an empty string is an acceptable default;
            // otherwise the value must have been provided at deployment time
            boolean noMinimum = (minLen == null || minLen == 0);
            return noMinimum ? "" : null;
        }
        if (UINT64.equals(type) || UINT32.equals(type) || PERCENT.equals(type)) {
            // Numeric types default to zero
            return "0";
        }
        return null;
    }
}
| |
package org.drools.chance.reteoo.nodes;
import org.drools.base.DroolsQuery;
import org.drools.chance.common.ChanceStrategyFactory;
import org.drools.chance.degree.Degree;
import org.drools.chance.degree.simple.SimpleDegree;
import org.drools.chance.evaluation.CompositeEvaluation;
import org.drools.chance.evaluation.Evaluation;
import org.drools.chance.evaluation.MockEvaluation;
import org.drools.chance.evaluation.SimpleEvaluationImpl;
import org.drools.chance.reteoo.ChanceFactHandle;
import org.drools.chance.reteoo.tuples.ImperfectFromNodeLeftTuple;
import org.drools.chance.reteoo.tuples.ImperfectRightTuple;
import org.drools.chance.reteoo.tuples.ImperfectTuple;
import org.drools.chance.rule.constraint.ImperfectAlphaConstraint;
import org.drools.chance.rule.constraint.ImperfectBetaConstraint;
import org.drools.chance.rule.constraint.ImperfectConstraint;
import org.drools.chance.rule.constraint.OperatorConstraint;
import org.drools.chance.rule.constraint.core.connectives.ConnectiveCore;
import org.drools.common.BaseNode;
import org.drools.common.BetaConstraints;
import org.drools.common.InternalFactHandle;
import org.drools.common.InternalWorkingMemory;
import org.drools.core.util.Iterator;
import org.drools.core.util.LinkedList;
import org.drools.core.util.LinkedListEntry;
import org.drools.reteoo.*;
import org.drools.reteoo.builder.BuildContext;
import org.drools.rule.From;
import org.drools.spi.AlphaNodeFieldConstraint;
import org.drools.spi.BetaNodeFieldConstraint;
import org.drools.spi.DataProvider;
import org.drools.spi.PropagationContext;
import java.util.LinkedHashMap;
import java.util.Map;
import java.util.Stack;
/**
 * A {@code FromNode} variant that threads imperfect (degree-valued)
 * evaluations through the tuples it creates, combining per-constraint
 * degrees with a conjunctive connective before propagation.
 */
public class ChanceFromNode extends FromNode {

    // Conjunctive connective used to merge all constraint evaluations.
    private ConnectiveCore and;

    public ChanceFromNode( int id, DataProvider dataProvider, LeftTupleSource tupleSource, AlphaNodeFieldConstraint[] alphaNodeFieldConstraints, BetaConstraints betaConstraints, boolean tupleMemoryEnabled, BuildContext context, From from) {
        super( id, dataProvider, tupleSource, alphaNodeFieldConstraints, betaConstraints, tupleMemoryEnabled, context, from );
        //TODO FIXME consume config!
        and = ChanceStrategyFactory.getConnectiveFactory( null, null ).getAnd();
    }

    /**
     * Creates a left tuple seeded with the evaluation cached on the fact
     * handle for the parent object source.
     */
    @Override
    public LeftTuple createLeftTuple(InternalFactHandle factHandle, LeftTupleSink sink, boolean leftTupleMemoryEnabled) {
        ImperfectTuple tup = new ImperfectFromNodeLeftTuple(factHandle, sink, leftTupleMemoryEnabled);
        int src = ((LeftInputAdapterNode) this.getLeftTupleSource()).getParentObjectSource().getId();
        tup.addEvaluation( ((ChanceFactHandle) factHandle).getCachedEvaluation( src ) );
        return (LeftTuple) tup;
    }

    /** Creates a left tuple inheriting the parent tuple's evaluation. */
    @Override
    public LeftTuple createLeftTuple(LeftTuple leftTuple, LeftTupleSink sink, boolean leftTupleMemoryEnabled) {
        ImperfectTuple tup = new ImperfectFromNodeLeftTuple(leftTuple, sink, leftTupleMemoryEnabled);
        tup.addEvaluation( ((ImperfectTuple) leftTuple).getEvaluation() );
        return (LeftTuple) tup;
    }

    /** Creates a joined left tuple carrying both parents' evaluations. */
    @Override
    public LeftTuple createLeftTuple(LeftTuple leftTuple, RightTuple rightTuple, LeftTupleSink sink) {
        ImperfectTuple tup = new ImperfectFromNodeLeftTuple(leftTuple, rightTuple, sink);
        tup.addEvaluation( ((ImperfectTuple) leftTuple).getEvaluation() );
        tup.addEvaluation( ((ImperfectTuple) rightTuple).getEvaluation() );
        return (LeftTuple) tup;
    }

    /** Creates a joined left tuple (with child links) carrying both parents' evaluations. */
    @Override
    public LeftTuple createLeftTuple(LeftTuple leftTuple, RightTuple rightTuple, LeftTuple currentLeftChild, LeftTuple currentRightChild, LeftTupleSink sink, boolean leftTupleMemoryEnabled) {
        ImperfectTuple tup = new ImperfectFromNodeLeftTuple(leftTuple, rightTuple, currentLeftChild, currentRightChild, sink, leftTupleMemoryEnabled);
        tup.addEvaluation( ((ImperfectTuple) leftTuple).getEvaluation() );
        tup.addEvaluation( ((ImperfectTuple) rightTuple).getEvaluation() );
        return (LeftTuple) tup;
    }

    protected RightTuple newRightTuple(InternalFactHandle handle, Object o) {
        // NOTE(review): the second constructor argument is deliberately null
        // here and the Object parameter is ignored -- confirm this matches
        // ImperfectRightTuple's expectations.
        return new ImperfectRightTuple( handle,
                null );
    }

    @Override
    public void modifyLeftTuple(LeftTuple leftTuple, PropagationContext context, InternalWorkingMemory workingMemory) {
        super.modifyLeftTuple(leftTuple, context, workingMemory);
    }

    /**
     * Evaluates all alpha and beta constraints for the pair, records one
     * {@link Evaluation} per constraint (composed with the AND connective on
     * the right tuple), and then asserts, modifies or retracts the match
     * depending on whether every boolean-projected degree passed.
     */
    protected void checkConstraintsAndPropagate( final LeftTuple leftTuple,
                                                 final RightTuple rightTuple,
                                                 final PropagationContext context,
                                                 final InternalWorkingMemory workingMemory,
                                                 final FromMemory memory,
                                                 final boolean useLeftMemory ) {

        Stack<Evaluation> results = new Stack<Evaluation>();
        results.push( new SimpleEvaluationImpl( this.id, SimpleDegree.TRUE ) ); // emulate OTN

        boolean canPropagate = true;

        if ( this.alphaConstraints != null ) {
            // First alpha node filters
            for ( int i = 0, length = this.alphaConstraints.length; i < length; i++ ) {
                AlphaNodeFieldConstraint constraint = this.alphaConstraints[i];
                ChanceFactHandle factHandle = (ChanceFactHandle) rightTuple.getFactHandle();
                if ( constraint instanceof ImperfectConstraint) {
                    Degree degree;
                    if ( constraint instanceof OperatorConstraint ) {
                        // Operator: pop its arity's worth of child evaluations and compose
                        OperatorConstraint opc = (OperatorConstraint) constraint;
                        int n = opc.getArity();
                        Evaluation[] args = new Evaluation[ n ];
                        for ( int j = 0; j < n; j++ ) {
                            if ( results.isEmpty() ) {
                                // Underflow: pad with a neutral (TRUE) evaluation
                                args[j] = new MockEvaluation( id, SimpleDegree.TRUE );
                            } else {
                                args[j] = results.pop();
                            }
                        }
                        degree = opc.getConnective().eval( args );
                        results.push( new CompositeEvaluation( getId(), degree, opc.getConnective(), args ) );
                    } else {
                        ImperfectAlphaConstraint alpha = (ImperfectAlphaConstraint) constraint;
                        degree = alpha.match( (InternalFactHandle) factHandle,
                                              workingMemory,
                                              memory.alphaContexts[i] );
                        results.push( new SimpleEvaluationImpl( getId(), constraint.toString(), degree, alpha.getLabel() ) );
                    }
                    canPropagate = canPropagate && degree.toBoolean();
                } else {
                    // Crisp constraint: record it as a boolean-literal degree
                    boolean allowed = constraint.isAllowed( (InternalFactHandle) factHandle, workingMemory, memory.alphaContexts[i] );
                    results.push( new SimpleEvaluationImpl( getId(), constraint.toString(), SimpleDegree.fromBooleanLiteral(allowed) ) );
                    canPropagate = canPropagate && allowed;
                }
            }
        }

        // Then beta constraints.
        // Fix: the original loop incremented j both in the for-header and at the
        // bottom of the body, so every other beta constraint (and its matching
        // betaMemory context slot) was silently skipped. The stray increment,
        // together with an unused ccounter local, has been removed.
        BetaNodeFieldConstraint[] constraintList = this.betaConstraints.getConstraints();
        for ( int j = 0; j < constraintList.length; j++ ) {
            BetaNodeFieldConstraint con = constraintList[j];
            if ( con instanceof ImperfectBetaConstraint) {
                if ( con instanceof OperatorConstraint ) {
                    OperatorConstraint opc = (OperatorConstraint) con;
                    int n = opc.getArity();
                    Evaluation[] args = new Evaluation[ n ];
                    for ( int k = 0; k < n; k++ ) {
                        args[k] = results.pop();
                    }
                    // NOTE(review): unlike the alpha branch, the composed degree
                    // is not folded into canPropagate here -- confirm intent.
                    results.push( new CompositeEvaluation( getId(), opc.getConnective().eval( args ), opc.getConnective(), args ) );
                } else {
                    ImperfectBetaConstraint ibc = (ImperfectBetaConstraint) con;
                    Degree degree = ibc.matchCachedLeft( memory.betaMemory.getContext()[j], rightTuple.getFactHandle() );
                    results.push( new SimpleEvaluationImpl( ibc.getNodeId(), con.toString(), degree, ibc.getLabel() ) );
                    canPropagate = canPropagate && degree.toBoolean();
                }
            } else {
                // Crisp beta constraint: no evaluation is recorded, only the verdict
                boolean allowed = con.isAllowedCachedRight( leftTuple, memory.betaMemory.getContext()[j] );
                if ( ! allowed ) {
                    canPropagate = false;
                }
            }
        }

        //TODO FIXME Support inner operators
        if ( results.size() > 0 ) {
            Evaluation[] args = results.toArray( new Evaluation[ results.size() ] );
            Evaluation composite = new CompositeEvaluation( this.getId(), and.eval( args ), and, args );
            ((ImperfectRightTuple) rightTuple).addEvaluation( composite );
        } else {
            ((ImperfectRightTuple) rightTuple).addEvaluation( new SimpleEvaluationImpl( this.id, SimpleDegree.TRUE ) );
        }

        if ( canPropagate ) {
            if ( rightTuple.firstChild == null ) {
                // this is a new match, so propagate as assert
                this.sink.propagateAssertLeftTuple( leftTuple,
                                                    rightTuple,
                                                    null,
                                                    null,
                                                    context,
                                                    workingMemory,
                                                    useLeftMemory );
            } else {
                // this is an existing match, so propagate as a modify
                this.sink.propagateModifyChildLeftTuple( rightTuple.firstChild,
                                                         leftTuple,
                                                         context,
                                                         workingMemory,
                                                         useLeftMemory );
            }
        } else {
            retractMatch( leftTuple,
                          rightTuple,
                          context,
                          workingMemory );
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.simpleworkflow.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Paginated representation of a workflow history for a workflow execution. This is the up to date, complete and
* authoritative record of the events related to all tasks and events in the life of the workflow execution.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/swf-2012-01-25/History" target="_top">AWS API Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class History extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {
/**
* <p>
* The list of history events.
* </p>
*/
private java.util.List<HistoryEvent> events;
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve
* the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all
* other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
* </p>
*/
private String nextPageToken;
/**
* <p>
* The list of history events.
* </p>
*
* @return The list of history events.
*/
public java.util.List<HistoryEvent> getEvents() {
    // Returns the internal list directly (no defensive copy), matching the SDK bean style
    return this.events;
}
/**
* <p>
* The list of history events.
* </p>
*
* @param events
* The list of history events.
*/
public void setEvents(java.util.Collection<HistoryEvent> events) {
    // Null clears the list; otherwise store a fresh copy of the supplied collection
    this.events = (events == null) ? null : new java.util.ArrayList<HistoryEvent>(events);
}
/**
* <p>
* The list of history events.
* </p>
* <p>
* <b>NOTE:</b> This method appends the values to the existing list (if any). Use
* {@link #setEvents(java.util.Collection)} or {@link #withEvents(java.util.Collection)} if you want to override the
* existing values.
* </p>
*
* @param events
* The list of history events.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public History withEvents(HistoryEvent... events) {
    // Append to the existing list, lazily creating it on first use
    if (this.events == null) {
        this.events = new java.util.ArrayList<HistoryEvent>(events.length);
    }
    java.util.Collections.addAll(this.events, events);
    return this;
}
/**
* <p>
* The list of history events.
* </p>
*
* @param events
* The list of history events.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public History withEvents(java.util.Collection<HistoryEvent> events) {
setEvents(events);
return this;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve
* the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all
* other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
* </p>
*
* @param nextPageToken
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To
* retrieve the next page of results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
*/
public void setNextPageToken(String nextPageToken) {
this.nextPageToken = nextPageToken;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve
* the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all
* other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
* </p>
*
* @return If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To
* retrieve the next page of results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
*/
public String getNextPageToken() {
return this.nextPageToken;
}
/**
* <p>
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To retrieve
* the next page of results, make the call again using the returned token in <code>nextPageToken</code>. Keep all
* other arguments unchanged.
* </p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
* </p>
*
* @param nextPageToken
* If a <code>NextPageToken</code> was returned by a previous call, there are more results available. To
* retrieve the next page of results, make the call again using the returned token in
* <code>nextPageToken</code>. Keep all other arguments unchanged.</p>
* <p>
* The configured <code>maximumPageSize</code> determines how many results can be returned in a single call.
* @return Returns a reference to this object so that method calls can be chained together.
*/
public History withNextPageToken(String nextPageToken) {
setNextPageToken(nextPageToken);
return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getEvents() != null)
sb.append("Events: ").append(getEvents()).append(",");
if (getNextPageToken() != null)
sb.append("NextPageToken: ").append(getNextPageToken());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof History == false)
return false;
History other = (History) obj;
if (other.getEvents() == null ^ this.getEvents() == null)
return false;
if (other.getEvents() != null && other.getEvents().equals(this.getEvents()) == false)
return false;
if (other.getNextPageToken() == null ^ this.getNextPageToken() == null)
return false;
if (other.getNextPageToken() != null && other.getNextPageToken().equals(this.getNextPageToken()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getEvents() == null) ? 0 : getEvents().hashCode());
hashCode = prime * hashCode + ((getNextPageToken() == null) ? 0 : getNextPageToken().hashCode());
return hashCode;
}
@Override
public History clone() {
try {
return (History) super.clone();
} catch (CloneNotSupportedException e) {
throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() " + "even though we're Cloneable!", e);
}
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Mockito.doNothing;
import static org.mockito.Mockito.spy;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import com.google.common.base.Supplier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileAlreadyExistsException;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
import org.apache.hadoop.hdfs.protocol.HdfsConstants;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.namenode.FSEditLog;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.LeaseManager;
import org.apache.hadoop.hdfs.server.namenode.NameNodeAdapter;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.Mockito;
public class TestLeaseRecovery2 {
  public static final Log LOG = LogFactory.getLog(TestLeaseRecovery2.class);
  // Instance initializer: raise log levels for the components involved in
  // lease recovery so failures are easier to diagnose from the test output.
  {
    GenericTestUtils.setLogLevel(DataNode.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(LeaseManager.LOG, Level.ALL);
    GenericTestUtils.setLogLevel(FSNamesystem.LOG, Level.ALL);
  }
  // Small block size so that a FILE_SIZE write spans two blocks.
  static final private long BLOCK_SIZE = 1024;
  static final private int FILE_SIZE = (int)BLOCK_SIZE*2;
  static final short REPLICATION_NUM = (short)3;
  // Shared source buffer for all file writes (contents are never inspected,
  // only lengths are).
  static final byte[] buffer = new byte[FILE_SIZE];
  // Identity used to access the cluster as a second, unrelated client.
  static private final String fakeUsername = "fakeUser1";
  static private final String fakeGroup = "supergroup";
  static private MiniDFSCluster cluster;
  static private DistributedFileSystem dfs;
  final static private Configuration conf = new HdfsConfiguration();
  final static private int BUF_SIZE = conf.getInt(
      CommonConfigurationKeys.IO_FILE_BUFFER_SIZE_KEY, 4096);
  // Lease periods (ms): SHORT triggers expiry almost immediately, LONG (1h)
  // effectively disables it for the duration of a test.
  final static private long SHORT_LEASE_PERIOD = 1000L;
  final static private long LONG_LEASE_PERIOD = 60*60*SHORT_LEASE_PERIOD;
  /** start a dfs cluster
   *
   * Brings up a fresh 5-datanode MiniDFSCluster before each test.
   * checkExitOnShutdown is disabled; NOTE(review): presumably because some
   * tests restart the NameNode mid-run -- confirm.
   *
   * @throws IOException
   */
  @Before
  public void startUp() throws IOException {
    conf.setLong(DFSConfigKeys.DFS_BLOCK_SIZE_KEY, BLOCK_SIZE);
    conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
    cluster = new MiniDFSCluster.Builder(conf)
        .numDataNodes(5)
        .checkExitOnShutdown(false)
        .build();
    cluster.waitActive();
    dfs = cluster.getFileSystem();
  }
  /**
   * stop the cluster: close the client filesystem first, then shut the
   * cluster down.
   * @throws IOException
   */
  @After
  public void tearDown() throws IOException {
    if (cluster != null) {
      IOUtils.closeStream(dfs);
      cluster.shutdown();
    }
  }
  /**
   * Test the NameNode's revoke lease on current lease holder function.
   *
   * Exercises three recovery paths: (1) soft-limit expiry triggered by a
   * second client's create attempt, (2) explicit recoverLease from a
   * different client, (3) explicit recoverLease from the same client while
   * that client keeps another file open.
   * @throws Exception
   */
  @Test
  public void testImmediateRecoveryOfLease() throws Exception {
    //create a file
    // write bytes into the file.
    byte [] actual = new byte[FILE_SIZE];
    int size = AppendTestUtil.nextInt(FILE_SIZE);
    Path filepath = createFile("/immediateRecoverLease-shortlease", size, true);
    // set the soft limit to be 1 second so that the
    // namenode triggers lease recovery on next attempt to write-for-open.
    cluster.setLeasePeriod(SHORT_LEASE_PERIOD, LONG_LEASE_PERIOD);
    recoverLeaseUsingCreate(filepath);
    verifyFile(dfs, filepath, actual, size);
    //test recoverLease
    // set the soft limit to be 1 hour but recoverLease should
    // close the file immediately
    cluster.setLeasePeriod(LONG_LEASE_PERIOD, LONG_LEASE_PERIOD);
    size = AppendTestUtil.nextInt(FILE_SIZE);
    filepath = createFile("/immediateRecoverLease-longlease", size, false);
    // test recoverLease from a different client
    recoverLease(filepath, null);
    verifyFile(dfs, filepath, actual, size);
    // test recoverlease from the same client
    size = AppendTestUtil.nextInt(FILE_SIZE);
    filepath = createFile("/immediateRecoverLease-sameclient", size, false);
    // create another file using the same client
    Path filepath1 = new Path(filepath.toString() + AppendTestUtil.nextInt());
    FSDataOutputStream stm = dfs.create(filepath1, true, BUF_SIZE,
        REPLICATION_NUM, BLOCK_SIZE);
    // recover the first file
    recoverLease(filepath, dfs);
    verifyFile(dfs, filepath, actual, size);
    // continue to write to the second file: recovering the lease on the first
    // file must not disturb the same client's lease on the second file.
    stm.write(buffer, 0, size);
    stm.close();
    verifyFile(dfs, filepath1, actual, size);
  }
  /**
   * Verifies that a different user can trigger lease recovery on an empty
   * open file (via an append attempt after the soft limit has expired).
   * @throws Exception
   */
  @Test
  public void testLeaseRecoverByAnotherUser() throws Exception {
    byte [] actual = new byte[FILE_SIZE];
    cluster.setLeasePeriod(SHORT_LEASE_PERIOD, LONG_LEASE_PERIOD);
    Path filepath = createFile("/immediateRecoverLease-x", 0, true);
    recoverLeaseUsingCreate2(filepath);
    verifyFile(dfs, filepath, actual, 0);
  }
  /**
   * Creates a file, writes {@code size} bytes, hflushes, and leaves the
   * output stream OPEN so the lease stays held.
   *
   * @param filestr path of the file to create
   * @param size number of bytes to write before hflush
   * @param triggerLeaseRenewerInterrupt if true, kills the client's lease
   *        renewal thread so the lease will eventually expire
   * @return the path of the created (still-open) file
   */
  private Path createFile(final String filestr, final int size,
      final boolean triggerLeaseRenewerInterrupt)
      throws IOException, InterruptedException {
    AppendTestUtil.LOG.info("filestr=" + filestr);
    Path filepath = new Path(filestr);
    FSDataOutputStream stm = dfs.create(filepath, true, BUF_SIZE,
        REPLICATION_NUM, BLOCK_SIZE);
    assertTrue(dfs.dfs.exists(filestr));
    AppendTestUtil.LOG.info("size=" + size);
    stm.write(buffer, 0, size);
    // hflush file
    AppendTestUtil.LOG.info("hflush");
    stm.hflush();
    if (triggerLeaseRenewerInterrupt) {
      AppendTestUtil.LOG.info("leasechecker.interruptAndJoin()");
      dfs.dfs.getLeaseRenewer().interruptAndJoin();
    }
    return filepath;
  }
  /**
   * Calls recoverLease repeatedly (5s between attempts) until it reports the
   * file is closed. If {@code dfs} is null, the call is made as the fake
   * "other" user; note the parameter shadows the class-level {@code dfs}.
   */
  private void recoverLease(Path filepath, DistributedFileSystem dfs)
      throws Exception {
    if (dfs == null) {
      dfs = (DistributedFileSystem)getFSAsAnotherUser(conf);
    }
    while (!dfs.recoverLease(filepath)) {
      AppendTestUtil.LOG.info("sleep " + 5000 + "ms");
      Thread.sleep(5000);
    }
  }
  /**
   * Returns a FileSystem handle authenticated as the fake test user/group.
   */
  private FileSystem getFSAsAnotherUser(final Configuration c)
      throws IOException, InterruptedException {
    return FileSystem.get(FileSystem.getDefaultUri(c), c,
        UserGroupInformation.createUserForTesting(fakeUsername,
            new String [] {fakeGroup}).getUserName());
  }
  /**
   * Triggers soft-limit lease recovery by having a second user attempt to
   * create the already-existing open file. Either FileAlreadyExistsException
   * or AlreadyBeingCreatedException counts as success; other IOExceptions
   * are retried up to 10 times (5s apart) before failing the test.
   */
  private void recoverLeaseUsingCreate(Path filepath)
      throws IOException, InterruptedException {
    FileSystem dfs2 = getFSAsAnotherUser(conf);
    for(int i = 0; i < 10; i++) {
      AppendTestUtil.LOG.info("i=" + i);
      try {
        dfs2.create(filepath, false, BUF_SIZE, (short)1, BLOCK_SIZE);
        fail("Creation of an existing file should never succeed.");
      } catch(FileAlreadyExistsException e) {
        return; // expected
      } catch(AlreadyBeingCreatedException e) {
        return; // expected
      } catch(IOException ioe) {
        AppendTestUtil.LOG.warn("UNEXPECTED ", ioe);
        AppendTestUtil.LOG.info("sleep " + 5000 + "ms");
        try {Thread.sleep(5000);} catch (InterruptedException e) {}
      }
    }
    fail("recoverLeaseUsingCreate failed");
  }
  /**
   * Triggers lease recovery on {@code filepath} by appending to it as a
   * different user after the soft limit has expired.
   *
   * NOTE(review): {@code size} is computed but never used, and neither
   * {@code stm} nor the stream returned by {@code append} is closed --
   * looks intentional for a lease test, but confirm.
   */
  private void recoverLeaseUsingCreate2(Path filepath)
      throws Exception {
    FileSystem dfs2 = getFSAsAnotherUser(conf);
    int size = AppendTestUtil.nextInt(FILE_SIZE);
    DistributedFileSystem dfsx = (DistributedFileSystem) dfs2;
    //create file using dfsx
    Path filepath2 = new Path("/immediateRecoverLease-x2");
    FSDataOutputStream stm = dfsx.create(filepath2, true, BUF_SIZE,
        REPLICATION_NUM, BLOCK_SIZE);
    assertTrue(dfsx.dfs.exists("/immediateRecoverLease-x2"));
    // wait past the 1s soft limit so the append below triggers recovery
    try {Thread.sleep(10000);} catch (InterruptedException e) {}
    dfsx.append(filepath);
  }
  /**
   * Asserts that the file length equals {@code size} and that {@code size}
   * bytes can actually be read back from the datanodes.
   */
  private void verifyFile(FileSystem dfs, Path filepath, byte[] actual,
      int size) throws IOException {
    AppendTestUtil.LOG.info("Lease for file " + filepath + " is recovered. "
        + "Validating its contents now...");
    // verify that file-size matches
    assertTrue("File should be " + size + " bytes, but is actually " +
               " found to be " + dfs.getFileStatus(filepath).getLen() +
               " bytes",
               dfs.getFileStatus(filepath).getLen() == size);
    // verify that there is enough data to read.
    System.out.println("File size is good. Now validating sizes from datanodes...");
    FSDataInputStream stmin = dfs.open(filepath);
    stmin.readFully(0, actual, 0, size);
    stmin.close();
  }
  /**
   * This test makes the client does not renew its lease and also
   * set the hard lease expiration period to be short 1s. Thus triggering
   * lease expiration to happen while the client is still alive.
   *
   * The test makes sure that the lease recovery completes and the client
   * fails if it continues to write to the file.
   *
   * @throws Exception
   */
  @Test
  public void testHardLeaseRecovery() throws Exception {
    //create a file
    String filestr = "/hardLeaseRecovery";
    AppendTestUtil.LOG.info("filestr=" + filestr);
    Path filepath = new Path(filestr);
    FSDataOutputStream stm = dfs.create(filepath, true,
        BUF_SIZE, REPLICATION_NUM, BLOCK_SIZE);
    assertTrue(dfs.dfs.exists(filestr));
    // write bytes into the file.
    int size = AppendTestUtil.nextInt(FILE_SIZE);
    AppendTestUtil.LOG.info("size=" + size);
    stm.write(buffer, 0, size);
    // hflush file
    AppendTestUtil.LOG.info("hflush");
    stm.hflush();
    // kill the lease renewal thread
    AppendTestUtil.LOG.info("leasechecker.interruptAndJoin()");
    dfs.dfs.getLeaseRenewer().interruptAndJoin();
    // set the hard limit to be 1 second
    cluster.setLeasePeriod(LONG_LEASE_PERIOD, SHORT_LEASE_PERIOD);
    // wait for lease recovery to complete: poll until the last block is no
    // longer under construction
    LocatedBlocks locatedBlocks;
    do {
      Thread.sleep(SHORT_LEASE_PERIOD);
      locatedBlocks = dfs.dfs.getLocatedBlocks(filestr, 0L, size);
    } while (locatedBlocks.isUnderConstruction());
    assertEquals(size, locatedBlocks.getFileLength());
    // make sure that the writer thread gets killed: writing through the old
    // stream must now fail because the lease has been taken away
    try {
      stm.write('b');
      stm.close();
      fail("Writer thread should have been killed");
    } catch (IOException e) {
      e.printStackTrace();
    }
    // verify data
    AppendTestUtil.LOG.info(
        "File size is good. Now validating sizes from datanodes...");
    AppendTestUtil.checkFullFile(dfs, filepath, size, buffer, filestr);
  }
  /**
   * This test makes the client does not renew its lease and also
   * set the soft lease expiration period to be short 1s. Thus triggering
   * soft lease expiration to happen immediately by having another client
   * trying to create the same file.
   *
   * The test makes sure that the lease recovery completes.
   *
   * @throws Exception
   */
  @Test
  public void testSoftLeaseRecovery() throws Exception {
    Map<String, String []> u2g_map = new HashMap<String, String []>(1);
    u2g_map.put(fakeUsername, new String[] {fakeGroup});
    DFSTestUtil.updateConfWithFakeGroupMapping(conf, u2g_map);
    // Reset default lease periods
    cluster.setLeasePeriod(HdfsConstants.LEASE_SOFTLIMIT_PERIOD,
        HdfsConstants.LEASE_HARDLIMIT_PERIOD);
    //create a file
    // create a random file name
    String filestr = "/foo" + AppendTestUtil.nextInt();
    AppendTestUtil.LOG.info("filestr=" + filestr);
    Path filepath = new Path(filestr);
    FSDataOutputStream stm = dfs.create(filepath, true,
        BUF_SIZE, REPLICATION_NUM, BLOCK_SIZE);
    assertTrue(dfs.dfs.exists(filestr));
    // write random number of bytes into it.
    int size = AppendTestUtil.nextInt(FILE_SIZE);
    AppendTestUtil.LOG.info("size=" + size);
    stm.write(buffer, 0, size);
    // hflush file
    AppendTestUtil.LOG.info("hflush");
    stm.hflush();
    // kill the lease renewal thread so the soft limit can expire
    AppendTestUtil.LOG.info("leasechecker.interruptAndJoin()");
    dfs.dfs.getLeaseRenewer().interruptAndJoin();
    // set the soft limit to be 1 second so that the
    // namenode triggers lease recovery on next attempt to write-for-open.
    cluster.setLeasePeriod(SHORT_LEASE_PERIOD, LONG_LEASE_PERIOD);
    // try to re-open the file before closing the previous handle. This
    // should fail but will trigger lease recovery.
    {
      UserGroupInformation ugi =
        UserGroupInformation.createUserForTesting(fakeUsername,
            new String [] { fakeGroup});
      FileSystem dfs2 = DFSTestUtil.getFileSystemAs(ugi, conf);
      boolean done = false;
      for(int i = 0; i < 10 && !done; i++) {
        AppendTestUtil.LOG.info("i=" + i);
        try {
          dfs2.create(filepath, false, BUF_SIZE, REPLICATION_NUM, BLOCK_SIZE);
          fail("Creation of an existing file should never succeed.");
        } catch (FileAlreadyExistsException ex) {
          // recovery finished: the file now exists as a closed file
          done = true;
        } catch (AlreadyBeingCreatedException ex) {
          // recovery still in progress; retry
          AppendTestUtil.LOG.info("GOOD! got " + ex.getMessage());
        } catch (IOException ioe) {
          AppendTestUtil.LOG.warn("UNEXPECTED IOException", ioe);
        }
        if (!done) {
          AppendTestUtil.LOG.info("sleep " + 5000 + "ms");
          try {Thread.sleep(5000);} catch (InterruptedException e) {}
        }
      }
      assertTrue(done);
    }
    AppendTestUtil.LOG.info("Lease for file " + filepath + " is recovered. "
        + "Validating its contents now...");
    // verify that file-size matches
    long fileSize = dfs.getFileStatus(filepath).getLen();
    assertTrue("File should be " + size + " bytes, but is actually " +
        " found to be " + fileSize + " bytes", fileSize == size);
    // verify data
    AppendTestUtil.LOG.info("File size is good. " +
                            "Now validating data and sizes from datanodes...");
    AppendTestUtil.checkFullFile(dfs, filepath, size, buffer, filestr);
  }
  /**
   * This test makes it so the client does not renew its lease and also
   * set the hard lease expiration period to be short, thus triggering
   * lease expiration to happen while the client is still alive. The test
   * also causes the NN to restart after lease recovery has begun, but before
   * the DNs have completed the blocks. This test verifies that when the NN
   * comes back up, the client no longer holds the lease.
   *
   * The test makes sure that the lease recovery completes and the client
   * fails if it continues to write to the file, even after NN restart.
   *
   * @throws Exception
   */
  @Test(timeout = 30000)
  public void testHardLeaseRecoveryAfterNameNodeRestart() throws Exception {
    hardLeaseRecoveryRestartHelper(false, -1);
  }
  // Same as above but with a fixed file size (a size that previously
  // exposed a boundary problem; NOTE(review): 1535 = BLOCK_SIZE + 511,
  // i.e. a non-block-aligned length -- confirm the original motivation).
  @Test(timeout = 30000)
  public void testHardLeaseRecoveryAfterNameNodeRestart2() throws Exception {
    hardLeaseRecoveryRestartHelper(false, 1535);
  }
  // Same scenario, but the file is renamed while still open before the
  // lease recovery/restart sequence.
  @Test(timeout = 30000)
  public void testHardLeaseRecoveryWithRenameAfterNameNodeRestart()
      throws Exception {
    hardLeaseRecoveryRestartHelper(true, -1);
  }
  /**
   * Shared body of the hard-lease-recovery-across-NN-restart tests.
   *
   * @param doRename whether to rename the open file before recovery starts
   * @param size bytes to write; a negative value means "pick a random size"
   */
  public void hardLeaseRecoveryRestartHelper(boolean doRename, int size)
      throws Exception {
    if (size < 0) {
      size = AppendTestUtil.nextInt(FILE_SIZE + 1);
    }
    //create a file
    String fileStr = "/hardLeaseRecovery";
    AppendTestUtil.LOG.info("filestr=" + fileStr);
    Path filePath = new Path(fileStr);
    FSDataOutputStream stm = dfs.create(filePath, true,
        BUF_SIZE, REPLICATION_NUM, BLOCK_SIZE);
    assertTrue(dfs.dfs.exists(fileStr));
    // write bytes into the file.
    AppendTestUtil.LOG.info("size=" + size);
    stm.write(buffer, 0, size);
    String originalLeaseHolder = NameNodeAdapter.getLeaseHolderForPath(
        cluster.getNameNode(), fileStr);
    assertFalse("original lease holder should not be the NN",
        originalLeaseHolder.equals(HdfsServerConstants.NAMENODE_LEASE_HOLDER));
    // hflush file
    AppendTestUtil.LOG.info("hflush");
    stm.hflush();
    // check visible length
    final HdfsDataInputStream in = (HdfsDataInputStream)dfs.open(filePath);
    Assert.assertEquals(size, in.getVisibleLength());
    in.close();
    if (doRename) {
      fileStr += ".renamed";
      Path renamedPath = new Path(fileStr);
      assertTrue(dfs.rename(filePath, renamedPath));
      filePath = renamedPath;
    }
    // kill the lease renewal thread
    AppendTestUtil.LOG.info("leasechecker.interruptAndJoin()");
    dfs.dfs.getLeaseRenewer().interruptAndJoin();
    // Make sure the DNs don't send a heartbeat for a while, so the blocks
    // won't actually get completed during lease recovery.
    for (DataNode dn : cluster.getDataNodes()) {
      DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, true);
    }
    // set the hard limit to be 1 second
    cluster.setLeasePeriod(LONG_LEASE_PERIOD, SHORT_LEASE_PERIOD);
    // Make sure lease recovery begins.
    final String path = fileStr;
    GenericTestUtils.waitFor(new Supplier<Boolean>() {
      @Override
      public Boolean get() {
        // Recovery has started once the NN itself holds the lease.
        return HdfsServerConstants.NAMENODE_LEASE_HOLDER.equals(
            NameNodeAdapter.getLeaseHolderForPath(cluster.getNameNode(), path));
      }
    }, (int)SHORT_LEASE_PERIOD, (int)SHORT_LEASE_PERIOD * 10);
    // Normally, the in-progress edit log would be finalized by
    // FSEditLog#endCurrentLogSegment.  For testing purposes, we
    // disable that here.
    FSEditLog spyLog = spy(cluster.getNameNode().getFSImage().getEditLog());
    doNothing().when(spyLog).endCurrentLogSegment(Mockito.anyBoolean());
    DFSTestUtil.setEditLogForTesting(cluster.getNamesystem(), spyLog);
    cluster.restartNameNode(false);
    // After restart the NN (not the original client) must hold the lease.
    checkLease(fileStr, size);
    // Let the DNs send heartbeats again.
    for (DataNode dn : cluster.getDataNodes()) {
      DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, false);
    }
    cluster.waitActive();
    // set the hard limit to be 1 second, to initiate lease recovery.
    cluster.setLeasePeriod(LONG_LEASE_PERIOD, SHORT_LEASE_PERIOD);
    // wait for lease recovery to complete
    LocatedBlocks locatedBlocks;
    do {
      Thread.sleep(SHORT_LEASE_PERIOD);
      locatedBlocks = dfs.dfs.getLocatedBlocks(fileStr, 0L, size);
    } while (locatedBlocks.isUnderConstruction());
    assertEquals(size, locatedBlocks.getFileLength());
    // make sure that the client can't write data anymore.
    try {
      stm.write('b');
      stm.hflush();
      fail("Should not be able to flush after we've lost the lease");
    } catch (IOException e) {
      LOG.info("Expceted exception on write/hflush", e);
    }
    try {
      stm.close();
      fail("Should not be able to close after we've lost the lease");
    } catch (IOException e) {
      LOG.info("Expected exception on close", e);
    }
    // verify data
    AppendTestUtil.LOG.info(
        "File size is good. Now validating sizes from datanodes...");
    AppendTestUtil.checkFullFile(dfs, filePath, size, buffer, fileStr);
  }
  /**
   * Asserts who holds the lease for file {@code f}: nobody if the file is
   * empty (recovery closes a zero-length file immediately), otherwise the
   * NameNode (recovery in progress).
   */
  static void checkLease(String f, int size) {
    final String holder = NameNodeAdapter.getLeaseHolderForPath(
        cluster.getNameNode(), f);
    if (size == 0) {
      assertEquals("lease holder should null, file is closed", null, holder);
    } else {
      assertEquals("lease holder should now be the NN",
          HdfsServerConstants.NAMENODE_LEASE_HOLDER, holder);
    }
  }
}
| |
/*
* Copyright (c) 2010-2013 Evolveum
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.evolveum.midpoint.repo.sql;
import com.evolveum.midpoint.prism.ItemDefinition;
import com.evolveum.midpoint.prism.PrismContainerDefinition;
import com.evolveum.midpoint.prism.PrismObject;
import com.evolveum.midpoint.prism.PrismObjectDefinition;
import com.evolveum.midpoint.prism.PrismPropertyDefinition;
import com.evolveum.midpoint.prism.PrismPropertyValue;
import com.evolveum.midpoint.prism.delta.ContainerDelta;
import com.evolveum.midpoint.prism.delta.ItemDelta;
import com.evolveum.midpoint.prism.delta.ObjectDelta;
import com.evolveum.midpoint.prism.delta.PropertyDelta;
import com.evolveum.midpoint.prism.delta.builder.DeltaBuilder;
import com.evolveum.midpoint.prism.path.IdItemPathSegment;
import com.evolveum.midpoint.prism.path.ItemPath;
import com.evolveum.midpoint.prism.path.ItemPathSegment;
import com.evolveum.midpoint.prism.path.NameItemPathSegment;
import com.evolveum.midpoint.prism.polystring.PolyString;
import com.evolveum.midpoint.prism.query.builder.QueryBuilder;
import com.evolveum.midpoint.prism.util.PrismTestUtil;
import com.evolveum.midpoint.prism.xml.XmlTypeConverter;
import com.evolveum.midpoint.repo.sql.testing.SqlRepoTestUtil;
import com.evolveum.midpoint.schema.result.OperationResult;
import com.evolveum.midpoint.util.logging.LoggingUtils;
import com.evolveum.midpoint.util.logging.Trace;
import com.evolveum.midpoint.util.logging.TraceManager;
import com.evolveum.midpoint.xml.ns._public.common.common_3.*;
import com.evolveum.prism.xml.ns._public.types_3.PolyStringType;
import org.hibernate.Session;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.ContextConfiguration;
import org.testng.AssertJUnit;
import org.testng.annotations.Test;
import java.io.File;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Date;
import java.util.List;
/**
* @author Pavol Mederly
*/
@ContextConfiguration(locations = {"../../../../../ctx-test.xml"})
@DirtiesContext(classMode = DirtiesContext.ClassMode.AFTER_CLASS)
public class ConcurrencyTest extends BaseSQLRepoTest {
private static final Trace LOGGER = TraceManager.getTrace(ConcurrencyTest.class);
//private static final long WAIT_TIME = 60000;
//private static final long WAIT_STEP = 500;
@Test
public void test001TwoWriters_OneAttributeEach__NoReader() throws Exception {
PropertyModifierThread[] mts = new PropertyModifierThread[]{
new PropertyModifierThread(1, new ItemPath(UserType.F_GIVEN_NAME), true, null, true),
new PropertyModifierThread(2, new ItemPath(UserType.F_FAMILY_NAME), true, null, true),
// new ModifierThread(3, oid, UserType.F_DESCRIPTION, false),
// new ModifierThread(4, oid, UserType.F_EMAIL_ADDRESS, false),
// new ModifierThread(5, oid, UserType.F_TELEPHONE_NUMBER, false),
// new ModifierThread(6, oid, UserType.F_EMPLOYEE_NUMBER, false),
// new ModifierThread(8, oid, UserType.F_EMAIL_ADDRESS),
// new ModifierThread(9, oid, UserType.F_EMPLOYEE_NUMBER)
};
concurrencyUniversal("Test1", 30000L, 500L, mts, null);
}
@Test
public void test002FourWriters_OneAttributeEach__NoReader() throws Exception {
PropertyModifierThread[] mts = new PropertyModifierThread[]{
new PropertyModifierThread(1, new ItemPath(UserType.F_GIVEN_NAME), true, null, true),
new PropertyModifierThread(2, new ItemPath(UserType.F_FAMILY_NAME), true, null, true),
new PropertyModifierThread(3, new ItemPath(UserType.F_DESCRIPTION), false, null, true),
new PropertyModifierThread(4, new ItemPath(UserType.F_EMAIL_ADDRESS), false, null, true)
};
concurrencyUniversal("Test2", 60000L, 500L, mts, null);
}
@Test
public void test003OneWriter_TwoAttributes__OneReader() throws Exception {
PropertyModifierThread[] mts = new PropertyModifierThread[]{
new PropertyModifierThread(1, new ItemPath(UserType.F_GIVEN_NAME), true,
new ItemPath(
new NameItemPathSegment(UserType.F_ASSIGNMENT),
new IdItemPathSegment(1L),
new NameItemPathSegment(AssignmentType.F_DESCRIPTION)),
true)
};
Checker checker = (iteration, oid) -> {
PrismObject<UserType> userRetrieved = repositoryService.getObject(UserType.class, oid, null, new OperationResult("dummy"));
String givenName = userRetrieved.asObjectable().getGivenName().getOrig();
String assignmentDescription = userRetrieved.asObjectable().getAssignment().get(0).getDescription();
LOGGER.info("[" + iteration + "] givenName = " + givenName + ", assignment description = " + assignmentDescription);
if (!givenName.equals(assignmentDescription)) {
String msg = "Inconsistent object state: GivenName = " + givenName + ", assignment description = " + assignmentDescription;
LOGGER.error(msg);
throw new AssertionError(msg);
}
};
concurrencyUniversal("Test3", 60000L, 0L, mts, checker);
}
@Test
public void test004TwoWriters_TwoAttributesEach__OneReader() throws Exception {
PropertyModifierThread[] mts = new PropertyModifierThread[]{
new PropertyModifierThread(1, new ItemPath(UserType.F_GIVEN_NAME), true,
new ItemPath(
new NameItemPathSegment(UserType.F_ASSIGNMENT),
new IdItemPathSegment(1L),
new NameItemPathSegment(AssignmentType.F_DESCRIPTION)),
true),
new PropertyModifierThread(2, new ItemPath(UserType.F_FAMILY_NAME), true,
new ItemPath(
new NameItemPathSegment(UserType.F_ASSIGNMENT),
new IdItemPathSegment(1L),
new NameItemPathSegment(AssignmentType.F_CONSTRUCTION)),
true),
};
Checker checker = (iteration, oid) -> {
PrismObject<UserType> userRetrieved = repositoryService.getObject(UserType.class, oid, null, new OperationResult("dummy"));
String givenName = userRetrieved.asObjectable().getGivenName().getOrig();
String familyName = userRetrieved.asObjectable().getFamilyName().getOrig();
String assignmentDescription = userRetrieved.asObjectable().getAssignment().get(0).getDescription();
String referenceDescription = userRetrieved.asObjectable().getAssignment().get(0).getConstruction().getDescription();
LOGGER.info("[" + iteration + "] givenName = " + givenName + ", assignment description = " + assignmentDescription + ", familyName = " + familyName + ", referenceDescription = " + referenceDescription);
if (!givenName.equals(assignmentDescription)) {
String msg = "Inconsistent object state: GivenName = " + givenName + ", assignment description = " + assignmentDescription;
LOGGER.error(msg);
throw new AssertionError(msg);
}
if (!familyName.equals(referenceDescription)) {
String msg = "Inconsistent object state: FamilyName = " + familyName + ", account construction description = " + referenceDescription;
LOGGER.error(msg);
throw new AssertionError(msg);
}
};
concurrencyUniversal("Test4", 60000L, 0L, mts, checker);
}
    /**
     * Reader-side invariant check invoked between wait steps of the test
     * driver loop; implementations should throw (e.g. {@link AssertionError})
     * when the object identified by {@code oid} is in an inconsistent state.
     */
    @FunctionalInterface
    private interface Checker {
        // iteration: 1-based read-loop iteration; oid: OID of the object under test
        void check(int iteration, String oid) throws Exception;
    }
    /**
     * Common driver for the concurrency tests: adds a fresh user object, starts
     * the given modifier threads against it, optionally runs a reader-side
     * {@code checker} in a polling loop for {@code duration} ms, then stops the
     * threads and fails if any of them recorded an exception.
     *
     * @param name name given to the test user object (also labels the run)
     * @param duration how long (ms) to let the modifier threads run
     * @param waitStep sleep (ms) between reader iterations; 0 = poll as fast as possible
     * @param modifierThreads writer threads to run concurrently
     * @param checker optional consistency check executed each iteration; may be null
     */
    private void concurrencyUniversal(String name, long duration, long waitStep, PropertyModifierThread[] modifierThreads, Checker checker) throws Exception {
        // Print the JDBC transaction isolation level in effect for this run
        // (diagnostic only).
        Session session = getFactory().openSession();
        session.doWork(connection -> System.out.println(">>>>" + connection.getTransactionIsolation()));
        session.close();
        final File file = new File("src/test/resources/concurrency/user.xml");
        PrismObject<UserType> user = prismContext.parseObject(file);
        user.asObjectable().setName(new PolyStringType(name));
        OperationResult result = new OperationResult("Concurrency Test");
        String oid = repositoryService.addObject(user, null, result);
        LOGGER.info("*** Object added: " + oid + " ***");
        LOGGER.info("*** Starting modifier threads ***");
//        modifierThreads[1].setOid(oid);
//        modifierThreads[1].runOnce();
//        if(true) return;
        for (PropertyModifierThread mt : modifierThreads) {
            mt.setOid(oid);
            mt.start();
        }
        LOGGER.info("*** Waiting " + duration + " ms ***");
        long startTime = System.currentTimeMillis();
        int readIteration = 1;
        main:
        while (System.currentTimeMillis() - startTime < duration) {
            if (checker != null) {
                checker.check(readIteration, oid);
            }
            if (waitStep > 0L) {
                Thread.sleep(waitStep);
            }
            // Stop waiting early if any writer died; its failure is reported below.
            for (PropertyModifierThread mt : modifierThreads) {
                if (!mt.isAlive()) {
                    LOGGER.error("At least one of threads died prematurely, finishing waiting.");
                    break main;
                }
            }
            readIteration++;
        }
        for (PropertyModifierThread mt : modifierThreads) {
            mt.stop = true; // stop the threads
            // counter starts at 1 and is pre-incremented, hence the -1 here
            System.out.println("Thread " + mt.id + " has done " + (mt.counter - 1) + " iterations");
            LOGGER.info("Thread " + mt.id + " has done " + (mt.counter - 1) + " iterations");
        }
        // we do not have to wait for the threads to be stopped, just examine their results
        Thread.sleep(1000); // give the threads a chance to finish (before repo will be shut down)
        for (PropertyModifierThread mt : modifierThreads) {
            LOGGER.info("Modifier thread " + mt.id + " finished with an exception: ", mt.threadResult);
        }
        // Any recorded writer exception fails the test.
        for (PropertyModifierThread mt : modifierThreads) {
            if (mt.threadResult != null) {
                throw new AssertionError("Modifier thread " + mt.id + " finished with an exception: " + mt.threadResult, mt.threadResult);
            }
        }
    }
/**
 * Base class for test worker threads: repeatedly invokes
 * {@link #runOnce(OperationResult)} until {@link #stop} is set, recording the
 * first thrown exception in {@link #threadResult}. The coordinating test thread
 * reads these fields concurrently, hence the volatile markers.
 */
abstract class WorkerThread extends Thread {
    int id;                          // numeric identifier used in log/console output
    String oid; // object to modify
    String lastVersion = null;       // last object version seen; used by subclasses for version-progress checks
    volatile Throwable threadResult; // first failure, if any; null means no failure so far
    volatile int counter = 1;        // iteration counter; incremented before each runOnce call
    WorkerThread(int id) {
        this.id = id;
    }
    public volatile boolean stop = false; // set by the coordinator to request termination
    @Override
    public void run() {
        try {
            while (!stop) {
                OperationResult result = new OperationResult("run");
                counter++;
                LOGGER.info(" --- Iteration number {} for {} ---", counter, description());
                runOnce(result);
            }
        } catch (Throwable t) {
            // Record the failure for the coordinator to examine; the thread then terminates.
            LoggingUtils.logException(LOGGER, "Got exception: " + t, t);
            threadResult = t;
        }
    }
    // Executes a single test iteration.
    abstract void runOnce(OperationResult result) throws Exception;
    // Short human-readable description used in log messages.
    abstract String description();
    public void setOid(String oid) {
        this.oid = oid;
    }
}
/**
 * Worker thread that repeatedly replaces the value of one property (attribute1)
 * and optionally a second item (attribute2) on the test object. When checkValue
 * is set, it reads the object back and verifies both the written values and the
 * monotonic progress of the object version.
 */
class PropertyModifierThread extends WorkerThread {
    ItemPath attribute1; // attribute to modify
    ItemPath attribute2; // attribute to modify (may be null)
    boolean poly;        // whether attribute1 carries a PolyString instead of a plain String
    boolean checkValue;  // whether to read the object back and verify the written data
    PropertyModifierThread(int id, ItemPath attribute1, boolean poly, ItemPath attribute2, boolean checkValue) {
        super(id);
        this.attribute1 = attribute1;
        this.attribute2 = attribute2;
        this.poly = poly;
        this.setName("Modifier for " + attributeNames());
        this.checkValue = checkValue;
    }
    private String attributeNames() {
        return lastName(attribute1) + (attribute2 != null ? "/" + lastName(attribute2) : "");
    }
    @Override
    String description() {
        return attributeNames();
    }
    /**
     * Returns the local part of the last named segment of the path, or "?" if none exists.
     */
    private String lastName(ItemPath path) {
        List<ItemPathSegment> segments = path.getSegments();
        // FIX: the loop was "i++", which walked past the end of the list instead of
        // scanning backwards from the last segment.
        for (int i = segments.size() - 1; i >= 0; i--) {
            if (segments.get(i) instanceof NameItemPathSegment) {
                return ((NameItemPathSegment) segments.get(i)).getName().getLocalPart();
            }
        }
        return "?";
    }
    @Override
    void runOnce(OperationResult result) {
        PrismObjectDefinition<?> userPrismDefinition = prismContext.getSchemaRegistry().findObjectDefinitionByCompileTimeClass(UserType.class);
        String prefix = lastName(attribute1);
        // The value written this iteration, e.g. "[givenName:42]".
        String dataWritten = "[" + prefix + ":" + Integer.toString(counter) + "]";
        PrismPropertyDefinition<?> propertyDefinition1 = userPrismDefinition.findPropertyDefinition(attribute1);
        if (propertyDefinition1 == null) {
            throw new IllegalArgumentException("No definition for " + attribute1 + " in " + userPrismDefinition);
        }
        PropertyDelta<?> delta1 = new PropertyDelta<>(attribute1, propertyDefinition1, prismContext);
        //noinspection unchecked
        delta1.setValueToReplace(new PrismPropertyValue(poly ? new PolyString(dataWritten) : dataWritten));
        List<ItemDelta> deltas = new ArrayList<>();
        deltas.add(delta1);
        ItemDefinition propertyDefinition2 = null;
        if (attribute2 != null) {
            propertyDefinition2 = userPrismDefinition.findItemDefinition(attribute2);
            if (propertyDefinition2 == null) {
                throw new IllegalArgumentException("No definition for " + attribute2 + " in " + userPrismDefinition);
            }
            ItemDelta delta2;
            if (propertyDefinition2 instanceof PrismContainerDefinition) {
                delta2 = new ContainerDelta(attribute2, (PrismContainerDefinition) propertyDefinition2, prismContext);
            } else {
                delta2 = new PropertyDelta(attribute2, (PrismPropertyDefinition) propertyDefinition2, prismContext);
            }
            if (ConstructionType.COMPLEX_TYPE.equals(propertyDefinition2.getTypeName())) {
                // For construction containers the test data is stored in the description property.
                ConstructionType act = new ConstructionType();
                act.setDescription(dataWritten);
                delta2.setValueToReplace(act.asPrismContainerValue());
            } else {
                delta2.setValueToReplace(new PrismPropertyValue(dataWritten));
            }
            deltas.add(delta2);
        }
        try {
            repositoryService.modifyObject(UserType.class, oid, deltas, result);
            result.computeStatus();
            if (result.isError()) {
                LOGGER.error("Error found in operation result:\n{}", result.debugDump());
                throw new IllegalStateException("Error found in operation result");
            }
        } catch (Exception e) {
            String msg = "modifyObject failed while modifying attribute(s) " + attributeNames() + " to value " + dataWritten;
            throw new RuntimeException(msg, e);
        }
        if (checkValue) {
            try {
                Thread.sleep(100); // give concurrent writers a chance to interleave
            } catch (InterruptedException e) {
                // FIX: previously swallowed; restore the interrupt flag for callers.
                Thread.currentThread().interrupt();
            }
            PrismObject<UserType> user;
            try {
                user = repositoryService.getObject(UserType.class, oid, null, result);
            } catch (Exception e) {
                String msg = "getObject failed while getting attribute(s) " + attributeNames();
                throw new RuntimeException(msg, e);
            }
            // check the attribute
            String dataRead;
            if (poly) {
                dataRead = user.findProperty(attribute1).getRealValue(PolyString.class).getOrig();
            } else {
                dataRead = user.findProperty(attribute1).getRealValue(String.class);
            }
            if (!dataWritten.equals(dataRead)) {
                threadResult = new RuntimeException("Data read back (" + dataRead + ") does not match the data written (" + dataWritten + ") on attribute " + attribute1);
                LOGGER.error("compare failed", threadResult);
                stop = true;
                return;
            }
            if (attribute2 != null) {
                if (ConstructionType.COMPLEX_TYPE.equals(propertyDefinition2.getTypeName())) {
                    dataRead = ((ConstructionType) user.findContainer(attribute2).getValue().getValue()).getDescription();
                } else {
                    dataRead = user.findProperty(attribute2).getRealValue(String.class);
                }
                if (!dataWritten.equals(dataRead)) {
                    threadResult = new RuntimeException("Data read back (" + dataRead + ") does not match the data written (" + dataWritten + ") on attribute " + attribute2);
                    LOGGER.error("compare failed", threadResult);
                    stop = true;
                    return;
                }
            }
            String currentVersion = user.getVersion();
            String versionError = SqlRepoTestUtil.checkVersionProgress(lastVersion, currentVersion);
            if (versionError != null) {
                threadResult = new RuntimeException(versionError);
                LOGGER.error(versionError);
                stop = true;
                return;
            }
            lastVersion = currentVersion;
        }
    }
    // Note: a redundant setOid(String) override identical to WorkerThread.setOid was removed.
}
/**
 * Worker thread that on each iteration applies the item deltas produced by
 * {@link #getItemDeltas()} to the object identified by {@link WorkerThread#oid}.
 */
abstract class DeltaExecutionThread extends WorkerThread {
    String description;   // human-readable purpose, also used as the thread-name suffix
    DeltaExecutionThread(int id, String oid, String description) {
        super(id);
        this.oid = oid;
        this.description = description;
        this.setName("Executor: " + description);
    }
    @Override
    String description() {
        return description;
    }
    // Produces the deltas to apply in the current iteration.
    abstract Collection<ItemDelta<?, ?>> getItemDeltas() throws Exception;
    void runOnce(OperationResult result) throws Exception {
        repositoryService.modifyObject(UserType.class, oid, getItemDeltas(), result);
    }
}
/**
 * Tests modifying objects from inside a searchObjectsIterative result handler:
 * adds a user, searches for it by name, replaces its fullName within the handler,
 * and verifies the change was persisted.
 */
@Test
public void test010SearchIterative() throws Exception {
    String name = "Test10";
    final String newFullName = "new-full-name";
    final File file = new File("src/test/resources/concurrency/user.xml");
    PrismObject<UserType> user = prismContext.parseObject(file);
    user.asObjectable().setName(new PolyStringType(name));
    final OperationResult result = new OperationResult("Concurrency Test10");
    String oid = repositoryService.addObject(user, null, result);
    repositoryService.searchObjectsIterative(UserType.class,
            QueryBuilder.queryFor(UserType.class, prismContext)
                    .item(UserType.F_NAME).eqPoly(name).matchingOrig().build(),
            (object, parentResult) -> {
                // Modify each found object while the iterative search is still running.
                LOGGER.info("Handling " + object + "...");
                ObjectDelta delta = ObjectDelta.createModificationReplaceProperty(UserType.class, object.getOid(),
                        UserType.F_FULL_NAME, prismContext, new PolyString(newFullName));
                try {
                    repositoryService.modifyObject(UserType.class,
                            object.getOid(),
                            delta.getModifications(),
                            parentResult);
                } catch (Exception e) {
                    throw new RuntimeException("Exception in handle method", e);
                }
                return true; // continue iterating
            },
            null, true, result);
    PrismObject<UserType> reloaded = repositoryService.getObject(UserType.class, oid, null, result);
    AssertJUnit.assertEquals("Full name was not changed", newFullName, reloaded.asObjectable().getFullName().getOrig());
}
/**
 * Concurrently adds operationExecution records to a single user from several
 * threads and fails if any thread recorded an exception. Skipped on H2.
 */
@Test
public void test100AddOperationExecution() throws Exception {
    if (getConfiguration().isUsingH2()) {
        return; // TODO
    }
    int THREADS = 8;
    long DURATION = 30000L;
    // Print the JDBC transaction isolation level, purely for diagnostics.
    Session session = getFactory().openSession();
    session.doWork(connection -> System.out.println(">>>>" + connection.getTransactionIsolation()));
    session.close();
    UserType user = new UserType(prismContext).name("jack");
    OperationResult result = new OperationResult("test100AddOperationExecution");
    String oid = repositoryService.addObject(user.asPrismObject(), null, result);
    PrismTestUtil.display("object added", oid);
    LOGGER.info("Starting worker threads");
    List<DeltaExecutionThread> threads = new ArrayList<>();
    for (int i = 0; i < THREADS; i++) {
        final int threadIndex = i;
        // Each thread repeatedly appends a new operationExecution record tagged
        // with its index and iteration counter.
        DeltaExecutionThread thread = new DeltaExecutionThread(i, oid, "operationExecution adder #" + i) {
            @Override
            Collection<ItemDelta<?, ?>> getItemDeltas() throws Exception {
                return DeltaBuilder.deltaFor(UserType.class, prismContext)
                        .item(UserType.F_OPERATION_EXECUTION).add(
                                new OperationExecutionType(prismContext)
                                        .channel(threadIndex + ":" + counter)
                                        .timestamp(XmlTypeConverter.createXMLGregorianCalendar(new Date())))
                        .asItemDeltas();
            }
        };
        thread.start();
        threads.add(thread);
    }
    waitForThreads(threads, DURATION);
}
/**
 * Concurrently adds assignments to a single user from several threads and fails
 * if any thread recorded an exception. Skipped on H2.
 */
@Test
public void test110AddAssignments() throws Exception {
    if (getConfiguration().isUsingH2()) {
        return; // TODO
    }
    int THREADS = 8;
    long DURATION = 30000L;
    UserType user = new UserType(prismContext).name("alice");
    OperationResult result = new OperationResult("test110AddAssignments");
    String oid = repositoryService.addObject(user.asPrismObject(), null, result);
    PrismTestUtil.display("object added", oid);
    LOGGER.info("Starting worker threads");
    List<DeltaExecutionThread> threads = new ArrayList<>();
    for (int i = 0; i < THREADS; i++) {
        final int threadIndex = i;
        // Each thread repeatedly adds an assignment whose target OID encodes its
        // index and iteration counter, so all values are distinct.
        DeltaExecutionThread thread = new DeltaExecutionThread(i, oid, "assignment adder #" + i) {
            @Override
            Collection<ItemDelta<?, ?>> getItemDeltas() throws Exception {
                return DeltaBuilder.deltaFor(UserType.class, prismContext)
                        .item(UserType.F_ASSIGNMENT).add(
                                new AssignmentType(prismContext)
                                        .targetRef("0000-" + threadIndex + "-" + counter, OrgType.COMPLEX_TYPE))
                        .asItemDeltas();
            }
        };
        thread.start();
        threads.add(thread);
    }
    waitForThreads(threads, DURATION);
    PrismObject<UserType> userAfter = repositoryService.getObject(UserType.class, oid, null, result);
    display("user after", userAfter);
}
/**
 * Lets the given (already started) worker threads run for DURATION milliseconds
 * or until one of them dies, then signals them to stop and throws AssertionError
 * if any thread recorded an exception.
 *
 * @param threads worker threads that have already been started
 * @param DURATION maximum time (ms) to let the threads run
 * @throws InterruptedException if this coordinating thread is interrupted while waiting
 */
protected void waitForThreads(List<? extends WorkerThread> threads, long DURATION) throws InterruptedException {
    LOGGER.info("*** Waiting {} ms ***", DURATION);
    long startTime = System.currentTimeMillis();
    main:
    while (System.currentTimeMillis() - startTime < DURATION) {
        // Stop waiting early if any worker died; its failure is reported below.
        for (WorkerThread thread : threads) {
            if (!thread.isAlive()) {
                LOGGER.error("At least one of threads died prematurely, finishing waiting.");
                break main;
            }
        }
        Thread.sleep(100);
    }
    for (WorkerThread thread : threads) {
        thread.stop = true; // signal the threads to stop
        System.out.println("Thread " + thread.id + " has done " + (thread.counter - 1) + " iterations");
        LOGGER.info("Thread " + thread.id + " has done " + (thread.counter - 1) + " iterations");
    }
    // we do not have to wait for the threads to be stopped, just examine their results
    Thread.sleep(1000); // give the threads a chance to finish (before repo will be shut down)
    for (WorkerThread thread : threads) {
        if (thread.threadResult != null) {  // fixed: do not log "exception: null" for successful threads
            LOGGER.info("Modifier thread " + thread.id + " finished with an exception: ", thread.threadResult);
        }
    }
    for (WorkerThread thread : threads) {
        if (thread.threadResult != null) {
            throw new AssertionError("Modifier thread " + thread.id + " finished with an exception: " + thread.threadResult, thread.threadResult);
        }
    }
}
// Returns the SQL repository configuration (used by the tests to detect the H2 backend).
private SqlRepositoryConfiguration getConfiguration() {
    return ((SqlRepositoryServiceImpl) repositoryService).getConfiguration();
}
}
| |
/**
* Copyright 2012-2014 Julien Eluard and contributors
* This project includes software developed by Julien Eluard: https://github.com/jeluard/
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.semver.jardiff;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.annotation.Nonnull;
import org.osjava.jardiff.AbstractDiffHandler;
import org.osjava.jardiff.ClassInfo;
import org.osjava.jardiff.DiffException;
import org.osjava.jardiff.FieldInfo;
import org.osjava.jardiff.MethodInfo;
import org.semver.Delta;
import org.semver.Delta.Add;
import org.semver.Delta.Change;
import org.semver.Delta.Deprecate;
import org.semver.Delta.Difference;
import org.semver.Delta.Remove;
/**
*
* {@link org.osjava.jardiff.DiffHandler} implementation accumulating {@link Difference}.
*
*/
/**
 *
 * {@link org.osjava.jardiff.DiffHandler} implementation accumulating {@link Difference}.
 *
 */
public final class DifferenceAccumulatingHandler extends AbstractDiffHandler {

    /**
     * Tokenizer for the simplified wildcard syntax used by includes/excludes.
     * Hoisted to a constant: previously this pattern was recompiled on every
     * call to {@link #simplifyRegularExpression(String, boolean)}.
     */
    private static final Pattern SIMPLE_REGEX_TOKENIZER =
        Pattern.compile( "\\{0,1\\}|\\*|\\?|[[^*^?^{^}]|^]+", Pattern.CASE_INSENSITIVE );

    private String currentClassName;
    private final Set<String> includes;
    private final boolean includesAreRegExp;
    private final Set<String> excludes;
    private final boolean excludesAreRegExp;
    private final Set<Difference> differences = new HashSet<Difference>();

    public DifferenceAccumulatingHandler() {
        this(Collections.<String>emptySet(), Collections.<String>emptySet());
    }

    public DifferenceAccumulatingHandler(@Nonnull final Set<String> includes, @Nonnull final Set<String> excludes) {
        this(includes, false, excludes, false);
    }

    /**
     * @param includes class-name patterns to include; empty means everything is included
     * @param includesAreRegExp whether {@code includes} are full regular expressions
     *        (otherwise they use the simplified wildcard syntax)
     * @param excludes class-name patterns to exclude
     * @param excludesAreRegExp whether {@code excludes} are full regular expressions
     */
    public DifferenceAccumulatingHandler(@Nonnull final Set<String> includes, final boolean includesAreRegExp,
            @Nonnull final Set<String> excludes, final boolean excludesAreRegExp) {
        this.includes = includes;
        this.includesAreRegExp = includesAreRegExp;
        this.excludes = excludes;
        this.excludesAreRegExp = excludesAreRegExp;
    }

    /** Returns the name of the class currently being diffed, or null before any class change. */
    public String getCurrentClassName() {
        return this.currentClassName;
    }

    @Override
    public void startDiff(final String previous, final String current) throws DiffException {
    }

    @Override
    public void endDiff() throws DiffException {
    }

    @Override
    public void startOldContents() throws DiffException {
    }

    @Override
    public void endOldContents() throws DiffException {
    }

    @Override
    public void startNewContents() throws DiffException {
    }

    @Override
    public void endNewContents() throws DiffException {
    }

    @Override
    public void contains(final ClassInfo classInfo) throws DiffException {
    }

    @Override
    public void startAdded() throws DiffException {
    }

    @Override
    public void classAdded(final ClassInfo classInfo) throws DiffException {
        if (!isClassConsidered(classInfo.getName())) {
            return;
        }
        this.differences.add(new Add(getClassName(classInfo.getName()), classInfo));
    }

    @Override
    public void fieldAdded(final FieldInfo fieldInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Add(getCurrentClassName(), fieldInfo));
    }

    @Override
    public void methodAdded(final MethodInfo methodInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Add(getCurrentClassName(), methodInfo));
    }

    @Override
    public void endAdded() throws DiffException {
    }

    @Override
    public void startChanged() throws DiffException {
    }

    @Override
    public void startClassChanged(final String className) throws DiffException {
        this.currentClassName = getClassName(className);
    }

    @Override
    public void classChanged(final ClassInfo oldClassInfo, final ClassInfo newClassInfo) throws DiffException {
        if (!isClassConsidered(oldClassInfo.getName())) {
            return;
        }
        this.differences.add(new Change(getClassName(oldClassInfo.getName()), oldClassInfo, newClassInfo));
    }

    @Override
    public void classDeprecated(final ClassInfo oldClassInfo, final ClassInfo newClassInfo) throws DiffException {
        if (!isClassConsidered(oldClassInfo.getName())) {
            return;
        }
        this.differences.add(new Deprecate(getClassName(oldClassInfo.getName()), oldClassInfo, newClassInfo));
    }

    @Override
    public void fieldChanged(final FieldInfo oldFieldInfo, final FieldInfo newFieldInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Change(getCurrentClassName(), oldFieldInfo, newFieldInfo));
    }

    @Override
    public void fieldDeprecated(final FieldInfo oldFieldInfo, final FieldInfo newFieldInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Deprecate(getCurrentClassName(), oldFieldInfo, newFieldInfo));
    }

    @Override
    public void methodChanged(final MethodInfo oldMethodInfo, final MethodInfo newMethodInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Change(getCurrentClassName(), oldMethodInfo, newMethodInfo));
    }

    @Override
    public void methodDeprecated(final MethodInfo oldMethodInfo, final MethodInfo newMethodInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Deprecate(getCurrentClassName(), oldMethodInfo, newMethodInfo));
    }

    @Override
    public void endClassChanged() throws DiffException {
    }

    @Override
    public void endChanged() throws DiffException {
    }

    @Override
    public void startRemoved() throws DiffException {
    }

    @Override
    public void classRemoved(final ClassInfo classInfo) throws DiffException {
        if (!isClassConsidered(classInfo.getName())) {
            return;
        }
        this.differences.add(new Remove(getClassName(classInfo.getName()), classInfo));
    }

    @Override
    public void fieldRemoved(final FieldInfo fieldInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Remove(getCurrentClassName(), fieldInfo));
    }

    @Override
    public void methodRemoved(final MethodInfo methodInfo) throws DiffException {
        if (!isClassConsidered(getCurrentClassName())) {
            return;
        }
        this.differences.add(new Remove(getCurrentClassName(), methodInfo));
    }

    @Override
    public void endRemoved() throws DiffException {
    }

    /**
     *
     * Is considered a class whose package:
     * * is included
     * * is not excluded
     *
     * If includes are provided then package must be defined here.
     *
     * @return true if the class passes the include/exclude filters
     */
    protected boolean isClassConsidered( final String className ) {
        // Fix case where class names are reported with '.'
        final String fixedClassName = className.replace('.', '/');
        for ( String exclude : this.excludes ) {
            final Pattern excludePattern;
            if( !excludesAreRegExp ) {
                // Translate the "/**/" and "/*/" shorthand into the simplified syntax.
                if ( exclude.contains( "/**/" ) ) {
                    exclude = exclude.replaceAll( "/\\*\\*/", "{0,1}**/" );
                }
                if ( exclude.contains( "/*/" ) ) {
                    exclude = exclude.replaceAll( "/\\*/", "{0,1}*/{0,1}" );
                }
                excludePattern = simplifyRegularExpression( exclude, false );
            } else {
                excludePattern = Pattern.compile( exclude );
            }
            // Was "while (find()) return false" - a plain if is equivalent.
            final Matcher excludeMatcher = excludePattern.matcher( fixedClassName );
            if ( excludeMatcher.find() ) {
                return false;
            }
        }
        if ( !this.includes.isEmpty() ) {
            for ( String include : this.includes ) {
                final Pattern includePattern;
                if( !includesAreRegExp ) {
                    if ( include.contains( "/**/" ) ) {
                        include = include.replaceAll( "/\\*\\*/", "{0,1}**/" );
                    }
                    if ( include.contains( "/*/" ) ) {
                        include = include.replaceAll( "/\\*/", "{0,1}*/{0,1}" );
                    }
                    includePattern = simplifyRegularExpression( include, false );
                } else {
                    includePattern = Pattern.compile( include );
                }
                final Matcher includeMatcher = includePattern.matcher( fixedClassName );
                if ( includeMatcher.find() ) {
                    return true;
                }
            }
            return false;
        }
        return true;
    }

    /**
     *
     * Simplifies the given regular expression by the following pattern:<br>
     * All substrings not containing "{0,1}", "*" and "?" get surrounded by "\\Q" and "\\E". Then all occurrences of
     * "**" are replaced by ".*", "*" with "[^/]*" and all occurrences of "?" are replaced by "." In the end a "$" will
     * be appended.
     *
     * @param regEx the regular expression which is in a simple form.
     * @param caseSensitive whether the resulting pattern should match case-sensitively
     * @return the simple regular expression converted to a normal regular expression.
     */
    private static Pattern simplifyRegularExpression( final String regEx, final boolean caseSensitive ) {
        final StringBuilder strBuild = new StringBuilder();
        final Matcher m = SIMPLE_REGEX_TOKENIZER.matcher( regEx );
        while ( m.find() ) {
            final String token = m.group();
            if ( token.equals( "*" ) || token.equals( "?" ) ) { //$NON-NLS-1$ //$NON-NLS-2$
                strBuild.append( token );
            } else if ( token.equals( "{0,1}" ) ) {
                strBuild.append( "/" );
                strBuild.append( token );
            } else {
                // Surround all tokens that are not "*" or "?" with "\\Q" and \\E"
                strBuild.append( "\\Q" ).append( token ).append( "\\E" ); //$NON-NLS-1$ //$NON-NLS-2$
            }
        }
        // Replace "**" with ".*", lone "*" with "[^/]*", and "?" with "."
        strBuild.append( "$" );
        String result = strBuild.toString();
        result = result.replaceAll( "(?<!\\*)\\*(?!\\*)", "[^/]*" );
        result = result.replaceAll( "[\\*][\\s]*[\\*]", ".\\*" );
        result = result.replaceAll( "\\?", "." );
        if ( caseSensitive ) {
            return Pattern.compile( result );
        } else {
            return Pattern.compile( result, Pattern.CASE_INSENSITIVE );
        }
    }

    /** Returns the accumulated differences wrapped in a {@link Delta}. */
    public Delta getDelta() {
        return new Delta(this.differences);
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.nodemanager.recovery;
import static org.fusesource.leveldbjni.JniDBFactory.asString;
import static org.fusesource.leveldbjni.JniDBFactory.bytes;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Timer;
import java.util.TimerTask;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Time;
import org.apache.hadoop.yarn.api.protocolrecords.StartContainerRequest;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StartContainerRequestPBImpl;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.impl.pb.ProtoUtils;
import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.MasterKeyProto;
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.VersionProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.ContainerManagerApplicationProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.DeletionServiceDeleteTaskProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LocalizedResourceProto;
import org.apache.hadoop.yarn.proto.YarnServerNodemanagerRecoveryProtos.LogDeleterProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.StartContainerRequestProto;
import org.apache.hadoop.yarn.server.api.records.MasterKey;
import org.apache.hadoop.yarn.server.api.records.impl.pb.MasterKeyPBImpl;
import org.apache.hadoop.yarn.server.records.Version;
import org.apache.hadoop.yarn.server.records.impl.pb.VersionPBImpl;
import org.apache.hadoop.yarn.server.utils.LeveldbIterator;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.fusesource.leveldbjni.JniDBFactory;
import org.fusesource.leveldbjni.internal.NativeDB;
import org.iq80.leveldb.DB;
import org.iq80.leveldb.DBException;
import org.iq80.leveldb.Logger;
import org.iq80.leveldb.Options;
import org.iq80.leveldb.WriteBatch;
import com.google.common.annotations.VisibleForTesting;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
public class NMLeveldbStateStoreService extends NMStateStoreService {
public static final Log LOG =
    LogFactory.getLog(NMLeveldbStateStoreService.class);

private static final String DB_NAME = "yarn-nm-state";
private static final String DB_SCHEMA_VERSION_KEY = "nm-schema-version";

/**
 * Changes from 1.0 to 1.1: Save AMRMProxy state in NMSS.
 * Changes from 1.1 to 1.2: Save queued container information.
 */
private static final Version CURRENT_VERSION_INFO = Version.newInstance(1, 2);

// Key-space prefixes/suffixes for the various record types in the state DB.
private static final String DELETION_TASK_KEY_PREFIX =
    "DeletionService/deltask_";
private static final String APPLICATIONS_KEY_PREFIX =
    "ContainerManager/applications/";
@Deprecated
private static final String FINISHED_APPS_KEY_PREFIX =
    "ContainerManager/finishedApps/";
private static final String LOCALIZATION_KEY_PREFIX = "Localization/";
private static final String LOCALIZATION_PUBLIC_KEY_PREFIX =
    LOCALIZATION_KEY_PREFIX + "public/";
private static final String LOCALIZATION_PRIVATE_KEY_PREFIX =
    LOCALIZATION_KEY_PREFIX + "private/";
private static final String LOCALIZATION_STARTED_SUFFIX = "started/";
private static final String LOCALIZATION_COMPLETED_SUFFIX = "completed/";
private static final String LOCALIZATION_FILECACHE_SUFFIX = "filecache/";
private static final String LOCALIZATION_APPCACHE_SUFFIX = "appcache/";
// Per-container record keys: CONTAINERS_KEY_PREFIX + containerId + suffix.
private static final String CONTAINERS_KEY_PREFIX =
    "ContainerManager/containers/";
private static final String CONTAINER_REQUEST_KEY_SUFFIX = "/request";
private static final String CONTAINER_VERSION_KEY_SUFFIX = "/version";
private static final String CONTAINER_DIAGS_KEY_SUFFIX = "/diagnostics";
private static final String CONTAINER_LAUNCHED_KEY_SUFFIX = "/launched";
private static final String CONTAINER_QUEUED_KEY_SUFFIX = "/queued";
private static final String CONTAINER_RESOURCE_CHANGED_KEY_SUFFIX =
    "/resourceChanged";
private static final String CONTAINER_KILLED_KEY_SUFFIX = "/killed";
private static final String CONTAINER_EXIT_CODE_KEY_SUFFIX = "/exitcode";
private static final String CONTAINER_REMAIN_RETRIES_KEY_SUFFIX =
    "/remainingRetryAttempts";
private static final String CONTAINER_WORK_DIR_KEY_SUFFIX = "/workdir";
private static final String CONTAINER_LOG_DIR_KEY_SUFFIX = "/logdir";
// Master-key storage for NM and container tokens.
private static final String CURRENT_MASTER_KEY_SUFFIX = "CurrentMasterKey";
private static final String PREV_MASTER_KEY_SUFFIX = "PreviousMasterKey";
private static final String NEXT_MASTER_KEY_SUFFIX = "NextMasterKey";
private static final String NM_TOKENS_KEY_PREFIX = "NMTokens/";
private static final String NM_TOKENS_CURRENT_MASTER_KEY =
    NM_TOKENS_KEY_PREFIX + CURRENT_MASTER_KEY_SUFFIX;
private static final String NM_TOKENS_PREV_MASTER_KEY =
    NM_TOKENS_KEY_PREFIX + PREV_MASTER_KEY_SUFFIX;
private static final String CONTAINER_TOKENS_KEY_PREFIX =
    "ContainerTokens/";
private static final String CONTAINER_TOKENS_CURRENT_MASTER_KEY =
    CONTAINER_TOKENS_KEY_PREFIX + CURRENT_MASTER_KEY_SUFFIX;
private static final String CONTAINER_TOKENS_PREV_MASTER_KEY =
    CONTAINER_TOKENS_KEY_PREFIX + PREV_MASTER_KEY_SUFFIX;
private static final String LOG_DELETER_KEY_PREFIX = "LogDeleters/";
private static final String AMRMPROXY_KEY_PREFIX = "AMRMProxy/";

// Value stored for marker keys that carry no payload (e.g. "/queued").
private static final byte[] EMPTY_VALUE = new byte[0];

private DB db;                       // LevelDB handle; null until storage is initialized
private boolean isNewlyCreated;      // whether the DB was created (vs. recovered) at init
private Timer compactionTimer;       // periodic DB compaction; cancelled on close

/**
 * Map of containerID vs List of unknown key suffixes.
 */
private ListMultimap<ContainerId, String> containerUnknownKeySuffixes =
    ArrayListMultimap.create();
// Registers the service under its class name; storage is opened later during init.
public NMLeveldbStateStoreService() {
  super(NMLeveldbStateStoreService.class.getName());
}
@Override
protected void startStorage() throws IOException {
  // No-op: nothing additional to start for the LevelDB-backed store.
}
/** Stops the background compaction timer (if any) and closes the database. */
@Override
protected void closeStorage() throws IOException {
  if (compactionTimer != null) {
    compactionTimer.cancel();
    compactionTimer = null;
  }
  if (db != null) {
    db.close();
  }
}
/** @return true if the state store DB was newly created rather than recovered. */
@Override
public boolean isNewlyCreated() {
  return isNewlyCreated;
}
/**
 * Loads the recovered state of all containers from the state store. Containers
 * whose records lack a StartContainerRequest are considered incomplete and are
 * removed from the store instead of being returned.
 *
 * @return recovered container states
 * @throws IOException on state-store errors
 */
@Override
public List<RecoveredContainerState> loadContainersState()
    throws IOException {
  ArrayList<RecoveredContainerState> containers =
      new ArrayList<RecoveredContainerState>();
  ArrayList<ContainerId> containersToRemove =
      new ArrayList<ContainerId>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(CONTAINERS_KEY_PREFIX));
    while (iter.hasNext()) {
      Entry<byte[],byte[]> entry = iter.peekNext();
      String key = asString(entry.getKey());
      if (!key.startsWith(CONTAINERS_KEY_PREFIX)) {
        break; // walked past the containers key space
      }
      // Key layout: CONTAINERS_KEY_PREFIX + containerId + "/" + suffix
      int idEndPos = key.indexOf('/', CONTAINERS_KEY_PREFIX.length());
      if (idEndPos < 0) {
        throw new IOException("Unable to determine container in key: " + key);
      }
      ContainerId containerId = ContainerId.fromString(
          key.substring(CONTAINERS_KEY_PREFIX.length(), idEndPos));
      String keyPrefix = key.substring(0, idEndPos+1);
      // Consumes all entries for this container, advancing the iterator.
      RecoveredContainerState rcs = loadContainerState(containerId,
          iter, keyPrefix);
      // Don't load container without StartContainerRequest
      if (rcs.startRequest != null) {
        containers.add(rcs);
      } else {
        containersToRemove.add(containerId);
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  // remove container without StartContainerRequest
  for (ContainerId containerId : containersToRemove) {
    LOG.warn("Remove container " + containerId +
        " with incomplete records");
    try {
      removeContainer(containerId);
      // TODO: kill and cleanup the leaked container
    } catch (IOException e) {
      LOG.error("Unable to remove container " + containerId +
          " in store", e);
    }
  }
  return containers;
}
/**
 * Reconstructs the recovered state of one container by consuming every DB entry
 * that shares the container's key prefix, advancing the iterator as it goes.
 * Unknown key suffixes mark the container for KILL-type recovery.
 */
private RecoveredContainerState loadContainerState(ContainerId containerId,
    LeveldbIterator iter, String keyPrefix) throws IOException {
  RecoveredContainerState rcs = new RecoveredContainerState();
  rcs.status = RecoveredContainerStatus.REQUESTED;
  while (iter.hasNext()) {
    Entry<byte[],byte[]> kv = iter.peekNext();
    String fullKey = asString(kv.getKey());
    if (!fullKey.startsWith(keyPrefix)) {
      break; // left this container's key range
    }
    iter.next();
    String suffix = fullKey.substring(keyPrefix.length()-1); // start with '/'
    switch (suffix) {
      case CONTAINER_REQUEST_KEY_SUFFIX:
        rcs.startRequest = new StartContainerRequestPBImpl(
            StartContainerRequestProto.parseFrom(kv.getValue()));
        break;
      case CONTAINER_VERSION_KEY_SUFFIX:
        rcs.version = Integer.parseInt(asString(kv.getValue()));
        break;
      case CONTAINER_DIAGS_KEY_SUFFIX:
        rcs.diagnostics = asString(kv.getValue());
        break;
      case CONTAINER_QUEUED_KEY_SUFFIX:
        // Only move forward from REQUESTED; never downgrade a later status.
        if (rcs.status == RecoveredContainerStatus.REQUESTED) {
          rcs.status = RecoveredContainerStatus.QUEUED;
        }
        break;
      case CONTAINER_LAUNCHED_KEY_SUFFIX:
        if ((rcs.status == RecoveredContainerStatus.REQUESTED)
            || (rcs.status == RecoveredContainerStatus.QUEUED)) {
          rcs.status = RecoveredContainerStatus.LAUNCHED;
        }
        break;
      case CONTAINER_KILLED_KEY_SUFFIX:
        rcs.killed = true;
        break;
      case CONTAINER_EXIT_CODE_KEY_SUFFIX:
        rcs.status = RecoveredContainerStatus.COMPLETED;
        rcs.exitCode = Integer.parseInt(asString(kv.getValue()));
        break;
      case CONTAINER_RESOURCE_CHANGED_KEY_SUFFIX:
        rcs.capability = new ResourcePBImpl(
            ResourceProto.parseFrom(kv.getValue()));
        break;
      case CONTAINER_REMAIN_RETRIES_KEY_SUFFIX:
        rcs.setRemainingRetryAttempts(
            Integer.parseInt(asString(kv.getValue())));
        break;
      case CONTAINER_WORK_DIR_KEY_SUFFIX:
        rcs.setWorkDir(asString(kv.getValue()));
        break;
      case CONTAINER_LOG_DIR_KEY_SUFFIX:
        rcs.setLogDir(asString(kv.getValue()));
        break;
      default:
        LOG.warn("the container " + containerId
            + " will be killed because of the unknown key " + fullKey
            + " during recovery.");
        containerUnknownKeySuffixes.put(containerId, suffix);
        rcs.setRecoveryType(RecoveredContainerType.KILL);
    }
  }
  return rcs;
}
/**
 * Persists the start request (and, if non-zero, the container version) for
 * a container in a single atomic WriteBatch so recovery never sees a
 * version without its request.
 *
 * @param containerId the container being stored
 * @param containerVersion version of the container; 0 is the implicit
 *        default and is not written (recovery treats a missing version
 *        key as version 0)
 * @param startRequest the original start request to replay on recovery
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainer(ContainerId containerId, int containerVersion,
    StartContainerRequest startRequest) throws IOException {
  String idStr = containerId.toString();
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainer: containerId= " + idStr
        + ", startRequest= " + startRequest);
  }
  String keyRequest = CONTAINERS_KEY_PREFIX + idStr
      + CONTAINER_REQUEST_KEY_SUFFIX;
  String keyVersion = getContainerVersionKey(idStr);
  try {
    WriteBatch batch = db.createWriteBatch();
    try {
      // Serialize the request via its protobuf form.
      batch.put(bytes(keyRequest),
          ((StartContainerRequestPBImpl) startRequest)
              .getProto().toByteArray());
      // Only store a version entry for non-default versions.
      if (containerVersion != 0) {
        batch.put(bytes(keyVersion),
            bytes(Integer.toString(containerVersion)));
      }
      db.write(batch);
    } finally {
      batch.close();
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Builds the database key under which a container's version is stored.
 *
 * @param containerId string form of the container id
 * @return the fully-qualified version key for the container
 */
@VisibleForTesting
String getContainerVersionKey(String containerId) {
  return CONTAINERS_KEY_PREFIX + containerId + CONTAINER_VERSION_KEY_SUFFIX;
}
/**
 * Records that a container was queued by writing an empty marker value
 * under the container's "queued" sub-key.
 *
 * @param containerId the container that was queued
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerQueued(ContainerId containerId) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerQueued: containerId=" + containerId);
  }
  final String queuedKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_QUEUED_KEY_SUFFIX;
  try {
    db.put(bytes(queuedKey), EMPTY_VALUE);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Stores (overwriting any previous value) the accumulated diagnostics
 * string for a container.
 *
 * @param containerId the container the diagnostics belong to
 * @param diagnostics full diagnostics text; its string form is persisted
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerDiagnostics(ContainerId containerId,
    StringBuilder diagnostics) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerDiagnostics: containerId=" + containerId
        + ", diagnostics=" + diagnostics);
  }
  final String diagKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_DIAGS_KEY_SUFFIX;
  try {
    db.put(bytes(diagKey), bytes(diagnostics.toString()));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Records that a container was launched by writing an empty marker value
 * under the container's "launched" sub-key.
 *
 * @param containerId the container that was launched
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerLaunched(ContainerId containerId)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerLaunched: containerId=" + containerId);
  }
  final String launchedKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_LAUNCHED_KEY_SUFFIX;
  try {
    db.put(bytes(launchedKey), EMPTY_VALUE);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Persists a container resource change: the new capability and the bumped
 * container version are written atomically in one WriteBatch so recovery
 * never observes one without the other.
 *
 * @param containerId the container whose resources changed
 * @param containerVersion new version to record for the container
 * @param capability new resource capability
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerResourceChanged(ContainerId containerId,
    int containerVersion, Resource capability) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerResourceChanged: containerId=" + containerId
        + ", capability=" + capability);
  }
  String keyResChng = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_RESOURCE_CHANGED_KEY_SUFFIX;
  String keyVersion = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_VERSION_KEY_SUFFIX;
  try {
    WriteBatch batch = db.createWriteBatch();
    try {
      // New value will overwrite old values for the same key
      batch.put(bytes(keyResChng),
          ProtoUtils.convertToProtoFormat(capability).toByteArray());
      batch.put(bytes(keyVersion), bytes(Integer.toString(containerVersion)));
      db.write(batch);
    } finally {
      batch.close();
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Records that a container was killed by writing an empty marker value
 * under the container's "killed" sub-key.
 *
 * @param containerId the container that was killed
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerKilled(ContainerId containerId)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerKilled: containerId=" + containerId);
  }
  final String killedKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_KILLED_KEY_SUFFIX;
  try {
    db.put(bytes(killedKey), EMPTY_VALUE);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Records that a container completed by persisting its exit code; the
 * presence of the exit-code key marks the container COMPLETED on recovery.
 *
 * @param containerId the container that completed
 * @param exitCode the container's exit code
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerCompleted(ContainerId containerId,
    int exitCode) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeContainerCompleted: containerId=" + containerId);
  }
  final String exitCodeKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_EXIT_CODE_KEY_SUFFIX;
  try {
    db.put(bytes(exitCodeKey), bytes(Integer.toString(exitCode)));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Persists the number of retry attempts still available to a container so
 * the retry budget survives an NM restart.
 *
 * @param containerId the container being tracked
 * @param remainingRetryAttempts retries left for the container
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerRemainingRetryAttempts(ContainerId containerId,
    int remainingRetryAttempts) throws IOException {
  final String retriesKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_REMAIN_RETRIES_KEY_SUFFIX;
  try {
    db.put(bytes(retriesKey), bytes(Integer.toString(remainingRetryAttempts)));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Persists the working directory assigned to a container.
 *
 * @param containerId the container being tracked
 * @param workDir path of the container's work directory
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerWorkDir(ContainerId containerId,
    String workDir) throws IOException {
  final String workDirKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_WORK_DIR_KEY_SUFFIX;
  try {
    db.put(bytes(workDirKey), bytes(workDir));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Persists the log directory assigned to a container.
 *
 * @param containerId the container being tracked
 * @param logDir path of the container's log directory
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerLogDir(ContainerId containerId,
    String logDir) throws IOException {
  final String logDirKey = CONTAINERS_KEY_PREFIX + containerId.toString()
      + CONTAINER_LOG_DIR_KEY_SUFFIX;
  try {
    db.put(bytes(logDirKey), bytes(logDir));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Atomically deletes every known sub-key for a container, plus any unknown
 * key suffixes that were recorded for it during recovery, in a single
 * WriteBatch.
 *
 * NOTE(review): the batch deletes the known suffixes explicitly rather
 * than iterating the prefix, so suffixes not listed here (e.g. version,
 * resource-changed, work/log dir) appear to be left behind unless they
 * were captured in containerUnknownKeySuffixes — confirm against the
 * recovery path.
 *
 * @param containerId the container whose state should be removed
 * @throws IOException if the leveldb write fails
 */
@Override
public void removeContainer(ContainerId containerId)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("removeContainer: containerId=" + containerId);
  }
  String keyPrefix = CONTAINERS_KEY_PREFIX + containerId.toString();
  try {
    WriteBatch batch = db.createWriteBatch();
    try {
      batch.delete(bytes(keyPrefix + CONTAINER_REQUEST_KEY_SUFFIX));
      batch.delete(bytes(keyPrefix + CONTAINER_DIAGS_KEY_SUFFIX));
      batch.delete(bytes(keyPrefix + CONTAINER_LAUNCHED_KEY_SUFFIX));
      batch.delete(bytes(keyPrefix + CONTAINER_QUEUED_KEY_SUFFIX));
      batch.delete(bytes(keyPrefix + CONTAINER_KILLED_KEY_SUFFIX));
      batch.delete(bytes(keyPrefix + CONTAINER_EXIT_CODE_KEY_SUFFIX));
      // Also drop any unknown suffixes noted during recovery; removeAll
      // both returns and clears the entries for this container.
      List<String> unknownKeysForContainer = containerUnknownKeySuffixes
          .removeAll(containerId);
      for (String unknownKeySuffix : unknownKeysForContainer) {
        batch.delete(bytes(keyPrefix + unknownKeySuffix));
      }
      db.write(batch);
    } finally {
      batch.close();
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Loads all stored application protos by scanning every key under the
 * applications prefix. Also opportunistically purges the deprecated
 * finished-apps keys once the scan completes.
 *
 * @return recovered applications state (never null)
 * @throws IOException if the leveldb scan or proto parsing fails
 */
@Override
public RecoveredApplicationsState loadApplicationsState()
    throws IOException {
  RecoveredApplicationsState state = new RecoveredApplicationsState();
  state.applications = new ArrayList<ContainerManagerApplicationProto>();
  String keyPrefix = APPLICATIONS_KEY_PREFIX;
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    // Keys are sorted, so seek to the prefix and read until it no longer
    // matches.
    iter.seek(bytes(keyPrefix));
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String key = asString(entry.getKey());
      if (!key.startsWith(keyPrefix)) {
        break;
      }
      state.applications.add(
          ContainerManagerApplicationProto.parseFrom(entry.getValue()));
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  // Best-effort cleanup of keys written by older NM versions.
  cleanupDeprecatedFinishedApps();
  return state;
}
/**
 * Persists the serialized application context proto for an application.
 *
 * @param appId the application being stored
 * @param p proto describing the application context
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeApplication(ApplicationId appId,
    ContainerManagerApplicationProto p) throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("storeApplication: appId=" + appId
        + ", proto=" + p);
  }
  final String appKey = APPLICATIONS_KEY_PREFIX + appId;
  try {
    db.put(bytes(appKey), p.toByteArray());
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes the stored application context for the given application.
 *
 * Uses a plain single-key delete instead of the previous WriteBatch: only
 * one key is involved, so the batch added allocation and close() overhead
 * without any atomicity benefit. This also matches the style of the other
 * single-key removals (removeDeletionTask, removeContainerToken,
 * removeLogDeleter).
 *
 * @param appId the application whose state entry should be removed
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeApplication(ApplicationId appId)
    throws IOException {
  if (LOG.isDebugEnabled()) {
    LOG.debug("removeApplication: appId=" + appId);
  }
  String key = APPLICATIONS_KEY_PREFIX + appId;
  try {
    db.delete(bytes(key));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Recovers localization state: the public resource tracker plus one
 * tracker set per user found under the private prefix.
 *
 * The single iterator is shared with the nested load helpers; each helper
 * consumes exactly the entries under its prefix and leaves the iterator
 * positioned at the first non-matching key, which is why this loop calls
 * peekNext() rather than next().
 *
 * @return recovered localization state (never null)
 * @throws IOException if the scan fails or a key is malformed
 */
@Override
public RecoveredLocalizationState loadLocalizationState()
    throws IOException {
  RecoveredLocalizationState state = new RecoveredLocalizationState();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(LOCALIZATION_PUBLIC_KEY_PREFIX));
    state.publicTrackerState = loadResourceTrackerState(iter,
        LOCALIZATION_PUBLIC_KEY_PREFIX);
    iter.seek(bytes(LOCALIZATION_PRIVATE_KEY_PREFIX));
    while (iter.hasNext()) {
      // Peek only: loadUserLocalizedResources advances the iterator.
      Entry<byte[],byte[]> entry = iter.peekNext();
      String key = asString(entry.getKey());
      if (!key.startsWith(LOCALIZATION_PRIVATE_KEY_PREFIX)) {
        break;
      }
      // Key layout: <private-prefix><user>/..., so the user name ends at
      // the first '/' after the prefix.
      int userEndPos = key.indexOf('/',
          LOCALIZATION_PRIVATE_KEY_PREFIX.length());
      if (userEndPos < 0) {
        throw new IOException("Unable to determine user in resource key: "
            + key);
      }
      String user = key.substring(
          LOCALIZATION_PRIVATE_KEY_PREFIX.length(), userEndPos);
      state.userResources.put(user, loadUserLocalizedResources(iter,
          key.substring(0, userEndPos+1)));
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  return state;
}
/**
 * Loads one resource tracker's state (completed and in-progress resources)
 * from the entries under the given key prefix.
 *
 * The iterator is shared: the nested loaders consume all entries matching
 * their sub-prefix and leave the iterator at the first non-matching key,
 * so this method only peeks to decide which loader to dispatch to.
 *
 * @param iter shared iterator, positioned at or before the first entry
 *        under keyPrefix
 * @param keyPrefix tracker prefix (must end with '/')
 * @return the tracker state
 * @throws IOException on an unexpected key or parse failure
 */
private LocalResourceTrackerState loadResourceTrackerState(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  final String completedPrefix = keyPrefix + LOCALIZATION_COMPLETED_SUFFIX;
  final String startedPrefix = keyPrefix + LOCALIZATION_STARTED_SUFFIX;
  LocalResourceTrackerState state = new LocalResourceTrackerState();
  while (iter.hasNext()) {
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    if (key.startsWith(completedPrefix)) {
      state.localizedResources = loadCompletedResources(iter,
          completedPrefix);
    } else if (key.startsWith(startedPrefix)) {
      state.inProgressResources = loadStartedResources(iter, startedPrefix);
    } else {
      // Any other key under this prefix indicates a corrupt or
      // incompatible store.
      throw new IOException("Unexpected key in resource tracker state: "
          + key);
    }
  }
  return state;
}
/**
 * Reads all completed-resource protos under the given prefix, advancing
 * the shared iterator past each consumed entry.
 *
 * @param iter shared iterator positioned at the first completed entry
 * @param keyPrefix completed-resource key prefix
 * @return list of localized resource protos (possibly empty)
 * @throws IOException if proto parsing fails
 */
private List<LocalizedResourceProto> loadCompletedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  List<LocalizedResourceProto> rsrcs =
      new ArrayList<LocalizedResourceProto>();
  while (iter.hasNext()) {
    // Peek first so a non-matching key is left for the caller.
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading completed resource from " + key);
    }
    rsrcs.add(LocalizedResourceProto.parseFrom(entry.getValue()));
    iter.next();
  }
  return rsrcs;
}
/**
 * Reads all in-progress resource protos under the given prefix. The local
 * path is encoded as the remainder of the key after the prefix; the value
 * holds the LocalResourceProto.
 *
 * @param iter shared iterator positioned at the first started entry
 * @param keyPrefix started-resource key prefix
 * @return map of resource proto to its target local path
 * @throws IOException if proto parsing fails
 */
private Map<LocalResourceProto, Path> loadStartedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  Map<LocalResourceProto, Path> rsrcs =
      new HashMap<LocalResourceProto, Path>();
  while (iter.hasNext()) {
    // Peek first so a non-matching key is left for the caller.
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    Path localPath = new Path(key.substring(keyPrefix.length()));
    if (LOG.isDebugEnabled()) {
      LOG.debug("Loading in-progress resource at " + localPath);
    }
    rsrcs.put(LocalResourceProto.parseFrom(entry.getValue()), localPath);
    iter.next();
  }
  return rsrcs;
}
/**
 * Loads one user's localized resources: the private file-cache tracker
 * plus one tracker per application found under the user's app-cache.
 *
 * @param iter shared iterator positioned at the user's first entry
 * @param keyPrefix the user's key prefix (must end with '/')
 * @return the user's recovered resources
 * @throws IOException on an unexpected key or parse failure
 */
private RecoveredUserResources loadUserLocalizedResources(
    LeveldbIterator iter, String keyPrefix) throws IOException {
  RecoveredUserResources userResources = new RecoveredUserResources();
  while (iter.hasNext()) {
    // Peek only: the nested tracker loads advance the iterator.
    Entry<byte[],byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    if (key.startsWith(LOCALIZATION_FILECACHE_SUFFIX, keyPrefix.length())) {
      userResources.privateTrackerState = loadResourceTrackerState(iter,
          keyPrefix + LOCALIZATION_FILECACHE_SUFFIX);
    } else if (key.startsWith(LOCALIZATION_APPCACHE_SUFFIX,
        keyPrefix.length())) {
      // Key layout: <user-prefix><appcache-suffix><appId>/..., so the
      // appId sits between the suffix and the next '/'.
      int appIdStartPos = keyPrefix.length() +
          LOCALIZATION_APPCACHE_SUFFIX.length();
      int appIdEndPos = key.indexOf('/', appIdStartPos);
      if (appIdEndPos < 0) {
        throw new IOException("Unable to determine appID in resource key: "
            + key);
      }
      ApplicationId appId = ApplicationId.fromString(
          key.substring(appIdStartPos, appIdEndPos));
      userResources.appTrackerStates.put(appId,
          loadResourceTrackerState(iter, key.substring(0, appIdEndPos+1)));
    } else {
      throw new IOException("Unexpected user resource key " + key);
    }
  }
  return userResources;
}
/**
 * Records that localization of a resource has begun by storing the
 * resource proto under the "started" key for the target local path.
 *
 * @param user resource owner, or null for a public resource
 * @param appId owning application, or null for user-private resources
 * @param proto the resource being localized
 * @param localPath destination path for the resource
 * @throws IOException if the leveldb write fails
 */
@Override
public void startResourceLocalization(String user, ApplicationId appId,
    LocalResourceProto proto, Path localPath) throws IOException {
  final String startedKey =
      getResourceStartedKey(user, appId, localPath.toString());
  try {
    db.put(bytes(startedKey), proto.toByteArray());
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Marks a resource localization as finished: atomically removes the
 * "started" entry and writes the "completed" entry in one WriteBatch so
 * recovery never sees the resource in both (or neither) state.
 *
 * @param user resource owner, or null for a public resource
 * @param appId owning application, or null for user-private resources
 * @param proto completed-resource proto; its local path selects the keys
 * @throws IOException if the leveldb write fails
 */
@Override
public void finishResourceLocalization(String user, ApplicationId appId,
    LocalizedResourceProto proto) throws IOException {
  String localPath = proto.getLocalPath();
  String startedKey = getResourceStartedKey(user, appId, localPath);
  String completedKey = getResourceCompletedKey(user, appId, localPath);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Storing localized resource to " + completedKey);
  }
  try {
    WriteBatch batch = db.createWriteBatch();
    try {
      batch.delete(bytes(startedKey));
      batch.put(bytes(completedKey), proto.toByteArray());
      db.write(batch);
    } finally {
      batch.close();
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes all state for a localized resource: both the "started" and
 * "completed" entries are deleted atomically (only one can exist, but
 * deleting both is harmless and avoids needing to know which).
 *
 * @param user resource owner, or null for a public resource
 * @param appId owning application, or null for user-private resources
 * @param localPath local path identifying the resource
 * @throws IOException if the leveldb write fails
 */
@Override
public void removeLocalizedResource(String user, ApplicationId appId,
    Path localPath) throws IOException {
  String localPathStr = localPath.toString();
  String startedKey = getResourceStartedKey(user, appId, localPathStr);
  String completedKey = getResourceCompletedKey(user, appId, localPathStr);
  if (LOG.isDebugEnabled()) {
    LOG.debug("Removing local resource at " + localPathStr);
  }
  try {
    WriteBatch batch = db.createWriteBatch();
    try {
      batch.delete(bytes(startedKey));
      batch.delete(bytes(completedKey));
      db.write(batch);
    } finally {
      batch.close();
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Builds the "localization started" key for a resource.
 *
 * @param user resource owner, or null for a public resource
 * @param appId owning application, or null for user-private resources
 * @param localPath local path identifying the resource
 * @return the started-state database key
 */
private String getResourceStartedKey(String user, ApplicationId appId,
    String localPath) {
  String trackerPrefix = getResourceTrackerKeyPrefix(user, appId);
  return trackerPrefix + LOCALIZATION_STARTED_SUFFIX + localPath;
}
/**
 * Builds the "localization completed" key for a resource.
 *
 * @param user resource owner, or null for a public resource
 * @param appId owning application, or null for user-private resources
 * @param localPath local path identifying the resource
 * @return the completed-state database key
 */
private String getResourceCompletedKey(String user, ApplicationId appId,
    String localPath) {
  String trackerPrefix = getResourceTrackerKeyPrefix(user, appId);
  return trackerPrefix + LOCALIZATION_COMPLETED_SUFFIX + localPath;
}
/**
 * Selects the key prefix for the resource tracker that owns a resource:
 * public, user-private file cache, or per-application app cache.
 *
 * @param user resource owner; null selects the public tracker
 * @param appId owning application; null selects the user's file cache
 * @return the tracker's database key prefix
 */
private String getResourceTrackerKeyPrefix(String user,
    ApplicationId appId) {
  if (user == null) {
    // No user: this is a publicly-shared resource.
    return LOCALIZATION_PUBLIC_KEY_PREFIX;
  }
  String userPrefix = LOCALIZATION_PRIVATE_KEY_PREFIX + user + "/";
  if (appId == null) {
    // User but no app: the user's private file cache.
    return userPrefix + LOCALIZATION_FILECACHE_SUFFIX;
  }
  // Both user and app: the per-application cache.
  return userPrefix + LOCALIZATION_APPCACHE_SUFFIX + appId + "/";
}
/**
 * Loads all pending deletion-service tasks by scanning every key under the
 * deletion-task prefix.
 *
 * @return recovered deletion service state (never null)
 * @throws IOException if the leveldb scan or proto parsing fails
 */
@Override
public RecoveredDeletionServiceState loadDeletionServiceState()
    throws IOException {
  RecoveredDeletionServiceState state = new RecoveredDeletionServiceState();
  state.tasks = new ArrayList<DeletionServiceDeleteTaskProto>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(DELETION_TASK_KEY_PREFIX));
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String key = asString(entry.getKey());
      // Keys are sorted; stop at the first key outside the prefix.
      if (!key.startsWith(DELETION_TASK_KEY_PREFIX)) {
        break;
      }
      state.tasks.add(
          DeletionServiceDeleteTaskProto.parseFrom(entry.getValue()));
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  return state;
}
/**
 * Persists a deletion-service task proto keyed by its task id.
 *
 * @param taskId unique id of the deletion task
 * @param taskProto serialized task description
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeDeletionTask(int taskId,
    DeletionServiceDeleteTaskProto taskProto) throws IOException {
  final String taskKey = DELETION_TASK_KEY_PREFIX + taskId;
  try {
    db.put(bytes(taskKey), taskProto.toByteArray());
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes a previously stored deletion-service task.
 *
 * @param taskId unique id of the deletion task
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeDeletionTask(int taskId) throws IOException {
  final String taskKey = DELETION_TASK_KEY_PREFIX + taskId;
  try {
    db.delete(bytes(taskKey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Recovers NM token secret-manager state: the current and previous master
 * keys plus the per-application-attempt master keys.
 *
 * Keys that are neither of the well-known suffixes nor an app-attempt id
 * are silently skipped (tolerates unknown entries from other versions).
 *
 * @return recovered NM tokens state (never null)
 * @throws IOException if the scan fails or an attempt key is malformed
 */
@Override
public RecoveredNMTokensState loadNMTokensState() throws IOException {
  RecoveredNMTokensState state = new RecoveredNMTokensState();
  state.applicationMasterKeys =
      new HashMap<ApplicationAttemptId, MasterKey>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(NM_TOKENS_KEY_PREFIX));
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String fullKey = asString(entry.getKey());
      if (!fullKey.startsWith(NM_TOKENS_KEY_PREFIX)) {
        break;
      }
      // Dispatch on the part of the key after the prefix.
      String key = fullKey.substring(NM_TOKENS_KEY_PREFIX.length());
      if (key.equals(CURRENT_MASTER_KEY_SUFFIX)) {
        state.currentMasterKey = parseMasterKey(entry.getValue());
      } else if (key.equals(PREV_MASTER_KEY_SUFFIX)) {
        state.previousMasterKey = parseMasterKey(entry.getValue());
      } else if (key.startsWith(
          ApplicationAttemptId.appAttemptIdStrPrefix)) {
        ApplicationAttemptId attempt;
        try {
          attempt = ApplicationAttemptId.fromString(key);
        } catch (IllegalArgumentException e) {
          // A key that looks like an attempt id but won't parse means the
          // store is corrupt; fail recovery rather than silently drop it.
          throw new IOException("Bad application master key state for "
              + fullKey, e);
        }
        state.applicationMasterKeys.put(attempt,
            parseMasterKey(entry.getValue()));
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  return state;
}
/**
 * Persists the current NM token master key.
 *
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeNMTokenCurrentMasterKey(MasterKey key)
    throws IOException {
  storeMasterKey(NM_TOKENS_CURRENT_MASTER_KEY, key);
}
/**
 * Persists the previous NM token master key.
 *
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeNMTokenPreviousMasterKey(MasterKey key)
    throws IOException {
  storeMasterKey(NM_TOKENS_PREV_MASTER_KEY, key);
}
/**
 * Persists the NM token master key for a specific application attempt.
 *
 * @param attempt the application attempt the key belongs to
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeNMTokenApplicationMasterKey(
    ApplicationAttemptId attempt, MasterKey key) throws IOException {
  storeMasterKey(NM_TOKENS_KEY_PREFIX + attempt, key);
}
/**
 * Removes the stored NM token master key for an application attempt.
 *
 * @param attempt the application attempt whose key should be removed
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeNMTokenApplicationMasterKey(
    ApplicationAttemptId attempt) throws IOException {
  final String attemptKey = NM_TOKENS_KEY_PREFIX + attempt;
  try {
    db.delete(bytes(attemptKey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Deserializes a master key from its stored protobuf bytes.
 *
 * @param keyData serialized MasterKeyProto bytes
 * @return the deserialized master key
 * @throws IOException if the proto cannot be parsed
 */
private MasterKey parseMasterKey(byte[] keyData) throws IOException {
  return new MasterKeyPBImpl(MasterKeyProto.parseFrom(keyData));
}
/**
 * Serializes a master key via its protobuf form and stores it under the
 * given database key.
 *
 * @param dbKey database key to store under
 * @param key master key to serialize (must be a MasterKeyPBImpl)
 * @throws IOException if the leveldb write fails
 */
private void storeMasterKey(String dbKey, MasterKey key)
    throws IOException {
  byte[] serialized = ((MasterKeyPBImpl) key).getProto().toByteArray();
  try {
    db.put(bytes(dbKey), serialized);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Recovers container-token secret-manager state: the current and previous
 * master keys plus the expiration time of each active container token.
 *
 * Unrecognized keys under the prefix are silently skipped.
 *
 * @return recovered container tokens state (never null)
 * @throws IOException if the scan fails or an entry is malformed
 */
@Override
public RecoveredContainerTokensState loadContainerTokensState()
    throws IOException {
  RecoveredContainerTokensState state = new RecoveredContainerTokensState();
  state.activeTokens = new HashMap<ContainerId, Long>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(CONTAINER_TOKENS_KEY_PREFIX));
    // Hoisted out of the loop; the prefix length is constant.
    final int containerTokensKeyPrefixLength =
        CONTAINER_TOKENS_KEY_PREFIX.length();
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String fullKey = asString(entry.getKey());
      if (!fullKey.startsWith(CONTAINER_TOKENS_KEY_PREFIX)) {
        break;
      }
      // Dispatch on the part of the key after the prefix.
      String key = fullKey.substring(containerTokensKeyPrefixLength);
      if (key.equals(CURRENT_MASTER_KEY_SUFFIX)) {
        state.currentMasterKey = parseMasterKey(entry.getValue());
      } else if (key.equals(PREV_MASTER_KEY_SUFFIX)) {
        state.previousMasterKey = parseMasterKey(entry.getValue());
      } else if (key.startsWith(ConverterUtils.CONTAINER_PREFIX)) {
        loadContainerToken(state, fullKey, key, entry.getValue());
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  return state;
}
/**
 * Parses a single container-token entry and records it as active.
 *
 * @param state state object to record the token into
 * @param key full database key (used only for error reporting)
 * @param containerIdStr container id portion of the key
 * @param value stored expiration time as a decimal string
 * @throws IOException if the id or expiration time is malformed
 */
private static void loadContainerToken(RecoveredContainerTokensState state,
    String key, String containerIdStr, byte[] value) throws IOException {
  final ContainerId containerId;
  final long expTime;
  try {
    containerId = ContainerId.fromString(containerIdStr);
    // NumberFormatException is an IllegalArgumentException, so both
    // parse failures are handled by the catch below.
    expTime = Long.parseLong(asString(value));
  } catch (IllegalArgumentException e) {
    throw new IOException("Bad container token state for " + key, e);
  }
  state.activeTokens.put(containerId, expTime);
}
/**
 * Persists the current container-token master key.
 *
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerTokenCurrentMasterKey(MasterKey key)
    throws IOException {
  storeMasterKey(CONTAINER_TOKENS_CURRENT_MASTER_KEY, key);
}
/**
 * Persists the previous container-token master key.
 *
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerTokenPreviousMasterKey(MasterKey key)
    throws IOException {
  storeMasterKey(CONTAINER_TOKENS_PREV_MASTER_KEY, key);
}
/**
 * Persists a container token's expiration time keyed by container id.
 *
 * @param containerId the container the token belongs to
 * @param expTime token expiration time in milliseconds
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeContainerToken(ContainerId containerId, Long expTime)
    throws IOException {
  final String tokenKey = CONTAINER_TOKENS_KEY_PREFIX + containerId;
  try {
    db.put(bytes(tokenKey), bytes(expTime.toString()));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes the stored token expiration entry for a container.
 *
 * @param containerId the container whose token entry should be removed
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeContainerToken(ContainerId containerId)
    throws IOException {
  final String tokenKey = CONTAINER_TOKENS_KEY_PREFIX + containerId;
  try {
    db.delete(bytes(tokenKey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Recovers log-deleter state: one LogDeleterProto per application found
 * under the log-deleter prefix. Entries whose key does not parse as an
 * application id are logged and skipped rather than failing recovery.
 *
 * @return recovered log deleter state (never null)
 * @throws IOException if the scan or proto parsing fails
 */
@Override
public RecoveredLogDeleterState loadLogDeleterState() throws IOException {
  RecoveredLogDeleterState state = new RecoveredLogDeleterState();
  state.logDeleterMap = new HashMap<ApplicationId, LogDeleterProto>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(LOG_DELETER_KEY_PREFIX));
    final int logDeleterKeyPrefixLength = LOG_DELETER_KEY_PREFIX.length();
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String fullKey = asString(entry.getKey());
      if (!fullKey.startsWith(LOG_DELETER_KEY_PREFIX)) {
        break;
      }
      String appIdStr = fullKey.substring(logDeleterKeyPrefixLength);
      ApplicationId appId = null;
      try {
        appId = ApplicationId.fromString(appIdStr);
      } catch (IllegalArgumentException e) {
        // Tolerate unknown keys: warn and move on instead of aborting.
        LOG.warn("Skipping unknown log deleter key " + fullKey);
        continue;
      }
      LogDeleterProto proto = LogDeleterProto.parseFrom(entry.getValue());
      state.logDeleterMap.put(appId, proto);
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  return state;
}
/**
 * Persists the log-deleter proto for an application.
 *
 * @param appId the application the log deleter tracks
 * @param proto serialized log deleter state
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeLogDeleter(ApplicationId appId, LogDeleterProto proto)
    throws IOException {
  final String deleterKey = getLogDeleterKey(appId);
  try {
    db.put(bytes(deleterKey), proto.toByteArray());
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes the stored log-deleter entry for an application.
 *
 * @param appId the application whose log deleter entry should be removed
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeLogDeleter(ApplicationId appId) throws IOException {
  final String deleterKey = getLogDeleterKey(appId);
  try {
    db.delete(bytes(deleterKey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Best-effort removal of the deprecated finished-apps keys left behind by
 * older NM versions. Failures are logged and swallowed deliberately:
 * stale keys are harmless and must not abort recovery.
 */
@SuppressWarnings("deprecation")
private void cleanupDeprecatedFinishedApps() {
  try {
    cleanupKeysWithPrefix(FINISHED_APPS_KEY_PREFIX);
  } catch (Exception e) {
    // Intentionally non-fatal; see javadoc.
    LOG.warn("cleanup keys with prefix " + FINISHED_APPS_KEY_PREFIX +
        " from leveldb failed", e);
  }
}
/**
 * Deletes every key under the given prefix in a single WriteBatch.
 *
 * Uses nested try/finally so both the iterator and the batch are always
 * closed, in the right order, whether or not the scan or write throws.
 *
 * @param prefix key prefix whose entries should all be removed
 * @throws IOException if the scan or batched delete fails
 */
private void cleanupKeysWithPrefix(String prefix) throws IOException {
  WriteBatch batch = null;
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    try {
      batch = db.createWriteBatch();
      iter.seek(bytes(prefix));
      while (iter.hasNext()) {
        byte[] key = iter.next().getKey();
        String keyStr = asString(key);
        // Keys are sorted; stop at the first key outside the prefix.
        if (!keyStr.startsWith(prefix)) {
          break;
        }
        batch.delete(key);
        if (LOG.isDebugEnabled()) {
          LOG.debug("cleanup " + keyStr + " from leveldb");
        }
      }
      db.write(batch);
    } catch (DBException e) {
      throw new IOException(e);
    } finally {
      if (batch != null) {
        batch.close();
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
}
/**
 * Builds the database key under which an application's log-deleter state
 * is stored.
 *
 * @param appId the application
 * @return the log-deleter database key
 */
private String getLogDeleterKey(ApplicationId appId) {
  return LOG_DELETER_KEY_PREFIX + appId;
}
/**
 * Recovers AMRMProxy state: the current and next master keys plus the
 * per-application-attempt context maps.
 *
 * The loop peeks before consuming because the nested context-map loader
 * advances the shared iterator itself. Keys that cannot be parsed are
 * collected and deleted after the scan (the iterator does not support
 * remove()), so stale entries from other versions do not break recovery.
 *
 * @return recovered AMRMProxy state (never null)
 * @throws IOException if the scan, parsing, or cleanup deletes fail
 */
@Override
public RecoveredAMRMProxyState loadAMRMProxyState() throws IOException {
  RecoveredAMRMProxyState result = new RecoveredAMRMProxyState();
  Set<String> unknownKeys = new HashSet<>();
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(AMRMPROXY_KEY_PREFIX));
    while (iter.hasNext()) {
      // Peek only: each branch decides how far to advance.
      Entry<byte[], byte[]> entry = iter.peekNext();
      String key = asString(entry.getKey());
      if (!key.startsWith(AMRMPROXY_KEY_PREFIX)) {
        break;
      }
      String suffix = key.substring(AMRMPROXY_KEY_PREFIX.length());
      if (suffix.equals(CURRENT_MASTER_KEY_SUFFIX)) {
        iter.next();
        result.setCurrentMasterKey(parseMasterKey(entry.getValue()));
        LOG.info("Recovered for AMRMProxy: current master key id "
            + result.getCurrentMasterKey().getKeyId());
      } else if (suffix.equals(NEXT_MASTER_KEY_SUFFIX)) {
        iter.next();
        result.setNextMasterKey(parseMasterKey(entry.getValue()));
        LOG.info("Recovered for AMRMProxy: next master key id "
            + result.getNextMasterKey().getKeyId());
      } else { // Load AMRMProxy application context map for an app attempt
        // Parse appAttemptId, also handle the unknown keys
        int idEndPos;
        ApplicationAttemptId attemptId;
        try {
          idEndPos = key.indexOf('/', AMRMPROXY_KEY_PREFIX.length());
          if (idEndPos < 0) {
            throw new IOException(
                "Unable to determine attemptId in key: " + key);
          }
          attemptId = ApplicationAttemptId.fromString(
              key.substring(AMRMPROXY_KEY_PREFIX.length(), idEndPos));
        } catch (Exception e) {
          // Try to move on for back-forward compatibility
          LOG.warn("Unknown key " + key + ", remove and move on", e);
          // Do this because iter.remove() is not supported here
          unknownKeys.add(key);
          // NOTE(review): the iterator is not advanced here, so this
          // relies on loadAMRMProxyAppContextMap (or the next peek) not
          // re-seeing the same key — confirm iter.next() isn't needed.
          continue;
        }
        // Parse the context map for the appAttemptId
        Map<String, byte[]> appContext =
            loadAMRMProxyAppContextMap(iter, key.substring(0, idEndPos + 1));
        result.getAppContexts().put(attemptId, appContext);
        LOG.info("Recovered for AMRMProxy: " + attemptId + ", map size "
            + appContext.size());
      }
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  // Delete all unknown keys
  try {
    for (String key : unknownKeys) {
      db.delete(bytes(key));
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
  return result;
}
/**
 * Reads all context entries under one app attempt's prefix into a map of
 * suffix to value bytes, advancing the shared iterator past each consumed
 * entry. Values are defensively copied since leveldb may reuse buffers.
 *
 * @param iter shared iterator positioned at the attempt's first entry
 * @param keyPrefix the attempt's key prefix (must end with '/')
 * @return map of key suffix to a private copy of the value bytes
 * @throws IOException declared for symmetry with the other loaders
 */
private Map<String, byte[]> loadAMRMProxyAppContextMap(LeveldbIterator iter,
    String keyPrefix) throws IOException {
  Map<String, byte[]> appContextMap = new HashMap<>();
  while (iter.hasNext()) {
    // Peek first so a non-matching key is left for the caller.
    Entry<byte[], byte[]> entry = iter.peekNext();
    String key = asString(entry.getKey());
    if (!key.startsWith(keyPrefix)) {
      break;
    }
    iter.next();
    String suffix = key.substring(keyPrefix.length());
    byte[] data = entry.getValue();
    appContextMap.put(suffix, Arrays.copyOf(data, data.length));
  }
  return appContextMap;
}
/**
 * Persists the current AMRMProxy master key.
 *
 * @param key master key to store
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeAMRMProxyCurrentMasterKey(MasterKey key) throws IOException {
  storeMasterKey(AMRMPROXY_KEY_PREFIX + CURRENT_MASTER_KEY_SUFFIX, key);
}
/**
 * Persists the next AMRMProxy master key, or deletes the stored entry when
 * the key is null (null signals that no next key is pending).
 *
 * @param key master key to store, or null to clear the entry
 * @throws IOException if the leveldb write or delete fails
 */
@Override
public void storeAMRMProxyNextMasterKey(MasterKey key) throws IOException {
  final String dbkey = AMRMPROXY_KEY_PREFIX + NEXT_MASTER_KEY_SUFFIX;
  if (key != null) {
    storeMasterKey(dbkey, key);
    return;
  }
  // When key is null, delete the entry instead
  try {
    db.delete(bytes(dbkey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Persists one AMRMProxy context entry for an application attempt.
 *
 * @param attempt the attempt the entry belongs to
 * @param key entry name within the attempt's context map
 * @param data raw entry value
 * @throws IOException if the leveldb write fails
 */
@Override
public void storeAMRMProxyAppContextEntry(ApplicationAttemptId attempt,
    String key, byte[] data) throws IOException {
  final String entryKey = AMRMPROXY_KEY_PREFIX + attempt + "/" + key;
  try {
    db.put(bytes(entryKey), data);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes one AMRMProxy context entry for an application attempt.
 *
 * @param attempt the attempt the entry belongs to
 * @param key entry name within the attempt's context map
 * @throws IOException if the leveldb delete fails
 */
@Override
public void removeAMRMProxyAppContextEntry(ApplicationAttemptId attempt,
    String key) throws IOException {
  final String entryKey = AMRMPROXY_KEY_PREFIX + attempt + "/" + key;
  try {
    db.delete(bytes(entryKey));
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Removes every AMRMProxy context entry for an application attempt.
 *
 * Collects matching keys during the scan and deletes them afterwards
 * because the iterator does not support remove() and deleting while
 * iterating a leveldb snapshot would not be reflected anyway.
 *
 * @param attempt the attempt whose context should be removed
 * @throws IOException if the scan or any delete fails
 */
@Override
public void removeAMRMProxyAppContext(ApplicationAttemptId attempt)
    throws IOException {
  Set<String> candidates = new HashSet<>();
  String keyPrefix = AMRMPROXY_KEY_PREFIX + attempt + "/";
  LeveldbIterator iter = null;
  try {
    iter = new LeveldbIterator(db);
    iter.seek(bytes(keyPrefix));
    while (iter.hasNext()) {
      Entry<byte[], byte[]> entry = iter.next();
      String key = asString(entry.getKey());
      if (!key.startsWith(keyPrefix)) {
        break;
      }
      // Do this because iter.remove() is not supported here
      candidates.add(key);
    }
  } catch (DBException e) {
    throw new IOException(e);
  } finally {
    if (iter != null) {
      iter.close();
    }
  }
  // Delete all candidate keys
  try {
    for (String key : candidates) {
      db.delete(bytes(key));
    }
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Initializes the state store: opens (or creates) the leveldb database,
 * validates the schema version, and starts the periodic compaction timer.
 *
 * @param conf configuration supplying the recovery directory and
 *        compaction interval
 * @throws IOException if the database cannot be opened or the stored
 *         schema version is incompatible
 */
@Override
protected void initStorage(Configuration conf)
    throws IOException {
  db = openDatabase(conf);
  checkVersion();
  startCompactionTimer(conf);
}
/**
 * Opens the leveldb state database, creating it (and storing the current
 * schema version) if it does not exist yet.
 *
 * Fix: {@code e.getMessage()} can be null for a native leveldb exception;
 * the previous code dereferenced it unconditionally, so a missing-database
 * error with no message surfaced as a NullPointerException instead of
 * triggering database creation or propagating the original exception.
 *
 * @param conf configuration supplying the recovery directory
 * @return the opened (possibly freshly created) database
 * @throws IOException if the database cannot be opened or created
 */
protected DB openDatabase(Configuration conf) throws IOException {
  Path storeRoot = createStorageDir(conf);
  Options options = new Options();
  // First try to open an existing database only.
  options.createIfMissing(false);
  options.logger(new LeveldbLogger());
  LOG.info("Using state database at " + storeRoot + " for recovery");
  File dbfile = new File(storeRoot.toString());
  try {
    db = JniDBFactory.factory.open(dbfile, options);
  } catch (NativeDB.DBException e) {
    // Guard against a null message from the native layer.
    String message = e.getMessage();
    if (e.isNotFound()
        || (message != null && message.contains(" does not exist "))) {
      LOG.info("Creating state database at " + dbfile);
      isNewlyCreated = true;
      options.createIfMissing(true);
      try {
        db = JniDBFactory.factory.open(dbfile, options);
        // store version
        storeVersion();
      } catch (DBException dbErr) {
        throw new IOException(dbErr.getMessage(), dbErr);
      }
    } else {
      throw e;
    }
  }
  return db;
}
/**
 * Resolves and creates (mode 0700) the directory holding the state
 * database under the configured NM recovery directory.
 *
 * @param conf configuration supplying {@code NM_RECOVERY_DIR}
 * @return path of the database directory
 * @throws IOException if no recovery dir is configured or mkdirs fails
 */
private Path createStorageDir(Configuration conf) throws IOException {
  final String storeUri = conf.get(YarnConfiguration.NM_RECOVERY_DIR);
  if (storeUri == null) {
    throw new IOException("No store location directory configured in " +
        YarnConfiguration.NM_RECOVERY_DIR);
  }
  final Path root = new Path(storeUri, DB_NAME);
  // Owner-only permissions: the store holds tokens and secrets.
  FileSystem.getLocal(conf).mkdirs(root, new FsPermission((short)0700));
  return root;
}
/**
 * Starts the periodic full-compaction timer if the configured interval is
 * positive; a non-positive interval disables compaction entirely.
 *
 * @param conf configuration supplying the compaction interval in seconds
 */
private void startCompactionTimer(Configuration conf) {
  final long intervalMsec = TimeUnit.SECONDS.toMillis(conf.getLong(
      YarnConfiguration.NM_RECOVERY_COMPACTION_INTERVAL_SECS,
      YarnConfiguration.DEFAULT_NM_RECOVERY_COMPACTION_INTERVAL_SECS));
  if (intervalMsec <= 0) {
    return;
  }
  // Daemon thread so the timer never blocks JVM shutdown.
  compactionTimer = new Timer(
      this.getClass().getSimpleName() + " compaction timer", true);
  compactionTimer.schedule(new CompactionTimerTask(),
      intervalMsec, intervalMsec);
}
/**
 * Timer task that runs a full-range leveldb compaction and logs how long
 * it took. Errors are logged, not rethrown, so one failed cycle does not
 * kill the timer thread.
 */
private class CompactionTimerTask extends TimerTask {
  @Override
  public void run() {
    long start = Time.monotonicNow();
    LOG.info("Starting full compaction cycle");
    try {
      // null/null compacts the entire key range.
      db.compactRange(null, null);
    } catch (DBException e) {
      LOG.error("Error compacting database", e);
    }
    long duration = Time.monotonicNow() - start;
    LOG.info("Full compaction cycle completed in " + duration + " msec");
  }
}
/**
 * Adapter that routes the native leveldb library's log output into this
 * service's commons-logging logger at INFO level.
 */
private static class LeveldbLogger implements Logger {
  private static final Log LOG = LogFactory.getLog(LeveldbLogger.class);
  @Override
  public void log(String message) {
    LOG.info(message);
  }
}
/**
 * Reads the stored schema version.
 *
 * @return the stored version, or the current version when none is stored
 *         (an empty/missing entry is treated as up to date)
 * @throws IOException if the stored proto cannot be parsed
 */
Version loadVersion() throws IOException {
  byte[] data = db.get(bytes(DB_SCHEMA_VERSION_KEY));
  // if version is not stored previously, treat it as CURRENT_VERSION_INFO.
  if (data == null || data.length == 0) {
    return getCurrentVersion();
  }
  Version version =
      new VersionPBImpl(VersionProto.parseFrom(data));
  return version;
}
/**
 * Persists the current schema version to the database.
 *
 * @throws IOException if the leveldb write fails
 */
private void storeVersion() throws IOException {
  dbStoreVersion(CURRENT_VERSION_INFO);
}
// Only used for test
/**
 * Persists an arbitrary schema version; exists solely so tests can
 * simulate older/newer stores.
 *
 * @param state version to store
 * @throws IOException if the leveldb write fails
 */
@VisibleForTesting
void storeVersion(Version state) throws IOException {
  dbStoreVersion(state);
}
/**
 * Serializes a version via its protobuf form and writes it under the
 * schema-version key.
 *
 * @param state version to persist
 * @throws IOException if the leveldb write fails
 */
private void dbStoreVersion(Version state) throws IOException {
  byte[] serialized = ((VersionPBImpl) state).getProto().toByteArray();
  try {
    db.put(bytes(DB_SCHEMA_VERSION_KEY), serialized);
  } catch (DBException e) {
    throw new IOException(e);
  }
}
/**
 * Returns the schema version this build of the store writes and expects.
 */
Version getCurrentVersion() {
  return CURRENT_VERSION_INFO;
}
/**
 * Exposes the underlying database handle for tests.
 */
@VisibleForTesting
DB getDB() {
  return db;
}
/**
 * 1) Versioning scheme: major.minor. For e.g. 1.0, 1.1, 1.2...1.25, 2.0 etc.
 * 2) Any incompatible change of state-store is a major upgrade, and any
 *    compatible change of state-store is a minor upgrade.
 * 3) Within a minor upgrade, say 1.1 to 1.2:
 *    overwrite the version info and proceed as normal.
 * 4) Within a major upgrade, say 1.2 to 2.0:
 *    throw exception and indicate user to use a separate upgrade tool to
 *    upgrade NM state or remove incompatible old state.
 *
 * @throws IOException if the loaded version is incompatible with the
 *         current version, or if reading/writing the version fails
 */
protected void checkVersion() throws IOException {
  Version loadedVersion = loadVersion();
  LOG.info("Loaded NM state version info " + loadedVersion);
  if (loadedVersion.equals(getCurrentVersion())) {
    return;
  }
  if (loadedVersion.isCompatibleTo(getCurrentVersion())) {
    // Minor upgrade: stamp the new version and continue.
    LOG.info("Storing NM state version info " + getCurrentVersion());
    storeVersion();
  } else {
    // Major upgrade: refuse to start with incompatible state.
    throw new IOException(
        "Incompatible version for NM state: expecting NM state version "
        + getCurrentVersion() + ", but loading version " + loadedVersion);
  }
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.plugin.hive.metastore;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import io.airlift.json.JsonCodec;
import io.airlift.json.JsonCodecFactory;
import io.airlift.json.ObjectMapperProvider;
import io.airlift.slice.Slices;
import io.airlift.units.Duration;
import io.prestosql.plugin.hive.HiveBasicStatistics;
import io.prestosql.plugin.hive.HiveBucketProperty;
import io.prestosql.plugin.hive.HiveModule;
import io.prestosql.plugin.hive.HiveType;
import io.prestosql.plugin.hive.PartitionStatistics;
import io.prestosql.plugin.hive.RecordingMetastoreConfig;
import io.prestosql.plugin.hive.authentication.HiveIdentity;
import io.prestosql.plugin.hive.metastore.HivePrivilegeInfo.HivePrivilege;
import io.prestosql.plugin.hive.metastore.SortingColumn.Order;
import io.prestosql.plugin.hive.util.HiveBlockEncodingSerde;
import io.prestosql.spi.block.Block;
import io.prestosql.spi.block.TestingBlockJsonSerde;
import io.prestosql.spi.predicate.Domain;
import io.prestosql.spi.predicate.TupleDomain;
import io.prestosql.spi.security.PrestoPrincipal;
import io.prestosql.spi.security.RoleGrant;
import io.prestosql.spi.statistics.ColumnStatisticType;
import io.prestosql.spi.type.TestingTypeManager;
import io.prestosql.spi.type.Type;
import org.testng.annotations.Test;
import java.io.File;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.OptionalLong;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import static io.prestosql.plugin.hive.HiveBasicStatistics.createEmptyStatistics;
import static io.prestosql.plugin.hive.util.HiveBucketing.BucketingVersion.BUCKETING_V1;
import static io.prestosql.spi.security.PrincipalType.USER;
import static io.prestosql.spi.statistics.ColumnStatisticType.MAX_VALUE;
import static io.prestosql.spi.statistics.ColumnStatisticType.MIN_VALUE;
import static io.prestosql.spi.type.VarcharType.createUnboundedVarcharType;
import static io.prestosql.spi.type.VarcharType.createVarcharType;
import static io.prestosql.testing.TestingConnectorSession.SESSION;
import static org.testng.Assert.assertEquals;
public class TestRecordingHiveMetastore
{
    // Canonical fixture objects. The recording phase serves these from
    // TestingHiveMetastore; the replay phase must reproduce them exactly
    // from the JSON recording (verified by validateMetadata).
    private static final Database DATABASE = new Database(
            "database",
            Optional.of("location"),
            "owner",
            USER,
            Optional.of("comment"),
            ImmutableMap.of("param", "value"));
    private static final Column TABLE_COLUMN = new Column(
            "column",
            HiveType.HIVE_INT,
            Optional.of("comment"));
    private static final Storage TABLE_STORAGE = new Storage(
            StorageFormat.create("serde", "input", "output"),
            "location",
            Optional.of(new HiveBucketProperty(ImmutableList.of("column"), BUCKETING_V1, 10, ImmutableList.of(new SortingColumn("column", Order.ASCENDING)))),
            true,
            ImmutableMap.of("param", "value2"));
    private static final Table TABLE = new Table(
            "database",
            "table",
            "owner",
            "table_type",
            TABLE_STORAGE,
            ImmutableList.of(TABLE_COLUMN),
            ImmutableList.of(TABLE_COLUMN),
            ImmutableMap.of("param", "value3"),
            Optional.of("original_text"),
            Optional.of("expanded_text"),
            OptionalLong.empty());
    private static final Partition PARTITION = new Partition(
            "database",
            "table",
            ImmutableList.of("value"),
            TABLE_STORAGE,
            ImmutableList.of(TABLE_COLUMN),
            ImmutableMap.of("param", "value4"));
    private static final PartitionStatistics PARTITION_STATISTICS = new PartitionStatistics(
            new HiveBasicStatistics(10, 11, 10000, 10001),
            ImmutableMap.of("column", new HiveColumnStatistics(
                    Optional.of(new IntegerStatistics(
                            OptionalLong.of(-100),
                            OptionalLong.of(102))),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    Optional.empty(),
                    OptionalLong.of(1234),
                    OptionalLong.of(1235),
                    OptionalLong.of(1),
                    OptionalLong.of(8))));
    private static final HivePrivilegeInfo PRIVILEGE_INFO = new HivePrivilegeInfo(HivePrivilege.SELECT, true, new HivePrincipal(USER, "grantor"), new HivePrincipal(USER, "grantee"));
    private static final RoleGrant ROLE_GRANT = new RoleGrant(new PrestoPrincipal(USER, "grantee"), "role", true);
    private static final HiveIdentity HIVE_CONTEXT = new HiveIdentity(SESSION);
    private static final List<String> PARTITION_COLUMN_NAMES = ImmutableList.of(TABLE_COLUMN.getName());
    private static final Domain PARTITION_COLUMN_EQUAL_DOMAIN = Domain.singleValue(createUnboundedVarcharType(), Slices.utf8Slice("value1"));
    private static final TupleDomain<String> TUPLE_DOMAIN = TupleDomain.withColumnDomains(ImmutableMap.<String, Domain>builder()
            .put(TABLE_COLUMN.getName(), PARTITION_COLUMN_EQUAL_DOMAIN)
            .build());

    /**
     * Round-trips metastore metadata through {@code RecordingHiveMetastore}:
     * first records calls against a stub delegate and writes the recording
     * to a JSON file, then replays the same calls against an
     * {@code UnimplementedHiveMetastore} (which would throw on any call not
     * served from the recording) and checks the answers are identical.
     */
    @Test
    public void testRecordingHiveMetastore()
            throws IOException
    {
        // Use a proper ".json" suffix (createTempFile does not add the dot)
        // and make sure the temp recording file does not outlive the JVM.
        File recordingFile = File.createTempFile("recording_test", ".json");
        recordingFile.deleteOnExit();
        RecordingMetastoreConfig recordingConfig = new RecordingMetastoreConfig()
                .setRecordingPath(recordingFile.getAbsolutePath())
                .setRecordingDuration(new Duration(10, TimeUnit.MINUTES));
        JsonCodec<RecordingHiveMetastore.Recording> jsonCodec = createJsonCodec();
        RecordingHiveMetastore recordingHiveMetastore = new RecordingHiveMetastore(new TestingHiveMetastore(), recordingConfig, jsonCodec);
        // Record every read operation, plus one mutation (dropDatabase must
        // pass through without breaking the recording).
        validateMetadata(recordingHiveMetastore);
        recordingHiveMetastore.dropDatabase(HIVE_CONTEXT, "other_database");
        recordingHiveMetastore.writeRecording();
        RecordingMetastoreConfig replayingConfig = recordingConfig
                .setReplay(true);
        // Replay: the UnimplementedHiveMetastore delegate guarantees every
        // answer comes from the JSON recording, not from a live metastore.
        recordingHiveMetastore = new RecordingHiveMetastore(new UnimplementedHiveMetastore(), replayingConfig, createJsonCodec());
        recordingHiveMetastore.loadRecording();
        validateMetadata(recordingHiveMetastore);
    }

    /**
     * Builds a JSON codec for recordings with (de)serializers for the
     * non-JSON-native {@code Block} and {@code Type} values that can appear
     * inside recorded statistics.
     */
    private JsonCodec<RecordingHiveMetastore.Recording> createJsonCodec()
    {
        ObjectMapperProvider objectMapperProvider = new ObjectMapperProvider();
        HiveModule.TypeDeserializer typeDeserializer = new HiveModule.TypeDeserializer(new TestingTypeManager());
        objectMapperProvider.setJsonDeserializers(
                ImmutableMap.of(
                        Block.class, new TestingBlockJsonSerde.Deserializer(new HiveBlockEncodingSerde()),
                        Type.class, typeDeserializer));
        objectMapperProvider.setJsonSerializers(ImmutableMap.of(Block.class, new TestingBlockJsonSerde.Serializer(new HiveBlockEncodingSerde())));
        JsonCodec<RecordingHiveMetastore.Recording> jsonCodec = new JsonCodecFactory(objectMapperProvider).jsonCodec(RecordingHiveMetastore.Recording.class);
        return jsonCodec;
    }

    /**
     * Asserts that every supported read operation returns the fixture values;
     * used identically for the recording and the replaying metastore.
     */
    private void validateMetadata(HiveMetastore hiveMetastore)
    {
        assertEquals(hiveMetastore.getDatabase("database"), Optional.of(DATABASE));
        assertEquals(hiveMetastore.getAllDatabases(), ImmutableList.of("database"));
        assertEquals(hiveMetastore.getTable(HIVE_CONTEXT, "database", "table"), Optional.of(TABLE));
        assertEquals(hiveMetastore.getSupportedColumnStatistics(createVarcharType(123)), ImmutableSet.of(MIN_VALUE, MAX_VALUE));
        assertEquals(hiveMetastore.getTableStatistics(HIVE_CONTEXT, TABLE), PARTITION_STATISTICS);
        assertEquals(hiveMetastore.getPartitionStatistics(HIVE_CONTEXT, TABLE, ImmutableList.of(PARTITION)), ImmutableMap.of("value", PARTITION_STATISTICS));
        assertEquals(hiveMetastore.getAllTables("database"), ImmutableList.of("table"));
        assertEquals(hiveMetastore.getTablesWithParameter("database", "param", "value3"), ImmutableList.of("table"));
        assertEquals(hiveMetastore.getAllViews("database"), ImmutableList.of());
        assertEquals(hiveMetastore.getPartition(HIVE_CONTEXT, TABLE, ImmutableList.of("value")), Optional.of(PARTITION));
        // Both the unconstrained and the single-value filtered lookup must be
        // served from the recording.
        assertEquals(hiveMetastore.getPartitionNamesByFilter(HIVE_CONTEXT, "database", "table", PARTITION_COLUMN_NAMES, TupleDomain.all()), Optional.of(ImmutableList.of("value")));
        assertEquals(hiveMetastore.getPartitionNamesByFilter(HIVE_CONTEXT, "database", "table", PARTITION_COLUMN_NAMES, TUPLE_DOMAIN), Optional.of(ImmutableList.of("value")));
        assertEquals(hiveMetastore.getPartitionsByNames(HIVE_CONTEXT, TABLE, ImmutableList.of("value")), ImmutableMap.of("value", Optional.of(PARTITION)));
        assertEquals(hiveMetastore.listTablePrivileges("database", "table", "owner", Optional.of(new HivePrincipal(USER, "user"))), ImmutableSet.of(PRIVILEGE_INFO));
        assertEquals(hiveMetastore.listRoles(), ImmutableSet.of("role"));
        assertEquals(hiveMetastore.listRoleGrants(new HivePrincipal(USER, "user")), ImmutableSet.of(ROLE_GRANT));
        assertEquals(hiveMetastore.listGrantedPrincipals("role"), ImmutableSet.of(ROLE_GRANT));
    }

    /**
     * Stub delegate for the recording phase: answers with the fixture
     * objects when the expected arguments are passed, and with empty
     * results otherwise. Inherits throwing behavior for everything else
     * from UnimplementedHiveMetastore.
     */
    private static class TestingHiveMetastore
            extends UnimplementedHiveMetastore
    {
        @Override
        public Optional<Database> getDatabase(String databaseName)
        {
            if (databaseName.equals("database")) {
                return Optional.of(DATABASE);
            }
            return Optional.empty();
        }

        @Override
        public List<String> getAllDatabases()
        {
            return ImmutableList.of("database");
        }

        @Override
        public Optional<Table> getTable(HiveIdentity identity, String databaseName, String tableName)
        {
            if (databaseName.equals("database") && tableName.equals("table")) {
                return Optional.of(TABLE);
            }
            return Optional.empty();
        }

        @Override
        public Set<ColumnStatisticType> getSupportedColumnStatistics(Type type)
        {
            if (type.equals(createVarcharType(123))) {
                return ImmutableSet.of(MIN_VALUE, MAX_VALUE);
            }
            return ImmutableSet.of();
        }

        @Override
        public PartitionStatistics getTableStatistics(HiveIdentity identity, Table table)
        {
            if (table.getDatabaseName().equals("database") && table.getTableName().equals("table")) {
                return PARTITION_STATISTICS;
            }
            return new PartitionStatistics(createEmptyStatistics(), ImmutableMap.of());
        }

        @Override
        public Map<String, PartitionStatistics> getPartitionStatistics(HiveIdentity identity, Table table, List<Partition> partitions)
        {
            boolean partitionMatches = partitions.stream()
                    .anyMatch(partition -> partition.getValues().get(0).equals("value"));
            if (table.getDatabaseName().equals("database") && table.getTableName().equals("table") && partitionMatches) {
                return ImmutableMap.of("value", PARTITION_STATISTICS);
            }
            return ImmutableMap.of();
        }

        @Override
        public List<String> getAllTables(String databaseName)
        {
            if (databaseName.equals("database")) {
                return ImmutableList.of("table");
            }
            return ImmutableList.of();
        }

        @Override
        public List<String> getTablesWithParameter(String databaseName, String parameterKey, String parameterValue)
        {
            if (databaseName.equals("database") && parameterKey.equals("param") && parameterValue.equals("value3")) {
                return ImmutableList.of("table");
            }
            return ImmutableList.of();
        }

        @Override
        public List<String> getAllViews(String databaseName)
        {
            return ImmutableList.of();
        }

        @Override
        public void dropDatabase(HiveIdentity identity, String databaseName)
        {
            // noop for test purpose
        }

        @Override
        public Optional<Partition> getPartition(HiveIdentity identity, Table table, List<String> partitionValues)
        {
            if (table.getDatabaseName().equals("database") && table.getTableName().equals("table") && partitionValues.equals(ImmutableList.of("value"))) {
                return Optional.of(PARTITION);
            }
            return Optional.empty();
        }

        @Override
        public Optional<List<String>> getPartitionNamesByFilter(HiveIdentity identity, String databaseName, String tableName, List<String> columnNames, TupleDomain<String> partitionKeysFilter)
        {
            // Accept either no constraint on the partition column or the
            // exact fixture domain; anything else yields no partitions.
            Domain filterDomain = partitionKeysFilter.getDomains().get().get(TABLE_COLUMN.getName());
            if (databaseName.equals("database") && tableName.equals("table") && (filterDomain == null || filterDomain.equals(PARTITION_COLUMN_EQUAL_DOMAIN))) {
                return Optional.of(ImmutableList.of("value"));
            }
            return Optional.empty();
        }

        @Override
        public Map<String, Optional<Partition>> getPartitionsByNames(HiveIdentity identity, Table table, List<String> partitionNames)
        {
            if (table.getDatabaseName().equals("database") && table.getTableName().equals("table") && partitionNames.contains("value")) {
                return ImmutableMap.of("value", Optional.of(PARTITION));
            }
            return ImmutableMap.of();
        }

        @Override
        public Set<HivePrivilegeInfo> listTablePrivileges(String databaseName, String tableName, String tableOwner, Optional<HivePrincipal> principal)
        {
            if (databaseName.equals("database") && tableName.equals("table") && principal.get().getType() == USER && principal.get().getName().equals("user")) {
                return ImmutableSet.of(PRIVILEGE_INFO);
            }
            return ImmutableSet.of();
        }

        @Override
        public Set<String> listRoles()
        {
            return ImmutableSet.of("role");
        }

        @Override
        public Set<RoleGrant> listGrantedPrincipals(String role)
        {
            return ImmutableSet.of(ROLE_GRANT);
        }

        @Override
        public Set<RoleGrant> listRoleGrants(HivePrincipal principal)
        {
            return ImmutableSet.of(ROLE_GRANT);
        }
    }
}
| |
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package tr.gov.turkiye.esignuidesk.view.pane;
import java.awt.Point;
import java.util.ArrayList;
import javax.swing.JButton;
import javax.swing.JTextField;
import tr.gov.turkiye.esignuidesk.annotation.FocusOwner;
import tr.gov.turkiye.esignuidesk.config.Config;
import tr.gov.turkiye.esignuidesk.controller.LogicManager;
import tr.gov.turkiye.esignuidesk.data.UserData;
import tr.gov.turkiye.esignuidesk.props.ScreenProperties;
import tr.gov.turkiye.esignuidesk.support.Utils;
/**
*
* @author iakpolat
*/
public class PinPane extends javax.swing.JPanel {
    // Digit buttons b1..b9, b0 in keypad order; used by the randomize/
    // normalize helpers to (re)position the on-screen PIN keypad.
    private final ArrayList<JButton> btnList;
    /**
     * Creates new form PinPane
     *
     * Builds the generated components, sizes the panel to the configured
     * defaults, collects the digit buttons, and lays out the keypad either
     * randomized or in natural order depending on the "random numbers"
     * checkbox (selected by default, see initComponents).
     */
    public PinPane() {
        initComponents();
        setBounds(Config.DEF_PANEL_STARTING_X, Config.DEF_PANEL_STARTING_Y, Config.DEF_PANEL_WIDTH, Config.DEF_PANEL_HEIGHT);
        btnList = new ArrayList<>();
        btnList.add(b1);
        btnList.add(b2);
        btnList.add(b3);
        btnList.add(b4);
        btnList.add(b5);
        btnList.add(b6);
        btnList.add(b7);
        btnList.add(b8);
        btnList.add(b9);
        btnList.add(b0);
        if(rndNumberCB.isSelected())
            randomizeNumberPositions();
        else
            normalizeNumberPositions();
    }
    /**
     * Randomizes button positions.
     *
     * Shuffles the ten digit buttons over a 3-column grid anchored at the
     * configured keypad start point (anti-shoulder-surfing measure).
     */
    public final void randomizeNumberPositions() {
        Utils.randomizePositions(btnList, 3, Config.pinBtnStartPoint.x, Config.pinBtnStartPoint.y,
                Config.numberBtnSize.width, Config.numberBtnSize.height);
    }
    /**
     * Changes button positions to the original.
     *
     * Restores the digit buttons to their natural 3-column layout at the
     * configured keypad start point.
     */
    public final void normalizeNumberPositions() {
        Utils.setComponentPositions(btnList, 3, Config.pinBtnStartPoint.x, Config.pinBtnStartPoint.y,
                Config.numberBtnSize.width, Config.numberBtnSize.height);
    }
    // Marks the PIN field as the component that should receive initial
    // focus (consumed by the framework via the FocusOwner annotation).
    @FocusOwner
    public JTextField getPinField() {
        return pinField;
    }
    /**
     * This method is called from within the constructor to initialize the form.
     * WARNING: Do NOT modify this code. The content of this method is always
     * regenerated by the Form Editor.
     */
    @SuppressWarnings("unchecked")
    // <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
    private void initComponents() {
        b1 = new javax.swing.JButton();
        b2 = new javax.swing.JButton();
        b3 = new javax.swing.JButton();
        b4 = new javax.swing.JButton();
        b5 = new javax.swing.JButton();
        b6 = new javax.swing.JButton();
        b7 = new javax.swing.JButton();
        b8 = new javax.swing.JButton();
        b9 = new javax.swing.JButton();
        b0 = new javax.swing.JButton();
        delBtn = new javax.swing.JButton();
        messagePane = new javax.swing.JLabel();
        signBtn = new javax.swing.JButton();
        cancelBtn = new javax.swing.JButton();
        pinField = new javax.swing.JPasswordField();
        showPassCB = new javax.swing.JCheckBox();
        rndNumberCB = new javax.swing.JCheckBox();
        setBackground(new java.awt.Color(255, 255, 255));
        setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(0, 0, 0)));
        setMaximumSize(new java.awt.Dimension(260, 250));
        setName(Config.pinPaneName);
        setSize(new java.awt.Dimension(260, 250));
        setLayout(null);
        b1.setText("1");
        b1.setFocusable(false);
        b1.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b1ActionPerformed(evt);
            }
        });
        add(b1);
        b1.setBounds(20, 60, 40, 40);
        b2.setText("2");
        b2.setFocusable(false);
        b2.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b2ActionPerformed(evt);
            }
        });
        add(b2);
        b2.setBounds(60, 60, 40, 40);
        b3.setText("3");
        b3.setFocusable(false);
        b3.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b3ActionPerformed(evt);
            }
        });
        add(b3);
        b3.setBounds(100, 60, 40, 40);
        b4.setText("4");
        b4.setFocusable(false);
        b4.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b4ActionPerformed(evt);
            }
        });
        add(b4);
        b4.setBounds(20, 100, 40, 40);
        b5.setText("5");
        b5.setFocusable(false);
        b5.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b5ActionPerformed(evt);
            }
        });
        add(b5);
        b5.setBounds(60, 100, 40, 40);
        b6.setText("6");
        b6.setFocusable(false);
        b6.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b6ActionPerformed(evt);
            }
        });
        add(b6);
        b6.setBounds(100, 100, 40, 40);
        b7.setText("7");
        b7.setFocusable(false);
        b7.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b7ActionPerformed(evt);
            }
        });
        add(b7);
        b7.setBounds(20, 140, 40, 40);
        b8.setText("8");
        b8.setFocusable(false);
        b8.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b8ActionPerformed(evt);
            }
        });
        add(b8);
        b8.setBounds(60, 140, 40, 40);
        b9.setText("9");
        b9.setFocusable(false);
        b9.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b9ActionPerformed(evt);
            }
        });
        add(b9);
        b9.setBounds(100, 140, 40, 40);
        b0.setText("0");
        b0.setFocusable(false);
        b0.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                b0ActionPerformed(evt);
            }
        });
        add(b0);
        b0.setBounds(20, 180, 40, 40);
        delBtn.setText("del");
        delBtn.setFocusable(false);
        delBtn.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                delBtnActionPerformed(evt);
            }
        });
        add(delBtn);
        delBtn.setBounds(60, 180, 80, 40);
        messagePane.setHorizontalAlignment(javax.swing.SwingConstants.CENTER);
        messagePane.setText(ScreenProperties.getValue("enter_pin"));
        messagePane.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
        add(messagePane);
        messagePane.setBounds(0, 10, 160, 16);
        signBtn.setLabel(ScreenProperties.getValue("sign"));
        signBtn.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                signBtnActionPerformed(evt);
            }
        });
        add(signBtn);
        signBtn.setBounds(90, 220, 70, 29);
        cancelBtn.setText(ScreenProperties.getValue("back"));
        cancelBtn.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                cancelBtnActionPerformed(evt);
            }
        });
        add(cancelBtn);
        cancelBtn.setBounds(20, 220, 70, 29);
        pinField.setNextFocusableComponent(signBtn);
        pinField.setPreferredSize(new java.awt.Dimension(140, 28));
        add(pinField);
        pinField.setBounds(10, 30, 245, 28);
        showPassCB.setText(ScreenProperties.getValue("show_pass")
        );
        showPassCB.setIconTextGap(0);
        showPassCB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                showPassCBActionPerformed(evt);
            }
        });
        add(showPassCB);
        showPassCB.setBounds(140, 70, 120, 23);
        rndNumberCB.setText(ScreenProperties.getValue("show_pass"));
        rndNumberCB.setSelected(true);
        rndNumberCB.setText(ScreenProperties.getValue("rnd_number")
        );
        rndNumberCB.setIconTextGap(0);
        rndNumberCB.addActionListener(new java.awt.event.ActionListener() {
            public void actionPerformed(java.awt.event.ActionEvent evt) {
                rndNumberCBActionPerformed(evt);
            }
        });
        add(rndNumberCB);
        rndNumberCB.setBounds(140, 90, 120, 23);
    }// </editor-fold>//GEN-END:initComponents
    /**
     * Shared handler for all ten digit buttons: optionally reshuffles the
     * keypad after every press, then appends the pressed digit to the PIN
     * field.
     *
     * NOTE(review): this reads the PIN via pinField.getText() (deprecated
     * on JPasswordField); getPassword() with a char[] would avoid keeping
     * the PIN in an interned String — confirm before changing.
     */
    private void buttonAction(java.awt.event.ActionEvent evt) {
        if(rndNumberCB.isSelected()) {
            // Reshuffle after each keypress so observers cannot map clicks
            // to digits by position.
            randomizeNumberPositions();
        }
        if(evt.getSource() instanceof JButton)
            pinField.setText(pinField.getText()+Utils.getButtonNumber((JButton) evt.getSource()));
        else
            System.out.println("Event source must be JButton.");
    }
    // Removes the last entered PIN digit, if any.
    private void delBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_delBtnActionPerformed
        if(pinField.getPassword().length>0)
            pinField.setText(pinField.getText().substring(0,pinField.getPassword().length-1));
    }//GEN-LAST:event_delBtnActionPerformed
    private void b1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b1ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b1ActionPerformed
    private void b7ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b7ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b7ActionPerformed
    private void b8ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b8ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b8ActionPerformed
    private void b2ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b2ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b2ActionPerformed
    // Publishes the entered PIN and signals the controller that this pane
    // is done.
    private void signBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_signBtnActionPerformed
        UserData.pinPass = pinField.getPassword();
        LogicManager.done(Config.PIN_PANE_ID);
    }//GEN-LAST:event_signBtnActionPerformed
    // Aborts PIN entry and returns control to the previous screen.
    private void cancelBtnActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_cancelBtnActionPerformed
        LogicManager.cancel(Config.PIN_PANE_ID);
    }//GEN-LAST:event_cancelBtnActionPerformed
    private void b3ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b3ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b3ActionPerformed
    private void b4ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b4ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b4ActionPerformed
    private void b5ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b5ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b5ActionPerformed
    private void b6ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b6ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b6ActionPerformed
    private void b9ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b9ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b9ActionPerformed
    private void b0ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_b0ActionPerformed
        buttonAction(evt);
    }//GEN-LAST:event_b0ActionPerformed
    // Toggles PIN visibility: echo char 0 means plain text, '*' masks.
    private void showPassCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_showPassCBActionPerformed
        if(pinField.getEchoChar()==(char)0) {
            pinField.setEchoChar('*');
        } else {
            pinField.setEchoChar((char)0);
        }
    }//GEN-LAST:event_showPassCBActionPerformed
    // Switches the keypad between randomized and natural digit layout.
    private void rndNumberCBActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_rndNumberCBActionPerformed
        if(rndNumberCB.isSelected()) {
            randomizeNumberPositions();
        } else {
            normalizeNumberPositions();
        }
    }//GEN-LAST:event_rndNumberCBActionPerformed
    // Variables declaration - do not modify//GEN-BEGIN:variables
    private javax.swing.JButton b0;
    private javax.swing.JButton b1;
    private javax.swing.JButton b2;
    private javax.swing.JButton b3;
    private javax.swing.JButton b4;
    private javax.swing.JButton b5;
    private javax.swing.JButton b6;
    private javax.swing.JButton b7;
    private javax.swing.JButton b8;
    private javax.swing.JButton b9;
    private javax.swing.JButton cancelBtn;
    private javax.swing.JButton delBtn;
    private javax.swing.JLabel messagePane;
    private javax.swing.JPasswordField pinField;
    private javax.swing.JCheckBox rndNumberCB;
    private javax.swing.JCheckBox showPassCB;
    private javax.swing.JButton signBtn;
    // End of variables declaration//GEN-END:variables
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.debugger.engine;
import com.intellij.debugger.engine.events.DebuggerCommandImpl;
import com.intellij.debugger.engine.events.SuspendContextCommandImpl;
import com.intellij.debugger.engine.managerThread.DebuggerCommand;
import com.intellij.debugger.engine.managerThread.DebuggerManagerThread;
import com.intellij.debugger.engine.managerThread.SuspendContextCommand;
import com.intellij.debugger.impl.InvokeAndWaitThread;
import com.intellij.debugger.impl.PrioritizedTask;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.progress.ProgressManager;
import com.intellij.openapi.progress.util.ProgressIndicatorListenerAdapter;
import com.intellij.openapi.progress.util.ProgressWindow;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Disposer;
import com.intellij.util.concurrency.AppExecutorUtil;
import com.sun.jdi.VMDisconnectedException;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.TestOnly;
import java.util.concurrent.TimeUnit;
/**
* @author lex
*/
public class DebuggerManagerThreadImpl extends InvokeAndWaitThread<DebuggerCommandImpl> implements DebuggerManagerThread, Disposable {
  private static final Logger LOG = Logger.getInstance(DebuggerManagerThreadImpl.class);
  // Command currently being executed on this manager thread; thread-local
  // because each manager thread runs its own command.
  private static final ThreadLocal<DebuggerCommandImpl> myCurrentCommand = new ThreadLocal<>();
  static final int COMMAND_TIMEOUT = 3000;
  private volatile boolean myDisposed;

  DebuggerManagerThreadImpl(@NotNull Disposable parent, Project project) {
    super(project);
    Disposer.register(parent, this);
  }

  @Override
  public void dispose() {
    myDisposed = true;
  }

  @TestOnly
  public static DebuggerManagerThreadImpl createTestInstance(@NotNull Disposable parent, Project project) {
    return new DebuggerManagerThreadImpl(parent, project);
  }

  public static boolean isManagerThread() {
    return currentThread() instanceof DebuggerManagerThreadImpl;
  }

  public static void assertIsManagerThread() {
    LOG.assertTrue(isManagerThread(), "Should be invoked in manager thread, use DebuggerManagerThreadImpl.getInstance(..).invoke...");
  }

  @Override
  public void invokeAndWait(DebuggerCommandImpl managerCommand) {
    // Waiting from the manager thread itself would deadlock.
    LOG.assertTrue(!isManagerThread(), "Should be invoked outside manager thread, use DebuggerManagerThreadImpl.getInstance(..).invoke...");
    super.invokeAndWait(managerCommand);
  }

  /**
   * Runs the command immediately when already on the manager thread,
   * otherwise queues it for asynchronous execution.
   */
  public void invoke(DebuggerCommandImpl managerCommand) {
    if (currentThread() == this) {
      processEvent(managerCommand);
    }
    else {
      schedule(managerCommand);
    }
  }

  public void invoke(PrioritizedTask.Priority priority, Runnable runnable) {
    invoke(new DebuggerCommandImpl(priority) {
      @Override
      protected void action() {
        runnable.run();
      }
    });
  }

  @Override
  public boolean pushBack(DebuggerCommandImpl managerCommand) {
    final boolean pushed = super.pushBack(managerCommand);
    if (!pushed) {
      // Queue rejected the command (e.g. closed) - let it clean up.
      managerCommand.notifyCancelled();
    }
    return pushed;
  }

  public void schedule(PrioritizedTask.Priority priority, Runnable runnable) {
    schedule(new DebuggerCommandImpl(priority) {
      @Override
      protected void action() {
        runnable.run();
      }
    });
  }

  @Override
  public boolean schedule(DebuggerCommandImpl managerCommand) {
    final boolean scheduled = super.schedule(managerCommand);
    if (!scheduled) {
      // Queue rejected the command (e.g. closed) - let it clean up.
      managerCommand.notifyCancelled();
    }
    return scheduled;
  }

  /**
   * waits COMMAND_TIMEOUT milliseconds
   * if worker thread is still processing the same command
   * calls terminateCommand
   */
  public void terminateAndInvoke(DebuggerCommandImpl command, int terminateTimeoutMillis) {
    final DebuggerCommandImpl currentCommand = myEvents.getCurrentEvent();
    invoke(command);
    if (currentCommand != null) {
      AppExecutorUtil.getAppScheduledExecutorService().schedule(
        () -> {
          if (currentCommand == myEvents.getCurrentEvent()) {
            // if current command is still in progress, cancel it
            getCurrentRequest().requestStop();
            try {
              getCurrentRequest().join();
            }
            catch (InterruptedException ignored) {
              // Restore the interrupt status so the scheduler thread's
              // interruption is not silently swallowed.
              Thread.currentThread().interrupt();
            }
            catch (Exception e) {
              throw new RuntimeException(e);
            }
            finally {
              // The stopped worker must be replaced unless we are disposed.
              if (!myDisposed) {
                startNewWorkerThread();
              }
            }
          }
        }, terminateTimeoutMillis, TimeUnit.MILLISECONDS);
    }
  }

  /**
   * Executes a single command on this manager thread, tracking it in
   * myCurrentCommand for the duration of the run. Commands arriving after
   * the queue is closed are cancelled instead of run.
   */
  @Override
  public void processEvent(@NotNull DebuggerCommandImpl managerCommand) {
    assertIsManagerThread();
    myCurrentCommand.set(managerCommand);
    try {
      if (myEvents.isClosed()) {
        managerCommand.notifyCancelled();
      }
      else {
        managerCommand.run();
      }
    }
    catch (VMDisconnectedException e) {
      // Expected when the debuggee VM goes away mid-command.
      LOG.debug(e);
    }
    catch (RuntimeException e) {
      throw e;
    }
    catch (InterruptedException e) {
      throw new RuntimeException(e);
    }
    catch (Exception e) {
      LOG.error(e);
    }
    finally {
      myCurrentCommand.set(null);
    }
  }

  public static DebuggerCommandImpl getCurrentCommand() {
    return myCurrentCommand.get();
  }

  /**
   * Runs the command under a cancellable progress window; cancelling the
   * progress releases the command.
   */
  public void startProgress(DebuggerCommandImpl command, ProgressWindow progressWindow) {
    new ProgressIndicatorListenerAdapter() {
      @Override
      public void cancelled() {
        command.release();
      }
    }.installToProgress(progressWindow);

    ApplicationManager.getApplication().executeOnPooledThread(
      () -> ProgressManager.getInstance().runProcess(() -> invokeAndWait(command), progressWindow));
  }

  /**
   * Runs a long process on the current worker thread while a freshly
   * started worker keeps serving the queue; afterwards switches the queue
   * back to this thread's request.
   */
  void startLongProcessAndFork(Runnable process) {
    assertIsManagerThread();
    startNewWorkerThread();

    try {
      process.run();
    }
    finally {
      final WorkerThreadRequest request = getCurrentThreadRequest();

      if (LOG.isDebugEnabled()) {
        LOG.debug("Switching back to " + request);
      }

      super.invokeAndWait(new DebuggerCommandImpl() {
        @Override
        protected void action() {
          switchToRequest(request);
        }

        @Override
        protected void commandCancelled() {
          LOG.debug("Event queue was closed, killing request");
          request.requestStop();
        }
      });
    }
  }

  /**
   * Adapts the public DebuggerCommand API to the internal command classes,
   * preserving suspend-context association when present.
   */
  @Override
  public void invokeCommand(final DebuggerCommand command) {
    if(command instanceof SuspendContextCommand) {
      SuspendContextCommand suspendContextCommand = (SuspendContextCommand)command;
      schedule(new SuspendContextCommandImpl((SuspendContextImpl)suspendContextCommand.getSuspendContext()) {
        @Override
        public void contextAction(@NotNull SuspendContextImpl suspendContext) {
          command.action();
        }

        @Override
        protected void commandCancelled() {
          command.commandCancelled();
        }
      });
    }
    else {
      schedule(new DebuggerCommandImpl() {
        @Override
        protected void action() {
          command.action();
        }

        @Override
        protected void commandCancelled() {
          command.commandCancelled();
        }
      });
    }
  }

  public boolean isIdle() {
    return myEvents.isEmpty();
  }

  void restartIfNeeded() {
    if (myEvents.isClosed()) {
      myEvents.reopen();
      startNewWorkerThread();
    }
  }
}
| |
package com.ocs.dynamo.ui.composite.form.process;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.atLeast;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.mockito.Mock;
import com.ocs.dynamo.exception.OCSRuntimeException;
import com.ocs.dynamo.service.TestEntityService;
import com.ocs.dynamo.test.BaseMockitoTest;
import com.ocs.dynamo.test.MockUtil;
import com.ocs.dynamo.ui.composite.form.process.ProgressForm.ProgressMode;
import com.vaadin.flow.component.UI;
import com.vaadin.flow.component.orderedlayout.VerticalLayout;
import com.vaadin.flow.server.Command;
import com.vaadin.flow.server.VaadinSession;
public class ProgressFormTest extends BaseMockitoTest {

    @Mock
    private UI ui;

    @Mock
    private VaadinSession session;

    @Mock
    private TestEntityService service;

    // Written by the background worker started by ProgressForm.startWork and
    // read by the test thread after a sleep; volatile ensures the writes are
    // visible across threads.
    private volatile int called = 0;

    private volatile boolean afterWorkCalled = false;

    @BeforeEach
    public void setUp() {
        when(ui.getSession()).thenReturn(session);
    }

    /**
     * SIMPLE mode: the work runs once, increments the counter to 100, and
     * the afterWorkComplete callback fires without an exception.
     */
    @Test
    public void testCreateSimple() throws InterruptedException {
        called = 0;
        afterWorkCalled = false;
        ProgressForm<Object> pf = new ProgressForm<Object>(UI.getCurrent(), ProgressMode.SIMPLE) {

            private static final long serialVersionUID = -3009623960109461650L;

            @Override
            protected void process(Object t, int estimatedSize) {
                for (int i = 0; i < 100; i++) {
                    getCounter().increment();
                }
                called++;
            }

            @Override
            protected int estimateSize(Object t) {
                return 100;
            }

            @Override
            protected void doBuildLayout(VerticalLayout main) {
            }

            @Override
            protected void afterWorkComplete(boolean exceptionOccurred) {
                assertFalse(exceptionOccurred);
                afterWorkCalled = true;
            }
        };
        MockUtil.injectUI(pf, ui);
        pf.build();

        assertEquals(0, pf.getCounter().getCurrent());
        assertEquals(0, called);

        pf.startWork(null);
        // Give the background worker time to finish.
        Thread.sleep(1000);
        assertEquals(1, called);
        assertEquals(100, pf.getCounter().getCurrent());
        assertTrue(afterWorkCalled);
    }

    /**
     * PROGRESSBAR mode: the slow work triggers periodic UI updates via
     * ui.access(), which the mock executes synchronously.
     */
    @Test
    public void testCreateProgressBar() throws InterruptedException {
        // Execute UI.access commands inline so progress updates run during
        // the test.
        when(ui.access(any())).thenAnswer(a -> {
            Command c = (Command) a.getArgument(0);
            c.execute();
            return null;
        });

        called = 0;
        afterWorkCalled = false;
        ProgressForm<Object> pf = new ProgressForm<Object>(UI.getCurrent(), ProgressMode.PROGRESSBAR) {

            private static final long serialVersionUID = -3009623960109461650L;

            @Override
            protected void process(Object t, int estimatedSize) {
                for (int i = 0; i < 100; i++) {
                    try {
                        Thread.sleep(20);
                    } catch (InterruptedException e) {
                        // Preserve the interrupt status instead of
                        // swallowing it.
                        Thread.currentThread().interrupt();
                    }
                    getCounter().increment();
                }
                called++;
            }

            @Override
            protected int estimateSize(Object t) {
                return 100;
            }

            @Override
            protected void doBuildLayout(VerticalLayout main) {
            }

            @Override
            protected void afterWorkComplete(boolean exceptionOccurred) {
                assertFalse(exceptionOccurred);
                afterWorkCalled = true;
            }
        };
        MockUtil.injectUI(pf, ui);
        pf.build();

        assertEquals(0, pf.getCounter().getCurrent());
        assertEquals(0, called);

        pf.startWork(null);
        // 100 iterations * 20ms sleep = ~2s of work; wait long enough.
        Thread.sleep(5000);
        verify(ui, atLeast(1)).access(any(Command.class));
        assertEquals(1, called);
        assertEquals(100, pf.getCounter().getCurrent());
        assertTrue(afterWorkCalled);
    }

    /**
     * When process() throws, the counter stays at zero and the completion
     * callback reports that an exception occurred.
     */
    @Test
    public void testException() throws InterruptedException {
        when(ui.access(any())).thenAnswer(a -> {
            Command c = (Command) a.getArgument(0);
            c.execute();
            return null;
        });

        called = 0;
        afterWorkCalled = false;
        ProgressForm<Object> pf = new ProgressForm<Object>(UI.getCurrent(), ProgressMode.SIMPLE) {

            private static final long serialVersionUID = -3009623960109461650L;

            @Override
            protected void process(Object t, int estimatedSize) {
                throw new OCSRuntimeException("Test");
            }

            @Override
            protected int estimateSize(Object t) {
                return 100;
            }

            @Override
            protected void doBuildLayout(VerticalLayout main) {
            }

            @Override
            protected void afterWorkComplete(boolean exceptionOccurred) {
                assertTrue(exceptionOccurred);
                afterWorkCalled = true;
            }
        };
        MockUtil.injectUI(pf, ui);
        pf.build();

        assertEquals(0, pf.getCounter().getCurrent());
        assertEquals(0, called);

        pf.startWork(null);
        Thread.sleep(1000);
        assertEquals(0, called);
        assertEquals(0, pf.getCounter().getCurrent());
        assertTrue(afterWorkCalled);
    }
}
| |
/*
* JBoss, Home of Professional Open Source.
* Copyright 2014 Red Hat, Inc., and individual contributors
* as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.undertow.servlet.handlers;
import static io.undertow.servlet.handlers.ServletPathMatch.Type.REDIRECT;
import static io.undertow.servlet.handlers.ServletPathMatch.Type.REWRITE;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.EnumMap;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import javax.servlet.DispatcherType;
import javax.servlet.http.MappingMatch;
import io.undertow.server.HandlerWrapper;
import io.undertow.server.HttpHandler;
import io.undertow.server.handlers.cache.LRUCache;
import io.undertow.server.handlers.resource.Resource;
import io.undertow.server.handlers.resource.ResourceManager;
import io.undertow.servlet.UndertowServletMessages;
import io.undertow.servlet.api.Deployment;
import io.undertow.servlet.api.DeploymentInfo;
import io.undertow.servlet.api.FilterMappingInfo;
import io.undertow.servlet.api.ServletInfo;
import io.undertow.servlet.core.ManagedFilter;
import io.undertow.servlet.core.ManagedFilters;
import io.undertow.servlet.core.ManagedServlet;
import io.undertow.servlet.core.ManagedServlets;
import io.undertow.servlet.handlers.security.ServletSecurityRoleHandler;
/**
* Facade around {@link ServletPathMatchesData}. This facade is responsible for re-generating the matches if anything changes.
*
* @author Stuart Douglas
*/
public class ServletPathMatches {

    public static final String DEFAULT_SERVLET_NAME = "default";

    private final Deployment deployment;

    // volatile: replaced wholesale by setWelcomePages() while requests are in flight
    private volatile String[] welcomePages;
    private final ResourceManager resourceManager;

    // lazily built match table; cleared by invalidate() and rebuilt on demand by getData()
    private volatile ServletPathMatchesData data;

    private final LRUCache<String, ServletPathMatch> pathMatchCache = new LRUCache<>(1000, -1, true); //TODO: configurable

    public ServletPathMatches(final Deployment deployment) {
        this.deployment = deployment;
        this.welcomePages = deployment.getDeploymentInfo().getWelcomePages().toArray(new String[deployment.getDeploymentInfo().getWelcomePages().size()]);
        this.resourceManager = deployment.getDeploymentInfo().getResourceManager();
    }

    /**
     * Eagerly builds the match data so the cost is not paid on the first request.
     */
    public void initData(){
        getData();
    }

    public ServletChain getServletHandlerByName(final String name) {
        return getData().getServletHandlerByName(name);
    }

    /**
     * Resolves the servlet chain for a request path, applying welcome-file handling
     * for directory matches. Results (including welcome-file resolutions and
     * trailing-slash redirects) are cached in {@link #pathMatchCache}.
     */
    public ServletPathMatch getServletHandlerByPath(final String path) {
        ServletPathMatch existing = pathMatchCache.get(path);
        if(existing != null) {
            return existing;
        }
        ServletPathMatch match = getData().getServletHandlerByPath(path);
        if (!match.isRequiredWelcomeFileMatch()) {
            pathMatchCache.add(path, match);
            return match;
        }
        try {
            String remaining = match.getRemaining() == null ? match.getMatched() : match.getRemaining();
            Resource resource = resourceManager.getResource(remaining);
            if (resource == null || !resource.isDirectory()) {
                // not a directory, so no welcome-file processing applies
                pathMatchCache.add(path, match);
                return match;
            }

            boolean pathEndsWithSlash = remaining.endsWith("/");
            final String pathWithTrailingSlash = pathEndsWithSlash ? remaining : remaining + "/";

            // first try welcome files backed by an actual resource, then welcome servlets
            ServletPathMatch welcomePage = findWelcomeFile(pathWithTrailingSlash, !pathEndsWithSlash);

            if (welcomePage != null) {
                pathMatchCache.add(path, welcomePage);
                return welcomePage;
            } else {
                welcomePage = findWelcomeServlet(pathWithTrailingSlash, !pathEndsWithSlash);
                if (welcomePage != null) {
                    pathMatchCache.add(path, welcomePage);
                    return welcomePage;
                } else if(pathEndsWithSlash) {
                    pathMatchCache.add(path, match);
                    return match;
                } else {
                    // directory requested without trailing slash and no welcome match:
                    // redirect to the canonical slash-terminated form
                    ServletPathMatch redirect = new ServletPathMatch(match.getServletChain(), match.getMatched(), match.getRemaining(), REDIRECT, "/");
                    pathMatchCache.add(path, redirect);
                    return redirect;
                }
            }

        } catch (IOException e) {
            throw new RuntimeException(e);
        }
    }

    /**
     * Discards the match data and cache; they are rebuilt lazily on next access.
     */
    public void invalidate() {
        this.data = null;
        this.pathMatchCache.clear();
    }

    /**
     * Returns the match data, rebuilding it (double-checked) if it has been invalidated.
     */
    private ServletPathMatchesData getData() {
        ServletPathMatchesData data = this.data;
        if (data != null) {
            return data;
        }
        synchronized (this) {
            if (this.data != null) {
                return this.data;
            }
            return this.data = setupServletChains();
        }
    }

    /**
     * Looks for a welcome page backed by an actual resource under the given directory path.
     *
     * @param path             the directory path, always ending with a slash
     * @param requiresRedirect true if the original request lacked the trailing slash
     * @return the welcome-file match, or null if no welcome file resource exists
     */
    private ServletPathMatch findWelcomeFile(final String path, boolean requiresRedirect) {
        if(File.separatorChar != '/' && path.contains(File.separator)) {
            return null;
        }
        StringBuilder sb = new StringBuilder();
        for (String i : welcomePages) {
            try {
                sb.append(path);
                sb.append(i);
                final String mergedPath = sb.toString();
                sb.setLength(0);
                Resource resource = resourceManager.getResource(mergedPath);
                if (resource != null) {
                    // BUGFIX: go through getData() rather than reading the raw 'data' field,
                    // which is null between invalidate() and the next rebuild (NPE risk)
                    final ServletPathMatch handler = getData().getServletHandlerByPath(mergedPath);
                    return new ServletPathMatch(handler.getServletChain(), mergedPath, null, requiresRedirect ? REDIRECT : REWRITE, mergedPath);
                }
            } catch (IOException e) {
                // treat an unreadable candidate as absent and try the next welcome page
            }
        }
        return null;
    }

    /**
     * Looks for a servlet mapped to one of the welcome pages under the given directory path.
     *
     * @param path             the directory path, always ending with a slash
     * @param requiresRedirect true if the original request lacked the trailing slash
     * @return the welcome-servlet match, or null if none is mapped
     */
    private ServletPathMatch findWelcomeServlet(final String path, boolean requiresRedirect) {
        StringBuilder sb = new StringBuilder();
        for (String i : welcomePages) {
            sb.append(path);
            sb.append(i);
            final String mergedPath = sb.toString();
            sb.setLength(0);
            // BUGFIX: use getData() for the same invalidation race as in findWelcomeFile()
            final ServletPathMatch handler = getData().getServletHandlerByPath(mergedPath);
            if (handler != null && !handler.isRequiredWelcomeFileMatch()) {
                return new ServletPathMatch(handler.getServletChain(), handler.getMatched(), handler.getRemaining(), requiresRedirect ? REDIRECT : REWRITE, mergedPath);
            }
        }
        return null;
    }

    public void setWelcomePages(List<String> welcomePages) {
        this.welcomePages = welcomePages.toArray(new String[welcomePages.size()]);
    }

    /**
     * Sets up the handlers in the servlet chain. We setup a chain for every path + extension match possibility.
     * (i.e. if there a m path mappings and n extension mappings we have n*m chains).
     * <p>
     * If a chain consists of only the default servlet then we add it as an async handler, so that resources can be
     * served up directly without using blocking operations.
     * <p>
     * TODO: this logic is a bit convoluted at the moment, we should look at simplifying it
     */
    private ServletPathMatchesData setupServletChains() {
        //create the default servlet
        ServletHandler defaultServlet = null;
        final ManagedServlets servlets = deployment.getServlets();
        final ManagedFilters filters = deployment.getFilters();

        final Map<String, ServletHandler> extensionServlets = new HashMap<>();
        final Map<String, ServletHandler> pathServlets = new HashMap<>();

        final Set<String> pathMatches = new HashSet<>();
        final Set<String> extensionMatches = new HashSet<>();

        DeploymentInfo deploymentInfo = deployment.getDeploymentInfo();

        //loop through all filter mappings, and add them to the set of known paths
        for (FilterMappingInfo mapping : deploymentInfo.getFilterMappings()) {
            if (mapping.getMappingType() == FilterMappingInfo.MappingType.URL) {
                String path = mapping.getMapping();
                if (path.equals("*")) {
                    //UNDERTOW-95, support this non-standard filter mapping
                    path = "/*";
                }
                if (!path.startsWith("*.")) {
                    pathMatches.add(path);
                } else {
                    extensionMatches.add(path.substring(2));
                }
            }
        }

        //now loop through all servlets.
        for (Map.Entry<String, ServletHandler> entry : servlets.getServletHandlers().entrySet()) {
            final ServletHandler handler = entry.getValue();
            //add the servlet to the appropriate path maps
            for (String path : handler.getManagedServlet().getServletInfo().getMappings()) {
                if (path.equals("/")) {
                    //the default servlet
                    pathMatches.add("/*");
                    if (defaultServlet != null) {
                        throw UndertowServletMessages.MESSAGES.twoServletsWithSameMapping(path);
                    }
                    defaultServlet = handler;
                } else if (!path.startsWith("*.")) {
                    //either an exact or a /* based path match
                    if (path.isEmpty()) {
                        path = "/";
                    }
                    pathMatches.add(path);
                    if (pathServlets.containsKey(path)) {
                        throw UndertowServletMessages.MESSAGES.twoServletsWithSameMapping(path);
                    }
                    pathServlets.put(path, handler);
                } else {
                    //an extension match based servlet
                    String ext = path.substring(2);
                    extensionMatches.add(ext);
                    if(extensionServlets.containsKey(ext)) {
                        throw UndertowServletMessages.MESSAGES.twoServletsWithSameMapping(path);
                    }
                    extensionServlets.put(ext, handler);
                }
            }
        }
        ServletHandler managedDefaultServlet = servlets.getServletHandler(DEFAULT_SERVLET_NAME);
        if(managedDefaultServlet == null) {
            //we always create a default servlet, even if it is not going to have any path mappings registered
            managedDefaultServlet = servlets.addServlet(new ServletInfo(DEFAULT_SERVLET_NAME, DefaultServlet.class));
        }

        if (defaultServlet == null) {
            //no explicit default servlet was specified, so we register our mapping
            pathMatches.add("/*");
            defaultServlet = managedDefaultServlet;
        }

        final ServletPathMatchesData.Builder builder = ServletPathMatchesData.builder();

        //we now loop over every path in the application, and build up the patches based on this path
        //these paths contain both /* and exact matches.
        for (final String path : pathMatches) {
            //resolve the target servlet, will return null if this is the default servlet
            MatchData targetServletMatch = resolveServletForPath(path, pathServlets, extensionServlets, defaultServlet);

            final Map<DispatcherType, List<ManagedFilter>> noExtension = new EnumMap<>(DispatcherType.class);
            final Map<String, Map<DispatcherType, List<ManagedFilter>>> extension = new HashMap<>();
            //initialize the extension map. This contains all the filers in the noExtension map, plus
            //any filters that match the extension key
            for (String ext : extensionMatches) {
                extension.put(ext, new EnumMap<DispatcherType, List<ManagedFilter>>(DispatcherType.class));
            }

            //loop over all the filters, and add them to the appropriate map in the correct order
            for (final FilterMappingInfo filterMapping : deploymentInfo.getFilterMappings()) {
                ManagedFilter filter = filters.getManagedFilter(filterMapping.getFilterName());
                if (filterMapping.getMappingType() == FilterMappingInfo.MappingType.SERVLET) {
                    if (targetServletMatch.handler != null) {
                        if (filterMapping.getMapping().equals(targetServletMatch.handler.getManagedServlet().getServletInfo().getName()) || filterMapping.getMapping().equals("*")) {
                            addToListMap(noExtension, filterMapping.getDispatcher(), filter);
                        }
                    }
                    for (Map.Entry<String, Map<DispatcherType, List<ManagedFilter>>> entry : extension.entrySet()) {
                        ServletHandler pathServlet = targetServletMatch.handler;
                        boolean defaultServletMatch = targetServletMatch.defaultServlet;
                        if (defaultServletMatch && extensionServlets.containsKey(entry.getKey())) {
                            pathServlet = extensionServlets.get(entry.getKey());
                        }
                        if (filterMapping.getMapping().equals(pathServlet.getManagedServlet().getServletInfo().getName()) || filterMapping.getMapping().equals("*")) {
                            addToListMap(extension.get(entry.getKey()), filterMapping.getDispatcher(), filter);
                        }
                    }
                } else {
                    if (filterMapping.getMapping().isEmpty() || !filterMapping.getMapping().startsWith("*.")) {
                        if (isFilterApplicable(path, filterMapping.getMapping())) {
                            addToListMap(noExtension, filterMapping.getDispatcher(), filter);
                            for (Map<DispatcherType, List<ManagedFilter>> l : extension.values()) {
                                addToListMap(l, filterMapping.getDispatcher(), filter);
                            }
                        }
                    } else {
                        addToListMap(extension.get(filterMapping.getMapping().substring(2)), filterMapping.getDispatcher(), filter);
                    }
                }
            }
            //resolve any matches and add them to the builder
            if (path.endsWith("/*")) {
                String prefix = path.substring(0, path.length() - 2);
                //add the default non-extension match
                builder.addPrefixMatch(prefix, createHandler(deploymentInfo, targetServletMatch.handler, noExtension, targetServletMatch.matchedPath, targetServletMatch.defaultServlet, targetServletMatch.mappingMatch, targetServletMatch.userPath), targetServletMatch.defaultServlet || targetServletMatch.handler.getManagedServlet().getServletInfo().isRequireWelcomeFileMapping());

                //build up the chain for each non-extension match
                for (Map.Entry<String, Map<DispatcherType, List<ManagedFilter>>> entry : extension.entrySet()) {
                    ServletHandler pathServlet = targetServletMatch.handler;
                    String pathMatch = targetServletMatch.matchedPath;

                    final boolean defaultServletMatch;
                    final String servletMatchPattern;
                    final MappingMatch mappingMatch;
                    if (targetServletMatch.defaultServlet) {
                        // Path matches always take precedence over extension matches, however the default servlet is matched
                        // at a lower priority, after extension matches. The "/*" pattern is applied implicitly onto the
                        // default servlet. If there's an extension match in addition to a non-default servlet path match,
                        // the servlet path match is higher priority. However if the path match is the default servlets
                        // default catch-all path, the extension match is a higher priority.
                        ServletHandler extensionServletHandler = extensionServlets.get(entry.getKey());
                        if (extensionServletHandler != null) {
                            defaultServletMatch = false;
                            pathServlet = extensionServletHandler;
                            servletMatchPattern = "*." + entry.getKey();
                            mappingMatch = MappingMatch.EXTENSION;
                        } else {
                            defaultServletMatch = true;
                            servletMatchPattern = "/";
                            mappingMatch = MappingMatch.DEFAULT;
                        }
                    } else {
                        defaultServletMatch = false;
                        servletMatchPattern = path;
                        mappingMatch = MappingMatch.PATH;
                    }
                    HttpHandler handler = pathServlet;
                    if (!entry.getValue().isEmpty()) {
                        handler = new FilterHandler(entry.getValue(), deploymentInfo.isAllowNonStandardWrappers(), handler);
                    }
                    builder.addExtensionMatch(prefix, entry.getKey(), servletChain(handler, pathServlet.getManagedServlet(), entry.getValue(), pathMatch, deploymentInfo, defaultServletMatch, mappingMatch, servletMatchPattern));
                }
            } else if (path.isEmpty()) {
                //the context root match
                builder.addExactMatch("/", createHandler(deploymentInfo, targetServletMatch.handler, noExtension, targetServletMatch.matchedPath, targetServletMatch.defaultServlet, targetServletMatch.mappingMatch, targetServletMatch.userPath));
            } else {
                //we need to check for an extension match, so paths like /exact.txt will have the correct filter applied
                int lastSegmentIndex = path.lastIndexOf('/');
                String lastSegment;
                if(lastSegmentIndex > 0) {
                    lastSegment = path.substring(lastSegmentIndex);
                } else {
                    lastSegment = path;
                }
                if (lastSegment.contains(".")) {
                    String ext = lastSegment.substring(lastSegment.lastIndexOf('.') + 1);
                    if (extension.containsKey(ext)) {
                        Map<DispatcherType, List<ManagedFilter>> extMap = extension.get(ext);
                        builder.addExactMatch(path, createHandler(deploymentInfo, targetServletMatch.handler, extMap, targetServletMatch.matchedPath, targetServletMatch.defaultServlet, targetServletMatch.mappingMatch, targetServletMatch.userPath));
                    } else {
                        builder.addExactMatch(path, createHandler(deploymentInfo, targetServletMatch.handler, noExtension, targetServletMatch.matchedPath, targetServletMatch.defaultServlet, targetServletMatch.mappingMatch, targetServletMatch.userPath));
                    }
                } else {
                    builder.addExactMatch(path, createHandler(deploymentInfo, targetServletMatch.handler, noExtension, targetServletMatch.matchedPath, targetServletMatch.defaultServlet, targetServletMatch.mappingMatch, targetServletMatch.userPath));
                }
            }
        }

        //now setup name based mappings
        //these are used for name based dispatch
        for (Map.Entry<String, ServletHandler> entry : servlets.getServletHandlers().entrySet()) {
            final Map<DispatcherType, List<ManagedFilter>> filtersByDispatcher = new EnumMap<>(DispatcherType.class);
            for (final FilterMappingInfo filterMapping : deploymentInfo.getFilterMappings()) {
                ManagedFilter filter = filters.getManagedFilter(filterMapping.getFilterName());
                if (filterMapping.getMappingType() == FilterMappingInfo.MappingType.SERVLET) {
                    if (filterMapping.getMapping().equals(entry.getKey())) {
                        addToListMap(filtersByDispatcher, filterMapping.getDispatcher(), filter);
                    }
                }
            }
            if (filtersByDispatcher.isEmpty()) {
                builder.addNameMatch(entry.getKey(), servletChain(entry.getValue(), entry.getValue().getManagedServlet(), filtersByDispatcher, null, deploymentInfo, false, MappingMatch.EXACT, ""));
            } else {
                builder.addNameMatch(entry.getKey(), servletChain(new FilterHandler(filtersByDispatcher, deploymentInfo.isAllowNonStandardWrappers(), entry.getValue()), entry.getValue().getManagedServlet(), filtersByDispatcher, null, deploymentInfo, false, MappingMatch.EXACT, ""));
            }
        }
        return builder.build();
    }

    /**
     * Wraps the target servlet in a FilterHandler when filters apply, then builds the chain.
     */
    private ServletChain createHandler(final DeploymentInfo deploymentInfo, final ServletHandler targetServlet, final Map<DispatcherType, List<ManagedFilter>> noExtension, final String servletPath, final boolean defaultServlet, MappingMatch mappingMatch, String pattern) {
        final ServletChain initialHandler;
        if (noExtension.isEmpty()) {
            initialHandler = servletChain(targetServlet, targetServlet.getManagedServlet(), noExtension, servletPath, deploymentInfo, defaultServlet, mappingMatch, pattern);
        } else {
            FilterHandler handler = new FilterHandler(noExtension, deploymentInfo.isAllowNonStandardWrappers(), targetServlet);
            initialHandler = servletChain(handler, targetServlet.getManagedServlet(), noExtension, servletPath, deploymentInfo, defaultServlet, mappingMatch, pattern);
        }
        return initialHandler;
    }

    /**
     * Resolves the servlet for a path using spec precedence: exact match, then longest
     * prefix ({@code /*}) match, then extension match, then the default servlet.
     */
    private static MatchData resolveServletForPath(final String path, final Map<String, ServletHandler> pathServlets, final Map<String, ServletHandler> extensionServlets, ServletHandler defaultServlet) {
        if (pathServlets.containsKey(path)) {
            if (path.endsWith("/*")) {
                final String base = path.substring(0, path.length() - 2);
                return new MatchData(pathServlets.get(path), base, path, MappingMatch.PATH, false);
            } else {
                if(path.equals("/")) {
                    return new MatchData(pathServlets.get(path), path, "", MappingMatch.CONTEXT_ROOT, false);
                }
                return new MatchData(pathServlets.get(path), path, path, MappingMatch.EXACT, false);
            }
        }
        String match = null;
        ServletHandler servlet = null;
        String userPath = "";
        for (final Map.Entry<String, ServletHandler> entry : pathServlets.entrySet()) {
            String key = entry.getKey();
            if (key.endsWith("/*")) {
                final String base = key.substring(0, key.length() - 1);
                if (match == null || base.length() > match.length()) {
                    if (path.startsWith(base) || path.equals(base.substring(0, base.length() - 1))) {
                        match = base.substring(0, base.length() - 1);
                        servlet = entry.getValue();
                        userPath = key;
                    }
                }
            }
        }
        if (servlet != null) {
            return new MatchData(servlet, match, userPath, MappingMatch.PATH, false);
        }
        int index = path.lastIndexOf('.');
        if (index != -1) {
            String ext = path.substring(index + 1);
            servlet = extensionServlets.get(ext);
            if (servlet != null) {
                return new MatchData(servlet, null, "*." + ext, MappingMatch.EXTENSION, false);
            }
        }
        return new MatchData(defaultServlet, null, "/", MappingMatch.DEFAULT, true);
    }

    /**
     * Returns true if a URL-pattern filter mapping applies to the given path,
     * supporting exact, prefix ({@code /*}) and the non-standard {@code *} mappings.
     */
    private static boolean isFilterApplicable(final String path, final String filterPath) {
        String modifiedPath;
        if (filterPath.equals("*")) {
            modifiedPath = "/*";
        } else {
            modifiedPath = filterPath;
        }
        if (path.isEmpty()) {
            return modifiedPath.equals("/*") || modifiedPath.equals("/");
        }
        if (modifiedPath.endsWith("/*")) {
            String baseFilterPath = modifiedPath.substring(0, modifiedPath.length() - 1);
            String exactFilterPath = modifiedPath.substring(0, modifiedPath.length() - 2);
            return path.startsWith(baseFilterPath) || path.equals(exactFilterPath);
        } else {
            return modifiedPath.equals(path);
        }
    }

    /**
     * Appends a value to the list stored under the key, creating the list on first use.
     */
    private static <K, V> void addToListMap(final Map<K, List<V>> map, final K key, final V value) {
        List<V> list = map.get(key);
        if (list == null) {
            map.put(key, list = new ArrayList<>());
        }
        list.add(value);
    }

    /**
     * Builds the final ServletChain, wrapping the handler in the security role handler
     * (unless security is disabled) and any user-supplied handler-chain wrappers.
     */
    private static ServletChain servletChain(HttpHandler next, final ManagedServlet managedServlet, Map<DispatcherType, List<ManagedFilter>> filters, final String servletPath, final DeploymentInfo deploymentInfo, boolean defaultServlet, MappingMatch mappingMatch, String pattern) {
        HttpHandler servletHandler = next;
        if(!deploymentInfo.isSecurityDisabled()) {
            servletHandler = new ServletSecurityRoleHandler(servletHandler, deploymentInfo.getAuthorizationManager());
        }
        servletHandler = wrapHandlers(servletHandler, managedServlet.getServletInfo().getHandlerChainWrappers());
        return new ServletChain(servletHandler, managedServlet, servletPath, defaultServlet, mappingMatch, pattern, filters);
    }

    private static HttpHandler wrapHandlers(final HttpHandler wrapee, final List<HandlerWrapper> wrappers) {
        HttpHandler current = wrapee;
        for (HandlerWrapper wrapper : wrappers) {
            current = wrapper.wrap(current);
        }
        return current;
    }

    /**
     * Immutable result of servlet resolution: the handler, the matched path portion,
     * the user-visible pattern, the mapping-match kind, and whether the default servlet matched.
     */
    private static class MatchData {
        final ServletHandler handler;
        final String matchedPath;
        final String userPath;
        final MappingMatch mappingMatch;
        final boolean defaultServlet;

        private MatchData(final ServletHandler handler, final String matchedPath, String userPath, MappingMatch mappingMatch, boolean defaultServlet) {
            this.handler = handler;
            this.matchedPath = matchedPath;
            this.userPath = userPath;
            this.mappingMatch = mappingMatch;
            this.defaultServlet = defaultServlet;
        }
    }
}
| |
// Copyright 2003-2005 Arthur van Hoff Rick Blair
// Licensed under Apache License version 2.0
// Original license LGPL
package javax.jmdns.impl;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import javax.jmdns.impl.constants.DNSRecordClass;
import javax.jmdns.impl.constants.DNSRecordType;
/**
* A table of DNS entries. This is a map table which can handle multiple entries with the same name.
* <p/>
* Storing multiple entries with the same name is implemented using a linked list. This is hidden from the user and can change in later implementation.
* <p/>
* Here's how to iterate over all entries:
*
* <pre>
* for (Iterator i=dnscache.allValues().iterator(); i.hasNext(); ) {
* DNSEntry entry = i.next();
* ...do something with entry...
* }
* </pre>
* <p/>
* And here's how to iterate over all entries having a given name:
*
* <pre>
* for (Iterator i=dnscache.getDNSEntryList(name).iterator(); i.hasNext(); ) {
* DNSEntry entry = i.next();
* ...do something with entry...
* }
* </pre>
*
* @author Arthur van Hoff, Werner Randelshofer, Rick Blair, Pierre Frisch
*/
public class DNSCache extends ConcurrentHashMap<String, List<DNSEntry>> {
    // private static Logger logger = Logger.getLogger(DNSCache.class.getName());

    private static final long serialVersionUID = 3024739453186759259L;

    /**
     * Shared immutable empty cache instance.
     */
    public static final DNSCache EmptyCache = new _EmptyCache();

    /**
     * Read-only sentinel cache: always empty, silently drops writes.
     */
    static final class _EmptyCache extends DNSCache {

        private static final long serialVersionUID = 8487377323074567224L;

        /**
         * {@inheritDoc}
         */
        @Override
        public int size() {
            return 0;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean isEmpty() {
            return true;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean containsKey(Object key) {
            return false;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean containsValue(Object value) {
            return false;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public List<DNSEntry> get(Object key) {
            return null;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public Set<String> keySet() {
            return Collections.emptySet();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public Collection<List<DNSEntry>> values() {
            return Collections.emptySet();
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public boolean equals(Object o) {
            return (o instanceof Map) && ((Map<?, ?>) o).size() == 0;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public List<DNSEntry> put(String key, List<DNSEntry> value) {
            // writes are ignored: this cache is permanently empty
            return null;
        }

        /**
         * {@inheritDoc}
         */
        @Override
        public int hashCode() {
            return 0;
        }

    }

    /**
     * Creates a cache with the default initial capacity (1024).
     */
    public DNSCache() {
        this(1024);
    }

    /**
     * Copy constructor.
     *
     * @param map cache to copy; may be null, in which case an empty cache is created
     */
    public DNSCache(DNSCache map) {
        this(map != null ? map.size() : 1024);
        if (map != null) {
            this.putAll(map);
        }
    }

    /**
     * Create a table with a given initial size.
     *
     * @param initialCapacity
     */
    public DNSCache(int initialCapacity) {
        super(initialCapacity);
    }

    // ====================================================================
    // Map

    /**
     * {@inheritDoc}
     */
    @Override
    protected Object clone() throws CloneNotSupportedException {
        return new DNSCache(this);
    }

    // ====================================================================

    /**
     * Returns all entries in the cache
     *
     * @return all entries in the cache
     */
    public Collection<DNSEntry> allValues() {
        List<DNSEntry> allValues = new ArrayList<DNSEntry>();
        for (List<? extends DNSEntry> entry : this.values()) {
            if (entry != null) {
                allValues.addAll(entry);
            }
        }
        return allValues;
    }

    /**
     * Iterate only over items with matching name. Returns an list of DNSEntry or null. To retrieve all entries, one must iterate over this linked list.
     *
     * @param name
     * @return list of DNSEntries
     */
    public Collection<? extends DNSEntry> getDNSEntryList(String name) {
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            // snapshot under the list lock so callers can iterate safely
            synchronized (entryList) {
                entryList = new ArrayList<DNSEntry>(entryList);
            }
        } else {
            entryList = Collections.emptyList();
        }
        return entryList;
    }

    // keys are stored lower-cased; normalize the lookup name the same way
    private Collection<? extends DNSEntry> _getDNSEntryList(String name) {
        return this.get(name != null ? name.toLowerCase() : null);
    }

    /**
     * Get a matching DNS entry from the table (using isSameEntry). Returns the entry that was found.
     *
     * @param dnsEntry
     * @return DNSEntry
     */
    public DNSEntry getDNSEntry(DNSEntry dnsEntry) {
        DNSEntry result = null;
        if (dnsEntry != null) {
            Collection<? extends DNSEntry> entryList = this._getDNSEntryList(dnsEntry.getKey());
            if (entryList != null) {
                synchronized (entryList) {
                    for (DNSEntry testDNSEntry : entryList) {
                        if (testDNSEntry.isSameEntry(dnsEntry)) {
                            result = testDNSEntry;
                            break;
                        }
                    }
                }
            }
        }
        return result;
    }

    /**
     * Get a matching DNS entry from the table.
     *
     * @param name
     * @param type
     * @param recordClass
     * @return DNSEntry
     */
    public DNSEntry getDNSEntry(String name, DNSRecordType type, DNSRecordClass recordClass) {
        DNSEntry result = null;
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            synchronized (entryList) {
                for (DNSEntry testDNSEntry : entryList) {
                    if (testDNSEntry.matchRecordType(type) && testDNSEntry.matchRecordClass(recordClass)) {
                        result = testDNSEntry;
                        break;
                    }
                }
            }
        }
        return result;
    }

    /**
     * Get all matching DNS entries from the table.
     *
     * @param name
     * @param type
     * @param recordClass
     * @return list of entries
     */
    public Collection<? extends DNSEntry> getDNSEntryList(String name, DNSRecordType type, DNSRecordClass recordClass) {
        Collection<? extends DNSEntry> entryList = this._getDNSEntryList(name);
        if (entryList != null) {
            synchronized (entryList) {
                entryList = new ArrayList<DNSEntry>(entryList);
                for (Iterator<? extends DNSEntry> i = entryList.iterator(); i.hasNext();) {
                    DNSEntry testDNSEntry = i.next();
                    if (!testDNSEntry.matchRecordType(type) || (!testDNSEntry.matchRecordClass(recordClass))) {
                        i.remove();
                    }
                }
            }
        } else {
            entryList = Collections.emptyList();
        }
        return entryList;
    }

    /**
     * Adds an entry to the table.
     *
     * @param dnsEntry
     * @return true if the entry was added
     */
    public boolean addDNSEntry(final DNSEntry dnsEntry) {
        boolean result = false;
        if (dnsEntry != null) {
            List<DNSEntry> entryList = this.get(dnsEntry.getKey());
            if (entryList == null) {
                // putIfAbsent handles the race where two threads create the list concurrently
                this.putIfAbsent(dnsEntry.getKey(), new ArrayList<DNSEntry>());
                entryList = this.get(dnsEntry.getKey());
            }
            synchronized (entryList) {
                entryList.add(dnsEntry);
            }
            // This is probably not very informative
            result = true;
        }
        return result;
    }

    /**
     * Removes a specific entry from the table. Returns true if the entry was found.
     *
     * @param dnsEntry
     * @return true if the entry was removed
     */
    public boolean removeDNSEntry(DNSEntry dnsEntry) {
        boolean result = false;
        if (dnsEntry != null) {
            List<DNSEntry> entryList = this.get(dnsEntry.getKey());
            if (entryList != null) {
                synchronized (entryList) {
                    // BUGFIX: propagate whether the entry was actually present. Previously
                    // the boolean from remove() was discarded and this method always
                    // returned false, contradicting its documented contract.
                    result = entryList.remove(dnsEntry);
                }
            }
        }
        return result;
    }

    /**
     * Replace an existing entry by a new one.<br/>
     * <b>Note:</b> the 2 entries must have the same key.
     *
     * @param newDNSEntry
     * @param existingDNSEntry
     * @return <code>true</code> if the entry has been replace, <code>false</code> otherwise.
     */
    public boolean replaceDNSEntry(DNSEntry newDNSEntry, DNSEntry existingDNSEntry) {
        boolean result = false;
        if ((newDNSEntry != null) && (existingDNSEntry != null) && (newDNSEntry.getKey().equals(existingDNSEntry.getKey()))) {
            List<DNSEntry> entryList = this.get(newDNSEntry.getKey());
            if (entryList == null) {
                this.putIfAbsent(newDNSEntry.getKey(), new ArrayList<DNSEntry>());
                entryList = this.get(newDNSEntry.getKey());
            }
            synchronized (entryList) {
                entryList.remove(existingDNSEntry);
                entryList.add(newDNSEntry);
            }
            // This is probably not very informative
            result = true;
        }
        return result;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public synchronized String toString() {
        // StringBuilder suffices here: the builder is method-local and the
        // method itself is synchronized, so no extra locking is needed.
        StringBuilder aLog = new StringBuilder(2000);
        aLog.append("\t---- cache ----");
        for (String key : this.keySet()) {
            aLog.append("\n\t\t");
            aLog.append("\n\t\tname '");
            aLog.append(key);
            aLog.append("' ");
            List<? extends DNSEntry> entryList = this.get(key);
            if ((entryList != null) && (!entryList.isEmpty())) {
                synchronized (entryList) {
                    for (DNSEntry entry : entryList) {
                        aLog.append("\n\t\t\t");
                        aLog.append(entry.toString());
                    }
                }
            } else {
                aLog.append(" no entries");
            }
        }
        return aLog.toString();
    }
}
| |
package com.merakianalytics.orianna.types.dto.spectator;
import java.util.List;
import java.util.Objects;
import com.merakianalytics.orianna.types.dto.DataObject;
public class FeaturedGameInfo extends DataObject {
    private static final long serialVersionUID = -6447264136509843849L;
    private List<BannedChampion> bannedChampions;
    private long gameId, gameStartTime, mapId, gameLength, gameQueueConfigId;
    private Observer observers;
    private List<Participant> participants;
    private String platformId, gameMode, gameType;

    /**
     * Null-safe equality for reference-typed fields; replaces the repeated
     * four-line null-check blocks the generated equals() used per field.
     */
    private static boolean eq(final Object a, final Object b) {
        return a == null ? b == null : a.equals(b);
    }

    /*
     * (non-Javadoc)
     * @see java.lang.Object#equals(java.lang.Object)
     */
    @Override
    public boolean equals(final Object obj) {
        if(this == obj) {
            return true;
        }
        if(obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final FeaturedGameInfo other = (FeaturedGameInfo)obj;
        // Cheap primitive comparisons first, then null-safe object comparisons.
        return gameId == other.gameId
            && gameLength == other.gameLength
            && gameQueueConfigId == other.gameQueueConfigId
            && gameStartTime == other.gameStartTime
            && mapId == other.mapId
            && eq(bannedChampions, other.bannedChampions)
            && eq(gameMode, other.gameMode)
            && eq(gameType, other.gameType)
            && eq(observers, other.observers)
            && eq(participants, other.participants)
            && eq(platformId, other.platformId);
    }

    /**
     * @return the bannedChampions
     */
    public List<BannedChampion> getBannedChampions() {
        return bannedChampions;
    }

    /**
     * @return the gameId
     */
    public long getGameId() {
        return gameId;
    }

    /**
     * @return the gameLength
     */
    public long getGameLength() {
        return gameLength;
    }

    /**
     * @return the gameMode
     */
    public String getGameMode() {
        return gameMode;
    }

    /**
     * @return the gameQueueConfigId
     */
    public long getGameQueueConfigId() {
        return gameQueueConfigId;
    }

    /**
     * @return the gameStartTime
     */
    public long getGameStartTime() {
        return gameStartTime;
    }

    /**
     * @return the gameType
     */
    public String getGameType() {
        return gameType;
    }

    /**
     * @return the mapId
     */
    public long getMapId() {
        return mapId;
    }

    /**
     * @return the observers
     */
    public Observer getObservers() {
        return observers;
    }

    /**
     * @return the participants
     */
    public List<Participant> getParticipants() {
        return participants;
    }

    /**
     * @return the platformId
     */
    public String getPlatformId() {
        return platformId;
    }

    /*
     * (non-Javadoc)
     * @see java.lang.Object#hashCode()
     */
    @Override
    public int hashCode() {
        // Long.hashCode(v) is exactly (int)(v ^ v >>> 32), so the produced
        // values are identical to the previous hand-rolled folding.
        final int prime = 31;
        int result = 1;
        result = prime * result + (bannedChampions == null ? 0 : bannedChampions.hashCode());
        result = prime * result + Long.hashCode(gameId);
        result = prime * result + Long.hashCode(gameLength);
        result = prime * result + (gameMode == null ? 0 : gameMode.hashCode());
        result = prime * result + Long.hashCode(gameQueueConfigId);
        result = prime * result + Long.hashCode(gameStartTime);
        result = prime * result + (gameType == null ? 0 : gameType.hashCode());
        result = prime * result + Long.hashCode(mapId);
        result = prime * result + (observers == null ? 0 : observers.hashCode());
        result = prime * result + (participants == null ? 0 : participants.hashCode());
        result = prime * result + (platformId == null ? 0 : platformId.hashCode());
        return result;
    }

    /**
     * @param bannedChampions
     *        the bannedChampions to set
     */
    public void setBannedChampions(final List<BannedChampion> bannedChampions) {
        this.bannedChampions = bannedChampions;
    }

    /**
     * @param gameId
     *        the gameId to set
     */
    public void setGameId(final long gameId) {
        this.gameId = gameId;
    }

    /**
     * @param gameLength
     *        the gameLength to set
     */
    public void setGameLength(final long gameLength) {
        this.gameLength = gameLength;
    }

    /**
     * @param gameMode
     *        the gameMode to set
     */
    public void setGameMode(final String gameMode) {
        this.gameMode = gameMode;
    }

    /**
     * @param gameQueueConfigId
     *        the gameQueueConfigId to set
     */
    public void setGameQueueConfigId(final long gameQueueConfigId) {
        this.gameQueueConfigId = gameQueueConfigId;
    }

    /**
     * @param gameStartTime
     *        the gameStartTime to set
     */
    public void setGameStartTime(final long gameStartTime) {
        this.gameStartTime = gameStartTime;
    }

    /**
     * @param gameType
     *        the gameType to set
     */
    public void setGameType(final String gameType) {
        this.gameType = gameType;
    }

    /**
     * @param mapId
     *        the mapId to set
     */
    public void setMapId(final long mapId) {
        this.mapId = mapId;
    }

    /**
     * @param observers
     *        the observers to set
     */
    public void setObservers(final Observer observers) {
        this.observers = observers;
    }

    /**
     * @param participants
     *        the participants to set
     */
    public void setParticipants(final List<Participant> participants) {
        this.participants = participants;
    }

    /**
     * @param platformId
     *        the platformId to set
     */
    public void setPlatformId(final String platformId) {
        this.platformId = platformId;
    }
}
| |
/*
* Copyright 2016 Inuyama-ya sanp <develop@xgmtk.org>.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.xgmtk.lore.util;
import java.util.Arrays;
import java.util.Iterator;
import java.util.List;
import java.util.function.Function;
/**
* Implementation of immutable 2D array.
*
* @author Takayuki,Kando <develop@xgmtk.org>
* @param <T> Type of elements.
*/
public final class ArrayMatrix<T> implements ImmutableMatrix<T> {
    /**
     * Iterator over the elements of a single row, walking a contiguous slice
     * of the flat backing array. Inner use only.
     * <p>
     * Fix: the inner classes previously redeclared {@code <T>}, shadowing the
     * enclosing type parameter; they now share the outer {@code T}.
     */
    private final class ColIterator implements Iterator<T> {
        private int index;      // next flat index to read
        private final int sup;  // exclusive end of this row's slice

        /**
         * @param offset flat index of the row's first element
         */
        public ColIterator(int offset) {
            this.index = offset;
            this.sup = offset + cols;
        }

        @Override
        public boolean hasNext() {
            return this.index < this.sup;
        }

        @Override
        @SuppressWarnings("unchecked")
        public T next() {
            if (!hasNext()) {
                // NOTE(review): the Iterator contract specifies NoSuchElementException
                // here; IndexOutOfBoundsException is kept to preserve existing behavior.
                throw new IndexOutOfBoundsException();
            }
            return (T) elements[this.index++];
        }
    }

    /**
     * Read-only view of a single row of the matrix. Inner use only.
     */
    private final class RowVector implements ImmutableVector<T> {
        private final int rowIndex; // row number within the matrix
        private final int offset;   // flat index of the row's first element

        /**
         * @param rowIndex row number
         * @param offset flat index of the row's first element
         */
        public RowVector(int rowIndex, int offset) {
            this.rowIndex = rowIndex;
            this.offset = offset;
        }

        @Override
        public int size() {
            return cols;
        }

        @Override
        @SuppressWarnings("unchecked")
        public T get(int i) {
            if (i >= cols) {
                throw new IndexOutOfBoundsException();
            }
            return (T) elements[this.rowIndex * cols + i];
        }

        @Override
        public Iterator<T> iterator() {
            return new ColIterator(this.offset);
        }
    }

    /**
     * Iterator over the rows of the matrix. Inner use only.
     */
    private class RowVectorIterator implements Iterator<ImmutableVector<T>> {
        private int index;          // flat index of the next row's first element
        private int rowIndex;       // row number of the next row
        private final int endIndex; // rows * cols, exclusive upper bound of index

        /**
         * Starts at the first row.
         */
        public RowVectorIterator() {
            this.index = 0;
            this.endIndex = rows * cols;
        }

        @Override
        public boolean hasNext() {
            return this.index < this.endIndex;
        }

        @Override
        public ImmutableVector<T> next() {
            if (!hasNext()) {
                // NOTE(review): Iterator contract prescribes NoSuchElementException;
                // kept as IndexOutOfBoundsException to preserve existing behavior.
                throw new IndexOutOfBoundsException();
            }
            int ix = this.index;
            this.index += cols;
            return new RowVector(this.rowIndex++, ix);
        }
    }

    private final int rows;
    private final int cols;
    private final Object[] elements; // row-major flat storage, rows * cols long

    /**
     * Builds a matrix from a list of rows. The column count is taken from the
     * first row; all rows are assumed to have the same length.
     *
     * @param elements rows of the matrix
     */
    public ArrayMatrix(List<List<T>> elements) {
        this.rows = elements.size();
        this.cols = this.rows == 0 ? 0 : elements.get(0).size();
        this.elements = new Object[rows * cols];
        int i = 0;
        for (List<T> row : elements) {
            for (T v : row) {
                this.elements[i] = v;
                ++i;
            }
        }
    }

    /**
     * Copy initializer.
     *
     * @param src A source matrix to copying.
     * @param elementCopier element copier applied to every element
     */
    public ArrayMatrix(ImmutableMatrix<T> src, Function<T, T> elementCopier) {
        this.rows = src.numberOfRows();
        this.cols = src.numberOfColumns();
        this.elements = new Object[rows * cols];
        int i = 0;
        for (ImmutableVector<T> row : src) {
            for (T v : row) {
                this.elements[i] = elementCopier.apply(v);
                ++i;
            }
        }
    }

    @Override
    public int numberOfRows() {
        return this.rows;
    }

    @Override
    public int numberOfColumns() {
        return this.cols;
    }

    @Override
    @SuppressWarnings("unchecked")
    public T get(int indexRow, int indexColumn) {
        if (indexRow >= this.rows || indexColumn >= this.cols) {
            throw new IndexOutOfBoundsException();
        }
        return (T) this.elements[indexRow * this.cols + indexColumn];
    }

    @Override
    public ImmutableVector<T> get(int indexRow) {
        if (indexRow >= this.rows) {
            throw new IndexOutOfBoundsException();
        }
        return new RowVector(indexRow, indexRow * this.cols);
    }

    @Override
    public Iterator<ImmutableVector<T>> iterator() {
        return new RowVectorIterator();
    }

    @Override
    public boolean equals(Object o) {
        if (!(o instanceof ArrayMatrix<?>)) {
            return false;
        }
        ArrayMatrix<?> v = (ArrayMatrix<?>) o;
        if (this.numberOfRows() != v.numberOfRows() || this.numberOfColumns() != v.numberOfColumns()) {
            return false;
        }
        return Arrays.deepEquals(this.elements, v.elements);
    }

    @Override
    public int hashCode() {
        int hash = 7;
        hash = 47 * hash + this.rows;
        hash = 47 * hash + this.cols;
        hash = 47 * hash + Arrays.deepHashCode(this.elements);
        return hash;
    }
}
| |
/*
* Copyright (c) 2016, WSO2 Inc. (http://wso2.com) All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.wso2.msf4j.internal.router;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.io.Resources;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.JsonObject;
import org.wso2.msf4j.HttpStreamHandler;
import org.wso2.msf4j.HttpStreamer;
import org.wso2.msf4j.Microservice;
import org.wso2.msf4j.Request;
import org.wso2.msf4j.util.BufferUtil;
import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.nio.ByteBuffer;
import java.nio.charset.Charset;
import java.util.List;
import java.util.SortedSet;
import java.util.concurrent.TimeUnit;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Context;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import static org.testng.AssertJUnit.fail;
/**
* Test handler.
*/
@SuppressWarnings("UnusedParameters")
@Path("/test/v1")
public class TestMicroservice implements Microservice {
private static final Gson GSON = new Gson();
// --- Verb-only handlers: no method-level @Path, so each binds to the class path "/test/v1". ---
@GET
public String noMethodPathGet() {
return "no-@Path-GET";
}
@POST
public String noMethodPathPost() {
return "no-@Path-POST";
}
@PUT
public String noMethodPathPut() {
return "no-@Path-PUT";
}
@DELETE
public String noMethodPathDelete() {
return "no-@Path-DELETE";
}
// --- Content-negotiation handlers: exercise @Consumes/@Produces media-type matching. ---
@Path("jsonConsumeStringProduce")
@POST
@Consumes("text/json")
@Produces("text/plain")
public String jsonConsume01(Pet input) {
return input.getDetails();
}
@Path("textConsumeJsonProduce")
@POST
@Produces("text/json")
@Consumes("text/plain")
public TextBean textConsume01(String input) {
TextBean textBean = new TextBean();
textBean.setText(input);
return textBean;
}
@Path("textConsumeTextProduce")
@POST
@Consumes("text/plain")
@Produces("text/plain")
public String textConsume02(String input) {
return input + "-processed";
}
@Path("textConsumeTextProduceXml")
@POST
@Consumes("text/xml")
@Produces("text/xml")
public XmlBean textConsume03(XmlBean input) {
return input;
}
// Blocks the request thread for the given number of seconds (used for timeout tests).
// NOTE(review): the interrupt status is not restored in the catch — acceptable for a fixture.
@Path("sleep/{seconds}")
@GET
public Response testSleep(@PathParam("seconds") int seconds) {
try {
TimeUnit.SECONDS.sleep(seconds);
return Response.status(Response.Status.OK).entity("slept: " + seconds + "s").build();
} catch (InterruptedException e) {
return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(e.getMessage()).build();
}
}
// Two variants of setting the response content type: MediaType constant vs. raw string.
@Path("response/typehtml")
@GET
public Response produceHtmlContent0() {
return Response.ok()
.type(MediaType.TEXT_HTML_TYPE)
.entity("Hello")
.build();
}
@Path("response/typehtml/str")
@GET
public Response produceHtmlContent1() {
return Response.ok()
.type(MediaType.TEXT_HTML)
.entity("Hello")
.build();
}
@Path("resource")
@GET
public Response testGet() {
JsonObject object = new JsonObject();
object.addProperty("status", "Handled get in resource end-point");
return Response.status(Response.Status.OK).entity(object).build();
}
@Path("tweets/{id}")
@GET
public Response testGetTweet(@PathParam("id") String id) {
JsonObject object = new JsonObject();
object.addProperty("status", String.format("Handled get in tweets end-point, id: %s", id));
return Response.status(Response.Status.OK).entity(object).build();
}
@Path("tweets/{id}")
@PUT
public Response testPutTweet(@PathParam("id") String id) {
JsonObject object = new JsonObject();
object.addProperty("status", String.format("Handled put in tweets end-point, id: %s", id));
return Response.status(Response.Status.OK).entity(object).build();
}
// Void handler with no response body: exercises routing of a no-content endpoint.
@Path("facebook/{id}/message")
@DELETE
public void testNoMethodRoute(@PathParam("id") String id) {
}
// Echoes the request body back inside a JSON result; fail() guards the "impossible" IO error.
@Path("facebook/{id}/message")
@PUT
public Response testPutMessage(@PathParam("id") String id, @Context Request request) {
String message = String.format("Handled put in tweets end-point, id: %s. ", id);
try {
String data = getStringContent(request);
message = message.concat(String.format("Content: %s", data));
} catch (IOException e) {
//This condition should never occur
fail();
}
JsonObject object = new JsonObject();
object.addProperty("result", message);
return Response.status(Response.Status.OK).entity(object).build();
}
@Path("facebook/{id}/message")
@POST
public Response testPostMessage(@PathParam("id") String id, @Context Request request) {
String message = String.format("Handled post in tweets end-point, id: %s. ", id);
try {
String data = getStringContent(request);
message = message.concat(String.format("Content: %s", data));
} catch (IOException e) {
//This condition should never occur
fail();
}
JsonObject object = new JsonObject();
object.addProperty("result", message);
return Response.status(Response.Status.OK).entity(object).build();
}
@Path("/user/{userId}/message/{messageId}")
@GET
public Response testMultipleParametersInPath(@PathParam("userId") String userId,
@PathParam("messageId") int messageId) {
JsonObject object = new JsonObject();
object.addProperty("result", String.format("Handled multiple path parameters %s %d", userId, messageId));
return Response.status(Response.Status.OK).entity(object).build();
}
// Same as above, but the method parameters are declared in the reverse of template order.
@Path("/message/{messageId}/user/{userId}")
@GET
public Response testMultipleParametersInDifferentParameterDeclarationOrder(@PathParam("userId") String userId,
@PathParam("messageId") int messageId) {
JsonObject object = new JsonObject();
object.addProperty("result", String.format("Handled multiple path parameters %s %d", userId, messageId));
return Response.status(Response.Status.OK).entity(object).build();
}
// @PathParam("userid") deliberately mismatches template variable {id}: must not be routable.
@Path("/NotRoutable/{id}")
@GET
public Response notRoutableParameterMismatch(@PathParam("userid") String userId) {
JsonObject object = new JsonObject();
object.addProperty("result", String.format("Handled Not routable path %s ", userId));
return Response.status(Response.Status.OK).entity(object).build();
}
@Path("/exception")
@GET
public void exception() {
throw new IllegalArgumentException("Illegal argument");
}
// Decodes the aggregated request body using the platform default charset.
private String getStringContent(Request request) throws IOException {
return Charset.defaultCharset().decode(BufferUtil.merge(request.getFullMessageBody())).toString();
}
// --- Route-specificity handlers: verify literal segments win over {param} and '**'. ---
@Path("/multi-match/**")
@GET
public String multiMatchAll() {
return "multi-match-*";
}
@Path("/multi-match/{param}")
@GET
public String multiMatchParam(@PathParam("param") String param) {
return "multi-match-param-" + param;
}
@Path("/multi-match/foo")
@GET
public String multiMatchFoo() {
return "multi-match-get-actual-foo";
}
@Path("/multi-match/foo")
@PUT
public String multiMatchParamPut() {
return "multi-match-put-actual-foo";
}
@Path("/multi-match/{param}/bar")
@GET
public String multiMatchParamBar(@PathParam("param") String param) {
return "multi-match-param-bar-" + param;
}
@Path("/multi-match/foo/{param}")
@GET
public String multiMatchFooParam(@PathParam("param") String param) {
return "multi-match-get-foo-param-" + param;
}
@Path("/multi-match/foo/{param}/bar")
@GET
public String multiMatchFooParamBar(@PathParam("param") String param) {
return "multi-match-foo-param-bar-" + param;
}
@Path("/multi-match/foo/bar/{param}")
@GET
public String multiMatchFooBarParam(@PathParam("param") String param) {
return "multi-match-foo-bar-param-" + param;
}
@Path("/multi-match/foo/{param}/bar/baz")
@GET
public String multiMatchFooParamBarBaz(@PathParam("param") String param) {
return "multi-match-foo-param-bar-baz-" + param;
}
@Path("/multi-match/foo/bar/{param}/{id}")
@GET
public String multiMatchFooBarParamId(@PathParam("param") String param, @PathParam("id") String id) {
return "multi-match-foo-bar-param-" + param + "-id-" + id;
}
// Serves bundled test resources of several types; exercises file responses and custom headers.
@Path("/fileserver/{fileType}")
@GET
public Response serveFile(@PathParam("fileType") String fileType) throws Exception {
File file;
if (fileType.equals("png")) {
file = new File(Resources.getResource("testPngFile.png").toURI());
return Response.ok(file).build();
} else if (fileType.equals("jpg")) {
file = new File(Resources.getResource("testJpgFile.jpg").toURI());
return Response.ok(file).header("X-Custom-Header", "wso2").build();
} else if (fileType.equals("txt")) {
file = new File(Resources.getResource("testTxtFile.txt").toURI());
return Response.ok(file).build();
}
return Response.noContent().build();
}
// Streams the uploaded body chunk by chunk and echoes the accumulated text back at end().
@Path("/stream/upload")
@PUT
public void streamUpload(@Context HttpStreamer httpStreamer) throws Exception {
final StringBuffer sb = new StringBuffer();
httpStreamer.callback(new HttpStreamHandler() {
private org.wso2.msf4j.Response response;
@Override
public void init(org.wso2.msf4j.Response response) {
this.response = response;
}
@Override
public void chunk(ByteBuffer content) throws Exception {
sb.append(Charset.defaultCharset().decode(content).toString());
}
@Override
public void end() throws Exception {
response.setStatus(Response.Status.OK.getStatusCode());
response.setEntity(sb.toString());
response.send();
}
@Override
public void error(Throwable cause) {
// Discard whatever was accumulated on failure.
sb.delete(0, sb.length());
}
});
}
// Upload handler expected to fail mid-stream.
// NOTE(review): ByteBuffer.array() throws for direct buffers — presumably the intended
// failure trigger for this scenario; confirm against the corresponding test case.
@Path("/stream/upload/fail")
@PUT
public HttpStreamHandler streamUploadFailure() {
final int fileSize = 30 * 1024 * 1024;
return new HttpStreamHandler() {
private org.wso2.msf4j.Response response;
ByteBuffer offHeapBuffer = ByteBuffer.allocateDirect(fileSize);
@Override
public void init(org.wso2.msf4j.Response response) {
this.response = response;
}
@Override
public void chunk(ByteBuffer content) throws Exception {
offHeapBuffer.put(content.array());
}
@Override
public void end() throws Exception {
int bytesUploaded = offHeapBuffer.position();
response.setStatus(Response.Status.OK.getStatusCode());
response.setEntity("Uploaded:" + bytesUploaded);
response.send();
}
@Override
public void error(Throwable cause) {
offHeapBuffer = null;
}
};
}
// Non-streaming counterpart: aggregates the whole body in memory, reports its size.
@Path("/aggregate/upload")
@PUT
public String aggregatedUpload(@Context Request request) {
ByteBuffer content = BufferUtil.merge(request.getFullMessageBody());
int bytesUploaded = content.capacity();
return "Uploaded:" + bytesUploaded;
}
@Path("/gzipfile")
@GET
public Response gzipFile() throws IOException, URISyntaxException {
File file = new File(Resources.getResource("testJpgFile.jpg").toURI());
return Response.ok().entity(file).build();
}
// Throws an unmapped unchecked exception to test default exception handling.
// NOTE(review): Throwables.propagate is deprecated in newer Guava; behavior kept as-is.
@Path("/uexception")
@GET
public void testException() {
throw Throwables.propagate(new RuntimeException("User Exception"));
}
@Path("/noresponse")
@GET
public void testNoResponse() {
}
// --- Parameter-binding handlers: query, primitive, collection and header parameters. ---
@Path("/stringQueryParam/{path}")
@GET
public String testStringQueryParam(@PathParam("path") String path, @QueryParam("name") String name) {
return path + ":" + name;
}
@Path("/primitiveQueryParam")
@GET
public String testPrimitiveQueryParam(@QueryParam("age") int age) {
return Integer.toString(age);
}
@Path("/sortedSetQueryParam")
@GET
public String testSortedSetQueryParam(@QueryParam("id") SortedSet<Integer> ids) {
return Joiner.on(',').join(ids);
}
@Path("/listHeaderParam")
@GET
public String testListHeaderParam(@HeaderParam("name") List<String> names) {
return Joiner.on(',').join(names);
}
@Path("/headerResponse")
@GET
public Response testHeaderResponse(@HeaderParam("name") String name) {
return Response.status(Response.Status.OK.getStatusCode()).entity("Entity").header("name", name).build();
}
// Exercises @DefaultValue on query and header parameters; result serialized via Gson.
@Path("/defaultValue")
@GET
public Object testDefaultValue(@DefaultValue("30") @QueryParam("age") Integer age,
@DefaultValue("hello") @QueryParam("name") String name,
@DefaultValue("casking") @HeaderParam("hobby") List<String> hobbies) {
JsonObject response = new JsonObject();
response.addProperty("age", age);
response.addProperty("name", name);
response.add("hobby", GSON.toJsonTree(hobbies, new TypeToken<List<String>>() {
}.getType()));
return response;
}
@Path("/connectionClose")
@GET
public Response testConnectionClose() {
return Response.status(Response.Status.OK).entity("Close connection").header("Connection", "close").build();
}
@Path("/uploadReject")
@POST
public Response testUploadReject() {
return Response.status(Response.Status.BAD_REQUEST).entity("Rejected").header("Connection", "close").build();
}
// --- Exception-mapping handlers. ---
@Path("/customException")
@POST
public void testCustomException() throws CustomException {
throw new CustomException();
}
@Path("/mappedException")
@GET
public void testExceptionMapping() throws MappedException {
throw new MappedException("Mapped exception thrown");
}
@Path("/mappedException2")
@GET
public void testExceptionMapping2() throws MappedException2 {
throw new MappedException2("Mapped exception 2 thrown");
}
/**
 * Custom exception class for testing exception handler.
 */
public static final class CustomException extends Exception {
public static final int HTTP_RESPONSE_STATUS = Response.Status.INTERNAL_SERVER_ERROR.getStatusCode();
}
}
| |
package io.cattle.platform.inator.wrapper;
import io.cattle.platform.core.constants.CommonStatesConstants;
import io.cattle.platform.core.constants.HealthcheckConstants;
import io.cattle.platform.core.constants.HostConstants;
import io.cattle.platform.core.constants.InstanceConstants;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.model.DeploymentUnit;
import io.cattle.platform.core.model.Host;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.core.util.SystemLabels;
import io.cattle.platform.inator.Inator;
import io.cattle.platform.inator.Inator.DesiredState;
import io.cattle.platform.inator.Result;
import io.cattle.platform.inator.UnitRef;
import io.cattle.platform.inator.factory.InatorServices;
import io.cattle.platform.inator.util.StateUtil;
import io.cattle.platform.object.meta.ObjectMetaDataManager;
import io.cattle.platform.object.util.DataAccessor;
import io.cattle.platform.object.util.TransitioningUtils;
import org.apache.commons.lang3.StringUtils;
import java.util.Date;
import java.util.Map;
public class DeploymentUnitWrapper implements BasicStateWrapper {
DeploymentUnit unit;
Service service;
InatorServices svc;
// Lazily resolved in isDeployable(); stays null until the unit's host is loaded.
Host host;
/**
 * Wraps a deployment unit together with its (possibly null) owning service.
 *
 * @param unit the deployment unit record
 * @param service the owning service, may be null
 * @param svc shared inator service registry
 */
public DeploymentUnitWrapper(DeploymentUnit unit, Service service, InatorServices svc) {
super();
this.unit = unit;
this.svc = svc;
this.service = service;
}
// Deployable when the assigned host's agent is active (or has no agent state),
// or — when no host is assigned — when the cluster has at least one active host.
public boolean isDeployable() {
if (host == null && unit.getHostId() != null) {
host = svc.objectManager.loadResource(Host.class, unit.getHostId());
}
if (host == null) {
return hasActiveHosts(unit.getClusterId());
}
return StringUtils.isBlank(host.getAgentState()) || CommonStatesConstants.ACTIVE.equals(host.getAgentState());
}
// True when any host in the cluster is active and has an active agent.
private boolean hasActiveHosts(Long clusterId) {
return svc.metadataManager.getMetadataForCluster(clusterId).getHosts().stream()
.anyMatch(host -> CommonStatesConstants.ACTIVE.equals(host.getAgentState())
&& CommonStatesConstants.ACTIVE.equals(host.getState()));
}
// A removing/removed owning service forces REMOVED regardless of the unit's own state.
public Inator.DesiredState getDesiredState() {
if (service != null && (CommonStatesConstants.REMOVING.equals(service.getState()) || service.getRemoved() != null)) {
return DesiredState.REMOVED;
}
return StateUtil.getDesiredState(unit.getState(), unit.getRemoved());
}
/**
 * Schedules removal if the unit is not already removed.
 *
 * @return true when already gone; false when removal was just requested
 */
@Override
public boolean remove() {
if (unit.getRemoved() != null) {
return true;
}
svc.processManager.remove(unit, null);
return false;
}
@Override
public boolean isTransitioning() {
return svc.objectMetadataManager.isTransitioningState(DeploymentUnit.class, unit.getState());
}
@Override
public void create() {
svc.processManager.createThenActivate(unit, null);
}
@Override
public void activate() {
svc.processManager.activate(unit, null);
}
@Override
public void deactivate() {
svc.processManager.deactivate(unit, null);
}
@Override
public String getState() {
return unit.getState();
}
// Deployment units carry no health check of their own; always reported healthy.
@Override
public String getHealthState() {
return HealthcheckConstants.HEALTH_STATE_HEALTHY;
}
@Override
public Date getRemoved() {
return unit.getRemoved();
}
@Override
public ObjectMetaDataManager getMetadataManager() {
return svc.objectMetadataManager;
}
public DeploymentUnit getInternal() {
return unit;
}
// Prefers the requested (pending) revision over the currently applied one.
public Long getRevisionId() {
if (unit.getRequestedRevisionId() != null) {
return unit.getRequestedRevisionId();
}
return unit.getRevisionId();
}
public Long getRequestRevisionId() {
return unit.getRequestedRevisionId();
}
public Long getAppliedRevisionId() {
return unit.getRevisionId();
}
// NOTE(review): assumes serviceIndex is numeric when present — a non-numeric value
// would surface as NumberFormatException; confirm upstream guarantees.
public int getServiceIndex() {
if (unit.getServiceIndex() == null) {
return 0;
}
return Integer.parseInt(unit.getServiceIndex());
}
// NOTE(review): uses the raw hostId column, not getHostId() (which also consults the
// requested-host label) — confirm this asymmetry is intended.
public UnitRef getRef() {
return newRef(unit.getHostId(), unit.getServiceIndex(), svc);
}
// Returns the assigned host id, falling back to the "requested host" scheduling label.
public Long getHostId() {
Long hostId = unit.getHostId();
if (hostId != null) {
return hostId;
}
Map<String, Object> labels = DataAccessor.fieldMap(unit, InstanceConstants.FIELD_LABELS);
Object hostObj = labels.get(SystemLabels.LABEL_SERVICE_REQUESTED_HOST_ID);
if (hostObj instanceof Number) {
return ((Number) hostObj).longValue();
}
return null;
}
public String getIndex() {
return unit.getServiceIndex();
}
// Builds "deploymentUnit/<index>" or "deploymentUnit/<formattedHostId>/<index>".
public static UnitRef newRef(Long hostId, String index, InatorServices svc) {
if (hostId == null) {
return new UnitRef(ServiceConstants.KIND_DEPLOYMENT_UNIT + "/" + index);
}
Object id = svc.idFormatter.formatId(HostConstants.TYPE, hostId);
return new UnitRef(ServiceConstants.KIND_DEPLOYMENT_UNIT + "/" + id + "/" + index);
}
// Inverse of newRef for host-less refs: strips the "deploymentUnit/" prefix.
public static String getIndex(UnitRef ref) {
String refString = ref.toString();
if (refString.startsWith(ServiceConstants.KIND_DEPLOYMENT_UNIT + "/")) {
return refString.substring(ServiceConstants.KIND_DEPLOYMENT_UNIT.length() + 1);
}
return null;
}
public String getUuid() {
return unit.getUuid();
}
public Long getId() {
return unit.getId();
}
public String getDisplayName() {
return String.format("%s (%s)", unit.getKind(), svc.idFormatter.formatId(unit.getKind(), unit.getId()));
}
// Schedules the pause process unless the unit is already paused.
public Result pause() {
if (!StateUtil.isPaused(unit.getState())) {
svc.processManager.scheduleProcessInstance(ServiceConstants.PROCESS_DU_PAUSE, unit, null);
}
return Result.good();
}
public Long getStackId() {
return unit.getStackId();
}
public Long getServiceId() {
return unit.getServiceId();
}
// Marks the requested revision / restart trigger as applied; persists only on change.
// NOTE(review): unit.getRevisionId().equals(...) would NPE if revisionId were null while
// a requested revision is set — confirm revisionId is always populated first.
public void setApplied() {
boolean changed = false;
if (unit.getRequestedRevisionId() != null && !unit.getRevisionId().equals(unit.getRequestedRevisionId())) {
unit.setRevisionId(unit.getRequestedRevisionId());
changed = true;
}
if (service != null) {
Long serviceRestart = DataAccessor.fieldLong(service, ServiceConstants.FIELD_RESTART_TRIGGER);
if (serviceRestart == null) {
serviceRestart = 0L;
}
if (!getRestartTrigger().equals(serviceRestart)) {
DataAccessor.setField(unit, ServiceConstants.FIELD_RESTART_TRIGGER, serviceRestart);
changed = true;
}
}
if (changed) {
svc.objectManager.persist(unit);
svc.triggerServiceReconcile(unit.getServiceId());
}
}
// Restart trigger counter on the unit; absent value reads as 0.
public Long getRestartTrigger() {
Long val = DataAccessor.fieldLong(unit, ServiceConstants.FIELD_RESTART_TRIGGER);
return val == null ? 0L : val;
}
public String getTransitioningMessage() {
return TransitioningUtils.getTransitioningErrorMessage(unit);
}
}
| |
/*
* Copyright 2014 Guidewire Software, Inc.
*/
package gw.internal.gosu.parser;
import gw.config.BaseService;
import gw.config.CommonServices;
import gw.fs.IDirectory;
import gw.lang.parser.GlobalScope;
import gw.lang.parser.IAttributeSource;
import gw.lang.parser.IParseIssue;
import gw.lang.parser.ITypeUsesMap;
import gw.lang.parser.ILanguageLevel;
import gw.lang.parser.exceptions.IncompatibleTypeException;
import gw.lang.parser.exceptions.ParseIssue;
import gw.lang.parser.expressions.IQueryExpression;
import gw.lang.parser.expressions.IQueryExpressionEvaluator;
import gw.lang.reflect.IEntityAccess;
import gw.lang.reflect.IFeatureInfo;
import gw.lang.reflect.IGosuClassLoadingObserver;
import gw.lang.reflect.IPropertyInfo;
import gw.lang.reflect.IType;
import gw.lang.reflect.ITypeLoader;
import gw.lang.reflect.AbstractTypeSystemListener;
import gw.lang.reflect.RefreshRequest;
import gw.lang.reflect.TypeSystem;
import gw.lang.reflect.gs.GosuClassTypeLoader;
import gw.lang.reflect.gs.ICompilableType;
import gw.lang.reflect.module.IModule;
import gw.util.GosuExceptionUtil;
import gw.util.IFeatureFilter;
import gw.util.ILogger;
import gw.util.SystemOutLogger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Date;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
/**
*/
public class DefaultEntityAccess extends BaseService implements IEntityAccess
{
public static final SystemOutLogger DEFAULT_LOGGER = new SystemOutLogger(SystemOutLogger.LoggingLevel.WARN);
private static DefaultEntityAccess g_instance;
private static final ITypeUsesMap EMPTY_TYPE_USES = new TypeUsesMap( Collections.<String>emptyList() ).lock();
private List<IGosuClassLoadingObserver> _classLoadingObservers;
/**
 * Returns the singleton instance, creating it on first use.
 * <p>
 * Declared {@code synchronized} so concurrent first calls cannot hit the
 * previous unsynchronized check-then-act race and construct two instances.
 */
public static synchronized DefaultEntityAccess instance()
{
  if( g_instance == null )
  {
    g_instance = new DefaultEntityAccess();
  }
  return g_instance;
}
private Map _scopes = new HashMap();
/**
 * Creates the default entity-access service with an empty, thread-safe
 * list of Gosu class-loading observers.
 */
public DefaultEntityAccess()
{
_classLoadingObservers = Collections.synchronizedList(new ArrayList<IGosuClassLoadingObserver>());
}
/**
 * Returns the default Gosu type loader singleton.
 */
public ITypeLoader getDefaultTypeLoader()
{
return DefaultTypeLoader.instance();
}
/** Returns the shared, locked, empty type-uses map — no implicit imports by default. */
@Override
public ITypeUsesMap getDefaultTypeUses()
{
return EMPTY_TYPE_USES;
}
/**
 * @return false — the default implementation recognizes no domain instances
 */
@Override
public boolean isDomainInstance( Object value )
{
return false;
}
/**
 * @return false — the default implementation has no entity classes
 */
@Override
public boolean isEntityClass( IType cls )
{
return false;
}
/** @return false — the default implementation has no view entity classes */
@Override
public boolean isViewEntityClass( IType type )
{
return false;
}
/** @return null — no primary entity class in the default implementation */
@Override
public IType getPrimaryEntityClass( IType type )
{
return null;
}
/**
 * @return false — the default implementation has no keyable entity classes
 */
@Override
public boolean isKeyableEntityClass( IType cls )
{
return false;
}
/**
 * @return false — the default implementation has no domain classes
 */
@Override
public boolean isDomainClass( IType cls )
{
return false;
}
/** @return false — the default implementation has no typekeys */
@Override
public boolean isTypekey( IType cls )
{
return false;
}
/**
 * @return null — the default implementation cannot resolve entity instances
 */
@Override
public Object getEntityInstanceFrom( Object entity, IType classEntity )
{
return null;
}
/**
 * @return whether the two beans are equal per {@link Object#equals}.
 * NOTE(review): throws NullPointerException when bean1 is null — confirm callers never pass null.
 */
@Override
public boolean areBeansEqual( Object bean1, Object bean2 )
{
return bean1.equals( bean2 );
}
/**
 * Verifies that the given value is coercible/comparable to the given type.
 *
 * @param type the target symbol type
 * @param value the value to check
 * @return true when the value's type is comparable with {@code type}
 * @throws IncompatibleTypeException when the types are not comparable
 */
@Override
public boolean verifyValueForType( IType type, Object value ) throws RuntimeException
{
  // Compute the value's type once; the previous version derived it a second
  // time inside the catch block just to build the error message.
  IType valueType = TypeLoaderAccess.instance().getIntrinsicTypeFromObject( value );
  try
  {
    CommonServices.getCoercionManager().verifyTypesComparable( type, valueType, false );
  }
  catch( ParseIssue pe )
  {
    // NOTE(review): the originating ParseIssue is not chained as a cause —
    // consider chaining it if IncompatibleTypeException supports a cause.
    throw new IncompatibleTypeException( "Value of type: " + valueType.getName() +
                                         " is not compatible with symbol type: " + type.getName() );
  }
  return true;
}
@Override
public String makeStringFrom( Object obj )
{
if( obj == null )
{
return null;
}
return obj.toString();
}
/**
* @return x
*/
@Override
public long getHashedEntityId( String strId, IType classEntity )
{
return -1;
}
/**
* @return x
*/
@Override
public boolean isInternal( IType cls )
{
return false;
}
@Override
public ILogger getLogger()
{
return DEFAULT_LOGGER;
}
@Override
public Locale getLocale()
{
return Locale.getDefault();
}
@Override
public Date getCurrentTime()
{
return new Date();
}
@Override
public void addEnhancementMethods(IType typeToEnhance, Collection methodsToAddTo)
{
IModule module = TypeSystem.getCurrentModule();
addEnhancementMethods(typeToEnhance, methodsToAddTo, module, new HashSet<IModule>());
}
private void addEnhancementMethods(IType typeToEnhance, Collection methodsToAddTo, IModule module, Set<IModule> visited)
{
if(visited.contains(module))
{
return;
}
visited.add(module);
if( GosuClassTypeLoader.getDefaultClassLoader(module) != null )
{
GosuClassTypeLoader.getDefaultClassLoader(module).getEnhancementIndex().addEnhancementMethods( typeToEnhance, methodsToAddTo);
}
for(IModule dep : module.getModuleTraversalList())
{
addEnhancementMethods(typeToEnhance, methodsToAddTo, dep, visited);
}
}
@Override
public void addEnhancementProperties(IType typeToEnhance, Map propertyInfosToAddTo, boolean caseSensitive)
{
IModule module = TypeSystem.getCurrentModule();
addEnhancementProperties(typeToEnhance, propertyInfosToAddTo, caseSensitive, module, new HashSet<IModule>());
}
private void addEnhancementProperties(IType typeToEnhance, Map propertyInfosToAddTo, boolean caseSensitive, IModule module, Set<IModule> visited)
{
if(visited.contains(module))
{
return;
}
visited.add(module);
if( GosuClassTypeLoader.getDefaultClassLoader(module) != null )
{
GosuClassTypeLoader.getDefaultClassLoader(module).getEnhancementIndex().addEnhancementProperties( typeToEnhance, propertyInfosToAddTo, caseSensitive);
}
for(IModule dep : module.getModuleTraversalList())
{
addEnhancementProperties(typeToEnhance, propertyInfosToAddTo, caseSensitive, dep, visited);
}
}
@Override
public IQueryExpressionEvaluator getQueryExpressionEvaluator( IQueryExpression queryExpression )
{
return null;
}
@Override
public IFeatureFilter getQueryExpressionFeatureFilter()
{
return new IFeatureFilter() {
@Override
public boolean acceptFeature(IType beanType, IFeatureInfo fi) {
return false;
}
};
}
@Override
public ClassLoader getPluginClassLoader()
{
return DefaultEntityAccess.class.getClassLoader();
}
@Override
public Object constructObject( Class cls )
{
try
{
return cls.newInstance();
}
catch( Exception e )
{
throw GosuExceptionUtil.forceThrow( e );
}
}
@Override
public IAttributeSource getAttributeSource( GlobalScope scope )
{
IAttributeSource source = (IAttributeSource)_scopes.get( scope );
if( source == null )
{
source = new ThreadLocalAttributeSource( scope );
_scopes.put( scope, source );
}
return source;
}
public void clearAttributeScopes()
{
_scopes.clear();
}
private static class ThreadLocalAttributeSource extends AbstractTypeSystemListener implements IAttributeSource
{
private GlobalScope _scope;
private ThreadLocal _values = new ThreadLocal();
public ThreadLocalAttributeSource( GlobalScope scope )
{
_scope = scope;
TypeLoaderAccess.instance().addTypeLoaderListenerAsWeakRef( this );
}
public GlobalScope getScope()
{
return _scope;
}
@Override
public boolean hasAttribute( String strAttr )
{
Map map = getMap();
return map.containsKey( strAttr );
}
@Override
public Object getAttribute( String strAttr )
{
Map map = getMap();
return map.get(strAttr);
}
@Override
public void setAttribute( String strAttr, Object value )
{
Map map = getMap();
map.put( strAttr, value );
}
@Override
public boolean equals( Object o )
{
if( this == o )
{
return true;
}
if( o == null || getClass() != o.getClass() )
{
return false;
}
final ThreadLocalAttributeSource that = (ThreadLocalAttributeSource)o;
return _scope.equals( that._scope );
}
@Override
public int hashCode()
{
return _scope.hashCode();
}
private Map getMap()
{
if( _values.get() == null )
{
_values.set( new HashMap() );
}
return (Map)_values.get();
}
@Override
public void refreshedTypes(RefreshRequest request)
{
}
@Override
public void refreshed()
{
}
}
@Override
public Object[] convertToExternalIfNecessary( Object[] args, Class[] argTypes, Class methodOwner )
{
return args;
}
@Override
public Object convertToInternalIfNecessary( Object obj, Class methodOwner )
{
return obj;
}
@Override
public boolean isExternal( Class methodOwner )
{
return false;
}
@Override
public StringBuilder getPluginRepositories()
{
return new StringBuilder();
}
@Override
public String getWebServerPaths()
{
return "";
}
@Override
public boolean isUnreachableCodeDetectionOn()
{
return true;
}
@Override
public boolean isWarnOnImplicitCoercionsOn()
{
return true;
}
@Override
public IType getKeyType()
{
return null;
}
@Override
public IPropertyInfo getEntityIdProperty( IType rootType )
{
return null;
}
@Override
public boolean shouldAddWarning( IType type, IParseIssue warning )
{
return true;
}
@Override
public boolean isServerMutable()
{
return true;
}
@Override
public boolean isDevMode() {
return true;
}
@Override
public boolean isRetainDebugInfo()
{
return false;
}
@Override
public ILanguageLevel getLanguageLevel()
{
return new StandardLanguageLevel();
}
@Override
public List<IGosuClassLoadingObserver> getGosuClassLoadingObservers() {
return _classLoadingObservers;
}
@Override
public boolean areUsesStatementsAllowedInStatementLists(ICompilableType gosuClass) {
return false;
}
@Override
public List<IDirectory> getAdditionalSourceRoots() {
return Collections.EMPTY_LIST;
}
@Override
public void reloadedTypes(String[] types) {
//nothing to do
}
}
| |
package org.jgroups;
import org.jgroups.conf.ClassConfigurator;
import org.jgroups.util.*;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Map;
import java.util.function.Supplier;
/**
* A Message encapsulates data sent to members of a group. It contains among other things the
* address of the sender, the destination address, a payload (byte buffer) and a list of headers.
* Headers are added by protocols on the sender side and removed by protocols on the receiver's side.
* <p>
* The byte buffer can point to a reference, and we can subset it using index and length. However,
* when the message is serialized, we only write the bytes between index and length.
*
* @since 2.0
* @author Bela Ban
*/
public class Message implements Streamable, Constructable<Message> {

    /** Destination; null means the message is sent to the whole group. */
    protected Address dest;

    /** Sender; may be left null (see {@link #writeToNoAddrs}). */
    protected Address sender;

    /** The payload */
    protected byte[] buf;

    /** The index into the payload (usually 0) */
    protected int offset;

    /** The number of bytes in the buffer (usually buf.length if buf is not null). */
    protected int length;

    /** All headers are placed here */
    protected volatile Header[] headers;

    /** Marshalled flags (see {@link Flag}); volatile so concurrent readers see updates. */
    protected volatile short flags;

    protected volatile byte transient_flags; // transient_flags is neither marshalled nor copied

    // Bits of the leading byte written first by writeTo()/writeToNoAddrs(): each records whether
    // an optional wire section (dest address, src address, payload) follows.
    static final byte DEST_SET = 1;
    static final byte SRC_SET = 1 << 1;
    static final byte BUF_SET = 1 << 2;

    // =============================== Flags ====================================
    // NOTE(review): bit 1 << 3 is unused — presumably a retired flag; do not reuse it without
    // confirming wire compatibility with older peers.
    public enum Flag {
        OOB((short) 1), // message is out-of-band
        DONT_BUNDLE( (short)(1 << 1)), // don't bundle message at the transport
        NO_FC( (short)(1 << 2)), // bypass flow control
        NO_RELIABILITY((short)(1 << 4)), // bypass UNICAST(2) and NAKACK
        NO_TOTAL_ORDER((short)(1 << 5)), // bypass total order (e.g. SEQUENCER)
        NO_RELAY( (short)(1 << 6)), // bypass relaying (RELAY)
        RSVP( (short)(1 << 7)), // ack of a multicast (https://issues.jboss.org/browse/JGRP-1389)
        RSVP_NB( (short)(1 << 8)), // non blocking RSVP
        INTERNAL( (short)(1 << 9)), // for internal use by JGroups only, don't use !
        SKIP_BARRIER( (short)(1 << 10)); // passing messages through a closed BARRIER

        final short value;
        Flag(short value) {this.value=value;}
        public short value() {return value;}
    }

    // =========================== Transient flags ==============================
    // Transient flags are node-local: never marshalled and never copied (see copy()).
    public enum TransientFlag {
        OOB_DELIVERED( (short)(1)),
        DONT_LOOPBACK( (short)(1 << 1)); // don't loop back up if this flag is set and it is a multicast message

        final short value;
        TransientFlag(short flag) {value=flag;}
        public short value() {return value;}
    }

    /**
     * Constructs a message given a destination address
     * @param dest The Address of the receiver. If it is null, then the message is sent to the group. Otherwise, it is
     *             sent to a single member.
     */
    public Message(Address dest) {
        setDest(dest);
        headers=createHeaders(Util.DEFAULT_HEADERS);
    }

    /**
     * Constructs a message given a destination and source address and the payload byte buffer
     * @param dest The Address of the receiver. If it is null, then the message is sent to the group. Otherwise, it is
     *             sent to a single member.
     * @param buf The payload. Note that this buffer must not be modified (e.g. buf[0]='x' is not
     *            allowed) since we don't copy the contents.
     */
    public Message(Address dest, byte[] buf) {
        this(dest, buf, 0, buf != null? buf.length : 0);
    }

    /**
     * Constructs a message. The index and length parameters provide a reference to a byte buffer, rather than a copy,
     * and refer to a subset of the buffer. This is important when we want to avoid copying. When the message is
     * serialized, only the subset is serialized.</p>
     * <em>
     * Note that the byte[] buffer passed as argument must not be modified. Reason: if we retransmit the
     * message, it would still have a ref to the original byte[] buffer passed in as argument, and so we would
     * retransmit a changed byte[] buffer !
     * </em>
     *
     * @param dest The Address of the receiver. If it is null, then the message is sent to the group. Otherwise, it is
     *             sent to a single member.
     * @param buf A reference to a byte buffer
     * @param offset The index into the byte buffer
     * @param length The number of bytes to be used from <tt>buf</tt>. Both index and length are checked
     *               for array index violations and an ArrayIndexOutOfBoundsException will be thrown if invalid
     */
    public Message(Address dest, byte[] buf, int offset, int length) {
        this(dest);
        setBuffer(buf, offset, length);
    }

    public Message(Address dest, Buffer buf) {
        this(dest);
        setBuffer(buf);
    }

    /**
     * Constructs a message given a destination and source address and the payload object
     * @param dest The Address of the receiver. If it is null, then the message is sent to the group. Otherwise, it is
     *             sent to a single member.
     * @param obj The object that will be marshalled into the byte buffer. Has to be serializable (e.g. implementing
     *            Serializable, Externalizable or Streamable, or be a basic type (e.g. Integer, Short etc)).
     */
    public Message(Address dest, Object obj) {
        this(dest);
        setObject(obj);
    }

    public Message() {
        this(true);
    }

    /** @param create_headers if false, skips header-array allocation (used by copy()/readFrom()) */
    public Message(boolean create_headers) {
        if(create_headers)
            headers=createHeaders(Util.DEFAULT_HEADERS);
    }

    /** Factory used by the Constructable contract (e.g. when unmarshalling). */
    public Supplier<? extends Message> create() {
        return Message::new;
    }

    // ---------- Fluent accessors; the single-word forms are aliases of the get/set forms ----------
    public Address getDest()                 {return dest;}
    public Address dest()                    {return dest;}
    public Message setDest(Address new_dest) {dest=new_dest; return this;}
    public Message dest(Address new_dest)    {dest=new_dest; return this;}

    public Address getSrc()                  {return sender;}
    public Address src()                     {return sender;}
    public Message setSrc(Address new_src)   {sender=new_src; return this;}
    public Message src(Address new_src)      {sender=new_src; return this;}

    public int     getOffset()               {return offset;}
    public int     offset()                  {return offset;}
    public int     getLength()               {return length;}
    public int     length()                  {return length;}

    /**
     * Returns a <em>reference</em> to the payload (byte buffer). Note that this buffer should not be
     * modified as we do not copy the buffer on copy() or clone(): the buffer of the copied message
     * is simply a reference to the old buffer.<br/>
     * Even if offset and length are used: we return the <em>entire</em> buffer, not a subset.
     */
    public byte[]  getRawBuffer()            {return buf;}
    public byte[]  rawBuffer()               {return buf;}
    public byte[]  buffer()                  {return getBuffer();}
    public Buffer  buffer2()                 {return getBuffer2();}
    public Message buffer(byte[] b)          {return setBuffer(b);}
    public Message buffer(Buffer b)          {return setBuffer(b);}
    public int     getNumHeaders()           {return Headers.size(this.headers);}
    public int     numHeaders()              {return Headers.size(this.headers);}

    /**
     * Returns a copy of the buffer if offset and length are used, otherwise a reference.
     * @return byte array with a copy of the buffer.
     */
    public byte[] getBuffer() {
        if(buf == null)
            return null;
        if(offset == 0 && length == buf.length)
            return buf;
        else {
            byte[] retval=new byte[length];
            System.arraycopy(buf, offset, retval, 0, length);
            return retval;
        }
    }

    /** Returns the payload as a Buffer view (no copy), or null if there is no payload. */
    public Buffer getBuffer2() {
        if(buf == null)
            return null;
        return new Buffer(buf, offset, length);
    }

    /**
     * Sets the buffer.<p/>
     * Note that the byte[] buffer passed as argument must not be modified. Reason: if we retransmit the
     * message, it would still have a ref to the original byte[] buffer passed in as argument, and so we would
     * retransmit a changed byte[] buffer !
     */
    public Message setBuffer(byte[] b) {
        buf=b;
        if(buf != null) {
            offset=0;
            length=buf.length;
        }
        else
            offset=length=0;
        return this;
    }

    /**
     * Sets the internal buffer to point to a subset of a given buffer.<p/>
     * <em>
     * Note that the byte[] buffer passed as argument must not be modified. Reason: if we retransmit the
     * message, it would still have a ref to the original byte[] buffer passed in as argument, and so we would
     * retransmit a changed byte[] buffer !
     * </em>
     *
     * @param b The reference to a given buffer. If null, we'll reset the buffer to null
     * @param offset The initial position
     * @param length The number of bytes
     */
    public Message setBuffer(byte[] b, int offset, int length) {
        buf=b;
        if(buf != null) {
            if(offset < 0 || offset > buf.length)
                throw new ArrayIndexOutOfBoundsException(offset);
            if((offset + length) > buf.length)
                throw new ArrayIndexOutOfBoundsException((offset+length));
            this.offset=offset;
            this.length=length;
        }
        else
            this.offset=this.length=0;
        return this;
    }

    /**
     * Sets the buffer<p/>
     * Note that the byte[] buffer passed as argument must not be modified. Reason: if we retransmit the
     * message, it would still have a ref to the original byte[] buffer passed in as argument, and so we would
     * retransmit a changed byte[] buffer !
     */
    public Message setBuffer(Buffer buf) {
        // NOTE(review): a null argument is a silent no-op here (unlike setBuffer(byte[]), which resets).
        if(buf != null) {
            this.buf=buf.getBuf();
            this.offset=buf.getOffset();
            this.length=buf.getLength();
        }
        return this;
    }

    /**
     * Returns a reference to the headers hashmap, which is <em>immutable</em>. Any attempt to modify
     * the returned map will cause a runtime exception
     */
    public Map<Short,Header> getHeaders() {
        return Headers.getHeaders(this.headers);
    }

    public String printHeaders() {
        return Headers.printHeaders(this.headers);
    }

    /**
     * Takes an object and uses Java serialization to generate the byte[] buffer which is set in the
     * message. Parameter 'obj' has to be serializable (e.g. implementing Serializable,
     * Externalizable or Streamable, or be a basic type (e.g. Integer, Short etc)).
     */
    public Message setObject(Object obj) {
        if(obj == null) return this;
        // byte[] and Buffer payloads are taken as-is; everything else is marshalled.
        if(obj instanceof byte[])
            return setBuffer((byte[])obj);
        if(obj instanceof Buffer)
            return setBuffer((Buffer)obj);
        try {
            return setBuffer(Util.objectToByteBuffer(obj));
        }
        catch(Exception ex) {
            throw new IllegalArgumentException(ex);
        }
    }

    public <T extends Object> T getObject() {
        return getObject(null);
    }

    /**
     * Uses custom serialization to create an object from the buffer of the message. Note that this is dangerous when
     * using your own classloader, e.g. inside of an application server ! Most likely, JGroups will use the system
     * classloader to deserialize the buffer into an object, whereas (for example) a web application will want to use
     * the webapp's classloader, resulting in a ClassCastException. The recommended way is for the application to use
     * their own serialization and only pass byte[] buffer to JGroups.<p/>
     * As of 3.5, a classloader can be passed in. It will be used first to find a class, before contacting
     * the other classloaders in the list. If null, the default list of classloaders will be used.
     * @return the object
     */
    public <T extends Object> T getObject(ClassLoader loader) {
        try {
            return Util.objectFromByteBuffer(buf, offset, length, loader);
        }
        catch(Exception ex) {
            throw new IllegalArgumentException(ex);
        }
    }

    /**
     * Sets a number of flags in a message
     * @param flags The flag or flags
     * @return A reference to the message
     */
    public Message setFlag(Flag ... flags) {
        if(flags != null) {
            // Accumulate into a local and publish once: a single volatile write.
            short tmp=this.flags;
            for(Flag flag : flags) {
                if(flag != null)
                    tmp|=flag.value();
            }
            this.flags=tmp;
        }
        return this;
    }

    /**
     * Same as {@link #setFlag(Flag...)} except that transient flags are not marshalled
     * @param flags The flag
     */
    public Message setTransientFlag(TransientFlag ... flags) {
        if(flags != null) {
            short tmp=this.transient_flags;
            for(TransientFlag flag : flags)
                if(flag != null)
                    tmp|=flag.value();
            this.transient_flags=(byte)tmp;
        }
        return this;
    }

    /**
     * Sets the flags from a short. <em>Not recommended</em> (use {@link #setFlag(org.jgroups.Message.Flag...)} instead),
     * as the internal representation of flags might change anytime.
     * @param flag the raw flag bits to OR into the current flags
     * @return this message
     */
    public Message setFlag(short flag) {
        short tmp=this.flags;
        tmp|=flag;
        this.flags=tmp;
        return this;
    }

    /** Raw-bits variant of {@link #setTransientFlag(TransientFlag...)}; ORs into the current value. */
    public Message setTransientFlag(short flag) {
        short tmp=this.transient_flags;
        tmp|=flag;
        this.transient_flags=(byte)tmp;
        return this;
    }

    /**
     * Returns the internal representation of flags. Don't use this, as the internal format might change at any time !
     * This is only used by unit test code
     * @return the raw flag bits
     */
    public short getFlags() {return flags;}

    public short getTransientFlags() {return transient_flags;}

    /**
     * Clears a number of flags in a message
     * @param flags The flags
     * @return A reference to the message
     */
    public Message clearFlag(Flag ... flags) {
        if(flags != null) {
            short tmp=this.flags;
            for(Flag flag : flags)
                if(flag != null)
                    tmp&=~flag.value();
            this.flags=tmp;
        }
        return this;
    }

    public Message clearTransientFlag(TransientFlag ... flags) {
        if(flags != null) {
            short tmp=this.transient_flags;
            for(TransientFlag flag : flags)
                if(flag != null)
                    tmp&=~flag.value();
            this.transient_flags=(byte)tmp;
        }
        return this;
    }

    /** Static helper: tests a flag against a raw bit set (null flag => false). */
    public static boolean isFlagSet(short flags, Flag flag) {
        return flag != null && ((flags & flag.value()) == flag.value());
    }

    /**
     * Checks if a given flag is set
     * @param flag The flag
     * @return Whether or not the flag is currently set
     */
    public boolean isFlagSet(Flag flag) {
        return isFlagSet(flags, flag);
    }

    public static boolean isTransientFlagSet(short flags, TransientFlag flag) {
        return flag != null && (flags & flag.value()) == flag.value();
    }

    public boolean isTransientFlagSet(TransientFlag flag) {
        return isTransientFlagSet(transient_flags, flag);
    }

    /**
     * Atomically checks if a given flag is set and - if not - sets it. When multiple threads
     * concurrently call this method with the same flag, only one of them will be able to set the
     * flag
     *
     * @param flag the transient flag to test-and-set
     * @return True if the flag could be set, false if not (was already set)
     */
    public synchronized boolean setTransientFlagIfAbsent(TransientFlag flag) {
        if(isTransientFlagSet(flag))
            return false;
        setTransientFlag(flag);
        return true;
    }

    /*---------------------- Used by protocol layers ----------------------*/

    /** Puts a header given an ID into the hashmap. Overwrites potential existing entry. */
    public Message putHeader(short id, Header hdr) {
        if(id < 0)
            throw new IllegalArgumentException("An ID of " + id + " is invalid");
        if(hdr != null)
            hdr.setProtId(id);
        // Synchronized because putHeader may need to replace the (volatile) headers array.
        synchronized(this) {
            Header[] resized_array=Headers.putHeader(this.headers, id, hdr, true);
            if(resized_array != null)
                this.headers=resized_array;
        }
        return this;
    }

    public <T extends Header> T getHeader(short id) {
        // NOTE(review): get rejects id == 0 while putHeader only rejects id < 0 — asymmetry
        // kept as-is; confirm against protocol-id conventions before changing.
        if(id <= 0)
            throw new IllegalArgumentException("An ID of " + id + " is invalid. Add the protocol which calls " +
                                                 "getHeader() to jg-protocol-ids.xml");
        return Headers.getHeader(this.headers, id);
    }

    /** Returns a header for a range of IDs, or null if not found */
    public <T extends Header> T getHeader(short ... ids) {
        if(ids == null || ids.length == 0)
            return null;
        return Headers.getHeader(this.headers, ids);
    }
    /*---------------------------------------------------------------------*/

    public Message copy() {
        return copy(true);
    }

    /**
     * Create a copy of the message. If offset and length are used (to refer to another buffer), the
     * copy will contain only the subset offset and length point to, copying the subset into the new
     * copy.
     *
     * @param copy_buffer whether to carry the payload over into the copy
     * @return Message with specified data
     */
    public Message copy(boolean copy_buffer) {
        return copy(copy_buffer, true);
    }

    /**
     * Create a copy of the message. If offset and length are used (to refer to another buffer), the
     * copy will contain only the subset offset and length point to, copying the subset into the new
     * copy.<p/>
     * Note that for headers, only the arrays holding references to the headers are copied, not the headers themselves !
     * The consequence is that the headers array of the copy hold the *same* references as the original, so do *not*
     * modify the headers ! If you want to change a header, copy it and call {@link Message#putHeader(short,Header)} again.
     *
     * @param copy_buffer whether to carry the payload over into the copy
     * @param copy_headers
     *            Copy the headers
     * @return Message with specified data
     */
    public Message copy(boolean copy_buffer, boolean copy_headers) {
        Message retval=new Message(false);
        retval.dest=dest;
        retval.sender=sender;
        short tmp_flags=this.flags;
        byte tmp_tflags=this.transient_flags;
        retval.flags=tmp_flags;
        retval.transient_flags=tmp_tflags;

        if(copy_buffer && buf != null)
            retval.setBuffer(buf, offset, length);

        retval.headers=copy_headers && headers != null? Headers.copy(this.headers) : createHeaders(Util.DEFAULT_HEADERS);
        return retval;
    }

    /**
     * Doesn't copy any headers except for those with ID >= copy_headers_above
     *
     * @param copy_buffer whether to carry the payload over into the copy
     * @param starting_id lowest header ID (inclusive) to copy
     * @return A message with headers whose ID are >= starting_id
     */
    public Message copy(boolean copy_buffer, short starting_id) {
        return copy(copy_buffer, starting_id, (short[])null);
    }

    /**
     * Copies a message. Copies only headers with IDs >= starting_id or IDs which are in the copy_only_ids list
     * @param copy_buffer whether to carry the payload over into the copy
     * @param starting_id lowest header ID (inclusive) to copy
     * @param copy_only_ids additional IDs below starting_id that should still be copied
     * @return the copied message
     */
    public Message copy(boolean copy_buffer, short starting_id, short ... copy_only_ids) {
        Message retval=copy(copy_buffer, false);
        for(Map.Entry<Short,Header> entry: getHeaders().entrySet()) {
            short id=entry.getKey();
            if(id >= starting_id || Util.containsId(id, copy_only_ids))
                retval.putHeader(id, entry.getValue());
        }
        return retval;
    }

    /** Builds an empty reply addressed to this message's sender (src set to our dest, if any). */
    public Message makeReply() {
        Message retval=new Message(sender);
        if(dest != null)
            retval.setSrc(dest);
        return retval;
    }

    public String toString() {
        return String.format("[%s to %s, %d bytes%s%s]",
                             sender,
                             dest == null? "<all>" : dest,
                             length,
                             flags > 0? ", flags=" + flagsToString(flags) : "",
                             transient_flags > 0? ", transient_flags=" + transientFlagsToString(transient_flags) : "");
    }

    public String printObjectHeaders() {
        return Headers.printObjectHeaders(this.headers);
    }

    /* ----------------------------------- Interface Streamable ------------------------------- */

    /** Writes the message to the output stream */
    @Override public void writeTo(DataOutput out) throws IOException {
        byte leading=0;

        if(dest != null)
            leading=Util.setFlag(leading, DEST_SET);

        if(sender != null)
            leading=Util.setFlag(leading, SRC_SET);

        if(buf != null)
            leading=Util.setFlag(leading, BUF_SET);

        // 1. write the leading byte first
        out.write(leading);

        // 2. the flags (e.g. OOB, LOW_PRIO), skip the transient flags
        out.writeShort(flags);

        // 3. dest_addr
        if(dest != null)
            Util.writeAddress(dest, out);

        // 4. src_addr
        if(sender != null)
            Util.writeAddress(sender, out);

        // 5. headers
        Header[] hdrs=this.headers;
        int size=Headers.size(hdrs);
        out.writeShort(size);
        if(size > 0) {
            for(Header hdr : hdrs) {
                if(hdr == null)
                    break; // headers array is packed: first null marks the end
                out.writeShort(hdr.getProtId());
                writeHeader(hdr, out);
            }
        }

        // 6. buf
        if(buf != null) {
            out.writeInt(length);
            out.write(buf, offset, length);
        }
    }

    /**
     * Writes the message to the output stream, but excludes the dest and src addresses unless the
     * src address given as argument is different from the message's src address
     * @param excluded_headers Don't marshal headers that are part of excluded_headers
     */
    public void writeToNoAddrs(Address src, DataOutput out, short ... excluded_headers) throws IOException {
        byte leading=0;

        // Only marshal our sender when it differs from the batch/transport-level src.
        boolean write_src_addr=src == null || sender != null && !sender.equals(src);

        if(write_src_addr)
            leading=Util.setFlag(leading, SRC_SET);

        if(buf != null)
            leading=Util.setFlag(leading, BUF_SET);

        // 1. write the leading byte first
        out.write(leading);

        // 2. the flags (e.g. OOB, LOW_PRIO)
        out.writeShort(flags);

        // 4. src_addr
        if(write_src_addr)
            Util.writeAddress(sender, out);

        // 5. headers
        Header[] hdrs=this.headers;
        int size=Headers.size(hdrs, excluded_headers);
        out.writeShort(size);
        if(size > 0) {
            for(Header hdr : hdrs) {
                if(hdr == null)
                    break; // headers array is packed: first null marks the end
                short id=hdr.getProtId();
                if(Util.containsId(id, excluded_headers))
                    continue;
                out.writeShort(id);
                writeHeader(hdr, out);
            }
        }

        // 6. buf
        if(buf != null) {
            out.writeInt(length);
            out.write(buf, offset, length);
        }
    }

    /** Reads the message's contents from an input stream */
    @Override public void readFrom(DataInput in) throws IOException, ClassNotFoundException {

        // 1. read the leading byte first
        byte leading=in.readByte();

        // 2. the flags
        flags=in.readShort();

        // 3. dest_addr
        if(Util.isFlagSet(leading, DEST_SET))
            dest=Util.readAddress(in);

        // 4. src_addr
        if(Util.isFlagSet(leading, SRC_SET))
            sender=Util.readAddress(in);

        // 5. headers
        int len=in.readShort();
        this.headers=createHeaders(len);
        for(int i=0; i < len; i++) {
            short id=in.readShort();
            Header hdr=readHeader(in).setProtId(id);
            this.headers[i]=hdr;
        }

        // 6. buf
        if(Util.isFlagSet(leading, BUF_SET)) {
            len=in.readInt();
            buf=new byte[len];
            in.readFully(buf, 0, len);
            length=len;
        }
    }

    /** Reads the message's contents from an input stream, but skips the buffer and instead returns the
     * position (offset) at which the buffer starts */
    public int readFromSkipPayload(ByteArrayDataInputStream in) throws IOException, ClassNotFoundException {

        // 1. read the leading byte first
        byte leading=in.readByte();

        // 2. the flags
        flags=in.readShort();

        // 3. dest_addr
        if(Util.isFlagSet(leading, DEST_SET))
            dest=Util.readAddress(in);

        // 4. src_addr
        if(Util.isFlagSet(leading, SRC_SET))
            sender=Util.readAddress(in);

        // 5. headers
        int len=in.readShort();
        headers=createHeaders(len);
        for(int i=0; i < len; i++) {
            short id=in.readShort();
            Header hdr=readHeader(in).setProtId(id);
            this.headers[i]=hdr;
        }

        // 6. buf: length is consumed but the payload bytes are left in the stream;
        // the caller slices them out using the returned position.
        if(!Util.isFlagSet(leading, BUF_SET))
            return -1;

        length=in.readInt();
        return in.position();
    }

    /* --------------------------------- End of Interface Streamable ----------------------------- */

    /**
     * Returns the exact size of the marshalled message. Uses method size() of each header to compute
     * the size, so if a Header subclass doesn't implement size() we will use an approximation.
     * However, most relevant header subclasses have size() implemented correctly. (See
     * org.jgroups.tests.SizeTest).<p/>
     * The return type is a long as this is the length of the payload ({@link #getLength()}) plus metadata (e.g. flags,
     * headers, source and dest addresses etc). Since the largest payload can be Integer.MAX_VALUE, adding the metadata
     * might lead to an int overflow, that's why we use a long.
     * @return The number of bytes for the marshalled message
     */
    public long size() {
        long retval=(long)Global.BYTE_SIZE   // leading byte
          + Global.SHORT_SIZE;               // flags
        if(dest != null)
            retval+=Util.size(dest);
        if(sender != null)
            retval+=Util.size(sender);

        retval+=Global.SHORT_SIZE;           // number of headers
        retval+=Headers.marshalledSize(this.headers);

        if(buf != null)
            retval+=Global.INT_SIZE          // length (integer)
              + length;                      // number of bytes in the buffer
        return retval;
    }

    /* ----------------------------------- Private methods ------------------------------- */

    /** Renders the set bits of a marshalled flag set as "A|B|C". */
    public static String flagsToString(short flags) {
        StringBuilder sb=new StringBuilder();
        boolean first=true;
        Flag[] all_flags=Flag.values();
        for(Flag flag: all_flags) {
            if(isFlagSet(flags, flag)) {
                if(first)
                    first=false;
                else
                    sb.append("|");
                sb.append(flag);
            }
        }
        return sb.toString();
    }

    /** Renders the set bits of a transient flag set as "A|B". */
    public static String transientFlagsToString(short flags) {
        StringBuilder sb=new StringBuilder();
        boolean first=true;
        TransientFlag[] all_flags=TransientFlag.values();
        for(TransientFlag flag: all_flags) {
            if(isTransientFlagSet(flags, flag)) {
                if(first)
                    first=false;
                else
                    sb.append("|");
                sb.append(flag);
            }
        }
        return sb.toString();
    }

    // Writes a header as [magic id: short][header body]; the magic id lets readHeader()
    // instantiate the right Header subclass.
    protected static void writeHeader(Header hdr, DataOutput out) throws IOException {
        short magic_number=hdr.getMagicId();
        out.writeShort(magic_number);
        hdr.writeTo(out);
    }

    // Reads a header written by writeHeader(): resolves the magic id via ClassConfigurator,
    // then lets the new instance unmarshal itself.
    protected static Header readHeader(DataInput in) throws IOException, ClassNotFoundException {
        short magic_number=in.readShort();
        Header hdr=ClassConfigurator.create(magic_number);
        hdr.readFrom(in);
        return hdr;
    }

    // Allocates the headers array; a minimum capacity of 3 avoids immediate resizing.
    protected static Header[] createHeaders(int size) {
        return size > 0? new Header[size] : new Header[3];
    }

    /* ------------------------------- End of Private methods ---------------------------- */
}
| |
package edu.berkeley.cs.succinct.util.stream;
import edu.berkeley.cs.succinct.util.CommonUtils;
import edu.berkeley.cs.succinct.util.dictionary.Tables;
import edu.berkeley.cs.succinct.util.stream.serops.BitMapOps;
import org.apache.hadoop.fs.FSDataInputStream;
import java.io.IOException;
import static edu.berkeley.cs.succinct.util.DictionaryUtils.*;
public class WaveletTreeStream {
private FSDataInputStream stream;
private long startPos;
/**
 * Creates a wavelet-tree reader over a serialized tree.
 *
 * @param stream   Input stream to read from; lookups seek for themselves, so the stream's
 *                 current position does not matter.
 * @param startPos Byte offset of the serialized wavelet tree within the stream.
 * @throws IOException Declared for caller symmetry; construction itself performs no reads.
 */
public WaveletTreeStream(FSDataInputStream stream, long startPos) throws IOException {
    this.startPos = startPos;
    this.stream = stream;
}
/**
 * Runs a wavelet-tree lookup over the index range [startIdx, endIdx].
 *
 * @param contextPos Context position to locate.
 * @param cellPos    Cell position within the context.
 * @param startIdx   Start of the index range (inclusive).
 * @param endIdx     End of the index range (inclusive).
 * @return The looked-up value.
 * @throws IOException On any underlying stream error.
 */
public long lookup(int contextPos, int cellPos, int startIdx, int endIdx) throws IOException {
    // Rewind to the tree's root node before each query; the recursion below seeks
    // to child nodes relative to startPos.
    stream.seek(startPos);
    long result = waveletTreeLookup(contextPos, cellPos, startIdx, endIdx);
    return result;
}
// Recursive wavelet-tree descent. Expects the stream to be positioned at a serialized node,
// laid out as: split byte m, long offset of left child, long offset of right child (each
// relative to startPos; 0 means "no child"), then the node's dictionary. Descends toward the
// node covering contextPos, then maps the recursive result back up through this node's
// dictionary with select1/select0. The read order (byte, long, long, getPos) is the node's
// wire format — do not reorder.
private long waveletTreeLookup(int contextPos, int cellPos, int startIdx, int endIdx)
    throws IOException {
    byte m = stream.readByte();              // split point of [startIdx, endIdx]
    int left = (int) stream.readLong();      // relative offset of left child, 0 if leaf side
    int right = (int) stream.readLong();     // relative offset of right child, 0 if leaf side
    int dictPos = (int) stream.getPos();     // this node's dictionary starts right after the links
    long p, v;
    if (contextPos > m && contextPos <= endIdx) {
        // Context lies in the right half (m, endIdx].
        if (right == 0) {
            return select1(dictPos, cellPos);
        }
        stream.seek(startPos + right);
        p = waveletTreeLookup(contextPos, cellPos, m + 1, endIdx);
        v = select1(dictPos, (int) p);
    } else {
        // Context lies in the left half [startIdx, m] (or outside the range — handled the same).
        if (left == 0) {
            return select0(dictPos, cellPos);
        }
        stream.seek(startPos + left);
        p = waveletTreeLookup(contextPos, cellPos, startIdx, m);
        v = select0(dictPos, (int) p);
    }
    return v;
}
private long select0(long dictPos, int i) throws IOException {
assert (i >= 0);
RandomAccessLongStream dictBuf = new RandomAccessLongStream(stream, dictPos, Integer.MAX_VALUE);
long size = dictBuf.get();
long val = i + 1;
int sp = 0;
int ep = (int) (size / CommonUtils.two32);
int m;
long r;
int pos = 0;
int blockClass, blockOffset;
long sel;
int lastBlock;
long rankL12, posL12;
int l3Size = (int) ((size / CommonUtils.two32) + 1);
int l12Size = (int) ((size / 2048) + 1);
int basePos = (int) dictBuf.position();
while (sp <= ep) {
m = (sp + ep) / 2;
r = (m * CommonUtils.two32 - dictBuf.get(basePos + m));
if (val > r) {
sp = m + 1;
} else {
ep = m - 1;
}
}
ep = Math.max(ep, 0);
val -= (ep * CommonUtils.two32 - dictBuf.get(basePos + ep));
pos += dictBuf.get(basePos + l3Size + ep);
sp = (int) (ep * CommonUtils.two32 / 2048);
ep = (int) (Math.min(((ep + 1) * CommonUtils.two32 / 2048), Math.ceil((double) size / 2048.0))
- 1);
assert (val <= CommonUtils.two32);
assert (pos >= 0);
dictBuf.position(basePos + 2 * l3Size);
basePos = (int) dictBuf.position();
while (sp <= ep) {
m = (sp + ep) / 2;
r = m * 2048 - GETRANKL2(dictBuf.get(basePos + m));
if (val > r) {
sp = m + 1;
} else {
ep = m - 1;
}
}
ep = Math.max(ep, 0);
sel = (long) (ep) * 2048L;
rankL12 = dictBuf.get(basePos + ep);
posL12 = dictBuf.get(basePos + l12Size + ep);
val -= (ep * 2048 - GETRANKL2(rankL12));
pos += GETPOSL2(posL12);
assert (val <= 2048);
assert (pos >= 0);
r = (512 - GETRANKL1(rankL12, 1));
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 1);
val -= r;
sel += 512;
r = (512 - GETRANKL1(rankL12, 2));
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 2);
val -= r;
sel += 512;
r = (512 - GETRANKL1(rankL12, 3));
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 3);
val -= r;
sel += 512;
}
}
}
dictBuf.position(basePos + 2 * l12Size);
assert (val <= 512);
assert (pos >= 0);
dictBuf.get(); // TODO: Could remove this field altogether
while (true) {
blockClass = (int) BitMapOps.getValPos(dictBuf, pos, 4);
short offsetSize = (short) Tables.offsetBits[blockClass];
pos += 4;
blockOffset = (int) ((blockClass == 0) ? BitMapOps.getBit(dictBuf, pos) * 16 : 0);
pos += offsetSize;
if (val <= (16 - (blockClass + blockOffset))) {
pos -= (4 + offsetSize);
break;
}
val -= (16 - (blockClass + blockOffset));
sel += 16;
}
blockClass = (int) BitMapOps.getValPos(dictBuf, pos, 4);
pos += 4;
blockOffset = (int) BitMapOps.getValPos(dictBuf, pos, Tables.offsetBits[blockClass]);
lastBlock = Tables.decodeTable[blockClass][blockOffset];
long count = 0;
for (i = 0; i < 16; i++) {
if (((lastBlock >> (15 - i)) & 1) == 0) {
count++;
}
if (count == val) {
return sel + i;
}
}
return sel;
}
private long select1(long dictPos, int i) throws IOException {
assert (i >= 0);
RandomAccessLongStream dictBuf = new RandomAccessLongStream(stream, dictPos, Integer.MAX_VALUE);
long size = dictBuf.get();
long val = i + 1;
int sp = 0;
int ep = (int) (size / CommonUtils.two32);
int m;
long r;
int pos = 0;
int blockClass, blockOffset;
long sel;
int lastBlock;
long rankL12, posL12;
int l3Size = (int) ((size / CommonUtils.two32) + 1);
int l12Size = (int) ((size / 2048) + 1);
int basePos = (int) dictBuf.position();
while (sp <= ep) {
m = (sp + ep) / 2;
r = dictBuf.get(basePos + m);
if (val > r) {
sp = m + 1;
} else {
ep = m - 1;
}
}
ep = Math.max(ep, 0);
val -= dictBuf.get(basePos + ep);
pos += dictBuf.get(basePos + l3Size + ep);
sp = (int) (ep * CommonUtils.two32 / 2048);
ep = (int) (Math.min(((ep + 1) * CommonUtils.two32 / 2048), Math.ceil((double) size / 2048.0))
- 1);
assert (val <= CommonUtils.two32);
assert (pos >= 0);
dictBuf.position(basePos + 2 * l3Size);
basePos = (int) dictBuf.position();
while (sp <= ep) {
m = (sp + ep) / 2;
r = GETRANKL2(dictBuf.get(basePos + m));
if (val > r) {
sp = m + 1;
} else {
ep = m - 1;
}
}
ep = Math.max(ep, 0);
sel = (long) (ep) * 2048L;
rankL12 = dictBuf.get(basePos + ep);
posL12 = dictBuf.get(basePos + l12Size + ep);
val -= GETRANKL2(rankL12);
pos += GETPOSL2(posL12);
assert (val <= 2048);
assert (pos >= 0);
r = GETRANKL1(rankL12, 1);
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 1);
val -= r;
sel += 512;
r = GETRANKL1(rankL12, 2);
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 2);
val -= r;
sel += 512;
r = GETRANKL1(rankL12, 3);
if (sel + 512 < size && val > r) {
pos += GETPOSL1(posL12, 3);
val -= r;
sel += 512;
}
}
}
dictBuf.position(basePos + 2 * l12Size);
assert (val <= 512);
assert (pos >= 0);
dictBuf.get(); // TODO: Could remove this field altogether
while (true) {
blockClass = (int) BitMapOps.getValPos(dictBuf, pos, 4);
short offsetSize = (short) Tables.offsetBits[blockClass];
pos += 4;
blockOffset = (int) ((blockClass == 0) ? BitMapOps.getBit(dictBuf, pos) * 16 : 0);
pos += offsetSize;
if (val <= (blockClass + blockOffset)) {
pos -= (4 + offsetSize);
break;
}
val -= (blockClass + blockOffset);
sel += 16;
}
blockClass = (int) BitMapOps.getValPos(dictBuf, pos, 4);
pos += 4;
blockOffset = (int) BitMapOps.getValPos(dictBuf, pos, Tables.offsetBits[blockClass]);
lastBlock = Tables.decodeTable[blockClass][blockOffset];
long count = 0;
for (i = 0; i < 16; i++) {
if (((lastBlock >>> (15 - i)) & 1) == 1) {
count++;
}
if (count == val) {
return sel + i;
}
}
return sel;
}
public void close() throws IOException {
stream.close();
}
}
| |
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.webapps;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.support.test.InstrumentationRegistry;
import android.view.View;
import androidx.test.filters.MediumTest;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.chromium.base.ApplicationStatus;
import org.chromium.base.test.util.CommandLineFlags;
import org.chromium.base.test.util.Criteria;
import org.chromium.base.test.util.CriteriaHelper;
import org.chromium.base.test.util.DisabledTest;
import org.chromium.base.test.util.Feature;
import org.chromium.base.test.util.UrlUtils;
import org.chromium.chrome.browser.ChromeTabbedActivity;
import org.chromium.chrome.browser.IntentHandler;
import org.chromium.chrome.browser.ShortcutHelper;
import org.chromium.chrome.browser.app.ChromeActivity;
import org.chromium.chrome.browser.browserservices.intents.BrowserServicesIntentDataProvider;
import org.chromium.chrome.browser.browserservices.intents.WebappConstants;
import org.chromium.chrome.browser.flags.ChromeSwitches;
import org.chromium.chrome.browser.preferences.ChromePreferenceKeys;
import org.chromium.chrome.browser.preferences.SharedPreferencesManager;
import org.chromium.chrome.test.ChromeJUnit4ClassRunner;
import org.chromium.chrome.test.MultiActivityTestRule;
import org.chromium.chrome.test.util.ChromeApplicationTestUtils;
import org.chromium.chrome.test.util.browser.webapps.WebappTestHelper;
import org.chromium.content_public.browser.test.util.TestThreadUtils;
/**
 * Tests that WebappActivities are launched correctly.
 *
 * This test seems a little wonky because WebappActivities launch differently, depending on what
 * OS the user is on. Pre-L, WebappActivities were manually instanced and assigned by the
 * WebappManager. On L and above, WebappActivities are automatically instanced by Android and the
 * FLAG_ACTIVITY_NEW_DOCUMENT mechanism. Moreover, we don't have access to the task list pre-L so
 * we have to assume that any non-running WebappActivities are not listed in Android's Overview.
 */
@RunWith(ChromeJUnit4ClassRunner.class)
@CommandLineFlags.Add({ChromeSwitches.DISABLE_FIRST_RUN_EXPERIENCE})
public class WebappModeTest {
    @Rule
    public MultiActivityTestRule mTestRule = new MultiActivityTestRule();

    private static final String WEBAPP_1_ID = "webapp_id_1";
    private static final String WEBAPP_1_URL = UrlUtils.encodeHtmlDataUri(
            "<html><head><title>Web app #1</title><meta name='viewport' "
            + "content='width=device-width initial-scale=0.5, maximum-scale=0.5'></head>"
            + "<body bgcolor='#011684'>Webapp 1</body></html>");
    private static final String WEBAPP_1_TITLE = "Web app #1";

    private static final String WEBAPP_2_ID = "webapp_id_2";
    private static final String WEBAPP_2_URL =
            UrlUtils.encodeHtmlDataUri("<html><body bgcolor='#840116'>Webapp 2</body></html>");
    private static final String WEBAPP_2_TITLE = "Web app #2";

    private static final String WEBAPP_ICON = "iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAIAAACQd1PeAAAACXB"
            + "IWXMAAAsTAAALEwEAmpwYAAAAB3RJTUUH3wQIFB4cxOfiSQAAABl0RVh0Q29tbWVudABDcmVhdGVkIHdpdG"
            + "ggR0lNUFeBDhcAAAAMSURBVAjXY2AUawEAALcAnI/TkI8AAAAASUVORK5CYII=";

    /**
     * Builds a webapp launch intent for the given webapp data.
     *
     * @param addMac Whether to attach the MAC that marks the intent as trusted; without it the
     *               URL opens in a browser window instead of a WebappActivity.
     */
    private Intent createIntent(String id, String url, String title, String icon, boolean addMac) {
        Intent intent = WebappTestHelper.createMinimalWebappIntent(id, url);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        intent.setPackage(InstrumentationRegistry.getTargetContext().getPackageName());
        intent.setAction(WebappLauncherActivity.ACTION_START_WEBAPP);
        if (addMac) {
            // Needed for security reasons. If the MAC is excluded, the URL of the webapp is opened
            // in a browser window, instead.
            String mac = ShortcutHelper.getEncodedMac(url);
            intent.putExtra(WebappConstants.EXTRA_MAC, mac);
        }
        intent.putExtra(WebappConstants.EXTRA_ICON, icon);
        intent.putExtra(WebappConstants.EXTRA_NAME, title);
        return intent;
    }

    /** Fires a webapp launch intent and waits for Chrome to reach the foreground. */
    private void fireWebappIntent(String id, String url, String title, String icon,
            boolean addMac) {
        Intent intent = createIntent(id, url, title, icon, addMac);
        InstrumentationRegistry.getTargetContext().startActivity(intent);
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
        ChromeApplicationTestUtils.waitUntilChromeInForeground();
    }

    /**
     * Registers a webapp under {@code id}, populating its data storage from an intent built with
     * the webapp's own url/title. Must be called on the UI thread.
     */
    private void registerWebapp(final String id, final String url, final String title) {
        WebappRegistry.getInstance().register(
                id, new WebappRegistry.FetchWebappDataStorageCallback() {
                    @Override
                    public void onWebappDataStorageRetrieved(WebappDataStorage storage) {
                        BrowserServicesIntentDataProvider intentDataProvider =
                                WebappIntentDataProviderFactory.create(
                                        createIntent(id, url, title, WEBAPP_ICON, true));
                        storage.updateFromWebappIntentDataProvider(intentDataProvider);
                    }
                });
    }

    @Before
    public void setUp() {
        TestThreadUtils.runOnUiThreadBlocking(() -> {
            WebappRegistry.refreshSharedPrefsForTesting();

            // Register the webapps so when the data storage is opened, the test doesn't crash.
            // There is no race condition with the retrieval as AsyncTasks are run sequentially on
            // the background thread.
            registerWebapp(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE);
            // Bug fix: this registration previously reused webapp #1's id/url/title, leaving
            // WEBAPP_2_ID's storage populated with webapp #1's data.
            registerWebapp(WEBAPP_2_ID, WEBAPP_2_URL, WEBAPP_2_TITLE);
        });
    }

    /**
     * Tests that WebappActivities are started properly.
     */
    @Test
    @MediumTest
    @Feature({"Webapps"})
    public void testWebappLaunches() {
        final WebappActivity firstActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        final int firstTabId = firstActivity.getActivityTab().getId();

        // Firing a different Intent should start a new WebappActivity instance.
        fireWebappIntent(WEBAPP_2_ID, WEBAPP_2_URL, WEBAPP_2_TITLE, WEBAPP_ICON, true);
        CriteriaHelper.pollUiThread(() -> {
            Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
            Criteria.checkThat(isWebappActivityReady(lastActivity), Matchers.is(true));

            WebappActivity lastWebappActivity = (WebappActivity) lastActivity;
            Criteria.checkThat(
                    lastWebappActivity.getActivityTab().getId(), Matchers.not(firstTabId));
        });

        // Firing the first Intent should bring back the first WebappActivity instance, or at least
        // a WebappActivity with the same tab if the other one was killed by Android mid-test.
        fireWebappIntent(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON, true);
        CriteriaHelper.pollUiThread(() -> {
            Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
            Criteria.checkThat(isWebappActivityReady(lastActivity), Matchers.is(true));

            WebappActivity lastWebappActivity = (WebappActivity) lastActivity;
            Criteria.checkThat(
                    lastWebappActivity.getActivityTab().getId(), Matchers.is(firstTabId));
        });
    }

    /**
     * Tests that the WebappActivity gets the next available Tab ID instead of 0.
     */
    @Test
    @MediumTest
    @Feature({"Webapps"})
    public void testWebappTabIdsProperlyAssigned() {
        SharedPreferencesManager.getInstance().writeInt(
                ChromePreferenceKeys.TAB_ID_MANAGER_NEXT_ID, 11684);

        final WebappActivity webappActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        Assert.assertEquals(
                "Wrong Tab ID was used", 11684, webappActivity.getActivityTab().getId());
    }

    /**
     * Tests that a WebappActivity can be brought forward by firing an Intent with
     * TabOpenType.BRING_TAB_TO_FRONT.
     */
    @Test
    @MediumTest
    @Feature({"Webapps"})
    @DisabledTest(message = "crbug.com/1064395")
    public void testBringTabToFront() {
        // Start the WebappActivity.
        final WebappActivity firstActivity =
                startWebappActivity(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON);
        final int webappTabId = firstActivity.getActivityTab().getId();

        // Return home.
        final Context context = InstrumentationRegistry.getTargetContext();
        ChromeApplicationTestUtils.fireHomeScreenIntent(context);
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();

        // Bring the WebappActivity back via an Intent.
        Intent intent = IntentHandler.createTrustedBringTabToFrontIntent(
                webappTabId, IntentHandler.BringToFrontSource.NOTIFICATION);
        intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
        context.startActivity(intent);

        // When Chrome is back in the foreground, confirm that the correct Activity was restored.
        // Because of Android killing Activities willy-nilly, it may not be the same Activity, but
        // it should have the same Tab ID.
        InstrumentationRegistry.getInstrumentation().waitForIdleSync();
        ChromeApplicationTestUtils.waitUntilChromeInForeground();
        CriteriaHelper.pollInstrumentationThread(() -> {
            Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
            Criteria.checkThat(isWebappActivityReady(lastActivity), Matchers.is(true));

            WebappActivity lastWebappActivity = (WebappActivity) lastActivity;
            Criteria.checkThat(
                    lastWebappActivity.getActivityTab().getId(), Matchers.is(webappTabId));
        });
    }

    /**
     * Ensure WebappActivities can't be launched without proper security checks.
     */
    @Test
    //@MediumTest
    //@Feature({"Webapps"})
    @DisabledTest(message = "crbug.com/755114")
    public void testWebappRequiresValidMac() throws Exception {
        // Try to start a WebappActivity. Fail because the Intent is insecure.
        fireWebappIntent(WEBAPP_1_ID, WEBAPP_1_URL, WEBAPP_1_TITLE, WEBAPP_ICON, false);
        CriteriaHelper.pollUiThread(() -> {
            Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
            Criteria.checkThat(lastActivity, Matchers.instanceOf(ChromeTabbedActivity.class));
        });
        ChromeActivity chromeActivity =
                (ChromeActivity) ApplicationStatus.getLastTrackedFocusedActivity();
        mTestRule.waitForFullLoad(chromeActivity, WEBAPP_1_TITLE);

        // Firing a correct Intent should start a WebappActivity instance instead of the browser.
        fireWebappIntent(WEBAPP_2_ID, WEBAPP_2_URL, WEBAPP_2_TITLE, WEBAPP_ICON, true);
        CriteriaHelper.pollUiThread(() -> {
            return isWebappActivityReady(ApplicationStatus.getLastTrackedFocusedActivity());
        });
    }

    /**
     * Starts a WebappActivity for the given data and waits for it to be initialized. We can't use
     * ActivityTestUtils.waitForActivity() because of the way WebappActivity is instanced on pre-L
     * devices.
     */
    private WebappActivity startWebappActivity(String id, String url, String title, String icon) {
        fireWebappIntent(id, url, title, icon, true);
        CriteriaHelper.pollUiThread(() -> {
            Activity lastActivity = ApplicationStatus.getLastTrackedFocusedActivity();
            return isWebappActivityReady(lastActivity);
        }, 10000, CriteriaHelper.DEFAULT_POLLING_INTERVAL);
        return (WebappActivity) ApplicationStatus.getLastTrackedFocusedActivity();
    }

    /** Returns true when the last Activity is a WebappActivity and is ready for testing. */
    private boolean isWebappActivityReady(Activity lastActivity) {
        if (!(lastActivity instanceof WebappActivity)) return false;

        WebappActivity webappActivity = (WebappActivity) lastActivity;
        if (webappActivity.getActivityTab() == null) return false;

        View rootView = webappActivity.findViewById(android.R.id.content);
        if (!rootView.hasWindowFocus()) return false;

        return true;
    }
}
| |
// Copyright 2000-2020 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.dnd;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.util.Disposer;
import com.intellij.openapi.util.Pair;
import com.intellij.openapi.util.Ref;
import com.intellij.util.Function;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import javax.swing.*;
import java.awt.*;
/**
 * Wires drag-and-drop behavior onto a Swing component by delegating the
 * {@link DnDSource} and {@link DnDTarget} callbacks to functions supplied
 * through {@link #createBuilder(JComponent)}. Registration with
 * {@link DnDManager} happens in the constructor and is undone in
 * {@link #dispose()}.
 *
 * @author Konstantin Bulenkov
 */
public final class DnDSupport implements DnDTarget, DnDSource, DnDDropHandler.WithResult, Disposable {
  private final JComponent myComponent;
  private final Function<? super DnDActionInfo, ? extends DnDDragStartBean> myBeanProvider;
  private final Function<? super DnDActionInfo, ? extends DnDImage> myImageProvider;
  private final @Nullable DnDDropHandler.WithResult myHandler;
  private final @Nullable DnDTargetChecker myChecker;
  private final Runnable myDropEndedCallback;
  private final DnDDropActionHandler myDropActionHandler;
  private final Runnable myCleanUpCallback;
  private final boolean myAsTarget;
  private final boolean myAsSource;

  private DnDSupport(JComponent component,
                     Function<? super DnDActionInfo, ? extends DnDDragStartBean> beanProvider,
                     Function<? super DnDActionInfo, ? extends DnDImage> imageProvider,
                     DnDDropHandler.WithResult handler,
                     DnDTargetChecker checker,
                     Runnable dropEndedCallback,
                     Disposable parent,
                     DnDDropActionHandler dropActionHandler,
                     Runnable cleanUpCallback,
                     boolean asTarget,
                     boolean asSource,
                     boolean asNativeTarget) {
    myComponent = component;
    myBeanProvider = beanProvider;
    myImageProvider = imageProvider;
    myHandler = handler;
    myChecker = checker;
    myDropEndedCallback = dropEndedCallback;
    myDropActionHandler = dropActionHandler;
    myCleanUpCallback = cleanUpCallback;
    myAsTarget = asTarget;
    myAsSource = asSource;
    // Register with the DnD manager immediately; dispose() undoes this.
    if (myAsTarget) {
      DnDManager.getInstance().registerTarget(asNativeTarget ? new DnDNativeTargetWrapper(this) : this, myComponent);
    }
    if (myAsSource) {
      DnDManager.getInstance().registerSource(this, myComponent);
    }
    if (parent != null) {
      Disposer.register(parent, this);
    }
  }

  @Override
  public boolean canStartDragging(DnDAction action, Point dragOrigin) {
    // Dragging is possible only for a source with a provider that yields a bean.
    if (myBeanProvider == null || !myAsSource) {
      return false;
    }
    return myBeanProvider.fun(new DnDActionInfo(action, dragOrigin)) != null;
  }

  @Override
  public DnDDragStartBean startDragging(DnDAction action, Point dragOrigin) {
    return myBeanProvider.fun(new DnDActionInfo(action, dragOrigin));
  }

  @Override
  public Pair<Image, Point> createDraggedImage(DnDAction action, Point dragOrigin) {
    // No provider, or a provider that declines, means no custom drag image.
    if (myImageProvider == null) {
      return null;
    }
    DnDImage image = myImageProvider.fun(new DnDActionInfo(action, dragOrigin));
    if (image == null) {
      return null;
    }
    Point anchor = image.getPoint();
    return Pair.create(image.getImage(), anchor != null ? anchor : dragOrigin);
  }

  @Override
  public void dragDropEnd() {
    if (myDropEndedCallback != null) {
      myDropEndedCallback.run();
    }
  }

  @Override
  public void dropActionChanged(int gestureModifiers) {
    if (myDropActionHandler != null) {
      myDropActionHandler.dropActionChanged(gestureModifiers);
    }
  }

  @Override
  public boolean update(DnDEvent event) {
    // Without a checker every location is a valid drop target.
    if (myChecker != null) {
      return myChecker.update(event);
    }
    event.setDropPossible(true);
    return false;
  }

  @Override
  public boolean tryDrop(DnDEvent aEvent) {
    if (myHandler == null) {
      return true;
    }
    return myHandler.tryDrop(aEvent);
  }

  @Override
  public void cleanUpOnLeave() {
    if (myCleanUpCallback != null) {
      myCleanUpCallback.run();
    }
  }

  @Override
  public void dispose() {
    // Mirror the registrations performed in the constructor.
    if (myAsSource) {
      DnDManager.getInstance().unregisterSource(this, myComponent);
    }
    if (myAsTarget) {
      DnDManager.getInstance().unregisterTarget(this, myComponent);
    }
  }

  /** Adapts a {@link DnDSupport} so it can be registered as a native DnD target. */
  private static final class DnDNativeTargetWrapper implements DnDNativeTarget, DnDDropHandler.WithResult {
    @NotNull private final DnDSupport myTarget;

    private DnDNativeTargetWrapper(@NotNull DnDSupport target) {
      myTarget = target;
    }

    @Override
    public boolean update(DnDEvent event) {
      return myTarget.update(event);
    }

    @Override
    public boolean tryDrop(DnDEvent event) {
      return myTarget.tryDrop(event);
    }

    @Override
    public void updateDraggedImage(Image image, Point dropPoint, Point imageOffset) {
      myTarget.updateDraggedImage(image, dropPoint, imageOffset);
    }

    @Override
    public void cleanUpOnLeave() {
      myTarget.cleanUpOnLeave();
    }
  }

  /**
   * Returns a builder that collects the optional providers/callbacks and, on
   * {@link DnDSupportBuilder#install()}, constructs (and thereby registers) a
   * {@link DnDSupport} for {@code component}.
   */
  @NotNull
  public static DnDSupportBuilder createBuilder(@NotNull JComponent component) {
    final Ref<Boolean> targetEnabled = Ref.create(true);
    final Ref<Boolean> sourceEnabled = Ref.create(true);
    final Ref<Boolean> nativeTarget = Ref.create(false);
    final Ref<Function<? super DnDActionInfo, ? extends DnDImage>> imageFn = Ref.create(null);
    final Ref<Function<? super DnDActionInfo, ? extends DnDDragStartBean>> beanFn = Ref.create(null);
    final Ref<Runnable> dropEndedCallback = Ref.create(null);
    final Ref<Disposable> parentDisposable = Ref.create(null);
    final Ref<DnDDropHandler.WithResult> handlerRef = Ref.create(null);
    final Ref<DnDTargetChecker> checkerRef = Ref.create(null);
    final Ref<DnDDropActionHandler> actionHandlerRef = Ref.create(null);
    final Ref<Runnable> leaveCallback = Ref.create(null);
    return new DnDSupportBuilder() {
      @Override
      public DnDSupportBuilder disableAsTarget() {
        targetEnabled.set(false);
        return this;
      }

      @Override
      public DnDSupportBuilder disableAsSource() {
        sourceEnabled.set(false);
        return this;
      }

      @Override
      public DnDSupportBuilder enableAsNativeTarget() {
        nativeTarget.set(true);
        return this;
      }

      @Override
      public DnDSupportBuilder setImageProvider(Function<? super DnDActionInfo, ? extends DnDImage> fun) {
        imageFn.set(fun);
        return this;
      }

      @Override
      public DnDSupportBuilder setBeanProvider(Function<? super DnDActionInfo, ? extends DnDDragStartBean> fun) {
        beanFn.set(fun);
        return this;
      }

      @Override
      public DnDSupportBuilder setDropHandler(DnDDropHandler handler) {
        // Adapt the result-less handler to the WithResult contract.
        return setDropHandlerWithResult(event -> {
          handler.drop(event);
          return true;
        });
      }

      @Override
      public DnDSupportBuilder setDropHandlerWithResult(DnDDropHandler.WithResult handler) {
        handlerRef.set(handler);
        return this;
      }

      @Override
      public DnDSupportBuilder setTargetChecker(DnDTargetChecker checker) {
        checkerRef.set(checker);
        return this;
      }

      @Override
      public DnDSupportBuilder setDropActionHandler(DnDDropActionHandler handler) {
        actionHandlerRef.set(handler);
        return this;
      }

      @Override
      public DnDSupportBuilder setDisposableParent(Disposable parent) {
        parentDisposable.set(parent);
        return this;
      }

      @Override
      public DnDSupportBuilder setCleanUpOnLeaveCallback(Runnable callback) {
        leaveCallback.set(callback);
        return this;
      }

      @Override
      public DnDSupportBuilder setDropEndedCallback(Runnable callback) {
        dropEndedCallback.set(callback);
        return this;
      }

      @Override
      public void install() {
        new DnDSupport(component,
                       beanFn.get(),
                       imageFn.get(),
                       handlerRef.get(),
                       checkerRef.get(),
                       dropEndedCallback.get(),
                       parentDisposable.get(),
                       actionHandlerRef.get(),
                       leaveCallback.get(),
                       targetEnabled.get(),
                       sourceEnabled.get(),
                       nativeTarget.get());
      }
    };
  }
}
| |
/**
* Copyright 2012 Riparian Data
* http://www.ripariandata.com
* contact@ripariandata.com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.ripariandata.timberwolf.mail.exchange;
import com.microsoft.schemas.exchange.services.x2006.messages.FindFolderResponseType;
import com.microsoft.schemas.exchange.services.x2006.messages.FindFolderType;
import com.microsoft.schemas.exchange.services.x2006.messages.GetItemResponseType;
import com.microsoft.schemas.exchange.services.x2006.messages.GetItemType;
import com.microsoft.schemas.exchange.services.x2006.messages.SyncFolderItemsResponseType;
import com.microsoft.schemas.exchange.services.x2006.messages.SyncFolderItemsType;
import java.io.IOException;
import java.io.InputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.nio.charset.Charset;
import org.apache.xmlbeans.XmlException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.xmlsoap.schemas.soap.envelope.BodyType;
import org.xmlsoap.schemas.soap.envelope.EnvelopeDocument;
import org.xmlsoap.schemas.soap.envelope.EnvelopeType;
import static com.ripariandata.timberwolf.Utilities.inputStreamToString;
/**
* ExchangeService handles packing xmlbeans objects into a SOAP envelope,
* sending them off to the Exchange server and then returning the xmlbeans
* objects that come back.
*
* Note that all the service calls are performed synchronously.
*/
public class ExchangeService
{
private static final Logger LOG = LoggerFactory.getLogger(ExchangeService.class);
private static final String DECLARATION = "<?xml version=\"1.0\" encoding=\"utf-8\"?>";
private static final String SOAP_ENCODING = "UTF-8";
private String endpoint;
private HttpUrlConnectionFactory connectionFactory;
/**
 * Creates a new ExchangeService that talks to the given Exchange server using
 * the supplied connection factory (useful for injecting a mock in tests).
 *
 * @param url A string representing the URL of the service endpoint for the Exchange server.
 * @param factory The factory used to open HTTP connections to the endpoint.
 */
public ExchangeService(final String url, final HttpUrlConnectionFactory factory)
{
    endpoint = url;
    connectionFactory = factory;
}
/**
 * Creates a new ExchangeService that talks to the given Exchange server,
 * using the default SASL-authenticating connection factory.
 *
 * @param url A string representing the URL of the service endpoint for the Exchange server.
 */
public ExchangeService(final String url)
{
    this(url, new SaslHttpUrlConnectionFactory());
}
/**
 * Sends a SOAP envelope request and returns the response.
 *
 * @param envelope An EnvelopeDocument with the SOAP envelope to send to Exchange.
 * @return An SOAP body from the Exchange's response.
 * @throws HttpErrorException If the HTTP response from Exchange has a non-200 status code.
 * @throws ServiceCallException If there was a non-HTTP error sending the response,
 *                              such as an improper encoding or IO error.
 */
private BodyType sendRequest(final EnvelopeDocument envelope)
    throws HttpErrorException, ServiceCallException
{
    String soapRequest = DECLARATION + envelope.xmlText();
    LOG.trace("Sending SOAP request to {}. SOAP envelope:", endpoint);
    LOG.trace(envelope.toString());

    HttpURLConnection connection = createConnection(soapRequest);
    int statusCode = getResponseCode(connection);
    String charset = getCharset(connection);
    InputStream responseData = getInputStream(connection);
    int amtAvailable = getAmountAvailable(responseData);

    // Any non-200 status is logged and surfaced as an HttpErrorException.
    if (statusCode != HttpURLConnection.HTTP_OK)
    {
        return logAndThrowHttpErrorCode(soapRequest, statusCode, responseData, amtAvailable, charset);
    }

    checkNonEmptyResponse(soapRequest, amtAvailable);
    EnvelopeDocument parsed = parseResponse(responseData, charset);
    LOG.trace("SOAP response received from {}. SOAP envelope:", endpoint);
    LOG.trace(parsed.toString());
    return getSoapBody(parsed);
}
/**
 * Determines the character encoding of the HTTP response from its
 * Content-Type header. If for whatever reason we fail to get the charset
 * (missing header, missing charset parameter, or an unsupported charset),
 * this logs that fact and returns UTF-8.
 *
 * @param connection the connection with a response
 * @return the character encoding for that response
 */
private static String getCharset(final HttpURLConnection connection)
{
    final String defaultCharset = "UTF-8";
    String contentType = connection.getHeaderField("Content-Type");
    if (contentType == null)
    {
        // Bug fix: this log call previously passed contentType (always null on this
        // branch) as the first placeholder argument, so the message printed "null"
        // instead of the fallback charset name.
        LOG.debug("Error getting charset for response, no Content-Type specified, falling back to \"{}\"",
                  defaultCharset);
        return defaultCharset;
    }
    // Header parameters are separated by ';'; spaces are stripped first so the
    // "charset=" prefix match is robust to formatting.
    for (String part : contentType.replace(" ", "").split(";"))
    {
        if (part.startsWith("charset="))
        {
            String charset = part.split("=", 2)[1].trim();
            if (Charset.isSupported(charset))
            {
                return charset;
            }
            else
            {
                LOG.debug("Error getting charset from Content-Type: \"{}\", falling back to \"{}\"", contentType,
                          defaultCharset);
                LOG.debug("Charset: \"{}\" is not supported", charset);
                return defaultCharset;
            }
        }
    }
    LOG.debug("Could not find charset in Content-Type: \"{}\", falling back to \"{}\"", contentType,
              defaultCharset);
    return defaultCharset;
}
/**
 * Logs the details of a non-200 HTTP response (request at debug level unless
 * tracing already captured it, plus any error body) and throws the
 * corresponding HttpErrorException. Never returns normally.
 *
 * @throws HttpErrorException always, carrying the HTTP status code.
 * @throws ServiceCallException if reading the error response body fails.
 */
private BodyType logAndThrowHttpErrorCode(final String request, final int code, final InputStream responseData,
                                          final int amtAvailable, final String charset)
    throws ServiceCallException, HttpErrorException
{
    LOG.error("Server responded with HTTP error code {}.", code);
    // At trace level the request was already logged by sendRequest.
    if (!LOG.isTraceEnabled())
    {
        LOG.debug("Request that generated the error:");
        LOG.debug(request);
    }
    if (amtAvailable > 0)
    {
        LOG.debug("Error response body:");
        try
        {
            LOG.debug(inputStreamToString(responseData, charset));
        }
        catch (IOException readError)
        {
            throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
                "Error reading from the response stream.", readError));
        }
    }
    throw new HttpErrorException(code);
}
/**
 * Extracts the soap body from the response.
 *
 * @param response the response from exchange
 * @return the soap body from the response
 * @throws ServiceCallException if there was an error extracting the body.
 *                              The error will be logged.
 */
private BodyType getSoapBody(final EnvelopeDocument response) throws ServiceCallException
{
    BodyType body = response.getEnvelope().getBody();
    if (body == null)
    {
        LOG.error("SOAP envelope did not contain a valid body.");
        // At trace level the envelope was already logged by sendRequest.
        if (!LOG.isTraceEnabled())
        {
            LOG.debug("SOAP envelope:");
            LOG.debug(response.xmlText());
        }
        throw new ServiceCallException(ServiceCallException.Reason.OTHER,
                                       "SOAP response did not contain a body.");
    }
    return body;
}
/**
 * Verifies that a successful (HTTP 200) response actually carries data,
 * logging the originating request and throwing otherwise.
 *
 * @param request the request that produced the response, for diagnostics
 * @param amtAvailable number of bytes available on the response stream
 * @throws ServiceCallException if the response body is empty
 */
private void checkNonEmptyResponse(final String request, final int amtAvailable) throws ServiceCallException
{
    if (amtAvailable != 0)
    {
        return;
    }
    LOG.error("HTTP response was successful, but has no data.");
    // At trace level the request was already logged by sendRequest.
    if (!LOG.isTraceEnabled())
    {
        LOG.debug("Request that generated the empty response:");
        LOG.debug(request);
    }
    throw new ServiceCallException(ServiceCallException.Reason.OTHER, "Response has empty body.");
}
/**
 * Parses the response stream into a SOAP envelope document.
 *
 * @param responseData the raw response stream from Exchange
 * @param charset the charset used when dumping an unparseable body to the log
 * @return the parsed envelope
 * @throws ServiceCallException if the stream cannot be read or the XML is invalid
 */
private EnvelopeDocument parseResponse(final InputStream responseData, final String charset)
    throws ServiceCallException
{
    try
    {
        return EnvelopeDocument.Factory.parse(responseData);
    }
    catch (IOException readError)
    {
        throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
            "There was an error reading from the response stream.", readError));
    }
    catch (XmlException parseError)
    {
        LOG.error("There was an error parsing the SOAP response from Exchange.");
        LOG.debug("Response body:");
        try
        {
            // Dumping the body itself reads the stream and may fail with IOException.
            LOG.debug(inputStreamToString(responseData, charset));
            throw new ServiceCallException(ServiceCallException.Reason.OTHER, "Error parsing SOAP response.",
                                           parseError);
        }
        catch (IOException dumpError)
        {
            throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
                "There was an error reading from the response stream.", dumpError));
        }
    }
}
/**
 * Returns the number of bytes available on the response stream, wrapping any
 * I/O failure in a logged ServiceCallException.
 *
 * @param responseData the response stream to query
 * @return the value of {@link InputStream#available()}
 * @throws ServiceCallException if querying the stream fails
 */
private int getAmountAvailable(final InputStream responseData) throws ServiceCallException
{
    int amtAvailable;
    try
    {
        amtAvailable = responseData.available();
    }
    catch (IOException e)
    {
        // Bug fix: the original exception was dropped here; every other error
        // path in this class preserves the cause, so pass it along.
        throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
            "There was an error reading from the response stream.", e));
    }
    return amtAvailable;
}
/**
 * Opens the input stream for the connection's response, wrapping any I/O
 * failure in a logged ServiceCallException.
 *
 * @param conn the connection whose response stream is wanted
 * @return the response input stream
 * @throws ServiceCallException if opening the stream fails
 */
private InputStream getInputStream(final HttpURLConnection conn) throws ServiceCallException
{
    InputStream responseData;
    try
    {
        responseData = conn.getInputStream();
    }
    catch (IOException e)
    {
        // Consistency fix: every other error path in this class routes through
        // ServiceCallException.log so the failure is recorded; this one did not.
        throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
            "There was an error getting the input stream for the response.", e));
    }
    return responseData;
}
/**
 * Reads the HTTP status code from the connection, wrapping any I/O failure
 * in a logged ServiceCallException.
 *
 * @param conn the connection whose status code is wanted
 * @return the HTTP status code of the response
 * @throws ServiceCallException if reading the status code fails
 */
private int getResponseCode(final HttpURLConnection conn) throws ServiceCallException
{
    try
    {
        return conn.getResponseCode();
    }
    catch (IOException e)
    {
        throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
            "There was an error getting the HTTP status code for the response.", e));
    }
}
/**
 * Opens a new HTTP connection carrying the given serialized SOAP request.
 *
 * @param request The SOAP request body to send.
 * @return A connection produced by the configured connection factory.
 * @throws ServiceCallException If the request body cannot be encoded with
 *         {@code SOAP_ENCODING}.
 */
private HttpURLConnection createConnection(final String request) throws ServiceCallException
{
    final HttpURLConnection connection;
    try
    {
        connection = connectionFactory.newInstance(endpoint, request.getBytes(SOAP_ENCODING));
    }
    catch (UnsupportedEncodingException e)
    {
        throw ServiceCallException.log(LOG, new ServiceCallException(ServiceCallException.Reason.OTHER,
            "Request body could not be encoded into " + SOAP_ENCODING, e));
    }
    return connection;
}
/**
 * Creates a SOAP envelope containing only an ExchangeImpersonation header for
 * the given user; callers are expected to add the request body themselves.
 *
 * @param targetUser The principal name of the user to impersonate.
 * @return An envelope document with the impersonation header populated.
 */
EnvelopeDocument createEmptyRequest(final String targetUser)
{
    final EnvelopeDocument request = EnvelopeDocument.Factory.newInstance();
    request.addNewEnvelope()
           .addNewHeader()
           .addNewExchangeImpersonation()
           .addNewConnectingSID()
           .setPrincipalName(targetUser);
    return request;
}
/**
 * Returns the result of a sync folder items request.
 * @param syncFolderItems A SyncFolderItemsType object that specifies the folder to sync.
 * @param targetUser The principal name of the user to find items for.
 * @return A SyncFolderItemsResponseType object with the requested items.
 * @throws HttpErrorException If the HTTP response from Exchange has a non-200 status code.
 * @throws ServiceCallException If there was a non-HTTP error sending the response,
 * such as an improper encoding or IO error.
 */
public SyncFolderItemsResponseType syncFolderItems(final SyncFolderItemsType syncFolderItems,
    final String targetUser)
    throws ServiceCallException, HttpErrorException
{
    EnvelopeDocument request = createEmptyRequest(targetUser);
    EnvelopeType envelope = request.getEnvelope();
    envelope.addNewBody().setSyncFolderItems(syncFolderItems);
    return sendRequest(request).getSyncFolderItemsResponse();
}
/**
 * Returns the results of a get item request.
 *
 * @param getItem A GetItemType object that specifies the set of items to
 * gather from the Exchange server.
 * @param targetUser The principal name of the user to get items for.
 * @return A GetItemResponseType object with the requested items.
 * @throws HttpErrorException If the HTTP response from Exchange has a non-200 status code.
 * @throws ServiceCallException If there was a non-HTTP error sending the response,
 * such as an improper encoding or IO error.
 */
public GetItemResponseType getItem(final GetItemType getItem, final String targetUser)
    throws ServiceCallException, HttpErrorException
{
    final EnvelopeDocument request = createEmptyRequest(targetUser);
    request.getEnvelope().addNewBody().setGetItem(getItem);
    return sendRequest(request).getGetItemResponse();
}
/**
 * Returns the response of a FindFolder request.
 * @param findFolder The FindFolder request.
 * @param targetUser The principal name of the user to find folders for.
 * @return The response.
 * @throws ServiceCallException A non-HTTP error has occurred during the request.
 * @throws HttpErrorException A HTTP error has occurred during the request.
 */
public FindFolderResponseType findFolder(final FindFolderType findFolder, final String targetUser)
    throws ServiceCallException, HttpErrorException
{
    final EnvelopeDocument request = createEmptyRequest(targetUser);
    request.getEnvelope().addNewBody().setFindFolder(findFolder);
    return sendRequest(request).getFindFolderResponse();
}
}
| |
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/monitoring/v3/metric_service.proto
package com.google.monitoring.v3;
/**
*
*
* <pre>
* Summary of the result of a failed request to write data to a time series.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateTimeSeriesSummary}
*/
public final class CreateTimeSeriesSummary extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.CreateTimeSeriesSummary)
CreateTimeSeriesSummaryOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateTimeSeriesSummary.newBuilder() to construct.
private CreateTimeSeriesSummary(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg constructor used by newInstance() and the parser; repeated fields
// start as the shared immutable empty list.
private CreateTimeSeriesSummary() {
  errors_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateTimeSeriesSummary();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
/**
 * Wire-format parsing constructor (generated by protoc). Tags 8, 16 and 26
 * correspond to total_point_count (field 1, varint), success_point_count
 * (field 2, varint) and errors (field 3, length-delimited), matching the
 * field numbers used in writeTo().
 */
private CreateTimeSeriesSummary(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  // Bit 0 tracks whether errors_ has been switched from the immutable empty
  // default to a mutable ArrayList.
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        case 8:
          {
            totalPointCount_ = input.readInt32();
            break;
          }
        case 16:
          {
            successPointCount_ = input.readInt32();
            break;
          }
        case 26:
          {
            if (!((mutable_bitField0_ & 0x00000001) != 0)) {
              errors_ =
                  new java.util.ArrayList<
                      com.google.monitoring.v3.CreateTimeSeriesSummary.Error>();
              mutable_bitField0_ |= 0x00000001;
            }
            errors_.add(
                input.readMessage(
                    com.google.monitoring.v3.CreateTimeSeriesSummary.Error.parser(),
                    extensionRegistry));
            break;
          }
        default:
          {
            // Unrecognized field: preserve it in unknownFields so that
            // re-serialization does not lose data.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    // Freeze the repeated field and attach collected unknown fields even when
    // parsing failed part-way through.
    if (((mutable_bitField0_ & 0x00000001) != 0)) {
      errors_ = java.util.Collections.unmodifiableList(errors_);
    }
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateTimeSeriesSummary.class,
com.google.monitoring.v3.CreateTimeSeriesSummary.Builder.class);
}
public interface ErrorOrBuilder
extends
// @@protoc_insertion_point(interface_extends:google.monitoring.v3.CreateTimeSeriesSummary.Error)
com.google.protobuf.MessageOrBuilder {
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return Whether the status field is set.
*/
boolean hasStatus();
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return The status.
*/
com.google.rpc.Status getStatus();
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
com.google.rpc.StatusOrBuilder getStatusOrBuilder();
/**
*
*
* <pre>
* The number of points that couldn't be written because of `status`.
* </pre>
*
* <code>int32 point_count = 2;</code>
*
* @return The pointCount.
*/
int getPointCount();
}
/**
*
*
* <pre>
* Detailed information about an error category.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateTimeSeriesSummary.Error}
*/
public static final class Error extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.monitoring.v3.CreateTimeSeriesSummary.Error)
ErrorOrBuilder {
private static final long serialVersionUID = 0L;
// Use Error.newBuilder() to construct.
private Error(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Error() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Error();
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
return this.unknownFields;
}
/**
 * Wire-format parsing constructor for the nested Error message (generated by
 * protoc). Tag 10 is field 1 (status, length-delimited message); tag 16 is
 * field 2 (point_count, varint).
 */
private Error(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  this();
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0: // end of input
          done = true;
          break;
        case 10:
          {
            // If status was already set (repeated occurrence of field 1),
            // merge the new value into the existing one per proto semantics.
            com.google.rpc.Status.Builder subBuilder = null;
            if (status_ != null) {
              subBuilder = status_.toBuilder();
            }
            status_ = input.readMessage(com.google.rpc.Status.parser(), extensionRegistry);
            if (subBuilder != null) {
              subBuilder.mergeFrom(status_);
              status_ = subBuilder.buildPartial();
            }
            break;
          }
        case 16:
          {
            pointCount_ = input.readInt32();
            break;
          }
        default:
          {
            // Unrecognized field: preserve it in unknownFields.
            if (!parseUnknownField(input, unknownFields, extensionRegistry, tag)) {
              done = true;
            }
            break;
          }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(e).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_Error_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_Error_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.class,
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder.class);
}
public static final int STATUS_FIELD_NUMBER = 1;
private com.google.rpc.Status status_;
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return Whether the status field is set.
*/
@java.lang.Override
public boolean hasStatus() {
return status_ != null;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return The status.
*/
@java.lang.Override
public com.google.rpc.Status getStatus() {
return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
@java.lang.Override
public com.google.rpc.StatusOrBuilder getStatusOrBuilder() {
return getStatus();
}
public static final int POINT_COUNT_FIELD_NUMBER = 2;
private int pointCount_;
/**
*
*
* <pre>
* The number of points that couldn't be written because of `status`.
* </pre>
*
* <code>int32 point_count = 2;</code>
*
* @return The pointCount.
*/
@java.lang.Override
public int getPointCount() {
return pointCount_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: fields equal to their defaults (unset status, zero
  // count) are omitted from the wire format.
  if (status_ != null) {
    output.writeMessage(1, getStatus());
  }
  if (pointCount_ != 0) {
    output.writeInt32(2, pointCount_);
  }
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the result; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (status_ != null) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStatus());
  }
  if (pointCount_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pointCount_);
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Value equality over status presence/value, point_count and unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.monitoring.v3.CreateTimeSeriesSummary.Error)) {
    return super.equals(obj);
  }
  com.google.monitoring.v3.CreateTimeSeriesSummary.Error other =
      (com.google.monitoring.v3.CreateTimeSeriesSummary.Error) obj;
  if (hasStatus() != other.hasStatus()) return false;
  if (hasStatus()) {
    if (!getStatus().equals(other.getStatus())) return false;
  }
  if (getPointCount() != other.getPointCount()) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Cached after first computation; 0 is the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  // Standard protoc hash recipe: mix the descriptor, then each set field
  // tagged by its field number.
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasStatus()) {
    hash = (37 * hash) + STATUS_FIELD_NUMBER;
    hash = (53 * hash) + getStatus().hashCode();
  }
  hash = (37 * hash) + POINT_COUNT_FIELD_NUMBER;
  hash = (53 * hash) + getPointCount();
  hash = (29 * hash) + unknownFields.hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.monitoring.v3.CreateTimeSeriesSummary.Error prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Detailed information about an error category.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateTimeSeriesSummary.Error}
*/
public static final class Builder
extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.CreateTimeSeriesSummary.Error)
com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_Error_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_Error_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.class,
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder.class);
}
// Construct using com.google.monitoring.v3.CreateTimeSeriesSummary.Error.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {}
}
@java.lang.Override
public Builder clear() {
super.clear();
if (statusBuilder_ == null) {
status_ = null;
} else {
status_ = null;
statusBuilder_ = null;
}
pointCount_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_Error_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error getDefaultInstanceForType() {
return com.google.monitoring.v3.CreateTimeSeriesSummary.Error.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error build() {
com.google.monitoring.v3.CreateTimeSeriesSummary.Error result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error buildPartial() {
  com.google.monitoring.v3.CreateTimeSeriesSummary.Error result =
      new com.google.monitoring.v3.CreateTimeSeriesSummary.Error(this);
  // status comes either from the plain field or, if a nested builder was
  // opened via getStatusBuilder(), from that builder.
  if (statusBuilder_ == null) {
    result.status_ = status_;
  } else {
    result.status_ = statusBuilder_.build();
  }
  result.pointCount_ = pointCount_;
  onBuilt();
  return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index,
java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.CreateTimeSeriesSummary.Error) {
return mergeFrom((com.google.monitoring.v3.CreateTimeSeriesSummary.Error) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.v3.CreateTimeSeriesSummary.Error other) {
if (other == com.google.monitoring.v3.CreateTimeSeriesSummary.Error.getDefaultInstance())
return this;
if (other.hasStatus()) {
mergeStatus(other.getStatus());
}
if (other.getPointCount() != 0) {
setPointCount(other.getPointCount());
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.monitoring.v3.CreateTimeSeriesSummary.Error parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage =
(com.google.monitoring.v3.CreateTimeSeriesSummary.Error) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private com.google.rpc.Status status_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
statusBuilder_;
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return Whether the status field is set.
*/
public boolean hasStatus() {
return statusBuilder_ != null || status_ != null;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*
* @return The status.
*/
public com.google.rpc.Status getStatus() {
if (statusBuilder_ == null) {
return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
} else {
return statusBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public Builder setStatus(com.google.rpc.Status value) {
if (statusBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
status_ = value;
onChanged();
} else {
statusBuilder_.setMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public Builder setStatus(com.google.rpc.Status.Builder builderForValue) {
if (statusBuilder_ == null) {
status_ = builderForValue.build();
onChanged();
} else {
statusBuilder_.setMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public Builder mergeStatus(com.google.rpc.Status value) {
  if (statusBuilder_ == null) {
    // No nested builder open: merge message-wise if already set, otherwise
    // adopt the incoming value directly.
    if (status_ != null) {
      status_ = com.google.rpc.Status.newBuilder(status_).mergeFrom(value).buildPartial();
    } else {
      status_ = value;
    }
    onChanged();
  } else {
    // Delegate to the active nested builder.
    statusBuilder_.mergeFrom(value);
  }
  return this;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public Builder clearStatus() {
if (statusBuilder_ == null) {
status_ = null;
onChanged();
} else {
status_ = null;
statusBuilder_ = null;
}
return this;
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public com.google.rpc.Status.Builder getStatusBuilder() {
onChanged();
return getStatusFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
public com.google.rpc.StatusOrBuilder getStatusOrBuilder() {
if (statusBuilder_ != null) {
return statusBuilder_.getMessageOrBuilder();
} else {
return status_ == null ? com.google.rpc.Status.getDefaultInstance() : status_;
}
}
/**
*
*
* <pre>
* The status of the requested write operation.
* </pre>
*
* <code>.google.rpc.Status status = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status, com.google.rpc.Status.Builder, com.google.rpc.StatusOrBuilder>
getStatusFieldBuilder() {
if (statusBuilder_ == null) {
statusBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.rpc.Status,
com.google.rpc.Status.Builder,
com.google.rpc.StatusOrBuilder>(getStatus(), getParentForChildren(), isClean());
status_ = null;
}
return statusBuilder_;
}
private int pointCount_;
/**
*
*
* <pre>
* The number of points that couldn't be written because of `status`.
* </pre>
*
* <code>int32 point_count = 2;</code>
*
* @return The pointCount.
*/
@java.lang.Override
public int getPointCount() {
return pointCount_;
}
/**
*
*
* <pre>
* The number of points that couldn't be written because of `status`.
* </pre>
*
* <code>int32 point_count = 2;</code>
*
* @param value The pointCount to set.
* @return This builder for chaining.
*/
public Builder setPointCount(int value) {
pointCount_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The number of points that couldn't be written because of `status`.
* </pre>
*
* <code>int32 point_count = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPointCount() {
pointCount_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.CreateTimeSeriesSummary.Error)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesSummary.Error)
private static final com.google.monitoring.v3.CreateTimeSeriesSummary.Error DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.CreateTimeSeriesSummary.Error();
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary.Error getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Error> PARSER =
new com.google.protobuf.AbstractParser<Error>() {
@java.lang.Override
public Error parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Error(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<Error> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Error> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
public static final int TOTAL_POINT_COUNT_FIELD_NUMBER = 1;
private int totalPointCount_;
/**
*
*
* <pre>
* The number of points in the request.
* </pre>
*
* <code>int32 total_point_count = 1;</code>
*
* @return The totalPointCount.
*/
@java.lang.Override
public int getTotalPointCount() {
return totalPointCount_;
}
public static final int SUCCESS_POINT_COUNT_FIELD_NUMBER = 2;
private int successPointCount_;
/**
*
*
* <pre>
* The number of points that were successfully written.
* </pre>
*
* <code>int32 success_point_count = 2;</code>
*
* @return The successPointCount.
*/
@java.lang.Override
public int getSuccessPointCount() {
return successPointCount_;
}
public static final int ERRORS_FIELD_NUMBER = 3;
private java.util.List<com.google.monitoring.v3.CreateTimeSeriesSummary.Error> errors_;
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
@java.lang.Override
public java.util.List<com.google.monitoring.v3.CreateTimeSeriesSummary.Error> getErrorsList() {
return errors_;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder>
getErrorsOrBuilderList() {
return errors_;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
@java.lang.Override
public int getErrorsCount() {
return errors_.size();
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error getErrors(int index) {
return errors_.get(index);
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder getErrorsOrBuilder(
int index) {
return errors_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Proto3 semantics: zero-valued scalar fields are omitted from the wire.
  if (totalPointCount_ != 0) {
    output.writeInt32(1, totalPointCount_);
  }
  if (successPointCount_ != 0) {
    output.writeInt32(2, successPointCount_);
  }
  for (int i = 0; i < errors_.size(); i++) {
    output.writeMessage(3, errors_.get(i));
  }
  unknownFields.writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize caches the result; -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (totalPointCount_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(1, totalPointCount_);
  }
  if (successPointCount_ != 0) {
    size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, successPointCount_);
  }
  for (int i = 0; i < errors_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, errors_.get(i));
  }
  size += unknownFields.getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Value equality over both counts, the errors list and unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.monitoring.v3.CreateTimeSeriesSummary)) {
    return super.equals(obj);
  }
  com.google.monitoring.v3.CreateTimeSeriesSummary other =
      (com.google.monitoring.v3.CreateTimeSeriesSummary) obj;
  if (getTotalPointCount() != other.getTotalPointCount()) return false;
  if (getSuccessPointCount() != other.getSuccessPointCount()) return false;
  if (!getErrorsList().equals(other.getErrorsList())) return false;
  if (!unknownFields.equals(other.unknownFields)) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
// memoizedHashCode of 0 means "not computed yet".
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TOTAL_POINT_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getTotalPointCount();
hash = (37 * hash) + SUCCESS_POINT_COUNT_FIELD_NUMBER;
hash = (53 * hash) + getSuccessPointCount();
// The repeated field contributes only when non-empty.
if (getErrorsCount() > 0) {
hash = (37 * hash) + ERRORS_FIELD_NUMBER;
hash = (53 * hash) + getErrorsList().hashCode();
}
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
// -------------------------------------------------------------------------
// Standard generated parseFrom(...) overloads. The in-memory variants
// (ByteBuffer, ByteString, byte[]) delegate straight to PARSER; the stream
// variants go through the GeneratedMessageV3 helpers, which surface parse
// failures as IOExceptions.
// -------------------------------------------------------------------------
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's field values.
public static Builder newBuilder(com.google.monitoring.v3.CreateTimeSeriesSummary prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty Builder; any other instance is
// merged into a fresh Builder so its field values carry over.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Summary of the result of a failed request to write data to a time series.
* </pre>
*
* Protobuf type {@code google.monitoring.v3.CreateTimeSeriesSummary}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.monitoring.v3.CreateTimeSeriesSummary)
com.google.monitoring.v3.CreateTimeSeriesSummaryOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.monitoring.v3.CreateTimeSeriesSummary.class,
com.google.monitoring.v3.CreateTimeSeriesSummary.Builder.class);
}
// Construct using com.google.monitoring.v3.CreateTimeSeriesSummary.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the repeated-field builder when the runtime requests it.
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getErrorsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
// Resets every field to its proto3 default.
super.clear();
totalPointCount_ = 0;
successPointCount_ = 0;
if (errorsBuilder_ == null) {
// List mode: drop the list and clear the "mutable list" bit.
errors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
} else {
// Builder mode: delegate clearing to the field builder.
errorsBuilder_.clear();
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.monitoring.v3.MetricServiceProto
.internal_static_google_monitoring_v3_CreateTimeSeriesSummary_descriptor;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary getDefaultInstanceForType() {
return com.google.monitoring.v3.CreateTimeSeriesSummary.getDefaultInstance();
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary build() {
com.google.monitoring.v3.CreateTimeSeriesSummary result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary buildPartial() {
com.google.monitoring.v3.CreateTimeSeriesSummary result =
new com.google.monitoring.v3.CreateTimeSeriesSummary(this);
int from_bitField0_ = bitField0_;
result.totalPointCount_ = totalPointCount_;
result.successPointCount_ = successPointCount_;
if (errorsBuilder_ == null) {
// Freeze the list so the built message is immutable, and clear the
// "mutable" bit so a later mutation on this builder makes a fresh copy.
if (((bitField0_ & 0x00000001) != 0)) {
errors_ = java.util.Collections.unmodifiableList(errors_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.errors_ = errors_;
} else {
result.errors_ = errorsBuilder_.build();
}
onBuilt();
return result;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.monitoring.v3.CreateTimeSeriesSummary) {
return mergeFrom((com.google.monitoring.v3.CreateTimeSeriesSummary) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.monitoring.v3.CreateTimeSeriesSummary other) {
// Merging the default instance is a no-op.
if (other == com.google.monitoring.v3.CreateTimeSeriesSummary.getDefaultInstance())
return this;
// Scalars are only overwritten when the incoming value is non-default.
if (other.getTotalPointCount() != 0) {
setTotalPointCount(other.getTotalPointCount());
}
if (other.getSuccessPointCount() != 0) {
setSuccessPointCount(other.getSuccessPointCount());
}
if (errorsBuilder_ == null) {
// List mode: share the other message's (immutable) list when ours is
// empty, otherwise append copies of its elements.
if (!other.errors_.isEmpty()) {
if (errors_.isEmpty()) {
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureErrorsIsMutable();
errors_.addAll(other.errors_);
}
onChanged();
}
} else {
// Builder mode: if our field builder holds nothing yet, dispose it and
// adopt the other list directly; otherwise append through the builder.
if (!other.errors_.isEmpty()) {
if (errorsBuilder_.isEmpty()) {
errorsBuilder_.dispose();
errorsBuilder_ = null;
errors_ = other.errors_;
bitField0_ = (bitField0_ & ~0x00000001);
errorsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getErrorsFieldBuilder()
: null;
} else {
errorsBuilder_.addAllMessages(other.errors_);
}
}
}
this.mergeUnknownFields(other.unknownFields);
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
// No required fields, so a builder is always in a buildable state.
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.google.monitoring.v3.CreateTimeSeriesSummary parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Keep whatever was parsed before the failure so the finally block can
// still merge it, then surface the underlying IOException.
parsedMessage = (com.google.monitoring.v3.CreateTimeSeriesSummary) e.getUnfinishedMessage();
throw e.unwrapIOException();
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private int totalPointCount_;
/**
*
*
* <pre>
* The number of points in the request.
* </pre>
*
* <code>int32 total_point_count = 1;</code>
*
* @return The totalPointCount.
*/
@java.lang.Override
public int getTotalPointCount() {
return totalPointCount_;
}
/**
*
*
* <pre>
* The number of points in the request.
* </pre>
*
* <code>int32 total_point_count = 1;</code>
*
* @param value The totalPointCount to set.
* @return This builder for chaining.
*/
public Builder setTotalPointCount(int value) {
totalPointCount_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The number of points in the request.
* </pre>
*
* <code>int32 total_point_count = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearTotalPointCount() {
totalPointCount_ = 0;
onChanged();
return this;
}
private int successPointCount_;
/**
*
*
* <pre>
* The number of points that were successfully written.
* </pre>
*
* <code>int32 success_point_count = 2;</code>
*
* @return The successPointCount.
*/
@java.lang.Override
public int getSuccessPointCount() {
return successPointCount_;
}
/**
*
*
* <pre>
* The number of points that were successfully written.
* </pre>
*
* <code>int32 success_point_count = 2;</code>
*
* @param value The successPointCount to set.
* @return This builder for chaining.
*/
public Builder setSuccessPointCount(int value) {
successPointCount_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* The number of points that were successfully written.
* </pre>
*
* <code>int32 success_point_count = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearSuccessPointCount() {
successPointCount_ = 0;
onChanged();
return this;
}
private java.util.List<com.google.monitoring.v3.CreateTimeSeriesSummary.Error> errors_ =
java.util.Collections.emptyList();
// Copies errors_ into a fresh ArrayList on first mutation; bit 0 of
// bitField0_ records that the list is privately owned and mutable.
private void ensureErrorsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
errors_ =
new java.util.ArrayList<com.google.monitoring.v3.CreateTimeSeriesSummary.Error>(
errors_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.CreateTimeSeriesSummary.Error,
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder,
com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder>
errorsBuilder_;
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public java.util.List<com.google.monitoring.v3.CreateTimeSeriesSummary.Error> getErrorsList() {
if (errorsBuilder_ == null) {
return java.util.Collections.unmodifiableList(errors_);
} else {
return errorsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public int getErrorsCount() {
if (errorsBuilder_ == null) {
return errors_.size();
} else {
return errorsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error getErrors(int index) {
if (errorsBuilder_ == null) {
return errors_.get(index);
} else {
return errorsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder setErrors(
int index, com.google.monitoring.v3.CreateTimeSeriesSummary.Error value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.set(index, value);
onChanged();
} else {
errorsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder setErrors(
int index, com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.set(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder addErrors(com.google.monitoring.v3.CreateTimeSeriesSummary.Error value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(value);
onChanged();
} else {
errorsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder addErrors(
int index, com.google.monitoring.v3.CreateTimeSeriesSummary.Error value) {
if (errorsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureErrorsIsMutable();
errors_.add(index, value);
onChanged();
} else {
errorsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder addErrors(
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder addErrors(
int index, com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder builderForValue) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.add(index, builderForValue.build());
onChanged();
} else {
errorsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder addAllErrors(
java.lang.Iterable<? extends com.google.monitoring.v3.CreateTimeSeriesSummary.Error>
values) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, errors_);
onChanged();
} else {
errorsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder clearErrors() {
if (errorsBuilder_ == null) {
errors_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
errorsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public Builder removeErrors(int index) {
if (errorsBuilder_ == null) {
ensureErrorsIsMutable();
errors_.remove(index);
onChanged();
} else {
errorsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder getErrorsBuilder(
int index) {
return getErrorsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder getErrorsOrBuilder(
int index) {
if (errorsBuilder_ == null) {
return errors_.get(index);
} else {
return errorsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public java.util.List<? extends com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder>
getErrorsOrBuilderList() {
if (errorsBuilder_ != null) {
return errorsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(errors_);
}
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder addErrorsBuilder() {
return getErrorsFieldBuilder()
.addBuilder(com.google.monitoring.v3.CreateTimeSeriesSummary.Error.getDefaultInstance());
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder addErrorsBuilder(
int index) {
return getErrorsFieldBuilder()
.addBuilder(
index, com.google.monitoring.v3.CreateTimeSeriesSummary.Error.getDefaultInstance());
}
/**
*
*
* <pre>
* The number of points that failed to be written. Order is not guaranteed.
* </pre>
*
* <code>repeated .google.monitoring.v3.CreateTimeSeriesSummary.Error errors = 3;</code>
*/
public java.util.List<com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder>
getErrorsBuilderList() {
return getErrorsFieldBuilder().getBuilderList();
}
// Lazily creates the RepeatedFieldBuilderV3 for `errors`. Once created, the
// field builder owns the element storage, so errors_ is nulled out.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.CreateTimeSeriesSummary.Error,
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder,
com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder>
getErrorsFieldBuilder() {
if (errorsBuilder_ == null) {
errorsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.monitoring.v3.CreateTimeSeriesSummary.Error,
com.google.monitoring.v3.CreateTimeSeriesSummary.Error.Builder,
com.google.monitoring.v3.CreateTimeSeriesSummary.ErrorOrBuilder>(
errors_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
errors_ = null;
}
return errorsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.monitoring.v3.CreateTimeSeriesSummary)
}
// @@protoc_insertion_point(class_scope:google.monitoring.v3.CreateTimeSeriesSummary)
// Shared immutable default instance for this message type.
private static final com.google.monitoring.v3.CreateTimeSeriesSummary DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.monitoring.v3.CreateTimeSeriesSummary();
}
public static com.google.monitoring.v3.CreateTimeSeriesSummary getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Parser singleton: each call constructs a new message via the parsing constructor.
private static final com.google.protobuf.Parser<CreateTimeSeriesSummary> PARSER =
new com.google.protobuf.AbstractParser<CreateTimeSeriesSummary>() {
@java.lang.Override
public CreateTimeSeriesSummary parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new CreateTimeSeriesSummary(input, extensionRegistry);
}
};
public static com.google.protobuf.Parser<CreateTimeSeriesSummary> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateTimeSeriesSummary> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.monitoring.v3.CreateTimeSeriesSummary getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
| |
/*
* Copyright 2014-2021 Web Firm Framework
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* @author WFF
*/
package com.webfirmframework.wffweb.css;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import org.junit.Test;
import com.webfirmframework.wffweb.InvalidValueException;
/**
*
* @author WFF
* @since 1.0.0
*/
/**
 * Tests for {@link MarginTop}: construction, css value handling,
 * percent/length units, the initial/inherit/auto keywords and validation.
 *
 * @author WFF
 * @since 1.0.0
 */
public class MarginTopTest {

    @Test
    public void testMarginTop() {
        // A freshly created MarginTop defaults to 0px.
        final MarginTop top = new MarginTop();
        assertEquals("0px", top.getCssValue());
    }

    @Test
    public void testMarginTopString() {
        final MarginTop initial = new MarginTop(MarginTop.INITIAL);
        assertEquals(MarginTop.INITIAL, initial.getCssValue());

        final MarginTop fiftyPx = new MarginTop("50px");
        assertEquals("50px", fiftyPx.getCssValue());
    }

    @Test
    public void testMarginTopMarginTop() {
        // The copy constructor must carry over the css value.
        final MarginTop original = new MarginTop("50px");
        final MarginTop copy = new MarginTop(original);
        assertEquals("50px", copy.getCssValue());
    }

    @Test
    public void testMarginTopFloat() {
        // The single-float constructor produces a percentage value.
        final MarginTop top = new MarginTop(75);
        assertEquals("75.0%", top.getCssValue());
    }

    @Test
    public void testMarginTopFloatCssLengthUnit() {
        final MarginTop percent = new MarginTop(75, CssLengthUnit.PER);
        assertEquals("75.0%", percent.getCssValue());
        assertEquals(CssLengthUnit.PER, percent.getUnit());
        assertTrue(percent.getValue() == 75);

        final MarginTop inCh = new MarginTop(75, CssLengthUnit.CH);
        assertEquals("75.0ch", inCh.getCssValue());

        final MarginTop inEm = new MarginTop(75, CssLengthUnit.EM);
        assertEquals("75.0em", inEm.getCssValue());
    }

    @Test
    public void testSetPercent() {
        final MarginTop top = new MarginTop();
        top.setPercent(75);
        assertEquals("75.0%", top.getCssValue());
        assertEquals(CssLengthUnit.PER, top.getUnit());
        assertTrue(top.getValue() == 75);
    }

    @Test
    public void testGetCssName() {
        final MarginTop top = new MarginTop();
        assertEquals(CssNameConstants.MARGIN_TOP, top.getCssName());
    }

    @Test
    public void testGetCssValue() {
        final MarginTop top = new MarginTop();

        top.setAsInherit();
        assertEquals(MarginTop.INHERIT, top.getCssValue());

        top.setAsInitial();
        assertEquals(MarginTop.INITIAL, top.getCssValue());
    }

    @Test
    public void testToString() {
        final MarginTop top = new MarginTop(75, CssLengthUnit.EM);
        assertEquals(top.getCssName() + ": 75.0em", top.toString());
    }

    @Test
    public void testGetValue() {
        final MarginTop top = new MarginTop();
        top.setPercent(75);
        assertTrue(top.getValue() == 75);
    }

    @Test
    public void testGetUnit() {
        final MarginTop top = new MarginTop();
        top.setPercent(75);
        assertEquals(CssLengthUnit.PER, top.getUnit());
    }

    @Test
    public void testSetCssValueString() {
        final MarginTop top = new MarginTop();
        top.setCssValue("75%");
        assertEquals("75%", top.getCssValue());
        assertEquals(CssLengthUnit.PER, top.getUnit());
        assertTrue(top.getValue() == 75);
    }

    @Test
    public void testSetAsInitial() {
        // Switching to the `initial` keyword clears the numeric value and unit.
        final MarginTop top = new MarginTop();
        top.setAsInitial();
        assertEquals(MarginTop.INITIAL, top.getCssValue());
        assertNull(top.getValue());
        assertNull(top.getUnit());
    }

    @Test
    public void testSetAsInherit() {
        final MarginTop top = new MarginTop();
        top.setAsInherit();
        assertEquals(MarginTop.INHERIT, top.getCssValue());
        assertNull(top.getValue());
        assertNull(top.getUnit());
    }

    @Test
    public void testSetAsAuto() {
        final MarginTop top = new MarginTop();
        top.setAsAuto();
        assertEquals(MarginTop.AUTO, top.getCssValue());
        assertNull(top.getValue());
        assertNull(top.getUnit());
    }

    @Test
    public void testIsValid() {
        // Bare numbers and malformed strings are rejected; known units pass.
        assertTrue(MarginTop.isValid("45px"));
        assertFalse(MarginTop.isValid("55"));

        assertTrue(MarginTop.isValid("45em"));
        assertFalse(MarginTop.isValid("dfd"));

        assertTrue(MarginTop.isValid("45%"));
        assertFalse(MarginTop.isValid("45 px"));

        assertTrue(MarginTop.isValid("45em"));
        assertFalse(MarginTop.isValid("45sem"));

        assertTrue(MarginTop.isValid("45rem"));

        assertFalse(MarginTop.isValid("--1px"));
    }

    @Test(expected = InvalidValueException.class)
    public void testInvalidValueForSetCssValue() throws Exception {
        // A non-numeric value with a unit-like suffix must be rejected.
        new MarginTop().setCssValue("dfdpx");
    }
}
| |
/*
* Copyright 2016 MovingBlocks
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.terasology.world.chunks.internal;
import com.google.common.base.Objects;
import com.google.common.base.Preconditions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.terasology.math.AABB;
import org.terasology.math.Region3i;
import org.terasology.math.geom.BaseVector3i;
import org.terasology.math.geom.Vector3f;
import org.terasology.math.geom.Vector3i;
import org.terasology.monitoring.chunk.ChunkMonitor;
import org.terasology.protobuf.EntityData;
import org.terasology.rendering.primitives.ChunkMesh;
import org.terasology.world.biomes.Biome;
import org.terasology.world.biomes.BiomeManager;
import org.terasology.world.block.Block;
import org.terasology.world.block.BlockManager;
import org.terasology.world.chunks.Chunk;
import org.terasology.world.chunks.ChunkBlockIterator;
import org.terasology.world.chunks.ChunkConstants;
import org.terasology.world.chunks.blockdata.TeraArray;
import org.terasology.world.chunks.blockdata.TeraDenseArray16Bit;
import org.terasology.world.chunks.blockdata.TeraDenseArray8Bit;
import org.terasology.world.chunks.deflate.TeraDeflator;
import org.terasology.world.chunks.deflate.TeraStandardDeflator;
import org.terasology.world.liquid.LiquidData;
import java.text.DecimalFormat;
/**
* Chunks are the basic components of the world. Each chunk contains a fixed amount of blocks
* determined by its dimensions. They are used to manage the world efficiently and
* to reduce the batch count within the render loop.
* <br><br>
* Chunks are tessellated on creation and saved to vertex arrays. From those VBOs are generated
* which are then used for the actual rendering process.
*/
public class ChunkImpl implements Chunk {
private static final Logger logger = LoggerFactory.getLogger(ChunkImpl.class);
private static final DecimalFormat PERCENT_FORMAT = new DecimalFormat("0.##");
private static final DecimalFormat SIZE_FORMAT = new DecimalFormat("#,###");
private final Vector3i chunkPos = new Vector3i();
private BlockManager blockManager;
private BiomeManager biomeManager;
private TeraArray sunlightData;
private TeraArray sunlightRegenData;
private TeraArray lightData;
private TeraArray blockData;
private volatile TeraArray blockDataSnapshot;
private TeraArray extraData;
private volatile TeraArray extraDataSnapshot;
private TeraArray biomeData;
private volatile TeraArray biomeDataSnapshot;
private AABB aabb;
private Region3i region;
private boolean disposed;
private boolean ready;
private volatile boolean dirty;
private boolean animated;
// Rendering
private ChunkMesh activeMesh;
private ChunkMesh pendingMesh;
private boolean adjacentChunksReady;
// Convenience constructor taking raw chunk coordinates.
public ChunkImpl(int x, int y, int z, BlockManager blockManager, BiomeManager biomeManager) {
this(new Vector3i(x, y, z), blockManager, biomeManager);
}
// Creates a chunk with freshly allocated dense storage: 16-bit block ids plus
// 8-bit extra (liquid) and biome data, sized to the standard chunk dimensions.
public ChunkImpl(Vector3i chunkPos, BlockManager blockManager, BiomeManager biomeManager) {
this(chunkPos, new TeraDenseArray16Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z),
new TeraDenseArray8Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z),
new TeraDenseArray8Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z),
blockManager, biomeManager);
}
public ChunkImpl(Vector3i chunkPos, TeraArray blocks, TeraArray liquid, TeraArray biome, BlockManager blockManager,
BiomeManager biomeManager) {
this.chunkPos.set(Preconditions.checkNotNull(chunkPos));
this.blockData = Preconditions.checkNotNull(blocks);
this.extraData = Preconditions.checkNotNull(liquid);
this.biomeData = Preconditions.checkNotNull(biome);
sunlightData = new TeraDenseArray8Bit(getChunkSizeX(), getChunkSizeY(), getChunkSizeZ());
sunlightRegenData = new TeraDenseArray8Bit(getChunkSizeX(), getChunkSizeY(), getChunkSizeZ());
lightData = new TeraDenseArray8Bit(getChunkSizeX(), getChunkSizeY(), getChunkSizeZ());
dirty = true;
this.blockManager = blockManager;
this.biomeManager = biomeManager;
region = Region3i.createFromMinAndSize(new Vector3i(chunkPos.x * ChunkConstants.SIZE_X, chunkPos.y * ChunkConstants.SIZE_Y, chunkPos.z * ChunkConstants.SIZE_Z),
ChunkConstants.CHUNK_SIZE);
ChunkMonitor.fireChunkCreated(this);
}
@Override
public Vector3i getPosition() {
return new Vector3i(chunkPos);
}
@Override
public boolean isDirty() {
return dirty;
}
@Override
public void setDirty(boolean dirty) {
this.dirty = dirty;
}
@Override
public int getEstimatedMemoryConsumptionInBytes() {
return blockData.getEstimatedMemoryConsumptionInBytes()
+ sunlightData.getEstimatedMemoryConsumptionInBytes()
+ sunlightRegenData.getEstimatedMemoryConsumptionInBytes()
+ lightData.getEstimatedMemoryConsumptionInBytes()
+ extraData.getEstimatedMemoryConsumptionInBytes()
+ biomeData.getEstimatedMemoryConsumptionInBytes();
}
@Override
public final Block getBlock(BaseVector3i pos) {
short id = (short) blockData.get(pos.x(), pos.y(), pos.z());
return blockManager.getBlock(id);
}
@Override
public final Block getBlock(int x, int y, int z) {
short id = (short) blockData.get(x, y, z);
return blockManager.getBlock(id);
}
@Override
public Block setBlock(int x, int y, int z, Block block) {
if (blockData == blockDataSnapshot) {
blockData = blockData.copy();
}
int oldValue = blockData.set(x, y, z, block.getId());
if (oldValue != block.getId()) {
if (!block.isLiquid()) {
setLiquid(x, y, z, new LiquidData());
}
}
return blockManager.getBlock((short) oldValue);
}
@Override
public Block setBlock(BaseVector3i pos, Block block) {
return setBlock(pos.x(), pos.y(), pos.z(), block);
}
@Override
public byte getSunlight(BaseVector3i pos) {
return getSunlight(pos.x(), pos.y(), pos.z());
}
@Override
public byte getSunlight(int x, int y, int z) {
return (byte) sunlightData.get(x, y, z);
}
@Override
public boolean setSunlight(BaseVector3i pos, byte amount) {
return setSunlight(pos.x(), pos.y(), pos.z(), amount);
}
@Override
public boolean setSunlight(int x, int y, int z, byte amount) {
Preconditions.checkArgument(amount >= 0 && amount <= ChunkConstants.MAX_SUNLIGHT);
return sunlightData.set(x, y, z, amount) != amount;
}
@Override
public byte getSunlightRegen(BaseVector3i pos) {
return getSunlightRegen(pos.x(), pos.y(), pos.z());
}
@Override
public byte getSunlightRegen(int x, int y, int z) {
return (byte) sunlightRegenData.get(x, y, z);
}
@Override
public boolean setSunlightRegen(BaseVector3i pos, byte amount) {
return setSunlightRegen(pos.x(), pos.y(), pos.z(), amount);
}
@Override
public boolean setSunlightRegen(int x, int y, int z, byte amount) {
Preconditions.checkArgument(amount >= 0 && amount <= ChunkConstants.MAX_SUNLIGHT_REGEN);
return sunlightRegenData.set(x, y, z, amount) != amount;
}
@Override
public byte getLight(BaseVector3i pos) {
return getLight(pos.x(), pos.y(), pos.z());
}
@Override
public byte getLight(int x, int y, int z) {
return (byte) lightData.get(x, y, z);
}
@Override
public boolean setLight(BaseVector3i pos, byte amount) {
return setLight(pos.x(), pos.y(), pos.z(), amount);
}
@Override
public boolean setLight(int x, int y, int z, byte amount) {
Preconditions.checkArgument(amount >= 0 && amount <= ChunkConstants.MAX_LIGHT);
return lightData.set(x, y, z, amount) != amount;
}
@Override
public void setLiquid(BaseVector3i pos, LiquidData state) {
setLiquid(pos.x(), pos.y(), pos.z(), state);
}
@Override
public void setLiquid(int x, int y, int z, LiquidData newState) {
byte newValue = newState.toByte();
if (extraData == extraDataSnapshot) {
extraData = extraData.copy();
}
extraData.set(x, y, z, newValue);
}
@Override
public LiquidData getLiquid(BaseVector3i pos) {
return getLiquid(pos.x(), pos.y(), pos.z());
}
@Override
public LiquidData getLiquid(int x, int y, int z) {
return new LiquidData((byte) extraData.get(x, y, z));
}
@Override
public Biome getBiome(int x, int y, int z) {
return biomeManager.getBiomeByShortId((short) biomeData.get(x, y, z));
}
@Override
public Biome getBiome(BaseVector3i pos) {
return getBiome(pos.x(), pos.y(), pos.z());
}
@Override
public Biome setBiome(int x, int y, int z, Biome biome) {
if (biomeData == biomeDataSnapshot) {
biomeData = biomeData.copy();
}
short shortId = biomeManager.getBiomeShortId(biome);
short previousShortId = (short) biomeData.set(x, y, z, shortId);
return biomeManager.getBiomeByShortId(previousShortId);
}
@Override
public Biome setBiome(BaseVector3i pos, Biome biome) {
return setBiome(pos.x(), pos.y(), pos.z(), biome);
}
@Override
public Vector3i getChunkWorldOffset() {
return new Vector3i(getChunkWorldOffsetX(), getChunkWorldOffsetY(), getChunkWorldOffsetZ());
}
@Override
public int getChunkWorldOffsetX() {
return chunkPos.x * getChunkSizeX();
}
@Override
public int getChunkWorldOffsetY() {
return chunkPos.y * getChunkSizeY();
}
@Override
public int getChunkWorldOffsetZ() {
return chunkPos.z * getChunkSizeZ();
}
@Override
public Vector3i chunkToWorldPosition(BaseVector3i blockPos) {
return chunkToWorldPosition(blockPos.x(), blockPos.y(), blockPos.z());
}
@Override
public Vector3i chunkToWorldPosition(int x, int y, int z) {
return new Vector3i(chunkToWorldPositionX(x), chunkToWorldPositionY(y), chunkToWorldPositionZ(z));
}
@Override
public int chunkToWorldPositionX(int x) {
return x + getChunkWorldOffsetX();
}
@Override
public int chunkToWorldPositionY(int y) {
return y + getChunkWorldOffsetY();
}
@Override
public int chunkToWorldPositionZ(int z) {
return z + getChunkWorldOffsetZ();
}
@Override
public AABB getAABB() {
if (aabb == null) {
Vector3f min = getChunkWorldOffset().toVector3f();
Vector3f max = ChunkConstants.CHUNK_SIZE.toVector3f();
max.add(min);
aabb = AABB.createMinMax(min, max);
}
return aabb;
}
@Override
public void deflate() {
final TeraDeflator def = new TeraStandardDeflator();
if (logger.isDebugEnabled()) {
int blocksSize = blockData.getEstimatedMemoryConsumptionInBytes();
int sunlightSize = sunlightData.getEstimatedMemoryConsumptionInBytes();
int sunlightRegenSize = sunlightRegenData.getEstimatedMemoryConsumptionInBytes();
int lightSize = lightData.getEstimatedMemoryConsumptionInBytes();
int liquidSize = extraData.getEstimatedMemoryConsumptionInBytes();
int biomeSize = biomeData.getEstimatedMemoryConsumptionInBytes();
int totalSize = blocksSize + sunlightRegenSize + sunlightSize + lightSize + liquidSize + biomeSize;
blockData = def.deflate(blockData);
lightData = def.deflate(lightData);
extraData = def.deflate(extraData);
biomeData = def.deflate(biomeData);
int blocksReduced = blockData.getEstimatedMemoryConsumptionInBytes();
int lightReduced = lightData.getEstimatedMemoryConsumptionInBytes();
int liquidReduced = extraData.getEstimatedMemoryConsumptionInBytes();
int biomeReduced = biomeData.getEstimatedMemoryConsumptionInBytes();
int totalReduced = blocksReduced + sunlightRegenSize + sunlightSize + lightReduced + liquidReduced + biomeReduced;
double blocksPercent = 100d - (100d / blocksSize * blocksReduced);
double lightPercent = 100d - (100d / lightSize * lightReduced);
double liquidPercent = 100d - (100d / liquidSize * liquidReduced);
double biomePercent = 100d - (100d / biomeSize * biomeReduced);
double totalPercent = 100d - (100d / totalSize * totalReduced);
logger.debug("chunk {}: " +
"size-before: {} " +
"bytes, size-after: {} " +
"bytes, total-deflated-by: {}%, " +
"blocks-deflated-by={}%, " +
"light-deflated-by={}%, " +
"liquid-deflated-by={}%, " +
"biome-deflated-by={}%",
chunkPos,
SIZE_FORMAT.format(totalSize),
SIZE_FORMAT.format(totalReduced),
PERCENT_FORMAT.format(totalPercent),
PERCENT_FORMAT.format(blocksPercent),
PERCENT_FORMAT.format(lightPercent),
PERCENT_FORMAT.format(liquidPercent),
PERCENT_FORMAT.format(biomePercent));
ChunkMonitor.fireChunkDeflated(this, totalSize, totalReduced);
} else {
final int oldSize = getEstimatedMemoryConsumptionInBytes();
blockData = def.deflate(blockData);
lightData = def.deflate(lightData);
extraData = def.deflate(extraData);
biomeData = def.deflate(biomeData);
ChunkMonitor.fireChunkDeflated(this, oldSize, getEstimatedMemoryConsumptionInBytes());
}
}
@Override
public void deflateSunlight() {
final TeraDeflator def = new TeraStandardDeflator();
if (logger.isDebugEnabled()) {
int blocksSize = blockData.getEstimatedMemoryConsumptionInBytes();
int sunlightSize = sunlightData.getEstimatedMemoryConsumptionInBytes();
int sunlightRegenSize = sunlightRegenData.getEstimatedMemoryConsumptionInBytes();
int lightSize = lightData.getEstimatedMemoryConsumptionInBytes();
int liquidSize = extraData.getEstimatedMemoryConsumptionInBytes();
int totalSize = blocksSize + sunlightRegenSize + sunlightSize + lightSize + liquidSize;
sunlightData = def.deflate(sunlightData);
sunlightRegenData = def.deflate(sunlightRegenData);
int sunlightReduced = sunlightData.getEstimatedMemoryConsumptionInBytes();
int sunlightRegenReduced = sunlightRegenData.getEstimatedMemoryConsumptionInBytes();
int totalReduced = blocksSize + sunlightRegenReduced + sunlightReduced + lightSize + liquidSize;
double sunlightPercent = 100d - (100d / sunlightSize * sunlightReduced);
double sunlightRegenPercent = 100d - (100d / sunlightRegenSize * sunlightRegenReduced);
double totalPercent = 100d - (100d / totalSize * totalReduced);
logger.debug("chunk {}: " +
"size-before: {} " +
"bytes, size-after: {} " +
"bytes, total-deflated-by: {}%, " +
"sunlight-deflated-by={}%, " +
"sunlight-regen-deflated-by={}%, " +
chunkPos,
SIZE_FORMAT.format(totalSize),
SIZE_FORMAT.format(totalReduced),
PERCENT_FORMAT.format(totalPercent),
PERCENT_FORMAT.format(sunlightPercent),
PERCENT_FORMAT.format(sunlightRegenPercent));
ChunkMonitor.fireChunkDeflated(this, totalSize, totalReduced);
} else {
final int oldSize = getEstimatedMemoryConsumptionInBytes();
sunlightData = def.deflate(sunlightData);
sunlightRegenData = def.deflate(sunlightRegenData);
ChunkMonitor.fireChunkDeflated(this, oldSize, getEstimatedMemoryConsumptionInBytes());
}
}
@Override
public String toString() {
return "Chunk" + chunkPos.toString();
}
@Override
public int hashCode() {
return Objects.hashCode(chunkPos);
}
@Override
public boolean equals(Object obj) {
// According to hashCode() two ChunkImpls are not equal when their
// position differs. The default equals() compares object instances.
// The same instance has the same chunkPos, so this is valid.
return super.equals(obj);
}
@Override
public void setMesh(ChunkMesh mesh) {
this.activeMesh = mesh;
}
@Override
public void setPendingMesh(ChunkMesh mesh) {
this.pendingMesh = mesh;
}
@Override
public void setAnimated(boolean animated) {
this.animated = animated;
}
@Override
public boolean isAnimated() {
return animated;
}
@Override
public boolean hasMesh() {
return activeMesh != null;
}
@Override
public boolean hasPendingMesh() {
return pendingMesh != null;
}
@Override
public ChunkMesh getMesh() {
return activeMesh;
}
@Override
public ChunkMesh getPendingMesh() {
return pendingMesh;
}
@Override
public void markReady() {
ready = true;
}
@Override
public void prepareForReactivation() {
if (disposed) {
disposed = false;
sunlightData = new TeraDenseArray8Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z);
sunlightRegenData = new TeraDenseArray8Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z);
lightData = new TeraDenseArray8Bit(ChunkConstants.SIZE_X, ChunkConstants.SIZE_Y, ChunkConstants.SIZE_Z);
}
}
@Override
public void dispose() {
disposed = true;
ready = false;
disposeMesh();
/*
* Explicitly do not clear data, so that background threads that work with the chunk can finish.
*/
ChunkMonitor.fireChunkDisposed(this);
}
@Override
public void disposeMesh() {
if (activeMesh != null) {
activeMesh.dispose();
activeMesh = null;
}
}
@Override
public boolean isReady() {
return ready;
}
@Override
public boolean isDisposed() {
return disposed;
}
@Override
public Region3i getRegion() {
return region;
}
@Override
public int getChunkSizeX() {
return ChunkConstants.SIZE_X;
}
@Override
public int getChunkSizeY() {
return ChunkConstants.SIZE_Y;
}
@Override
public int getChunkSizeZ() {
return ChunkConstants.SIZE_Z;
}
@Override
public ChunkBlockIterator getBlockIterator() {
return new ChunkBlockIteratorImpl(blockManager, getChunkWorldOffset(), blockData);
}
@Override
public EntityData.ChunkStore.Builder encode() {
return ChunkSerializer.encode(chunkPos, blockData, extraData, biomeData);
}
/**
* Calling this method results in a (cheap) snapshot to be taken of the current state of the chunk.
* This snapshot can then be obtained and rleased by calling {@link #encodeAndReleaseSnapshot()}.
*/
public void createSnapshot() {
this.blockDataSnapshot = this.blockData;
this.extraDataSnapshot = this.extraData;
this.biomeDataSnapshot = this.biomeData;
}
/**
* This method can only be
* called once after {@link #createSnapshot()} has been called. It can be called from a different thread than
* {@link #createSnapshot()}, but it must be made sure that neither method is still running when the other gets
* called.
*
* @return an encoded version of the snapshot taken with {@link #createSnapshot()}.
*/
public EntityData.ChunkStore.Builder encodeAndReleaseSnapshot() {
EntityData.ChunkStore.Builder result = ChunkSerializer.encode(chunkPos, blockDataSnapshot, extraDataSnapshot, biomeDataSnapshot);
this.blockDataSnapshot = null;
this.extraDataSnapshot = null;
this.biomeDataSnapshot = null;
return result;
}
}
| |
package org.apache.roller.ui.authoring.struts.forms;
import org.apache.roller.RollerException;
import java.util.Locale;
/**
* Generated by XDoclet/ejbdoclet/strutsform. This class can be further processed with XDoclet/webdoclet/strutsconfigxml and XDoclet/webdoclet/strutsvalidationxml.
*
* @struts.form name="weblogEntryForm"
*/
public class WeblogEntryForm extends org.apache.struts.action.ActionForm
        implements java.io.Serializable {

    // --- Entry identity, categorization and ownership ---
    protected java.lang.String id;
    protected org.apache.roller.pojos.WeblogCategoryData category;
    protected java.util.List categories;
    protected org.apache.roller.pojos.WebsiteData website;
    protected org.apache.roller.pojos.UserData creator;

    // --- Entry content ---
    protected java.lang.String title;
    protected java.lang.String summary;
    protected java.lang.String text;
    protected java.lang.String contentType;
    protected java.lang.String contentSrc;
    protected java.lang.String anchor;
    protected java.util.Set entryAttributes;

    // --- Publication state ---
    protected java.sql.Timestamp pubTime;
    protected java.sql.Timestamp updateTime;
    protected java.lang.String status;
    protected java.lang.String link;
    protected java.lang.String plugins;
    protected java.lang.Boolean allowComments;
    protected java.lang.Integer commentDays;
    protected java.lang.Boolean rightToLeft;
    protected java.lang.Boolean pinnedToMain;
    protected java.lang.String locale;

    // --- Derived / presentation-only values ---
    protected boolean commentsStillAllowed;
    protected java.util.List comments;
    protected int commentCount;
    protected java.util.List referers;
    protected java.lang.String permalink;
    protected java.lang.String permaLink;
    protected java.lang.String commentsLink;
    protected java.lang.String displayTitle;
    protected java.lang.String rss09xDescription;
    protected java.util.List pluginsList;
    protected boolean draft;
    protected boolean pending;
    protected boolean published;
    protected java.lang.String transformedText;
    protected java.lang.String transformedSummary;
    protected java.lang.String displayContent;

    /** Default empty constructor. */
    public WeblogEntryForm() {}

    /** Populates the form from the given weblog entry data object. */
    public WeblogEntryForm(org.apache.roller.pojos.WeblogEntryData dataHolder, java.util.Locale locale) throws RollerException {
        copyFrom(dataHolder, locale);
    }

    // --- Property accessors (simple bean getters/setters) ---

    public java.lang.String getId() {
        return id;
    }

    public void setId(java.lang.String id) {
        this.id = id;
    }

    public org.apache.roller.pojos.WeblogCategoryData getCategory() {
        return category;
    }

    public void setCategory(org.apache.roller.pojos.WeblogCategoryData category) {
        this.category = category;
    }

    public java.util.List getCategories() {
        return categories;
    }

    public void setCategories(java.util.List categories) {
        this.categories = categories;
    }

    public org.apache.roller.pojos.WebsiteData getWebsite() {
        return website;
    }

    public void setWebsite(org.apache.roller.pojos.WebsiteData website) {
        this.website = website;
    }

    public org.apache.roller.pojos.UserData getCreator() {
        return creator;
    }

    public void setCreator(org.apache.roller.pojos.UserData creator) {
        this.creator = creator;
    }

    public java.lang.String getTitle() {
        return title;
    }

    public void setTitle(java.lang.String title) {
        this.title = title;
    }

    public java.lang.String getSummary() {
        return summary;
    }

    public void setSummary(java.lang.String summary) {
        this.summary = summary;
    }

    public java.lang.String getText() {
        return text;
    }

    public void setText(java.lang.String text) {
        this.text = text;
    }

    public java.lang.String getContentType() {
        return contentType;
    }

    public void setContentType(java.lang.String contentType) {
        this.contentType = contentType;
    }

    public java.lang.String getContentSrc() {
        return contentSrc;
    }

    public void setContentSrc(java.lang.String contentSrc) {
        this.contentSrc = contentSrc;
    }

    public java.lang.String getAnchor() {
        return anchor;
    }

    public void setAnchor(java.lang.String anchor) {
        this.anchor = anchor;
    }

    public java.util.Set getEntryAttributes() {
        return entryAttributes;
    }

    public void setEntryAttributes(java.util.Set entryAttributes) {
        this.entryAttributes = entryAttributes;
    }

    public java.sql.Timestamp getPubTime() {
        return pubTime;
    }

    public void setPubTime(java.sql.Timestamp pubTime) {
        this.pubTime = pubTime;
    }

    public java.sql.Timestamp getUpdateTime() {
        return updateTime;
    }

    public void setUpdateTime(java.sql.Timestamp updateTime) {
        this.updateTime = updateTime;
    }

    public java.lang.String getStatus() {
        return status;
    }

    public void setStatus(java.lang.String status) {
        this.status = status;
    }

    public java.lang.String getLink() {
        return link;
    }

    public void setLink(java.lang.String link) {
        this.link = link;
    }

    public java.lang.String getPlugins() {
        return plugins;
    }

    public void setPlugins(java.lang.String plugins) {
        this.plugins = plugins;
    }

    public java.lang.Boolean getAllowComments() {
        return allowComments;
    }

    public void setAllowComments(java.lang.Boolean allowComments) {
        this.allowComments = allowComments;
    }

    public java.lang.Integer getCommentDays() {
        return commentDays;
    }

    public void setCommentDays(java.lang.Integer commentDays) {
        this.commentDays = commentDays;
    }

    public java.lang.Boolean getRightToLeft() {
        return rightToLeft;
    }

    public void setRightToLeft(java.lang.Boolean rightToLeft) {
        this.rightToLeft = rightToLeft;
    }

    public java.lang.Boolean getPinnedToMain() {
        return pinnedToMain;
    }

    public void setPinnedToMain(java.lang.Boolean pinnedToMain) {
        this.pinnedToMain = pinnedToMain;
    }

    public java.lang.String getLocale() {
        return locale;
    }

    public void setLocale(java.lang.String locale) {
        this.locale = locale;
    }

    public boolean getCommentsStillAllowed() {
        return commentsStillAllowed;
    }

    public void setCommentsStillAllowed(boolean commentsStillAllowed) {
        this.commentsStillAllowed = commentsStillAllowed;
    }

    public java.util.List getComments() {
        return comments;
    }

    public void setComments(java.util.List comments) {
        this.comments = comments;
    }

    public int getCommentCount() {
        return commentCount;
    }

    public void setCommentCount(int commentCount) {
        this.commentCount = commentCount;
    }

    public java.util.List getReferers() {
        return referers;
    }

    public void setReferers(java.util.List referers) {
        this.referers = referers;
    }

    public java.lang.String getPermalink() {
        return permalink;
    }

    public void setPermalink(java.lang.String permalink) {
        this.permalink = permalink;
    }

    public java.lang.String getPermaLink() {
        return permaLink;
    }

    public void setPermaLink(java.lang.String permaLink) {
        this.permaLink = permaLink;
    }

    public java.lang.String getCommentsLink() {
        return commentsLink;
    }

    public void setCommentsLink(java.lang.String commentsLink) {
        this.commentsLink = commentsLink;
    }

    public java.lang.String getDisplayTitle() {
        return displayTitle;
    }

    public void setDisplayTitle(java.lang.String displayTitle) {
        this.displayTitle = displayTitle;
    }

    public java.lang.String getRss09xDescription() {
        return rss09xDescription;
    }

    public void setRss09xDescription(java.lang.String rss09xDescription) {
        this.rss09xDescription = rss09xDescription;
    }

    public java.util.List getPluginsList() {
        return pluginsList;
    }

    public void setPluginsList(java.util.List pluginsList) {
        this.pluginsList = pluginsList;
    }

    public boolean isDraft() {
        return draft;
    }

    public void setDraft(boolean draft) {
        this.draft = draft;
    }

    public boolean isPending() {
        return pending;
    }

    public void setPending(boolean pending) {
        this.pending = pending;
    }

    public boolean isPublished() {
        return published;
    }

    public void setPublished(boolean published) {
        this.published = published;
    }

    public java.lang.String getTransformedText() {
        return transformedText;
    }

    public void setTransformedText(java.lang.String transformedText) {
        this.transformedText = transformedText;
    }

    public java.lang.String getTransformedSummary() {
        return transformedSummary;
    }

    public void setTransformedSummary(java.lang.String transformedSummary) {
        this.transformedSummary = transformedSummary;
    }

    public java.lang.String getDisplayContent() {
        return displayContent;
    }

    public void setDisplayContent(java.lang.String displayContent) {
        this.displayContent = displayContent;
    }

    /**
     * Copy values from this form bean to the specified data object.
     * Only copies primitive types (Boolean, boolean, String, Integer, int, Timestamp, Date);
     * object-valued properties (category, website, creator, collections) are left untouched.
     */
    public void copyTo(org.apache.roller.pojos.WeblogEntryData dataHolder, Locale locale) throws RollerException {
        dataHolder.setId(this.id);
        dataHolder.setTitle(this.title);
        dataHolder.setSummary(this.summary);
        dataHolder.setText(this.text);
        dataHolder.setContentType(this.contentType);
        dataHolder.setContentSrc(this.contentSrc);
        dataHolder.setAnchor(this.anchor);
        dataHolder.setPubTime(this.pubTime);
        dataHolder.setUpdateTime(this.updateTime);
        dataHolder.setStatus(this.status);
        dataHolder.setLink(this.link);
        dataHolder.setPlugins(this.plugins);
        dataHolder.setAllowComments(this.allowComments);
        dataHolder.setCommentDays(this.commentDays);
        dataHolder.setRightToLeft(this.rightToLeft);
        dataHolder.setPinnedToMain(this.pinnedToMain);
        dataHolder.setLocale(this.locale);
        dataHolder.setCommentsStillAllowed(this.commentsStillAllowed);
        dataHolder.setCommentCount(this.commentCount);
        dataHolder.setPermalink(this.permalink);
        dataHolder.setPermaLink(this.permaLink);
        dataHolder.setCommentsLink(this.commentsLink);
        dataHolder.setDisplayTitle(this.displayTitle);
        dataHolder.setRss09xDescription(this.rss09xDescription);
        dataHolder.setDraft(this.draft);
        dataHolder.setPending(this.pending);
        dataHolder.setPublished(this.published);
        dataHolder.setTransformedText(this.transformedText);
        dataHolder.setTransformedSummary(this.transformedSummary);
        dataHolder.setDisplayContent(this.displayContent);
    }

    /**
     * Copy values from the specified data object to this form bean.
     * Includes all property types.
     */
    public void copyFrom(org.apache.roller.pojos.WeblogEntryData dataHolder, Locale locale) throws RollerException {
        this.id = dataHolder.getId();
        this.title = dataHolder.getTitle();
        this.summary = dataHolder.getSummary();
        this.text = dataHolder.getText();
        this.contentType = dataHolder.getContentType();
        this.contentSrc = dataHolder.getContentSrc();
        this.anchor = dataHolder.getAnchor();
        this.pubTime = dataHolder.getPubTime();
        this.updateTime = dataHolder.getUpdateTime();
        this.status = dataHolder.getStatus();
        this.link = dataHolder.getLink();
        this.plugins = dataHolder.getPlugins();
        this.allowComments = dataHolder.getAllowComments();
        this.commentDays = dataHolder.getCommentDays();
        this.rightToLeft = dataHolder.getRightToLeft();
        this.pinnedToMain = dataHolder.getPinnedToMain();
        this.locale = dataHolder.getLocale();
        this.commentsStillAllowed = dataHolder.getCommentsStillAllowed();
        this.commentCount = dataHolder.getCommentCount();
        this.permalink = dataHolder.getPermalink();
        this.permaLink = dataHolder.getPermaLink();
        this.commentsLink = dataHolder.getCommentsLink();
        this.displayTitle = dataHolder.getDisplayTitle();
        this.rss09xDescription = dataHolder.getRss09xDescription();
        this.draft = dataHolder.isDraft();
        this.pending = dataHolder.isPending();
        this.published = dataHolder.isPublished();
        this.transformedText = dataHolder.getTransformedText();
        this.transformedSummary = dataHolder.getTransformedSummary();
        this.displayContent = dataHolder.getDisplayContent();
    }

    /**
     * Clears the primitive-typed properties back to their defaults; shared by
     * both reset(...) overloads. Object-valued properties such as category,
     * website, creator and the collections are intentionally not touched.
     */
    public void doReset(
            org.apache.struts.action.ActionMapping mapping,
            javax.servlet.ServletRequest request) {
        this.id = null;
        this.title = null;
        this.summary = null;
        this.text = null;
        this.contentType = null;
        this.contentSrc = null;
        this.anchor = null;
        this.pubTime = null;
        this.updateTime = null;
        this.status = null;
        this.link = null;
        this.plugins = null;
        this.allowComments = null;
        this.commentDays = null;
        this.rightToLeft = null;
        this.pinnedToMain = null;
        this.locale = null;
        this.commentsStillAllowed = false;
        this.commentCount = 0;
        this.permalink = null;
        this.permaLink = null;
        this.commentsLink = null;
        this.displayTitle = null;
        this.rss09xDescription = null;
        this.draft = false;
        this.pending = false;
        this.published = false;
        this.transformedText = null;
        this.transformedSummary = null;
        this.displayContent = null;
    }

    /** Struts reset hook for generic servlet requests; delegates to doReset. */
    public void reset(
            org.apache.struts.action.ActionMapping mapping,
            javax.servlet.ServletRequest request) {
        doReset(mapping, request);
    }

    /** Struts reset hook for HTTP requests; delegates to doReset. */
    public void reset(
            org.apache.struts.action.ActionMapping mapping,
            javax.servlet.http.HttpServletRequest request) {
        doReset(mapping, request);
    }
}
| |
/*
* Copyright 2014 Soichiro Kashima
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.snippet.app;
import android.app.Activity;
import android.hardware.Camera;
import android.hardware.Camera.AutoFocusCallback;
import android.os.Build;
import android.os.Bundle;
import android.util.DisplayMetrics;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.Window;
import android.view.WindowManager;
import android.widget.RelativeLayout;
import android.widget.Toast;
import com.snippet.R;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
public class CameraActivity extends Activity {
private static final String TAG = CameraActivity.class.getSimpleName();
private Camera myCamera;
private boolean mClicked;
@SuppressWarnings("deprecation")
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_camera);
SurfaceView mySurfaceView = (SurfaceView) findViewById(R.id.surface);
SurfaceHolder holder = mySurfaceView.getHolder();
holder.addCallback(mSurfaceListener);
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.HONEYCOMB) {
holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
findViewById(R.id.button_take_picture).setOnClickListener(
new View.OnClickListener() {
@Override
public void onClick(final View v) {
if (myCamera != null && !mClicked) {
mClicked = true;
autoFocus();
}
}
});
}
@Override
protected void onPause() {
super.onPause();
releaseCamera();
}
private SurfaceHolder.Callback mSurfaceListener = new SurfaceHolder.Callback() {
public void surfaceCreated(SurfaceHolder holder) {
if (myCamera == null) {
myCamera = Camera.open();
}
try {
myCamera.setPreviewDisplay(holder);
} catch (IOException e) {
e.printStackTrace();
}
}
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
Camera.Parameters parameters = myCamera.getParameters();
// Determine picture size
final int minArea = 2000000;
int area = 0;
Camera.Size preferredSize = null;
for (Camera.Size size : parameters.getSupportedPictureSizes()) {
int currentSize = size.width * size.height;
if (minArea <= currentSize) {
if (area == 0 || currentSize < area) {
area = currentSize;
preferredSize = size;
}
}
}
if (preferredSize != null) {
parameters.setPictureSize(preferredSize.width, preferredSize.height);
}
// Determine preview size
double aspectRatioPicture = (double) parameters.getPictureSize().width
/ parameters.getPictureSize().height;
double minAspectRatioDiff = Double.MAX_VALUE;
List<Camera.Size> availableSizes = new ArrayList<Camera.Size>();
for (Camera.Size size : parameters.getSupportedPreviewSizes()) {
double aspectRatio = (double) size.width / size.height;
double aspectRatioDiff = Math.abs(aspectRatio - aspectRatioPicture);
if (aspectRatioDiff < minAspectRatioDiff) {
minAspectRatioDiff = aspectRatioDiff;
availableSizes.clear();
availableSizes.add(size);
} else if (aspectRatioDiff == minAspectRatioDiff) {
availableSizes.add(size);
}
}
int maxArea = 0;
Camera.Size preferredPreviewSize = null;
area = 0;
View parent = findViewById(R.id.parent);
int w = parent.getWidth();
int h = parent.getHeight();
for (Camera.Size size : availableSizes) {
area = size.width * size.height;
if (size.width <= w && size.height <= h && maxArea < area) {
maxArea = area;
preferredPreviewSize = size;
}
}
if (preferredPreviewSize != null) {
parameters.setPreviewSize(preferredPreviewSize.width,
preferredPreviewSize.height);
}
// Set focus mode
String focusMode = parameters.getFocusMode();
if (!Camera.Parameters.FOCUS_MODE_AUTO.equals(focusMode)
&& !Camera.Parameters.FOCUS_MODE_MACRO.equals(focusMode)) {
for (String mode : parameters.getSupportedFocusModes()) {
if (Camera.Parameters.FOCUS_MODE_MACRO.equals(mode)) {
parameters.setFocusMode(mode);
break;
}
}
if (!Camera.Parameters.FOCUS_MODE_MACRO.equals(parameters.getFocusMode())) {
for (String mode : parameters.getSupportedFocusModes()) {
if (Camera.Parameters.FOCUS_MODE_AUTO.equals(mode)) {
parameters.setFocusMode(mode);
break;
}
}
}
}
myCamera.setParameters(parameters);
parameters = myCamera.getParameters();
Log.i("TEST", "PictureSize: "
+ parameters.getPictureSize().width
+ ", "
+ parameters.getPictureSize().height
+ " AspectRatio: "
+ ((double) parameters.getPictureSize().width / parameters
.getPictureSize().height));
Log.i("TEST", "PreviewSize: "
+ parameters.getPreviewSize().width
+ ", "
+ parameters.getPreviewSize().height
+ " AspectRatio: "
+ ((double) parameters.getPreviewSize().width / parameters
.getPreviewSize().height));
Log.i("TEST", "FocusMode: " + parameters.getFocusMode());
View surface = findViewById(R.id.surface);
RelativeLayout.LayoutParams params = (RelativeLayout.LayoutParams) surface.getLayoutParams();
if (Build.VERSION.SDK_INT <= Build.VERSION_CODES.GINGERBREAD_MR1) {
params.height = parameters.getPreviewSize().height;
params.width = parameters.getPreviewSize().width;
params.rightMargin = 0;
params.addRule(RelativeLayout.CENTER_IN_PARENT);
} else {
DisplayMetrics metrics = getResources().getDisplayMetrics();
// surface
float ws = metrics.widthPixels -
getResources().getDimensionPixelSize(R.dimen.height_element);
float hs = metrics.heightPixels;
float rs = ws / hs;
// preview
float wp = parameters.getPreviewSize().width;
float hp = parameters.getPreviewSize().height;
float rp = parameters.getPreviewSize().width / hp;
if (rp < rs) {
Log.i(TAG, "Surface is wider than preview");
params.width = (int) ws;
params.height = (int) (hp * ws / wp);
params.topMargin = (int) ((hs - params.height) / 2);
params.bottomMargin = (int) ((hs - params.height) / 2);
} else {
Log.i(TAG, "Preview is wider than surface");
params.width = (int) (wp * hs / hp);
params.height = (int) hs;
params.leftMargin = (int) ((ws - params.width) / 2);
params.rightMargin = (int) ((ws - params.width) / 2);
}
Log.i(TAG, "metrics.widthPixels=" + metrics.widthPixels);
Log.i(TAG, "metrics.heightPixels=" + metrics.heightPixels);
Log.i(TAG, "ws=" + ws);
Log.i(TAG, "hs=" + hs);
Log.i(TAG, "wp=" + wp);
Log.i(TAG, "hp=" + hp);
Log.i(TAG, "params.width=" + params.width);
Log.i(TAG, "params.height=" + params.height);
Log.i(TAG, "params.leftMargin=" + params.leftMargin);
Log.i(TAG, "params.rightMargin=" + params.rightMargin);
}
surface.setLayoutParams(params);
surface.requestLayout();
myCamera.startPreview();
}
public void surfaceDestroyed(SurfaceHolder holder) {
    // The preview surface is going away: stop the preview and free the
    // camera so other applications can acquire it.
    releaseCamera();
}
};
/**
 * Stops the preview and releases the camera hardware, clearing the field
 * so repeated calls are harmless. No-op when no camera is held.
 */
private void releaseCamera() {
    if (myCamera == null) {
        return;
    }
    myCamera.stopPreview();
    myCamera.release();
    myCamera = null;
}
private void autoFocus() {
    // Kick off a single autofocus pass; the shot is taken from the callback.
    myCamera.autoFocus(new AutoFocusCallback() {
        @Override
        public void onAutoFocus(boolean success, Camera camera) {
            // NOTE(review): 'success' is deliberately ignored — the picture
            // is taken even when focusing failed.
            takePicture();
        }
    });
}
private void takePicture() {
    // Shutter callback is a no-op but apparently required by this flow (see
    // myShutterCallback); raw callback unused (null); JPEG bytes arrive in
    // myJpegPictureCallback.
    myCamera.takePicture(myShutterCallback, null, myJpegPictureCallback);
}
// Intentionally empty shutter callback, passed to takePicture() above.
private Camera.ShutterCallback myShutterCallback = new Camera.ShutterCallback() {
    @Override
    public void onShutter() {
        // Do nothing, but this callback is necessary
    }
};
/**
 * Receives the JPEG data after takePicture(). Shows either the captured
 * picture size or a failure message, then restarts the preview and re-arms
 * the capture trigger.
 */
private Camera.PictureCallback myJpegPictureCallback = new Camera.PictureCallback() {
    @Override
    public void onPictureTaken(byte[] data, Camera camera) {
        if (data != null) {
            Camera.Size size = camera.getParameters().getPictureSize();
            Toast.makeText(CameraActivity.this,
                    "Size: " + size.width + " x " + size.height, Toast.LENGTH_SHORT)
                    .show();
        } else {
            Toast.makeText(CameraActivity.this,
                    "Take picture failed", Toast.LENGTH_SHORT).show();
        }
        // Preview stops after a capture; restart it and allow the next click.
        camera.startPreview();
        mClicked = false;
    }
};
}
| |
/*
* Trident - A Multithreaded Server Alternative
* Copyright 2014 The TridentSDK Team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.tridentsdk.server.concurrent;
import com.google.common.base.Preconditions;
import com.google.common.collect.Lists;
import com.google.common.collect.Sets;
import net.tridentsdk.concurrent.SelectableThread;
import net.tridentsdk.concurrent.SelectableThreadPool;
import net.tridentsdk.docs.InternalUseOnly;
import javax.annotation.Nonnull;
import javax.annotation.concurrent.GuardedBy;
import javax.annotation.concurrent.ThreadSafe;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.LockSupport;
import java.util.concurrent.locks.StampedLock;
/**
* Thread pool which allows tasks and result-bearing tasks to be executed concurrently
*
* <p>Internally, this class manages a List of the workers, which are simply TaskExecutors, and a global Set of other
* executors. This allows all workers and executors in the server to be found easily. The worker List is an expandable
* collection of internal thread workers. The decision to use a copy-on-write List instead of a Set was made based on
* the need for index based access, as well as the majority of operations upon the collection iterations from thread
* searching. Unfortunately, there are still many writes, as scaling requires the tracking of new workers, and the
* removal of the workers that are no longer needed.</p>
*
* <p>This thread pool always maintains the starting threads. Scaling is done once the current workers are occupied at
* the time of observation. Workers are deemed as occupied if threads are in the process of attempting insertion into
* the worker's internal queue. Workers are managed by native park and unparking, rather than using conditions. This
* provides numerous advantages, which include reduced overhead, as it is native, and is not bound to a particular scaleLock.
* Additionally, native thread scheduling provides for more control over basic thread stopping, rather than using the
* thread queue of a condition, or default guarding intrinsics.</p>
*
* <p>There are two basic locking areas: first on the thread advancement counter, and in the worker itself. They are
* both StampedLocks, which provide increased throughput (in fact, is the primary motivator for creating this class).
* In place of this class can be instead, a ThreadPoolExecutor. However, many new concurrent updates in Java 8
* rationalize an effort to create a new class which fully utilizes those features, and subsequently providing this
* class which is optimized to execute the heterogeneous tasks provided by the server. The first scaleLock protects the
* index which to pull workers from the worker Set, and a separate scaleLock, per-worker, protects the internal Deque. A
* Deque was selected as it can be inserted from both ends, sizable, and is array-based. Tests confirm that array
* based collections do outperform their node-based counter parts, as there is reduced instantiation overhead. The
* explicitly declared scaleLock allows to check occupation of the worker, which increases scalability.</p>
*
* <p>No thread pool would be complete without tuning. This class provides 3 basic tuning properties, which modify
* <em>expiring threads</em>. Expiring threads are new threads created to scale the executor. They are
* created when the current threads in the pool (including previously started expiring threads) are all occupied.
* One may modify the time at which the worker expires, whether the task queue must be empty, and the maximum number
* of threads in the pool.</p>
*
* @author The TridentSDK Team
*/
@ThreadSafe
public class ConcurrentTaskExecutor extends AbstractExecutorService implements SelectableThreadPool {
    /** Global registry of live executors, exposed via {@link #executors()}. */
    private static final Set<ConcurrentTaskExecutor> EXECUTORS = Sets.newConcurrentHashSet();

    // Lifecycle states; the state field only ever advances through these.
    private static final int INITIALIZING = 0;
    private static final int STARTING = 1;
    private static final int RUNNING = 2;
    private static final int STOPPING = 3;
    private static final int TERMINATED = 4;

    /** Base name used for worker thread names ("Pool <name> #<index>"). */
    private final String name;

    /** Live workers. Copy-on-write: selection reads vastly outnumber scale writes. */
    private final List<SelectableThread> workerSet = Lists.newCopyOnWriteArrayList();

    /** Count of workers started; also supplies the index of the next worker. */
    private final AtomicInteger count = new AtomicInteger();

    /** Number of core (never-expiring) workers created at construction. */
    private final int core;

    @GuardedBy("coreLock")
    private int coreIdx = 0;
    private final StampedLock coreLock = new StampedLock();

    @GuardedBy("scaleLock")
    private int scaleIdx = 0;
    private final StampedLock scaleLock = new StampedLock();

    private volatile int state = INITIALIZING;

    // Tuning properties for expiring (scaled) workers
    private volatile long expireIntervalMillis = 60_000;
    private volatile boolean mustEmptyBeforeExpire = true;
    private volatile int maxScale = 50;

    @Override
    public int maxThreads() {
        return maxScale;
    }

    @Override
    public void setMaxThreads(int maxScale) {
        this.maxScale = maxScale;
    }

    @Override
    public long threadExpiryTime() {
        return expireIntervalMillis;
    }

    @Override
    public void setThreadExpiryTime(long expireIntervalMillis) {
        this.expireIntervalMillis = expireIntervalMillis;
    }

    @Override
    public boolean mustEmptyBeforeExpire() {
        return mustEmptyBeforeExpire;
    }

    @Override
    public void setMustEmptyBeforeExpire(boolean mustEmptyBeforeExpire) {
        this.mustEmptyBeforeExpire = mustEmptyBeforeExpire;
    }

    private ConcurrentTaskExecutor(int startingThreadCount, String name) {
        Preconditions.checkArgument(startingThreadCount > 0);
        this.name = name;
        this.core = startingThreadCount;

        state = STARTING;
        for (int i = 0; i < startingThreadCount; i++) {
            addWorker(false);
        }
        state = RUNNING;
    }

    /**
     * Creates, starts and registers a new executor.
     *
     * @param startingThreadCount number of core workers, must be positive
     * @param name base name for the worker threads
     * @return the running executor
     */
    public static ConcurrentTaskExecutor create(int startingThreadCount, String name) {
        ConcurrentTaskExecutor ex = new ConcurrentTaskExecutor(startingThreadCount, name);
        EXECUTORS.add(ex);
        return ex;
    }

    @InternalUseOnly
    public static Collection<ConcurrentTaskExecutor> executors() {
        return EXECUTORS;
    }

    /**
     * Starts a new worker if the pool is below {@link #maxThreads()}, otherwise
     * returns a random existing worker.
     *
     * <p>NOTE(review): the size check and the increment are not atomic, so two
     * racing callers can briefly overshoot maxThreads by one; tolerable for a
     * soft limit.</p>
     *
     * @param expire true to create a worker that retires when idle
     */
    private ConcurrentWorker addWorker(boolean expire) {
        ConcurrentWorker worker;
        if (count.get() < maxThreads()) {
            if (expire) {
                worker = new ExpiringWorker(count.getAndIncrement());
            } else {
                worker = new ConcurrentWorker(count.getAndIncrement());
            }

            workerSet.add(worker);
            worker.start();
        } else {
            worker = (ConcurrentWorker) workerSet.get(ThreadLocalRandom.current().nextInt(workerSet.size()));
        }

        return worker;
    }

    @Override
    public SelectableThread selectCore() {
        // Round-robin over the core workers. The read-modify-write on coreIdx
        // is done under one write lock: the previous split read-then-write let
        // racing callers observe the same index. Advancing with a modular
        // increment also fixes the old wrap logic (reset at core - 1 returning
        // index 0), which never selected the last core worker.
        int idx;
        long stamp = coreLock.writeLock();
        try {
            idx = this.coreIdx;
            if (idx >= this.core) {
                idx = 0;
            }
            this.coreIdx = (idx + 1 >= this.core) ? 0 : idx + 1;
        } finally {
            coreLock.unlockWrite(stamp);
        }

        return workerSet.get(idx);
    }

    @Override
    public SelectableThread selectNext() {
        // Round-robin over ALL workers (core and expiring). Same atomicity and
        // wrap-around fixes as selectCore(); the bound is re-read every call
        // because workerSet grows and shrinks.
        int idx;
        int max = this.workerSet.size();
        long stamp = scaleLock.writeLock();
        try {
            idx = this.scaleIdx;
            if (idx >= max) {
                idx = 0;
            }
            this.scaleIdx = (idx + 1 >= max) ? 0 : idx + 1;
        } finally {
            scaleLock.unlockWrite(stamp);
        }

        return workerSet.get(idx);
    }

    @Override
    public SelectableThread selectScaled() {
        for (SelectableThread ex : workerSet) {
            ConcurrentWorker w = (ConcurrentWorker) ex;
            if (!w.isHeld()) {
                return w;
            }
        }

        // Every worker is busy: scale up with an expiring worker
        return addWorker(true);
    }

    @Override
    public List<SelectableThread> workers() {
        return workerSet;
    }

    @Override
    public void shutdown() {
        state = STOPPING;
        workerSet.forEach(SelectableThread::interrupt);
        workerSet.clear();
        EXECUTORS.remove(this);
        state = TERMINATED;
    }

    // Executor implementations

    @Override
    public List<Runnable> shutdownNow() {
        shutdown();
        // typed empty list instead of the raw Collections.EMPTY_LIST constant
        return Collections.emptyList();
    }

    @Override
    public boolean isShutdown() {
        // Shutdown counts as initiated once the state reaches STOPPING;
        // the previous '>' reported false while a shutdown was in progress.
        return state >= STOPPING;
    }

    @Override
    public boolean isTerminated() {
        return state == TERMINATED;
    }

    @Override
    public boolean awaitTermination(long l, TimeUnit timeUnit) throws InterruptedException {
        // Compute an absolute deadline in nanoseconds. The original called
        // timeUnit.convert(System.nanoTime(), timeUnit), an identity
        // conversion, and so compared raw nanoseconds against a timeout
        // expressed in the caller's unit; toNanos() is the intended conversion.
        long deadline = System.nanoTime() + timeUnit.toNanos(l);
        new Thread(this::shutdownNow).start();
        while (state != TERMINATED) {
            if (System.nanoTime() - deadline > 0) {
                return false;
            }
            Thread.sleep(1); // brief sleep instead of a hot spin
        }

        return true;
    }

    @Nonnull @Override
    public <T> Future<T> submit(Callable<T> task) {
        final RunnableFuture<T> future = new FutureTask<>(task);

        execute(future::run);
        return future;
    }

    @Override
    public void execute(@Nonnull Runnable runnable) {
        // Hand the task to the first idle worker; otherwise scale up (at the
        // cap, addWorker(true) falls back to a random existing worker).
        for (SelectableThread ex : workerSet) {
            ConcurrentWorker w = (ConcurrentWorker) ex;
            if (!w.isHeld()) {
                w.execute(runnable);
                return;
            }
        }

        ConcurrentWorker w = addWorker(true);
        w.execute(runnable);
    }

    // Workers

    private class ConcurrentWorker extends Thread implements SelectableThread {
        /** Pending tasks: pushed at the head, polled from the tail (FIFO). */
        @GuardedBy("lock") // fixed: deque is guarded by this worker's own lock, not scaleLock
        final Deque<Runnable> tasks = new ArrayDeque<>(64);
        final StampedLock lock = new StampedLock();

        /** True while the worker has (or is about to receive) queued work. */
        volatile boolean held;

        public ConcurrentWorker(int index) {
            super("Pool " + name + " #" + index);
        }

        @Override
        public void run() {
            while (!isInterrupted()) {
                try {
                    Runnable runnable = nextTask();
                    if (runnable == null) {
                        // Nothing queued: park until execute() unparks us
                        held = false;
                        LockSupport.park();
                    } else {
                        runnable.run();
                    }
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }

        /** Polls the next task, or null when the queue is empty. */
        Runnable nextTask() {
            long stamp = lock.writeLock();
            try {
                return tasks.pollLast();
            } finally {
                lock.unlockWrite(stamp);
            }
        }

        boolean isHeld() {
            return held;
        }

        @Override
        public void execute(Runnable task) {
            // Run inline when invoked from this worker's own thread: avoids
            // queueing overhead and self-deadlock while the thread is busy.
            if (Thread.currentThread().equals(asThread())) {
                task.run();
                return;
            }

            long stamp = lock.writeLock();
            try {
                tasks.offerFirst(task);
            } finally {
                lock.unlockWrite(stamp);
            }

            held = true;
            LockSupport.unpark(this);
        }

        @Override
        public <V> Future<V> submitTask(Callable<V> task) {
            final RunnableFuture<V> future = new FutureTask<>(task);

            execute(future::run);
            return future;
        }

        @Override
        public void interrupt() {
            // Unpark first so an idle worker observes the interrupt promptly
            LockSupport.unpark(asThread());
            super.interrupt();

            long stamp = lock.writeLock();
            try {
                tasks.clear();
            } finally {
                lock.unlockWrite(stamp);
            }
        }

        @Override
        public Thread asThread() {
            return this;
        }
    }

    private class ExpiringWorker extends ConcurrentWorker {
        /** Timestamp of the last polled task, used to decide expiry. */
        long last = System.currentTimeMillis();

        public ExpiringWorker(int index) {
            super(index);
        }

        @Override
        Runnable nextTask() {
            long stamp = lock.writeLock();
            Runnable runnable;
            try {
                runnable = tasks.pollLast();
            } finally {
                lock.unlockWrite(stamp);
            }

            if (runnable == null) {
                // Expiration mechanics, in the case of spurious wakeups
                long time = System.currentTimeMillis();
                if ((time - this.last) >= expireIntervalMillis) {
                    this.interrupt();
                }

                // Processing tasks very very quickly can result in stackoverflows
                // if this method is called too often recursively; hand back a
                // parking task instead of null so the caller idles here
                return () -> LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(expireIntervalMillis));
            } else {
                // Expiration mechanics: when expired and the queue is (or need
                // not be) empty, run the final task and retire this worker
                long time = System.currentTimeMillis();
                if ((time - this.last) >= expireIntervalMillis) {
                    if (mustEmptyBeforeExpire) {
                        if (isEmpty()) {
                            return () -> {
                                runnable.run();
                                this.interrupt();
                            };
                        }
                    }
                }

                this.last = time;
                return runnable;
            }
        }

        @Override
        public void interrupt() {
            // Most important thing: don't allow new tasks to be submitted
            workerSet.remove(this);
            count.decrementAndGet();

            Queue<Runnable> left;
            long stamp = lock.readLock();
            try {
                left = tasks;
            } finally {
                lock.unlockRead(stamp);
            }

            // Re-home any leftover tasks onto the core workers
            left.forEach(r -> selectCore().execute(r));
            super.interrupt();
        }

        private boolean isEmpty() {
            long stamp = lock.readLock();
            try {
                return tasks.isEmpty();
            } finally {
                lock.unlockRead(stamp);
            }
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cocoon.acting.modular;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.Map;
import org.apache.avalon.excalibur.datasource.DataSourceComponent;
import org.apache.avalon.framework.activity.Disposable;
import org.apache.avalon.framework.configuration.Configuration;
import org.apache.avalon.framework.configuration.ConfigurationException;
import org.apache.avalon.framework.parameters.Parameters;
import org.apache.avalon.framework.service.ServiceException;
import org.apache.avalon.framework.service.ServiceManager;
import org.apache.avalon.framework.service.ServiceSelector;
import org.apache.avalon.framework.thread.ThreadSafe;
import org.apache.cocoon.ProcessingException;
import org.apache.cocoon.acting.AbstractComplementaryConfigurableAction;
import org.apache.cocoon.components.modules.database.AutoIncrementModule;
import org.apache.cocoon.components.modules.input.InputModule;
import org.apache.cocoon.components.modules.output.OutputModule;
import org.apache.cocoon.environment.Redirector;
import org.apache.cocoon.environment.SourceResolver;
import org.apache.cocoon.util.HashMap;
import org.apache.cocoon.util.JDBCTypeConversions;
import org.apache.commons.lang.BooleanUtils;
/**
* Abstract action for common function needed by database actions.
* The difference to the other Database*Actions is, that the actions
* in this package use additional components ("modules") for reading
* and writing parameters. In addition the descriptor format has
* changed to accomodate the new features.
*
* <p>This action is heavily based upon the original DatabaseAddActions.</p>
*
* <p>Modes have to be configured in cocoon.xconf. Mode names from
* descriptor.xml file are looked up in the component service. Default
* mode names can only be set during action setup. </p>
*
* <p>The number of affected rows is returned to the sitemap with the
* "row-count" parameter if at least one row was affected.</p>
*
* <p>All known column types can be found in
* {@link org.apache.cocoon.util.JDBCTypeConversions JDBCTypeConversions}.</p>
*
* <table>
* <tr><td colspan="2">Configuration options (setup):</td></tr>
* <tr><td>input </td><td>default mode name for reading values (request-param)</td></tr>
* <tr><td>autoincrement </td><td>default mode name for obtaining values from autoincrement columns (auto)</td></tr>
* <tr><td>append-row </td><td>append row number in square brackets to column name for output (yes)</td></tr>
* <tr><td>append-table-name</td><td>add table name to column name for both in- and output (yes)</td></tr>
* <tr><td>first-row </td><td>row index of first row (0)</td></tr>
* <tr><td>path-separator </td><td>string to separate table name from column name (.)</td></tr>
* </table>
*
* <table>
* <tr><td colspan="2">Configuration options (setup and per invocation):</td></tr>
* <tr><td>throw-exception </td><td>throw an exception when an error occurs (default: false)</td></tr>
* <tr><td>descriptor </td><td>file containing database description</td></tr>
* <tr><td>table-set </td><td>table-set name to work with </td></tr>
* <tr><td>output </td><td>mode name for writing values (request-attr)</td></tr>
* <tr><td>reloadable </td><td>dynamically reload descriptor file if change is detected</td></tr>
* <tr><td>use-transactions </td><td>defaults to yes</td></tr>
* <tr><td>connection </td><td>configured datasource connection to use (overrides value from descriptor file)</td></tr>
* <tr><td>fail-on-empty </td><td>(boolean) fail is statement affected zero rows (true)</td></tr>
* </table>
*
* @version $Id$
* @see org.apache.cocoon.components.modules.input
* @see org.apache.cocoon.components.modules.output
* @see org.apache.cocoon.components.modules.database
* @see org.apache.cocoon.util.JDBCTypeConversions
*/
public abstract class DatabaseAction extends AbstractComplementaryConfigurableAction implements Disposable, ThreadSafe {
// ========================================================================
// constants
// ========================================================================
static final Integer MODE_AUTOINCR = new Integer(0);
static final Integer MODE_OTHERS = new Integer(1);
static final Integer MODE_OUTPUT = new Integer(2);
static final String ATTRIBUTE_KEY = "org.apache.cocoon.action.modular.DatabaseAction.outputModeName";
// These can be overidden from cocoon.xconf
static final String inputHint = "request-param"; // default to request parameters
static final String outputHint = "request-attr"; // default to request attributes
static final String databaseHint = "manual"; // default to manual auto increments
static final String INPUT_MODULE_SELECTOR = InputModule.ROLE + "Selector";
static final String OUTPUT_MODULE_SELECTOR = OutputModule.ROLE + "Selector";
static final String DATABASE_MODULE_SELECTOR = AutoIncrementModule.ROLE + "Selector";
// ========================================================================
// instance vars
// ========================================================================
protected ServiceSelector dbselector;
protected Map defaultModeNames = new HashMap( 3 );
protected final HashMap cachedQueryData = new HashMap();
protected String pathSeparator = ".";
protected int firstRow = 0;
protected boolean failOnEmpty = true;
// ========================================================================
// inner helper classes
// ========================================================================
/**
* Structure that takes all processed data for one column.
*/
protected static class Column {
    boolean isKey = false;           // column is part of the table's key
    boolean isSet = false;           // column receives multiple values (a "set")
    boolean isAutoIncrement = false; // value supplied by an autoincrement module
    String mode = null;              // name of the input mode used to read values
    Configuration modeConf = null;   // matching <mode> child configuration, if any
    Configuration columnConf = null; // the column's own configuration element
}
/**
* Structure that takes all processed data for a table depending
* on current default modes
*/
protected static class CacheHelper {
    /**
     * Generated query string
     */
    public String queryString = null;
    /**
     * if a set is used, column number which is used to determine
     * the number of rows to insert.
     */
    public int setMaster = -1;
    public boolean isSet = false;   // true if any column is multi-valued
    public int noOfKeys = 0;        // number of leading key columns in columns[]
    public Column[] columns = null; // per-column parsed data, keys first
    /** Convenience constructor for a helper with no key columns. */
    public CacheHelper( int cols ) {
        this(0,cols);
    }
    // cols is the total array size (apparently keys + value columns —
    // confirm against the getQuery implementation, not visible here)
    public CacheHelper( int keys, int cols ) {
        noOfKeys = keys;
        columns = new Column[cols];
        for ( int i=0; i<cols; i++ ) {
            columns[i] = new Column();
        }
    }
}
/**
* Structure that takes up both current mode types for database
* operations and table configuration data. Used to access parsed
* configuration data.
*/
protected static class LookUpKey {
    public Configuration tableConf = null;
    public Map modeTypes = null;

    public LookUpKey( Configuration tableConf, Map modeTypes ) {
        this.tableConf = tableConf;
        this.modeTypes = modeTypes;
    }

    /**
     * Two keys are equal when both the table configuration and the mode
     * type map are equal (null matching only null).
     * @see java.lang.Object#equals(java.lang.Object)
     */
    public boolean equals(Object obj) {
        if (!(obj instanceof LookUpKey)) {
            // also covers obj == null
            return false;
        }
        LookUpKey luk = (LookUpKey) obj;
        boolean sameTable = (luk.tableConf == null ?
                this.tableConf == null : luk.tableConf.equals(this.tableConf));
        boolean sameModes = (luk.modeTypes == null ?
                this.modeTypes == null : luk.modeTypes.equals(this.modeTypes));
        return sameTable && sameModes;
    }

    /**
     * Combines the hashes of both fields. BUG FIX: the previous version
     * hashed only tableConf whenever it was non-null, so every key sharing
     * a table configuration collided in the cachedQueryData map; the
     * equals/hashCode contract still held, but lookups degenerated.
     * @see java.lang.Object#hashCode()
     */
    public int hashCode() {
        int h = (this.tableConf != null ? this.tableConf.hashCode() : 0);
        return 29 * h + (this.modeTypes != null ? this.modeTypes.hashCode() : 0);
    }
}
// set up default modes
// <input/>
// <output/>
// <autoincrement/>
//
// all other modes need to be declared in cocoon.xconf
// no need to declare them per action (anymore!)
/**
 * Reads the action's default settings (default mode names, path separator,
 * first-row offset, fail-on-empty) from its configuration.
 */
public void configure(Configuration conf) throws ConfigurationException {
    super.configure(conf);
    if (this.settings != null) {
        // default mode names for the three module kinds
        this.defaultModeNames.put(MODE_OTHERS, this.settings.get("input", inputHint));
        this.defaultModeNames.put(MODE_OUTPUT, this.settings.get("output", outputHint));
        this.defaultModeNames.put(MODE_AUTOINCR, this.settings.get("autoincrement", databaseHint));
        this.pathSeparator = (String)this.settings.get("path-separator", this.pathSeparator);
        String tmp = (String)this.settings.get("first-row",null);
        if (tmp != null) {
            try {
                this.firstRow = Integer.parseInt(tmp);
            } catch (NumberFormatException nfe) {
                // a non-numeric "first-row" is ignored, keeping the default (0)
                if (getLogger().isWarnEnabled())
                    getLogger().warn("problem parsing first row option "+tmp+" using default instead.");
            }
        }
        tmp = (String) this.settings.get("fail-on-empty",String.valueOf(this.failOnEmpty));
        this.failOnEmpty = BooleanUtils.toBoolean(tmp);
    }
}
// ========================================================================
// Avalon methods
// ========================================================================
/**
* Compose the Actions so that we can select our databases.
*/
public void service(ServiceManager manager) throws ServiceException {
    super.service(manager);
    // selector over all configured datasources; released again in dispose()
    this.dbselector = (ServiceSelector) manager.lookup(DataSourceComponent.ROLE + "Selector");
}
/**
* dispose
*/
public void dispose() {
    // release the datasource selector obtained in service()
    this.manager.release(dbselector);
}
// ========================================================================
// protected utility methods
// ========================================================================
/**
* Get the Datasource we need.
*/
/**
 * Looks up the datasource to use: the sitemap "connection" parameter (or
 * the action's "connection" setting) wins; otherwise the descriptor's
 * &lt;connection&gt; child is consulted.
 */
protected DataSourceComponent getDataSource( Configuration conf, Parameters parameters )
    throws ServiceException {
    String sourceName = parameters.getParameter( "connection", (String) settings.get( "connection" ) );
    if ( sourceName != null ) {
        if (getLogger().isDebugEnabled())
            getLogger().debug("Using datasource: "+sourceName);
        return (DataSourceComponent) this.dbselector.select(sourceName);
    }
    // fall back to the descriptor file's <connection> element
    Configuration dsn = conf.getChild("connection");
    return (DataSourceComponent) this.dbselector.select(dsn.getValue(""));
}
/**
* Return whether a type is a Large Object (BLOB/CLOB).
*/
/**
 * Return whether a type is a Large Object (BLOB/CLOB), i.e. one of the
 * streamed column types "ascii", "binary" or "image".
 */
protected final boolean isLargeObject (String type) {
    return "ascii".equals(type)
        || "binary".equals(type)
        || "image".equals(type);
}
/**
* Store a key/value pair in the output attributes. We prefix the key
* with the name of this class to prevent potential name collisions.
*/
/**
 * Store a key/value pair through the named output module. Failures to
 * select the module or to set the attribute are logged at warn level and
 * otherwise swallowed (best-effort semantics).
 *
 * @param objectModel the Cocoon object model
 * @param outputMode name of the output module to use; may be null
 * @param key attribute name
 * @param value attribute value
 */
protected void setOutputAttribute(Map objectModel, String outputMode, String key, Object value) {
    ServiceSelector outputSelector = null;
    OutputModule output = null;
    try {
        outputSelector = (ServiceSelector) this.manager.lookup(OUTPUT_MODULE_SELECTOR);
        if (outputMode != null && outputSelector != null && outputSelector.isSelectable(outputMode)) {
            output = (OutputModule) outputSelector.select(outputMode);
        }
        if (output != null) {
            output.setAttribute(null, objectModel, key, value);
        } else if (getLogger().isWarnEnabled()) {
            getLogger().warn("Could not select output mode " + outputMode);
        }
    } catch (Exception e) {
        // deliberately best-effort: a broken output mode must not abort the action
        if (getLogger().isWarnEnabled()) {
            getLogger().warn("Could not select output mode " + outputMode + ":" + e.getMessage());
        }
    } finally {
        // release in reverse order of acquisition
        if (outputSelector != null) {
            if (output != null)
                outputSelector.release(output);
            this.manager.release(outputSelector);
        }
    }
}
/**
* Inserts a row or a set of rows into the given table based on the
* request parameters
*
* @param table the table's configuration
* @param conn the database connection
* @param objectModel the objectModel
*/
/**
 * Inserts a row or a set of rows into the given table based on the
 * request parameters
 *
 * @param table the table's configuration
 * @param conn the database connection
 * @param objectModel the objectModel
 * @param results map receiving per-column output values
 * @param modeTypes current mode-type names (input/output/autoincrement)
 * @return the number of rows affected by all statement executions
 */
protected int processTable( Configuration table, Connection conn, Map objectModel,
                            Map results, Map modeTypes )
    throws SQLException, ConfigurationException, Exception {
    PreparedStatement statement = null;
    int rows = 0;
    try {
        // cache key: the table configuration plus the current mode types
        LookUpKey luk = new LookUpKey(table, modeTypes);
        CacheHelper queryData = null;
        if (getLogger().isDebugEnabled())
            getLogger().debug("modeTypes : "+ modeTypes);
        // get cached data
        // synchronize complete block since we don't want 100s of threads
        // generating the same cached data set. In the long run all data
        // is cached anyways so this won't cost much.
        synchronized (this.cachedQueryData) {
            queryData = (CacheHelper) this.cachedQueryData.get(luk,null);
            if (queryData == null) {
                queryData = this.getQuery( table, modeTypes, defaultModeNames );
                this.cachedQueryData.put(luk,queryData);
            }
        }
        if (getLogger().isDebugEnabled())
            getLogger().debug("query: "+queryData.queryString);
        statement = conn.prepareStatement(queryData.queryString);
        Object[][] columnValues = this.getColumnValues( table, queryData, objectModel );
        // a "set" executes the statement once per value of its master column;
        // otherwise the statement runs exactly once
        int setLength = 1;
        if ( queryData.isSet ) {
            if ( columnValues[ queryData.setMaster ] != null ) {
                setLength = columnValues[ queryData.setMaster ].length;
            } else {
                setLength = 0;
            }
        }
        for ( int rowIndex = 0; rowIndex < setLength; rowIndex++ ) {
            if (getLogger().isDebugEnabled()) {
                getLogger().debug( "====> row no. " + rowIndex );
            }
            rows += processRow( objectModel, conn, statement, (String) modeTypes.get(MODE_OUTPUT), table, queryData, columnValues, rowIndex, results );
        }
    } finally {
        try {
            if (statement != null) {
                statement.close();
            }
        } catch (SQLException e) {}
    }
    return rows;
}
/**
* Choose a mode configuration based on its name.
* @param conf Configuration (i.e. a column's configuration) that might have
* several children configurations named "mode".
* @param type desired type (i.e. every mode has a type
* attribute), find the first mode that has a compatible type.
* Special mode "all" matches all queried types.
* @return configuration that has desired type or type "all" or null.
*/
/**
 * Choose a mode configuration based on its name.
 * @param conf Configuration (i.e. a column's configuration) that might have
 * several children configurations named "mode".
 * @param type desired type; the first child whose type attribute equals it,
 * or equals the wildcard "all", wins.
 * @return configuration that has desired type or type "all" or null.
 */
protected Configuration getMode( Configuration conf, String type )
    throws ConfigurationException {
    Configuration[] modes = conf.getChildren("mode");
    for ( int i = 0; i < modes.length; i++ ) {
        String modeType = modes[i].getAttribute("type", "others");
        if ( modeType.equals(type) || modeType.equals("all") ) {
            if (getLogger().isDebugEnabled())
                getLogger().debug("requested mode was \""+type+"\" returning \""+modeType+"\"");
            return modes[i];
        }
    }
    return null;
}
/**
* compose name for output a long the lines of "table.column"
*/
protected String getOutputName ( Configuration tableConf, Configuration columnConf ) {
    // -1 means: no row-index suffix
    return getOutputName( tableConf, columnConf, -1 );
}
/**
* compose name for output a long the lines of "table.column[row]" or
* "table.column" if rowIndex is -1.
* If the section of the sitemap corresponding to the action contains
* <append-table-name>false</append-table-name>
* the name for output is "column[row]"
* If the section of the sitemap corresponding to the action contains
* <append-row>false</append-row>
* the name for output is "column"
*/
/**
 * compose name for output a long the lines of "table.column[row]" or
 * "table.column" if rowIndex is -1.
 * If the section of the sitemap corresponding to the action contains
 * &lt;append-table-name&gt;false&lt;/append-table-name&gt;
 * the name for output is "column[row]"
 * If the section of the sitemap corresponding to the action contains
 * &lt;append-row&gt;false&lt;/append-row&gt;
 * the name for output is "column"
 */
protected String getOutputName ( Configuration tableConf, Configuration columnConf, int rowIndex ) {
    if ( rowIndex != -1 ) {
        if ( this.settings.containsKey("append-row") &&
             (this.settings.get("append-row").toString().equalsIgnoreCase("false") ||
              this.settings.get("append-row").toString().equalsIgnoreCase("0")) ) {
            // <append-row>false</append-row>: suppress the row suffix
            rowIndex = -1;
        } else {
            // BUG FIX: only shift real row indices by the first-row offset.
            // The original applied this shift to the -1 "no row" sentinel as
            // well, so a non-zero "first-row" setting produced a bogus
            // "[first-row - 1]" suffix for calls without a row index.
            rowIndex = rowIndex + this.firstRow;
        }
    }
    String suffix = ( rowIndex == -1 ? "" : "[" + rowIndex + "]" );
    if ( this.settings.containsKey("append-table-name") &&
         (this.settings.get("append-table-name").toString().equalsIgnoreCase("false") ||
          this.settings.get("append-table-name").toString().equalsIgnoreCase("0")) )
    {
        // column name only, e.g. "column[2]"
        return columnConf.getAttribute("name",null) + suffix;
    } else {
        // table alias (or name) plus column, e.g. "table.column[2]"
        return ( tableConf.getAttribute("alias", tableConf.getAttribute("name", null) )
                 + this.pathSeparator + columnConf.getAttribute("name",null)
                 + suffix );
    }
}
/*
* Read all values for a column from an InputModule
*
* If the given column is an autoincrement column, an empty array
* is returned, otherwise if it is part of a set, all available
* values are fetched, or only the first one if it is not part of
* a set.
*
*/
/*
 * Read all values for a column from an InputModule
 *
 * If the given column is an autoincrement column, an empty array
 * is returned, otherwise if it is part of a set, all available
 * values are fetched, or only the first one if it is not part of
 * a set.
 *
 */
protected Object[] getColumnValue(Configuration tableConf, Column column, Map objectModel)
    throws ConfigurationException, ServiceException {
    if (column.isAutoIncrement) {
        // placeholder slot; the actual value comes from the database module
        return new Object[1];
    } else {
        Object[] values;
        String cname = getOutputName( tableConf, column.columnConf );
        // obtain input module and read values
        ServiceSelector inputSelector = null;
        InputModule input = null;
        try {
            inputSelector = (ServiceSelector) this.manager.lookup(INPUT_MODULE_SELECTOR);
            if (column.mode != null && inputSelector != null && inputSelector.isSelectable(column.mode)){
                input = (InputModule) inputSelector.select(column.mode);
            }
            // NOTE(review): if the mode is null or not selectable, 'input'
            // stays null and the calls below throw NPE — presumably the
            // descriptor guarantees a valid mode; confirm with callers.
            if (column.isSet) {
                if (getLogger().isDebugEnabled()) {
                    getLogger().debug( "Trying to set column " + cname + " from " + column.mode + " using getAttributeValues method");
                }
                // set columns pull every available value for the name
                values = input.getAttributeValues( cname, column.modeConf, objectModel );
            } else {
                if (getLogger().isDebugEnabled()) {
                    getLogger().debug( "Trying to set column " + cname + " from " + column.mode + " using getAttribute method");
                }
                // plain columns pull a single value
                values = new Object[1];
                values[0] = input.getAttribute( cname, column.modeConf, objectModel );
            }
            if (values != null) {
                // trace what was read, value by value
                for ( int i = 0; i < values.length; i++ ) {
                    if (getLogger().isDebugEnabled()) {
                        getLogger().debug( "Setting column " + cname + " [" + i + "] " + values[i] );
                    }
                }
            }
        } finally {
            // release in reverse order of acquisition
            if (inputSelector != null) {
                if (input != null) {
                    inputSelector.release(input);
                }
                this.manager.release(inputSelector);
            }
        }
        return values;
    }
}
/**
* Setup parsed attribute configuration object
*/
protected void fillModes ( Configuration[] conf, boolean isKey, Map defaultModeNames,
Map modeTypes, CacheHelper set )
throws ConfigurationException {
String setMode = null;
int offset = (isKey ? 0: set.noOfKeys);
for (int i = offset; i < conf.length + offset; i++) {
if (getLogger().isDebugEnabled()) {
getLogger().debug("i=" + i);
}
set.columns[i].columnConf = conf[ i - offset ];
set.columns[i].isSet = false;
set.columns[i].isKey = isKey;
set.columns[i].isAutoIncrement = false;
if (isKey & this.honourAutoIncrement()) {
set.columns[i].isAutoIncrement = set.columns[i].columnConf.getAttributeAsBoolean("autoincrement",false);
}
set.columns[i].modeConf = getMode(set.columns[i].columnConf,
selectMode(set.columns[i].isAutoIncrement, modeTypes));
set.columns[i].mode = (set.columns[i].modeConf != null ?
set.columns[i].modeConf.getAttribute("name", selectMode(isKey, defaultModeNames)) :
selectMode(isKey, defaultModeNames));
// Determine set mode for a whole column ...
setMode = set.columns[i].columnConf.getAttribute("set", null); // master vs slave vs null
if (setMode == null && set.columns[i].modeConf != null) {
// ... or for each mode individually
setMode = set.columns[i].modeConf.getAttribute("set", null);
}
if (setMode != null) {
set.columns[i].isSet = true;
set.isSet = true;
if (setMode.equals("master")) {
set.setMaster = i;
}
}
}
}
/**
* create a unique name using the getOutputName method and write
* the value to the output module and the results map if present.
*
*/
protected void setOutput( Map objectModel, String outputMode, Map results,
Configuration table, Configuration column, int rowIndex, Object value ) {
String param = this.getOutputName( table, column, rowIndex );
if (getLogger().isDebugEnabled()) {
getLogger().debug( "Setting column " + param + " to " + value );
}
this.setOutputAttribute(objectModel, outputMode, param, value);
if (results != null) {
results.put( param, String.valueOf( value ) );
}
}
/**
* set a column in a statement using the appropriate JDBC setXXX method.
*
*/
protected void setColumn (PreparedStatement statement, int position, Configuration entry, Object value) throws Exception {
JDBCTypeConversions.setColumn(statement, position, value,
(Integer)JDBCTypeConversions.typeConstants.get(entry.getAttribute("type")));
}
/**
* set a column in a statement using the appropriate JDBC setXXX
* method and propagate the value to the output module and results
* map if present. Effectively combines calls to setColumn and
* setOutput.
*
*/
protected void setColumn (Map objectModel, String outputMode, Map results,
Configuration table, Configuration column, int rowIndex,
Object value, PreparedStatement statement, int position) throws Exception {
if (results != null) {
this.setOutput(objectModel, outputMode, results, table, column, rowIndex, value);
}
this.setColumn( statement, position, column, value );
}
// ========================================================================
// main method
// ========================================================================
/**
* Add a record to the database. This action assumes that
* the file referenced by the "descriptor" parameter conforms
* to the AbstractDatabaseAction specifications.
*/
public Map act(Redirector redirector, SourceResolver resolver, Map objectModel,
String source, Parameters param) throws Exception {
DataSourceComponent datasource = null;
Connection conn = null;
Map results = new HashMap();
int rows = 0;
boolean failed = false;
// read global parameter settings
boolean reloadable = DESCRIPTOR_RELOADABLE_DEFAULT;
// call specific default modes apart from output mode are not supported
// set request attribute
String outputMode = param.getParameter("output", (String) defaultModeNames.get(MODE_OUTPUT));
if (this.settings.containsKey("reloadable")) {
reloadable = Boolean.valueOf((String) this.settings.get("reloadable")).booleanValue();
}
// read local parameter settings
try {
Configuration conf =
this.getConfiguration(param.getParameter("descriptor", (String) this.settings.get("descriptor")),
resolver,
param.getParameterAsBoolean("reloadable",reloadable));
// get database connection and try to turn off autocommit
datasource = this.getDataSource(conf, param);
conn = datasource.getConnection();
if (conn.getAutoCommit() == true) {
try {
conn.setAutoCommit(false);
} catch (Exception ex) {
String tmp = param.getParameter("use-transactions",(String) this.settings.get("use-transactions",null));
if (tmp != null && (tmp.equalsIgnoreCase("no") || tmp.equalsIgnoreCase("false") || tmp.equalsIgnoreCase("0"))) {
if (getLogger().isErrorEnabled())
getLogger().error("This DB connection does not support transactions. If you want to risk your data's integrity by continuing nonetheless set parameter \"use-transactions\" to \"no\".");
throw ex;
}
}
}
// find tables to work with
Configuration[] tables = conf.getChildren("table");
String tablesetname = param.getParameter("table-set", (String) this.settings.get("table-set"));
Map modeTypes = null;
if (tablesetname == null) {
modeTypes = new HashMap(6);
modeTypes.put( MODE_AUTOINCR, "autoincr" );
modeTypes.put( MODE_OTHERS, "others" );
modeTypes.put( MODE_OUTPUT, outputMode );
for (int i = 0; i < tables.length; i++) {
rows += processTable(tables[i], conn, objectModel, results, modeTypes);
}
} else {
// new set based behaviour
// create index for table names / aliases
Map tableIndex = new HashMap(2*tables.length);
String tableName = null;
Object result = null;
for (int i=0; i<tables.length; i++) {
tableName = tables[i].getAttribute("alias",tables[i].getAttribute("name",""));
result = tableIndex.put(tableName,new Integer(i));
if (result != null) {
throw new IOException("Duplicate table entry for "+tableName+" at positions "+result+" and "+i);
}
}
Configuration[] tablesets = conf.getChildren("table-set");
String setname = null;
boolean found = false;
// find tables contained in tableset
int j = 0;
for (j = 0; j < tablesets.length; j++) {
setname = tablesets[j].getAttribute ("name", "");
if (tablesetname.trim().equals (setname.trim ())) {
found = true;
break;
}
}
if (!found) {
throw new IOException(" given set " + tablesetname + " does not exists in a description file.");
}
Configuration[] set = tablesets[j].getChildren("table");
for (int i = 0; i < set.length; i++) {
// look for alternative mode types
modeTypes = new HashMap(6);
modeTypes.put( MODE_AUTOINCR, set[i].getAttribute( "autoincr-mode", "autoincr" ) );
modeTypes.put( MODE_OTHERS, set[i].getAttribute( "others-mode", "others" ) );
modeTypes.put( MODE_OUTPUT, outputMode );
tableName=set[i].getAttribute("name","");
if (tableIndex.containsKey(tableName)) {
j = ((Integer)tableIndex.get(tableName)).intValue();
rows += processTable( tables[j], conn, objectModel, results, modeTypes );
} else {
throw new IOException(" given table " + tableName + " does not exists in a description file.");
}
}
}
if (conn.getAutoCommit() == false) {
conn.commit();
}
// obtain output mode module and rollback output
ServiceSelector outputSelector = null;
OutputModule output = null;
try {
outputSelector = (ServiceSelector) this.manager.lookup(OUTPUT_MODULE_SELECTOR);
if (outputMode != null && outputSelector != null && outputSelector.isSelectable(outputMode)){
output = (OutputModule) outputSelector.select(outputMode);
}
if (output != null) {
output.commit(null, objectModel);
} else if (getLogger().isWarnEnabled()) {
getLogger().warn("Could not select output mode " + outputMode);
}
} catch (ServiceException e) {
if (getLogger().isWarnEnabled()) {
getLogger().warn("Could not select output mode " + outputMode + ":" + e.getMessage());
}
} finally {
if (outputSelector != null) {
if (output != null) {
outputSelector.release(output);
}
this.manager.release(outputSelector);
}
}
} catch (Exception e) {
failed = true;
if ( conn != null ) {
try {
if (getLogger().isDebugEnabled()) {
getLogger().debug( "Rolling back transaction. Caused by " + e.getMessage() );
e.printStackTrace();
}
conn.rollback();
results = null;
// obtain output mode module and commit output
ServiceSelector outputSelector = null;
OutputModule output = null;
try {
outputSelector = (ServiceSelector) this.manager.lookup(OUTPUT_MODULE_SELECTOR);
if (outputMode != null && outputSelector != null && outputSelector.isSelectable(outputMode)){
output = (OutputModule) outputSelector.select(outputMode);
}
if (output != null) {
output.rollback( null, objectModel, e);
} else if (getLogger().isWarnEnabled()) {
getLogger().warn("Could not select output mode " + outputMode);
}
} catch (ServiceException e2) {
if (getLogger().isWarnEnabled()) {
getLogger().warn("Could not select output mode " + outputMode + ":" + e2.getMessage());
}
} finally {
if (outputSelector != null) {
if (output != null) {
outputSelector.release(output);
}
this.manager.release(outputSelector);
}
}
} catch (SQLException se) {
if (getLogger().isDebugEnabled())
getLogger().debug("There was an error rolling back the transaction", se);
}
}
//throw new ProcessingException("Could not add record :position = " + currentIndex, e);
// don't throw an exception, an error has been signalled, that should suffice
String throwException = (String) this.settings.get( "throw-exception",
param.getParameter( "throw-exception", null ) );
if ( throwException != null && BooleanUtils.toBoolean(throwException)) {
throw new ProcessingException("Cannot process the requested SQL statement ",e);
}
} finally {
if (conn != null) {
try {
conn.close();
} catch (SQLException sqe) {
getLogger().warn("There was an error closing the datasource", sqe);
}
}
if (datasource != null)
this.dbselector.release(datasource);
}
if (results != null) {
if (rows>0 || (!failed && !this.failOnEmpty)) {
results.put("row-count",new Integer(rows));
} else {
results = null;
}
} else {
if (rows>0) {
results = new HashMap(1);
results.put("row-count",new Integer(rows));
}
}
return results; // (results == null? results : Collections.unmodifiableMap(results));
}
// ========================================================================
// abstract methods
// ========================================================================
    /**
     * Set all necessary ?s on the prepared statement and execute the query
     * for one row; returns the number of rows processed.
     *
     * This method is intended to be overridden by classes that
     * implement other operations e.g. delete.
     */
    protected abstract int processRow( Map objectModel, Connection conn, PreparedStatement statement, String outputMode,
                                        Configuration table, CacheHelper queryData, Object[][] columnValues,
                                        int rowIndex, Map results )
        throws SQLException, ConfigurationException, Exception;
    /**
     * Determine which mode to use as default mode for a column, choosing
     * from the given map depending on the column kind.
     *
     * This method is intended to be overridden by classes that
     * implement other operations e.g. delete.
     */
    protected abstract String selectMode( boolean isAutoIncrement, Map modes );
    /**
     * Determine whether autoincrement columns should be honoured by
     * this operation. This is usually sensible only for INSERTs.
     *
     * This method is intended to be overridden by classes that
     * implement other operations e.g. delete.
     */
    protected abstract boolean honourAutoIncrement();
    /**
     * Fetch all values for all columns that are needed to do the
     * database operation.
     *
     * This method is intended to be overridden by classes that
     * implement other operations e.g. delete.
     */
    abstract Object[][] getColumnValues( Configuration tableConf, CacheHelper queryData, Map objectModel )
        throws ConfigurationException, ServiceException;
    /**
     * Get the String representation of the PreparedStatement. This is
     * mapped to the Configuration object itself, so if it doesn't exist,
     * it will be created.
     *
     * This method is intended to be overridden by classes that
     * implement other operations e.g. delete.
     *
     * @param table the table's configuration object
     * @return the insert query as a string (wrapped in a CacheHelper)
     */
    protected abstract CacheHelper getQuery( Configuration table, Map modeTypes, Map defaultModeNames )
        throws ConfigurationException, ServiceException;
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.sysds.test.functions.codegenalg.partone;
import java.io.File;
import java.util.HashMap;
import org.junit.Assert;
import org.junit.Test;
import org.apache.sysds.api.DMLScript;
import org.apache.sysds.common.Types.ExecMode;
import org.apache.sysds.hops.OptimizerUtils;
import org.apache.sysds.common.Types.ExecType;
import org.apache.sysds.runtime.matrix.data.MatrixValue.CellIndex;
import org.apache.sysds.test.AutomatedTestBase;
import org.apache.sysds.test.TestConfiguration;
import org.apache.sysds.test.TestUtils;
/**
 * Codegen algorithm test for linear regression via conjugate gradient
 * (scripts/algorithms/LinearRegCG.dml). Runs the DML script against the
 * equivalent R script over dense/sparse data, intercept values 0/1/2,
 * CP/Spark backends, and the three codegen fusion heuristics, and asserts
 * that a row-aggregate spoof operator ("spoofRA") was actually compiled.
 */
public class AlgorithmLinregCG extends AutomatedTestBase
{
    private final static String TEST_NAME1 = "Algorithm_LinregCG";
    private final static String TEST_DIR = "functions/codegenalg/";
    private final static String TEST_CLASS_DIR = TEST_DIR + AlgorithmLinregCG.class.getSimpleName() + "/";

    // fusion heuristic used by getConfigTemplateFile(); set per test run
    private static CodegenTestType currentTestType = CodegenTestType.DEFAULT;

    private final static double eps = 1e-1;

    private final static int rows = 2468;
    private final static int cols = 507;

    private final static double sparsity1 = 0.7; //dense
    private final static double sparsity2 = 0.1; //sparse

    private final static double epsilon = 0.000000001;
    // NOTE(review): declared double, so the "maxi" script argument is passed
    // as "10.0" — kept as-is to preserve the exact invocation.
    private final static double maxiter = 10;

    @Override
    public void setUp() {
        TestUtils.clearAssertionInformation();
        addTestConfiguration(TEST_NAME1, new TestConfiguration(TEST_CLASS_DIR, TEST_NAME1, new String[] { "w" }));
    }

    @Test
    public void testLinregCG0DenseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0SparseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0DenseCP() {
        runLinregCGTest(TEST_NAME1, false, false, 0, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0SparseCP() {
        runLinregCGTest(TEST_NAME1, false, true, 0, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0DenseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0SparseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0DenseSP() {
        runLinregCGTest(TEST_NAME1, false, false, 0, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0SparseSP() {
        runLinregCGTest(TEST_NAME1, false, true, 0, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1DenseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1SparseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1DenseCP() {
        runLinregCGTest(TEST_NAME1, false, false, 1, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1SparseCP() {
        runLinregCGTest(TEST_NAME1, false, true, 1, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1DenseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1SparseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1DenseSP() {
        runLinregCGTest(TEST_NAME1, false, false, 1, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG1SparseSP() {
        runLinregCGTest(TEST_NAME1, false, true, 1, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2DenseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2SparseRewritesCP() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2DenseCP() {
        runLinregCGTest(TEST_NAME1, false, false, 2, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2SparseCP() {
        runLinregCGTest(TEST_NAME1, false, true, 2, ExecType.CP, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2DenseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2SparseRewritesSP() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2DenseSP() {
        runLinregCGTest(TEST_NAME1, false, false, 2, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG2SparseSP() {
        runLinregCGTest(TEST_NAME1, false, true, 2, ExecType.SPARK, CodegenTestType.DEFAULT);
    }

    @Test
    public void testLinregCG0DenseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG0SparseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG0DenseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG0SparseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG1DenseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG1SparseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG1DenseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG1SparseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG2DenseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG2SparseRewritesCPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.CP, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG2DenseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG2SparseRewritesSPFuseAll() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.SPARK, CodegenTestType.FUSE_ALL);
    }

    @Test
    public void testLinregCG0DenseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG0SparseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG0DenseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 0, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG0SparseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 0, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG1DenseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG1SparseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG1DenseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 1, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG1SparseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 1, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG2DenseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG2SparseRewritesCPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.CP, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG2DenseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, false, 2, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    @Test
    public void testLinregCG2SparseRewritesSPFuseNoRedundancy() {
        runLinregCGTest(TEST_NAME1, true, true, 2, ExecType.SPARK, CodegenTestType.FUSE_NO_REDUNDANCY);
    }

    /**
     * Run one LinregCG configuration: generate random X/y, execute the DML
     * script and the R reference, compare the learned weights within eps,
     * and check that a spoof row-aggregate operator was compiled.
     *
     * Fix: the last parameter was named "CodegenTestType", shadowing its own
     * type name; renamed to codegenType for readability.
     *
     * @param testname    test configuration name
     * @param rewrites    enable algebraic simplification rewrites
     * @param sparse      use sparse (0.1) instead of dense (0.7) input X
     * @param intercept   icpt argument of LinearRegCG.dml (0, 1, or 2)
     * @param instType    CP or SPARK backend
     * @param codegenType codegen fusion heuristic to apply
     */
    private void runLinregCGTest( String testname, boolean rewrites, boolean sparse, int intercept, ExecType instType, CodegenTestType codegenType )
    {
        boolean oldFlag = OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION;
        ExecMode platformOld = rtplatform;
        switch( instType ){
            case SPARK: rtplatform = ExecMode.SPARK; break;
            default: rtplatform = ExecMode.HYBRID; break;
        }
        currentTestType = codegenType;

        boolean sparkConfigOld = DMLScript.USE_LOCAL_SPARK_CONFIG;
        if( rtplatform == ExecMode.SPARK || rtplatform == ExecMode.HYBRID )
            DMLScript.USE_LOCAL_SPARK_CONFIG = true;

        try
        {
            String TEST_NAME = testname;
            TestConfiguration config = getTestConfiguration(TEST_NAME);
            loadTestConfiguration(config);

            fullDMLScriptName = "scripts/algorithms/LinearRegCG.dml";
            programArgs = new String[]{ "-stats", "-nvargs", "X="+input("X"), "Y="+input("y"),
                "icpt="+String.valueOf(intercept), "tol="+String.valueOf(epsilon),
                "maxi="+String.valueOf(maxiter), "reg=0.001", "B="+output("w")};

            rCmd = getRCmd(inputDir(), String.valueOf(intercept),String.valueOf(epsilon),
                String.valueOf(maxiter), "0.001", expectedDir());

            OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = rewrites;

            //generate actual datasets
            double[][] X = getRandomMatrix(rows, cols, 0, 1, sparse?sparsity2:sparsity1, 7);
            writeInputMatrixWithMTD("X", X, true);
            double[][] y = getRandomMatrix(rows, 1, 0, 10, 1.0, 3);
            writeInputMatrixWithMTD("y", y, true);

            runTest(true, false, null, -1);
            runRScript(true);

            //compare matrices
            HashMap<CellIndex, Double> dmlfile = readDMLMatrixFromOutputDir("w");
            HashMap<CellIndex, Double> rfile = readRMatrixFromExpectedDir("w");
            TestUtils.compareMatrices(dmlfile, rfile, eps, "Stat-DML", "Stat-R");
            Assert.assertTrue(heavyHittersContainsSubString("spoofRA")
                || heavyHittersContainsSubString("sp_spoofRA"));
        }
        finally {
            rtplatform = platformOld;
            DMLScript.USE_LOCAL_SPARK_CONFIG = sparkConfigOld;
            OptimizerUtils.ALLOW_ALGEBRAIC_SIMPLIFICATION = oldFlag;
            OptimizerUtils.ALLOW_AUTO_VECTORIZATION = true;
            OptimizerUtils.ALLOW_OPERATOR_FUSION = true;
        }
    }

    /**
     * Override default configuration with custom test configuration to ensure
     * scratch space and local temporary directory locations are also updated.
     */
    @Override
    protected File getConfigTemplateFile() {
        return getCodegenConfigFile(SCRIPT_DIR + TEST_DIR, currentTestType);
    }
}
| |
/**
* Reads SQM File
*/
package org.arma.guillotine;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import java.util.ArrayList;
import org.apache.log4j.Logger;
/**
 * Parser for Arma SQM mission files. Builds an SQMClass tree for the
 * "Groups" and "Sensors" sections and populates Item/Vehicle/Waypoints/
 * Triggers objects from the "attribute=value;" lines.
 */
public class SQM {

    // parsed "Groups" tree
    private SQMClass units = new SQMClass("units", null);
    // parsed "Sensors" (triggers) tree
    private SQMClass triggers = new SQMClass("triggers", null);
    private ArrayList<Item> modules = new ArrayList<Item>();
    private static Logger logger = Logger.getLogger(SQM.class);
    private BufferedReader reader;
    private File source;
    private MissionTrimmer missionTrimmer;
    private ArrayList<String> publicNames = new ArrayList<String>();

    public ArrayList<String> getPublicNames() {
        return publicNames;
    }

    public SQMClass getRootType() {
        return units;
    }

    public SQMClass getTriggers() {
        return triggers;
    }

    public File getSource() {
        return source;
    }

    public ArrayList<Item> getModules() {
        return modules;
    }

    /**
     * Load and parse a mission file, filling the "units" and "triggers" trees.
     *
     * @param mission the mission.sqm file to read
     * @throws FileNotFoundException if the file cannot be opened
     */
    public void load(File mission) throws FileNotFoundException {
        //TODO: Integrate with SQMParser
        logger.debug("Loading SQM Mission: " + mission.getAbsolutePath());
        missionTrimmer = new MissionTrimmer(mission.getAbsolutePath());
        this.source = mission;
        reader = new BufferedReader(new FileReader(mission));
        String line;
        try {
            while ((line = reader.readLine()) != null) {
                String input = line.replaceAll("^\\s+", "");
                String type = null;
                if (input.startsWith("class")) {
                    // BUGFIX: split the trimmed line, not the raw one — a raw
                    // line with leading whitespace produced an empty first token
                    // and a wrong type. Also guard against a bare "class" line.
                    String[] spl = input.split(" ", 2);
                    if (spl.length > 1) {
                        type = spl[1];
                    }
                }
                if (type != null) {
                    if (type.equals("Groups")) {
                        logger.debug("Processing groups... ");
                        parse(line, units);
                        logger.debug("Groups processed. "
                                + units.getFullCount()
                                + " Groups processed.");
                    }
                    if (type.equals("Sensors")) {
                        logger.debug("Processing triggers... ");
                        parse(line, triggers);
                        logger.debug("triggers processed. "
                                + triggers.getFullCount()
                                + " triggers processed.");
                    }
                }
            }
        } catch (IOException e) {
            logger.error(e);
        }
        logger.debug("Loaded.");
    }

    public MissionTrimmer getMissionTrimmer() {
        return missionTrimmer;
    }

    /**
     * Recursive descent over "class X { ... }" blocks. For known node types
     * (Vehicles, Waypoints, Sensors, Item) a domain object is attached to the
     * SQMClass node and populated from the "attribute=value;" lines.
     *
     * @param input  current raw line (leading whitespace tolerated)
     * @param parent node whose body the line belongs to
     * @throws IOException on read errors
     */
    private void parse(String input, SQMClass parent) throws IOException {
        String line = input.replaceAll("^\\s+", "");
        if (line.startsWith("class")) {
            String[] spl = line.split(" ", 2);
            SQMClass typeClass = new SQMClass(spl[1], parent);
            parent.getChilds().add(typeClass);
            // NOTE(review): readLine() returns null at EOF, which would NPE
            // here on a truncated file; behaviour kept — assumes matched braces.
            while (!(line = reader.readLine().replaceAll("^\\s+", ""))
                    .startsWith("}")) {
                parse(line, typeClass);
            }
        }
        // (an empty no-op "Groups" check was removed here)
        if (parent.toString().startsWith("Vehicles")) {
            if (parent.getObject() == null) {
                parent.setObject(new Vehicle());
            }
            // inherit the side from the enclosing Item, if any
            SQMClass p = parent.getParent();
            if (p.toString().startsWith("Item")) {
                ((Vehicle) parent.getObject()).setSide(((Item) p.getObject()).getSide());
            }
        } else if (parent.toString().startsWith("Waypoints")) {
            if (parent.getObject() == null) {
                parent.setObject(new Waypoints());
            }
            SQMClass p = parent.getParent();
            if (p.toString().startsWith("Item")) {
                ((Waypoints) parent.getObject()).setSide(((Item) p.getObject()).getSide());
            }
        } else if (parent.toString().startsWith("Sensors")) {
            if (parent.getObject() == null) {
                parent.setObject(new Triggers());
            }
            SQMClass p = parent.getParent();
            if (p.toString().startsWith("Item")) {
                ((Triggers) parent.getObject()).setSide(((Item) p.getObject()).getSide());
            }
        } else if (parent.toString().startsWith("Item")) {
            if (parent.getObject() == null) {
                parent.setObject(new Item(parent.toString()));
            }
            Item item = (Item) parent.getObject();
            if (line.startsWith("position[]=")) {
                // position[]={x,z,y}; — note the y/z swap done here on purpose
                String[] tmp = line.split("=", 2);
                tmp = tmp[1].split(",", 3);
                String x = tmp[0].replaceAll("\\{", "");
                String z = tmp[1];
                String y = tmp[2].replaceAll("\\}\\;", "");
                item.setPosition(new Position(x, y, z));
            }
            if (line.startsWith("id=")) {
                item.setId(stripSemicolons(line));
            } else if (line.startsWith("side=")) {
                item.setSide(stripSemicolons(line));
            } else if (line.startsWith("vehicle=")) {
                item.setVehicle(stripSemicolons(line));
            } else if (line.startsWith("skill=")) {
                item.setSkill(stripSemicolons(line));
            } else if (line.startsWith("leader=")) {
                item.setLeader(stripSemicolons(line));
            } else if (line.startsWith("player=")) {
                item.setPlayer(stripSemicolons(line));
            } else if (line.startsWith("init=")) {
                // init keeps its raw value including the trailing ';' (original behaviour)
                item.setInit(rawValue(line));
            } else if (line.startsWith("name=")) {
                item.setName(rawValue(line).replaceAll("\"", "").replaceAll("\\;", ""));
            } else if (line.startsWith("markerType=")) {
                item.setMarkerType(stripSemicolons(line));
            } else if (line.startsWith("type=")) {
                item.setType(stripSemicolons(line));
            } else if (line.startsWith("rank=")) {
                item.setRank(stripSemicolons(line));
            } else if (line.startsWith("presenceCondition=")) {
                item.setPresenceCondition(stripSemicolons(line));
            } else if (line.startsWith("azimut=")) {
                item.setAzimut(stripSemicolons(line));
            } else if (line.startsWith("colorName=")) {
                item.setColorName(stripSemicolons(line));
            } else if (line.startsWith("fillName=")) {
                item.setFillName(stripSemicolons(line));
            } else if (line.startsWith("a=")) {
                item.setA(stripSemicolons(line));
            } else if (line.startsWith("b=")) {
                item.setB(stripSemicolons(line));
            } else if (line.startsWith("angle=")) {
                item.setAngle(stripSemicolons(line));
            } else if (line.startsWith("text=")) {
                item.setText(stripSemicolons(line));
            } else if (line.startsWith("rectangular=")) {
                item.setRectangular(stripSemicolons(line));
            } else if (line.startsWith("age=")) {
                item.setAge(stripSemicolons(line));
            } else if (line.startsWith("activationBy=")) {
                // "$"-anchored: only a trailing semicolon is stripped (original behaviour)
                item.setActivationBy(stripTrailingSemicolon(line));
            } else if (line.startsWith("expCond=")) {
                item.setExpCond(stripTrailingSemicolon(line));
            } else if (line.startsWith("expActiv=")) {
                item.setExpActiv(stripTrailingSemicolon(line));
            } else if (line.startsWith("expDesactiv=")) {
                item.setExpDesactiv(stripTrailingSemicolon(line));
            } else if (line.startsWith("interruptable=")) {
                item.setInterruptable(stripSemicolons(line));
            } else if (line.startsWith("activationType=")) {
                item.setActivationType(stripSemicolons(line));
            } else if (line.startsWith("timeoutMin=")) {
                item.setTimeoutMin(stripSemicolons(line));
            } else if (line.startsWith("timeoutMid=")) {
                item.setTimeoutMid(stripSemicolons(line));
            } else if (line.startsWith("timeoutMax=")) {
                item.setTimeoutMax(stripSemicolons(line));
            } else if (line.startsWith("placement=")) {
                item.setPlacement(stripSemicolons(line));
            } else if (line.startsWith("completionRadius=")) {
                item.setCompletionRadius(stripSemicolons(line));
            } else if (line.startsWith("combatMode=")) {
                item.setCombatMode(stripSemicolons(line));
            } else if (line.startsWith("formation=")) {
                item.setFormation(stripSemicolons(line));
            } else if (line.startsWith("speed=")) {
                item.setSpeed(stripSemicolons(line));
            } else if (line.startsWith("combat=")) {
                item.setCombat(stripSemicolons(line));
            } else if (line.startsWith("description=")) {
                item.setDescription(stripSemicolons(line));
            } else if (line.startsWith("showWP=")) {
                item.setShowWP(stripSemicolons(line));
            } else if (line.startsWith("synchronizations[]=")) {
                // the whole raw line is passed through (original behaviour)
                item.setSyncArray(line);
            }
        } else {
            // unsupported class — silently ignored
        }
    }

    /** Everything after the first '=' of an "attribute=value" line, unmodified. */
    private static String rawValue(String line) {
        return line.split("=", 2)[1];
    }

    /** Value of an "attribute=value;" line with ALL semicolons removed. */
    private static String stripSemicolons(String line) {
        return rawValue(line).replaceAll("\\;", "");
    }

    /** Value of an "attribute=value;" line with only a TRAILING semicolon removed. */
    private static String stripTrailingSemicolon(String line) {
        return rawValue(line).replaceAll("\\;$", "");
    }
}
| |
/*
* The MIT License (MIT)
*
* Copyright (c) 2015 The MsgCodec Authors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
package com.cinnober.msgcodec;
import com.cinnober.msgcodec.anot.Enumeration;
import com.cinnober.msgcodec.anot.Id;
import com.cinnober.msgcodec.anot.Sequence;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.*;
/**
* @author mikael.brannstrom
* @author Tommy Norling
*
*/
public class SchemaBuilderTest {

    /** Builds a schema for a single message class and prints it. */
    @Test
    public void testFoo() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(FooMessage.class);
        System.out.println(schema.toString());
    }

    /** Same as {@link #testFoo()} but via the incremental addMessages/build API. */
    @Test
    public void testFooWithAddMessageStep() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.addMessages(FooMessage.class).build();
        System.out.println(schema.toString());
    }

    @Test
    public void testBar() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(BarMessage.class, Thing.class);
        System.out.println(schema.toString());
    }

    /** Messages can be added across multiple addMessages calls before build. */
    @Test
    public void testFooBarWithAddMessageStep() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.addMessages(BarMessage.class, Thing.class)
                .addMessages(FooMessage.class).build();
        System.out.println(schema.toString());
    }

    @Test
    public void testFooBar() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(FooMessage.class, BarMessage.class, Thing.class);
        System.out.println(schema.toString());
    }

    /** Test of generic class parameters, as well as recursive add of referred components. */
    @Test
    public void testWrappedFoo() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(WrappedFoo.class);
        final GroupDef groupDef = schema.getGroup("WrappedFoo");
        assertEquals("FooMessage", groupDef.getField("wrapped").getType().toString());
        assertEquals("FooMessage[]", groupDef.getField("wrappedArray").getType().toString());
        assertEquals("FooMessage[]", groupDef.getField("wrappedList").getType().toString());
    }

    /** Test of generic class parameters, as well as recursive add of referred components. */
    @Test
    public void testWrappedWrappedFoo() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(WrappedWrappedFoo.class);
        final GroupDef groupDef = schema.getGroup("WrappedWrappedFoo");
        assertEquals("FooMessage", groupDef.getField("wrapped").getType().toString());
        assertEquals("FooMessage[]", groupDef.getField("wrappedArray").getType().toString());
        assertEquals("FooMessage[]", groupDef.getField("wrappedList").getType().toString());
    }

    /** Test of generic class parameters, as well as recursive add of referred components. */
    @Test
    public void testArrayOnlyWrappedFoo() {
        SchemaBuilder builder = new SchemaBuilder();
        // fixed: local was previously misspelled "schmea"
        Schema schema = builder.build(ArrayOnlyWrappedFoo.class);
        final GroupDef groupDef = schema.getGroup("ArrayOnlyWrappedFoo");
        assertEquals("FooMessage[]", groupDef.getField("wrappedArray").getType().toString());
    }

    /** Test of generic class parameters, as well as recursive add of referred components. */
    @Test
    public void testListOnlyWrappedFoo() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(ListOnlyWrappedFoo.class);
        final GroupDef groupDef = schema.getGroup("ListOnlyWrappedFoo");
        assertEquals("FooMessage[]", groupDef.getField("wrappedList").getType().toString());
    }

    /** Fields must be ordered by their @Id values, not declaration order. */
    @Test
    public void testFieldOrder() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(FieldOrderMsg.class);
        System.out.println(schema.toString());
        GroupDef group = schema.getGroup(FieldOrderMsg.class);
        List<FieldDef> fields = group.getFields();
        assertEquals("i1", fields.get(0).getName());
        assertEquals("i4", fields.get(1).getName());
        assertEquals("i2", fields.get(2).getName());
        assertEquals("i5", fields.get(3).getName());
        assertEquals("i3", fields.get(4).getName());
        assertEquals("i6", fields.get(5).getName());
    }

    /** A registered annotation mapper is applied to both groups and fields. */
    @Test
    public void testAnnotationMapper() {
        SchemaBuilder builder = new SchemaBuilder();
        builder.addAnnotationMapper(CustomAnnotation.class, a -> "custom=" + a.value());
        Schema schema = builder.build(FooMessage.class);
        System.out.println("annotationMapper: \n" + schema.toString());
        assertEquals("FooMessage", schema.getGroup("FooMessage").getAnnotation("custom"));
        assertEquals("myByte", schema.getGroup("FooMessage").getField("myByte").getAnnotation("custom"));
    }

    /** A private default constructor is still usable through the group factory. */
    @Test
    public void testPrivate() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(SecretMessage.class);
        System.out.println(schema.toString());
        assertNotNull(schema.getGroup("SecretMessage").getFactory().newInstance());
    }

    /** A message class without any default constructor must be rejected. */
    @Test
    public void testPrivate2() {
        SchemaBuilder builder = new SchemaBuilder();
        try {
            builder.build(SecretMessage2.class);
            fail("Expected exception: no default constructor");
        } catch (IllegalArgumentException e) {
            // expected: SecretMessage2 has no default constructor
        }
    }

    /** Enum-valued fields (plain, @Enumeration int, sequence, array) get identity symbol mappings. */
    @Test
    public void testSymbolMapping() {
        SchemaBuilder builder = new SchemaBuilder();
        Schema schema = builder.build(EnumMessage.class);
        GroupDef group = schema.getGroup(1);
        SymbolMapping<TestEnum> enumMapping = new SymbolMapping.IdentityEnumMapping<>(TestEnum.class);
        SymbolMapping<Integer> intMapping =
                new SymbolMapping.IdentityIntegerEnumMapping(
                        (TypeDef.Enum) schema.getNamedTypes().stream()
                                .filter(t -> t.getName().equals("TestEnum")).findAny().get().getType());
        assertNull("No mapping unless an enum", group.getField("notEnum").getBinding().getSymbolMapping());
        assertEquals("Enum has identity mapping", enumMapping, group.getField("e1").getBinding().getSymbolMapping());
        assertEquals("Integer enum has identity mapping",
                intMapping, group.getField("e2").getBinding().getSymbolMapping());
        assertEquals("Enum list has identity mapping",
                enumMapping, group.getField("e3").getBinding().getSymbolMapping());
        assertEquals("Enum array has identity mapping",
                enumMapping, group.getField("e4").getBinding().getSymbolMapping());
    }

    /** Fixture: fields declared out of order relative to their ids. */
    public static class FieldOrderMsg extends MsgObject {
        @Id(1)
        public int i1;
        @Id(3)
        public int i2;
        @Id(5)
        public int i3;
        @Id(2)
        public int i4;
        @Id(4)
        public int i5;
        @Id(6)
        public int i6;
    }

    /** Fixture enum used by {@link EnumMessage}. */
    public enum TestEnum {
        V1,
        V2,
        V3
    }

    /** Fixture: one field per enum binding flavor. */
    @Id(1)
    public static class EnumMessage {
        public int notEnum;
        public TestEnum e1;
        @Enumeration(TestEnum.class)
        public int e2;
        @Sequence(TestEnum.class)
        public List<TestEnum> e3;
        public TestEnum[] e4;
    }
}
| |
/*******************************************************************************
* Copyright 2011 See AUTHORS file.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package com.badlogic.gdx.math;
import java.util.Random;
/** Utility and fast math functions.
* <p>
* Thanks to Riven on JavaGaming.org for the basis of sin/cos/floor/ceil.
* @author Nathan Sweet */
/** Utility and fast math functions.
 * <p>
 * Thanks to Riven on JavaGaming.org for the basis of sin/cos/floor/ceil.
 * @author Nathan Sweet */
public final class MathUtils {
	static public final float nanoToSec = 1 / 1000000000f;

	// ---
	static public final float FLOAT_ROUNDING_ERROR = 0.000001f; // 32 bits
	static public final float PI = 3.1415927f;
	static public final float PI2 = PI * 2;
	static public final float E = 2.7182818f;

	static private final int SIN_BITS = 14; // 16KB. Adjust for accuracy.
	static private final int SIN_MASK = ~(-1 << SIN_BITS);
	static private final int SIN_COUNT = SIN_MASK + 1;

	static private final float radFull = PI * 2;
	static private final float degFull = 360;
	static private final float radToIndex = SIN_COUNT / radFull;
	static private final float degToIndex = SIN_COUNT / degFull;

	/** multiply by this to convert from radians to degrees */
	static public final float radiansToDegrees = 180f / PI;
	static public final float radDeg = radiansToDegrees;
	/** multiply by this to convert from degrees to radians */
	static public final float degreesToRadians = PI / 180;
	static public final float degRad = degreesToRadians;

	/** Lazily-initialized sine lookup table (holder idiom: built on first trig call). */
	static private class Sin {
		static final float[] table = new float[SIN_COUNT];
		static {
			for (int i = 0; i < SIN_COUNT; i++)
				table[i] = (float)Math.sin((i + 0.5f) / SIN_COUNT * radFull);
			// Pin the quadrant boundaries (0, 90, 180, 270 degrees) to their exact values.
			for (int i = 0; i < 360; i += 90)
				table[(int)(i * degToIndex) & SIN_MASK] = (float)Math.sin(i * degreesToRadians);
		}
	}

	/** Returns the sine of the given angle in radians, from a lookup table. */
	static public float sin (float radians) {
		return Sin.table[(int)(radians * radToIndex) & SIN_MASK];
	}

	/** Returns the cosine of the given angle in radians, from a lookup table. */
	static public float cos (float radians) {
		return Sin.table[(int)((radians + PI / 2) * radToIndex) & SIN_MASK];
	}

	/** Returns the sine of the given angle in degrees, from a lookup table. */
	static public float sinDeg (float degrees) {
		return Sin.table[(int)(degrees * degToIndex) & SIN_MASK];
	}

	/** Returns the cosine of the given angle in degrees, from a lookup table. */
	static public float cosDeg (float degrees) {
		return Sin.table[(int)((degrees + 90) * degToIndex) & SIN_MASK];
	}

	// ---

	/** Returns atan2 in radians, faster but less accurate than Math.atan2. Average error of 0.00231 radians (0.1323 degrees),
	 * largest error of 0.00488 radians (0.2796 degrees). */
	static public float atan2 (float y, float x) {
		if (x == 0f) {
			if (y > 0f) return PI / 2;
			if (y == 0f) return 0f;
			return -PI / 2;
		}
		final float atan, z = y / x;
		if (Math.abs(z) < 1f) {
			atan = z / (1f + 0.28f * z * z);
			if (x < 0f) return atan + (y < 0f ? -PI : PI);
			return atan;
		}
		atan = PI / 2 - z / (z * z + 0.28f);
		return y < 0f ? atan - PI : atan;
	}

	// ---

	static public Random random = new Random();

	/** Returns a random number between 0 (inclusive) and the specified value (inclusive). */
	static public int random (int range) {
		return random.nextInt(range + 1);
	}

	/** Returns a random number between start (inclusive) and end (inclusive). */
	static public int random (int start, int end) {
		return start + random.nextInt(end - start + 1);
	}

	/** Returns a random number between 0 (inclusive) and the specified value. Note: unlike the int overload, the upper bound is
	 * effectively exclusive here, because {@code nextDouble()} is always strictly less than 1. */
	static public long random (long range) {
		return (long)(random.nextDouble() * range);
	}

	/** Returns a random number between start (inclusive) and end. Note: unlike the int overload, {@code end} is effectively
	 * exclusive here, because {@code nextDouble()} is always strictly less than 1. */
	static public long random (long start, long end) {
		return start + (long)(random.nextDouble() * (end - start));
	}

	/** Returns a random boolean value. */
	static public boolean randomBoolean () {
		return random.nextBoolean();
	}

	/** Returns true if a random value between 0 and 1 is less than the specified value. */
	static public boolean randomBoolean (float chance) {
		return MathUtils.random() < chance;
	}

	/** Returns random number between 0.0 (inclusive) and 1.0 (exclusive). */
	static public float random () {
		return random.nextFloat();
	}

	/** Returns a random number between 0 (inclusive) and the specified value (exclusive). */
	static public float random (float range) {
		return random.nextFloat() * range;
	}

	/** Returns a random number between start (inclusive) and end (exclusive). */
	static public float random (float start, float end) {
		return start + random.nextFloat() * (end - start);
	}

	/** Returns -1 or 1, randomly. */
	static public int randomSign () {
		// Sign bit of a random int, arithmetic-shifted to 0 or -1, then OR 1 -> +1 or -1.
		return 1 | (random.nextInt() >> 31);
	}

	/** Returns a triangularly distributed random number between -1.0 (exclusive) and 1.0 (exclusive), where values around zero are
	 * more likely.
	 * <p>
	 * This is an optimized version of {@link #randomTriangular(float, float, float) randomTriangular(-1, 1, 0)} */
	public static float randomTriangular () {
		return random.nextFloat() - random.nextFloat();
	}

	/** Returns a triangularly distributed random number between {@code -max} (exclusive) and {@code max} (exclusive), where values
	 * around zero are more likely.
	 * <p>
	 * This is an optimized version of {@link #randomTriangular(float, float, float) randomTriangular(-max, max, 0)}
	 * @param max the upper limit */
	public static float randomTriangular (float max) {
		return (random.nextFloat() - random.nextFloat()) * max;
	}

	/** Returns a triangularly distributed random number between {@code min} (inclusive) and {@code max} (exclusive), where the
	 * {@code mode} argument defaults to the midpoint between the bounds, giving a symmetric distribution.
	 * <p>
	 * This method is equivalent of {@link #randomTriangular(float, float, float) randomTriangular(min, max, (min + max) * .5f)}
	 * @param min the lower limit
	 * @param max the upper limit */
	public static float randomTriangular (float min, float max) {
		return randomTriangular(min, max, (min + max) * 0.5f);
	}

	/** Returns a triangularly distributed random number between {@code min} (inclusive) and {@code max} (exclusive), where values
	 * around {@code mode} are more likely.
	 * @param min the lower limit
	 * @param max the upper limit
	 * @param mode the point around which the values are more likely */
	public static float randomTriangular (float min, float max, float mode) {
		float u = random.nextFloat();
		float d = max - min;
		// Inverse transform sampling of the triangular CDF.
		if (u <= (mode - min) / d) return min + (float)Math.sqrt(u * d * (mode - min));
		return max - (float)Math.sqrt((1 - u) * d * (max - mode));
	}

	// ---

	/** Returns the next power of two. Returns the specified value if the value is already a power of two. */
	static public int nextPowerOfTwo (int value) {
		if (value == 0) return 1;
		value--;
		value |= value >> 1;
		value |= value >> 2;
		value |= value >> 4;
		value |= value >> 8;
		value |= value >> 16;
		return value + 1;
	}

	/** Returns true if the value is a power of two. Always false for zero and negative values.
	 * <p>
	 * Fixed: previously {@code Integer.MIN_VALUE} wrongly reported true, since {@code value & (value - 1)} is also 0 for it. */
	static public boolean isPowerOfTwo (int value) {
		return value > 0 && (value & value - 1) == 0;
	}

	// ---

	static public short clamp (short value, short min, short max) {
		if (value < min) return min;
		if (value > max) return max;
		return value;
	}

	static public int clamp (int value, int min, int max) {
		if (value < min) return min;
		if (value > max) return max;
		return value;
	}

	static public long clamp (long value, long min, long max) {
		if (value < min) return min;
		if (value > max) return max;
		return value;
	}

	static public float clamp (float value, float min, float max) {
		if (value < min) return min;
		if (value > max) return max;
		return value;
	}

	static public double clamp (double value, double min, double max) {
		if (value < min) return min;
		if (value > max) return max;
		return value;
	}

	// ---

	/** Linearly interpolates between fromValue to toValue on progress position. */
	static public float lerp (float fromValue, float toValue, float progress) {
		return fromValue + (toValue - fromValue) * progress;
	}

	/** Linearly interpolates between two angles in radians. Takes into account that angles wrap at two pi and always takes the
	 * direction with the smallest delta angle.
	 *
	 * @param fromRadians start angle in radians
	 * @param toRadians target angle in radians
	 * @param progress interpolation value in the range [0, 1]
	 * @return the interpolated angle in the range [0, PI2[ */
	public static float lerpAngle (float fromRadians, float toRadians, float progress) {
		float delta = ((toRadians - fromRadians + PI2 + PI) % PI2) - PI;
		return (fromRadians + delta * progress + PI2) % PI2;
	}

	/** Linearly interpolates between two angles in degrees. Takes into account that angles wrap at 360 degrees and always takes
	 * the direction with the smallest delta angle.
	 *
	 * @param fromDegrees start angle in degrees
	 * @param toDegrees target angle in degrees
	 * @param progress interpolation value in the range [0, 1]
	 * @return the interpolated angle in the range [0, 360[ */
	public static float lerpAngleDeg (float fromDegrees, float toDegrees, float progress) {
		float delta = ((toDegrees - fromDegrees + 360 + 180) % 360) - 180;
		return (fromDegrees + delta * progress + 360) % 360;
	}

	// ---

	static private final int BIG_ENOUGH_INT = 16 * 1024;
	static private final double BIG_ENOUGH_FLOOR = BIG_ENOUGH_INT;
	static private final double CEIL = 0.9999999;
	static private final double BIG_ENOUGH_CEIL = 16384.999999999996;
	static private final double BIG_ENOUGH_ROUND = BIG_ENOUGH_INT + 0.5f;

	/** Returns the largest integer less than or equal to the specified float. This method will only properly floor floats from
	 * -(2^14) to (Float.MAX_VALUE - 2^14). */
	static public int floor (float value) {
		// Shift into positive range so the (int) cast truncates toward the floor, then shift back.
		return (int)(value + BIG_ENOUGH_FLOOR) - BIG_ENOUGH_INT;
	}

	/** Returns the largest integer less than or equal to the specified float. This method will only properly floor floats that are
	 * positive. Note this method simply casts the float to int. */
	static public int floorPositive (float value) {
		return (int)value;
	}

	/** Returns the smallest integer greater than or equal to the specified float. This method will only properly ceil floats from
	 * -(2^14) to (Float.MAX_VALUE - 2^14). */
	static public int ceil (float value) {
		return BIG_ENOUGH_INT - (int)(BIG_ENOUGH_FLOOR - value);
	}

	/** Returns the smallest integer greater than or equal to the specified float. This method will only properly ceil floats that
	 * are positive. */
	static public int ceilPositive (float value) {
		return (int)(value + CEIL);
	}

	/** Returns the closest integer to the specified float. This method will only properly round floats from -(2^14) to
	 * (Float.MAX_VALUE - 2^14). */
	static public int round (float value) {
		return (int)(value + BIG_ENOUGH_ROUND) - BIG_ENOUGH_INT;
	}

	/** Returns the closest integer to the specified float. This method will only properly round floats that are positive. */
	static public int roundPositive (float value) {
		return (int)(value + 0.5f);
	}

	/** Returns true if the value is zero (using the default tolerance as upper bound) */
	static public boolean isZero (float value) {
		return Math.abs(value) <= FLOAT_ROUNDING_ERROR;
	}

	/** Returns true if the value is zero.
	 * @param tolerance represent an upper bound below which the value is considered zero. */
	static public boolean isZero (float value, float tolerance) {
		return Math.abs(value) <= tolerance;
	}

	/** Returns true if a is nearly equal to b. The function uses the default floating error tolerance.
	 * @param a the first value.
	 * @param b the second value. */
	static public boolean isEqual (float a, float b) {
		return Math.abs(a - b) <= FLOAT_ROUNDING_ERROR;
	}

	/** Returns true if a is nearly equal to b.
	 * @param a the first value.
	 * @param b the second value.
	 * @param tolerance represent an upper bound below which the two values are considered equal. */
	static public boolean isEqual (float a, float b, float tolerance) {
		return Math.abs(a - b) <= tolerance;
	}

	/** @return the logarithm of value with base a */
	static public float log (float a, float value) {
		return (float)(Math.log(value) / Math.log(a));
	}

	/** @return the logarithm of value with base 2 */
	static public float log2 (float value) {
		return log(2, value);
	}
}
| |
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.cognitoidp.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents the request to register a user.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/cognito-idp-2016-04-18/SignUp" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class SignUpRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {
/**
* <p>
* The ID of the client associated with the user pool.
* </p>
*/
private String clientId;
/**
* <p>
* A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
* username plus the client ID in the message.
* </p>
*/
private String secretHash;
/**
* <p>
* The user name of the user you wish to register.
* </p>
*/
private String username;
/**
* <p>
* The password of the user you wish to register.
* </p>
*/
private String password;
/**
* <p>
* An array of name-value pairs representing user attributes.
* </p>
* <p>
* For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
* </p>
*/
private java.util.List<AttributeType> userAttributes;
/**
* <p>
* The validation data in the request to register a user.
* </p>
*/
private java.util.List<AttributeType> validationData;
/**
* <p>
* The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
* </p>
*/
private AnalyticsMetadataType analyticsMetadata;
/**
* <p>
* Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
* unexpected event by Amazon Cognito advanced security.
* </p>
*/
private UserContextDataType userContextData;
/**
 * <p>
 * The ID of the client associated with the user pool.
 * </p>
 *
 * @param clientId
 *        The ID of the client associated with the user pool.
 */
public void setClientId(String clientId) {
    this.clientId = clientId;
}

/**
 * <p>
 * The ID of the client associated with the user pool.
 * </p>
 *
 * @return The ID of the client associated with the user pool.
 */
public String getClientId() {
    return clientId;
}

/**
 * <p>
 * The ID of the client associated with the user pool.
 * </p>
 *
 * @param clientId
 *        The ID of the client associated with the user pool.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withClientId(String clientId) {
    this.clientId = clientId;
    return this;
}
/**
 * <p>
 * A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 * username plus the client ID in the message.
 * </p>
 *
 * @param secretHash
 *        A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 *        username plus the client ID in the message.
 */
public void setSecretHash(String secretHash) {
    this.secretHash = secretHash;
}

/**
 * <p>
 * A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 * username plus the client ID in the message.
 * </p>
 *
 * @return A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 *         username plus the client ID in the message.
 */
public String getSecretHash() {
    return secretHash;
}

/**
 * <p>
 * A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 * username plus the client ID in the message.
 * </p>
 *
 * @param secretHash
 *        A keyed-hash message authentication code (HMAC) calculated using the secret key of a user pool client and
 *        username plus the client ID in the message.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withSecretHash(String secretHash) {
    this.secretHash = secretHash;
    return this;
}
/**
 * <p>
 * The user name of the user you wish to register.
 * </p>
 *
 * @param username
 *        The user name of the user you wish to register.
 */
public void setUsername(String username) {
    this.username = username;
}

/**
 * <p>
 * The user name of the user you wish to register.
 * </p>
 *
 * @return The user name of the user you wish to register.
 */
public String getUsername() {
    return username;
}

/**
 * <p>
 * The user name of the user you wish to register.
 * </p>
 *
 * @param username
 *        The user name of the user you wish to register.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withUsername(String username) {
    this.username = username;
    return this;
}
/**
 * <p>
 * The password of the user you wish to register.
 * </p>
 *
 * @param password
 *        The password of the user you wish to register.
 */
public void setPassword(String password) {
    this.password = password;
}

/**
 * <p>
 * The password of the user you wish to register.
 * </p>
 *
 * @return The password of the user you wish to register.
 */
public String getPassword() {
    return password;
}

/**
 * <p>
 * The password of the user you wish to register.
 * </p>
 *
 * @param password
 *        The password of the user you wish to register.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withPassword(String password) {
    this.password = password;
    return this;
}
/**
 * <p>
 * An array of name-value pairs representing user attributes.
 * </p>
 * <p>
 * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * </p>
 *
 * @return An array of name-value pairs representing user attributes.</p>
 *         <p>
 *         For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 */
public java.util.List<AttributeType> getUserAttributes() {
    return this.userAttributes;
}

/**
 * <p>
 * An array of name-value pairs representing user attributes.
 * </p>
 * <p>
 * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * </p>
 *
 * @param userAttributes
 *        An array of name-value pairs representing user attributes.</p>
 *        <p>
 *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 */
public void setUserAttributes(java.util.Collection<AttributeType> userAttributes) {
    // Defensive copy; a null argument clears the field.
    this.userAttributes = (userAttributes == null) ? null : new java.util.ArrayList<AttributeType>(userAttributes);
}

/**
 * <p>
 * An array of name-value pairs representing user attributes.
 * </p>
 * <p>
 * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * </p>
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setUserAttributes(java.util.Collection)} or {@link #withUserAttributes(java.util.Collection)} if you want
 * to override the existing values.
 * </p>
 *
 * @param userAttributes
 *        An array of name-value pairs representing user attributes.</p>
 *        <p>
 *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withUserAttributes(AttributeType... userAttributes) {
    if (this.userAttributes == null) {
        this.userAttributes = new java.util.ArrayList<AttributeType>(userAttributes.length);
    }
    java.util.Collections.addAll(this.userAttributes, userAttributes);
    return this;
}

/**
 * <p>
 * An array of name-value pairs representing user attributes.
 * </p>
 * <p>
 * For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * </p>
 *
 * @param userAttributes
 *        An array of name-value pairs representing user attributes.</p>
 *        <p>
 *        For custom attributes, you must prepend the <code>custom:</code> prefix to the attribute name.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withUserAttributes(java.util.Collection<AttributeType> userAttributes) {
    setUserAttributes(userAttributes);
    return this;
}
/**
 * <p>
 * The validation data in the request to register a user.
 * </p>
 *
 * @return The validation data in the request to register a user.
 */
public java.util.List<AttributeType> getValidationData() {
    return this.validationData;
}

/**
 * <p>
 * The validation data in the request to register a user.
 * </p>
 *
 * @param validationData
 *        The validation data in the request to register a user.
 */
public void setValidationData(java.util.Collection<AttributeType> validationData) {
    // Defensive copy; a null argument clears the field.
    this.validationData = (validationData == null) ? null : new java.util.ArrayList<AttributeType>(validationData);
}

/**
 * <p>
 * The validation data in the request to register a user.
 * </p>
 * <p>
 * <b>NOTE:</b> This method appends the values to the existing list (if any). Use
 * {@link #setValidationData(java.util.Collection)} or {@link #withValidationData(java.util.Collection)} if you want
 * to override the existing values.
 * </p>
 *
 * @param validationData
 *        The validation data in the request to register a user.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withValidationData(AttributeType... validationData) {
    if (this.validationData == null) {
        this.validationData = new java.util.ArrayList<AttributeType>(validationData.length);
    }
    java.util.Collections.addAll(this.validationData, validationData);
    return this;
}

/**
 * <p>
 * The validation data in the request to register a user.
 * </p>
 *
 * @param validationData
 *        The validation data in the request to register a user.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withValidationData(java.util.Collection<AttributeType> validationData) {
    setValidationData(validationData);
    return this;
}
/**
 * <p>
 * The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 * </p>
 *
 * @param analyticsMetadata
 *        The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 */
public void setAnalyticsMetadata(AnalyticsMetadataType analyticsMetadata) {
    this.analyticsMetadata = analyticsMetadata;
}

/**
 * <p>
 * The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 * </p>
 *
 * @return The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 */
public AnalyticsMetadataType getAnalyticsMetadata() {
    return analyticsMetadata;
}

/**
 * <p>
 * The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 * </p>
 *
 * @param analyticsMetadata
 *        The Amazon Pinpoint analytics metadata for collecting metrics for <code>SignUp</code> calls.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withAnalyticsMetadata(AnalyticsMetadataType analyticsMetadata) {
    this.analyticsMetadata = analyticsMetadata;
    return this;
}
/**
 * <p>
 * Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
 * unexpected event by Amazon Cognito advanced security.
 * </p>
 *
 * @param userContextData
 *        Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
 *        risk of an unexpected event by Amazon Cognito advanced security.
 */
public void setUserContextData(UserContextDataType userContextData) {
    this.userContextData = userContextData;
}

/**
 * <p>
 * Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
 * unexpected event by Amazon Cognito advanced security.
 * </p>
 *
 * @return Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
 *         risk of an unexpected event by Amazon Cognito advanced security.
 */
public UserContextDataType getUserContextData() {
    return userContextData;
}

/**
 * <p>
 * Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the risk of an
 * unexpected event by Amazon Cognito advanced security.
 * </p>
 *
 * @param userContextData
 *        Contextual data such as the user's device fingerprint, IP address, or location used for evaluating the
 *        risk of an unexpected event by Amazon Cognito advanced security.
 * @return Returns a reference to this object so that method calls can be chained together.
 */
public SignUpRequest withUserContextData(UserContextDataType userContextData) {
    this.userContextData = userContextData;
    return this;
}
/**
* Returns a string representation of this object. This is useful for testing and debugging. Sensitive data will be
* redacted from this string using a placeholder value.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getClientId() != null)
sb.append("ClientId: ").append("***Sensitive Data Redacted***").append(",");
if (getSecretHash() != null)
sb.append("SecretHash: ").append("***Sensitive Data Redacted***").append(",");
if (getUsername() != null)
sb.append("Username: ").append("***Sensitive Data Redacted***").append(",");
if (getPassword() != null)
sb.append("Password: ").append("***Sensitive Data Redacted***").append(",");
if (getUserAttributes() != null)
sb.append("UserAttributes: ").append(getUserAttributes()).append(",");
if (getValidationData() != null)
sb.append("ValidationData: ").append(getValidationData()).append(",");
if (getAnalyticsMetadata() != null)
sb.append("AnalyticsMetadata: ").append(getAnalyticsMetadata()).append(",");
if (getUserContextData() != null)
sb.append("UserContextData: ").append(getUserContextData());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof SignUpRequest == false)
return false;
SignUpRequest other = (SignUpRequest) obj;
if (other.getClientId() == null ^ this.getClientId() == null)
return false;
if (other.getClientId() != null && other.getClientId().equals(this.getClientId()) == false)
return false;
if (other.getSecretHash() == null ^ this.getSecretHash() == null)
return false;
if (other.getSecretHash() != null && other.getSecretHash().equals(this.getSecretHash()) == false)
return false;
if (other.getUsername() == null ^ this.getUsername() == null)
return false;
if (other.getUsername() != null && other.getUsername().equals(this.getUsername()) == false)
return false;
if (other.getPassword() == null ^ this.getPassword() == null)
return false;
if (other.getPassword() != null && other.getPassword().equals(this.getPassword()) == false)
return false;
if (other.getUserAttributes() == null ^ this.getUserAttributes() == null)
return false;
if (other.getUserAttributes() != null && other.getUserAttributes().equals(this.getUserAttributes()) == false)
return false;
if (other.getValidationData() == null ^ this.getValidationData() == null)
return false;
if (other.getValidationData() != null && other.getValidationData().equals(this.getValidationData()) == false)
return false;
if (other.getAnalyticsMetadata() == null ^ this.getAnalyticsMetadata() == null)
return false;
if (other.getAnalyticsMetadata() != null && other.getAnalyticsMetadata().equals(this.getAnalyticsMetadata()) == false)
return false;
if (other.getUserContextData() == null ^ this.getUserContextData() == null)
return false;
if (other.getUserContextData() != null && other.getUserContextData().equals(this.getUserContextData()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode + ((getClientId() == null) ? 0 : getClientId().hashCode());
hashCode = prime * hashCode + ((getSecretHash() == null) ? 0 : getSecretHash().hashCode());
hashCode = prime * hashCode + ((getUsername() == null) ? 0 : getUsername().hashCode());
hashCode = prime * hashCode + ((getPassword() == null) ? 0 : getPassword().hashCode());
hashCode = prime * hashCode + ((getUserAttributes() == null) ? 0 : getUserAttributes().hashCode());
hashCode = prime * hashCode + ((getValidationData() == null) ? 0 : getValidationData().hashCode());
hashCode = prime * hashCode + ((getAnalyticsMetadata() == null) ? 0 : getAnalyticsMetadata().hashCode());
hashCode = prime * hashCode + ((getUserContextData() == null) ? 0 : getUserContextData().hashCode());
return hashCode;
}
    /** Returns a copy of this request produced by {@code super.clone()}. */
    @Override
    public SignUpRequest clone() {
        return (SignUpRequest) super.clone();
    }
}
| |
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License
* 2.0 and the Server Side Public License, v 1; you may not use this file except
* in compliance with, at your election, the Elastic License 2.0 or the Server
* Side Public License, v 1.
*/
package org.elasticsearch.benchmark.search.aggregations;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.MatchAllDocsQuery;
import org.apache.lucene.search.Query;
import org.elasticsearch.common.breaker.CircuitBreaker;
import org.elasticsearch.common.breaker.PreallocatedCircuitBreakerService;
import org.elasticsearch.common.lease.Releasable;
import org.elasticsearch.common.lease.Releasables;
import org.elasticsearch.common.settings.ClusterSettings;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.BigArrays;
import org.elasticsearch.common.util.PageCacheRecycler;
import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.NamedAnalyzer;
import org.elasticsearch.index.cache.bitset.BitsetFilterCache;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexFieldDataCache;
import org.elasticsearch.index.mapper.MappedFieldType;
import org.elasticsearch.index.mapper.NumberFieldMapper;
import org.elasticsearch.index.mapper.NumberFieldMapper.NumberType;
import org.elasticsearch.index.mapper.ObjectMapper;
import org.elasticsearch.index.query.QueryBuilder;
import org.elasticsearch.index.query.support.NestedScope;
import org.elasticsearch.indices.breaker.CircuitBreakerService;
import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
import org.elasticsearch.indices.breaker.NoneCircuitBreakerService;
import org.elasticsearch.indices.fielddata.cache.IndicesFieldDataCache;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.search.SearchModule;
import org.elasticsearch.search.aggregations.Aggregator;
import org.elasticsearch.search.aggregations.AggregatorFactories;
import org.elasticsearch.search.aggregations.MultiBucketConsumerService.MultiBucketConsumer;
import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder;
import org.elasticsearch.search.aggregations.metrics.SumAggregationBuilder;
import org.elasticsearch.search.aggregations.support.AggregationContext;
import org.elasticsearch.search.aggregations.support.ValuesSourceRegistry;
import org.elasticsearch.search.internal.SubSearchContext;
import org.elasticsearch.search.lookup.SearchLookup;
import org.elasticsearch.search.sort.BucketedSort;
import org.elasticsearch.search.sort.BucketedSort.ExtraData;
import org.elasticsearch.search.sort.SortAndFormats;
import org.elasticsearch.search.sort.SortBuilder;
import org.openjdk.jmh.annotations.Benchmark;
import org.openjdk.jmh.annotations.BenchmarkMode;
import org.openjdk.jmh.annotations.Fork;
import org.openjdk.jmh.annotations.Measurement;
import org.openjdk.jmh.annotations.Mode;
import org.openjdk.jmh.annotations.OutputTimeUnit;
import org.openjdk.jmh.annotations.Param;
import org.openjdk.jmh.annotations.Scope;
import org.openjdk.jmh.annotations.Setup;
import org.openjdk.jmh.annotations.State;
import org.openjdk.jmh.annotations.Threads;
import org.openjdk.jmh.annotations.Warmup;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.function.Function;
/**
 * Benchmarks the overhead of constructing {@link Aggregator}s in many
 * parallel threads. Machines with different numbers of cores will see
 * wildly different results running this, with more cores seeing more
 * benefits from preallocation.
 */
@Fork(2)
@Warmup(iterations = 10)
@Measurement(iterations = 5)
@BenchmarkMode(Mode.AverageTime)
@OutputTimeUnit(TimeUnit.MICROSECONDS)
@State(Scope.Benchmark)
@Threads(Threads.MAX)
public class AggConstructionContentionBenchmark {
    private final SearchModule searchModule = new SearchModule(Settings.EMPTY, List.of());
    private final ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
    private final PageCacheRecycler recycler = new PageCacheRecycler(Settings.EMPTY);
    private final Index index = new Index("test", "uuid");
    private final IndicesFieldDataCache indicesFieldDataCache = new IndicesFieldDataCache(
        Settings.EMPTY,
        new IndexFieldDataCache.Listener() {
        }
    );
    private CircuitBreakerService breakerService;
    private BigArrays bigArrays;
    // Set only for the "preallocate" param; makes each context wrap the breaker
    // service in a PreallocatedCircuitBreakerService sized by the request.
    private boolean preallocateBreaker;
    // Which circuit breaker implementation this benchmark run measures.
    @Param({ "noop", "real", "preallocate" })
    private String breaker;
    // Builds the breaker service matching the "breaker" benchmark parameter.
    @Setup
    public void setup() {
        switch (breaker) {
            case "real":
                breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
                break;
            case "preallocate":
                preallocateBreaker = true;
                breakerService = new HierarchyCircuitBreakerService(Settings.EMPTY, List.of(), clusterSettings);
                break;
            case "noop":
                breakerService = new NoneCircuitBreakerService();
                break;
            default:
                throw new UnsupportedOperationException();
        }
        bigArrays = new BigArrays(recycler, breakerService, "request");
    }
    // Single top-level sum aggregation.
    @Benchmark
    public void sum() throws IOException {
        buildFactories(new AggregatorFactories.Builder().addAggregator(new SumAggregationBuilder("s").field("int_1")));
    }
    // terms aggregation with one sum sub-aggregation.
    @Benchmark
    public void termsSum() throws IOException {
        buildFactories(
            new AggregatorFactories.Builder().addAggregator(
                new TermsAggregationBuilder("t").field("int_1").subAggregation(new SumAggregationBuilder("s").field("int_2"))
            )
        );
    }
    // terms aggregation with sixty sum sub-aggregations — the heavy case.
    @Benchmark
    public void termsSixtySums() throws IOException {
        TermsAggregationBuilder b = new TermsAggregationBuilder("t").field("int_1");
        for (int i = 0; i < 60; i++) {
            b.subAggregation(new SumAggregationBuilder("s" + i).field("int_" + i));
        }
        buildFactories(new AggregatorFactories.Builder().addAggregator(b));
    }
    // Builds (and immediately discards) the aggregators; the try-with-resources
    // context releases everything, including any preallocated breaker bytes.
    private void buildFactories(AggregatorFactories.Builder factories) throws IOException {
        try (DummyAggregationContext context = new DummyAggregationContext(factories.bytesToPreallocate())) {
            factories.build(context, null).createTopLevelAggregators();
        }
    }
    // Minimal AggregationContext: implements just enough for aggregator
    // construction and throws UnsupportedOperationException everywhere else.
    private class DummyAggregationContext extends AggregationContext {
        private final Query query = new MatchAllDocsQuery();
        private final List<Releasable> releaseMe = new ArrayList<>();
        private final CircuitBreaker breaker;
        // Non-null only when preallocation is enabled; released in close().
        private final PreallocatedCircuitBreakerService preallocated;
        private final MultiBucketConsumer multiBucketConsumer;
        DummyAggregationContext(long bytesToPreallocate) {
            CircuitBreakerService breakerService;
            if (preallocateBreaker) {
                breakerService = preallocated = new PreallocatedCircuitBreakerService(
                    AggConstructionContentionBenchmark.this.breakerService,
                    CircuitBreaker.REQUEST,
                    bytesToPreallocate,
                    "aggregations"
                );
            } else {
                breakerService = AggConstructionContentionBenchmark.this.breakerService;
                preallocated = null;
            }
            breaker = breakerService.getBreaker(CircuitBreaker.REQUEST);
            multiBucketConsumer = new MultiBucketConsumer(Integer.MAX_VALUE, breaker);
        }
        @Override
        public Query query() {
            return query;
        }
        @Override
        public Aggregator profileIfEnabled(Aggregator agg) throws IOException {
            return agg;
        }
        @Override
        public boolean profiling() {
            return false;
        }
        @Override
        public long nowInMillis() {
            return 0;
        }
        @Override
        protected IndexFieldData<?> buildFieldData(MappedFieldType ft) {
            IndexFieldDataCache indexFieldDataCache = indicesFieldDataCache.buildIndexFieldDataCache(new IndexFieldDataCache.Listener() {
            }, index, ft.name());
            return ft.fielddataBuilder("test", this::lookup).build(indexFieldDataCache, breakerService);
        }
        @Override
        public MappedFieldType getFieldType(String path) {
            // The benchmarks only reference integer fields named int_<n>.
            if (path.startsWith("int")) {
                return new NumberFieldMapper.NumberFieldType(path, NumberType.INTEGER);
            }
            throw new UnsupportedOperationException();
        }
        @Override
        public Collection<MappedFieldType> getMatchingFieldTypes(String pattern) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean isFieldMapped(String field) {
            return field.startsWith("int");
        }
        @Override
        public <FactoryType> FactoryType compile(Script script, ScriptContext<FactoryType> context) {
            throw new UnsupportedOperationException();
        }
        @Override
        public SearchLookup lookup() {
            throw new UnsupportedOperationException();
        }
        @Override
        public ValuesSourceRegistry getValuesSourceRegistry() {
            return searchModule.getValuesSourceRegistry();
        }
        @Override
        public BigArrays bigArrays() {
            return bigArrays;
        }
        @Override
        public IndexSearcher searcher() {
            return null;
        }
        @Override
        public Query buildQuery(QueryBuilder builder) throws IOException {
            throw new UnsupportedOperationException();
        }
        @Override
        public Query filterQuery(Query query) {
            throw new UnsupportedOperationException();
        }
        @Override
        public IndexSettings getIndexSettings() {
            throw new UnsupportedOperationException();
        }
        @Override
        public Optional<SortAndFormats> buildSort(List<SortBuilder<?>> sortBuilders) throws IOException {
            throw new UnsupportedOperationException();
        }
        @Override
        public ObjectMapper getObjectMapper(String path) {
            throw new UnsupportedOperationException();
        }
        @Override
        public NestedScope nestedScope() {
            throw new UnsupportedOperationException();
        }
        @Override
        public SubSearchContext subSearchContext() {
            throw new UnsupportedOperationException();
        }
        @Override
        public void addReleasable(Aggregator aggregator) {
            releaseMe.add(aggregator);
        }
        @Override
        public MultiBucketConsumer multiBucketConsumer() {
            return multiBucketConsumer;
        }
        @Override
        public BitsetFilterCache bitsetFilterCache() {
            throw new UnsupportedOperationException();
        }
        @Override
        public BucketedSort buildBucketedSort(SortBuilder<?> sort, int size, ExtraData values) throws IOException {
            throw new UnsupportedOperationException();
        }
        @Override
        public int shardRandomSeed() {
            return 0;
        }
        @Override
        public long getRelativeTimeInMillis() {
            return 0;
        }
        @Override
        public boolean isCancelled() {
            return false;
        }
        @Override
        public CircuitBreaker breaker() {
            return breaker;
        }
        @Override
        public Analyzer getIndexAnalyzer(Function<String, NamedAnalyzer> unindexedFieldAnalyzer) {
            throw new UnsupportedOperationException();
        }
        @Override
        public boolean isCacheable() {
            throw new UnsupportedOperationException();
        }
        @Override
        public void close() {
            // Releases registered aggregators plus the preallocated breaker; the
            // preallocated entry is null unless preallocation is enabled —
            // presumably Releasables.close tolerates nulls, TODO confirm.
            List<Releasable> releaseMe = new ArrayList<>(this.releaseMe);
            releaseMe.add(preallocated);
            Releasables.close(releaseMe);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package opennlp.tools.tokenize;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.regex.Pattern;
import org.junit.Assert;
import org.junit.Test;
import opennlp.tools.dictionary.Dictionary;
import opennlp.tools.formats.ResourceAsStreamFactory;
import opennlp.tools.tokenize.DummyTokenizerFactory.DummyContextGenerator;
import opennlp.tools.tokenize.DummyTokenizerFactory.DummyDictionary;
import opennlp.tools.tokenize.lang.Factory;
import opennlp.tools.util.InputStreamFactory;
import opennlp.tools.util.ObjectStream;
import opennlp.tools.util.PlainTextByLineStream;
import opennlp.tools.util.TrainingParameters;
/**
* Tests for the {@link TokenizerFactory} class.
*/
public class TokenizerFactoryTest {
private static ObjectStream<TokenSample> createSampleStream()
throws IOException {
InputStreamFactory in = new ResourceAsStreamFactory(
TokenizerFactoryTest.class, "/opennlp/tools/tokenize/token.train");
return new TokenSampleStream(new PlainTextByLineStream(in, StandardCharsets.UTF_8));
}
private static TokenizerModel train(TokenizerFactory factory)
throws IOException {
return TokenizerME.train(createSampleStream(), factory, TrainingParameters.defaultParams());
}
private static Dictionary loadAbbDictionary() throws IOException {
InputStream in = TokenizerFactoryTest.class.getClassLoader()
.getResourceAsStream("opennlp/tools/sentdetect/abb.xml");
return new Dictionary(in);
}
@Test
public void testDefault() throws IOException {
Dictionary dic = loadAbbDictionary();
final String lang = "spa";
TokenizerModel model = train(new TokenizerFactory(lang, dic, false, null));
TokenizerFactory factory = model.getFactory();
Assert.assertTrue(factory.getAbbreviationDictionary() != null);
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(Factory.DEFAULT_ALPHANUMERIC, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertFalse(factory.isUseAlphaNumericOptmization());
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TokenizerModel fromSerialized = new TokenizerModel(in);
factory = fromSerialized.getFactory();
Assert.assertTrue(factory.getAbbreviationDictionary() != null);
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(Factory.DEFAULT_ALPHANUMERIC, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertFalse(factory.isUseAlphaNumericOptmization());
}
@Test
public void testNullDict() throws IOException {
Dictionary dic = null;
final String lang = "spa";
TokenizerModel model = train(new TokenizerFactory(lang, dic, false, null));
TokenizerFactory factory = model.getFactory();
Assert.assertNull(factory.getAbbreviationDictionary());
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(Factory.DEFAULT_ALPHANUMERIC, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertFalse(factory.isUseAlphaNumericOptmization());
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TokenizerModel fromSerialized = new TokenizerModel(in);
factory = fromSerialized.getFactory();
Assert.assertNull(factory.getAbbreviationDictionary());
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(Factory.DEFAULT_ALPHANUMERIC, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertFalse(factory.isUseAlphaNumericOptmization());
}
@Test
public void testCustomPatternAndAlphaOpt() throws IOException {
Dictionary dic = null;
final String lang = "spa";
String pattern = "^[0-9A-Za-z]+$";
TokenizerModel model = train(new TokenizerFactory(lang, dic, true,
Pattern.compile(pattern)));
TokenizerFactory factory = model.getFactory();
Assert.assertNull(factory.getAbbreviationDictionary());
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(pattern, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertTrue(factory.isUseAlphaNumericOptmization());
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TokenizerModel fromSerialized = new TokenizerModel(in);
factory = fromSerialized.getFactory();
Assert.assertNull(factory.getAbbreviationDictionary());
Assert.assertTrue(factory.getContextGenerator() instanceof DefaultTokenContextGenerator);
Assert.assertEquals(pattern, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertTrue(factory.isUseAlphaNumericOptmization());
}
@Test
public void testDummyFactory() throws IOException {
Dictionary dic = loadAbbDictionary();
final String lang = "spa";
String pattern = "^[0-9A-Za-z]+$";
TokenizerModel model = train(new DummyTokenizerFactory(lang, dic, true,
Pattern.compile(pattern)));
TokenizerFactory factory = model.getFactory();
Assert.assertTrue(factory.getAbbreviationDictionary() instanceof DummyDictionary);
Assert.assertTrue(factory.getContextGenerator() instanceof DummyContextGenerator);
Assert.assertEquals(pattern, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertTrue(factory.isUseAlphaNumericOptmization());
ByteArrayOutputStream out = new ByteArrayOutputStream();
model.serialize(out);
ByteArrayInputStream in = new ByteArrayInputStream(out.toByteArray());
TokenizerModel fromSerialized = new TokenizerModel(in);
factory = fromSerialized.getFactory();
Assert.assertTrue(factory.getAbbreviationDictionary() instanceof DummyDictionary);
Assert.assertTrue(factory.getContextGenerator() instanceof DummyContextGenerator);
Assert.assertEquals(pattern, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertEquals(lang, model.getLanguage());
Assert.assertTrue(factory.isUseAlphaNumericOptmization());
}
@Test
public void testCreateDummyFactory() throws IOException {
Dictionary dic = loadAbbDictionary();
final String lang = "spa";
String pattern = "^[0-9A-Za-z]+$";
TokenizerFactory factory = TokenizerFactory.create(
DummyTokenizerFactory.class.getCanonicalName(), lang, dic, true,
Pattern.compile(pattern));
Assert.assertTrue(factory.getAbbreviationDictionary() instanceof DummyDictionary);
Assert.assertTrue(factory.getContextGenerator() instanceof DummyContextGenerator);
Assert.assertEquals(pattern, factory.getAlphaNumericPattern().pattern());
Assert.assertEquals(lang, factory.getLanguageCode());
Assert.assertTrue(factory.isUseAlphaNumericOptmization());
}
}
| |
package com.mantralabsglobal.cashin.ui.fragment.tabs;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Bundle;
import android.support.annotation.Nullable;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ImageView;
import com.mantralabsglobal.cashin.R;
import com.mantralabsglobal.cashin.service.BankSnapService;
import com.mantralabsglobal.cashin.ui.Application;
import com.mantralabsglobal.cashin.ui.activity.app.BaseActivity;
import com.mantralabsglobal.cashin.ui.activity.camera.CwacCameraActivity;
import com.soundcloud.android.crop.Crop;
import com.squareup.picasso.Picasso;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import butterknife.InjectView;
import butterknife.OnClick;
import retrofit.Callback;
import retrofit.client.Response;
import retrofit.mime.TypedFile;
/**
 * Fragment that collects up to three bank statement snapshots. Tapping a slot
 * lets the user pick an image from the gallery or take one with the camera;
 * the image is then cropped and uploaded to the bank snap service.
 */
public class SnapBankStatementFragment extends BaseBindableFragment<BankSnapService.BankSnapImage>
{
    private static final String TAG = "SnapBankStatementFragment";

    @InjectView(R.id.imageView1)
    ImageView imageView1;
    @InjectView(R.id.imageView2)
    ImageView imageView2;
    @InjectView(R.id.imageView3)
    ImageView imageView3;
    @InjectView(R.id.bankSnapView)
    ViewGroup bankSnapView;

    BankSnapService bankSnapService;
    List<ImageView> imageViewList;
    // Image picked/cropped locally but not yet uploaded to the server.
    BankSnapService.BankSnapImage dirtyImage;
    // The slot the user last tapped; crop/upload results bind back to it.
    ImageView imageViewClicked;
    int imageIndex;

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_take_snap, container, false);
    }

    @Override
    protected View getFormView() {
        return bankSnapView;
    }

    @Override
    public void onViewCreated(View view, @Nullable Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        imageViewList = new ArrayList<ImageView>();
        imageViewList.add(imageView1);
        imageViewList.add(imageView2);
        imageViewList.add(imageView3);
        bankSnapService = ((Application) getActivity().getApplication()).getRestClient().getBankSnapService();
        reset(false);
    }

    /** Shows a chooser (gallery vs. camera) for the tapped image slot. */
    @OnClick({R.id.imageView1, R.id.imageView2, R.id.imageView3})
    public void onClick(final View v) {
        imageIndex = imageViewList.indexOf(v);
        imageViewClicked = imageViewList.get(imageIndex);
        // Java-style array declaration (was C-style "CharSequence uploadOptions[]").
        CharSequence[] uploadOptions = new CharSequence[] {"Choose from gallery", "Take a Snap"};
        AlertDialog.Builder builder = new AlertDialog.Builder(getActivity());
        builder.setTitle("Add bank statement");
        builder.setItems(uploadOptions, new DialogInterface.OnClickListener() {
            @Override
            public void onClick(DialogInterface dialog, int which) {
                if (which == 0)
                    pickImageFromGallery();
                else if (which == 1)
                    launchCamera();
            }
        });
        builder.show();
    }

    /** Uploads the locally cropped file as multipart form data. */
    @Override
    protected void onUpdate(BankSnapService.BankSnapImage updatedData, Callback<BankSnapService.BankSnapImage> saveCallback) {
        if (updatedData != null && updatedData.getFilePath() != null && updatedData.getFilePath().length() > 0) {
            TypedFile typedFile = new TypedFile("multipart/form-data", new File(updatedData.getFilePath()));
            bankSnapService.uploadBankSnapImage(typedFile, saveCallback);
        }
    }

    @Override
    protected void onCreate(BankSnapService.BankSnapImage updatedData, Callback<BankSnapService.BankSnapImage> saveCallback) {
        // Creating and updating use the same upload endpoint.
        onUpdate(updatedData, saveCallback);
    }

    @Override
    protected void loadDataFromServer(Callback<BankSnapService.BankSnapImage> dataCallback) {
        BankSnapService.BankSnapUtil.getBankStatamentSnapImage(dataCallback, bankSnapService, getActivity());
    }

    @Override
    protected void handleDataNotPresentOnServer() {
        setVisibleChildView(bankSnapView);
    }

    /** Starts the system gallery picker; result arrives via onActivityResult. */
    public void pickImageFromGallery()
    {
        Intent photoPickerIntent = new Intent(Intent.ACTION_PICK);
        photoPickerIntent.setType("image/*");
        getActivity().startActivityForResult(photoPickerIntent, BaseActivity.SELECT_PHOTO_FROM_GALLERY);
    }

    /** Starts the in-app camera activity; result arrives via onActivityResult. */
    public void launchCamera()
    {
        Intent intent = new Intent(getActivity(), CwacCameraActivity.class);
        intent.putExtra(CwacCameraActivity.SHOW_CAMERA_SWITCH, true);
        intent.putExtra(CwacCameraActivity.DEFAULT_CAMERA, CwacCameraActivity.STANDARD);
        intent.putExtra("FLASH", false);
        intent.putExtra("FLIP_CAMERA", false);
        getActivity().startActivityForResult(intent, BaseActivity.SELFIE_CAPTURE);
    }

    /** Routes gallery, camera and crop results into the crop/upload pipeline. */
    @Override
    public void onActivityResult(int requestCode, int resultCode, Intent imageReturnedIntent) {
        super.onActivityResult(requestCode, resultCode, imageReturnedIntent);
        switch (requestCode) {
            case BaseActivity.SELECT_PHOTO_FROM_GALLERY:
                if (resultCode == Activity.RESULT_OK) {
                    Log.d("Data ", imageReturnedIntent.getData() + "");
                    final Uri imageUri = imageReturnedIntent.getData();
                    beginCrop(imageUri, "selfie-cropped" + (imageIndex + 1) + ".jpg");
                }
                break;
            case BaseActivity.SELFIE_CAPTURE:
                if (resultCode == Activity.RESULT_OK) {
                    String path = imageReturnedIntent.getStringExtra("file_path");
                    File file = new File(path);
                    Uri imageUri = Uri.fromFile(file);
                    beginCrop(imageUri, "selfie-cropped" + (imageIndex + 1) + ".jpg");
                }
                break;
            case BaseActivity.CROP_SELFIE:
                if (resultCode == Activity.RESULT_OK) {
                    BankSnapService.BankSnapImage bankSnapImg = new BankSnapService.BankSnapImage();
                    bankSnapImg.setFilePath(Crop.getOutput(imageReturnedIntent).getPath());
                    bindDataToForm(bankSnapImg);
                    dirtyImage = bankSnapImg;
                    save();
                } else if (resultCode == Crop.RESULT_ERROR) {
                    showToastOnUIThread(Crop.getError(imageReturnedIntent).getMessage());
                    reset(false);
                }
                break;
            default:
                Log.d("Result code", requestCode + "");
        }
    }

    /** Launches the crop UI, writing the cropped file to app-external storage. */
    private void beginCrop(Uri source, String fileName) {
        Log.d("File Name", fileName);
        Uri destination = Uri.fromFile(new File(getActivity().getExternalFilesDir(null), fileName));
        Crop.of(source, destination).asSquare().withAspect(4, 5).withMaxSize(80, 100).start(getActivity(), BaseActivity.CROP_SELFIE);
    }

    @Override
    protected boolean beforeBindDataToForm(BankSnapService.BankSnapImage value, Response response) {
        if (value.getBankImage() != null && value.getBankImage().length() > 0) {
            // Invalidate Picasso's cache so the freshly uploaded image is refetched.
            Picasso.with(getActivity()).invalidate(value.getBankImage());
            dirtyImage = null;
        }
        return false;
    }

    /** Renders either the local (just-cropped) image or the remote server image. */
    @Override
    public void bindDataToForm(BankSnapService.BankSnapImage value) {
        // Reveal the next empty slot once the current one is filled.
        if (imageIndex < (imageViewList.size() - 1))
            imageViewList.get(imageIndex + 1).setVisibility(View.VISIBLE);
        if (value != null && value.getFilePath() != null && value.getFilePath().length() > 0) {
            // NOTE(review): the guard checks getFilePath() but the stream is opened
            // from getImageUri() — confirm both are always set together.
            try {
                final InputStream imageStream = getActivity().getContentResolver().openInputStream(value.getImageUri());
                final Bitmap selectedImage = BitmapFactory.decodeStream(imageStream);
                imageViewClicked.setImageBitmap(selectedImage);
            } catch (FileNotFoundException e) {
                // Previously swallowed silently; log so missing files are diagnosable.
                Log.e(TAG, "Unable to open local image for binding", e);
            }
        } else if (value != null && value.getBankImage() != null && value.getBankImage().length() > 0) {
            showProgressDialog("");
            Picasso.with(getActivity())
                    .load(value.getBankImage())
                    .fit()
                    .into(imageViewClicked, new com.squareup.picasso.Callback() {
                        @Override
                        public void onSuccess() {
                            hideProgressDialog();
                        }

                        @Override
                        public void onError() {
                            // Dismiss the progress dialog on failure too; it was
                            // previously left showing forever on load errors.
                            hideProgressDialog();
                            showErrorOnUIWithoutAction(R.string.failed_to_load_image);
                        }
                    });
        }
    }

    @Override
    public BankSnapService.BankSnapImage getDataFromForm(BankSnapService.BankSnapImage base) {
        // Prefer the locally captured (unsaved) image when one exists.
        return dirtyImage != null ? dirtyImage : base;
    }

    /** The form is considered valid only while an unsaved local image exists. */
    public boolean isFormValid()
    {
        return dirtyImage != null;
    }
}
| |
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
/*
* NumberPanel.java
*
* Created on 14.06.2010, 16:42:25
*/
package com.jme3.gde.materials.multiview.widgets;
import com.jme3.math.FastMath;
import java.awt.Color;
import javax.swing.JFrame;
/**
*
* @author normenhansen
*/
public class ColorPanel extends MaterialPropertyWidget {
    /** Creates a new ColorPanel form and initializes its generated UI components. */
    public ColorPanel() {
        initComponents();
    }
/** This method is called from within the constructor to
* initialize the form.
* WARNING: Do NOT modify this code. The content of this method is
* always regenerated by the Form Editor.
*/
@SuppressWarnings("unchecked")
// <editor-fold defaultstate="collapsed" desc="Generated Code">//GEN-BEGIN:initComponents
private void initComponents() {
jLabel1 = new javax.swing.JLabel();
jPanel2 = new javax.swing.JPanel();
rLabel = new javax.swing.JTextField();
gLabel = new javax.swing.JTextField();
bLabel = new javax.swing.JTextField();
jLabel2 = new javax.swing.JLabel();
jLabel3 = new javax.swing.JLabel();
jLabel4 = new javax.swing.JLabel();
aLabel = new javax.swing.JTextField();
jLabel5 = new javax.swing.JLabel();
colorPreview = new javax.swing.JLabel();
jButton1 = new javax.swing.JButton();
jSeparator1 = new javax.swing.JSeparator();
setMaximumSize(new java.awt.Dimension(32767, 35));
setPreferredSize(new java.awt.Dimension(430, 35));
jLabel1.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jLabel1.text")); // NOI18N
jLabel1.setPreferredSize(new java.awt.Dimension(100, 16));
rLabel.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.rLabel.text")); // NOI18N
rLabel.setMaximumSize(new java.awt.Dimension(110, 2147483647));
rLabel.setNextFocusableComponent(gLabel);
rLabel.setPreferredSize(new java.awt.Dimension(110, 28));
rLabel.setVerifyInputWhenFocusTarget(false);
rLabel.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
textChanged(evt);
}
});
rLabel.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusGained(java.awt.event.FocusEvent evt) {
rLabelFocusGained(evt);
}
public void focusLost(java.awt.event.FocusEvent evt) {
rLabelFocusLost(evt);
}
});
gLabel.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.gLabel.text")); // NOI18N
gLabel.setMaximumSize(new java.awt.Dimension(110, 2147483647));
gLabel.setNextFocusableComponent(bLabel);
gLabel.setPreferredSize(new java.awt.Dimension(110, 28));
gLabel.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
gLabeltextChanged(evt);
}
});
gLabel.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusGained(java.awt.event.FocusEvent evt) {
gLabelFocusGained(evt);
}
public void focusLost(java.awt.event.FocusEvent evt) {
gLabelFocusLost(evt);
}
});
bLabel.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.bLabel.text")); // NOI18N
bLabel.setMaximumSize(new java.awt.Dimension(110, 2147483647));
bLabel.setNextFocusableComponent(aLabel);
bLabel.setPreferredSize(new java.awt.Dimension(110, 28));
bLabel.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
bLabeltextChanged(evt);
}
});
bLabel.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusGained(java.awt.event.FocusEvent evt) {
bLabelFocusGained(evt);
}
public void focusLost(java.awt.event.FocusEvent evt) {
bLabelFocusLost(evt);
}
});
jLabel2.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jLabel2.text")); // NOI18N
jLabel3.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jLabel3.text")); // NOI18N
jLabel4.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jLabel4.text")); // NOI18N
aLabel.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.aLabel.text")); // NOI18N
aLabel.setCursor(new java.awt.Cursor(java.awt.Cursor.TEXT_CURSOR));
aLabel.setMaximumSize(new java.awt.Dimension(110, 2147483647));
aLabel.setNextFocusableComponent(jButton1);
aLabel.setPreferredSize(new java.awt.Dimension(110, 28));
aLabel.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
aLabeltextChanged(evt);
}
});
aLabel.addFocusListener(new java.awt.event.FocusAdapter() {
public void focusGained(java.awt.event.FocusEvent evt) {
aLabelFocusGained(evt);
}
public void focusLost(java.awt.event.FocusEvent evt) {
aLabelFocusLost(evt);
}
});
jLabel5.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jLabel5.text")); // NOI18N
colorPreview.setBackground(new java.awt.Color(204, 204, 204));
colorPreview.setHorizontalAlignment(javax.swing.SwingConstants.TRAILING);
colorPreview.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.colorPreview.text")); // NOI18N
colorPreview.setBorder(javax.swing.BorderFactory.createLineBorder(new java.awt.Color(102, 102, 102)));
colorPreview.setOpaque(true);
javax.swing.GroupLayout jPanel2Layout = new javax.swing.GroupLayout(jPanel2);
jPanel2.setLayout(jPanel2Layout);
jPanel2Layout.setHorizontalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, jPanel2Layout.createSequentialGroup()
.addContainerGap()
.addComponent(colorPreview, javax.swing.GroupLayout.DEFAULT_SIZE, 18, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel2)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(rLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel3)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(gLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 45, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel4)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(bLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jLabel5)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(aLabel, javax.swing.GroupLayout.PREFERRED_SIZE, 46, javax.swing.GroupLayout.PREFERRED_SIZE))
);
jPanel2Layout.setVerticalGroup(
jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addComponent(gLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 20, Short.MAX_VALUE)
.addComponent(bLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 20, Short.MAX_VALUE)
.addGroup(jPanel2Layout.createParallelGroup(javax.swing.GroupLayout.Alignment.BASELINE)
.addComponent(jLabel3)
.addComponent(jLabel4)
.addComponent(jLabel5)
.addComponent(jLabel2)
.addComponent(rLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 20, Short.MAX_VALUE))
.addComponent(colorPreview, javax.swing.GroupLayout.DEFAULT_SIZE, 20, Short.MAX_VALUE)
.addComponent(aLabel, javax.swing.GroupLayout.DEFAULT_SIZE, 20, Short.MAX_VALUE)
);
jButton1.setIcon(new javax.swing.ImageIcon(getClass().getResource("/com/jme3/gde/materials/multiview/widgets/icons/color_wheel.png"))); // NOI18N
jButton1.setText(org.openide.util.NbBundle.getMessage(ColorPanel.class, "ColorPanel.jButton1.text")); // NOI18N
jButton1.setHorizontalTextPosition(javax.swing.SwingConstants.CENTER);
jButton1.setMaximumSize(new java.awt.Dimension(43, 15));
jButton1.setMinimumSize(new java.awt.Dimension(43, 15));
jButton1.setPreferredSize(new java.awt.Dimension(43, 15));
jButton1.setVerticalTextPosition(javax.swing.SwingConstants.BOTTOM);
jButton1.addActionListener(new java.awt.event.ActionListener() {
public void actionPerformed(java.awt.event.ActionEvent evt) {
jButton1ActionPerformed(evt);
}
});
javax.swing.GroupLayout layout = new javax.swing.GroupLayout(this);
this.setLayout(layout);
layout.setHorizontalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(javax.swing.GroupLayout.Alignment.TRAILING, layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.TRAILING)
.addComponent(jSeparator1, javax.swing.GroupLayout.DEFAULT_SIZE, 420, Short.MAX_VALUE)
.addGroup(layout.createSequentialGroup()
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, 70, Short.MAX_VALUE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jPanel2, javax.swing.GroupLayout.PREFERRED_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.PREFERRED_SIZE)
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.UNRELATED)
.addComponent(jButton1, javax.swing.GroupLayout.PREFERRED_SIZE, 29, javax.swing.GroupLayout.PREFERRED_SIZE)))
.addContainerGap())
);
layout.setVerticalGroup(
layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING)
.addGroup(layout.createSequentialGroup()
.addGroup(layout.createParallelGroup(javax.swing.GroupLayout.Alignment.LEADING, false)
.addComponent(jButton1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jLabel1, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE)
.addComponent(jPanel2, javax.swing.GroupLayout.DEFAULT_SIZE, javax.swing.GroupLayout.DEFAULT_SIZE, Short.MAX_VALUE))
.addPreferredGap(javax.swing.LayoutStyle.ComponentPlacement.RELATED)
.addComponent(jSeparator1, javax.swing.GroupLayout.DEFAULT_SIZE, 9, Short.MAX_VALUE))
);
}// </editor-fold>//GEN-END:initComponents
private void jButton1ActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_jButton1ActionPerformed
// TODO add your handling code here:
ColorRGBADialog dialog = new ColorRGBADialog(new JFrame(), true);
dialog.setLocationRelativeTo(null);
dialog.setColor(getColor(rLabel.getText(), gLabel.getText(), bLabel.getText(), aLabel.getText()));
dialog.setVisible(true);
if (dialog.getColorAsText() != null) {
colorPreview.setBackground(dialog.getColor());
float[] f = new float[4];
dialog.getColor().getRGBComponents(f);
dialog.getColor().getAlpha();
rLabel.setText(f[0] + "");
gLabel.setText(f[1] + "");
bLabel.setText(f[2] + "");
aLabel.setText(f[3] + "");
property.setValue(rLabel.getText() + " " + gLabel.getText() + " " + bLabel.getText() + " " + aLabel.getText());
fireChanged();
}
}//GEN-LAST:event_jButton1ActionPerformed
private void textChanged(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_textChanged
updateValue();
}//GEN-LAST:event_textChanged
private void gLabeltextChanged(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_gLabeltextChanged
updateValue();
}//GEN-LAST:event_gLabeltextChanged
private void bLabeltextChanged(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_bLabeltextChanged
updateValue();
}//GEN-LAST:event_bLabeltextChanged
private void aLabeltextChanged(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_aLabeltextChanged
updateValue();
}//GEN-LAST:event_aLabeltextChanged
private void rLabelFocusGained(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_rLabelFocusGained
rLabel.setSelectionStart(0);
rLabel.setSelectionEnd(rLabel.getText().length());
}//GEN-LAST:event_rLabelFocusGained
private void gLabelFocusGained(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_gLabelFocusGained
gLabel.setSelectionStart(0);
gLabel.setSelectionEnd(gLabel.getText().length());
}//GEN-LAST:event_gLabelFocusGained
private void bLabelFocusGained(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_bLabelFocusGained
bLabel.setSelectionStart(0);
bLabel.setSelectionEnd(bLabel.getText().length());
}//GEN-LAST:event_bLabelFocusGained
private void aLabelFocusGained(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_aLabelFocusGained
aLabel.setSelectionStart(0);
aLabel.setSelectionEnd(aLabel.getText().length());
}//GEN-LAST:event_aLabelFocusGained
private void rLabelFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_rLabelFocusLost
updateValue();
}//GEN-LAST:event_rLabelFocusLost
private void gLabelFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_gLabelFocusLost
updateValue();
}//GEN-LAST:event_gLabelFocusLost
private void bLabelFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_bLabelFocusLost
updateValue();
}//GEN-LAST:event_bLabelFocusLost
private void aLabelFocusLost(java.awt.event.FocusEvent evt) {//GEN-FIRST:event_aLabelFocusLost
updateValue();
}//GEN-LAST:event_aLabelFocusLost
private void updateValue() {
Color c = getColor(rLabel.getText(), gLabel.getText(), bLabel.getText(), aLabel.getText());
if (c != null) {
colorPreview.setBackground(c);
property.setValue(rLabel.getText() + " " + gLabel.getText() + " " + bLabel.getText() + " " + aLabel.getText());
fireChanged();
} else {
colorPreview.setBackground(new Color(204, 204, 204));
}
}
@Override
protected void readProperty() {
java.awt.EventQueue.invokeLater(new Runnable() {
public void run() {
jLabel1.setText(property.getName());
jLabel1.setToolTipText(property.getName());
String[] s = property.getValue().split(" ");
if (s.length == 4) {
try {
colorPreview.setBackground(getColor(s[0], s[1], s[2], s[3]));
rLabel.setText(s[0]);
gLabel.setText(s[1]);
bLabel.setText(s[2]);
aLabel.setText(s[3]);
} catch (NumberFormatException e) {
clear();
}
} else {
clear();
}
}
});
}
private void clear() {
colorPreview.setBackground(new Color(204, 204, 204));
rLabel.setText("");
gLabel.setText("");
bLabel.setText("");
aLabel.setText("");
}
private Color getColor(String r, String g, String b, String a) {
try {
float fr = FastMath.clamp(Float.parseFloat(r), 0, 1);
float fg = FastMath.clamp(Float.parseFloat(g), 0, 1);
float fb = FastMath.clamp(Float.parseFloat(b), 0, 1);
float fa = FastMath.clamp(Float.parseFloat(a), 0, 1);
return new Color(fr, fg, fb, fa);
} catch (NumberFormatException e) {
return null;
}
}
// Variables declaration - do not modify//GEN-BEGIN:variables
private javax.swing.JTextField aLabel;
private javax.swing.JTextField bLabel;
private javax.swing.JLabel colorPreview;
private javax.swing.JTextField gLabel;
private javax.swing.JButton jButton1;
private javax.swing.JLabel jLabel1;
private javax.swing.JLabel jLabel2;
private javax.swing.JLabel jLabel3;
private javax.swing.JLabel jLabel4;
private javax.swing.JLabel jLabel5;
private javax.swing.JPanel jPanel2;
private javax.swing.JSeparator jSeparator1;
private javax.swing.JTextField rLabel;
// End of variables declaration//GEN-END:variables
}
| |
/* __ __ __ __ __ ___
* \ \ / / \ \ / / __/
* \ \/ / /\ \ \/ / /
* \____/__/ \__\____/__/
*
* Copyright 2014-2017 Vavr, http://vavr.io
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.vavr.control;
import io.vavr.*;
import io.vavr.collection.Iterator;
import io.vavr.collection.List;
import io.vavr.collection.Seq;
import java.io.Serializable;
import java.util.NoSuchElementException;
import java.util.Objects;
import java.util.function.Consumer;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;
/**
* An implementation similar to scalaz's <a href="http://eed3si9n.com/learning-scalaz/Validation.html">Validation</a> control.
*
* <p>
* The Validation type is different from a Monad type, it is an applicative
* functor. Whereas a Monad will short circuit after the first errors, the
* applicative functor will continue on, accumulating ALL errors. This is
* especially helpful in cases such as validation, where you want to know
* all the validation errors that have occurred, not just the first one.
* </p>
*
* <pre>
* <code>
* <b>Validation construction:</b>
*
* <i>Valid:</i>
* Validation<String,Integer> valid = Validation.valid(5);
*
* <i>Invalid:</i>
* Validation<List<String>,Integer> invalid = Validation.invalid(List.of("error1","error2"));
*
* <b>Validation combination:</b>
*
* Validation<String,String> valid1 = Validation.valid("John");
* Validation<String,Integer> valid2 = Validation.valid(5);
* Validation<String,Option<String>> valid3 = Validation.valid(Option.of("123 Fake St."));
* Function3<String,Integer,Option<String>,Person> f = ...;
*
* Validation<List<String>,String> result = valid1.combine(valid2).ap((name,age) -> "Name: "+name+" Age: "+age);
* Validation<List<String>,Person> result2 = valid1.combine(valid2).combine(valid3).ap(f);
*
* <b>Another form of combining validations:</b>
*
* Validation<List<String>,Person> result3 = Validation.combine(valid1, valid2, valid3).ap(f);
* </code>
* </pre>
*
* @param <E> value type in the case of invalid
* @param <T> value type in the case of valid
* @author Eric Nelson
* @see <a href="https://github.com/scalaz/scalaz/blob/series/7.3.x/core/src/main/scala/scalaz/Validation.scala">Validation</a>
*/
public interface Validation<E, T> extends Value<T>, Serializable {
long serialVersionUID = 1L;
/**
 * Wraps the given {@code value} in a {@link Valid}.
 *
 * @param <E>   type of the error
 * @param <T>   type of the given {@code value}
 * @param value A value
 * @return {@code Valid(value)}
 */
static <E, T> Validation<E, T> valid(T value) {
    final Valid<E, T> valid = new Valid<>(value);
    return valid;
}
/**
 * Wraps the given single {@code error} in an {@link Invalid}.
 *
 * @param <E>   type of the given {@code error}
 * @param <T>   type of the value
 * @param error An error
 * @return {@code Invalid(error)}
 * @throws NullPointerException if error is null
 */
static <E, T> Validation<E, T> invalid(E error) {
    // requireNonNull runs before construction, same as the two-statement form.
    return new Invalid<>(Objects.requireNonNull(error, "error is null"));
}
/**
 * Creates an {@link Invalid} that contains the given non-empty {@code errors}.
 *
 * @param <E>    type of the given errors
 * @param <T>    type of the value
 * @param errors A non-empty sequence of errors
 * @return {@code Invalid(errors)}
 * @throws NullPointerException     if errors is null
 * @throws IllegalArgumentException if errors is empty
 */
static <E, T> Validation<E, T> invalid(Seq<E> errors) {
    Objects.requireNonNull(errors, "errors is null");
    // isEmpty() is the idiomatic (and potentially cheaper) form of size() == 0.
    if (errors.isEmpty()) {
        throw new IllegalArgumentException("Errors are empty");
    }
    return new Invalid<>(errors);
}
/**
 * Creates a {@code Validation} from an {@code Either}.
 *
 * @param either An {@code Either}
 * @param <E>    error type
 * @param <T>    value type
 * @return A {@code Valid(either.get())} if either is a Right, otherwise {@code Invalid(either.getLeft())}.
 * @throws NullPointerException if either is null
 */
static <E, T> Validation<E, T> fromEither(Either<E, T> either) {
    Objects.requireNonNull(either, "either is null");
    if (either.isRight()) {
        return valid(either.get());
    }
    return invalid(either.getLeft());
}
/**
 * Creates a {@code Validation} from a {@code Try}.
 *
 * @param t   A {@code Try}
 * @param <T> type of the valid value
 * @return A {@code Valid(t.get())} if t is a Success, otherwise {@code Invalid(t.getCause())}.
 * @throws NullPointerException if {@code t} is null
 */
static <T> Validation<Throwable, T> fromTry(Try<? extends T> t) {
    Objects.requireNonNull(t, "t is null");
    if (t.isSuccess()) {
        return valid(t.get());
    }
    return invalid(t.getCause());
}
/**
 * Reduces many {@code Validation} instances into a single {@code Validation} by transforming an
 * {@code Iterable<Validation<? extends T>>} into a {@code Validation<Seq<T>>}.
 *
 * @param <E>    value type in the case of invalid
 * @param <T>    value type in the case of valid
 * @param values An iterable of Validation instances.
 * @return A valid Validation of a sequence of values if all Validation instances are valid
 * or an invalid Validation containing an accumulated List of errors.
 * @throws NullPointerException if values is null
 */
static <E, T> Validation<E, Seq<T>> sequence(Iterable<? extends Validation<E, T>> values) {
    Objects.requireNonNull(values, "values is null");
    List<E> errors = List.empty();
    List<T> list = List.empty();
    for (Validation<E, T> value : values) {
        if (value.isInvalid()) {
            // Prepend the reversed errors, then reverse the whole list at the end:
            // the double reversal restores the original encounter order in O(n) total.
            errors = errors.prependAll(value.getErrors().reverse());
        } else if (errors.isEmpty()) {
            // Only collect values while no error has been seen yet; once invalid,
            // the valid values can no longer contribute to the result.
            list = list.prepend(value.get());
        }
    }
    // Reverse undoes the prepend-accumulation so both results keep input order.
    return errors.isEmpty() ? valid(list.reverse()) : invalid(errors.reverse());
}
/**
 * Narrows a widened {@code Validation<? extends E, ? extends T>} to {@code Validation<E, T>}
 * by performing a type-safe cast. This is eligible because immutable/read-only
 * collections are covariant.
 *
 * @param validation A {@code Validation}.
 * @param <E>        type of error
 * @param <T>        type of valid value
 * @return the given {@code validation} instance as narrowed type {@code Validation<E, T>}.
 */
@SuppressWarnings("unchecked")
static <E, T> Validation<E, T> narrow(Validation<? extends E, ? extends T> validation) {
    final Validation<E, T> narrowed = (Validation<E, T>) validation;
    return narrowed;
}
/**
 * Combines two {@code Validation}s into a {@link Builder}.
 *
 * @param <E>         type of error
 * @param <T1>        type of first valid value
 * @param <T2>        type of second valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @return an instance of Builder&lt;E,T1,T2&gt;
 * @throws NullPointerException if validation1 or validation2 is null
 */
static <E, T1, T2> Builder<E, T1, T2> combine(Validation<E, T1> validation1, Validation<E, T2> validation2) {
    // Left-to-right argument evaluation preserves the original null-check order.
    return new Builder<>(
            Objects.requireNonNull(validation1, "validation1 is null"),
            Objects.requireNonNull(validation2, "validation2 is null"));
}
/**
 * Combines three {@code Validation}s into a {@link Builder3}.
 *
 * @param <E>         type of error
 * @param <T1>        type of first valid value
 * @param <T2>        type of second valid value
 * @param <T3>        type of third valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @return an instance of Builder3&lt;E,T1,T2,T3&gt;
 * @throws NullPointerException if validation1, validation2 or validation3 is null
 */
static <E, T1, T2, T3> Builder3<E, T1, T2, T3> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3) {
    // Left-to-right argument evaluation preserves the original null-check order.
    return new Builder3<>(
            Objects.requireNonNull(validation1, "validation1 is null"),
            Objects.requireNonNull(validation2, "validation2 is null"),
            Objects.requireNonNull(validation3, "validation3 is null"));
}
/**
 * Combines four {@code Validation}s into a {@link Builder4}.
 *
 * @param <E> type of error
 * @param <T1> type of first valid value
 * @param <T2> type of second valid value
 * @param <T3> type of third valid value
 * @param <T4> type of fourth valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @param validation4 fourth validation
 * @return an instance of Builder4<E,T1,T2,T3,T4>
 * @throws NullPointerException if validation1, validation2, validation3 or validation4 is null
 */
static <E, T1, T2, T3, T4> Builder4<E, T1, T2, T3, T4> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4) {
    Objects.requireNonNull(validation1, "validation1 is null");
    Objects.requireNonNull(validation2, "validation2 is null");
    Objects.requireNonNull(validation3, "validation3 is null");
    Objects.requireNonNull(validation4, "validation4 is null");
    return new Builder4<>(validation1, validation2, validation3, validation4);
}
/**
 * Combines five {@code Validation}s into a {@link Builder5}.
 *
 * @param <E> type of error
 * @param <T1> type of first valid value
 * @param <T2> type of second valid value
 * @param <T3> type of third valid value
 * @param <T4> type of fourth valid value
 * @param <T5> type of fifth valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @param validation4 fourth validation
 * @param validation5 fifth validation
 * @return an instance of Builder5<E,T1,T2,T3,T4,T5>
 * @throws NullPointerException if validation1, validation2, validation3, validation4 or validation5 is null
 */
static <E, T1, T2, T3, T4, T5> Builder5<E, T1, T2, T3, T4, T5> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5) {
    Objects.requireNonNull(validation1, "validation1 is null");
    Objects.requireNonNull(validation2, "validation2 is null");
    Objects.requireNonNull(validation3, "validation3 is null");
    Objects.requireNonNull(validation4, "validation4 is null");
    Objects.requireNonNull(validation5, "validation5 is null");
    return new Builder5<>(validation1, validation2, validation3, validation4, validation5);
}
/**
 * Combines six {@code Validation}s into a {@link Builder6}.
 *
 * @param <E> type of error
 * @param <T1> type of first valid value
 * @param <T2> type of second valid value
 * @param <T3> type of third valid value
 * @param <T4> type of fourth valid value
 * @param <T5> type of fifth valid value
 * @param <T6> type of sixth valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @param validation4 fourth validation
 * @param validation5 fifth validation
 * @param validation6 sixth validation
 * @return an instance of Builder6<E,T1,T2,T3,T4,T5,T6>
 * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5 or validation6 is null
 */
static <E, T1, T2, T3, T4, T5, T6> Builder6<E, T1, T2, T3, T4, T5, T6> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6) {
    Objects.requireNonNull(validation1, "validation1 is null");
    Objects.requireNonNull(validation2, "validation2 is null");
    Objects.requireNonNull(validation3, "validation3 is null");
    Objects.requireNonNull(validation4, "validation4 is null");
    Objects.requireNonNull(validation5, "validation5 is null");
    Objects.requireNonNull(validation6, "validation6 is null");
    return new Builder6<>(validation1, validation2, validation3, validation4, validation5, validation6);
}
/**
 * Combines seven {@code Validation}s into a {@link Builder7}.
 *
 * @param <E> type of error
 * @param <T1> type of first valid value
 * @param <T2> type of second valid value
 * @param <T3> type of third valid value
 * @param <T4> type of fourth valid value
 * @param <T5> type of fifth valid value
 * @param <T6> type of sixth valid value
 * @param <T7> type of seventh valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @param validation4 fourth validation
 * @param validation5 fifth validation
 * @param validation6 sixth validation
 * @param validation7 seventh validation
 * @return an instance of Builder7<E,T1,T2,T3,T4,T5,T6,T7>
 * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5, validation6 or validation7 is null
 */
static <E, T1, T2, T3, T4, T5, T6, T7> Builder7<E, T1, T2, T3, T4, T5, T6, T7> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6, Validation<E, T7> validation7) {
    Objects.requireNonNull(validation1, "validation1 is null");
    Objects.requireNonNull(validation2, "validation2 is null");
    Objects.requireNonNull(validation3, "validation3 is null");
    Objects.requireNonNull(validation4, "validation4 is null");
    Objects.requireNonNull(validation5, "validation5 is null");
    Objects.requireNonNull(validation6, "validation6 is null");
    Objects.requireNonNull(validation7, "validation7 is null");
    return new Builder7<>(validation1, validation2, validation3, validation4, validation5, validation6, validation7);
}
/**
 * Combines eight {@code Validation}s into a {@link Builder8}.
 *
 * @param <E> type of error
 * @param <T1> type of first valid value
 * @param <T2> type of second valid value
 * @param <T3> type of third valid value
 * @param <T4> type of fourth valid value
 * @param <T5> type of fifth valid value
 * @param <T6> type of sixth valid value
 * @param <T7> type of seventh valid value
 * @param <T8> type of eighth valid value
 * @param validation1 first validation
 * @param validation2 second validation
 * @param validation3 third validation
 * @param validation4 fourth validation
 * @param validation5 fifth validation
 * @param validation6 sixth validation
 * @param validation7 seventh validation
 * @param validation8 eighth validation
 * @return an instance of Builder8<E,T1,T2,T3,T4,T5,T6,T7,T8>
 * @throws NullPointerException if validation1, validation2, validation3, validation4, validation5, validation6, validation7 or validation8 is null
 */
static <E, T1, T2, T3, T4, T5, T6, T7, T8> Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> combine(Validation<E, T1> validation1, Validation<E, T2> validation2, Validation<E, T3> validation3, Validation<E, T4> validation4, Validation<E, T5> validation5, Validation<E, T6> validation6, Validation<E, T7> validation7, Validation<E, T8> validation8) {
    Objects.requireNonNull(validation1, "validation1 is null");
    Objects.requireNonNull(validation2, "validation2 is null");
    Objects.requireNonNull(validation3, "validation3 is null");
    Objects.requireNonNull(validation4, "validation4 is null");
    Objects.requireNonNull(validation5, "validation5 is null");
    Objects.requireNonNull(validation6, "validation6 is null");
    Objects.requireNonNull(validation7, "validation7 is null");
    Objects.requireNonNull(validation8, "validation8 is null");
    return new Builder8<>(validation1, validation2, validation3, validation4, validation5, validation6, validation7, validation8);
}
/**
 * Checks whether this is of type {@code Valid}.
 *
 * @return true if this is a Valid, false if it is an Invalid
 */
boolean isValid();
/**
 * Checks whether this is of type {@code Invalid}.
 *
 * @return true if this is an Invalid, false if it is a Valid
 */
boolean isInvalid();
/**
 * Returns this {@code Validation} if it is valid, otherwise returns the given alternative.
 *
 * @param other An alternative {@code Validation}
 * @return this {@code Validation} if it is valid, otherwise the alternative.
 */
@SuppressWarnings("unchecked")
default Validation<E, T> orElse(Validation<? extends E, ? extends T> other) {
    Objects.requireNonNull(other, "other is null");
    if (isValid()) {
        return this;
    }
    return (Validation<E, T>) other;
}
/**
 * Returns this {@code Validation} if it is valid, otherwise returns the result of
 * evaluating the given supplier.
 *
 * @param supplier An alternative {@code Validation} supplier (only invoked when invalid)
 * @return this {@code Validation} if it is valid, otherwise the supplied alternative.
 */
@SuppressWarnings("unchecked")
default Validation<E, T> orElse(Supplier<Validation<? extends E, ? extends T>> supplier) {
    Objects.requireNonNull(supplier, "supplier is null");
    if (isValid()) {
        return this;
    }
    return (Validation<E, T>) supplier.get();
}
// A Validation holds no value exactly when it is invalid, hence "empty".
@Override
default boolean isEmpty() {
    return isInvalid();
}
/**
 * Gets the value of this {@code Validation} if it is a {@code Valid}, or throws if it is an
 * {@code Invalid}.
 *
 * @return The value of this {@code Validation}
 * @throws NoSuchElementException if this is an {@code Invalid}
 */
@Override
T get();
/**
 * Gets the value if this is a Valid, or a value calculated from the errors otherwise.
 *
 * @param other a function which converts the errors to an alternative value
 * @return the value if the underlying Validation is a Valid, or else the alternative value
 * provided by {@code other} applied to the errors.
 */
default T getOrElseGet(Function<? super Seq<? super E>, ? extends T> other) {
    Objects.requireNonNull(other, "other is null");
    return isValid() ? get() : other.apply(getErrors());
}
/**
 * Gets the errors of this Validation if it is an Invalid, or throws if it is a Valid.
 *
 * @return The errors of this Invalid
 * @throws RuntimeException if this is a Valid
 */
Seq<E> getErrors();
/**
 * Converts this {@code Validation} to an {@code Either}.
 *
 * @return {@code Either.right(get())} if this is valid, otherwise {@code Either.left(getErrors())}.
 */
default Either<Seq<E>, T> toEither() {
    if (isValid()) {
        return Either.right(get());
    }
    return Either.left(getErrors());
}
// Implementations must redefine the Object contract so that two Validations
// of the same kind with equal contents compare equal.
@Override
boolean equals(Object o);
@Override
int hashCode();
@Override
String toString();
/**
 * Performs the given action on the value contained in a {@code Valid};
 * does nothing if this is an Invalid.
 *
 * @param action the action to be performed on the contained value
 * @throws NullPointerException if action is null
 */
@Override
default void forEach(Consumer<? super T> action) {
    Objects.requireNonNull(action, "action is null");
    if (!isValid()) {
        return;
    }
    action.accept(get());
}
/**
 * Folds this Validation to a single result: applies {@code fInvalid} to the errors if this
 * is an {@code Invalid}, or {@code fValid} to the value if this is a {@code Valid}.
 *
 * <p>
 * <code>
 * For example:<br>
 * Validation<List<String>,String> valid = ...;<br>
 * Integer i = valid.fold(List::length, String::length);
 * </code>
 * </p>
 *
 * @param <U> the fold result type
 * @param fInvalid the invalid fold operation
 * @param fValid the valid fold operation
 * @return the result of the fold operation that matches this Validation's state
 * @throws NullPointerException if fInvalid or fValid is null
 */
default <U> U fold(Function<Seq<? extends E>, ? extends U> fInvalid, Function<? super T, ? extends U> fValid) {
    Objects.requireNonNull(fInvalid, "fInvalid is null");
    Objects.requireNonNull(fValid, "fValid is null");
    return isValid() ? fValid.apply(get()) : fInvalid.apply(getErrors());
}
/**
 * Flips the valid/invalid sides of this Validation. A {@code Valid<E,T>} becomes an
 * {@code Invalid} carrying the value, and an {@code Invalid<E,T>} becomes a {@code Valid}
 * carrying the errors.
 *
 * @return a flipped instance of Validation
 */
default Validation<T, Seq<E>> swap() {
    if (isValid()) {
        return Validation.invalid(this.get());
    }
    return Validation.valid(this.getErrors());
}
/**
 * Maps the value of a {@code Valid} with {@code f}; an {@code Invalid} is passed
 * through carrying its errors.
 *
 * @param <U> the mapped value type
 * @param f the mapping function; must not be null
 * @return the mapped Validation
 * @throws NullPointerException if f is null
 */
@Override
default <U> Validation<E, U> map(Function<? super T, ? extends U> f) {
    Objects.requireNonNull(f, "f is null");
    if (isValid()) {
        return Validation.valid(f.apply(get()));
    }
    return Validation.invalid(getErrors());
}
/**
 * Maps both sides in one step: {@code errorMapper} transforms the errors of an
 * {@code Invalid} and {@code valueMapper} transforms the value of a {@code Valid}.
 * Equivalent to {@code map(valueMapper).mapError(errorMapper)}.
 *
 * @param <E2> type of the mapping result if this is an invalid
 * @param <T2> type of the mapping result if this is a valid
 * @param errorMapper the invalid mapping operation
 * @param valueMapper the valid mapping operation
 * @return the mapped Validation
 * @throws NullPointerException if errorMapper or valueMapper is null
 */
default <E2, T2> Validation<E2, T2> bimap(
        Function<? super Seq<? super E>, ? extends E2> errorMapper,
        Function<? super T, ? extends T2> valueMapper) {
    Objects.requireNonNull(errorMapper, "errorMapper is null");
    Objects.requireNonNull(valueMapper, "valueMapper is null");
    if (isValid()) {
        return Validation.valid(valueMapper.apply(get()));
    }
    return Validation.invalid(errorMapper.apply(getErrors()));
}
/**
 * Maps the errors of an {@code Invalid} with {@code f}; a {@code Valid} is passed
 * through carrying its value.
 *
 * @param <U> type of the errors resulting from the mapping
 * @param f a function that maps the errors of this Invalid
 * @return the mapped Validation
 * @throws NullPointerException if f is null
 */
default <U> Validation<U, T> mapError(Function<Seq<? extends E>, ? extends U> f) {
    Objects.requireNonNull(f, "f is null");
    if (isValid()) {
        return Validation.valid(this.get());
    }
    return Validation.invalid(f.apply(this.getErrors()));
}
/**
* Applicative apply: combines this Validation with a Validation carrying a function.
* If both are valid, the function is applied to this value. If exactly one side is
* invalid, that side's errors are returned. If both are invalid, the errors are
* accumulated: the given validation's errors first, then this validation's errors.
*
* @param <U> the result value type
* @param validation a Validation holding the function to apply; must not be null
* @return a valid result of applying the function, or an invalid carrying the accumulated errors
* @throws NullPointerException if validation is null
*/
default <U> Validation<E, U> ap(Validation<E, ? extends Function<? super T, ? extends U>> validation) {
Objects.requireNonNull(validation, "validation is null");
if (isValid()) {
if (validation.isValid()) {
final Function<? super T, ? extends U> f = validation.get();
final U u = f.apply(this.get());
return valid(u);
} else {
final Seq<E> errors = validation.getErrors();
return invalid(errors);
}
} else {
if (validation.isValid()) {
final Seq<E> errors = this.getErrors();
return invalid(errors);
} else {
// Both sides invalid: the supplied validation's errors come first, ours are appended.
Seq<E> specificErrors = List.ofAll(validation.getErrors());
Seq<E> finalErrors = specificErrors.appendAll(this.getErrors());
return invalid(finalErrors);
}
}
}
/**
* Combines two {@code Validation}s to form a {@link Builder}, which can then be used to perform further
* combines, or to apply an n-ary function in order to transform the {@link Builder} into a {@code Validation}.
*
* @param <U> type of the value contained in validation
* @param validation the validation object to combine this with
* @return an instance of Builder holding both validations
*/
default <U> Builder<E, T, U> combine(Validation<E, U> validation) {
return new Builder<>(this, validation);
}
// -- Implementation of Value
/**
 * Keeps this Validation when it is invalid or its value satisfies the predicate.
 *
 * @param predicate the test applied to a valid value; must not be null
 * @return {@code Option.some(this)} if this is invalid or the predicate passes,
 *         otherwise {@code Option.none()}
 * @throws NullPointerException if predicate is null
 */
default Option<Validation<E, T>> filter(Predicate<? super T> predicate) {
    Objects.requireNonNull(predicate, "predicate is null");
    if (isValid() && !predicate.test(get())) {
        return Option.none();
    }
    return Option.some(this);
}
/**
 * Maps the value to a new Validation and flattens the result; an {@code Invalid}
 * passes through unchanged.
 *
 * @param <U> the mapped value type
 * @param mapper the mapping function; must not be null
 * @return the flattened mapped Validation
 * @throws NullPointerException if mapper is null
 */
@SuppressWarnings("unchecked")
default <U> Validation<E, U> flatMap(Function<? super T, ? extends Validation<E, ? extends U>> mapper) {
    Objects.requireNonNull(mapper, "mapper is null");
    if (isInvalid()) {
        // An Invalid carries no value, so the cast only re-brands the value type.
        return (Validation<E, U>) this;
    }
    return (Validation<E, U>) mapper.apply(get());
}
/**
 * Applies the given action to the contained value if this is a {@code Valid},
 * then returns this Validation unchanged.
 *
 * @param action the action performed on a valid value
 * @return this Validation
 * @throws NullPointerException if action is null
 */
@Override
default Validation<E, T> peek(Consumer<? super T> action) {
    // Fail fast on a null action, consistent with forEach and every other combinator here.
    Objects.requireNonNull(action, "action is null");
    if (isValid()) {
        action.accept(get());
    }
    return this;
}
/**
* A {@code Validation}'s value is computed synchronously — valid/invalid is decided
* at construction time — so this Value is never asynchronous.
*
* @return false
*/
@Override
default boolean isAsync() {
return false;
}
/**
* A {@code Validation}'s value is computed eagerly — nothing is deferred — so this
* Value is never lazy.
*
* @return false
*/
@Override
default boolean isLazy() {
return false;
}
@Override
// A Validation wraps at most one value, unlike collection-like Values.
default boolean isSingleValued() {
return true;
}
@Override
default Iterator<T> iterator() {
    // A Valid yields exactly one element; an Invalid yields none.
    if (isValid()) {
        return Iterator.of(get());
    }
    return Iterator.empty();
}
/**
 * The successful case of a {@code Validation}, holding a single value.
 *
 * @param <E> type of the errors of this Validation
 * @param <T> type of the value of this Validation
 */
final class Valid<E, T> implements Validation<E, T>, Serializable {
    private static final long serialVersionUID = 1L;

    private final T value;

    /**
     * Creates a {@code Valid} wrapping the given value.
     *
     * @param value the successful value
     */
    private Valid(T value) {
        this.value = value;
    }

    @Override
    public boolean isValid() {
        return true;
    }

    @Override
    public boolean isInvalid() {
        return false;
    }

    @Override
    public T get() {
        return value;
    }

    @Override
    public Seq<E> getErrors() throws RuntimeException {
        // A Valid has no errors by definition.
        throw new NoSuchElementException("errors of 'valid' Validation");
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        return obj instanceof Valid && Objects.equals(value, ((Valid<?, ?>) obj).value);
    }

    @Override
    public int hashCode() {
        return HashCodes.hash(value);
    }

    @Override
    public String stringPrefix() {
        return "Valid";
    }

    @Override
    public String toString() {
        return stringPrefix() + "(" + value + ")";
    }
}
/**
 * The failure case of a {@code Validation}, carrying a sequence of errors.
 *
 * @param <E> type of the errors of this Validation
 * @param <T> type of the value of this Validation
 */
final class Invalid<E, T> implements Validation<E, T>, Serializable {
    private static final long serialVersionUID = 1L;

    private final Seq<E> errors;

    /**
     * Creates an {@code Invalid} from a single error.
     *
     * @param error the sole error of this Invalid
     */
    private Invalid(E error) {
        this.errors = List.of(error);
    }

    /**
     * Creates an {@code Invalid} from a sequence of errors.
     *
     * @param errors the errors of this Invalid
     */
    private Invalid(Seq<E> errors) {
        this.errors = errors;
    }

    @Override
    public boolean isValid() {
        return false;
    }

    @Override
    public boolean isInvalid() {
        return true;
    }

    @Override
    public T get() throws RuntimeException {
        // An Invalid has no value by definition.
        throw new NoSuchElementException("get of 'invalid' Validation");
    }

    @Override
    public Seq<E> getErrors() {
        return errors;
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == this) {
            return true;
        }
        return obj instanceof Invalid && Objects.equals(errors, ((Invalid<?, ?>) obj).errors);
    }

    @Override
    public int hashCode() {
        return HashCodes.hash(errors);
    }

    @Override
    public String stringPrefix() {
        return "Invalid";
    }

    @Override
    public String toString() {
        return stringPrefix() + "(" + errors + ")";
    }
}
/**
 * Accumulates two validations so they can be combined with a 2-ary function,
 * collecting errors from both sides.
 *
 * @param <E>  error type
 * @param <T1> value type of the first validation
 * @param <T2> value type of the second validation
 */
final class Builder<E, T1, T2> {
    // Fields are final: a Builder is an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;

    private Builder(Validation<E, T1> v1, Validation<E, T2> v2) {
        this.v1 = v1;
        this.v2 = v2;
    }

    /** Applies {@code f} if both validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function2<T1, T2, R> f) {
        return v2.ap(v1.ap(Validation.valid(f.curried())));
    }

    /** Adds a third validation to the combination. */
    public <T3> Builder3<E, T1, T2, T3> combine(Validation<E, T3> v3) {
        return new Builder3<>(v1, v2, v3);
    }
}
/**
 * Accumulates three validations so they can be combined with a 3-ary function,
 * collecting errors from all sides.
 */
final class Builder3<E, T1, T2, T3> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;

    private Builder3(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function3<T1, T2, T3, R> f) {
        return v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))));
    }

    /** Adds a fourth validation to the combination. */
    public <T4> Builder4<E, T1, T2, T3, T4> combine(Validation<E, T4> v4) {
        return new Builder4<>(v1, v2, v3, v4);
    }
}
/**
 * Accumulates four validations so they can be combined with a 4-ary function,
 * collecting errors from all sides.
 */
final class Builder4<E, T1, T2, T3, T4> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;
    private final Validation<E, T4> v4;

    private Builder4(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
        this.v4 = v4;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function4<T1, T2, T3, T4, R> f) {
        return v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))));
    }

    /** Adds a fifth validation to the combination. */
    public <T5> Builder5<E, T1, T2, T3, T4, T5> combine(Validation<E, T5> v5) {
        return new Builder5<>(v1, v2, v3, v4, v5);
    }
}
/**
 * Accumulates five validations so they can be combined with a 5-ary function,
 * collecting errors from all sides.
 */
final class Builder5<E, T1, T2, T3, T4, T5> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;
    private final Validation<E, T4> v4;
    private final Validation<E, T5> v5;

    private Builder5(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
        this.v4 = v4;
        this.v5 = v5;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function5<T1, T2, T3, T4, T5, R> f) {
        return v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))))));
    }

    /** Adds a sixth validation to the combination. */
    public <T6> Builder6<E, T1, T2, T3, T4, T5, T6> combine(Validation<E, T6> v6) {
        return new Builder6<>(v1, v2, v3, v4, v5, v6);
    }
}
/**
 * Accumulates six validations so they can be combined with a 6-ary function,
 * collecting errors from all sides.
 */
final class Builder6<E, T1, T2, T3, T4, T5, T6> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;
    private final Validation<E, T4> v4;
    private final Validation<E, T5> v5;
    private final Validation<E, T6> v6;

    private Builder6(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
        this.v4 = v4;
        this.v5 = v5;
        this.v6 = v6;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function6<T1, T2, T3, T4, T5, T6, R> f) {
        return v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))))));
    }

    /** Adds a seventh validation to the combination. */
    public <T7> Builder7<E, T1, T2, T3, T4, T5, T6, T7> combine(Validation<E, T7> v7) {
        return new Builder7<>(v1, v2, v3, v4, v5, v6, v7);
    }
}
/**
 * Accumulates seven validations so they can be combined with a 7-ary function,
 * collecting errors from all sides.
 */
final class Builder7<E, T1, T2, T3, T4, T5, T6, T7> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;
    private final Validation<E, T4> v4;
    private final Validation<E, T5> v5;
    private final Validation<E, T6> v6;
    private final Validation<E, T7> v7;

    private Builder7(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6, Validation<E, T7> v7) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
        this.v4 = v4;
        this.v5 = v5;
        this.v6 = v6;
        this.v7 = v7;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function7<T1, T2, T3, T4, T5, T6, T7, R> f) {
        return v7.ap(v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried()))))))));
    }

    /** Adds an eighth validation to the combination. */
    public <T8> Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> combine(Validation<E, T8> v8) {
        return new Builder8<>(v1, v2, v3, v4, v5, v6, v7, v8);
    }
}
/**
 * Accumulates eight validations so they can be combined with an 8-ary function,
 * collecting errors from all sides. This is the widest supported combination.
 */
final class Builder8<E, T1, T2, T3, T4, T5, T6, T7, T8> {
    // Fields are final: an immutable holder, never mutated after construction.
    private final Validation<E, T1> v1;
    private final Validation<E, T2> v2;
    private final Validation<E, T3> v3;
    private final Validation<E, T4> v4;
    private final Validation<E, T5> v5;
    private final Validation<E, T6> v6;
    private final Validation<E, T7> v7;
    private final Validation<E, T8> v8;

    private Builder8(Validation<E, T1> v1, Validation<E, T2> v2, Validation<E, T3> v3, Validation<E, T4> v4, Validation<E, T5> v5, Validation<E, T6> v6, Validation<E, T7> v7, Validation<E, T8> v8) {
        this.v1 = v1;
        this.v2 = v2;
        this.v3 = v3;
        this.v4 = v4;
        this.v5 = v5;
        this.v6 = v6;
        this.v7 = v7;
        this.v8 = v8;
    }

    /** Applies {@code f} if all validations are valid, otherwise accumulates the errors. */
    public <R> Validation<E, R> ap(Function8<T1, T2, T3, T4, T5, T6, T7, T8, R> f) {
        return v8.ap(v7.ap(v6.ap(v5.ap(v4.ap(v3.ap(v2.ap(v1.ap(Validation.valid(f.curried())))))))));
    }
}
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.pulsar.client.impl;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
import java.lang.reflect.Field;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import org.apache.pulsar.broker.service.BrokerTestBase;
import org.apache.pulsar.client.admin.PulsarAdminException;
import org.apache.pulsar.client.api.Consumer;
import org.apache.pulsar.client.api.Message;
import org.apache.pulsar.client.api.MessageBuilder;
import org.apache.pulsar.client.api.MessageId;
import org.apache.pulsar.client.api.MessageRoutingMode;
import org.apache.pulsar.client.api.Producer;
import org.apache.pulsar.client.api.PulsarClientException;
import org.apache.pulsar.client.impl.ProducerImpl.OpSendMsg;
import org.apache.pulsar.client.impl.conf.ClientConfigurationData;
import org.apache.pulsar.common.api.ByteBufPair;
import org.apache.pulsar.common.api.Commands;
import org.apache.pulsar.common.api.Commands.ChecksumType;
import org.apache.pulsar.common.api.proto.PulsarApi.MessageMetadata;
import org.apache.pulsar.common.api.proto.PulsarApi.MessageMetadata.Builder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.testng.Assert;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import io.netty.buffer.ByteBuf;
public class MessageIdTest extends BrokerTestBase {
private static final Logger log = LoggerFactory.getLogger(MessageIdTest.class);
@BeforeMethod
@Override
public void setup() throws Exception {
baseSetup();
}
@AfterMethod
@Override
protected void cleanup() throws Exception {
internalCleanup();
}
@Test(timeOut = 10000)
public void producerSendAsync() throws PulsarClientException {
// 1. Basic Config
String key = "producerSendAsync";
final String topicName = "persistent://prop/cluster/namespace/topic-" + key;
final String subscriptionName = "my-subscription-" + key;
final String messagePredicate = "my-message-" + key + "-";
final int numberOfMessages = 30;
// 2. Create Producer
Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
// 3. Create Consumer
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
.subscribe();
// 4. Publish message and get message id
Set<MessageId> messageIds = new HashSet<>();
List<Future<MessageId>> futures = new ArrayList<>();
for (int i = 0; i < numberOfMessages; i++) {
String message = messagePredicate + i;
futures.add(producer.sendAsync(message.getBytes()));
}
MessageIdImpl previousMessageId = null;
for (Future<MessageId> f : futures) {
try {
MessageIdImpl currentMessageId = (MessageIdImpl) f.get();
if (previousMessageId != null) {
Assert.assertTrue(currentMessageId.compareTo(previousMessageId) > 0,
"Message Ids should be in ascending order");
}
messageIds.add(currentMessageId);
previousMessageId = currentMessageId;
} catch (Exception e) {
Assert.fail("Failed to publish message, Exception: " + e.getMessage());
}
}
// 4. Check if message Ids are correct
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), numberOfMessages, "Not all messages published successfully");
for (int i = 0; i < numberOfMessages; i++) {
Message<byte[]> message = consumer.receive();
Assert.assertEquals(new String(message.getData()), messagePredicate + i);
MessageId messageId = message.getMessageId();
Assert.assertTrue(messageIds.remove(messageId), "Failed to receive message");
}
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), 0, "Not all messages received successfully");
consumer.unsubscribe();
}
@Test(timeOut = 10000)
public void producerSend() throws PulsarClientException {
// 1. Basic Config
String key = "producerSend";
final String topicName = "persistent://prop/cluster/namespace/topic-" + key;
final String subscriptionName = "my-subscription-" + key;
final String messagePredicate = "my-message-" + key + "-";
final int numberOfMessages = 30;
// 2. Create Producer
Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName).create();
// 3. Create Consumer
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
.subscribe();
// 4. Publish message and get message id
Set<MessageId> messageIds = new HashSet<>();
for (int i = 0; i < numberOfMessages; i++) {
String message = messagePredicate + i;
messageIds.add(producer.send(message.getBytes()));
}
// 4. Check if message Ids are correct
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), numberOfMessages, "Not all messages published successfully");
for (int i = 0; i < numberOfMessages; i++) {
Assert.assertTrue(messageIds.remove(consumer.receive().getMessageId()), "Failed to receive Message");
}
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), 0, "Not all messages received successfully");
consumer.unsubscribe();
;
}
@Test(timeOut = 10000)
public void partitionedProducerSendAsync() throws PulsarClientException, PulsarAdminException {
// 1. Basic Config
String key = "partitionedProducerSendAsync";
final String topicName = "persistent://prop/cluster/namespace/topic-" + key;
final String subscriptionName = "my-subscription-" + key;
final String messagePredicate = "my-message-" + key + "-";
final int numberOfMessages = 30;
int numberOfPartitions = 3;
admin.persistentTopics().createPartitionedTopic(topicName, numberOfPartitions);
// 2. Create Producer
Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
// 3. Create Consumer
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
.subscribe();
// 4. Publish message and get message id
Set<MessageId> messageIds = new HashSet<>();
Set<Future<MessageId>> futures = new HashSet<>();
for (int i = 0; i < numberOfMessages; i++) {
String message = messagePredicate + i;
futures.add(producer.sendAsync(message.getBytes()));
}
futures.forEach(f -> {
try {
messageIds.add(f.get());
} catch (Exception e) {
Assert.fail("Failed to publish message, Exception: " + e.getMessage());
}
});
// 4. Check if message Ids are correct
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), numberOfMessages, "Not all messages published successfully");
for (int i = 0; i < numberOfMessages; i++) {
MessageId topicMessageId = consumer.receive().getMessageId();
MessageId messageId = ((TopicMessageIdImpl)topicMessageId).getInnerMessageId();
log.info("Message ID Received = " + messageId);
Assert.assertTrue(messageIds.remove(messageId), "Failed to receive Message");
}
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), 0, "Not all messages received successfully");
consumer.unsubscribe();
}
@Test(timeOut = 10000)
public void partitionedProducerSend() throws PulsarClientException, PulsarAdminException {
// 1. Basic Config
String key = "partitionedProducerSend";
final String topicName = "persistent://prop/cluster/namespace/topic-" + key;
final String subscriptionName = "my-subscription-" + key;
final String messagePredicate = "my-message-" + key + "-";
final int numberOfMessages = 30;
int numberOfPartitions = 7;
admin.persistentTopics().createPartitionedTopic(topicName, numberOfPartitions);
// 2. Create Producer
Producer<byte[]> producer = pulsarClient.newProducer().topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
// 3. Create Consumer
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName(subscriptionName)
.subscribe();
// 4. Publish message and get message id
Set<MessageId> messageIds = new HashSet<>();
for (int i = 0; i < numberOfMessages; i++) {
String message = messagePredicate + i;
messageIds.add(producer.send(message.getBytes()));
}
// 4. Check if message Ids are correct
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), numberOfMessages, "Not all messages published successfully");
for (int i = 0; i < numberOfMessages; i++) {
MessageId topicMessageId = consumer.receive().getMessageId();
MessageId messageId = ((TopicMessageIdImpl)topicMessageId).getInnerMessageId();
Assert.assertTrue(messageIds.remove(messageId), "Failed to receive Message");
}
log.info("Message IDs = " + messageIds);
Assert.assertEquals(messageIds.size(), 0, "Not all messages received successfully");
// TODO - this statement causes the broker to hang - need to look into
// it
// consumer.unsubscribe();;
}
/**
* Verifies: different versions of broker-deployment (one broker understands Checksum and other doesn't in that case
* remove checksum before sending to broker-2)
*
* client first produce message with checksum and then retries to send message due to connection unavailable. But
* this time, if broker doesn't understand checksum: then client should remove checksum from the message before
* sending to broker.
*
* 1. stop broker 2. client compute checksum and add into message 3. produce 2 messages and corrupt 1 message 4.
* start broker with lower version (which doesn't support checksum) 5. client reconnects to broker and due to
* incompatibility of version: removes checksum from message 6. broker doesn't do checksum validation and persist
* message 7. client receives ack
*
* @throws Exception
*/
@Test
public void testChecksumVersionComptability() throws Exception {
final String topicName = "persistent://prop/use/ns-abc/topic1";
// 1. producer connect
ProducerImpl<byte[]> prod = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
ProducerImpl<byte[]> producer = spy(prod);
// return higher version compare to broker : so, it forces client-producer to remove checksum from payload
doReturn(producer.brokerChecksumSupportedVersion() + 1).when(producer).brokerChecksumSupportedVersion();
doAnswer(invocationOnMock -> prod.getState()).when(producer).getState();
doAnswer(invocationOnMock -> prod.getClientCnx()).when(producer).getClientCnx();
doAnswer(invocationOnMock -> prod.cnx()).when(producer).cnx();
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe();
// Stop the broker, and publishes messages. Messages are accumulated in the producer queue and they're checksums
// would have already been computed. If we change the message content at that point, it should result in a
// checksum validation error
stopBroker();
// stop timer to auto-reconnect as let spy-Producer connect to broker manually so, spy-producer object can get
// mock-value from brokerChecksumSupportedVersion
((PulsarClientImpl) pulsarClient).timer().stop();
ClientCnx mockClientCnx = spy(
new ClientCnx(new ClientConfigurationData(), ((PulsarClientImpl) pulsarClient).eventLoopGroup()));
doReturn(producer.brokerChecksumSupportedVersion() - 1).when(mockClientCnx).getRemoteEndpointProtocolVersion();
prod.setClientCnx(mockClientCnx);
Message<byte[]> msg1 = MessageBuilder.create().setContent("message-1".getBytes()).build();
CompletableFuture<MessageId> future1 = producer.sendAsync(msg1);
Message<byte[]> msg2 = MessageBuilder.create().setContent("message-2".getBytes()).build();
CompletableFuture<MessageId> future2 = producer.sendAsync(msg2);
// corrupt the message
msg2.getData()[msg2.getData().length - 1] = '3'; // new content would be 'message-3'
prod.setClientCnx(null);
// Restart the broker to have the messages published
startBroker();
// grab broker connection with mocked producer which has higher version compare to broker
prod.grabCnx();
try {
// it should not fail: as due to unsupported version of broker: client removes checksum and broker should
// ignore the checksum validation
future1.get();
future2.get();
} catch (Exception e) {
e.printStackTrace();
fail("Broker shouldn't verify checksum for corrupted message and it shouldn't fail");
}
((ConsumerImpl<byte[]>) consumer).grabCnx();
// We should only receive msg1
Message<byte[]> msg = consumer.receive(1, TimeUnit.SECONDS);
assertEquals(new String(msg.getData()), "message-1");
msg = consumer.receive(1, TimeUnit.SECONDS);
assertEquals(new String(msg.getData()), "message-3");
}
@Test
public void testChecksumReconnection() throws Exception {
final String topicName = "persistent://prop/use/ns-abc/topic1";
// 1. producer connect
ProducerImpl<byte[]> prod = (ProducerImpl<byte[]>) pulsarClient.newProducer().topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.create();
ProducerImpl<byte[]> producer = spy(prod);
// mock: broker-doesn't support checksum (remote_version < brokerChecksumSupportedVersion) so, it forces
// client-producer to perform checksum-strip from msg at reconnection
doReturn(producer.brokerChecksumSupportedVersion() + 1).when(producer).brokerChecksumSupportedVersion();
doAnswer(invocationOnMock -> prod.getState()).when(producer).getState();
doAnswer(invocationOnMock -> prod.getClientCnx()).when(producer).getClientCnx();
doAnswer(invocationOnMock -> prod.cnx()).when(producer).cnx();
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe();
stopBroker();
// stop timer to auto-reconnect as let spy-Producer connect to broker
// manually so, spy-producer object can get
// mock-value from brokerChecksumSupportedVersion
((PulsarClientImpl) pulsarClient).timer().stop();
// set clientCnx mock to get non-checksum supported version
ClientCnx mockClientCnx = spy(
new ClientCnx(new ClientConfigurationData(), ((PulsarClientImpl) pulsarClient).eventLoopGroup()));
doReturn(producer.brokerChecksumSupportedVersion() - 1).when(mockClientCnx).getRemoteEndpointProtocolVersion();
prod.setClientCnx(mockClientCnx);
Message<byte[]> msg1 = MessageBuilder.create().setContent("message-1".getBytes()).build();
CompletableFuture<MessageId> future1 = producer.sendAsync(msg1);
Message<byte[]> msg2 = MessageBuilder.create().setContent("message-2".getBytes()).build();
CompletableFuture<MessageId> future2 = producer.sendAsync(msg2);
// corrupt the message
msg2.getData()[msg2.getData().length - 1] = '3'; // new content would be
// 'message-3'
// unset mock
prod.setClientCnx(null);
// Restart the broker to have the messages published
startBroker();
// grab broker connection with mocked producer which has higher version
// compare to broker
prod.grabCnx();
try {
// it should not fail: as due to unsupported version of broker:
// client removes checksum and broker should
// ignore the checksum validation
future1.get(10, TimeUnit.SECONDS);
future2.get(10, TimeUnit.SECONDS);
} catch (Exception e) {
e.printStackTrace();
fail("Broker shouldn't verify checksum for corrupted message and it shouldn't fail");
}
((ConsumerImpl<byte[]>) consumer).grabCnx();
// We should only receive msg1
Message<byte[]> msg = consumer.receive(1, TimeUnit.SECONDS);
assertEquals(new String(msg.getData()), "message-1");
msg = consumer.receive(1, TimeUnit.SECONDS);
assertEquals(new String(msg.getData()), "message-3");
}
/**
* Verifies: if message is corrupted before sending to broker and if broker gives checksum error: then 1.
* Client-Producer recomputes checksum with modified data 2. Retry message-send again 3. Broker verifies checksum 4.
* client receives send-ack success
*
* @throws Exception
*/
@Test
public void testCorruptMessageRemove() throws Exception {
final String topicName = "persistent://prop/use/ns-abc/retry-topic";
// 1. producer connect
ProducerImpl<byte[]> prod = (ProducerImpl<byte[]>) pulsarClient.newProducer()
.topic(topicName)
.enableBatching(false)
.messageRoutingMode(MessageRoutingMode.SinglePartition)
.sendTimeout(10, TimeUnit.MINUTES)
.create();
ProducerImpl<byte[]> producer = spy(prod);
Field producerIdField = ProducerImpl.class.getDeclaredField("producerId");
producerIdField.setAccessible(true);
long producerId = (long) producerIdField.get(producer);
producer.cnx().registerProducer(producerId, producer); // registered spy ProducerImpl
Consumer<byte[]> consumer = pulsarClient.newConsumer().topic(topicName).subscriptionName("my-sub").subscribe();
// 2. Stop the broker, and publishes messages. Messages are accumulated in the producer queue and they're
// checksums
// would have already been computed. If we change the message content at that point, it should result in a
// checksum validation error
// enable checksum at producer
stopBroker();
Message<byte[]> msg = MessageBuilder.create().setContent("message-1".getBytes()).build();
CompletableFuture<MessageId> future = producer.sendAsync(msg);
// 3. corrupt the message
msg.getData()[msg.getData().length - 1] = '2'; // new content would be 'message-3'
// 4. Restart the broker to have the messages published
startBroker();
try {
future.get();
fail("send message should have failed with checksum excetion");
} catch (Exception e) {
if (e.getCause() instanceof PulsarClientException.ChecksumException) {
// ok (callback should get checksum exception as message was modified and corrupt)
} else {
fail("Callback should have only failed with ChecksumException", e);
}
}
// 5. Verify
/**
* verify: ProducerImpl.verifyLocalBufferIsNotCorrupted() => validates if message is corrupt
*/
MessageImpl<byte[]> msg2 = (MessageImpl<byte[]>) MessageBuilder.create().setContent("message-1".getBytes())
.build();
ByteBuf payload = msg2.getDataBuffer();
Builder metadataBuilder = ((MessageImpl<byte[]>) msg).getMessageBuilder();
MessageMetadata msgMetadata = metadataBuilder.setProducerName("test").setSequenceId(1).setPublishTime(10L)
.build();
ByteBufPair cmd = Commands.newSend(producerId, 1, 1, ChecksumType.Crc32c, msgMetadata, payload);
// (a) create OpSendMsg with message-data : "message-1"
OpSendMsg op = OpSendMsg.create(((MessageImpl<byte[]>) msg), cmd, 1, null);
// a.verify: as message is not corrupt: no need to update checksum
assertTrue(producer.verifyLocalBufferIsNotCorrupted(op));
// (b) corrupt message
msg2.getData()[msg2.getData().length - 1] = '2'; // new content would be 'message-2'
// b. verify: as message is corrupt: update checksum
assertFalse(producer.verifyLocalBufferIsNotCorrupted(op));
assertEquals(producer.getPendingQueueSize(), 0);
// [2] test-recoverChecksumError functionality
stopBroker();
MessageImpl<byte[]> msg1 = (MessageImpl<byte[]>) MessageBuilder.create().setContent("message-1".getBytes())
.build();
future = producer.sendAsync(msg1);
ClientCnx cnx = spy(
new ClientCnx(new ClientConfigurationData(), ((PulsarClientImpl) pulsarClient).eventLoopGroup()));
String exc = "broker is already stopped";
// when client-try to recover checksum by resending to broker: throw exception as broker is stopped
doThrow(new IllegalStateException(exc)).when(cnx).ctx();
try {
producer.recoverChecksumError(cnx, 1);
fail("it should call : resendMessages() => which should throw above mocked exception");
} catch (IllegalStateException e) {
assertEquals(exc, e.getMessage());
}
producer.close();
consumer.close();
producer = null; // clean reference of mocked producer
}
}
| |
package org.drools.integrationtests;
import org.drools.Cheese;
import org.drools.CommonTestMethodBase;
import org.junit.Test;
import org.kie.KnowledgeBase;
import org.kie.builder.KnowledgeBuilder;
import org.kie.builder.KnowledgeBuilderFactory;
import org.kie.io.ResourceFactory;
import org.kie.io.ResourceType;
import org.kie.runtime.StatefulKnowledgeSession;
import java.util.ArrayList;
import java.util.List;
/**
 * Tests for DRL "named consequences": the {@code do[name]} / {@code break[name]}
 * conditional-branch elements and their {@code then[name]} blocks, including
 * if/else forms, interaction with OR groups, and compile-error cases.
 */
public class NamedConsequencesTest extends CommonTestMethodBase {

    @Test
    public void testNamedConsequences() {
        List<String> results = executeTestWithCondition("do[t1]");

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "stilton" ) );
    }

    /**
     * Builds a rule with the given conditional element spliced between the
     * stilton and cheddar patterns, then runs it via {@link #executeTestWithDRL}.
     *
     * @param conditionElement DRL fragment such as {@code "do[t1]"} or
     *                         {@code "if ( price < 10 ) break[t1]"}
     * @return the contents of the {@code results} global after firing
     */
    private List<String> executeTestWithCondition(String conditionElement) {
        String drl = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    " + conditionElement + "\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        return executeTestWithDRL(drl);
    }

    /**
     * Compiles the given DRL, inserts a stilton(5), a cheddar(7) and a brie(5),
     * fires all rules and returns the {@code results} global.
     * The session is always disposed, even when firing throws (the
     * endless-loop guard tests below deliberately throw RuntimeException).
     */
    private List<String> executeTestWithDRL(String drl) {
        KnowledgeBase kbase = loadKnowledgeBaseFromString(drl);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        try {
            List<String> results = new ArrayList<String>();
            ksession.setGlobal( "results", results );

            Cheese stilton = new Cheese( "stilton", 5 );
            Cheese cheddar = new Cheese( "cheddar", 7 );
            Cheese brie = new Cheese( "brie", 5 );

            ksession.insert( stilton );
            ksession.insert( cheddar );
            ksession.insert( brie );

            ksession.fireAllRules();
            return results;
        } finally {
            // release working-memory resources deterministically
            ksession.dispose();
        }
    }

    @Test
    public void testNonCompilingBreakingConsequences() {
        // break[t1] as a bare conditional element (without an "if") must not compile
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    break[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testNonCompilingDuplicatedNamedConsequence() {
        // two then[t1] blocks in the same rule must not compile
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testOutOfScopeNamedConsequences() {
        // $b is bound after do[t1], so then[t1] must not be able to reference it
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $b.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testAllowedIfDo() {
        // condition true (stilton price 5 < 10): both main and t1 consequences fire
        List<String> results = executeTestWithCondition("if ( price < 10 ) do[t1]");

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testNotAllowedIfDo() {
        // condition false: only the main consequence fires
        List<String> results = executeTestWithCondition("if ( price > 10 ) do[t1]");

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "cheddar" ) );
    }

    @Test
    public void testAllowedIfBreak() {
        // break stops propagation: only t1 fires
        List<String> results = executeTestWithCondition("if ( price < 10 ) break[t1]");

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testNotAllowedIfBreak() {
        // break not taken: only the main consequence fires
        List<String> results = executeTestWithCondition("if ( price > 10 ) break[t1]");

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "cheddar" ) );
    }

    @Test
    public void testNamedConsequencesOutsideOR() {
        // $a is bound in every OR branch, so it is visible to do[t1] after the group
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" )\n" +
                "    or\n" +
                "    $a: Cheese ( type == \"gorgonzola\" ) )\n" +
                "    do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testNamedConsequencesInsideOR1() {
        // do[t1] attached to the first OR branch fires only when that branch matches
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" ) do[t1]\n" +
                "    or\n" +
                "    $b: Cheese ( type == \"gorgonzola\" ) )\n" +
                "    $c: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $c.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testNamedConsequencesInsideOR2() {
        // no gorgonzola fact is inserted, so the do[t1] branch never fires
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" )\n" +
                "    or\n" +
                "    $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
                "    $c: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $c.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $b.getType() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "cheddar" ) );
    }

    @Test
    public void testOutOfScopeNamedConsequencesWithOr1() {
        // then[t1] references $a, bound only in the other OR branch: must not compile
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" )\n" +
                "    or\n" +
                "    $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
                "    $c: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $c.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testOutOfScopeNamedConsequencesWithOr2() {
        // then[t1] references $c, bound after the OR group: must not compile
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" )\n" +
                "    or\n" +
                "    $b: Cheese ( type == \"gorgonzola\" ) do[t1] )\n" +
                "    $c: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $c.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $c.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testNonCompilingIFAfterOR() {
        // an "if" conditional element directly after an OR group must not compile
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    ( $a: Cheese ( type == \"stilton\" )\n" +
                "    or\n" +
                "    $a: Cheese ( type == \"gorgonzola\" ) )\n" +
                "    if ( price > 10 ) do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        KnowledgeBuilder kbuilder = KnowledgeBuilderFactory.newKnowledgeBuilder();
        kbuilder.add( ResourceFactory.newByteArrayResource(str.getBytes()), ResourceType.DRL );
        assertTrue(kbuilder.hasErrors());
    }

    @Test
    public void testIfElse1() {
        // price 5 is not > 10, so the else branch do[t2] fires
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price > 10 ) do[t1] else do[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "STILTON" ) );
    }

    @Test
    public void testIfElse2() {
        // same as testIfElse1 but with the condition and then-block order inverted
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price < 10 ) do[t1] else do[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 2, results.size() );
        assertTrue( results.contains( "cheddar" ) );
        assertTrue( results.contains( "STILTON" ) );
    }

    @Test
    public void testIfElseBreak() {
        // else-branch break[t2] stops propagation: only t2 fires
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price > 10 ) do[t1] else break[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "STILTON" ) );
    }

    @Test
    public void testNestedIfElseBreak() {
        // nested if / else if / else: the final break[t2] branch is taken
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( type.startsWith(\"a\") ) do[t0] else if ( price > 10 ) do[t1] else break[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t0]\n" +
                "    results.add( \"WRONG!\" );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "STILTON" ) );
    }

    @Test
    public void testIfWithModify() {
        // main consequence modifies $a so the break branch fires on re-evaluation
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price > 10 ) break[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test(expected=RuntimeException.class)
    public void testEndlessIfWithModify() {
        // do[t1] does not break propagation, so modify() re-triggers the rule
        // endlessly; the consequence throws once results grows past 10.
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price > 10 ) do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "    results.add( \"modify\" );\n" +
                "    if (results.size() > 10) throw new RuntimeException();\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "    if (results.size() > 10) throw new RuntimeException();\n" +
                "end\n";

        executeTestWithDRL(str);
    }

    @Test
    public void testIfWithModify2() {
        // break branch modifies $a; the main consequence then fires once
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price < 10 ) break[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $a.getType() );\n" +
                "then[t1]\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testIfWithModify3() {
        // a second rule performs the modify that makes R2's break branch fire
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\", price < 10 )\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "end\n" +
                "rule R2 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price > 10 ) break[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "stilton" ) );
    }

    @Test
    public void testIfElseWithModify() {
        // after the main consequence raises the price, break[t2] terminates the loop
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price < 10 ) do[t1] else break[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "end\n";

        List<String> results = executeTestWithDRL(str);

        assertEquals( 1, results.size() );
        assertTrue( results.contains( "STILTON" ) );
    }

    @Test(expected=RuntimeException.class)
    public void testEndlessIfElseWithModify() {
        // both branches use do[...], so modify() loops endlessly; the guard throws.
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    if ( price < 10 ) do[t1] else do[t2]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    modify( $a ) { setPrice(15) };\n" +
                "    results.add( \"modify\" );\n" +
                "    if (results.size() > 10) throw new RuntimeException();\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "then[t2]\n" +
                "    results.add( $a.getType().toUpperCase() );\n" +
                "    if (results.size() > 10) throw new RuntimeException();\n" +
                "end\n";

        executeTestWithDRL(str);
    }

    @Test
    public void testNamedConsequenceAfterNotPattern() {
        // DROOLS-5
        String str = "import org.drools.Cheese;\n " +
                "global java.util.List results;\n" +
                "\n" +
                "rule R1 when\n" +
                "    $a: Cheese ( type == \"stilton\" )\n" +
                "    not Cheese ( type == \"brie\" )\n" +
                "    do[t1]\n" +
                "    $b: Cheese ( type == \"cheddar\" )\n" +
                "then\n" +
                "    results.add( $b.getType() );\n" +
                "then[t1]\n" +
                "    results.add( $a.getType() );\n" +
                "end\n";

        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        try {
            List<String> results = new ArrayList<String>();
            ksession.setGlobal( "results", results );

            ksession.insert( new Cheese( "stilton", 5 ) );
            ksession.insert( new Cheese("cheddar", 7 ) );

            ksession.fireAllRules();

            assertTrue(results.contains("stilton"));
            assertTrue(results.contains("cheddar"));
        } finally {
            ksession.dispose();
        }
    }

    @Test
    public void testMultipleIfElseInARow() {
        // DROOLS-26
        String str =
                "global java.util.List results;" +
                "declare UnBlocker end \n" +
                "\n" +
                "declare Car\n" +
                "  colour\t: String \n" +
                "  price \t: int\n" +
                "  horsepower \t: int\n" +
                "  abs \t\t: boolean\n" +
                "end\n" +
                "\n" +
                "rule \"Init\" \n" +
                "when \n" +
                "then \n" +
                "  insert( \n" +
                "\tnew Car( \"red\", 1200, 170, true ) \n" +
                "  ); \n" +
                "end\n" +
                "\n" +
                "rule \"Car\" \n" +
                "when \n" +
                "  $car: Car( abs == true ) \n" +
                "  if ( colour == \"red\" ) do[red] " +
                "  else if ( colour != \"red\" ) do[notRed]\n" +
                "  if ( price < 1000 ) do[cheap] " +
                "  else do[notCheap]\n" +
                "  UnBlocker() \n" +
                "then\n" +
                "  results.add( \"Found a Car\" ); \n" +
                "then[red]\n" +
                "  results.add( \"Car is red\" ); " +
                "  insert( new UnBlocker() ); \n" +
                "then[notRed]\n" +
                "  results.add( \"Car is NOT red\" ); \n" +
                "then[cheap]\n" +
                "  results.add( \"Car is cheap\" ); \n" +
                "then[notCheap]\n" +
                "  results.add( \"Car is NOT cheap\" ); \n" +
                "end";

        KnowledgeBase kbase = loadKnowledgeBaseFromString(str);
        StatefulKnowledgeSession ksession = kbase.newStatefulKnowledgeSession();
        try {
            List<String> results = new ArrayList<String>();
            ksession.setGlobal("results", results);

            ksession.fireAllRules();

            assertEquals(3, results.size());
            assertTrue(results.contains("Found a Car"));
            assertTrue(results.contains("Car is red"));
            assertTrue(results.contains("Car is NOT cheap"));
        } finally {
            ksession.dispose();
        }
    }
}
| |
/*
* Copyright 2014-2016 Samsung Research America, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* -*- Mode: java; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
/**
* This file is derived from a file in Mozilla's Rhino project,
* org.mozilla.javascript.IRFactory. This version is heavily modified,
* keeping intact only the visitor pattern; all other functionality
* has been stripped out to build a generic visitor class.
*/
package com.samsung.sjs;
import org.mozilla.javascript.*;
import org.mozilla.javascript.ast.*;
import java.util.List;
import java.util.ArrayList;
/*
* This class is a heavily modified version of org.mozilla.javascript.IRFactory.
* The main changes are to make this more general visitor class, hijacking
* IRFactory's code layout as a starting point; see the visit(AstNode) method.
*/
/**
 * Generic pre-order visitor over Rhino ASTs.
 *
 * <p>{@link #visit(AstNode)} dispatches on the node's token type to a
 * {@code visitXxx} hook; the default hooks simply recurse into children.
 * Subclasses override the hooks they care about. Hooks for constructs the
 * SJS subset forbids (with, XML literals) throw
 * {@link UnsupportedOperationException}.
 */
public class ExternalRhinoVisitor
{

    public ExternalRhinoVisitor() {
    }

    /**
     * Dispatches {@code node} to the appropriate {@code visitXxx} hook based
     * on its token type; node types without a dedicated token are resolved
     * by {@code instanceof} checks in the default branch.
     *
     * @throws IllegalArgumentException if the node type is unrecognized
     */
    public void visit(AstNode node) {
        switch (node.getType()) {
          case Token.ARRAYCOMP:
              visitArrayComp((ArrayComprehension)node);
              break;
          case Token.ARRAYLIT:
              visitArrayLiteral((ArrayLiteral)node);
              break;
          case Token.BLOCK:
              visitBlock(node);
              break;
          case Token.BREAK:
              visitBreak((BreakStatement)node);
              break;
          case Token.CALL:
              visitFunctionCall((FunctionCall)node);
              break;
          case Token.CONTINUE:
              visitContinue((ContinueStatement)node);
              break;
          case Token.DO:
              visitDoLoop((DoLoop)node);
              break;
          case Token.EMPTY:
              visitEmpty(node);
              break;
          case Token.FOR:
              // Token.FOR covers both for(...;...;...) and for-in loops
              if (node instanceof ForInLoop) {
                  visitForInLoop((ForInLoop)node);
              } else {
                  visitForLoop((ForLoop)node);
              }
              break;
          case Token.FUNCTION:
              visitFunction((FunctionNode)node);
              break;
          case Token.GENEXPR:
              visitGenExpr((GeneratorExpression)node);
              break;
          case Token.GETELEM:
              visitElementGet((ElementGet)node);
              break;
          case Token.GETPROP:
              visitPropertyGet((PropertyGet)node);
              break;
          case Token.HOOK:
              visitCondExpr((ConditionalExpression)node);
              break;
          case Token.IF:
              visitIf((IfStatement)node);
              break;
          case Token.TRUE:
          case Token.FALSE:
          case Token.THIS:
          case Token.NULL:
          case Token.DEBUGGER:
              visitLiteral(node);
              break;
          case Token.NAME:
              visitName((Name)node);
              break;
          case Token.NUMBER:
              visitNumber((NumberLiteral)node);
              break;
          case Token.NEW:
              visitNewExpr((NewExpression)node);
              break;
          case Token.OBJECTLIT:
              visitObjectLiteral((ObjectLiteral)node);
              break;
          case Token.REGEXP:
              visitRegExp((RegExpLiteral)node);
              break;
          case Token.RETURN:
              visitReturn((ReturnStatement)node);
              break;
          case Token.SCRIPT:
              visitScript((ScriptNode)node);
              break;
          case Token.STRING:
              visitString((StringLiteral)node);
              break;
          case Token.SWITCH:
              visitSwitch((SwitchStatement)node);
              break;
          case Token.THROW:
              visitThrow((ThrowStatement)node);
              break;
          case Token.TRY:
              visitTry((TryStatement)node);
              break;
          case Token.WHILE:
              visitWhileLoop((WhileLoop)node);
              break;
          case Token.WITH:
              visitWith((WithStatement)node);
              break;
          case Token.YIELD:
              visitYield((Yield)node);
              break;
          default:
              // Node kinds that don't map 1:1 to a token type
              if (node instanceof ExpressionStatement) {
                  visitExprStmt((ExpressionStatement)node);
                  break;
              }
              if (node instanceof Assignment) {
                  visitAssignment((Assignment)node);
                  break;
              }
              if (node instanceof UnaryExpression) {
                  visitUnary((UnaryExpression)node);
                  break;
              }
              // XmlMemberGet is an InfixExpression subtype, so test it first
              if (node instanceof XmlMemberGet) {
                  visitXmlMemberGet((XmlMemberGet)node);
                  break;
              }
              if (node instanceof InfixExpression) {
                  visitInfix((InfixExpression)node);
                  break;
              }
              if (node instanceof VariableDeclaration) {
                  visitVariables((VariableDeclaration)node);
                  break;
              }
              if (node instanceof ParenthesizedExpression) {
                  visitParenExpr((ParenthesizedExpression)node);
                  break;
              }
              if (node instanceof LabeledStatement) {
                  visitLabeledStatement((LabeledStatement)node);
                  break;
              }
              if (node instanceof LetNode) {
                  visitLetNode((LetNode)node);
                  break;
              }
              if (node instanceof XmlRef) {
                  visitXmlRef((XmlRef)node);
                  break;
              }
              if (node instanceof XmlLiteral) {
                  visitXmlLiteral((XmlLiteral)node);
                  break;
              }
              throw new IllegalArgumentException("Can't visit: " + node);
        }
    }

    protected void visitArrayComp(ArrayComprehension node) {
        throw new UnsupportedOperationException();
    }

    /** Visits each array element, skipping elision holes (EMPTY slots). */
    protected void visitArrayLiteral(ArrayLiteral node) {
        List<AstNode> elems = node.getElements();
        for (int i = 0; i < elems.size(); ++i) {
            AstNode elem = elems.get(i);
            if (elem.getType() != Token.EMPTY) {
                visit(elem);
            }
        }
    }

    protected void visitAssignment(Assignment node) {
        visit(node.getLeft());
        visit(node.getRight());
    }

    protected void visitBlock(AstNode node) {
        for (Node kid : node) {
            visit((AstNode)kid);
        }
    }

    protected void visitBreak(BreakStatement node) {
    }

    protected void visitCondExpr(ConditionalExpression node) {
        visit(node.getTestExpression());
        visit(node.getTrueExpression());
        visit(node.getFalseExpression());
    }

    protected void visitContinue(ContinueStatement node) {
    }

    protected void visitDoLoop(DoLoop loop) {
        visit(loop.getBody());
        visit(loop.getCondition());
    }

    protected void visitElementGet(ElementGet node) {
        visit(node.getTarget());
        visit(node.getElement());
    }

    protected void visitExprStmt(ExpressionStatement node) {
        visit(node.getExpression());
    }

    protected void visitEmpty(Node node) {
    }

    protected void visitForInLoop(ForInLoop loop) {
        visit(loop.getIterator());
        visit(loop.getIteratedObject());
        visit(loop.getBody());
    }

    protected void visitForLoop(ForLoop loop) {
        visit(loop.getInitializer());
        visit(loop.getCondition());
        visit(loop.getIncrement());
        visit(loop.getBody());
    }

    /** Visits only the function body; parameters are Name nodes, not visited here. */
    protected void visitFunction(FunctionNode fn) {
        visit(fn.getBody());
    }

    protected void visitFunctionCall(FunctionCall node) {
        visit(node.getTarget());
        List<AstNode> args = node.getArguments();
        for (int i = 0; i < args.size(); i++) {
            AstNode arg = args.get(i);
            visit(arg);
        }
    }

    protected void visitGenExpr(GeneratorExpression node) {
        visit(node.getResult());
        List<GeneratorExpressionLoop> loops = node.getLoops();
        int numLoops = loops.size();
        for (int i = 0; i < numLoops; i++) {
            GeneratorExpressionLoop acl = loops.get(i);
            // only the iterated object is visited; the loop variable is a binding
            visit(acl.getIteratedObject());
        }
        if (node.getFilter() != null) {
            visit(node.getFilter());
        }
    }

    protected void visitIf(IfStatement n) {
        visit(n.getCondition());
        visit(n.getThenPart());
        if (n.getElsePart() != null) {
            visit(n.getElsePart());
        }
    }

    protected void visitInfix(InfixExpression node) {
        visit(node.getLeft());
        visit(node.getRight());
    }

    protected void visitLabeledStatement(LabeledStatement ls) {
        visit(ls.getStatement());
    }

    protected void visitLetNode(LetNode node) {
        visitVariableInitializers(node.getVariables());
        visit(node.getBody());
    }

    protected void visitLiteral(AstNode node) {
    }

    protected void visitName(Name node) {
    }

    protected void visitNewExpr(NewExpression node) {
        // The initializer is an experimental Rhino extension for allocating via
        // new C(...) { m: ..., f: ... }
        // where the object literal is used to initialize additional fields.
        // It's not standard JavaScript.
        //visit(node.getInitializer());
        List<AstNode> args = node.getArguments();
        for (int i = 0; i < args.size(); i++) {
            AstNode arg = args.get(i);
            visit(arg);
        }
    }

    protected void visitNumber(NumberLiteral node) {
    }

    /** Visits each property's value; property names are not visited. */
    protected void visitObjectLiteral(ObjectLiteral node) {
        List<ObjectProperty> elems = node.getElements();
        for (ObjectProperty prop : elems) {
            visit(prop.getRight());
        }
    }

    /** Unwraps nested parentheses and visits the innermost expression. */
    protected void visitParenExpr(ParenthesizedExpression node) {
        AstNode expr = node.getExpression();
        while (expr instanceof ParenthesizedExpression) {
            expr = ((ParenthesizedExpression)expr).getExpression();
        }
        visit(expr);
    }

    /** Visits only the target; the property name itself is not an expression. */
    protected void visitPropertyGet(PropertyGet node) {
        visit(node.getTarget());
    }

    protected void visitRegExp(RegExpLiteral node) {
    }

    protected void visitReturn(ReturnStatement node) {
        AstNode rv = node.getReturnValue();
        if (rv != null)
            visit(rv);
    }

    protected void visitScript(ScriptNode node) {
        for (Node kid : node) {
            visit((AstNode)kid);
        }
    }

    protected void visitString(StringLiteral node) {
    }

    protected void visitSwitch(SwitchStatement node) {
        for (SwitchCase sc : node.getCases()) {
            AstNode expr = sc.getExpression();
            if (expr != null) {
                // null expression means the "default:" case
                visit(expr);
            }
            List<AstNode> stmts = sc.getStatements();
            if (stmts != null) {
                for (AstNode kid : stmts) {
                    visit(kid);
                }
            }
        }
    }

    protected void visitThrow(ThrowStatement node) {
        visit(node.getExpression());
    }

    protected void visitTry(TryStatement node) {
        visit(node.getTryBlock());
        for (CatchClause cc : node.getCatchClauses()) {
            // catch condition is the Rhino extension: catch (e if cond)
            AstNode ccc = cc.getCatchCondition();
            if (ccc != null) {
                visit(ccc);
            }
            visit(cc.getBody());
        }
        if (node.getFinallyBlock() != null) {
            visit(node.getFinallyBlock());
        }
    }

    protected void visitUnary(UnaryExpression node) {
        int type = node.getType();
        // DEFAULTNAMESPACE ("default xml namespace = ...") is syntactically
        // unary in Rhino but gets its own hook
        if (type == Token.DEFAULTNAMESPACE) {
            visitDefaultXmlNamepace(node);
            return;
        }
        visit(node.getOperand());
    }

    protected void visitVariables(VariableDeclaration node) {
        visitVariableInitializers(node);
    }

    /** Visits each declared target and, when present, its initializer. */
    protected void visitVariableInitializers(VariableDeclaration node) {
        List<VariableInitializer> vars = node.getVariables();
        for (VariableInitializer var : vars) {
            AstNode target = var.getTarget();
            AstNode init = var.getInitializer();
            visit(target);
            if (init != null) visit(init);
        }
    }

    protected void visitWhileLoop(WhileLoop loop) {
        visit(loop.getCondition());
        visit(loop.getBody());
    }

    protected final void visitWith(WithStatement node) {
        throw new UnsupportedOperationException("SJS does not permit with statements");
    }

    protected void visitYield(Yield node) {
        if (node.getValue() != null)
            visit(node.getValue());
    }

    protected void visitXmlLiteral(XmlLiteral node) {
        throw new UnsupportedOperationException("SJS does not permit XML literals");
    }

    protected void visitXmlMemberGet(XmlMemberGet node) {
        throw new UnsupportedOperationException("SJS does not permit XML features");
    }

    // We get here if we weren't a child of a . or .. infix node
    protected void visitXmlRef(XmlRef node) {
        throw new UnsupportedOperationException("SJS does not permit XML features");
    }

    protected void visitDefaultXmlNamepace(UnaryExpression node) {
        visit(node.getOperand());
    }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.http;
import java.net.URI;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.camel.Consumer;
import org.apache.camel.PollingConsumer;
import org.apache.camel.Processor;
import org.apache.camel.Producer;
import org.apache.camel.http.common.HttpCommonEndpoint;
import org.apache.camel.spi.UriEndpoint;
import org.apache.camel.spi.UriParam;
import org.apache.camel.util.ObjectHelper;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.HttpConnectionManager;
import org.apache.commons.httpclient.auth.AuthPolicy;
import org.apache.commons.httpclient.params.HttpClientParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* For calling out to external HTTP servers using Apache HTTP Client 3.x.
*/
@UriEndpoint(firstVersion = "1.0.0", scheme = "http,https", title = "HTTP,HTTPS", syntax = "http:httpUri", producerOnly = true, label = "http", lenientProperties = true)
public class HttpEndpoint extends HttpCommonEndpoint {
// Note: all options must be documented with description in annotations so extended components can access the documentation
private static final Logger LOG = LoggerFactory.getLogger(HttpEndpoint.class);
private HttpClientParams clientParams;
@UriParam(label = "advanced")
private HttpClientConfigurer httpClientConfigurer;
@UriParam(label = "advanced", prefix = "httpClient.", multiValue = true)
private Map<String, Object> httpClientOptions;
@UriParam(label = "advanced")
private HttpConnectionManager httpConnectionManager;
@UriParam(label = "advanced", prefix = "httpConnectionManager.", multiValue = true)
private Map<String, Object> httpConnectionManagerOptions;
    /** Default constructor; all configuration is injected via setters. */
    public HttpEndpoint() {
    }
    /** Creates an endpoint with no explicit connection manager (delegates with null). */
    public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI) throws URISyntaxException {
        this(endPointURI, component, httpURI, null);
    }
    /** Creates an endpoint with default {@link HttpClientParams} and no client configurer. */
    public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI, HttpConnectionManager httpConnectionManager) throws URISyntaxException {
        this(endPointURI, component, httpURI, new HttpClientParams(), httpConnectionManager, null);
    }
    /** Creates an endpoint without an http URI (delegates with null URI). */
    public HttpEndpoint(String endPointURI, HttpComponent component, HttpClientParams clientParams,
                        HttpConnectionManager httpConnectionManager, HttpClientConfigurer clientConfigurer) throws URISyntaxException {
        this(endPointURI, component, null, clientParams, httpConnectionManager, clientConfigurer);
    }
    /**
     * Full constructor: all other constructors delegate here.
     * Stores the client params, configurer and connection manager used when
     * {@code createHttpClient()} builds new {@code HttpClient} instances.
     */
    public HttpEndpoint(String endPointURI, HttpComponent component, URI httpURI, HttpClientParams clientParams,
                        HttpConnectionManager httpConnectionManager, HttpClientConfigurer clientConfigurer) throws URISyntaxException {
        super(endPointURI, component, httpURI);
        this.clientParams = clientParams;
        this.httpClientConfigurer = clientConfigurer;
        this.httpConnectionManager = httpConnectionManager;
    }
public Producer createProducer() throws Exception {
return new HttpProducer(this);
}
    /**
     * Not supported: this is a producer-only endpoint, so consuming from it
     * always fails.
     *
     * @throws UnsupportedOperationException always
     */
    @Override
    public Consumer createConsumer(Processor processor) throws Exception {
        throw new UnsupportedOperationException("Cannot consume from http endpoint");
    }
public PollingConsumer createPollingConsumer() throws Exception {
HttpPollingConsumer answer = new HttpPollingConsumer(this);
configurePollingConsumer(answer);
return answer;
}
    /**
     * Factory method used by producers and consumers to create a new {@link HttpClient} instance.
     *
     * <p>Configuration is applied in order: proxy from CamelContext properties
     * ({@code http.proxyHost}/{@code http.proxyPort}), then the endpoint's own
     * proxy settings (which override the context-level proxy), then auth-scheme
     * priority, connection manager, and finally any custom
     * {@link HttpClientConfigurer}.
     *
     * @throws IllegalArgumentException if {@code authMethodPriority} contains an
     *         unknown auth method, or (via ObjectHelper) if clientParams or
     *         httpConnectionManager are null
     */
    public HttpClient createHttpClient() {
        ObjectHelper.notNull(clientParams, "clientParams");
        ObjectHelper.notNull(httpConnectionManager, "httpConnectionManager");
        HttpClient answer = new HttpClient(getClientParams());
        // configure http proxy from camelContext
        if (ObjectHelper.isNotEmpty(getCamelContext().getProperty("http.proxyHost")) && ObjectHelper.isNotEmpty(getCamelContext().getProperty("http.proxyPort"))) {
            String host = getCamelContext().getProperty("http.proxyHost");
            // NOTE(review): parseInt throws NumberFormatException on a malformed
            // http.proxyPort property value
            int port = Integer.parseInt(getCamelContext().getProperty("http.proxyPort"));
            LOG.debug("CamelContext properties http.proxyHost and http.proxyPort detected. Using http proxy host: {} port: {}", host, port);
            answer.getHostConfiguration().setProxy(host, port);
        }
        // endpoint-level proxy settings take precedence over the context properties above
        if (getProxyHost() != null) {
            LOG.debug("Using proxy: {}:{}", getProxyHost(), getProxyPort());
            answer.getHostConfiguration().setProxy(getProxyHost(), getProxyPort());
        }
        if (getAuthMethodPriority() != null) {
            List<String> authPrefs = new ArrayList<>();
            // authMethodPriority is a comma-separated string; the type converter
            // splits it into an iterator of tokens
            Iterator<?> it = getCamelContext().getTypeConverter().convertTo(Iterator.class, getAuthMethodPriority());
            int i = 1; // 1-based index used only for debug logging
            while (it.hasNext()) {
                Object value = it.next();
                AuthMethod auth = getCamelContext().getTypeConverter().convertTo(AuthMethod.class, value);
                if (auth == null) {
                    throw new IllegalArgumentException("Unknown authMethod: " + value + " in authMethodPriority: " + getAuthMethodPriority());
                }
                LOG.debug("Using authSchemePriority #{}: {}", i, auth);
                authPrefs.add(auth.name());
                i++;
            }
            if (!authPrefs.isEmpty()) {
                answer.getParams().setParameter(AuthPolicy.AUTH_SCHEME_PRIORITY, authPrefs);
            }
        }
        answer.setHttpConnectionManager(httpConnectionManager);
        // custom configurer gets the last word on the fully-assembled client
        HttpClientConfigurer configurer = getHttpClientConfigurer();
        if (configurer != null) {
            configurer.configureHttpClient(answer);
        }
        return answer;
    }
// Properties
//-------------------------------------------------------------------------
/**
 * Returns the client parameters applied to every new {@link HttpClient}
 * created for producers or consumers of this endpoint.
 *
 * @return the configured {@link HttpClientParams}
 */
public HttpClientParams getClientParams() {
    return this.clientParams;
}
/**
 * Sets the client parameters applied to every new {@link HttpClient}
 * created for producers or consumers of this endpoint.
 *
 * @param clientParams the parameters to use for new client instances
 */
public void setClientParams(HttpClientParams clientParams) {
    this.clientParams = clientParams;
}
/**
 * Returns the custom configuration strategy applied to new {@link HttpClient}
 * instances, or null if none is registered.
 *
 * @return the configurer, or null
 */
public HttpClientConfigurer getHttpClientConfigurer() {
return httpClientConfigurer;
}
/**
 * Registers a custom configuration strategy for new {@link HttpClient} instances
 * created by producers or consumers, e.g. to configure authentication mechanisms.
 *
 * @param httpClientConfigurer the strategy for configuring new {@link HttpClient} instances
 */
public void setHttpClientConfigurer(HttpClientConfigurer httpClientConfigurer) {
    this.httpClientConfigurer = httpClientConfigurer;
}
/**
 * Returns the connection manager used by clients created from this endpoint.
 *
 * @return the connection manager, or null if none is set
 */
public HttpConnectionManager getHttpConnectionManager() {
return httpConnectionManager;
}
/**
 * Sets a custom {@link HttpConnectionManager} used to manage connections
 * for clients created from this endpoint.
 *
 * @param httpConnectionManager the connection manager to use
 */
public void setHttpConnectionManager(HttpConnectionManager httpConnectionManager) {
    this.httpConnectionManager = httpConnectionManager;
}
/**
 * Returns the key/value options used to configure the HttpClient.
 *
 * @return the HttpClient options map, or null if none are set
 */
public Map<String, Object> getHttpClientOptions() {
return httpClientOptions;
}
/**
 * Configures the HttpClient using the key/value pairs from the given map.
 *
 * @param httpClientOptions options applied to the HttpClient
 */
public void setHttpClientOptions(Map<String, Object> httpClientOptions) {
    this.httpClientOptions = httpClientOptions;
}
/**
 * Returns the key/value options used to configure the HttpConnectionManager.
 *
 * @return the connection-manager options map, or null if none are set
 */
public Map<String, Object> getHttpConnectionManagerOptions() {
return httpConnectionManagerOptions;
}
/**
 * Configures the HttpConnectionManager using the key/value pairs from the given map.
 *
 * @param httpConnectionManagerOptions options applied to the HttpConnectionManager
 */
public void setHttpConnectionManagerOptions(Map<String, Object> httpConnectionManagerOptions) {
    this.httpConnectionManagerOptions = httpConnectionManagerOptions;
}
}
| |
/*
* This file is part of ClassGraph.
*
* Author: Luke Hutchison
*
* Hosted at: https://github.com/classgraph/classgraph
*
* --
*
* The MIT License (MIT)
*
* Copyright (c) 2019 Luke Hutchison
*
* Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
* documentation files (the "Software"), to deal in the Software without restriction, including without
* limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
* the Software, and to permit persons to whom the Software is furnished to do so, subject to the following
* conditions:
*
* The above copyright notice and this permission notice shall be included in all copies or substantial
* portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT
* LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO
* EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
* AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
* OR OTHER DEALINGS IN THE SOFTWARE.
*/
package io.github.classgraph;
import java.lang.annotation.Annotation;
import java.lang.annotation.Repeatable;
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.List;
import java.util.Map;
import java.util.Set;
import io.github.classgraph.ClassInfo.RelType;
import io.github.classgraph.Classfile.TypeAnnotationDecorator;
import nonapi.io.github.classgraph.types.ParseException;
import nonapi.io.github.classgraph.types.TypeUtils;
import nonapi.io.github.classgraph.types.TypeUtils.ModifierType;
import nonapi.io.github.classgraph.utils.Assert;
import nonapi.io.github.classgraph.utils.LogNode;
/**
 * Holds metadata about fields of a class encountered during a scan. All values are taken directly out of the
 * classfile for the class.
 */
public class FieldInfo extends ScanResultObject implements Comparable<FieldInfo>, HasName {
    /** The declaring class name. */
    private String declaringClassName;

    /** The name of the field. */
    private String name;

    /** The modifiers. */
    private int modifiers;

    /** The type signature string. */
    private String typeSignatureStr;

    /** The type descriptor string. */
    private String typeDescriptorStr;

    /** The parsed type signature. */
    private transient TypeSignature typeSignature;

    /** The parsed type descriptor. */
    private transient TypeSignature typeDescriptor;

    /** The constant initializer value for the field, if any. */
    // This is transient because the constant initializer value is final, so the value doesn't need to be serialized
    private ObjectTypedValueWrapper constantInitializerValue;

    /** The annotation on the field, if any. */
    AnnotationInfoList annotationInfo;

    /** The type annotation decorators for the {@link TypeSignature} instance of this field. */
    private List<TypeAnnotationDecorator> typeAnnotationDecorators;

    // -------------------------------------------------------------------------------------------------------------

    /** Default constructor for deserialization. */
    FieldInfo() {
        super();
    }

    /**
     * Constructor.
     *
     * @param definingClassName
     *            The class the field is defined within.
     * @param fieldName
     *            The name of the field.
     * @param modifiers
     *            The field modifiers.
     * @param typeDescriptorStr
     *            The field type descriptor.
     * @param typeSignatureStr
     *            The field type signature.
     * @param constantInitializerValue
     *            The static constant value the field is initialized to, if any.
     * @param annotationInfo
     *            {@link AnnotationInfo} for any annotations on the field.
     */
    FieldInfo(final String definingClassName, final String fieldName, final int modifiers,
            final String typeDescriptorStr, final String typeSignatureStr, final Object constantInitializerValue,
            final AnnotationInfoList annotationInfo, final List<TypeAnnotationDecorator> typeAnnotationDecorators) {
        super();
        if (fieldName == null) {
            throw new IllegalArgumentException();
        }
        this.declaringClassName = definingClassName;
        this.name = fieldName;
        this.modifiers = modifiers;
        this.typeDescriptorStr = typeDescriptorStr;
        this.typeSignatureStr = typeSignatureStr;
        this.constantInitializerValue = constantInitializerValue == null ? null
                : new ObjectTypedValueWrapper(constantInitializerValue);
        // normalize an empty annotation list to null so serialization stays compact
        this.annotationInfo = annotationInfo == null || annotationInfo.isEmpty() ? null : annotationInfo;
        this.typeAnnotationDecorators = typeAnnotationDecorators;
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Get the name of the field.
     *
     * @return The name of the field.
     */
    @Override
    public String getName() {
        return name;
    }

    /**
     * Get the {@link ClassInfo} object for the class that declares this field.
     *
     * @return The {@link ClassInfo} object for the declaring class.
     *
     * @see #getClassName()
     */
    @Override
    public ClassInfo getClassInfo() {
        return super.getClassInfo();
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Deprecated -- use {@link #getModifiersStr()} instead.
     *
     * @deprecated Use {@link #getModifiersStr()} instead.
     * @return The field modifiers, as a string.
     */
    @Deprecated
    public String getModifierStr() {
        return getModifiersStr();
    }

    /**
     * Get the field modifiers as a string, e.g. "public static final". For the modifier bits, call getModifiers().
     *
     * @return The field modifiers, as a string.
     */
    public String getModifiersStr() {
        final StringBuilder buf = new StringBuilder();
        TypeUtils.modifiersToString(modifiers, ModifierType.FIELD, /* ignored */ false, buf);
        return buf.toString();
    }

    /**
     * Returns true if this field is public.
     *
     * @return True if the field is public.
     */
    public boolean isPublic() {
        return Modifier.isPublic(modifiers);
    }

    /**
     * Returns true if this field is private.
     *
     * @return True if the field is private.
     */
    public boolean isPrivate() {
        return Modifier.isPrivate(modifiers);
    }

    /**
     * Returns true if this field is protected.
     *
     * @return True if the field is protected.
     */
    public boolean isProtected() {
        return Modifier.isProtected(modifiers);
    }

    /**
     * Returns true if this field is static.
     *
     * @return True if the field is static.
     */
    public boolean isStatic() {
        return Modifier.isStatic(modifiers);
    }

    /**
     * Returns true if this field is final.
     *
     * @return True if the field is final.
     */
    public boolean isFinal() {
        return Modifier.isFinal(modifiers);
    }

    /**
     * Returns true if this field is a transient field.
     *
     * @return True if the field is transient.
     */
    public boolean isTransient() {
        return Modifier.isTransient(modifiers);
    }

    /**
     * Returns true if this field is synthetic.
     *
     * @return True if the field is synthetic.
     */
    public boolean isSynthetic() {
        // 0x1000 is the ACC_SYNTHETIC access flag from the JVM classfile format
        return (modifiers & 0x1000) != 0;
    }

    /**
     * Returns true if this field is an enum constant.
     *
     * @return True if the field is an enum constant.
     */
    public boolean isEnum() {
        // 0x4000 is the ACC_ENUM access flag from the JVM classfile format
        return (modifiers & 0x4000) != 0;
    }

    /**
     * Returns the modifier bits for the field.
     *
     * @return The modifier bits.
     */
    public int getModifiers() {
        return modifiers;
    }

    /**
     * Returns the parsed type descriptor for the field, which will not include type parameters. If you need generic
     * type parameters, call {@link #getTypeSignature()} instead.
     *
     * @return The parsed type descriptor string for the field.
     */
    public TypeSignature getTypeDescriptor() {
        if (typeDescriptorStr == null) {
            return null;
        }
        // lazily parse and cache the descriptor on first access
        if (typeDescriptor == null) {
            try {
                typeDescriptor = TypeSignature.parse(typeDescriptorStr, declaringClassName);
                typeDescriptor.setScanResult(scanResult);
                if (typeAnnotationDecorators != null) {
                    for (final TypeAnnotationDecorator decorator : typeAnnotationDecorators) {
                        decorator.decorate(typeDescriptor);
                    }
                }
            } catch (final ParseException e) {
                throw new IllegalArgumentException(e);
            }
        }
        return typeDescriptor;
    }

    /**
     * Returns the type descriptor string for the field, which will not include type parameters. If you need generic
     * type parameters, call {@link #getTypeSignatureStr()} instead.
     *
     * @return The type descriptor string for the field.
     */
    public String getTypeDescriptorStr() {
        return typeDescriptorStr;
    }

    /**
     * Returns the parsed type signature for the field, possibly including type parameters. If this returns null,
     * indicating that no type signature information is available for this field, call {@link #getTypeDescriptor()}
     * instead.
     *
     * @return The parsed type signature for the field, or null if not available.
     * @throws IllegalArgumentException
     *             if the field type signature cannot be parsed (this should only be thrown in the case of classfile
     *             corruption, or a compiler bug that causes an invalid type signature to be written to the
     *             classfile).
     */
    public TypeSignature getTypeSignature() {
        if (typeSignatureStr == null) {
            return null;
        }
        // lazily parse and cache the signature on first access
        if (typeSignature == null) {
            try {
                typeSignature = TypeSignature.parse(typeSignatureStr, declaringClassName);
                typeSignature.setScanResult(scanResult);
                if (typeAnnotationDecorators != null) {
                    for (final TypeAnnotationDecorator decorator : typeAnnotationDecorators) {
                        decorator.decorate(typeSignature);
                    }
                }
            } catch (final ParseException e) {
                throw new IllegalArgumentException(
                        "Invalid type signature for field " + getClassName() + "." + getName()
                                + (getClassInfo() != null
                                        ? " in classpath element " + getClassInfo().getClasspathElementURI()
                                        : "")
                                + " : " + typeSignatureStr,
                        e);
            }
        }
        return typeSignature;
    }

    /**
     * Returns the type signature string for the field, possibly including type parameters. If this returns null,
     * indicating that no type signature information is available for this field, call
     * {@link #getTypeDescriptorStr()} instead.
     *
     * @return The type signature string for the field, or null if not available.
     */
    public String getTypeSignatureStr() {
        return typeSignatureStr;
    }

    /**
     * Returns the type signature for the field, possibly including type parameters. If the type signature is null,
     * indicating that no type signature information is available for this field, returns the type descriptor
     * instead.
     *
     * @return The parsed type signature for the field, or if not available, the parsed type descriptor for the
     *         field.
     */
    public TypeSignature getTypeSignatureOrTypeDescriptor() {
        TypeSignature typeSig = null;
        try {
            typeSig = getTypeSignature();
            if (typeSig != null) {
                return typeSig;
            }
        } catch (final Exception e) {
            // Ignore an unparseable signature and fall back to the descriptor
        }
        return getTypeDescriptor();
    }

    /**
     * Returns the type signature string for the field, possibly including type parameters. If the type signature
     * string is null, indicating that no type signature information is available for this field, returns the type
     * descriptor string instead.
     *
     * @return The type signature string for the field, or if not available, the type descriptor string for the
     *         field.
     */
    public String getTypeSignatureOrTypeDescriptorStr() {
        if (typeSignatureStr != null) {
            return typeSignatureStr;
        } else {
            return typeDescriptorStr;
        }
    }

    /**
     * Returns the constant initializer value of a field. Requires
     * {@link ClassGraph#enableStaticFinalFieldConstantInitializerValues()} to have been called. Will only return
     * non-null for fields that have constant initializers, which is usually only fields of primitive type, or
     * String constants. Also note that it is up to the compiler as to whether or not a constant-valued field is
     * assigned as a constant in the field definition itself, or whether it is assigned manually in static or
     * non-static class initializer blocks or the constructor -- so your mileage may vary in being able to extract
     * constant initializer values.
     *
     * @return The initializer value, if this field has a constant initializer value, or null if none.
     */
    public Object getConstantInitializerValue() {
        if (!scanResult.scanSpec.enableStaticFinalFieldConstantInitializerValues) {
            throw new IllegalArgumentException(
                    "Please call ClassGraph#enableStaticFinalFieldConstantInitializerValues() " + "before #scan()");
        }
        return constantInitializerValue == null ? null : constantInitializerValue.get();
    }

    /**
     * Get a list of annotations on this field, along with any annotation parameter values, wrapped in
     * {@link AnnotationInfo} objects.
     *
     * @return A list of annotations on this field, along with any annotation parameter values, wrapped in
     *         {@link AnnotationInfo} objects, or the empty list if none.
     */
    public AnnotationInfoList getAnnotationInfo() {
        if (!scanResult.scanSpec.enableAnnotationInfo) {
            throw new IllegalArgumentException("Please call ClassGraph#enableAnnotationInfo() before #scan()");
        }
        return annotationInfo == null ? AnnotationInfoList.EMPTY_LIST
                : AnnotationInfoList.getIndirectAnnotations(annotationInfo, /* annotatedClass = */ null);
    }

    /**
     * Get the non-{@link Repeatable} annotation on this field, or null if the field does not have the annotation.
     * (Use {@link #getAnnotationInfoRepeatable(Class)} for {@link Repeatable} annotations.)
     *
     * @param annotation
     *            The annotation.
     * @return An {@link AnnotationInfo} object representing the annotation on this field, or null if the field does
     *         not have the annotation.
     */
    public AnnotationInfo getAnnotationInfo(final Class<? extends Annotation> annotation) {
        Assert.isAnnotation(annotation);
        return getAnnotationInfo(annotation.getName());
    }

    /**
     * Get the named non-{@link Repeatable} annotation on this field, or null if the field does not have the named
     * annotation. (Use {@link #getAnnotationInfoRepeatable(String)} for {@link Repeatable} annotations.)
     *
     * @param annotationName
     *            The annotation name.
     * @return An {@link AnnotationInfo} object representing the named annotation on this field, or null if the
     *         field does not have the named annotation.
     */
    public AnnotationInfo getAnnotationInfo(final String annotationName) {
        return getAnnotationInfo().get(annotationName);
    }

    /**
     * Get the {@link Repeatable} annotation on this field, or the empty list if the field does not have the
     * annotation.
     *
     * @param annotation
     *            The annotation.
     * @return An {@link AnnotationInfoList} of all instances of the annotation on this field, or the empty list if
     *         the field does not have the annotation.
     */
    public AnnotationInfoList getAnnotationInfoRepeatable(final Class<? extends Annotation> annotation) {
        Assert.isAnnotation(annotation);
        return getAnnotationInfoRepeatable(annotation.getName());
    }

    /**
     * Get the named {@link Repeatable} annotation on this field, or the empty list if the field does not have the
     * named annotation.
     *
     * @param annotationName
     *            The annotation name.
     * @return An {@link AnnotationInfoList} of all instances of the named annotation on this field, or the empty
     *         list if the field does not have the named annotation.
     */
    public AnnotationInfoList getAnnotationInfoRepeatable(final String annotationName) {
        return getAnnotationInfo().getRepeatable(annotationName);
    }

    /**
     * Check if the field has a given annotation.
     *
     * @param annotation
     *            The annotation.
     * @return true if this field has the annotation.
     */
    public boolean hasAnnotation(final Class<? extends Annotation> annotation) {
        Assert.isAnnotation(annotation);
        return hasAnnotation(annotation.getName());
    }

    /**
     * Check if the field has a given named annotation.
     *
     * @param annotationName
     *            The name of an annotation.
     * @return true if this field has the named annotation.
     */
    public boolean hasAnnotation(final String annotationName) {
        return getAnnotationInfo().containsName(annotationName);
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Load the class this field is associated with, and get the {@link Field} reference for this field.
     *
     * @return The {@link Field} reference for this field.
     * @throws IllegalArgumentException
     *             if the field does not exist.
     */
    public Field loadClassAndGetField() throws IllegalArgumentException {
        try {
            return loadClass().getField(getName());
        } catch (final NoSuchFieldException e1) {
            // Not a public field -- fall back to declared (possibly non-public) fields
            try {
                return loadClass().getDeclaredField(getName());
            } catch (final NoSuchFieldException e2) {
                // Chain the cause so callers can see why the lookup failed
                throw new IllegalArgumentException("No such field: " + getClassName() + "." + getName(), e2);
            }
        }
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Handle {@link Repeatable} annotations.
     *
     * @param allRepeatableAnnotationNames
     *            the names of all repeatable annotations
     */
    void handleRepeatableAnnotations(final Set<String> allRepeatableAnnotationNames) {
        if (annotationInfo != null) {
            annotationInfo.handleRepeatableAnnotations(allRepeatableAnnotationNames, getClassInfo(),
                    RelType.FIELD_ANNOTATIONS, RelType.CLASSES_WITH_FIELD_ANNOTATION,
                    RelType.CLASSES_WITH_NONPRIVATE_FIELD_ANNOTATION);
        }
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Get the name of the class that declares this field.
     *
     * @return The name of the declaring class.
     *
     * @see #getClassInfo()
     */
    @Override
    public String getClassName() {
        return declaringClassName;
    }

    /* (non-Javadoc)
     * @see io.github.classgraph.ScanResultObject#setScanResult(io.github.classgraph.ScanResult)
     */
    @Override
    void setScanResult(final ScanResult scanResult) {
        super.setScanResult(scanResult);
        // Propagate the scan result to any already-parsed, cached sub-objects
        if (this.typeSignature != null) {
            this.typeSignature.setScanResult(scanResult);
        }
        if (this.typeDescriptor != null) {
            this.typeDescriptor.setScanResult(scanResult);
        }
        if (this.annotationInfo != null) {
            for (final AnnotationInfo ai : this.annotationInfo) {
                ai.setScanResult(scanResult);
            }
        }
    }

    /**
     * Get {@link ClassInfo} objects for any classes referenced in the type descriptor or type signature.
     *
     * @param classNameToClassInfo
     *            the map from class name to {@link ClassInfo}.
     * @param refdClassInfo
     *            the referenced class info
     */
    @Override
    protected void findReferencedClassInfo(final Map<String, ClassInfo> classNameToClassInfo,
            final Set<ClassInfo> refdClassInfo, final LogNode log) {
        try {
            final TypeSignature fieldSig = getTypeSignature();
            if (fieldSig != null) {
                fieldSig.findReferencedClassInfo(classNameToClassInfo, refdClassInfo, log);
            }
        } catch (final IllegalArgumentException e) {
            // An unparseable signature is logged rather than propagated
            if (log != null) {
                log.log("Illegal type signature for field " + getClassName() + "." + getName() + ": "
                        + getTypeSignatureStr());
            }
        }
        try {
            final TypeSignature fieldDesc = getTypeDescriptor();
            if (fieldDesc != null) {
                fieldDesc.findReferencedClassInfo(classNameToClassInfo, refdClassInfo, log);
            }
        } catch (final IllegalArgumentException e) {
            // An unparseable descriptor is logged rather than propagated
            if (log != null) {
                log.log("Illegal type descriptor for field " + getClassName() + "." + getName() + ": "
                        + getTypeDescriptorStr());
            }
        }
        if (annotationInfo != null) {
            for (final AnnotationInfo ai : annotationInfo) {
                ai.findReferencedClassInfo(classNameToClassInfo, refdClassInfo, log);
            }
        }
    }

    // -------------------------------------------------------------------------------------------------------------

    /**
     * Use class name and field name for equals().
     *
     * @param obj
     *            the object to compare to
     * @return true if equal
     */
    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        } else if (!(obj instanceof FieldInfo)) {
            return false;
        }
        final FieldInfo other = (FieldInfo) obj;
        return declaringClassName.equals(other.declaringClassName) && name.equals(other.name);
    }

    /**
     * Use hash code of class name and field name.
     *
     * @return the hashcode
     */
    @Override
    public int hashCode() {
        return name.hashCode() + declaringClassName.hashCode() * 11;
    }

    /**
     * Sort in order of class name then field name.
     *
     * @param other
     *            the other FieldInfo object to compare to.
     * @return the result of comparison.
     */
    @Override
    public int compareTo(final FieldInfo other) {
        final int diff = declaringClassName.compareTo(other.declaringClassName);
        if (diff != 0) {
            return diff;
        }
        return name.compareTo(other.name);
    }

    // -------------------------------------------------------------------------------------------------------------

    @Override
    protected void toString(final boolean useSimpleNames, final StringBuilder buf) {
        if (annotationInfo != null) {
            for (final AnnotationInfo annotation : annotationInfo) {
                if (buf.length() > 0) {
                    buf.append(' ');
                }
                annotation.toString(useSimpleNames, buf);
            }
        }
        if (modifiers != 0) {
            if (buf.length() > 0) {
                buf.append(' ');
            }
            TypeUtils.modifiersToString(modifiers, ModifierType.FIELD, /* ignored */ false, buf);
        }
        if (buf.length() > 0) {
            buf.append(' ');
        }
        final TypeSignature typeSig = getTypeSignatureOrTypeDescriptor();
        typeSig.toStringInternal(useSimpleNames, /* annotationsToExclude = */ annotationInfo, buf);
        buf.append(' ');
        buf.append(name);
        if (constantInitializerValue != null) {
            final Object val = constantInitializerValue.get();
            buf.append(" = ");
            if (val instanceof String) {
                // Escape backslashes and double quotes in String constants
                buf.append('"').append(((String) val).replace("\\", "\\\\").replace("\"", "\\\"")).append('"');
            } else if (val instanceof Character) {
                buf.append('\'').append(((Character) val).toString().replace("\\", "\\\\").replaceAll("'", "\\'"))
                        .append('\'');
            } else {
                buf.append(val == null ? "null" : val.toString());
            }
        }
    }
}
| |
package org.apache.tomcat.maven.runner;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.catalina.Context;
import org.apache.catalina.Host;
import org.apache.catalina.Manager;
import org.apache.catalina.connector.Connector;
import org.apache.catalina.core.StandardContext;
import org.apache.catalina.startup.Catalina;
import org.apache.catalina.startup.ContextConfig;
import org.apache.catalina.startup.Tomcat;
import org.apache.catalina.valves.AccessLogValve;
import org.apache.catalina.valves.RemoteIpValve;
import org.apache.juli.ClassLoaderLogManager;
import org.apache.tomcat.util.ExceptionUtils;
import org.apache.tomcat.util.http.fileupload.FileUtils;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.HashMap;
import java.util.Map;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.logging.LogManager;
/**
* FIXME add junit for that but when https://issues.apache.org/bugzilla/show_bug.cgi?id=52028 fixed
* Main class used to run the standalone wars in a Apache Tomcat instance.
*
* @author Olivier Lamy
* @since 2.0
*/
public class Tomcat7Runner
{
// true/false to use the server.xml located in the jar /conf/server.xml
public static final String USE_SERVER_XML_KEY = "useServerXml";
// contains war name wars=foo.war,bar.war
public static final String WARS_KEY = "wars";
// timestamp written into the executable archive when it was generated
public static final String ARCHIVE_GENERATION_TIMESTAMP_KEY = "generationTimestamp";
// true/false to enable JNDI naming
public static final String ENABLE_NAMING_KEY = "enableNaming";
// true/false to add a RemoteIpValve to the host pipeline
public static final String ENABLE_REMOTE_IP_VALVE = "enableRemoteIpValve";
// pattern used for the default AccessLogValve
public static final String ACCESS_LOG_VALVE_FORMAT_KEY = "accessLogValveFormat";
// context path under which the code-source war (if any) is deployed
public static final String CODE_SOURCE_CONTEXT_PATH = "codeSourceContextPath";
/**
 * key of the property which contains http protocol : HTTP/1.1 or org.apache.coyote.http11.Http11NioProtocol
 */
public static final String HTTP_PROTOCOL_KEY = "connectorhttpProtocol";
/**
 * key for default http port defined in the plugin
 */
public static final String HTTP_PORT_KEY = "httpPort";
// HTTP connector port; connector is only created when > 0
public int httpPort;
// HTTPS connector port; used as redirect port when > 0
public int httpsPort;
// max POST body size in bytes for the HTTP connector (default 2 MiB)
public int maxPostSize = 2097152;
// AJP connector port
public int ajpPort;
// optional path to an external server.xml
public String serverXmlPath;
// properties bundled into the executable archive at build time
public Properties runtimeProperties;
// when true, the extract directory is wiped and re-extracted
public boolean resetExtract;
// enables debugMessage output
public boolean debug = false;
// TLS client-auth mode passed to the https connector ("false" by default)
public String clientAuth = "false";
// TLS key alias, if any
public String keyAlias = null;
// overrides the connector protocol from runtimeProperties when set
public String httpProtocol;
// directory the embedded archive content is extracted into
public String extractDirectory = ".extract";
public File extractDirectoryFile;
// optional session Manager factory class name; used in addWebapp
public String sessionManagerFactoryClassName = null;
// context path of the code-source war, read from runtimeProperties
public String codeSourceContextPath = null;
// the war file the runner itself was launched from, if any
public File codeSourceWar = null;
// name of the logger implementation to install, if any
public String loggerName;
// used when running from a server.xml configuration
Catalina container;
// used when configuring Tomcat programmatically
Tomcat tomcat;
// URI encoding applied to the HTTP connector
String uriEncoding = "ISO-8859-1";
/**
 * key = context of the webapp, value = war path on file system
 */
Map<String, String> webappWarPerContext = new HashMap<String, String>();
/** Default constructor; all configuration is done via the public fields before calling run(). */
public Tomcat7Runner()
{
// no op
}
public void run()
throws Exception
{
PasswordUtil.deobfuscateSystemProps();
if ( loggerName != null && loggerName.length() > 0 )
{
installLogger( loggerName );
}
this.extractDirectoryFile = new File( this.extractDirectory );
debugMessage( "use extractDirectory:" + extractDirectoryFile.getPath() );
boolean archiveTimestampChanged = false;
// compare timestamp stored during previous run if exists
File timestampFile = new File( extractDirectoryFile, ".tomcat_executable_archive.timestamp" );
Properties timestampProps = loadProperties( timestampFile );
if ( timestampFile.exists() )
{
String timestampValue = timestampProps.getProperty( Tomcat7Runner.ARCHIVE_GENERATION_TIMESTAMP_KEY );
if ( timestampValue != null )
{
long timestamp = Long.parseLong( timestampValue );
archiveTimestampChanged =
Long.parseLong( runtimeProperties.getProperty( Tomcat7Runner.ARCHIVE_GENERATION_TIMESTAMP_KEY ) )
> timestamp;
debugMessage( "read timestamp from file " + timestampValue + ", archiveTimestampChanged: "
+ archiveTimestampChanged );
}
}
codeSourceContextPath = runtimeProperties.getProperty( CODE_SOURCE_CONTEXT_PATH );
if ( codeSourceContextPath != null && !codeSourceContextPath.isEmpty() )
{
codeSourceWar = AccessController.doPrivileged( new PrivilegedAction<File>()
{
public File run()
{
try
{
File src =
new File( Tomcat7Runner.class.getProtectionDomain().getCodeSource().getLocation().toURI() );
if ( src.getName().endsWith( ".war" ) )
{
return src;
}
else
{
debugMessage( "ERROR: Code source is not a war file, ignoring." );
}
}
catch ( URISyntaxException e )
{
debugMessage( "ERROR: Could not find code source. " + e.getMessage() );
}
return null;
}
} );
}
// do we have to extract content
{
if ( !extractDirectoryFile.exists() || resetExtract || archiveTimestampChanged )
{
extract();
//if archiveTimestampChanged or timestamp file not exists store the last timestamp from the archive
if ( archiveTimestampChanged || !timestampFile.exists() )
{
timestampProps.put( Tomcat7Runner.ARCHIVE_GENERATION_TIMESTAMP_KEY, runtimeProperties.getProperty(
Tomcat7Runner.ARCHIVE_GENERATION_TIMESTAMP_KEY ) );
saveProperties( timestampProps, timestampFile );
}
}
else
{
String wars = runtimeProperties.getProperty( WARS_KEY );
populateWebAppWarPerContext( wars );
}
}
// create tomcat various paths
new File( extractDirectory, "conf" ).mkdirs();
new File( extractDirectory, "logs" ).mkdirs();
new File( extractDirectory, "webapps" ).mkdirs();
new File( extractDirectory, "work" ).mkdirs();
File tmpDir = new File( extractDirectory, "temp" );
tmpDir.mkdirs();
System.setProperty( "java.io.tmpdir", tmpDir.getAbsolutePath() );
System.setProperty( "catalina.base", extractDirectoryFile.getAbsolutePath() );
System.setProperty( "catalina.home", extractDirectoryFile.getAbsolutePath() );
// start with a server.xml
if ( serverXmlPath != null || useServerXml() )
{
container = new Catalina();
container.setUseNaming( this.enableNaming() );
if ( serverXmlPath != null && new File( serverXmlPath ).exists() )
{
container.setConfig( serverXmlPath );
}
else
{
container.setConfig( new File( extractDirectory, "conf/server.xml" ).getAbsolutePath() );
}
container.start();
}
else
{
tomcat = new Tomcat()
{
public Context addWebapp( Host host, String url, String name, String path )
{
Context ctx = new StandardContext();
ctx.setName( name );
ctx.setPath( url );
ctx.setDocBase( path );
ContextConfig ctxCfg = new ContextConfig();
ctx.addLifecycleListener( ctxCfg );
ctxCfg.setDefaultWebXml( new File( extractDirectory, "conf/web.xml" ).getAbsolutePath() );
if ( host == null )
{
getHost().addChild( ctx );
}
else
{
host.addChild( ctx );
}
if (sessionManagerFactoryClassName != null) {
boolean cookies = true;
constructSessionManager(ctx, sessionManagerFactoryClassName, cookies);
}
return ctx;
}
};
if ( this.enableNaming() )
{
System.setProperty( "catalina.useNaming", "true" );
tomcat.enableNaming();
}
tomcat.getHost().setAppBase( new File( extractDirectory, "webapps" ).getAbsolutePath() );
String connectorHttpProtocol = runtimeProperties.getProperty( HTTP_PROTOCOL_KEY );
if ( httpProtocol != null && httpProtocol.trim().length() > 0 )
{
connectorHttpProtocol = httpProtocol;
}
debugMessage( "use connectorHttpProtocol:" + connectorHttpProtocol );
if ( httpPort > 0 )
{
Connector connector = new Connector( connectorHttpProtocol );
connector.setPort( httpPort );
connector.setMaxPostSize( maxPostSize );
if ( httpsPort > 0 )
{
connector.setRedirectPort( httpsPort );
}
connector.setURIEncoding( uriEncoding );
tomcat.getService().addConnector( connector );
tomcat.setConnector( connector );
}
boolean enableRemoteIpValve =
Boolean.parseBoolean(runtimeProperties.getProperty( Tomcat7Runner.ENABLE_REMOTE_IP_VALVE, Boolean.TRUE.toString()));
if (enableRemoteIpValve) {
debugMessage("Adding RemoteIpValve");
RemoteIpValve riv = new RemoteIpValve();
tomcat.getHost().getPipeline().addValve(riv);
}
// add a default access log valve
AccessLogValve alv = new AccessLogValve();
alv.setDirectory( new File( extractDirectory, "logs" ).getAbsolutePath() );
alv.setPattern( runtimeProperties.getProperty( Tomcat7Runner.ACCESS_LOG_VALVE_FORMAT_KEY ) );
tomcat.getHost().getPipeline().addValve( alv );
// create https connector
if ( httpsPort > 0 )
{
Connector httpsConnector = new Connector( connectorHttpProtocol );
httpsConnector.setPort( httpsPort );
httpsConnector.setMaxPostSize( maxPostSize );
httpsConnector.setSecure( true );
httpsConnector.setProperty( "SSLEnabled", "true" );
httpsConnector.setProperty( "sslProtocol", "TLS" );
httpsConnector.setURIEncoding( uriEncoding );
String keystoreFile = System.getProperty( "javax.net.ssl.keyStore" );
String keystorePass = System.getProperty( "javax.net.ssl.keyStorePassword" );
String keystoreType = System.getProperty( "javax.net.ssl.keyStoreType", "jks" );
if ( keystoreFile != null )
{
httpsConnector.setAttribute( "keystoreFile", keystoreFile );
}
if ( keystorePass != null )
{
httpsConnector.setAttribute( "keystorePass", keystorePass );
}
httpsConnector.setAttribute( "keystoreType", keystoreType );
String truststoreFile = System.getProperty( "javax.net.ssl.trustStore" );
String truststorePass = System.getProperty( "javax.net.ssl.trustStorePassword" );
String truststoreType = System.getProperty( "javax.net.ssl.trustStoreType", "jks" );
if ( truststoreFile != null )
{
httpsConnector.setAttribute( "truststoreFile", truststoreFile );
}
if ( truststorePass != null )
{
httpsConnector.setAttribute( "truststorePass", truststorePass );
}
httpsConnector.setAttribute( "truststoreType", truststoreType );
httpsConnector.setAttribute( "clientAuth", clientAuth );
httpsConnector.setAttribute( "keyAlias", keyAlias );
tomcat.getService().addConnector( httpsConnector );
if ( httpPort <= 0 )
{
tomcat.setConnector( httpsConnector );
}
}
// create ajp connector
if ( ajpPort > 0 )
{
Connector ajpConnector = new Connector( "org.apache.coyote.ajp.AjpProtocol" );
ajpConnector.setPort( ajpPort );
ajpConnector.setURIEncoding( uriEncoding );
tomcat.getService().addConnector( ajpConnector );
}
// add webapps
for ( Map.Entry<String, String> entry : this.webappWarPerContext.entrySet() )
{
String baseDir = null;
Context context = null;
if ( entry.getKey().equals( "/" ) )
{
baseDir = new File( extractDirectory, "webapps/ROOT.war" ).getAbsolutePath();
context = tomcat.addWebapp( "", baseDir );
}
else
{
baseDir = new File( extractDirectory, "webapps/" + entry.getValue() ).getAbsolutePath();
context = tomcat.addWebapp( entry.getKey(), baseDir );
}
URL contextFileUrl = getContextXml( baseDir );
if ( contextFileUrl != null )
{
context.setConfigFile( contextFileUrl );
}
}
if ( codeSourceWar != null )
{
String baseDir = new File( extractDirectory, "webapps/" + codeSourceWar.getName() ).getAbsolutePath();
Context context = tomcat.addWebapp( codeSourceContextPath, baseDir );
URL contextFileUrl = getContextXml( baseDir );
if ( contextFileUrl != null )
{
context.setConfigFile( contextFileUrl );
}
}
tomcat.start();
Runtime.getRuntime().addShutdownHook( new TomcatShutdownHook() );
}
waitIndefinitely();
}
/**
 * JVM shutdown hook that stops the embedded Tomcat (or Catalina) instance so
 * the container can clean up before the process exits.
 */
protected class TomcatShutdownHook
    extends Thread
{
    protected TomcatShutdownHook()
    {
        // no op
    }

    /**
     * Stops the runner. Failures are only reported on stdout rather than
     * propagated, since the JVM is already shutting down at this point.
     */
    @Override
    public void run()
    {
        try
        {
            Tomcat7Runner.this.stop();
        }
        catch ( Throwable ex )
        {
            // NOTE(review): presumably ExceptionUtils.handleThrowable re-throws
            // VM-fatal errors and swallows the rest - confirm against the
            // org.apache.tomcat.util.ExceptionUtils implementation.
            ExceptionUtils.handleThrowable( ex );
            System.out.println( "fail to properly shutdown Tomcat:" + ex.getMessage() );
        }
        finally
        {
            // If JULI is used, shut JULI down *after* the server shuts down
            // so log messages aren't lost
            LogManager logManager = LogManager.getLogManager();
            if ( logManager instanceof ClassLoaderLogManager )
            {
                ( (ClassLoaderLogManager) logManager ).shutdown();
            }
        }
    }
}
/**
 * Installs a custom session {@code Manager} on the given context.
 * <p>
 * The factory class named by {@code sessionManagerFactoryClassName} is loaded
 * reflectively, instantiated through its no-arg constructor, and its
 * {@code createSessionManager()} method is invoked to obtain the manager.
 * Any failure is logged to stderr and otherwise ignored, so a bad factory
 * never prevents the webapp from starting.
 *
 * @param ctx the web application context to configure
 * @param sessionManagerFactoryClassName fully qualified name of the factory class
 * @param cookies whether the context should track sessions with cookies
 */
private void constructSessionManager(Context ctx, String sessionManagerFactoryClassName, boolean cookies) {
    try {
        debugMessage("Constructing session manager with factory " + sessionManagerFactoryClassName);
        Class<?> sessionManagerClass = Class.forName(sessionManagerFactoryClassName);
        Object managerFactory = sessionManagerClass.getDeclaredConstructor().newInstance();
        // getMethod() never returns null - it throws NoSuchMethodException,
        // which is reported through the catch block below. (The previous
        // null-check and its "does not have a method" branch were dead code.)
        Method method = managerFactory.getClass().getMethod("createSessionManager");
        Manager manager = (Manager) method.invoke(managerFactory);
        ctx.setManager(manager);
        ctx.setCookies(cookies);
    } catch (Exception e) {
        System.err.println("Unable to construct specified session manager '" +
            sessionManagerFactoryClassName + "': " + e.getLocalizedMessage());
        e.printStackTrace();
    }
}
/**
 * Looks for a {@code META-INF/context.xml} entry inside the given war file.
 *
 * @param warPath absolute path of the war to inspect
 * @return the {@code jar:} URL of the context.xml entry, or {@code null}
 *         when the war contains no such entry
 * @throws IOException on I/O failures other than the entry being absent
 */
private URL getContextXml( String warPath )
    throws IOException
{
    String candidate = "jar:file:" + warPath + "!/META-INF/context.xml";
    debugMessage( "search context.xml in url:'" + candidate + "'" );
    URL contextXmlUrl = new URL( candidate );
    InputStream probe = null;
    try
    {
        // Actually opening the stream is the probe: success means the war
        // has the entry, a FileNotFoundException means it does not.
        probe = contextXmlUrl.openConnection().getInputStream();
        return contextXmlUrl;
    }
    catch ( FileNotFoundException missingEntry )
    {
        return null;
    }
    finally
    {
        closeQuietly( probe );
    }
}
/**
 * Closes the given stream, swallowing any {@code IOException}; a
 * {@code null} argument is a no-op.
 *
 * @param inputStream stream to close, may be {@code null}
 */
private static void closeQuietly( InputStream inputStream )
{
    if ( inputStream != null )
    {
        try
        {
            inputStream.close();
        }
        catch ( IOException ignored )
        {
            // best-effort close: nothing useful to do on failure
        }
    }
}
/**
 * Blocks the calling thread forever so the JVM stays alive while the
 * container's daemon threads serve requests.
 * <p>
 * The wait is performed in a loop because {@link Object#wait()} is allowed
 * to wake up spuriously; without the loop a spurious wakeup would let the
 * main thread return and take the whole server process down with it.
 *
 * @throws Error if the waiting thread is interrupted
 */
private void waitIndefinitely()
{
    Object lock = new Object();
    synchronized ( lock )
    {
        try
        {
            // Loop guards against spurious wakeups - nothing ever notifies
            // this lock, so only interruption can leave the method.
            while ( true )
            {
                lock.wait();
            }
        }
        catch ( InterruptedException exception )
        {
            throw new Error( "InterruptedException on wait Indefinitely lock:" + exception.getMessage(),
                             exception );
        }
    }
}
/**
 * Stops whichever container flavour this runner started: the {@code Catalina}
 * instance when an external {@code server.xml} was used, or the embedded
 * {@code Tomcat} instance otherwise. Safe to call when neither was started.
 *
 * @throws Exception if the underlying container fails to stop
 */
public void stop()
    throws Exception
{
    if ( container != null )
    {
        container.stop();
    }
    if ( tomcat != null )
    {
        tomcat.stop();
    }
}
/**
 * Deletes any previous extract directory, recreates the directory layout,
 * and expands all war files (plus the optional code-source war and bundled
 * configuration files) from the runner archive into it.
 * <p>
 * Order matters: directories are created first, then the {@code wars}
 * property is parsed so we know which archive entries to expand.
 *
 * @throws Exception when a directory cannot be created or an entry cannot
 *                   be expanded
 */
protected void extract()
    throws Exception
{
    // start from a clean state: remove leftovers of a previous run
    if ( extractDirectoryFile.exists() )
    {
        debugMessage( "delete extractDirectory:" + extractDirectoryFile.getAbsolutePath() );
        FileUtils.deleteDirectory( extractDirectoryFile );
    }

    if ( !this.extractDirectoryFile.exists() )
    {
        boolean created = this.extractDirectoryFile.mkdirs();
        if ( !created )
        {
            throw new Exception( "FATAL: impossible to create directory:" + this.extractDirectoryFile.getPath() );
        }
    }

    // ensure webapp dir is here
    boolean created = new File( extractDirectory, "webapps" ).mkdirs();
    if ( !created )
    {
        throw new Exception(
            "FATAL: impossible to create directory:" + this.extractDirectoryFile.getPath() + "/webapps" );
    }

    // parse the wars property so we know which context each war belongs to
    String wars = runtimeProperties.getProperty( WARS_KEY );
    populateWebAppWarPerContext( wars );

    for ( Map.Entry<String, String> entry : webappWarPerContext.entrySet() )
    {
        debugMessage( "webappWarPerContext entry key/value: " + entry.getKey() + "/" + entry.getValue() );
        InputStream inputStream = null;
        try
        {
            File expandFile = null;
            inputStream = Thread.currentThread().getContextClassLoader().getResourceAsStream( entry.getValue() );
            if ( !useServerXml() )
            {
                // without an external server.xml Tomcat maps the root context
                // from a war literally named ROOT.war
                if ( entry.getKey().equals( "/" ) )
                {
                    expandFile = new File( extractDirectory, "webapps/ROOT.war" );
                }
                else
                {
                    expandFile = new File( extractDirectory, "webapps/" + entry.getValue() );
                }
            }
            else
            {
                // server.xml declares the contexts itself; keep the war name
                expandFile = new File( extractDirectory, "webapps/" + entry.getValue() );
            }
            debugMessage( "expand to file:" + expandFile.getPath() );

            // MTOMCAT-211 ensure parent directories created
            File parentFile = expandFile.getParentFile();
            if ( !parentFile.mkdirs() && !parentFile.isDirectory() )
            {
                throw new Exception( "FATAL: impossible to create directories:" + parentFile );
            }

            expand( inputStream, expandFile );
        }
        finally
        {
            if ( inputStream != null )
            {
                inputStream.close();
            }
        }
    }

    //Copy code source to webapps folder
    if ( codeSourceWar != null )
    {
        FileInputStream inputStream = null;
        try
        {
            File expandFile = new File( extractDirectory, "webapps/" + codeSourceContextPath + ".war" );
            inputStream = new FileInputStream( codeSourceWar );
            debugMessage( "move code source to file:" + expandFile.getPath() );
            expand( inputStream, expandFile );
        }
        finally
        {
            if ( inputStream != null )
            {
                inputStream.close();
            }
        }
    }

    // expand tomcat configuration files if there
    expandConfigurationFile( "catalina.properties", extractDirectoryFile );
    expandConfigurationFile( "logging.properties", extractDirectoryFile );
    expandConfigurationFile( "tomcat-users.xml", extractDirectoryFile );
    expandConfigurationFile( "catalina.policy", extractDirectoryFile );
    expandConfigurationFile( "context.xml", extractDirectoryFile );
    expandConfigurationFile( "server.xml", extractDirectoryFile );
    expandConfigurationFile( "web.xml", extractDirectoryFile );
}
/**
 * Copies the classpath resource {@code conf/<fileName>} (when bundled in the
 * archive) into the {@code conf} directory below the extract directory.
 * Missing resources are silently skipped.
 *
 * @param fileName name of the configuration file, e.g. {@code server.xml}
 * @param extractDirectory root of the extracted layout
 * @throws Exception if copying the resource fails
 */
private static void expandConfigurationFile( String fileName, File extractDirectory )
    throws Exception
{
    InputStream resource =
        Thread.currentThread().getContextClassLoader().getResourceAsStream( "conf/" + fileName );
    if ( resource == null )
    {
        // optional file not bundled in the archive - nothing to do
        return;
    }
    try
    {
        File confDirectory = new File( extractDirectory, "conf" );
        if ( !confDirectory.exists() )
        {
            confDirectory.mkdirs();
        }
        expand( resource, new File( confDirectory, fileName ) );
    }
    finally
    {
        resource.close();
    }
}
/**
 * Parses the {@code wars} runtime property and fills {@code webappWarPerContext}.
 * <p>
 * Expected format: {@code wars=foo.war|contextpath;bar.war} - the
 * {@code |contextpath} part is optional; when it is absent the war file name
 * itself is used as the context value.
 *
 * @param warsValue raw property value; {@code null} means nothing to do
 */
private void populateWebAppWarPerContext( String warsValue )
{
    if ( warsValue == null )
    {
        return;
    }
    StringTokenizer st = new StringTokenizer( warsValue, ";" );
    while ( st.hasMoreTokens() )
    {
        String warValue = st.nextToken();
        debugMessage( "populateWebAppWarPerContext warValue:" + warValue );
        String warFileName = "";
        String contextValue = "";
        int separatorIndex = warValue.indexOf( "|" );
        if ( separatorIndex >= 0 )
        {
            warFileName = warValue.substring( 0, separatorIndex );
            contextValue = warValue.substring( separatorIndex + 1 );
        }
        else
        {
            // No explicit context: use the war name for both values, as the
            // javadoc promises. (The previous code assigned the still-empty
            // contextValue to warFileName, losing the war file name entirely.)
            warFileName = warValue;
            contextValue = warValue;
        }
        debugMessage( "populateWebAppWarPerContext contextValue/warFileName:" + contextValue + "/" + warFileName );
        this.webappWarPerContext.put( contextValue, warFileName );
    }
}
/**
 * Copies the entire input stream into the specified file.
 *
 * @param input InputStream to be copied; it is not closed by this method
 * @param file the file to be created (or overwritten)
 * @throws java.io.IOException if an input/output error occurs
 */
private static void expand( InputStream input, File file )
    throws IOException
{
    BufferedOutputStream output = null;
    try
    {
        output = new BufferedOutputStream( new FileOutputStream( file ) );
        byte[] chunk = new byte[2048];
        int read;
        while ( ( read = input.read( chunk ) ) > 0 )
        {
            output.write( chunk, 0, read );
        }
    }
    finally
    {
        if ( output != null )
        {
            try
            {
                output.close();
            }
            catch ( IOException ignored )
            {
                // Ignore - failure to close after a completed copy is harmless here.
            }
        }
    }
}
/**
 * @return {@code true} when the runtime properties ask for an external
 *         {@code server.xml} based configuration (defaults to {@code false})
 */
public boolean useServerXml()
{
    String flag = runtimeProperties.getProperty( USE_SERVER_XML_KEY, Boolean.FALSE.toString() );
    return Boolean.parseBoolean( flag );
}
/**
 * Prints the message on stdout, but only when debug mode is enabled.
 *
 * @param message text to print
 */
public void debugMessage( String message )
{
    if ( !debug )
    {
        return;
    }
    System.out.println( message );
}
/**
 * @return {@code true} when JNDI naming support should be enabled
 *         (defaults to {@code false})
 */
public boolean enableNaming()
{
    String flag = runtimeProperties.getProperty( ENABLE_NAMING_KEY, Boolean.FALSE.toString() );
    return Boolean.parseBoolean( flag );
}
/**
 * Routes java.util.logging through the named logging backend.
 * <p>
 * Only {@code "slf4j"} is supported: all JUL handlers are reset and
 * {@code org.slf4j.bridge.SLF4JBridgeHandler.install()} is invoked
 * reflectively so the slf4j bridge remains an optional dependency.
 * An unknown logger name, or a missing bridge class, produces a warning
 * and is otherwise ignored.
 *
 * @param loggerName the logging backend to install ({@code "slf4j"})
 */
private void installLogger( String loggerName )
    throws SecurityException, NoSuchMethodException, IllegalArgumentException, IllegalAccessException,
    InvocationTargetException
{
    if ( "slf4j".equals( loggerName ) )
    {
        try
        {
            // Load via the context class loader so the bridge stays optional.
            final Class<?> clazz =
                Thread.currentThread().getContextClassLoader().loadClass( "org.slf4j.bridge.SLF4JBridgeHandler" );
            // Remove all JUL handlers
            java.util.logging.LogManager.getLogManager().reset();
            // Install slf4j bridge handler. install() takes no parameters, so
            // look it up with the no-arg overload instead of passing a null
            // varargs array (which only produces compiler warnings).
            final Method method = clazz.getMethod( "install" );
            method.invoke( null );
        }
        catch ( ClassNotFoundException e )
        {
            System.out.println( "WARNING: issue configuring slf4j jul bridge, skip it" );
        }
    }
    else
    {
        System.out.println( "WARNING: loggerName " + loggerName + " not supported, skip it" );
    }
}
/**
 * Loads a {@link Properties} object from the given file; a missing file
 * yields an empty properties object.
 *
 * @param file properties file to read
 * @return the loaded (possibly empty) properties
 * @throws IOException if reading the file fails
 */
private Properties loadProperties( File file )
    throws FileNotFoundException, IOException
{
    Properties properties = new Properties();
    if ( !file.exists() )
    {
        return properties;
    }
    FileInputStream fileInputStream = new FileInputStream( file );
    try
    {
        properties.load( fileInputStream );
    }
    finally
    {
        fileInputStream.close();
    }
    return properties;
}
/**
 * Stores the given properties into {@code file}, always closing the stream.
 *
 * @param properties properties to persist
 * @param file destination file (created or overwritten)
 * @throws IOException if writing the file fails
 */
private void saveProperties( Properties properties, File file )
    throws FileNotFoundException, IOException
{
    FileOutputStream fileOutputStream = new FileOutputStream( file );
    try
    {
        properties.store( fileOutputStream, "Timestamp file for executable war/jar" );
    }
    finally
    {
        fileOutputStream.close();
    }
}
}
| |
/*
* Copyright 2000-2015 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.refactoring.replaceConstructorWithBuilder;
import com.intellij.ide.highlighter.JavaFileType;
import com.intellij.ide.util.PackageUtil;
import com.intellij.openapi.module.Module;
import com.intellij.openapi.module.ModuleUtil;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.Ref;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.*;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.JavaCodeStyleManager;
import com.intellij.psi.codeStyle.VariableKind;
import com.intellij.psi.search.GlobalSearchScope;
import com.intellij.psi.search.searches.ReferencesSearch;
import com.intellij.psi.util.PropertyUtil;
import com.intellij.psi.util.PsiTreeUtil;
import com.intellij.psi.util.PsiUtil;
import com.intellij.psi.util.TypeConversionUtil;
import com.intellij.refactoring.MoveDestination;
import com.intellij.refactoring.replaceConstructorWithBuilder.usageInfo.ReplaceConstructorWithSettersChainInfo;
import com.intellij.refactoring.util.FixableUsageInfo;
import com.intellij.refactoring.util.FixableUsagesRefactoringProcessor;
import com.intellij.refactoring.util.RefactoringUtil;
import com.intellij.usageView.UsageInfo;
import com.intellij.usageView.UsageViewDescriptor;
import com.intellij.util.IncorrectOperationException;
import com.intellij.util.VisibilityUtil;
import com.intellij.util.containers.MultiMap;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import java.util.Collections;
import java.util.List;
import java.util.Map;
/**
* @author anna
* @since 04-Sep-2008
*/
/**
 * Performs the "Replace Constructor with Builder" refactoring: every call of
 * the selected constructor(s) is rewritten into a chain of builder setter
 * calls terminated by a {@code createXxx()} factory method, and the builder
 * class itself is created (or an existing one updated) with one field and
 * one chaining setter per constructor parameter.
 *
 * @author anna
 * @since 04-Sep-2008
 */
public class ReplaceConstructorWithBuilderProcessor extends FixableUsagesRefactoringProcessor {
  public static final String REFACTORING_NAME = "Replace Constructor with Builder";

  /** Constructors being replaced; all belong to the same class. */
  private final PsiMethod[] myConstructors;
  /** Property name -> parameter description (type, setter name, field name, default value). */
  private final Map<String, ParameterData> myParametersMap;
  private final String myClassName;
  private final String myPackageName;
  /** When {@code true} a new builder class is generated, otherwise an existing one is reused. */
  private final boolean myCreateNewBuilderClass;
  private final PsiElementFactory myElementFactory;
  /** Target directory/package for the generated builder; may be {@code null}. */
  private final MoveDestination myMoveDestination;

  public ReplaceConstructorWithBuilderProcessor(Project project,
                                                PsiMethod[] constructors,
                                                Map<String, ParameterData> parametersMap,
                                                String className,
                                                String packageName,
                                                MoveDestination moveDestination, boolean createNewBuilderClass) {
    super(project);
    myMoveDestination = moveDestination;
    myElementFactory = JavaPsiFacade.getInstance(myProject).getElementFactory();
    myConstructors = constructors;
    myParametersMap = parametersMap;
    myClassName = className;
    myPackageName = packageName;
    myCreateNewBuilderClass = createNewBuilderClass;
  }

  @NotNull
  protected UsageViewDescriptor createUsageViewDescriptor(@NotNull final UsageInfo[] usages) {
    return new ReplaceConstructorWithBuilderViewDescriptor();
  }

  /**
   * Collects every {@code new} expression that references one of the selected
   * constructors. Call sites inside the builder class itself are skipped so
   * the generated factory method is not rewritten recursively.
   */
  protected void findUsages(@NotNull final List<FixableUsageInfo> usages) {
    final String builderQualifiedName = StringUtil.getQualifiedName(myPackageName, myClassName);
    final PsiClass builderClass =
      JavaPsiFacade.getInstance(myProject).findClass(builderQualifiedName, GlobalSearchScope.projectScope(myProject));

    for (PsiMethod constructor : myConstructors) {
      for (PsiReference reference : ReferencesSearch.search(constructor)) {
        final PsiElement element = reference.getElement();
        final PsiNewExpression newExpression = PsiTreeUtil.getParentOfType(element, PsiNewExpression.class);
        if (newExpression != null && !PsiTreeUtil.isAncestor(builderClass, element, false)) {
          // Reuse the qualified name computed above instead of rebuilding it per usage.
          usages.add(new ReplaceConstructorWithSettersChainInfo(newExpression, builderQualifiedName, myParametersMap));
        }
      }
    }
  }

  /**
   * Creates the (initially empty) builder class in the chosen destination,
   * copying the type parameters of the class whose constructors are replaced.
   *
   * @return the builder class, or an already-existing class with the same file
   *         name, or {@code null} when no target directory could be resolved
   */
  @Nullable
  private PsiClass createBuilderClass() {
    final PsiClass psiClass = myConstructors[0].getContainingClass();
    assert psiClass != null;
    final PsiTypeParameterList typeParameterList = psiClass.getTypeParameterList();
    final String text = "public class " + myClassName + (typeParameterList != null ? typeParameterList.getText() : "") + "{}";
    final PsiFileFactory factory = PsiFileFactory.getInstance(myProject);
    final PsiJavaFile newFile = (PsiJavaFile)factory.createFileFromText(myClassName + ".java", JavaFileType.INSTANCE, text);

    final PsiFile containingFile = myConstructors[0].getContainingFile();
    final PsiDirectory containingDirectory = containingFile.getContainingDirectory();
    final PsiDirectory directory;
    if (myMoveDestination != null) {
      directory = myMoveDestination.getTargetDirectory(containingDirectory);
    } else {
      final Module module = ModuleUtil.findModuleForPsiElement(containingFile);
      assert module != null;
      directory = PackageUtil.findOrCreateDirectoryForPackage(module, myPackageName, containingDirectory, true, true);
    }

    if (directory != null) {
      final CodeStyleManager codeStyleManager = CodeStyleManager.getInstance(PsiManager.getInstance(myProject).getProject());
      final PsiJavaFile reformattedFile = (PsiJavaFile)codeStyleManager.reformat(JavaCodeStyleManager.getInstance(newFile.getProject()).shortenClassReferences(newFile));
      // A file with the same name already exists: hand back the freshly built
      // class instead of attempting to add a duplicate file.
      if (directory.findFile(reformattedFile.getName()) != null) return reformattedFile.getClasses()[0];
      return ((PsiJavaFile)directory.add(reformattedFile)).getClasses()[0];
    }
    return null;
  }

  @Override
  protected void performRefactoring(@NotNull UsageInfo[] usageInfos) {
    final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(myProject);
    final PsiClass builderClass = myCreateNewBuilderClass
                                  ? createBuilderClass()
                                  : psiFacade.findClass(StringUtil.getQualifiedName(myPackageName, myClassName),
                                                        GlobalSearchScope.projectScope(myProject));
    if (builderClass == null) return;

    // One field plus one chaining setter per constructor parameter.
    // (Iterating values() directly avoids the redundant keySet()+get() lookups.)
    for (ParameterData parameterData : myParametersMap.values()) {
      final PsiField field = createField(builderClass, parameterData);
      createSetter(builderClass, parameterData, field);
    }

    super.performRefactoring(usageInfos);

    final PsiMethod method = createMethodSignature(createMethodName());
    if (builderClass.findMethodBySignature(method, false) == null) {
      builderClass.add(method);
    }

    // Fix visibilities: the builder must be able to call the constructor
    // and see every class enclosing it.
    final PsiMethod constructor = getWorkingConstructor();
    VisibilityUtil.escalateVisibility(constructor, builderClass);
    PsiClass containingClass = constructor.getContainingClass();
    while (containingClass != null) {
      VisibilityUtil.escalateVisibility(containingClass, builderClass);
      containingClass = containingClass.getContainingClass();
    }
  }

  /**
   * Reuses an existing compatible setter on the builder (fixing its return
   * type to be chainable) or generates a new chaining setter for the field.
   */
  private void createSetter(PsiClass builderClass, ParameterData parameterData, PsiField field) {
    PsiMethod setter = null;
    for (PsiMethod method : builderClass.getMethods()) {
      if (Comparing.strEqual(method.getName(), parameterData.getSetterName()) && method.getParameterList().getParametersCount() == 1
          && TypeConversionUtil.isAssignable(method.getParameterList().getParameters()[0].getType(), parameterData.getType())) {
        setter = method;
        fixSetterReturnType(builderClass, field, setter);
        break;
      }
    }
    if (setter == null) {
      setter = PropertyUtil.generateSetterPrototype(field, builderClass, true);
      final PsiIdentifier nameIdentifier = setter.getNameIdentifier();
      assert nameIdentifier != null;
      nameIdentifier.replace(myElementFactory.createIdentifier(parameterData.getSetterName()));
      setter.getParameterList().getParameters()[0].getTypeElement().replace(myElementFactory.createTypeElement(parameterData.getType())); //setter varargs
      builderClass.add(setter);
    }
  }

  /**
   * Finds or creates the builder field for the parameter; varargs parameters
   * become array-typed fields, and the configured default value (when valid)
   * is set as the field initializer.
   */
  private PsiField createField(PsiClass builderClass, ParameterData parameterData) {
    PsiField field = builderClass.findFieldByName(parameterData.getFieldName(), false);

    if (field == null) {
      PsiType type = parameterData.getType();
      if (type instanceof PsiEllipsisType) {
        type = ((PsiEllipsisType)type).toArrayType();
      }
      field = myElementFactory.createField(parameterData.getFieldName(), type);
      field = (PsiField)builderClass.add(field);
    }

    final String defaultValue = parameterData.getDefaultValue();
    if (defaultValue != null) {
      final PsiExpression initializer = field.getInitializer();
      if (initializer == null) {
        try {
          field.setInitializer(myElementFactory.createExpressionFromText(defaultValue, field));
        }
        catch (IncorrectOperationException e) {
          //skip invalid default value
        }
      }
    }
    return field;
  }

  /**
   * Rewrites an existing setter so it returns the builder (making it
   * chainable), replacing its body with the generated chaining body.
   */
  private void fixSetterReturnType(PsiClass builderClass, PsiField field, PsiMethod method) {
    if (PsiUtil.resolveClassInType(method.getReturnType()) != builderClass) {
      final PsiCodeBlock body = method.getBody();
      final PsiCodeBlock generatedBody = PropertyUtil.generateSetterPrototype(field, builderClass, true).getBody();
      assert body != null;
      assert generatedBody != null;
      body.replace(generatedBody);
      final PsiTypeElement typeElement = method.getReturnTypeElement();
      assert typeElement != null;
      typeElement.replace(myElementFactory.createTypeElement(myElementFactory.createType(builderClass)));
    }
  }

  /**
   * Builds the {@code createXxx()} factory method that calls the original
   * constructor with the builder's field values.
   */
  private PsiMethod createMethodSignature(String createMethodName) {
    JavaCodeStyleManager styleManager = JavaCodeStyleManager.getInstance(myProject);
    final StringBuilder buf = new StringBuilder();
    final PsiMethod constructor = getWorkingConstructor();
    for (PsiParameter parameter : constructor.getParameterList().getParameters()) {
      final String pureParamName = styleManager.variableNameToPropertyName(parameter.getName(), VariableKind.PARAMETER);
      if (buf.length() > 0) buf.append(", ");
      buf.append(myParametersMap.get(pureParamName).getFieldName());
    }
    return myElementFactory.createMethodFromText("public " +
                                                 constructor.getName() +
                                                 " " +
                                                 createMethodName +
                                                 "(){\n return new " +
                                                 constructor.getName() +
                                                 "(" +
                                                 buf.toString() +
                                                 ")" +
                                                 ";\n}", constructor);
  }

  /**
   * @return the constructor the factory method should delegate to: the most
   *         common chained constructor when one exists, otherwise the first
   *         selected constructor with a non-empty parameter list
   */
  private PsiMethod getWorkingConstructor() {
    PsiMethod constructor = getMostCommonConstructor();
    if (constructor == null){
      constructor = myConstructors[0];
      if (constructor.getParameterList().getParametersCount() == 0) {
        constructor = myConstructors[1];
      }
    }
    return constructor;
  }

  /**
   * @return the single constructor every selected constructor eventually
   *         chains to, or {@code null} when the constructors do not form a
   *         simple this(...) delegation chain
   */
  @Nullable
  private PsiMethod getMostCommonConstructor() {
    if (myConstructors.length == 1) return myConstructors[0];
    PsiMethod commonConstructor = null;
    for (PsiMethod constructor : myConstructors) {
      final PsiMethod chainedConstructor = RefactoringUtil.getChainedConstructor(constructor);
      if (chainedConstructor == null) {
        if (commonConstructor != null) {
          if (!isChained(commonConstructor, constructor)) {
            return null;
          }
        }
        commonConstructor = constructor;
      } else {
        if (commonConstructor == null) {
          commonConstructor = chainedConstructor;
        } else {
          if (!isChained(commonConstructor, chainedConstructor)) {
            return null;
          }
        }
      }
    }
    return commonConstructor;
  }

  /** @return {@code true} when {@code first} (transitively) chains to {@code last} via this(...) calls */
  private static boolean isChained(PsiMethod first, PsiMethod last) {
    if (first == null) return false;
    if (first == last) return true;
    return isChained(RefactoringUtil.getChainedConstructor(first), last);
  }

  /** @return factory method name, e.g. {@code createFoo} for class {@code Foo} */
  private String createMethodName() {
    return "create" + StringUtil.capitalize(myConstructors[0].getName());
  }

  /**
   * Validates preconditions before the refactoring runs: the builder class
   * must exist (or not, depending on the create-new flag), the destination
   * must be reachable, and the constructors must form a simple chain.
   */
  @Override
  protected boolean preprocessUsages(@NotNull Ref<UsageInfo[]> refUsages) {
    final MultiMap<PsiElement, String> conflicts = new MultiMap<>();
    final JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(myProject);
    final PsiClass builderClass =
      psiFacade.findClass(StringUtil.getQualifiedName(myPackageName, myClassName), GlobalSearchScope.projectScope(myProject));
    if (builderClass == null) {
      if (!myCreateNewBuilderClass) {
        conflicts.putValue(null, "Selected class was not found.");
      }
    } else if (myCreateNewBuilderClass){
      conflicts.putValue(builderClass, "Class with chosen name already exist.");
    }

    if (myMoveDestination != null && myCreateNewBuilderClass) {
      myMoveDestination.analyzeModuleConflicts(Collections.emptyList(), conflicts, refUsages.get());
    }

    final PsiMethod commonConstructor = getMostCommonConstructor();
    if (commonConstructor == null) {
      conflicts.putValue(null, "Found constructors are not reducible to simple chain");
    }

    return showConflicts(conflicts, refUsages.get());
  }

  protected String getCommandName() {
    return REFACTORING_NAME;
  }
}
| |
/*
* Copyright 2009 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import com.google.common.collect.Lists;
import com.google.javascript.jscomp.NodeTraversal.AbstractPostOrderCallback;
import com.google.javascript.rhino.IR;
import com.google.javascript.rhino.Node;
import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;
import javax.annotation.Nullable;
/**
 * Creates synthetic blocks to prevent optimizations from moving code
 * past markers in the source.
 */
class CreateSyntheticBlocks implements CompilerPass {
  static final DiagnosticType UNMATCHED_START_MARKER = DiagnosticType.warning(
      "JSC_UNMATCHED_START_MARKER", "Unmatched {0}");

  static final DiagnosticType UNMATCHED_END_MARKER = DiagnosticType.warning(
      "JSC_UNMATCHED_END_MARKER", "Unmatched {1} - {0} not in the same block");

  static final DiagnosticType INVALID_MARKER_USAGE = DiagnosticType.warning(
      "JSC_INVALID_MARKER_USAGE", "Marker {0} can only be used in a simple "
      + "call expression");

  private final AbstractCompiler compiler;

  /** Name of the start marker. */
  private final String startMarkerName;

  /** Name of the end marker. */
  private final String endMarkerName;

  /**
   * Markers can be nested.
   */
  private final Deque<Node> markerStack = new ArrayDeque<Node>();

  private final List<Marker> validMarkers = Lists.newArrayList();

  /**
   * A start/end marker pair validated to live in the same block. Both fields
   * are the expression-statement parents of the marker calls.
   */
  private class Marker {
    final Node startMarker;
    final Node endMarker;

    public Marker(Node startMarker, Node endMarker) {
      this.startMarker = startMarker;
      this.endMarker = endMarker;
    }
  }

  public CreateSyntheticBlocks(AbstractCompiler compiler,
      String startMarkerName, String endMarkerName) {
    this.compiler = compiler;
    this.startMarkerName = startMarkerName;
    this.endMarkerName = endMarkerName;
  }

  @Override
  public void process(Node externs, Node root) {
    // Find and validate the markers.
    NodeTraversal.traverse(compiler, root, new Callback());

    // Complain about any unmatched markers.
    for (Node node : markerStack) {
      compiler.report(
          JSError.make(NodeUtil.getSourceName(node),
              node,
              UNMATCHED_START_MARKER, startMarkerName));
    }

    // Add the block for the valid marker sets.
    for (Marker marker : validMarkers) {
      addBlocks(marker);
    }
  }

  /**
   * Wraps the span between a marker pair in two nested synthetic blocks.
   *
   * @param marker The marker to add synthetic blocks for.
   */
  private void addBlocks(Marker marker) {
    // Add block around the template section so that it looks like this:
    //   BLOCK (synthetic)
    //     START
    //     BLOCK (synthetic)
    //       BODY
    //     END
    // This prevents the start or end markers from mingling with the code
    // in the block body.
    Node originalParent = marker.endMarker.getParent();
    Node outerBlock = IR.block();
    outerBlock.setIsSyntheticBlock(true);
    originalParent.addChildBefore(outerBlock, marker.startMarker);

    Node innerBlock = IR.block();
    innerBlock.setIsSyntheticBlock(true);
    // Move everything after the start Node up to the end Node into the inner
    // block.
    moveSiblingExclusive(originalParent, innerBlock,
        marker.startMarker,
        marker.endMarker);

    // Add the start node.
    outerBlock.addChildToBack(originalParent.removeChildAfter(outerBlock));
    // Add the inner block
    outerBlock.addChildToBack(innerBlock);
    // and finally the end node.
    outerBlock.addChildToBack(originalParent.removeChildAfter(outerBlock));

    compiler.reportCodeChange();
  }

  /**
   * Move the Nodes between start and end from the source block to the
   * destination block. If start is null, move the first child of the block.
   * If end is null, move the last child of the block.
   */
  private void moveSiblingExclusive(
      Node src, Node dest, @Nullable Node start, @Nullable Node end) {
    while (childAfter(src, start) != end) {
      Node child = removeChildAfter(src, start);
      dest.addChildToBack(child);
    }
  }

  /**
   * Like Node.getNext, except that null is used to signal the position
   * before the block's first child.
   */
  private Node childAfter(Node parent, @Nullable Node siblingBefore) {
    if (siblingBefore == null) {
      return parent.getFirstChild();
    } else {
      return siblingBefore.getNext();
    }
  }

  /**
   * Like Node.removeChildAfter, except that when siblingBefore is null the
   * firstChild is removed.
   */
  private Node removeChildAfter(Node parent, @Nullable Node siblingBefore) {
    if (siblingBefore == null) {
      return parent.removeFirstChild();
    } else {
      return parent.removeChildAfter(siblingBefore);
    }
  }

  /**
   * Post-order traversal callback that records matched marker pairs and
   * reports misplaced or unmatched markers as warnings.
   */
  private class Callback extends AbstractPostOrderCallback {
    @Override
    public void visit(NodeTraversal t, Node n, Node parent) {
      // Only bare calls of the form name(...) can be markers.
      if (!n.isCall() || !n.getFirstChild().isName()) {
        return;
      }

      Node callTarget = n.getFirstChild();
      String callName = callTarget.getString();

      if (startMarkerName.equals(callName)) {
        // A start marker must stand alone as an expression statement.
        if (!parent.isExprResult()) {
          compiler.report(
              t.makeError(n, INVALID_MARKER_USAGE, startMarkerName));
          return;
        }
        markerStack.push(parent);
        return;
      }

      if (!endMarkerName.equals(callName)) {
        return;
      }

      Node endMarkerNode = parent;
      if (!endMarkerNode.isExprResult()) {
        compiler.report(
            t.makeError(n, INVALID_MARKER_USAGE, endMarkerName));
        return;
      }

      if (markerStack.isEmpty()) {
        // End marker with no pending start marker.
        compiler.report(t.makeError(n, UNMATCHED_END_MARKER,
            startMarkerName, endMarkerName));
        return;
      }

      Node startMarkerNode = markerStack.pop();
      if (endMarkerNode.getParent() != startMarkerNode.getParent()) {
        // The end marker isn't in the same block as the start marker.
        compiler.report(t.makeError(n, UNMATCHED_END_MARKER,
            startMarkerName, endMarkerName));
        return;
      }

      // This is a valid marker set add it to the list of markers to process.
      validMarkers.add(new Marker(startMarkerNode, endMarkerNode));
    }
  }
}
| |
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.builder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.camel.CamelContext;
import org.apache.camel.Endpoint;
import org.apache.camel.Expression;
import org.apache.camel.LoggingLevel;
import org.apache.camel.NoSuchEndpointException;
import org.apache.camel.builder.xml.XPathBuilder;
import org.apache.camel.model.ModelCamelContext;
import org.apache.camel.model.language.HeaderExpression;
import org.apache.camel.model.language.MethodCallExpression;
import org.apache.camel.model.language.PropertyExpression;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Base class for implementation inheritance for different clauses in the <a
 * href="http://camel.apache.org/dsl.html">Java DSL</a>
 *
 * @version
 */
public abstract class BuilderSupport {
    private ModelCamelContext context;
    private ErrorHandlerBuilder errorHandlerBuilder;

    protected BuilderSupport() {
    }

    protected BuilderSupport(CamelContext context) {
        // NOTE(review): throws ClassCastException if the given context is not a
        // ModelCamelContext — confirm all callers pass a model-capable context.
        this.context = (ModelCamelContext) context;
    }

    // Builder methods
    // -------------------------------------------------------------------------

    /**
     * Returns a value builder for the given header
     */
    public ValueBuilder header(String name) {
        HeaderExpression expression = new HeaderExpression(name);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a value builder for the given property
     */
    public ValueBuilder property(String name) {
        PropertyExpression expression = new PropertyExpression(name);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a predicate and value builder for the inbound body on an exchange
     */
    public ValueBuilder body() {
        return Builder.body();
    }

    /**
     * Returns a predicate and value builder for the inbound message body as a
     * specific type
     */
    public <T> ValueBuilder body(Class<T> type) {
        return Builder.bodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the outbound body on an
     * exchange
     */
    public ValueBuilder outBody() {
        return Builder.outBody();
    }

    /**
     * Returns a predicate and value builder for the outbound message body as a
     * specific type
     */
    public <T> ValueBuilder outBody(Class<T> type) {
        return Builder.outBodyAs(type);
    }

    /**
     * Returns a predicate and value builder for the fault body on an exchange
     */
    public ValueBuilder faultBody() {
        return Builder.faultBody();
    }

    /**
     * Returns a predicate and value builder for the fault message body as a
     * specific type
     */
    public <T> ValueBuilder faultBodyAs(Class<T> type) {
        return Builder.faultBodyAs(type);
    }

    /**
     * Returns a value builder for the given system property
     */
    public ValueBuilder systemProperty(String name) {
        return Builder.systemProperty(name);
    }

    /**
     * Returns a value builder for the given system property
     */
    public ValueBuilder systemProperty(String name, String defaultValue) {
        return Builder.systemProperty(name, defaultValue);
    }

    /**
     * Returns a constant expression value builder
     */
    public ValueBuilder constant(Object value) {
        return Builder.constant(value);
    }

    /**
     * Returns a language expression value builder
     */
    public ValueBuilder language(String language, String expression) {
        return Builder.language(language, expression);
    }

    /**
     * Returns a simple expression value builder
     */
    public SimpleBuilder simple(String value) {
        return SimpleBuilder.simple(value);
    }

    /**
     * Returns a simple expression value builder
     */
    public SimpleBuilder simple(String value, Class<?> resultType) {
        return SimpleBuilder.simple(value, resultType);
    }

    /**
     * Returns an xpath expression value builder
     *
     * @param value
     *            The XPath expression
     * @return A new XPathBuilder object
     */
    public XPathBuilder xpath(String value) {
        return XPathBuilder.xpath(value);
    }

    /**
     * Returns an xpath expression value builder
     *
     * @param value
     *            The XPath expression
     * @param resultType
     *            The result type that the XPath expression will return.
     * @return A new XPathBuilder object
     */
    // NOTE(review): declared static, unlike the one-argument xpath(String) —
    // kept as-is for source compatibility with any static invocations.
    public static XPathBuilder xpath(String value, Class<?> resultType) {
        return XPathBuilder.xpath(value, resultType);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either a bean instance or a
     * reference to a bean (String).
     *
     * @param beanOrBeanRef
     *            either an instance of a bean or a reference to a bean to
     *            lookup in the Registry
     * @return the builder
     * @deprecated use {@link #method(Object)} instead
     */
    @Deprecated
    public ValueBuilder bean(Object beanOrBeanRef) {
        // Delegate to the replacement to keep a single implementation
        return method(beanOrBeanRef, null);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either a bean instance or a
     * reference to a bean (String).
     *
     * @param beanOrBeanRef
     *            either an instance of a bean or a reference to a bean to
     *            lookup in the Registry
     * @param method
     *            name of method to invoke
     * @return the builder
     * @deprecated use {@link #method(Object, String)} instead
     */
    @Deprecated
    public ValueBuilder bean(Object beanOrBeanRef, String method) {
        // Delegate to the replacement to avoid duplicating the expression logic
        return method(beanOrBeanRef, method);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     *
     * @param beanType
     *            the Class of the bean which we want to invoke
     * @return the builder
     * @deprecated use {@link #method(Class)} instead
     */
    @Deprecated
    public ValueBuilder bean(Class<?> beanType) {
        // Delegate to the replacement to keep a single implementation
        return method(beanType);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     *
     * @param beanType
     *            the Class of the bean which we want to invoke
     * @param method
     *            name of method to invoke
     * @return the builder
     * @deprecated use {@link #method(Class, String)} instead
     */
    @Deprecated
    public ValueBuilder bean(Class<?> beanType, String method) {
        // Delegate to the replacement to keep a single implementation
        return method(beanType, method);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either a bean instance or a
     * reference to a bean (String).
     *
     * @param beanOrBeanRef
     *            either an instance of a bean or a reference to a bean to
     *            lookup in the Registry
     * @return the builder
     */
    public ValueBuilder method(Object beanOrBeanRef) {
        return method(beanOrBeanRef, null);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     * <p/>
     * This method accepts dual parameters. Either a bean instance or a
     * reference to a bean (String).
     *
     * @param beanOrBeanRef
     *            either an instance of a bean or a reference to a bean to
     *            lookup in the Registry
     * @param method
     *            name of method to invoke
     * @return the builder
     */
    public ValueBuilder method(Object beanOrBeanRef, String method) {
        MethodCallExpression expression;
        // A String argument is treated as a bean reference to look up in the Registry
        if (beanOrBeanRef instanceof String) {
            expression = new MethodCallExpression((String) beanOrBeanRef, method);
        } else {
            expression = new MethodCallExpression(beanOrBeanRef, method);
        }
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     *
     * @param beanType
     *            the Class of the bean which we want to invoke
     * @return the builder
     */
    public ValueBuilder method(Class<?> beanType) {
        MethodCallExpression expression = new MethodCallExpression(beanType);
        return new ValueBuilder(expression);
    }

    /**
     * Returns a <a href="http://camel.apache.org/bean-language.html">method
     * call expression</a> value builder
     *
     * @param beanType
     *            the Class of the bean which we want to invoke
     * @param method
     *            name of method to invoke
     * @return the builder
     */
    public ValueBuilder method(Class<?> beanType, String method) {
        MethodCallExpression expression = new MethodCallExpression(beanType, method);
        return new ValueBuilder(expression);
    }

    /**
     * Returns an expression processing the exchange to the given endpoint uri
     *
     * @param uri
     *            endpoint uri to send the exchange to
     * @return the builder
     */
    public ValueBuilder sendTo(String uri) {
        return Builder.sendTo(uri);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the
     * regular expression with the given replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, String replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns an expression value builder that replaces all occurrences of the
     * regular expression with the given replacement
     */
    public ValueBuilder regexReplaceAll(Expression content, String regex, Expression replacement) {
        return Builder.regexReplaceAll(content, regex, replacement);
    }

    /**
     * Returns an exception expression value builder
     */
    public ValueBuilder exceptionMessage() {
        return Builder.exceptionMessage();
    }

    /**
     * Resolves the given URI to an endpoint
     *
     * @param uri
     *            the uri to resolve
     * @throws NoSuchEndpointException
     *             if the endpoint URI could not be resolved
     * @return the endpoint
     */
    public Endpoint endpoint(String uri) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        Endpoint endpoint = getContext().getEndpoint(uri);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the given URI to an endpoint of the specified type
     *
     * @param uri
     *            the uri to resolve
     * @param type
     *            the expected type of the endpoint
     * @throws NoSuchEndpointException
     *             if the endpoint URI could not be resolved
     * @return the endpoint
     */
    public <T extends Endpoint> T endpoint(String uri, Class<T> type) throws NoSuchEndpointException {
        ObjectHelper.notNull(uri, "uri");
        T endpoint = getContext().getEndpoint(uri, type);
        if (endpoint == null) {
            throw new NoSuchEndpointException(uri);
        }
        return endpoint;
    }

    /**
     * Resolves the list of URIs into a list of {@link Endpoint} instances
     *
     * @param uris
     *            list of endpoints to resolve
     * @throws NoSuchEndpointException
     *             if an endpoint URI could not be resolved
     * @return list of endpoints
     */
    public List<Endpoint> endpoints(String... uris) throws NoSuchEndpointException {
        List<Endpoint> endpoints = new ArrayList<Endpoint>();
        for (String uri : uris) {
            endpoints.add(endpoint(uri));
        }
        return endpoints;
    }

    /**
     * Helper method to create a list of {@link Endpoint} instances
     *
     * @param endpoints
     *            endpoints
     * @return list of the given endpoints
     */
    public List<Endpoint> endpoints(Endpoint... endpoints) {
        List<Endpoint> answer = new ArrayList<Endpoint>();
        answer.addAll(Arrays.asList(endpoints));
        return answer;
    }

    /**
     * Creates a default <a
     * href="http://camel.apache.org/error-handler.html">error handler</a>.
     *
     * @return the builder
     */
    public DefaultErrorHandlerBuilder defaultErrorHandler() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Creates a disabled <a
     * href="http://camel.apache.org/error-handler.html">error handler</a> for
     * removing the default error handler
     *
     * @return the builder
     */
    public NoErrorHandlerBuilder noErrorHandler() {
        return new NoErrorHandlerBuilder();
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error
     * handler</a> which just logs errors
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler() {
        return new LoggingErrorHandlerBuilder();
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error
     * handler</a> which just logs errors
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(String log) {
        return loggingErrorHandler(LoggerFactory.getLogger(log));
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error
     * handler</a> which just logs errors
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(Logger log) {
        return new LoggingErrorHandlerBuilder(log);
    }

    /**
     * Creates an <a href="http://camel.apache.org/error-handler.html">error
     * handler</a> which just logs errors
     *
     * @return the builder
     */
    public LoggingErrorHandlerBuilder loggingErrorHandler(Logger log, LoggingLevel level) {
        return new LoggingErrorHandlerBuilder(log, level);
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter
     * Channel EIP:</a> is an error handler for handling messages that could not
     * be delivered to its intended destination.
     *
     * @param deadLetterUri
     *            uri to the dead letter endpoint storing dead messages
     * @return the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(String deadLetterUri) {
        return deadLetterChannel(endpoint(deadLetterUri));
    }

    /**
     * <a href="http://camel.apache.org/dead-letter-channel.html">Dead Letter
     * Channel EIP:</a> is an error handler for handling messages that could not
     * be delivered to its intended destination.
     *
     * @param deadLetterEndpoint
     *            dead letter endpoint storing dead messages
     * @return the builder
     */
    public DeadLetterChannelBuilder deadLetterChannel(Endpoint deadLetterEndpoint) {
        return new DeadLetterChannelBuilder(deadLetterEndpoint);
    }

    // Properties
    // -------------------------------------------------------------------------

    public ModelCamelContext getContext() {
        return context;
    }

    @Deprecated
    public void setContext(CamelContext context) {
        // NOTE(review): throws ClassCastException if the given context is not a
        // ModelCamelContext; prefer setContext(ModelCamelContext).
        this.context = (ModelCamelContext) context;
    }

    public void setContext(ModelCamelContext context) {
        this.context = context;
    }

    public ErrorHandlerBuilder getErrorHandlerBuilder() {
        // Lazily create the default error handler on first access
        if (errorHandlerBuilder == null) {
            errorHandlerBuilder = createErrorHandlerBuilder();
        }
        return errorHandlerBuilder;
    }

    protected ErrorHandlerBuilder createErrorHandlerBuilder() {
        return new DefaultErrorHandlerBuilder();
    }

    /**
     * Sets the error handler to use with processors created by this builder
     */
    public void setErrorHandlerBuilder(ErrorHandlerBuilder errorHandlerBuilder) {
        this.errorHandlerBuilder = errorHandlerBuilder;
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.cloud.controller.iaases.kubernetes;
import com.google.common.collect.Lists;
import io.fabric8.kubernetes.api.model.*;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.context.CloudControllerContext;
import org.apache.stratos.cloud.controller.domain.*;
import org.apache.stratos.cloud.controller.domain.kubernetes.KubernetesCluster;
import org.apache.stratos.cloud.controller.domain.kubernetes.KubernetesClusterContext;
import org.apache.stratos.cloud.controller.domain.kubernetes.KubernetesHost;
import org.apache.stratos.cloud.controller.domain.kubernetes.PortRange;
import org.apache.stratos.cloud.controller.exception.*;
import org.apache.stratos.cloud.controller.iaases.Iaas;
import org.apache.stratos.cloud.controller.iaases.PartitionValidator;
import org.apache.stratos.cloud.controller.util.CloudControllerConstants;
import org.apache.stratos.cloud.controller.util.CloudControllerUtil;
import org.apache.stratos.common.Property;
import org.apache.stratos.common.constants.StratosConstants;
import org.apache.stratos.common.domain.NameValuePair;
import org.apache.stratos.kubernetes.client.KubernetesApiClient;
import org.apache.stratos.kubernetes.client.KubernetesConstants;
import org.apache.stratos.kubernetes.client.exceptions.KubernetesClientException;
import org.apache.stratos.messaging.domain.topology.KubernetesService;
import org.wso2.carbon.registry.core.exceptions.RegistryException;
import java.util.*;
import java.util.concurrent.locks.Lock;
/**
* Kubernetes IaaS implementation.
*/
public class KubernetesIaas extends Iaas {
private static final Log log = LogFactory.getLog(KubernetesIaas.class);
private static final long DEFAULT_POD_ACTIVATION_TIMEOUT = 60000; // 1 min
private static final String PAYLOAD_PARAMETER_SEPARATOR = ",";
private static final String PAYLOAD_PARAMETER_NAME_VALUE_SEPARATOR = "=";
private static final String PAYLOAD_PARAMETER_PREFIX = "payload_parameter.";
private static final String PORT_MAPPINGS = "PORT_MAPPINGS";
private static final String KUBERNETES_CONTAINER_CPU = "KUBERNETES_CONTAINER_CPU";
private static final String KUBERNETES_CONTAINER_MEMORY = "KUBERNETES_CONTAINER_MEMORY";
private static final String KUBERNETES_SERVICE_SESSION_AFFINITY = "KUBERNETES_SERVICE_SESSION_AFFINITY";
private static final String KUBERNETES_CONTAINER_CPU_DEFAULT = "kubernetes.container.cpu.default";
private static final String KUBERNETES_CONTAINER_MEMORY_DEFAULT = "kubernetes.container.memory.default";
public static final String POD_ID_PREFIX = "pod";
public static final String SERVICE_NAME_PREFIX = "service";
private PartitionValidator partitionValidator;
private List<NameValuePair> payload;
private Long podActivationTimeout;
/**
 * Creates a kubernetes IaaS for the given provider. The pod activation timeout
 * may be overridden via the "stratos.pod.activation.timeout" system property;
 * otherwise the default of {@link #DEFAULT_POD_ACTIVATION_TIMEOUT} is used.
 */
public KubernetesIaas(IaasProvider iaasProvider) {
    super(iaasProvider);
    partitionValidator = new KubernetesPartitionValidator();
    payload = new ArrayList<NameValuePair>();
    // Long.getLong returns null when the system property is absent or not a number
    podActivationTimeout = Long.getLong("stratos.pod.activation.timeout");
    if (podActivationTimeout == null) {
        podActivationTimeout = DEFAULT_POD_ACTIVATION_TIMEOUT;
        if (log.isInfoEnabled()) {
            // Fixed misleading message: this branch runs when the timeout was NOT
            // configured and the default is applied
            log.info("Pod activation timeout not configured, using default: " + podActivationTimeout);
        }
    }
}
@Override
public void initialize() {
    // Intentionally empty: all state for this IaaS is prepared in the constructor,
    // so no additional initialization step is required.
}
/**
 * Set dynamic payload which needs to be passed to the containers as environment
 * variables. The payload is a comma-separated list of name=value pairs; entries
 * without a name/value separator are silently skipped.
 *
 * @param payloadByteArray raw payload bytes; null simply clears any existing payload
 */
@Override
public void setDynamicPayload(byte[] payloadByteArray) {
    // Clear existing payload parameters
    payload.clear();
    if (payloadByteArray == null) {
        return;
    }
    // NOTE(review): decodes with the platform default charset — payload content is
    // presumably ASCII; confirm before relying on non-ASCII values.
    String payloadString = new String(payloadByteArray);
    // String.split never returns null, so no null-checks are needed on the results
    for (String parameter : payloadString.split(PAYLOAD_PARAMETER_SEPARATOR)) {
        // Split on the first separator only so values may themselves contain '='
        String[] nameValueArray = parameter.split(PAYLOAD_PARAMETER_NAME_VALUE_SEPARATOR, 2);
        if (nameValueArray.length == 2) {
            payload.add(new NameValuePair(nameValueArray[0], nameValueArray[1]));
        }
    }
    if (log.isDebugEnabled()) {
        log.debug("Dynamic payload is set: " + payload.toString());
    }
}
/**
 * Starts a new container for the given member; the supplied payload is passed
 * to the container as environment variables via {@link #setDynamicPayload(byte[])}.
 */
@Override
public MemberContext startInstance(MemberContext memberContext, byte[] payload) throws CartridgeNotFoundException {
    setDynamicPayload(payload);
    return startContainer(memberContext);
}
/**
 * Returns the validator used to validate kubernetes partitions
 * (created once in the constructor).
 */
@Override
public PartitionValidator getPartitionValidator() {
    return partitionValidator;
}
/**
 * Terminates the container backing the given member. A registry failure while
 * persisting the termination is logged but deliberately not propagated.
 */
@Override
public void terminateInstance(MemberContext memberContext)
        throws InvalidCartridgeTypeException, InvalidMemberException, MemberTerminationFailedException {
    try {
        terminateContainer(memberContext);
    } catch (RegistryException e) {
        String message = String.format(
                "Could not persist data while terminating container for member [member-id] %s",
                memberContext.getMemberId());
        log.error(message, e);
    }
}
/**
 * Starts a container via kubernetes for the given member context.
 * <p>
 * Under the member-context write lock: validates the cluster, partition and
 * cartridge referenced by the member, prepares the kubernetes cluster context,
 * creates kubernetes services and the pod, waits for the pod to reach the
 * running state, and updates the member context with the pod details.
 *
 * @param memberContext member to start a container for
 * @return the updated member context
 * @throws CartridgeNotFoundException if the member's cartridge is not registered
 */
public MemberContext startContainer(MemberContext memberContext) throws CartridgeNotFoundException {
    Lock lock = null;
    try {
        lock = CloudControllerContext.getInstance().acquireMemberContextWriteLock();
        handleNullObject(memberContext, "member context is null");
        log.info(String.format("Starting container: [application] %s [cartridge] %s [member] %s",
                memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId()));
        // Validate cluster id
        String clusterId = memberContext.getClusterId();
        String memberId = memberContext.getMemberId();
        handleNullObject(clusterId, "cluster id is null in member context");
        // Validate cluster context
        ClusterContext clusterContext = CloudControllerContext.getInstance().getClusterContext(clusterId);
        handleNullObject(clusterContext,
                String.format("Cluster context not found: [application] %s [cartridge] %s " + "[cluster] %s",
                        memberContext.getApplicationId(), memberContext.getCartridgeType(), clusterId));
        // Validate partition
        Partition partition = memberContext.getPartition();
        handleNullObject(partition, String.format(
                "partition not found in member context: [application] %s " + "[cartridge] %s [member] %s",
                memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId()));
        // Validate cartridge
        String cartridgeType = clusterContext.getCartridgeType();
        Cartridge cartridge = CloudControllerContext.getInstance().getCartridge(cartridgeType);
        if (cartridge == null) {
            String msg = String.format("Cartridge not found: [application] %s [cartridge] %s",
                    memberContext.getApplicationId(), memberContext.getCartridgeType());
            log.error(msg);
            throw new CartridgeNotFoundException(msg);
        }
        // Resolve the kubernetes cluster assigned to the member's partition
        String kubernetesClusterId = partition.getKubernetesClusterId();
        KubernetesCluster kubernetesCluster = CloudControllerContext.getInstance().
                getKubernetesCluster(kubernetesClusterId);
        handleNullObject(kubernetesCluster, "kubernetes cluster not found: " +
                "[kubernetes-cluster] " + kubernetesClusterId + " [cluster] " + clusterId +
                " [member] " + memberId);
        // Prepare kubernetes context
        String kubernetesMasterIp = kubernetesCluster.getKubernetesMaster().getPrivateIPAddress();
        PortRange kubernetesPortRange = kubernetesCluster.getPortRange();
        String kubernetesMasterPort = CloudControllerUtil
                .getProperty(kubernetesCluster.getKubernetesMaster().getProperties(),
                        StratosConstants.KUBERNETES_MASTER_PORT, StratosConstants.KUBERNETES_MASTER_DEFAULT_PORT);
        // Add kubernetes cluster payload parameters to payload
        // (properties prefixed "payload_parameter." are forwarded to the container)
        if ((kubernetesCluster.getProperties() != null) && (kubernetesCluster.getProperties().getProperties()
                != null)) {
            for (Property property : kubernetesCluster.getProperties().getProperties()) {
                if (property != null) {
                    if (property.getName().startsWith(PAYLOAD_PARAMETER_PREFIX)) {
                        String name = property.getName().replace(PAYLOAD_PARAMETER_PREFIX, "");
                        payload.add(new NameValuePair(name, property.getValue()));
                    }
                }
            }
        }
        KubernetesClusterContext kubernetesClusterContext = getKubernetesClusterContext(kubernetesClusterId,
                kubernetesMasterIp, kubernetesMasterPort, kubernetesPortRange.getUpper(),
                kubernetesPortRange.getLower());
        // Generate kubernetes service ports and update port mappings in cartridge
        generateKubernetesServicePorts(clusterContext.getApplicationId(), clusterContext.getClusterId(),
                kubernetesClusterContext, cartridge);
        // Create kubernetes services for port mappings
        KubernetesApiClient kubernetesApi = kubernetesClusterContext.getKubApi();
        createKubernetesServices(kubernetesApi, clusterContext, kubernetesCluster, kubernetesClusterContext,
                memberContext);
        // Create pod
        createPod(clusterContext, memberContext, kubernetesApi, kubernetesClusterContext);
        // Wait for pod status to be changed to running
        Pod pod = waitForPodToBeActivated(memberContext, kubernetesApi);
        // Update member context
        updateMemberContext(memberContext, pod, kubernetesCluster);
        log.info(String.format("Container started successfully: [application] %s [cartridge] %s [member] %s "
                + "[pod] %s [cpu] %s [memory] %s", memberContext.getApplicationId(),
                memberContext.getCartridgeType(), memberContext.getMemberId(), memberContext.getKubernetesPodId(),
                memberContext.getInstanceMetadata().getCpu(), memberContext.getInstanceMetadata().getRam()));
        return memberContext;
    } catch (Exception e) {
        // NOTE(review): every failure (including InterruptedException from the
        // pod-activation wait) is wrapped in a RuntimeException — confirm callers
        // expect an unchecked failure here.
        String msg = String.format("Could not start container: [application] %s [cartridge] %s [member] %s",
                memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId());
        log.error(msg, e);
        throw new RuntimeException(msg, e);
    } finally {
        if (lock != null) {
            CloudControllerContext.getInstance().releaseWriteLock(lock);
        }
    }
}
/**
 * Updates the member context with the runtime details of the started pod:
 * instance id (pod name), private IP (pod IP) and public IP (the kubernetes
 * host's public IP when resolvable, otherwise the pod's host IP).
 */
private void updateMemberContext(MemberContext memberContext, Pod pod, KubernetesCluster kubernetesCluster) {
    String memberPrivateIPAddress = pod.getStatus().getPodIP();
    String podHostIPAddress = pod.getStatus().getHostIP();
    String memberPublicIPAddress = podHostIPAddress;
    // Prefer the configured public IP of the kubernetes host over the raw host IP
    String kubernetesHostPublicIP = findKubernetesHostPublicIPAddress(kubernetesCluster, podHostIPAddress);
    if (StringUtils.isNotBlank(kubernetesHostPublicIP)) {
        memberPublicIPAddress = kubernetesHostPublicIP;
        if (log.isInfoEnabled()) {
            log.info(String.format("Member public IP address set to kubernetes host public IP address:"
                    + "[pod-host-ip] %s [kubernetes-host-public-ip] %s", podHostIPAddress, kubernetesHostPublicIP));
        }
    }
    memberContext.setInstanceId(pod.getMetadata().getName());
    memberContext.setDefaultPrivateIP(memberPrivateIPAddress);
    memberContext.setPrivateIPs(new String[] { memberPrivateIPAddress });
    memberContext.setDefaultPublicIP(memberPublicIPAddress);
    memberContext.setPublicIPs(new String[] { memberPublicIPAddress });
    // Removed redundant no-op self-assignments that were here:
    // setInitTime(getInitTime()) and setProperties(getProperties()).
}
/**
 * Finds the public IP address of the kubernetes host whose private IP matches
 * the given pod host IP. Returns null when either argument is missing or no
 * matching host exists.
 */
private String findKubernetesHostPublicIPAddress(KubernetesCluster kubernetesCluster, String podHostIP) {
    if ((kubernetesCluster == null) || StringUtils.isBlank(podHostIP)) {
        return null;
    }
    for (KubernetesHost kubernetesHost : kubernetesCluster.getKubernetesHosts()) {
        if ((kubernetesHost != null) && podHostIP.equals(kubernetesHost.getPrivateIPAddress())) {
            return kubernetesHost.getPublicIPAddress();
        }
    }
    return null;
}
/**
 * Polls the kubernetes API until the member's pod reaches the RUNNING phase,
 * or fails once the configured pod activation timeout elapses.
 *
 * @param memberContext member whose pod is being activated
 * @param kubernetesApi client used to look up the pod
 * @return the pod, once its phase is RUNNING
 * @throws KubernetesClientException if the pod lookup fails
 * @throws InterruptedException if the polling sleep is interrupted
 */
private Pod waitForPodToBeActivated(MemberContext memberContext, KubernetesApiClient kubernetesApi)
        throws KubernetesClientException, InterruptedException {
    Pod pod;
    boolean podCreated = false;
    boolean podRunning = false;
    long startTime = System.currentTimeMillis();
    while (!podRunning) {
        pod = kubernetesApi.getPod(memberContext.getKubernetesPodId());
        if (pod != null) {
            podCreated = true;
            if (pod.getStatus().getPhase().equals(KubernetesConstants.POD_STATUS_RUNNING)) {
                log.info(String.format(
                        "Pod status changed to running: [application] %s [cartridge] %s [member] %s " + "[pod] %s",
                        memberContext.getApplicationId(), memberContext.getCartridgeType(),
                        memberContext.getMemberId(), pod.getMetadata().getName()));
                return pod;
            } else {
                log.info(String.format("Waiting pod status to be changed to running: [application] %s "
                        + "[cartridge] %s [member] %s [pod] %s", memberContext.getApplicationId(),
                        memberContext.getCartridgeType(), memberContext.getMemberId(),
                        pod.getMetadata().getName()));
            }
        } else {
            // Pod object not visible via the API yet
            log.info(String.format(
                    "Waiting for pod to be created: [application] %s " + "[cartridge] %s [member] %s [pod] %s",
                    memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId(),
                    memberContext.getKubernetesPodId()));
        }
        // Give up once the activation timeout has elapsed
        if ((System.currentTimeMillis() - startTime) > podActivationTimeout) {
            break;
        }
        // Poll interval: 5 seconds between API lookups
        Thread.sleep(5000);
    }
    // Timed out: build a failure message that distinguishes "pod never appeared"
    // from "pod appeared but never reached RUNNING", log it, and fail.
    String message;
    if (podCreated) {
        // Pod created but status did not change to running
        message = String.format("Pod status did not change to running within %d sec: "
                        + "[application] %s [cartridge] %s [member] %s [pod] %s",
                (podActivationTimeout.intValue() / 1000), memberContext.getApplicationId(),
                memberContext.getCartridgeType(), memberContext.getMemberId(), memberContext.getKubernetesPodId());
        log.error(message);
    } else {
        // Pod did not create
        message = String.format("Pod did not create within %d sec: "
                        + "[application] %s [cartridge] %s [member] %s [pod] %s",
                (podActivationTimeout.intValue() / 1000), memberContext.getApplicationId(),
                memberContext.getCartridgeType(), memberContext.getMemberId(), memberContext.getKubernetesPodId());
        log.error(message);
    }
    throw new RuntimeException(message);
}
/**
* Create new pod and pass environment variables.
*
* @param memberContext
* @param kubernetesApi
* @param kubernetesClusterContext
* @throws KubernetesClientException
*/
private void createPod(ClusterContext clusterContext, MemberContext memberContext,
KubernetesApiClient kubernetesApi, KubernetesClusterContext kubernetesClusterContext)
throws KubernetesClientException, RegistryException {
String applicationId = memberContext.getApplicationId();
String cartridgeType = memberContext.getCartridgeType();
String clusterId = memberContext.getClusterId();
String memberId = memberContext.getMemberId();
if (log.isInfoEnabled()) {
log.info(
String.format("Creating kubernetes pod: [application] %s [cartridge] %s [member] %s", applicationId,
cartridgeType, memberId));
}
Partition partition = memberContext.getPartition();
if (partition == null) {
String message = String
.format("Partition not found in member context: [application] %s [cartridge] %s " + "[member] %s ",
applicationId, cartridgeType, memberId);
log.error(message);
throw new RuntimeException(message);
}
Cartridge cartridge = CloudControllerContext.getInstance().getCartridge(cartridgeType);
if (cartridge == null) {
String message = "Could not find cartridge: [cartridge] " + cartridgeType;
log.error(message);
throw new RuntimeException(message);
}
// Set default values to zero to avoid cpu and memory restrictions
String cpu = System.getProperty(KUBERNETES_CONTAINER_CPU_DEFAULT, "0");
String memory = System.getProperty(KUBERNETES_CONTAINER_MEMORY_DEFAULT, "0");
Property cpuProperty = cartridge.getProperties().getProperty(KUBERNETES_CONTAINER_CPU);
if (cpuProperty != null) {
cpu = cpuProperty.getValue();
}
Property memoryProperty = cartridge.getProperties().getProperty(KUBERNETES_CONTAINER_MEMORY);
if (memoryProperty != null) {
memory = memoryProperty.getValue();
}
IaasProvider iaasProvider = CloudControllerContext.getInstance()
.getIaasProviderOfPartition(cartridge.getType(), partition.getId());
if (iaasProvider == null) {
String message = "Could not find iaas provider: [partition] " + partition.getId();
log.error(message);
throw new RuntimeException(message);
}
// Add dynamic payload to the member context
memberContext.setDynamicPayload(payload.toArray(new NameValuePair[payload.size()]));
// Find next available sequence number
long podSeqNo = kubernetesClusterContext.getNextPodSeqNo();
String podId = preparePodId(podSeqNo);
while (kubernetesApi.getPod(podId) != null) {
podSeqNo = kubernetesClusterContext.getNextPodSeqNo();
podId = preparePodId(podSeqNo);
}
// Create pod
String podName = DigestUtils.md5Hex(clusterId);
String dockerImage = iaasProvider.getImage();
List<EnvVar> environmentVariables = KubernetesIaasUtil
.prepareEnvironmentVariables(clusterContext, memberContext);
List<ContainerPort> ports = KubernetesIaasUtil.convertPortMappings(Arrays.asList(cartridge.getPortMappings()));
log.info(String.format("Starting pod: [application] %s [cartridge] %s [member] %s " + "[cpu] %s [memory] %s",
memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId(), cpu,
memory));
Map<String, String> podLabels = new HashMap<>();
podLabels.put(KubernetesConstants.SERVICE_SELECTOR_LABEL, podName);
podLabels.put(CloudControllerConstants.APPLICATION_ID_LABEL,
trimLabel(CloudControllerConstants.APPLICATION_ID_LABEL, memberContext.getApplicationId()));
podLabels.put(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL,
trimLabel(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL, memberContext.getClusterInstanceId()));
podLabels.put(CloudControllerConstants.MEMBER_ID_LABEL,
trimLabel(CloudControllerConstants.MEMBER_ID_LABEL, memberContext.getMemberId()));
Map<String, String> podAnnotations = new HashMap<>();
podAnnotations.put(CloudControllerConstants.APPLICATION_ID_LABEL, memberContext.getApplicationId());
podAnnotations.put(CloudControllerConstants.CARTRIDGE_TYPE_LABEL, memberContext.getCartridgeType());
podAnnotations.put(CloudControllerConstants.CLUSTER_ID_LABEL, memberContext.getClusterId());
podAnnotations.put(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL, memberContext.getClusterInstanceId());
podAnnotations.put(CloudControllerConstants.MEMBER_ID_LABEL, memberContext.getMemberId());
kubernetesApi.createPod(podId, podName, podLabels, podAnnotations, dockerImage, cpu, memory, ports,
environmentVariables);
log.info(String.format("Pod started successfully: [application] %s [cartridge] %s [member] %s "
+ "[pod] %s [pod-label] %s [cpu] %s [memory] %s", memberContext.getApplicationId(),
memberContext.getCartridgeType(), memberContext.getMemberId(), podId, podName, cpu, memory));
// Add pod id to member context
memberContext.setKubernetesPodId(podId);
memberContext.setKubernetesPodName(podName);
// Create instance metadata
InstanceMetadata instanceMetadata = new InstanceMetadata();
instanceMetadata.setImageId(dockerImage);
instanceMetadata.setCpu(cpu);
instanceMetadata.setRam(memory);
memberContext.setInstanceMetadata(instanceMetadata);
// Persist cloud controller context
CloudControllerContext.getInstance().persist();
}
/**
 * Builds a pod identifier of the form {@code <prefix>-<sequence-number>}.
 *
 * @param podSeqNo sequence number allocated for the pod
 * @return the pod id
 */
private String preparePodId(long podSeqNo) {
    return String.format("%s-%d", POD_ID_PREFIX, podSeqNo);
}
/**
 * Creates a kubernetes service (proxy) for each cluster port mapping of the given cluster,
 * skipping mappings that already have a matching service. Newly created services are
 * registered in the cluster context and the cloud controller context is persisted.
 *
 * @param kubernetesApi            kubernetes API client used to create and look up services
 * @param clusterContext           cluster the services belong to
 * @param kubernetesCluster        kubernetes cluster whose minion public IPs are exposed
 * @param kubernetesClusterContext context providing service sequence numbers
 * @param memberContext            member supplying the cluster instance id
 * @throws KubernetesClientException if a kubernetes API call fails
 * @throws RegistryException         if persisting the cloud controller context fails
 */
private void createKubernetesServices(KubernetesApiClient kubernetesApi, ClusterContext clusterContext,
        KubernetesCluster kubernetesCluster, KubernetesClusterContext kubernetesClusterContext,
        MemberContext memberContext) throws KubernetesClientException, RegistryException {
    String clusterId = clusterContext.getClusterId();
    String cartridgeType = clusterContext.getCartridgeType();
    Cartridge cartridge = CloudControllerContext.getInstance().getCartridge(cartridgeType);
    if (cartridge == null) {
        String message = "Could not create kubernetes services, cartridge not found: [cartridge] " + cartridgeType;
        log.error(message);
        throw new RuntimeException(message);
    }
    // Session affinity is optional; read from the cartridge properties when present
    String sessionAffinity = null;
    Property sessionAffinityProperty = cartridge.getProperties().getProperty(KUBERNETES_SERVICE_SESSION_AFFINITY);
    if (sessionAffinityProperty != null) {
        sessionAffinity = sessionAffinityProperty.getValue();
    }
    // Prepare minion public IP addresses
    List<String> minionPublicIPList = prepareMinionIPAddresses(kubernetesCluster);
    if (log.isDebugEnabled()) {
        log.debug(String.format("Minion public IPs: %s", minionPublicIPList));
    }
    Collection<ClusterPortMapping> clusterPortMappings = CloudControllerContext.getInstance()
            .getClusterPortMappings(clusterContext.getApplicationId(), clusterId);
    if (clusterPortMappings == null) {
        log.info("No cluster port mappings found. Stratos will not attempt to create Kubernetes services");
        return;
    }
    // All ports of a cluster share one service name derived from the cluster id
    String serviceName = DigestUtils.md5Hex(clusterId);
    Collection<KubernetesService> kubernetesServices = clusterContext
            .getKubernetesServices(memberContext.getClusterInstanceId());
    for (ClusterPortMapping clusterPortMapping : clusterPortMappings) {
        // Skip if already created
        int containerPort = clusterPortMapping.getPort();
        KubernetesService existingService = findKubernetesService(kubernetesServices, containerPort);
        if ((existingService != null) && serviceExistsInCluster(existingService.getId(), kubernetesClusterContext,
                memberContext, clusterPortMapping.getName())) {
            log.info(String.format("Kubernetes service already exists: [kubernetes-cluster] %s "
                            + "[cluster] %s [service-name] %s [container-port] %d ",
                    kubernetesCluster.getClusterId(), clusterId, serviceName, containerPort));
            continue;
        }
        // Find next available service sequence number (skip ids already taken in the cluster)
        long serviceSeqNo = kubernetesClusterContext.getNextServiceSeqNo();
        String serviceId = KubernetesIaasUtil.fixSpecialCharacters(prepareServiceName(serviceSeqNo));
        while (kubernetesApi.getService(serviceId) != null) {
            serviceSeqNo = kubernetesClusterContext.getNextServiceSeqNo();
            serviceId = KubernetesIaasUtil.fixSpecialCharacters(prepareServiceName(serviceSeqNo));
        }
        if (log.isInfoEnabled()) {
            log.info(String.format("Creating kubernetes service: [cluster] %s [service-id] %s [service-name] " +
                    "%s " + "[protocol] %s [service-port] %d [container-port] %s", clusterId, serviceId,
                    serviceName, clusterPortMapping.getProtocol(), clusterPortMapping.getKubernetesServicePort(),
                    containerPort));
        }
        // Create kubernetes service for port mapping
        int servicePort = clusterPortMapping.getKubernetesServicePort();
        String serviceType = clusterPortMapping.getKubernetesPortType();
        String containerPortName = KubernetesIaasUtil.preparePortNameFromPortMapping(clusterPortMapping);
        // Labels are trimmed to the kubernetes length limit; annotations carry the full values
        Map<String, String> serviceLabels = new HashMap<>();
        serviceLabels.put(CloudControllerConstants.APPLICATION_ID_LABEL,
                trimLabel(CloudControllerConstants.APPLICATION_ID_LABEL, clusterContext.getApplicationId()));
        serviceLabels.put(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL,
                trimLabel(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL,
                        memberContext.getClusterInstanceId()));
        serviceLabels.put(CloudControllerConstants.PORT_NAME_LABEL,
                trimLabel(CloudControllerConstants.PORT_NAME_LABEL, clusterPortMapping.getName()));
        Map<String, String> serviceAnnotations = new HashMap<>();
        serviceAnnotations.put(CloudControllerConstants.APPLICATION_ID_LABEL, clusterContext.getApplicationId());
        serviceAnnotations.put(CloudControllerConstants.CLUSTER_ID_LABEL, clusterContext.getClusterId());
        serviceAnnotations
                .put(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL, memberContext.getClusterInstanceId());
        serviceAnnotations.put(CloudControllerConstants.PORT_NAME_LABEL, clusterPortMapping.getName());
        serviceAnnotations.put(CloudControllerConstants.PROTOCOL_LABEL, clusterPortMapping.getProtocol());
        serviceAnnotations
                .put(CloudControllerConstants.PORT_TYPE_LABEL, clusterPortMapping.getKubernetesPortType());
        serviceAnnotations.put(CloudControllerConstants.SERVICE_PORT_LABEL,
                String.valueOf(clusterPortMapping.getKubernetesServicePort()));
        serviceAnnotations.put(CloudControllerConstants.PORT_LABEL, String.valueOf(clusterPortMapping.getPort()));
        serviceAnnotations
                .put(CloudControllerConstants.PROXY_PORT_LABEL, String.valueOf(clusterPortMapping.getProxyPort()));
        kubernetesApi
                .createService(serviceId, serviceName, serviceLabels, serviceAnnotations, servicePort, serviceType,
                        containerPortName, containerPort, sessionAffinity);
        // Brief pause before reading the service back from the API server
        try {
            Thread.sleep(1000);
        } catch (InterruptedException ignore) {
        }
        Service service = kubernetesApi.getService(serviceId);
        if (service == null) {
            throw new KubernetesClientException("Kubernetes service was not created: [service] " + serviceId);
        }
        KubernetesService kubernetesService = new KubernetesService();
        kubernetesService.setId(service.getMetadata().getName());
        kubernetesService.setPortalIP(service.getSpec().getClusterIP());
        // Expose minions public IP addresses as they need to be accessed by external networks
        String[] minionPublicIPArray = minionPublicIPList.toArray(new String[minionPublicIPList.size()]);
        kubernetesService.setPublicIPs(minionPublicIPArray);
        kubernetesService.setProtocol(clusterPortMapping.getProtocol());
        kubernetesService.setPortName(clusterPortMapping.getName());
        String kubernetesPortType = service.getSpec().getType();
        kubernetesService.setServiceType(kubernetesPortType);
        kubernetesService.setKubernetesClusterId(memberContext.getPartition().getKubernetesClusterId());
        // NodePort services expose the node port; other types expose the service port
        if (kubernetesPortType.equals(KubernetesConstants.NODE_PORT)) {
            kubernetesService.setPort(service.getSpec().getPorts().get(0).getNodePort());
        } else {
            kubernetesService.setPort(service.getSpec().getPorts().get(0).getPort());
        }
        kubernetesService.setContainerPort(containerPort);
        clusterContext.addKubernetesService(memberContext.getClusterInstanceId(), kubernetesService);
        CloudControllerContext.getInstance().persist();
        if (log.isInfoEnabled()) {
            log.info(String.format(
                    "Kubernetes service successfully created: [cluster] %s [service-id] %s [protocol] %s "
                            + "[node-port] %d [container-port] %s", clusterId, serviceId,
                    clusterPortMapping.getProtocol(), servicePort, containerPort));
        }
    }
}
/**
 * Checks whether a kubernetes service with the given id exists in the kubernetes cluster
 * AND belongs to the same application, cluster instance and port mapping as the member.
 *
 * @param serviceId                id of the service to look up
 * @param kubernetesClusterContext context holding the kubernetes API client
 * @param memberContext            member whose application/cluster-instance ids must match
 * @param portName                 port mapping name that must match
 * @return true if a matching service exists; false otherwise
 * @throws KubernetesClientException if the kubernetes API call fails
 */
private boolean serviceExistsInCluster(String serviceId, KubernetesClusterContext kubernetesClusterContext,
        MemberContext memberContext, String portName) throws KubernetesClientException {
    Service existing = kubernetesClusterContext.getKubApi().getService(serviceId);
    if (existing == null) {
        return false;
    }
    // Match on the identifying annotations written when the service was created
    Map<String, String> annotations = existing.getMetadata().getAnnotations();
    String annotatedApplicationId = annotations.get(CloudControllerConstants.APPLICATION_ID_LABEL);
    String annotatedClusterInstanceId = annotations.get(CloudControllerConstants.CLUSTER_INSTANCE_ID_LABEL);
    String annotatedPortName = annotations.get(CloudControllerConstants.PORT_NAME_LABEL);
    return StringUtils.isNotEmpty(annotatedApplicationId)
            && StringUtils.isNotEmpty(annotatedClusterInstanceId)
            && StringUtils.isNotEmpty(annotatedPortName)
            && annotatedApplicationId.equals(memberContext.getApplicationId())
            && annotatedClusterInstanceId.equals(memberContext.getClusterInstanceId())
            && annotatedPortName.equals(portName);
}
/**
 * Trims a label value to the maximum length accepted by kubernetes. Trimmed values are
 * marked with a trailing "X" and a warning is logged; values within limits (or empty)
 * are returned unchanged.
 *
 * @param key   label key, used only for logging
 * @param value label value to trim
 * @return the original value, or the trimmed value when it exceeded the limit
 */
private String trimLabel(String key, String value) {
    if (StringUtils.isEmpty(value) || (value.length() <= KubernetesConstants.MAX_LABEL_LENGTH)) {
        return value;
    }
    // Keep MAX_LABEL_LENGTH - 2 characters and append a marker so trimming is detectable
    String trimmed = value.substring(0, KubernetesConstants.MAX_LABEL_LENGTH - 2).concat("X");
    log.warn(String.format("Kubernetes label trimmed: [key] %s [original] %s [trimmed] %s", key, value,
            trimmed));
    return trimmed;
}
/**
 * Builds a service name of the form {@code <prefix>-<sequence-number>}.
 *
 * @param serviceSeqNo sequence number allocated for the service
 * @return the service name
 */
private String prepareServiceName(long serviceSeqNo) {
    return String.format("%s-%d", SERVICE_NAME_PREFIX, serviceSeqNo);
}
/**
 * Collects the public IP addresses of all minion hosts in the kubernetes cluster.
 *
 * @param kubernetesCluster cluster whose hosts are inspected
 * @return public IPs of the non-null hosts
 * @throws RuntimeException when the cluster has no hosts
 */
private List<String> prepareMinionIPAddresses(KubernetesCluster kubernetesCluster) {
    KubernetesHost[] hosts = kubernetesCluster.getKubernetesHosts();
    if ((hosts == null) || (hosts.length == 0) || (hosts[0] == null)) {
        throw new RuntimeException(
                "Hosts not found in kubernetes cluster: [cluster] " + kubernetesCluster.getClusterId());
    }
    List<String> publicIPs = new ArrayList<String>();
    for (KubernetesHost host : hosts) {
        if (host != null) {
            publicIPs.add(host.getPublicIPAddress());
        }
    }
    return publicIPs;
}
/**
 * Finds a kubernetes service by its container port.
 *
 * @param kubernetesServices services to search; may be null
 * @param containerPort      container port to match
 * @return the matching service, or null when none matches
 */
private KubernetesService findKubernetesService(Collection<KubernetesService> kubernetesServices,
        int containerPort) {
    if (kubernetesServices == null) {
        return null;
    }
    for (KubernetesService candidate : kubernetesServices) {
        if (candidate.getContainerPort() == containerPort) {
            return candidate;
        }
    }
    return null;
}
/**
 * Generates kubernetes service ports for every port mapping of the cluster and appends
 * the resulting PORT_MAPPINGS entry to the dynamic payload. NodePort mappings get a port
 * from the kubernetes cluster context's port pool; other mappings reuse the cartridge
 * port. Already-assigned service ports are left untouched.
 *
 * @param applicationId            application the cluster belongs to
 * @param clusterId                cluster whose port mappings are processed
 * @param kubernetesClusterContext context used to allocate node ports
 * @param cartridge                cartridge providing the port mapping definitions; ignored when null
 * @throws KubernetesClientException if querying existing services fails
 * @throws RegistryException         if persisting the cloud controller context fails
 */
private void generateKubernetesServicePorts(String applicationId, String clusterId,
        KubernetesClusterContext kubernetesClusterContext, Cartridge cartridge)
        throws KubernetesClientException, RegistryException {
    // Serialize port generation so two threads cannot allocate the same node port
    synchronized (KubernetesIaas.class) {
        if (cartridge != null) {
            StringBuilder portMappingStrBuilder = new StringBuilder();
            for (PortMapping portMapping : Arrays.asList(cartridge.getPortMappings())) {
                Collection<ClusterPortMapping> clusterPortMappings = CloudControllerContext.getInstance()
                        .getClusterPortMappings(applicationId, clusterId);
                if (clusterPortMappings == null) {
                    throw new CloudControllerException(String.format(
                            "Cluster port mappings not found: " + "[application-id] %s [cluster-id] %s",
                            applicationId, clusterId));
                }
                ClusterPortMapping clusterPortMapping = findClusterPortMapping(clusterPortMappings, portMapping);
                if (clusterPortMapping == null) {
                    throw new CloudControllerException(String.format("Cluster port mapping not found: "
                            + "[application-id] %s [cluster-id] %s [transport] %s", applicationId,
                            clusterId, portMapping.getName()));
                }
                if (clusterPortMapping.getKubernetesPortType() == null) {
                    throw new CloudControllerException(String.format("Kubernetes service type not "
                            + "found [application-id] %s [cluster-id] %s [cartridge] %s", applicationId,
                            clusterId, cartridge));
                }
                // Copy the cartridge-level port type onto the cluster port mapping
                String serviceType = portMapping.getKubernetesPortType();
                clusterPortMapping.setKubernetesPortType(serviceType);
                // If kubernetes service port is already set, skip setting a new one
                if (clusterPortMapping.getKubernetesServicePort() == 0) {
                    if (serviceType.equals(KubernetesConstants.NODE_PORT)) {
                        int nextServicePort = kubernetesClusterContext.getNextServicePort();
                        // -1 signals that the port pool is exhausted
                        if (nextServicePort == -1) {
                            throw new RuntimeException(
                                    String.format("Could not generate service port: [cluster-id] %s " + "[port] %d",
                                            clusterId, portMapping.getPort()));
                        }
                        // Find next available service port
                        KubernetesApiClient kubernetesApi = kubernetesClusterContext.getKubApi();
                        List<Service> services = kubernetesApi.getServices();
                        while (!nodePortAvailable(services, nextServicePort)) {
                            nextServicePort = kubernetesClusterContext.getNextServicePort();
                        }
                        clusterPortMapping.setKubernetesServicePort(nextServicePort);
                    } else {
                        // Non NodePort mappings reuse the cartridge port as the service port
                        clusterPortMapping.setKubernetesServicePort(portMapping.getPort());
                    }
                } else {
                    if (log.isDebugEnabled()) {
                        log.debug(String.format("Kubernetes service port is already set: [application-id] %s "
                                + "[cluster-id] %s [port] %d [service-port] %d", applicationId, clusterId,
                                clusterPortMapping.getPort(), clusterPortMapping.getKubernetesServicePort()));
                    }
                }
                // Add port mappings to payload (semicolon-separated list of entries)
                if (portMappingStrBuilder.toString().length() > 0) {
                    portMappingStrBuilder.append(";");
                }
                portMappingStrBuilder.append(String
                        .format("NAME:%s|PROTOCOL:%s|PORT:%d|PROXY_PORT:%d|TYPE:%s", clusterPortMapping.getName(),
                                clusterPortMapping.getProtocol(), clusterPortMapping.getKubernetesServicePort(),
                                clusterPortMapping.getProxyPort(), clusterPortMapping.getKubernetesPortType()));
                if (log.isInfoEnabled()) {
                    log.info(String.format("Kubernetes service port generated: [application-id] %s "
                            + "[cluster-id] %s [port] %d [service-port] %d", applicationId, clusterId,
                            clusterPortMapping.getPort(), clusterPortMapping.getKubernetesServicePort()));
                }
            }
            NameValuePair nameValuePair = new NameValuePair(PORT_MAPPINGS, portMappingStrBuilder.toString());
            payload.add(nameValuePair);
            // Persist service ports added to cluster port mappings
            CloudControllerContext.getInstance().persist();
        }
    }
}
/**
 * Checks whether the given node port is free, i.e. not already assigned to any port of
 * the given kubernetes services.
 *
 * @param services services to inspect
 * @param nodePort candidate node port
 * @return true when no service uses the port; false otherwise
 * @throws KubernetesClientException declared for caller compatibility
 */
private boolean nodePortAvailable(List<Service> services, int nodePort) throws KubernetesClientException {
    for (Service service : services) {
        for (ServicePort servicePort : service.getSpec().getPorts()) {
            // Node port is null for non NodePort services; check before unboxing
            Integer assignedNodePort = servicePort.getNodePort();
            if ((assignedNodePort != null) && (assignedNodePort == nodePort)) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Finds the cluster port mapping that corresponds to the given cartridge port mapping,
 * matching by port mapping name.
 *
 * @param clusterPortMappings cluster port mappings to search
 * @param portMapping         cartridge port mapping to match
 * @return the matching cluster port mapping, or null when none matches
 */
private ClusterPortMapping findClusterPortMapping(Collection<ClusterPortMapping> clusterPortMappings,
        PortMapping portMapping) {
    String targetName = portMapping.getName();
    for (ClusterPortMapping candidate : clusterPortMappings) {
        if (candidate.getName().equals(targetName)) {
            return candidate;
        }
    }
    return null;
}
/**
 * Terminates the container (kubernetes pod) of the given member. Pod deletion is
 * best-effort: a kubernetes client failure is logged but does not fail termination.
 *
 * @param memberContext member whose pod should be removed
 * @return the member context of the terminated member
 * @throws MemberTerminationFailedException if termination fails
 * @throws RegistryException                if persisting the cloud controller context fails
 */
public MemberContext terminateContainer(MemberContext memberContext)
        throws MemberTerminationFailedException, RegistryException {
    Lock lock = null;
    try {
        // Write lock guards the member context while the pod is removed
        lock = CloudControllerContext.getInstance().acquireMemberContextWriteLock();
        handleNullObject(memberContext, "Could not terminate container, member context not found");
        Partition partition = memberContext.getPartition();
        if (partition == null) {
            String message = String
                    .format("Partition not found in member context: [member] %s ", memberContext.getMemberId());
            log.error(message);
            throw new RuntimeException(message);
        }
        String kubernetesClusterId = memberContext.getPartition().getKubernetesClusterId();
        handleNullObject(kubernetesClusterId, String.format("Could not terminate container, kubernetes cluster "
                + "context id is null: [partition-id] %s [member-id] %s", partition.getId(),
                memberContext.getMemberId()));
        KubernetesClusterContext kubernetesClusterContext = CloudControllerContext.getInstance()
                .getKubernetesClusterContext(kubernetesClusterId);
        handleNullObject(kubernetesClusterContext, String.format(
                "Could not terminate container, kubernetes cluster "
                        + "context not found: [partition-id] %s [member-id] %s", partition.getId(),
                memberContext.getMemberId()));
        KubernetesApiClient kubApi = kubernetesClusterContext.getKubApi();
        try {
            log.info(String.format("Removing kubernetes pod: [application] %s [cartridge] %s [member] %s [pod] %s",
                    memberContext.getApplicationId(), memberContext.getCartridgeType(), memberContext.getMemberId(),
                    memberContext.getKubernetesPodId()));
            // Remove pod
            kubApi.deletePod(memberContext.getKubernetesPodId());
            // Persist changes
            CloudControllerContext.getInstance().persist();
            log.info(String.format("Kubernetes pod removed successfully: [application] %s [cartridge] %s "
                    + "[member] %s [pod] %s", memberContext.getApplicationId(),
                    memberContext.getCartridgeType(), memberContext.getMemberId(),
                    memberContext.getKubernetesPodId()));
        } catch (KubernetesClientException ignore) {
            // Best effort: nothing more we can do here; log the failure and continue
            log.warn(String.format("Could not delete pod: [pod-id] %s", memberContext.getKubernetesPodId()));
        }
        return memberContext;
    } finally {
        if (lock != null) {
            CloudControllerContext.getInstance().releaseWriteLock(lock);
        }
    }
}
/**
 * Returns the kubernetes cluster context for the given cluster id, creating and
 * registering a new one when it does not exist yet.
 *
 * @param kubernetesClusterId  id of the kubernetes cluster
 * @param kubernetesMasterIp   master node IP address
 * @param kubernetesMasterPort master node port
 * @param upperPort            upper bound of the service port range
 * @param lowerPort            lower bound of the service port range
 * @return the existing or newly created context
 */
private KubernetesClusterContext getKubernetesClusterContext(String kubernetesClusterId, String kubernetesMasterIp,
        String kubernetesMasterPort, int upperPort, int lowerPort) {
    KubernetesClusterContext existing = CloudControllerContext.getInstance()
            .getKubernetesClusterContext(kubernetesClusterId);
    if (existing != null) {
        return existing;
    }
    // Not registered yet; create a new context and cache it for reuse
    KubernetesClusterContext created = new KubernetesClusterContext(kubernetesClusterId, kubernetesMasterIp,
            kubernetesMasterPort, lowerPort, upperPort);
    CloudControllerContext.getInstance().addKubernetesClusterContext(created);
    return created;
}
/**
 * Reads a mandatory property value, failing fast with an IllegalArgumentException
 * (via handleNullObject) when the property is not defined.
 *
 * @param property   name of the property to read
 * @param properties property collection to read from
 * @param object     description of the owning object, used in the error message
 * @return the property value (never null)
 */
private String readProperty(String property, org.apache.stratos.common.Properties properties, String object) {
    String propVal = CloudControllerUtil.getProperty(properties, property);
    // Fixed: the error message previously had an unbalanced quote ("...property: 'name in obj")
    handleNullObject(propVal,
            "Property validation failed. Could not find property: '" + property + "' in " + object);
    return propVal;
}
/**
 * Validates that the given object is non-null; logs the error message and throws an
 * IllegalArgumentException otherwise.
 *
 * @param obj      object to validate
 * @param errorMsg message logged and used for the exception when obj is null
 */
private void handleNullObject(Object obj, String errorMsg) {
    if (obj != null) {
        return;
    }
    log.error(errorMsg);
    throw new IllegalArgumentException(errorMsg);
}
@Override
public void releaseAddress(String ip) {
    // No-op: there is no explicit IP address release step for the kubernetes IaaS.
}
/** Always valid: region validation does not apply to a kubernetes cluster. */
@Override
public boolean isValidRegion(String region) throws InvalidRegionException {
    // No regions in kubernetes cluster
    return true;
}
/** Always valid: zone validation does not apply to a kubernetes cluster. */
@Override
public boolean isValidZone(String region, String zone) throws InvalidZoneException, InvalidRegionException {
    // No zones in kubernetes cluster
    return true;
}
/** Always valid: host validation does not apply to a kubernetes cluster. */
@Override
public boolean isValidHost(String zone, String host) throws InvalidHostException {
    // No zones in kubernetes cluster
    return true;
}
/** Not supported: volume management is not implemented for the kubernetes IaaS. */
@Override
public String createVolume(int sizeGB, String snapshotId) {
    throw new NotImplementedException();
}
/** Not supported: volume management is not implemented for the kubernetes IaaS. */
@Override
public String attachVolume(String instanceId, String volumeId, String deviceName) {
    throw new NotImplementedException();
}
/** Not supported: volume management is not implemented for the kubernetes IaaS. */
@Override
public void detachVolume(String instanceId, String volumeId) {
    throw new NotImplementedException();
}
/** Not supported: volume management is not implemented for the kubernetes IaaS. */
@Override
public void deleteVolume(String volumeId) {
    throw new NotImplementedException();
}
/** Not supported: device mapping is not implemented for the kubernetes IaaS. */
@Override
public String getIaasDevice(String device) {
    throw new NotImplementedException();
}
@Override
public void allocateIpAddresses(String clusterId, MemberContext memberContext, Partition partition) {
    // No-op: kubernetes assigns pod/service IPs itself; nothing to allocate here.
}
/**
 * Removes all kubernetes services created for the given cluster instance, deallocating
 * their service ports and unregistering them from the cluster context. Failures on
 * individual services are logged and do not stop the removal of the remaining ones.
 *
 * @param clusterContext    cluster whose services should be removed; ignored when null
 * @param clusterInstanceId cluster instance the services belong to
 */
public static void removeKubernetesServices(ClusterContext clusterContext, String clusterInstanceId) {
    if (clusterContext != null) {
        // Iterate over a copy so services can be removed from the cluster context mid-loop
        ArrayList<KubernetesService> kubernetesServices = Lists
                .newArrayList(clusterContext.getKubernetesServices(clusterInstanceId));
        for (KubernetesService kubernetesService : kubernetesServices) {
            // NOTE(review): getKubernetesClusterContext(...) may return null here, which would
            // NPE on getKubApi() — confirm whether a null-check is needed.
            KubernetesClusterContext kubernetesClusterContext = CloudControllerContext.getInstance()
                    .getKubernetesClusterContext(kubernetesService.getKubernetesClusterId());
            KubernetesApiClient kubernetesApiClient = kubernetesClusterContext.getKubApi();
            String serviceId = kubernetesService.getId();
            log.info(String.format("Deleting kubernetes service: [application-id] %s " + "[service-id] %s",
                    clusterContext.getApplicationId(), serviceId));
            try {
                kubernetesApiClient.deleteService(serviceId);
                kubernetesClusterContext.deallocatePort(kubernetesService.getPort());
                clusterContext.removeKubernetesService(clusterInstanceId, serviceId);
            } catch (KubernetesClientException e) {
                log.error(String.format(
                        "Could not delete kubernetes service: [application-id] %s " + "[service-id] %s",
                        clusterContext.getApplicationId(), serviceId), e);
            }
        }
    }
}
}
| |
/*
* ******************************************************************************
* Copyright 2016-2018 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ******************************************************************************
*/
package com.spectralogic.dsbrowser.gui.services.ds3Panel;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.spectralogic.ds3client.commands.GetBucketRequest;
import com.spectralogic.ds3client.commands.GetBucketResponse;
import com.spectralogic.ds3client.commands.spectrads3.GetBucketsSpectraS3Request;
import com.spectralogic.ds3client.commands.spectrads3.GetBucketsSpectraS3Response;
import com.spectralogic.ds3client.models.Bucket;
import com.spectralogic.ds3client.models.ListBucketResult;
import com.spectralogic.ds3client.models.PhysicalPlacement;
import com.spectralogic.ds3client.utils.Guard;
import com.spectralogic.dsbrowser.api.services.logging.LogType;
import com.spectralogic.dsbrowser.api.services.logging.LoggingService;
import com.spectralogic.dsbrowser.gui.components.ds3panel.Ds3Common;
import com.spectralogic.dsbrowser.gui.components.ds3panel.Ds3PanelPresenter;
import com.spectralogic.dsbrowser.gui.components.ds3panel.ds3treetable.Ds3TreeTableItem;
import com.spectralogic.dsbrowser.gui.components.ds3panel.ds3treetable.Ds3TreeTableValue;
import com.spectralogic.dsbrowser.gui.components.metadata.Ds3Metadata;
import com.spectralogic.dsbrowser.gui.components.metadata.MetadataView;
import com.spectralogic.dsbrowser.gui.components.physicalplacement.PhysicalPlacementPopup;
import com.spectralogic.dsbrowser.gui.services.Workers;
import com.spectralogic.dsbrowser.gui.services.sessionStore.Session;
import com.spectralogic.dsbrowser.gui.services.tasks.MetadataTask;
import com.spectralogic.dsbrowser.gui.services.tasks.PhysicalPlacementTask;
import com.spectralogic.dsbrowser.gui.services.tasks.SearchJobTask;
import com.spectralogic.dsbrowser.gui.util.*;
import com.spectralogic.dsbrowser.gui.util.treeItem.SafeHandler;
import com.spectralogic.dsbrowser.util.GuavaCollectors;
import javafx.application.Platform;
import javafx.collections.FXCollections;
import javafx.collections.ObservableList;
import javafx.scene.control.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import java.time.Instant;
import java.util.Comparator;
import java.util.Optional;
import java.util.ResourceBundle;
import java.util.stream.Collectors;
public final class Ds3PanelService {
private static final Logger LOG = LoggerFactory.getLogger(Ds3PanelService.class);
private static Instant lastRefresh = Instant.now();
/**
 * Checks if a bucket contains any objects or folders.
 *
 * @param bucketName name of the bucket to check
 * @param session    session whose client is used to query the bucket
 * @return true if the bucket is empty; false if it has contents or the request fails
 */
public static boolean checkIfBucketEmpty(final String bucketName, final Session session) {
    try {
        // A single key is enough to decide emptiness
        final GetBucketRequest request = new GetBucketRequest(bucketName).withMaxKeys(1);
        final GetBucketResponse bucketResponse = session.getClient().getBucket(request);
        final ListBucketResult listBucketResult = bucketResponse.getListBucketResult();
        return Guard.isNullOrEmpty(listBucketResult.getObjects()) && Guard.isNullOrEmpty(listBucketResult.getCommonPrefixes());
    } catch (final Exception e) {
        LOG.error("could not get bucket response", e);
        return false;
    }
}
/**
 * Resolves the buckets that should be searched: when tree items are selected (or the
 * tree root is usable as a fallback) only the matching buckets are returned; otherwise
 * all buckets are returned.
 *
 * @param selectedItem  currently selected tree items; may be null
 * @param session       session used to list buckets
 * @param treeTableView tree table the selection belongs to; must not be null
 * @return the searchable buckets, or Optional.empty() when the lookup fails
 */
public static Optional<ImmutableList<Bucket>> setSearchableBucket(final ObservableList<TreeItem<Ds3TreeTableValue>> selectedItem,
        final Session session,
        final TreeTableView<Ds3TreeTableValue> treeTableView) {
    try {
        if (null != treeTableView) {
            ObservableList<TreeItem<Ds3TreeTableValue>> selectedItemTemp = selectedItem;
            // No explicit selection: fall back to the root item when it carries a value
            if (null == selectedItemTemp) {
                selectedItemTemp = FXCollections.observableArrayList();
                if (null != treeTableView.getRoot() && null != treeTableView.getRoot().getValue()) {
                    selectedItemTemp.add(treeTableView.getRoot());
                }
            }
            final GetBucketsSpectraS3Request getBucketsSpectraS3Request = new GetBucketsSpectraS3Request();
            final GetBucketsSpectraS3Response response = session.getClient().getBucketsSpectraS3(getBucketsSpectraS3Request);
            final ImmutableList<Bucket> buckets = response.getBucketListResult().getBuckets().stream().collect(GuavaCollectors.immutableList());
            if (!Guard.isNullOrEmpty(selectedItemTemp)) {
                // Restrict the result to the buckets of the selected items
                final ImmutableSet<String> bucketNameSet = selectedItemTemp.stream().map(item -> item.getValue()
                        .getBucketName()).collect(GuavaCollectors.immutableSet());
                return Optional.ofNullable(buckets.stream().filter(bucket -> bucketNameSet.contains(bucket.getName())).collect
                        (GuavaCollectors.immutableList()));
            } else {
                return Optional.ofNullable(buckets);
            }
        } else {
            // NOTE(review): this NPE is caught by the catch block below and converted to
            // Optional.empty(), so callers never observe it — confirm this is intended.
            throw new NullPointerException("TreeTableView can't be null");
        }
    } catch (final Exception e) {
        LOG.error("Something went wrong!", e);
        return Optional.empty();
    }
}
/**
 * Refreshes the tree row for the given item. For a file, the parent row is refreshed
 * instead; collapsed rows are expanded (and refreshed when their children were already
 * loaded).
 *
 * @param modifiedTreeItem item whose row should be refreshed
 */
public static void refresh(final TreeItem<Ds3TreeTableValue> modifiedTreeItem) {
    LOG.info("Running refresh of row");
    if (!(modifiedTreeItem instanceof Ds3TreeTableItem)) {
        return;
    }
    // Files have no children of their own, so refresh their parent row instead
    final boolean isFile = modifiedTreeItem.getValue().getType().equals(Ds3TreeTableValue.Type.File);
    final Ds3TreeTableItem item = isFile
            ? (Ds3TreeTableItem) modifiedTreeItem.getParent()
            : (Ds3TreeTableItem) modifiedTreeItem;
    if (item == null) {
        return;
    }
    if (item.isExpanded()) {
        item.refresh();
    } else if (item.isAccessedChildren()) {
        // Children were loaded before; expand and reload them
        item.setExpanded(true);
        item.refresh();
    } else {
        // Expanding triggers the initial load; no explicit refresh needed
        item.setExpanded(true);
    }
}
/**
 * Refreshes the given row at most once every five seconds; calls inside the throttle
 * window (or with a null item) are ignored.
 *
 * @param modifiedTreeItem item whose row should be refreshed; may be null
 */
public static void throttledRefresh(final TreeItem<Ds3TreeTableValue> modifiedTreeItem) {
    if (modifiedTreeItem == null) {
        return;
    }
    // Suppress the refresh while the 5 second throttle window is still open
    if (!lastRefresh.plus(Duration.ofSeconds(5)).isBefore(Instant.now())) {
        return;
    }
    lastRefresh = Instant.now();
    refresh(modifiedTreeItem);
}
/**
 * Shows the physical placement popup for the single selected item, falling back to the
 * tree root when nothing is selected. Alerts when there is no usable selection or when
 * more than one item is selected.
 *
 * @param ds3Common      shared UI state providing the tree table
 * @param workers        worker pool used to run the placement task
 * @param resourceBundle localized strings for alerts and logging
 */
public static void showPhysicalPlacement(final Ds3Common ds3Common, final Workers workers, final ResourceBundle resourceBundle) {
    ImmutableList<TreeItem<Ds3TreeTableValue>> tempValues = ds3Common.getDs3TreeTableView().getSelectionModel().getSelectedItems()
            .stream().collect(GuavaCollectors.immutableList());
    final TreeItem<Ds3TreeTableValue> root = ds3Common.getDs3TreeTableView().getRoot();
    if (tempValues.isEmpty() && (root == null || root.getValue() != null)) {
        // NOTE(review): this guard differs from filterChanged(), which treats the root as
        // usable when root.getValue() != null — confirm the condition is not inverted.
        LOG.info(resourceBundle.getString("nothingSelected"));
        new LazyAlert(resourceBundle).info(resourceBundle.getString("nothingSelected"));
        return;
    } else if (tempValues.isEmpty()) {
        // Fall back to the root item as the selection
        final ImmutableList.Builder<TreeItem<Ds3TreeTableValue>> builder = ImmutableList.builder();
        tempValues = builder.add(root).build().asList();
    }
    final ImmutableList<TreeItem<Ds3TreeTableValue>> values = tempValues;
    if (values.size() > 1) {
        LOG.info(resourceBundle.getString("onlySingleObjectSelectForPhysicalPlacement"));
        new LazyAlert(resourceBundle).info(resourceBundle.getString("onlySingleObjectSelectForPhysicalPlacement"));
        return;
    }
    final PhysicalPlacementTask getPhysicalPlacement = new PhysicalPlacementTask(ds3Common, values, workers);
    // Attach the success handler BEFORE submitting the task; a task that completes before
    // setOnSucceeded is registered would never fire the handler and the popup would not show.
    getPhysicalPlacement.setOnSucceeded(SafeHandler.logHandle(event -> Platform.runLater(() -> {
        LOG.info("Launching PhysicalPlacement popup");
        PhysicalPlacementPopup.show((PhysicalPlacement) getPhysicalPlacement.getValue(), resourceBundle);
    })));
    workers.execute(getPhysicalPlacement);
}
/**
 * Shows the metadata popup for the single selected object. Alerts when nothing or more
 * than one item is selected.
 *
 * @param ds3Common      shared UI state providing the tree table
 * @param workers        worker pool used to run the metadata task
 * @param resourceBundle localized strings for alerts and logging
 */
@SuppressWarnings("unchecked")
public static void showMetadata(final Ds3Common ds3Common, final Workers workers, final ResourceBundle resourceBundle) {
    final TreeTableView ds3TreeTableView = ds3Common.getDs3TreeTableView();
    final ImmutableList<TreeItem<Ds3TreeTableValue>> values = (ImmutableList<TreeItem<Ds3TreeTableValue>>) ds3TreeTableView.getSelectionModel().getSelectedItems().stream().collect(GuavaCollectors.immutableList());
    if (values.isEmpty()) {
        LOG.info(resourceBundle.getString("noFiles"));
        new LazyAlert(resourceBundle).info(resourceBundle.getString("noFiles"));
        return;
    }
    if (values.size() > 1) {
        LOG.info(resourceBundle.getString("onlySingleObjectSelectForMetadata"));
        new LazyAlert(resourceBundle).info(resourceBundle.getString("onlySingleObjectSelectForMetadata"));
        return;
    }
    final MetadataTask getMetadata = new MetadataTask(ds3Common, values);
    // Attach the success handler BEFORE submitting the task; a task that completes before
    // setOnSucceeded is registered would never fire the handler and the popup would not show.
    getMetadata.setOnSucceeded(SafeHandler.logHandle(event -> Platform.runLater(() -> {
        LOG.info("Launching metadata popup");
        final MetadataView metadataView = new MetadataView((Ds3Metadata) getMetadata.getValue());
        Popup.show(metadataView.getView(), resourceBundle.getString("metaDataContextMenu"));
    })));
    workers.execute(getMetadata);
}
// Reacts to a change of the search text: an empty query restores the full tree view,
// a non-empty query runs an asynchronous SearchJobTask and replaces the tree root
// with the (type-sorted) results. UI mutations happen via Platform.runLater.
public static void filterChanged(final Ds3Common ds3Common, final Workers workers, final LoggingService loggingService, final ResourceBundle resourceBundle, final DateTimeUtils dateTimeUtils) {
final Ds3PanelPresenter ds3PanelPresenter = ds3Common.getDs3PanelPresenter();
final String newValue = ds3PanelPresenter.getSearchedText();
// Show a "searching" indicator immediately, before any work is queued.
ds3PanelPresenter.getDs3PathIndicator().setText(resourceBundle.getString("searching"));
ds3PanelPresenter.getDs3PathIndicatorTooltip().setText(resourceBundle.getString("searching"));
final TreeTableView<Ds3TreeTableValue> ds3TreeTableView = ds3Common.getDs3TreeTableView();
final Session session = ds3Common.getCurrentSession();
if (Guard.isStringNullOrEmpty(newValue)) {
// Empty filter: restore the info labels and reload the unfiltered tree.
setVisibilityOfItemsInfo(true, ds3Common);
RefreshCompleteViewWorker.refreshCompleteTreeTableView(ds3Common, workers, dateTimeUtils, loggingService);
} else {
try {
ObservableList<TreeItem<Ds3TreeTableValue>> selectedItem = ds3TreeTableView.getSelectionModel().getSelectedItems();
final TreeItem<Ds3TreeTableValue> root = ds3TreeTableView.getRoot();
// With no explicit selection, fall back to searching from the (non-empty) root.
if (Guard.isNullOrEmpty(selectedItem) && (root != null && root.getValue() != null)) {
selectedItem = FXCollections.observableArrayList();
selectedItem.add(root);
}
final Optional<ImmutableList<Bucket>> searchableBuckets = Ds3PanelService.setSearchableBucket(selectedItem, session,
ds3TreeTableView);
// Fresh root that will receive the search results; the old tree is replaced below.
final TreeItem<Ds3TreeTableValue> rootTreeItem = new TreeItem<>();
rootTreeItem.setExpanded(true);
ds3TreeTableView.setShowRoot(false);
setVisibilityOfItemsInfo(false, ds3Common);
// NOTE(review): searchableBuckets.get() is called without an isPresent() check;
// an empty Optional would throw here and be swallowed by the outer catch — confirm
// setSearchableBucket can never return empty, or handle it explicitly.
final SearchJobTask searchJobTask = new SearchJobTask(searchableBuckets.get(), newValue, session, workers, ds3Common, dateTimeUtils, loggingService);
workers.execute(searchJobTask);
searchJobTask.setOnSucceeded(SafeHandler.logHandle(event -> {
LOG.info("Search completed!");
// All tree/label updates must run on the JavaFX application thread.
Platform.runLater(() -> {
try {
final ObservableList<Ds3TreeTableItem> treeTableItems = FXCollections.observableArrayList(searchJobTask.get().stream().collect(Collectors.toList()));
// Report the hit count in the path indicator, its tooltip, and the log pane.
ds3PanelPresenter.getDs3PathIndicator().setText(StringBuilderUtil.nObjectsFoundMessage(treeTableItems.size()).toString());
ds3PanelPresenter.getDs3PathIndicatorTooltip().setText(StringBuilderUtil.nObjectsFoundMessage(treeTableItems.size()).toString());
loggingService.logMessage(
StringBuilderUtil.nObjectsFoundMessage(treeTableItems.size()).toString(), LogType.INFO);
// Sort results by their type's string form before attaching them to the new root.
treeTableItems.sort(Comparator.comparing(t -> t.getValue().getType().toString()));
treeTableItems.forEach(value -> rootTreeItem.getChildren().add(value));
if (rootTreeItem.getChildren().size() == 0) {
ds3TreeTableView.setPlaceholder(new Label(resourceBundle.getString("0_SearchResult")));
}
ds3TreeTableView.setRoot(rootTreeItem);
// Re-show the second column; presumably hidden by another view state — TODO confirm.
final TreeTableColumn<Ds3TreeTableValue, ?> ds3TreeTableValueTreeTableColumn = ds3TreeTableView
.getColumns().get(1);
if (null != ds3TreeTableValueTreeTableColumn) {
ds3TreeTableValueTreeTableColumn.setVisible(true);
}
} catch (final Exception e) {
LOG.error("Search failed", e);
loggingService.logMessage(StringBuilderUtil.searchFailedMessage().append(e).toString(), LogType.ERROR);
}
});
}));
searchJobTask.setOnCancelled(SafeHandler.logHandle(event -> LOG.info("Search cancelled")));
} catch (final Exception e) {
LOG.error("Could not complete search: ", e);
}
}
}
// Shows or hides the info/capacity labels of the DS3 panel together.
private static void setVisibilityOfItemsInfo(final boolean visibility, final Ds3Common ds3Common) {
    final Ds3PanelPresenter presenter = ds3Common.getDs3PanelPresenter();
    presenter.getInfoLabel().setVisible(visibility);
    presenter.getCapacityLabel().setVisible(visibility);
}
}
| |
package com.mindfire.review.services;
import java.util.ArrayList;
import java.util.List;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import com.mindfire.review.exceptions.AlreadyReviewedException;
import com.mindfire.review.exceptions.AuthorExistenceException;
import com.mindfire.review.exceptions.ReviewDoesnotExistException;
import com.mindfire.review.util.Utility;
import com.mindfire.review.web.dto.AuthorDto;
import com.mindfire.review.web.models.Author;
import com.mindfire.review.web.models.AuthorLike;
import com.mindfire.review.web.models.Book;
import com.mindfire.review.web.models.BookAuthor;
import com.mindfire.review.web.models.ReviewAuthor;
import com.mindfire.review.web.models.ReviewBook;
import com.mindfire.review.web.models.User;
import com.mindfire.review.web.repositories.AuthorLikeRepository;
import com.mindfire.review.web.repositories.AuthorRepository;
import com.mindfire.review.web.repositories.BookAuthorRepository;
import com.mindfire.review.web.repositories.ReviewAuthorRepository;
import com.mindfire.review.web.repositories.ReviewBookRepository;
/**
* @author pratyasa
*/
@Service
@Service
public class AuthorServiceImpl implements AuthorService {

    @Autowired
    private AuthorRepository authorRepository;

    @Autowired
    private ReviewAuthorRepository reviewAuthorRepository;

    @Autowired
    private BookAuthorRepository bookAuthorRepository;

    @Autowired
    private ReviewBookRepository reviewBookRepository;

    @Autowired
    private BookService bookService;

    @Autowired
    private UserService userService;

    @Autowired
    private AuthorLikeRepository authorLikeRepository;

    /**
     * Returns one page of all authors.
     *
     * @param pageno page index
     * @param size   page size
     */
    @Override
    public Page<Author> getAllAuthor(int pageno, int size) {
        Pageable page = Utility.buildPageRequest(size, pageno);
        return authorRepository.findAll(page);
    }

    /** Returns every author, unpaged. */
    @Override
    public List<Author> getAllAuthor() {
        return authorRepository.findAll();
    }

    /**
     * Case-insensitive substring search over author names; page size fixed at 10.
     *
     * @param name substring to match
     * @param page page index
     */
    @Override
    public Page<Author> getAuthorByNameLike(String name, int page) {
        return authorRepository.findByAuthorNameContainsIgnoreCase(name, Utility.buildPageRequest(10, page));
    }

    /**
     * Exact (case-insensitive) lookup of a single author by name.
     *
     * @return the author, or null when no such author exists
     */
    @Override
    public Author getAuthorByName(String name) {
        return authorRepository.findByAuthorNameIgnoreCase(name);
    }

    /** Returns one page of authors with exactly the given rating. */
    @Override
    public Page<Author> getAuthorByRating(float rating, int pageno, int size) {
        Pageable page = Utility.buildPageRequest(size, pageno);
        return authorRepository.findByAuthorRating(rating, page);
    }

    /** Returns all authors with exactly the given rating. */
    @Override
    public List<Author> getAuthorByRating(float rating) {
        return authorRepository.findByAuthorRating(rating);
    }

    /** Returns one page of authors whose genre contains the given text (case-insensitive). */
    @Override
    public Page<Author> getAuthorByGenre(String genre, int pageno, int size) {
        Pageable page = Utility.buildPageRequest(size, pageno);
        return authorRepository.findByAuthorGenreContainsIgnoreCase(genre, page);
    }

    /** Returns all authors whose genre contains the given text (case-insensitive). */
    @Override
    public List<Author> getAuthorByGenre(String genre) {
        return authorRepository.findByAuthorGenreContainsIgnoreCase(genre);
    }

    /** Returns all reviews written about the named author. */
    @Override
    public List<ReviewAuthor> getAuthorReviewByAuthorName(String name) {
        return reviewAuthorRepository.findByAuthor(getAuthorByName(name));
    }

    /** Returns the number of reviews written about the named author. */
    @Override
    public int getTotalAuthorReviewByAuthorName(String name) {
        return reviewAuthorRepository.findByAuthor(getAuthorByName(name)).size();
    }

    /**
     * Returns a page of reviews about the named author.
     *
     * @param name   author name
     * @param pageno page index
     * @param size   page size
     */
    @Override
    public Page<ReviewAuthor> getAuthorReviewByAuthorName(String name, int pageno, int size) {
        Pageable page = Utility.buildPageRequest(size, pageno);
        List<ReviewAuthor> reviewAuthors = reviewAuthorRepository.findByAuthor(getAuthorByName(name));
        // BUG FIX: PageImpl's third argument is the TOTAL number of elements, not the
        // page size; passing 'size' corrupted the pagination metadata.
        // NOTE(review): the content is still the full list rather than one page slice —
        // kept as before to preserve existing behavior; consider a paged repository query.
        return new PageImpl<>(reviewAuthors, page, reviewAuthors.size());
    }

    /**
     * Looks up an author by primary key.
     *
     * @return the author, or null when the id is unknown
     */
    @Override
    public Author getAuthorById(Long authorId) {
        return authorRepository.findOne(authorId);
    }

    /** Returns every book review on any book authored by the named author. */
    @Override
    public List<ReviewBook> getBookReviewByAuthorName(String name) {
        List<ReviewBook> reviewBookList = new ArrayList<>();
        for (Book book : getBookByAuthor(name)) {
            reviewBookList.addAll(reviewBookRepository.findByBook(book));
        }
        return reviewBookList;
    }

    /** Paged variant of {@link #getBookReviewByAuthorName(String)}. */
    @Override
    public Page<ReviewBook> getBookReviewByAuthorName(String name, int pageno, int size) {
        // Delegate to the unpaged variant instead of duplicating the collection loop.
        List<ReviewBook> reviewBookList = getBookReviewByAuthorName(name);
        Pageable page = Utility.buildPageRequest(size, pageno);
        // BUG FIX: total element count, not page size (see getAuthorReviewByAuthorName).
        return new PageImpl<>(reviewBookList, page, reviewBookList.size());
    }

    /** Returns all books authored by the named author. */
    @Override
    public List<Book> getBookByAuthor(String name) {
        List<BookAuthor> bookAuthors = bookAuthorRepository.findByAuthor(authorRepository.findByAuthorNameIgnoreCase(name));
        List<Book> books = new ArrayList<>(bookAuthors.size());
        for (BookAuthor bookAuthor : bookAuthors) {
            books.add(bookAuthor.getBook());
        }
        return books;
    }

    /** Paged variant of {@link #getBookByAuthor(String)}. */
    @Override
    public Page<Book> getBookByAuthor(String name, int pageno, int size) {
        List<Book> books = getBookByAuthor(name);
        Pageable page = Utility.buildPageRequest(size, pageno);
        // BUG FIX: total element count, not page size.
        return new PageImpl<>(books, page, books.size());
    }

    /** Returns every user who has reviewed any book by the named author. */
    @Override
    public List<User> getUserByAuthor(String name) {
        List<User> users = new ArrayList<>();
        for (Book book : getBookByAuthor(name)) {
            users.addAll(bookService.getUserByBookReview(book.getBookName()));
        }
        return users;
    }

    /** Paged variant of {@link #getUserByAuthor(String)}. */
    @Override
    public Page<User> getUserByAuthor(String name, int pageno, int size) {
        List<User> users = getUserByAuthor(name);
        Pageable page = Utility.buildPageRequest(size, pageno);
        // BUG FIX: total element count, not page size.
        return new PageImpl<>(users, page, users.size());
    }

    /**
     * Creates a new author from the DTO.
     *
     * @throws AuthorExistenceException when an author with the same name already exists
     * @throws NullPointerException     when the DTO is null
     */
    @Override
    public void addAuthor(AuthorDto authorDto) throws AuthorExistenceException {
        if (authorDto == null) {
            // Typo fixed in message ("sjould" -> "should").
            throw new NullPointerException("Author Dto should not be null.");
        }
        if (authorRepository.findByAuthorNameIgnoreCase(authorDto.getAuthorName()) != null) {
            throw new AuthorExistenceException("Author already exists.");
        }
        Author author = new Author();
        copyDtoToAuthor(authorDto, author);
        authorRepository.save(author);
    }

    /**
     * Updates an existing author with the DTO's values.
     *
     * @throws AuthorExistenceException when no author with the given id exists
     * @throws NullPointerException     when the DTO is null
     */
    @Override
    public void updateAuthor(AuthorDto authorDto, Long authorId) throws AuthorExistenceException {
        Author author = authorRepository.findOne(authorId);
        if (authorDto == null) {
            throw new NullPointerException("Sorry for the internal error caused.");
        }
        if (author == null) {
            // Typo fixed in message ("esist" -> "exist").
            throw new AuthorExistenceException("Author does not exist");
        }
        copyDtoToAuthor(authorDto, author);
        authorRepository.save(author);
    }

    // Copies the editable fields from the DTO onto the entity (shared by add/update).
    private static void copyDtoToAuthor(AuthorDto authorDto, Author author) {
        author.setAuthorName(authorDto.getAuthorName());
        author.setAuthorGenre(authorDto.getAuthorGenre());
        author.setAuthorRating(authorDto.getAuthorRating());
        author.setAuthorDescription(authorDto.getAuthorDescription());
    }

    /**
     * Deletes an author by id.
     *
     * @throws AuthorExistenceException when no author with the given id exists
     */
    @Override
    public void removeAuthor(Long authorId) throws AuthorExistenceException {
        Author author = authorRepository.findOne(authorId);
        if (author == null) {
            throw new AuthorExistenceException("Author does not exist");
        }
        authorRepository.delete(author);
    }

    /**
     * Records a "like" of the author by the named user and refreshes the
     * author's denormalized like count.
     *
     * @throws AlreadyReviewedException when the user already liked this author
     */
    @Override
    public void addAuthorLikeByUser(String userName, Long authorId) throws AlreadyReviewedException {
        User user = userService.getUser(userName);
        Author author = getAuthorById(authorId);
        if (authorLikeRepository.findByAuthorAndUser(author, user) != null) {
            throw new AlreadyReviewedException("already liked the author");
        }
        AuthorLike authorLike = new AuthorLike();
        authorLike.setAuthor(author);
        authorLike.setUser(user);
        authorLike = authorLikeRepository.save(authorLike);
        // BUG FIX: validate the persisted entity BEFORE relying on the save having
        // succeeded (the original checked only after recomputing the like count).
        if (authorLike == null) {
            throw new NullPointerException("Unable to persist into the database. " + author.getAuthorName());
        }
        // getNumberOfLikesByUser already sets authorLikes and saves the author,
        // so the original extra set/save round-trip was redundant.
        getNumberOfLikesByUser(authorId);
    }

    /**
     * Removes the named user's "like" of the author.
     *
     * @throws ReviewDoesnotExistException when no such like exists
     */
    @Override
    public void removeAuthorLikeByUser(String userName, Long authorId) throws ReviewDoesnotExistException {
        User user = userService.getUser(userName);
        Author author = getAuthorById(authorId);
        AuthorLike authorLike = authorLikeRepository.findByAuthorAndUser(author, user);
        if (authorLike == null) {
            // Typo fixed in message ("doesnot" -> "does not").
            throw new ReviewDoesnotExistException("the author like does not exist.");
        }
        authorLikeRepository.delete(authorLike);
        // BUG FIX: keep the denormalized like count in sync after a dislike,
        // mirroring addAuthorLikeByUser (it was previously left stale).
        getNumberOfLikesByUser(authorId);
    }

    /**
     * Recomputes, persists, and returns the author's total like count.
     *
     * @param authorId author primary key
     * @return the current number of likes
     */
    @Override
    public int getNumberOfLikesByUser(Long authorId) {
        Author author = getAuthorById(authorId);
        int likes = authorLikeRepository.findByAuthor(author).size();
        author.setAuthorLikes(likes);
        authorRepository.save(author);
        return likes;
    }

    /** Returns the top-10 authors by like count (only authors with at least one like). */
    @Override
    public List<Author> getTop10Authors() {
        return authorRepository.findTop10ByAuthorLikesGreaterThanOrderByAuthorLikesDesc(0);
    }
}
| |
package com.tapad.tracking;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.preference.PreferenceManager;
import android.text.TextUtils;
import com.tapad.tracking.deviceidentification.*;
import com.tapad.util.Logging;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
/**
* Public entry-point to the tracking API.
*/
public class Tracking {
// SharedPreferences keys for the persisted device id and one-shot event flags.
protected static final String PREF_TAPAD_DEVICE_ID = "_tapad_device_id";
protected static final String PREF_INSTALL_SENT = "_tapad_install_sent";
protected static final String PREF_FIRST_RUN_SENT = "_tapad_first_run_sent";
protected static final String EVENT_INSTALL = "install";
protected static final String EVENT_FIRST_RUN = "first-run";
// Sentinel device id stored on opt-out; all opted-out devices share this value.
public static final String OPTED_OUT_DEVICE_ID = "OptedOut";
// Static mutable state. Initialization is guarded by the synchronized block in
// setupAPI; deviceId/typedDeviceIds are later mutated by optOut/optIn/collectIds
// without synchronization — NOTE(review): presumably these are only called from
// the main thread; confirm before relying on visibility across threads.
private static TrackingService service = null;
private static String deviceId;
private static String typedDeviceIds;
private static IdentifierSource idCollector = new IdentifierSourceAggregator(defaultIdSources());
// Indirection handed to the event layer so it always reads the CURRENT id state,
// including a later opt-out/opt-in, instead of a snapshot taken at init time.
private static DeviceIdentifier deviceIdLocator = new DeviceIdentifier() {
@Override
public String get() {
return deviceId;
}
@Override
public String getTypedIds() {
return typedDeviceIds;
}
@Override
public boolean isOptedOut() {
return Tracking.isOptedOut();
}
};
/**
 * Initializes the tracking API with application id as specified in AndroidManifest.xml:
 * <p/>
 * <application>
 * <meta-data android:name="tapad.APP_ID" android:value="INSERT_APP_ID_HERE"/>
 * ...
 * </application>
 * <p/>
 * The default id sources are AndroidId, PhoneId, and WifiMac, but
 * this can be configured to suit the developer's privacy policy through the AndroidManifest.xml:
 * <p/>
 * <application>
 * <meta-data android:name="tapad.ID_SOURCES" android:value="AndroidId,PhoneId,WifiMac"/>
 * ...
 * </application>
 *
 * @param context a context reference
 */
public static void init(Context context) {
init(context, null, null);
}
/**
 * Initializes the tracking API using the supplied application id. If the
 * supplied value is null or consist only of white space, then the AndroidManifest.xml
 * values are used (@see #init(android.content.Context)).
 * <p/>
 * If the idSources is null or empty, then the AndroidManifest.xml values are used (@see #init(android.content.Context)).
 * <p/>
 * One of the initialization functions must be called before TrackingService.get().
 *
 * @param context a context reference
 * @param appId the application identifier
 * @param idSources a list of identifier sources to use to collect ids
 * @see #init(android.content.Context)
 */
public static void init(Context context, String appId, List<IdentifierSource> idSources) {
setupAPI(context, appId, idSources);
SharedPreferences prefs = PreferenceManager.getDefaultSharedPreferences(context);
// The install event may have been sent by the InstallReferrerReceiver,
// so first-run and install are not always sent at the same time.
// Since 3.x, the marketplace behavior has been to fire the INSTALL_REFERRER intent
// after first launch. So we are leaving FIRST_RUN here and letting the InstallReferrerReceiver
// fire the INSTALL event. Otherwise, we will either get two install events or one without the
// referrer value, which is useful for determining proper attribution.
if (!prefs.getBoolean(PREF_FIRST_RUN_SENT, false)) {
get().onEvent(EVENT_FIRST_RUN);
// NOTE(review): synchronous commit() blocks the calling thread; apply() is usually
// preferred on Android — confirm the synchronous write is intentional here.
prefs.edit().putBoolean(PREF_FIRST_RUN_SENT, true).commit();
}
}
/**
 * Configures the API. Idempotent: only the first call (while {@code service} is
 * null) has any effect; resolution order for both appId and idSources is
 * explicit argument, then AndroidManifest.xml meta-data, then built-in defaults.
 */
protected static void setupAPI(Context context, String appId, List<IdentifierSource> idSources) {
synchronized (Tracking.class) {
if (service == null) {
if (appId == null || appId.trim().length() == 0) {
// Fall back to the tapad.APP_ID manifest meta-data entry.
try {
ApplicationInfo ai = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_META_DATA);
Object appIdMetaData = ai.metaData.get("tapad.APP_ID");
if (appIdMetaData == null)
throw new RuntimeException("tapad.APP_ID is not set in AndroidManifest.xml");
else
appId = appIdMetaData.toString();
} catch (Exception e) {
throw new RuntimeException("No app id specified and unable to read tapad.APP_ID from AndroidManifest.xml", e);
}
}
if (idSources == null || idSources.isEmpty()) {
// Fall back to the comma-separated class names in tapad.ID_SOURCES;
// each is instantiated reflectively from the deviceidentification package.
try {
ApplicationInfo ai = context.getPackageManager().getApplicationInfo(context.getPackageName(), PackageManager.GET_META_DATA);
String[] idSourceClasses = ai.metaData.getString("tapad.ID_SOURCES").split(",");
idSources = new ArrayList<IdentifierSource>();
for (String className : idSourceClasses) {
try {
idSources.add((IdentifierSource) Class.forName("com.tapad.tracking.deviceidentification." + className.trim()).newInstance());
} catch (Exception e) {
// A single bad class name is skipped, not fatal.
Logging.warn("Tracking", "Unable to instantiate identifier source: " + className.trim());
}
}
if (idSources.isEmpty()) {
idSources = defaultIdSources();
}
} catch (Exception e) {
// Missing/unreadable meta-data: use the built-in default sources.
idSources = defaultIdSources();
}
}
idCollector = new IdentifierSourceAggregator(idSources);
// Order matters: ids must be collected before the service captures deviceIdLocator.
collectIds(context);
service = new TrackingServiceImpl(
new EventDispatcher(new EventResource(appId, deviceIdLocator, DeviceInfo.getUserAgent(context)))
);
}
}
}
/**
 * Creates the default identifier sources to use should none be specified.
 * The default is all.
 *
 * @return the list of default id sources
 */
private static List<IdentifierSource> defaultIdSources() {
return Arrays.asList(new AndroidId(), new PhoneId(), new WifiMac());
}
/**
 * Uses the idCollector to generate ids, if any. This is not done if the user is already opted out through
 * preferences. If there were no ids generated, a random UUID is generated and persisted through
 * preferences.
 *
 * @param context context object used to find/collect/persist ids
 */
private static void collectIds(Context context) {
deviceId = PreferenceManager.getDefaultSharedPreferences(context).getString(PREF_TAPAD_DEVICE_ID, null);
// do not attempt to collect any ids if the device is opted out
if (OPTED_OUT_DEVICE_ID.equals(deviceId)) {
typedDeviceIds = null;
} else {
// collect ids
List<TypedIdentifier> ids = idCollector.get(context);
// if no ids
if (ids.isEmpty()) {
// generate and store a new id if there is no saved id
if (deviceId == null) {
Logging.warn("Tracking", "Unable to retrieve any device identifiers, using a UUID instead.");
deviceId = UUID.randomUUID().toString();
PreferenceManager.getDefaultSharedPreferences(context).edit().putString(PREF_TAPAD_DEVICE_ID, deviceId).commit();
}
// ensure that typed id is set to null
typedDeviceIds = null;
} else {
// set the deviceId to the first typed id, but don't save it in prefs because that space is reserved for the generated UUID/Opt-out
deviceId = ids.get(0).getValue();
// set the typedDeviceIds to the full string representation
typedDeviceIds = TextUtils.join(",", ids);
}
}
}
/**
 * Opts the device out of all tracking / personalization by setting the device id to the constant
 * string OptedOut. This means that it is now impossible to distinguish this device from all
 * other opted out device.
 *
 * @param context a context reference
 */
public static void optOut(Context context) {
deviceId = OPTED_OUT_DEVICE_ID;
typedDeviceIds = null;
// Persist the sentinel so collectIds honors the opt-out on the next launch.
PreferenceManager.getDefaultSharedPreferences(context)
.edit()
.putString(PREF_TAPAD_DEVICE_ID, deviceId)
.commit();
}
/**
 * Opts the device back in after an opt out.
 *
 * @param context a context reference
 */
public static void optIn(Context context) {
// we clear the saved preferences and run through id collection logic once more
PreferenceManager.getDefaultSharedPreferences(context)
.edit()
.remove(PREF_TAPAD_DEVICE_ID)
.commit();
collectIds(context);
}
// Fails fast when any accessor is used before init(); service doubles as the init flag.
private static void assertInitialized() {
if (service == null)
throw new IllegalStateException("Please call Tracking.init(context) to initialize the API first!");
}
/**
 * Gets device identifier locator used by the Tracking API.
 *
 * @return the identifier locator
 * @throws IllegalStateException if init() has not been called
 */
public static DeviceIdentifier getDeviceId() {
assertInitialized();
return deviceIdLocator;
}
/**
 * Checks if the device is opted out of tracking. Note that the opt out is enforced by the API itself,
 * so this check is just for UI purposes (e.g, determine if the opt out checkbox should be checked or not).
 *
 * @return true if the device is opted out
 */
public static boolean isOptedOut() {
assertInitialized();
return OPTED_OUT_DEVICE_ID.equals(deviceId);
}
// Returns the singleton tracking service; init() must have been called first.
public static TrackingService get() {
assertInitialized();
return service;
}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.syntax;
import com.google.common.base.Joiner;
import com.google.common.base.Preconditions;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Iterables;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.skyframe.serialization.autocodec.AutoCodec;
import com.google.devtools.build.lib.syntax.Printer.BasePrinter;
import com.google.devtools.build.lib.util.LoggingUtil;
import com.google.devtools.build.lib.util.StringCanonicalizer;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.logging.Level;
import javax.annotation.Nullable;
/**
* <p>Root of Type symbol hierarchy for values in the build language.</p>
*
* <p>Type symbols are primarily used for their <code>convert</code> method,
* which is a kind of cast operator enabling conversion from untyped (Object)
* references to values in the build language, to typed references.</p>
*
* <p>For example, this code type-converts a value <code>x</code> returned by
* the evaluator, to a list of strings:</p>
*
* <pre>
* Object x = expr.eval(env);
* List<String> s = Type.STRING_LIST.convert(x);
* </pre>
*/
public abstract class Type<T> {
protected Type() {}
/**
* Converts untyped Object x resulting from the evaluation of an expression in the build language,
* into a typed object of type T.
*
* <p>x must be *directly* convertible to this type. This therefore disqualifies "selector
* expressions" of the form "{ config1: 'value1_of_orig_type', config2: 'value2_of_orig_type; }"
* (which support configurable attributes). To handle those expressions, see
* {@link com.google.devtools.build.lib.packages.BuildType#selectableConvert}.
*
* @param x the build-interpreter value to convert.
* @param what an object having a toString describing what x is for; should be included in
* any exception thrown. Grammatically, must produce a string describe a syntactic
* construct, e.g. "attribute 'srcs' of rule foo".
* @param context the label of the current BUILD rule; must be non-null if resolution of
* package-relative label strings is required
* @throws ConversionException if there was a problem performing the type conversion
*/
public abstract T convert(Object x, Object what, @Nullable Object context)
throws ConversionException;
// TODO(bazel-team): Check external calls (e.g. in PackageFactory), verify they always want
// this over selectableConvert.
/**
* Equivalent to {@link #convert(Object, Object, Object)} where the label is {@code null}.
* Useful for converting values to types that do not involve the type {@code LABEL}
* and hence do not require the label of the current package.
*/
public final T convert(Object x, Object what) throws ConversionException {
    // Delegate with no label context.
    return convert(x, what, /* context= */ null);
}
/**
* Like {@link #convert(Object, Object, Object)}, but converts skylark {@code None}
* to given {@code defaultValue}.
*/
@Nullable public final T convertOptional(Object x,
    String what, @Nullable Object context, T defaultValue)
    throws ConversionException {
  // Skylark None (or Java null) short-circuits to the caller-supplied default.
  return EvalUtils.isNullOrNone(x) ? defaultValue : convert(x, what, context);
}
/**
* Like {@link #convert(Object, Object, Object)}, but converts skylark {@code None}
* to java {@code null}.
*/
@Nullable public final T convertOptional(Object x, String what, @Nullable Object context)
    throws ConversionException {
  // Absent values map to Java null.
  return convertOptional(x, what, context, /* defaultValue= */ null);
}
/**
* Like {@link #convert(Object, Object)}, but converts skylark {@code NONE} to java {@code null}.
*/
@Nullable public final T convertOptional(Object x, String what) throws ConversionException {
  // No label context is needed for this overload.
  return convertOptional(x, what, /* context= */ null);
}
public abstract T cast(Object value);
@Override
public abstract String toString();
/**
* Returns the default value for this type; may return null iff no default is defined for this
* type.
*/
public abstract T getDefaultValue();
/**
* Function accepting a (potentially null) {@link Label} and an arbitrary context object. Used by
* {@link #visitLabels}.
*/
public interface LabelVisitor<C> {
// Invoked once per label found in a value; 'context' is caller-supplied state
// threaded through visitLabels. Both arguments may be null.
void visit(@Nullable Label label, @Nullable C context) throws InterruptedException;
}
/**
* Invokes {@code visitor.visit(label, context)} for each {@link Label} {@code label} associated
* with {@code value}, which is assumed an instance of this {@link Type}.
*
* <p>This is used to support reliable label visitation in
* {@link com.google.devtools.build.lib.packages.AbstractAttributeMapper#visitLabels}. To preserve
* that reliability, every type should faithfully define its own instance of this method. In other
* words, be careful about defining default instances in base types that get auto-inherited by
* their children. Keep all definitions as explicit as possible.
*/
public abstract <C> void visitLabels(LabelVisitor<C> visitor, Object value, @Nullable C context)
throws InterruptedException;
/** Classifications of labels by their usage. */
public enum LabelClass {
// NOTE(review): nothing visible here depends on constant order, but avoid
// reordering without checking for ordinal()-based use elsewhere.
/** Used for types which are not labels. */
NONE,
/** Used for types which use labels to declare a dependency. */
DEPENDENCY,
/**
 * Used for types which use labels to reference another target but do not declare a dependency,
 * in cases where doing so would cause a dependency cycle.
 */
NONDEP_REFERENCE,
/** Used for types which use labels to declare an output path. */
OUTPUT,
/**
 * Used for types which contain Fileset entries, which contain labels but do not produce
 * normal dependencies.
 */
FILESET_ENTRY
}
/** Returns the class of labels contained by this type, if any. */
public LabelClass getLabelClass() {
    // Most types carry no labels; label-bearing subtypes override this.
    return LabelClass.NONE;
}
/**
* Implementation of concatenation for this type (e.g. "val1 + val2"). Returns null to
* indicate concatenation isn't supported.
*/
public T concat(@SuppressWarnings("unused") Iterable<T> elements) {
    return null; // null signals that concatenation ("+") is unsupported for this type
}
/**
* Converts an initialized Type object into a tag set representation.
* This operation is only valid for certain sub-Types which are guaranteed
* to be properly initialized.
*
* @param value the actual value
* @throws UnsupportedOperationException if the concrete type does not support
* tag conversion or if a convertible type has no initialized value.
*/
public Set<String> toTagSet(Object value, String name) {
    // Tag conversion is opt-in: only subtypes that support it override this.
    throw new UnsupportedOperationException(
        "Attribute " + name + " does not support tag conversion.");
}
/** The type of an integer. */
@AutoCodec public static final Type<Integer> INTEGER = new IntegerType();
/** The type of a string. */
@AutoCodec public static final Type<String> STRING = new StringType();
/** The type of a boolean. */
@AutoCodec public static final Type<Boolean> BOOLEAN = new BooleanType();
/** The type of a list of not-yet-typed objects. */
@AutoCodec public static final ObjectListType OBJECT_LIST = new ObjectListType();
/** The type of a list of {@linkplain #STRING strings}. */
@AutoCodec public static final ListType<String> STRING_LIST = ListType.create(STRING);
/** The type of a list of {@linkplain #INTEGER strings}. */
@AutoCodec public static final ListType<Integer> INTEGER_LIST = ListType.create(INTEGER);
/** The type of a dictionary of {@linkplain #STRING strings}. */
@AutoCodec
public static final DictType<String, String> STRING_DICT = DictType.create(STRING, STRING);
/** The type of a dictionary of {@linkplain #STRING_LIST label lists}. */
@AutoCodec
public static final DictType<String, List<String>> STRING_LIST_DICT =
DictType.create(STRING, STRING_LIST);
/**
* For ListType objects, returns the type of the elements of the list; for
* all other types, returns null. (This non-obvious implementation strategy
* is necessitated by the wildcard capture rules of the Java type system,
* which disallow conversion from Type{List{ELEM}} to Type{List{?}}.)
*/
public Type<?> getListElementType() {
    return null; // non-list types have no element type; ListType overrides this
}
/**
* ConversionException is thrown when a type-conversion fails; it contains
* an explanatory error message.
*/
public static class ConversionException extends EvalException {
    /** Renders the standard "expected value of type ... but got ..." diagnostic. */
    private static String message(Type<?> type, Object value, @Nullable Object what) {
        final BasePrinter out = Printer.getPrinter();
        out.append("expected value of type '").append(type.toString()).append("'");
        if (what != null) {
            out.append(" for ").append(what.toString());
        }
        out.append(", but got ");
        out.repr(value);
        out.append(" (").append(EvalUtils.getDataTypeName(value)).append(")");
        return out.toString();
    }

    /** Standard conversion failure for a value of the wrong type. */
    public ConversionException(Type<?> type, Object value, @Nullable Object what) {
        super(null, message(type, value, what));
    }

    /** Conversion failure with a caller-supplied message. */
    public ConversionException(String message) {
        super(null, message);
    }
}
/********************************************************************
* *
* Subclasses *
* *
********************************************************************/
/** A catch-all type whose conversion is the identity function and which has no default. */
private static class ObjectType extends Type<Object> {
    @Override
    public Object cast(Object value) {
        return value;
    }

    @Override
    public Object convert(Object x, Object what, Object context) {
        return x; // every value is already an "object"
    }

    @Override
    public String getDefaultValue() {
        throw new UnsupportedOperationException(
            "ObjectType has no default value");
    }

    @Override
    public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
        // Plain objects carry no labels; nothing to visit.
    }

    @Override
    public String toString() {
        return "object";
    }
}
/** The type of ints: default value 0; concatenation is summation. */
private static class IntegerType extends Type<Integer> {

  @Override
  public Integer cast(Object value) {
    return (Integer) value;
  }

  @Override
  public Integer getDefaultValue() {
    return 0;
  }

  @Override
  public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    // Integers contain no labels.
  }

  @Override
  public String toString() {
    return "int";
  }

  /** Accepts only genuine {@link Integer} values; anything else fails. */
  @Override
  public Integer convert(Object x, Object what, Object context)
      throws ConversionException {
    if (x instanceof Integer) {
      return (Integer) x;
    }
    throw new ConversionException(this, x, what);
  }

  /** Concatenation of ints is their sum (ordinary Java int arithmetic). */
  @Override
  public Integer concat(Iterable<Integer> elements) {
    int sum = 0;
    for (int element : elements) {
      sum += element;
    }
    return sum;
  }
}
/** The type of booleans. Conversion additionally accepts the ints 0 and 1. */
private static class BooleanType extends Type<Boolean> {

  @Override
  public Boolean cast(Object value) {
    return (Boolean) value;
  }

  @Override
  public Boolean getDefaultValue() {
    return false;
  }

  @Override
  public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    // Booleans contain no labels.
  }

  @Override
  public String toString() {
    return "boolean";
  }

  /**
   * Converts {@code x} to a boolean. Real booleans pass through unchanged;
   * any other value must convert (via {@link #INTEGER}) to exactly 0 or 1.
   */
  @Override
  public Boolean convert(Object x, Object what, Object context)
      throws ConversionException {
    if (x instanceof Boolean) {
      return (Boolean) x;
    }
    // Delegate to INTEGER so non-int inputs fail with its error message.
    int asInt = INTEGER.convert(x, what, context);
    switch (asInt) {
      case 0:
        return false;
      case 1:
        return true;
      default:
        throw new ConversionException("boolean is not one of [0, 1]");
    }
  }

  /** A boolean attribute {@code name} becomes the tag "name" or "noname". */
  @Override
  public Set<String> toTagSet(Object value, String name) {
    if (value == null) {
      throw new IllegalStateException(
          "Illegal tag conversion from null on Attribute " + name + ".");
    }
    return ImmutableSet.of((Boolean) value ? name : "no" + name);
  }
}
/** The type of strings: default "", concatenation is juxtaposition. */
private static class StringType extends Type<String> {

  @Override
  public String cast(Object value) {
    return (String) value;
  }

  @Override
  public String getDefaultValue() {
    return "";
  }

  @Override
  public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context) {
    // Plain strings contain no labels.
  }

  @Override
  public String toString() {
    return "string";
  }

  /** Accepts only genuine strings, canonicalizing them to share storage. */
  @Override
  public String convert(Object x, Object what, Object context)
      throws ConversionException {
    if (x instanceof String) {
      return StringCanonicalizer.intern((String) x);
    }
    throw new ConversionException(this, x, what);
  }

  @Override
  public String concat(Iterable<String> elements) {
    return Joiner.on("").join(elements);
  }

  /** A string is representable as the singleton tag set containing its value. */
  @Override
  public Set<String> toTagSet(Object value, String name) {
    if (value == null) {
      throw new IllegalStateException(
          "Illegal tag conversion from null on Attribute " + name + ".");
    }
    return ImmutableSet.of((String) value);
  }
}
/**
 * A type to support dictionary attributes.
 */
public static class DictType<KeyT, ValueT> extends Type<Map<KeyT, ValueT>> {

  private final Type<KeyT> keyType;
  private final Type<ValueT> valueType;
  // Shared immutable empty map returned by getDefaultValue().
  private final Map<KeyT, ValueT> empty = ImmutableMap.of();
  // The single label class of this dict; see create() for the invariant.
  private final LabelClass labelClass;

  /** Visits the labels in every key and every value of the dict {@code value}. */
  @Override
  public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context)
      throws InterruptedException {
    for (Map.Entry<KeyT, ValueT> entry : cast(value).entrySet()) {
      keyType.visitLabels(visitor, entry.getKey(), context);
      valueType.visitLabels(visitor, entry.getValue(), context);
    }
  }

  /**
   * Creates a dict type for the given key and value types.
   *
   * <p>Precondition: if both the key type and the value type contain labels,
   * they must contain labels of the same class. The dict's label class is the
   * non-NONE one of the two (or NONE when neither contains labels).
   */
  public static <KEY, VALUE> DictType<KEY, VALUE> create(
      Type<KEY> keyType, Type<VALUE> valueType) {
    LabelClass keyLabelClass = keyType.getLabelClass();
    LabelClass valueLabelClass = valueType.getLabelClass();
    Preconditions.checkArgument(
        keyLabelClass == LabelClass.NONE
            || valueLabelClass == LabelClass.NONE
            || keyLabelClass == valueLabelClass,
        "A DictType's keys and values must be the same class of label if both contain labels, "
            + "but the key type " + keyType + " contains " + keyLabelClass + " labels, while "
            + "the value type " + valueType + " contains " + valueLabelClass + " labels.");
    LabelClass labelClass = (keyLabelClass != LabelClass.NONE) ? keyLabelClass : valueLabelClass;
    return new DictType<>(keyType, valueType, labelClass);
  }

  protected DictType(Type<KeyT> keyType, Type<ValueT> valueType, LabelClass labelClass) {
    this.keyType = keyType;
    this.valueType = valueType;
    this.labelClass = labelClass;
  }

  public Type<KeyT> getKeyType() {
    return keyType;
  }

  public Type<ValueT> getValueType() {
    return valueType;
  }

  @Override
  public LabelClass getLabelClass() {
    return labelClass;
  }

  @SuppressWarnings("unchecked")
  @Override
  public Map<KeyT, ValueT> cast(Object value) {
    return (Map<KeyT, ValueT>) value;
  }

  @Override
  public String toString() {
    return "dict(" + keyType + ", " + valueType + ")";
  }

  /**
   * Converts {@code x} (which must be a Map) to an immutable map, converting
   * every key and value with the respective element types. On key collisions
   * produced by conversion, the last entry in iteration order wins.
   */
  @Override
  public Map<KeyT, ValueT> convert(Object x, Object what, Object context)
      throws ConversionException {
    if (!(x instanceof Map<?, ?>)) {
      throw new ConversionException(this, x, what);
    }
    Map<?, ?> o = (Map<?, ?>) x;
    // It's possible that #convert() calls transform non-equal keys into equal ones so we can't
    // just use ImmutableMap.Builder() here (that throws on collisions).
    LinkedHashMap<KeyT, ValueT> result = new LinkedHashMap<>();
    for (Map.Entry<?, ?> elem : o.entrySet()) {
      result.put(
          keyType.convert(elem.getKey(), "dict key element", context),
          valueType.convert(elem.getValue(), "dict value element", context));
    }
    return ImmutableMap.copyOf(result);
  }

  @Override
  public Map<KeyT, ValueT> getDefaultValue() {
    return empty;
  }
}
/** A type for lists of a given element type */
public static class ListType<ElemT> extends Type<List<ElemT>> {

  private final Type<ElemT> elemType;
  // Shared immutable empty list returned by getDefaultValue().
  private final List<ElemT> empty = ImmutableList.of();

  /** Returns the type of lists whose elements have type {@code elemType}. */
  public static <ELEM> ListType<ELEM> create(Type<ELEM> elemType) {
    return new ListType<>(elemType);
  }

  private ListType(Type<ElemT> elemType) {
    this.elemType = elemType;
  }

  @SuppressWarnings("unchecked")
  @Override
  public List<ElemT> cast(Object value) {
    return (List<ElemT>) value;
  }

  @Override
  public Type<ElemT> getListElementType() {
    return elemType;
  }

  /** A list has exactly the label class of its element type. */
  @Override
  public LabelClass getLabelClass() {
    return elemType.getLabelClass();
  }

  @Override
  public List<ElemT> getDefaultValue() {
    return empty;
  }

  /** Visits the labels of every element of the list {@code value}. */
  @Override
  public <T> void visitLabels(LabelVisitor<T> visitor, Object value, T context)
      throws InterruptedException {
    List<ElemT> elems = cast(value);
    // Hot code path. Optimize for lists with O(1) access to avoid iterator garbage.
    if (elems instanceof ImmutableList || elems instanceof ArrayList) {
      for (int i = 0; i < elems.size(); i++) {
        elemType.visitLabels(visitor, elems.get(i), context);
      }
    } else {
      for (ElemT elem : elems) {
        elemType.visitLabels(visitor, elem, context);
      }
    }
  }

  @Override
  public String toString() {
    return "list(" + elemType + ")";
  }

  /**
   * Converts {@code x} to a list, converting each element with the element
   * type. Elements whose conversion yields null are dropped from the result
   * (after remote logging) rather than kept.
   */
  @Override
  public List<ElemT> convert(Object x, Object what, Object context)
      throws ConversionException {
    Iterable<?> iterable;
    try {
      iterable = EvalUtils.toIterableStrict(x, null, null);
    } catch (EvalException ex) {
      throw new ConversionException(this, x, what);
    }
    int index = 0;
    List<ElemT> result = new ArrayList<>(Iterables.size(iterable));
    // Mutable context object: avoids building an "element i of ..." string for
    // every element unless an error message actually needs it.
    ListConversionContext conversionContext = new ListConversionContext(what);
    for (Object elem : iterable) {
      conversionContext.update(index);
      ElemT converted = elemType.convert(elem, conversionContext, context);
      if (converted != null) {
        result.add(converted);
      } else {
        // shouldn't happen but it does, rarely
        String message = "Converting a list with a null element: "
            + "element " + index + " of " + what + " in " + context;
        LoggingUtil.logToRemote(Level.WARNING, message,
            new ConversionException(message));
      }
      ++index;
    }
    return result;
  }

  /** Concatenation of lists is their concatenation, in order, as an immutable list. */
  @Override
  public List<ElemT> concat(Iterable<List<ElemT>> elements) {
    ImmutableList.Builder<ElemT> builder = ImmutableList.builder();
    for (List<ElemT> list : elements) {
      builder.addAll(list);
    }
    return builder.build();
  }

  /**
   * A list is representable as a tag set as the contents of itself expressed
   * as Strings. So a {@code List<String>} is effectively converted to a {@code Set<String>}.
   */
  @Override
  public Set<String> toTagSet(Object items, String name) {
    if (items == null) {
      // Fix: include the space before the attribute name so this message matches
      // the identical null checks in BooleanType.toTagSet and StringType.toTagSet.
      String msg = "Illegal tag conversion from null on Attribute " + name + ".";
      throw new IllegalStateException(msg);
    }
    Set<String> tags = new LinkedHashSet<>();
    @SuppressWarnings("unchecked")
    List<ElemT> itemsAsListofElem = (List<ElemT>) items;
    for (ElemT element : itemsAsListofElem) {
      tags.add(element.toString());
    }
    return tags;
  }

  /**
   * Provides a {@link #toString()} description of the context of the value in a list being
   * converted. This is preferred over a raw string to avoid uselessly constructing strings which
   * are never used. This class is mutable (the index is updated).
   */
  private static class ListConversionContext {
    private final Object what;
    private int index = 0;

    ListConversionContext(Object what) {
      this.what = what;
    }

    void update(int index) {
      this.index = index;
    }

    @Override
    public String toString() {
      return "element " + index + " of " + what;
    }
  }
}
/** Type for lists of arbitrary objects */
public static class ObjectListType extends ListType<Object> {

  private static final Type<Object> elemType = new ObjectType();

  private ObjectListType() {
    super(elemType);
  }

  /**
   * Converts {@code x} to a list without per-element conversion (elements are
   * arbitrary objects). The SkylarkList branch is checked first; a plain List
   * is returned as-is (not copied), and any other Iterable is copied.
   */
  @Override
  @SuppressWarnings("unchecked")
  public List<Object> convert(Object x, Object what, Object context)
      throws ConversionException {
    if (x instanceof SkylarkList) {
      return ((SkylarkList) x).getImmutableList();
    } else if (x instanceof List) {
      // NOTE(review): returned without defensive copy — callers presumably must
      // not mutate it; confirm.
      return (List<Object>) x;
    } else if (x instanceof Iterable) {
      return ImmutableList.copyOf((Iterable<?>) x);
    } else {
      throw new ConversionException(this, x, what);
    }
  }
}
}
| |
package io.airlift.command.model;
import com.google.common.base.Function;
import com.google.common.base.Preconditions;
import com.google.common.collect.*;
import io.airlift.command.*;
import javax.annotation.Nullable;
import javax.inject.Inject;
import java.lang.reflect.Field;
import java.util.*;
import static com.google.common.base.Predicates.compose;
import static com.google.common.base.Predicates.equalTo;
import static com.google.common.collect.Iterables.find;
import static com.google.common.collect.Iterables.transform;
import static com.google.common.collect.Lists.newArrayList;
import static com.google.common.collect.Maps.newHashMap;
/**
 * Builds the command-line metadata model (global / group / command / option /
 * arguments metadata) from annotated command classes via reflection.
 */
public class MetadataLoader
{
    /**
     * Builds the top-level CLI metadata. Global options are collected from the
     * default command, the default-group commands and every command of every
     * group, then merged by equivalence.
     */
    public static GlobalMetadata loadGlobal(String name,
            String description,
            CommandMetadata defaultCommand,
            Iterable<CommandMetadata> defaultGroupCommands,
            Iterable<CommandGroupMetadata> groups)
    {
        ImmutableList.Builder<OptionMetadata> globalOptionsBuilder = ImmutableList.builder();
        if (defaultCommand != null) {
            globalOptionsBuilder.addAll(defaultCommand.getGlobalOptions());
        }
        for (CommandMetadata command : defaultGroupCommands) {
            globalOptionsBuilder.addAll(command.getGlobalOptions());
        }
        for (CommandGroupMetadata group : groups) {
            for (CommandMetadata command : group.getCommands()) {
                globalOptionsBuilder.addAll(command.getGlobalOptions());
            }
        }
        // Collapse duplicate declarations of the same option into one entry.
        List<OptionMetadata> globalOptions = mergeOptionSet(globalOptionsBuilder.build());
        return new GlobalMetadata(name, description, globalOptions, defaultCommand, defaultGroupCommands, groups);
    }

    /**
     * Builds the metadata for one command group. Group options are collected
     * from the group's (optional) default command and all member commands.
     */
    public static CommandGroupMetadata loadCommandGroup(String name, String description, CommandMetadata defaultCommand, Iterable<CommandMetadata> commands)
    {
        ImmutableList.Builder<OptionMetadata> groupOptionsBuilder = ImmutableList.builder();
        if (defaultCommand != null) {
            groupOptionsBuilder.addAll(defaultCommand.getGroupOptions());
        }
        for (CommandMetadata command : commands) {
            groupOptionsBuilder.addAll(command.getGroupOptions());
        }
        List<OptionMetadata> groupOptions = mergeOptionSet(groupOptionsBuilder.build());
        return new CommandGroupMetadata(name, description, groupOptions, defaultCommand, commands);
    }

    /** Loads {@link CommandMetadata} for each of the given command classes. */
    public static <T> ImmutableList<CommandMetadata> loadCommands(Iterable<Class<? extends T>> defaultCommands)
    {
        return ImmutableList.copyOf(Iterables.transform(defaultCommands, new Function<Class<?>, CommandMetadata>()
        {
            public CommandMetadata apply(Class<?> commandType)
            {
                return loadCommand(commandType);
            }
        }));
    }

    /**
     * Loads the metadata for a single command class. The class or one of its
     * superclasses must carry {@link Command}; {@link Group}/{@link Groups}
     * annotations encountered while walking up the hierarchy are collected too.
     *
     * <p>NOTE(review): the walk stops at the first class declaring
     * {@code @Command}, so group annotations on classes above it are ignored —
     * presumably intentional, but worth confirming.
     *
     * @throws IllegalArgumentException if no {@code @Command} annotation is found
     */
    public static CommandMetadata loadCommand(Class<?> commandType)
    {
        Command command = null;
        List<Group> groups = new ArrayList<Group>();
        for (Class<?> cls = commandType; command == null && !Object.class.equals(cls); cls = cls.getSuperclass()) {
            command = cls.getAnnotation(Command.class);
            if (cls.isAnnotationPresent(Groups.class)) {
                groups.addAll(Arrays.asList(cls.getAnnotation(Groups.class).value()));
            }
            if (cls.isAnnotationPresent(Group.class)) {
                groups.add(cls.getAnnotation(Group.class));
            }
        }
        Preconditions.checkArgument(command != null, "Command %s is not annotated with @Command", commandType.getName());
        String name = command.name();
        // An empty description is normalized to null.
        String description = command.description().isEmpty() ? null : command.description();
        List<String> groupNames = Arrays.asList(command.groupNames());
        boolean hidden = command.hidden();
        InjectionMetadata injectionMetadata = loadInjectionMetadata(commandType);
        return new CommandMetadata(
                name,
                description,
                hidden,
                injectionMetadata.globalOptions,
                injectionMetadata.groupOptions,
                injectionMetadata.commandOptions,
                Iterables.getFirst(injectionMetadata.arguments, null),
                injectionMetadata.metadataInjections,
                commandType,
                groupNames,
                groups);
    }

    /** Loads the metadata (metadata injection points only) for a {@link Suggester}. */
    public static SuggesterMetadata loadSuggester(Class<? extends Suggester> suggesterClass)
    {
        InjectionMetadata injectionMetadata = loadInjectionMetadata(suggesterClass);
        return new SuggesterMetadata(suggesterClass, injectionMetadata.metadataInjections);
    }

    /** Scans {@code type} and returns its compacted injection metadata. */
    public static InjectionMetadata loadInjectionMetadata(Class<?> type)
    {
        InjectionMetadata injectionMetadata = new InjectionMetadata();
        loadInjectionMetadata(type, injectionMetadata, ImmutableList.<Field>of());
        injectionMetadata.compact();
        return injectionMetadata;
    }

    /**
     * Recursively scans {@code type} and its superclasses for fields annotated
     * with {@link Inject}, {@link Option} or {@link Arguments}, accumulating
     * results into {@code injectionMetadata}. {@code fields} is the accessor
     * path from the root command object down to the class currently scanned.
     */
    public static void loadInjectionMetadata(Class<?> type, InjectionMetadata injectionMetadata, List<Field> fields)
    {
        if (type.isInterface()) {
            return;
        }
        for (Class<?> cls = type; !Object.class.equals(cls); cls = cls.getSuperclass()) {
            for (Field field : cls.getDeclaredFields()) {
                field.setAccessible(true);
                ImmutableList<Field> path = concat(fields, field);

                Inject injectAnnotation = field.getAnnotation(Inject.class);
                if (injectAnnotation != null) {
                    if (field.getType().equals(GlobalMetadata.class) ||
                            field.getType().equals(CommandGroupMetadata.class) ||
                            field.getType().equals(CommandMetadata.class)) {
                        // Metadata fields receive the parsed metadata directly.
                        injectionMetadata.metadataInjections.add(new Accessor(path));
                    } else {
                        // Composed object: recurse into its class, extending the path.
                        loadInjectionMetadata(field.getType(), injectionMetadata, path);
                    }
                }

                Option optionAnnotation = field.getAnnotation(Option.class);
                if (optionAnnotation != null) {
                    OptionType optionType = optionAnnotation.type();
                    // The option's display name defaults to the field name.
                    String name = optionAnnotation.title().isEmpty() ? field.getName() : optionAnnotation.title();
                    List<String> options = ImmutableList.copyOf(optionAnnotation.name());
                    String description = optionAnnotation.description();

                    int arity = optionAnnotation.arity();
                    Preconditions.checkArgument(arity >= 0 || arity == Integer.MIN_VALUE, "Invalid arity for option %s", name);
                    if (arity < 0) {
                        // Arity left unspecified (MIN_VALUE sentinel): boolean
                        // fields are flags (arity 0), everything else takes one value.
                        Class<?> fieldType = field.getType();
                        if (Boolean.class.isAssignableFrom(fieldType) || boolean.class.isAssignableFrom(fieldType)) {
                            arity = 0;
                        }
                        else {
                            arity = 1;
                        }
                    }

                    boolean required = optionAnnotation.required();
                    boolean hidden = optionAnnotation.hidden();
                    List<String> allowedValues = ImmutableList.copyOf(optionAnnotation.allowedValues());
                    if (allowedValues.isEmpty()) {
                        // No restriction is represented as null, not an empty list.
                        allowedValues = null;
                    }

                    OptionMetadata optionMetadata = new OptionMetadata(optionType, options, name, description, arity, required, hidden, allowedValues, path);
                    switch (optionType) {
                        case GLOBAL:
                            injectionMetadata.globalOptions.add(optionMetadata);
                            break;
                        case GROUP:
                            injectionMetadata.groupOptions.add(optionMetadata);
                            break;
                        case COMMAND:
                            injectionMetadata.commandOptions.add(optionMetadata);
                            break;
                    }
                }

                Arguments argumentsAnnotation = field.getAnnotation(Arguments.class);
                // Reuse the annotation already fetched instead of a second
                // isAnnotationPresent() reflection lookup.
                if (argumentsAnnotation != null) {
                    // The arguments title defaults to the field name.
                    String title = argumentsAnnotation.title().isEmpty() ? field.getName() : argumentsAnnotation.title();
                    String description = argumentsAnnotation.description();
                    String usage = argumentsAnnotation.usage();
                    boolean required = argumentsAnnotation.required();
                    injectionMetadata.arguments.add(new ArgumentsMetadata(title, description, usage, required, path));
                }
            }
        }
    }

    /**
     * Merges equivalent option declarations (found through different fields)
     * into single {@link OptionMetadata} instances and verifies that no option
     * name has two conflicting definitions.
     *
     * @throws IllegalArgumentException if two distinct options claim the same name
     */
    private static List<OptionMetadata> mergeOptionSet(List<OptionMetadata> options)
    {
        // Group equal options together (OptionMetadata equality), preserving order.
        ListMultimap<OptionMetadata, OptionMetadata> metadataIndex = ArrayListMultimap.create();
        for (OptionMetadata option : options) {
            metadataIndex.put(option, option);
        }
        options = ImmutableList.copyOf(transform(metadataIndex.asMap().values(), new Function<Collection<OptionMetadata>, OptionMetadata>()
        {
            @Override
            public OptionMetadata apply(@Nullable Collection<OptionMetadata> options)
            {
                return new OptionMetadata(options);
            }
        }));

        // Detect conflicting definitions sharing an option name.
        Map<String, OptionMetadata> optionIndex = newHashMap();
        for (OptionMetadata option : options) {
            for (String optionName : option.getOptions()) {
                if (optionIndex.containsKey(optionName)) {
                    throw new IllegalArgumentException(String.format("Fields %s and %s have conflicting definitions of option %s",
                            optionIndex.get(optionName).getAccessors().iterator().next(),
                            option.getAccessors().iterator().next(),
                            optionName));
                }
                optionIndex.put(optionName, option);
            }
        }
        return options;
    }

    /** Returns an immutable copy of {@code iterable} with {@code item} appended. */
    private static <T> ImmutableList<T> concat(Iterable<T> iterable, T item)
    {
        return ImmutableList.<T>builder().addAll(iterable).add(item).build();
    }

    /**
     * Distributes commands into groups according to their annotations: first by
     * explicit {@link Group} annotations, then by the {@code groupNames} listed
     * on {@link Command}. Commands placed into any group are removed from the
     * default (ungrouped) command list.
     */
    public static void loadCommandsIntoGroupsByAnnotation(List<CommandMetadata> allCommands, List<CommandGroupMetadata> commandGroups, List<CommandMetadata> defaultCommandGroup)
    {
        List<CommandMetadata> newCommands = new ArrayList<CommandMetadata>();

        // first, create any groups explicitly annotated
        createGroupsFromAnnotations(allCommands, newCommands, commandGroups, defaultCommandGroup);

        for (CommandMetadata command : allCommands) {
            boolean added = false;

            // now add the command to any groupNames specified in the Command annotation
            for (String groupName : command.getGroupNames()) {
                CommandGroupMetadata group = find(commandGroups, compose(equalTo(groupName), CommandGroupMetadata.nameGetter()), null);
                if (group != null) {
                    group.addCommand(command);
                }
                else {
                    // No such group yet: create it with this command as its only
                    // member. (Removed a builder of group options that was
                    // constructed here but never used.)
                    commandGroups.add(loadCommandGroup(groupName, "", null, Collections.singletonList(command)));
                }
                added = true;
            }

            if (added && defaultCommandGroup.contains(command)) {
                defaultCommandGroup.remove(command);
            }
        }

        allCommands.addAll(newCommands);
    }

    /**
     * Creates command groups from explicit {@link Group} annotations, loading
     * any default/member command classes that are not already known. Newly
     * loaded commands are appended to {@code newCommands}.
     */
    private static void createGroupsFromAnnotations(List<CommandMetadata> allCommands, List<CommandMetadata> newCommands, List<CommandGroupMetadata> commandGroups, List<CommandMetadata> defaultCommandGroup)
    {
        for (CommandMetadata command : allCommands) {
            boolean added = false;

            for (Group groupAnno : command.getGroups()) {
                // Load the group's default command if one is declared.
                CommandMetadata defaultCommand = null;
                if (!groupAnno.defaultCommand().equals(Group.DEFAULT.class)) {
                    Class<?> defaultCommandClass = groupAnno.defaultCommand();
                    defaultCommand = find(allCommands, compose(equalTo(defaultCommandClass), CommandMetadata.typeGetter()), null);
                    if (null == defaultCommand) {
                        defaultCommand = loadCommand(defaultCommandClass);
                        newCommands.add(defaultCommand);
                    }
                }

                // Load the group's member commands that are not known yet.
                // NOTE(review): commands already present in allCommands are NOT
                // added to groupCommands here, only freshly loaded ones — they
                // reach the group only via their own annotations. Confirm this
                // is intended.
                List<CommandMetadata> groupCommands = new ArrayList<CommandMetadata>(groupAnno.commands().length);
                for (Class<?> commandClass : groupAnno.commands()) {
                    CommandMetadata groupCommand = find(allCommands, compose(equalTo(commandClass), CommandMetadata.typeGetter()), null);
                    if (null == groupCommand) {
                        groupCommand = loadCommand(commandClass);
                        newCommands.add(groupCommand);
                        groupCommands.add(groupCommand);
                    }
                }

                CommandGroupMetadata groupMetadata = find(commandGroups, compose(equalTo(groupAnno.name()), CommandGroupMetadata.nameGetter()), null);
                if (null == groupMetadata) {
                    groupMetadata = loadCommandGroup(groupAnno.name(), groupAnno.description(), defaultCommand, groupCommands);
                    commandGroups.add(groupMetadata);
                }
                groupMetadata.addCommand(command);
                added = true;
            }

            if (added && defaultCommandGroup.contains(command)) {
                defaultCommandGroup.remove(command);
            }
        }
    }

    /**
     * Mutable accumulator for the options, arguments and metadata injection
     * points discovered while scanning a command class.
     */
    private static class InjectionMetadata
    {
        private List<OptionMetadata> globalOptions = newArrayList();
        private List<OptionMetadata> groupOptions = newArrayList();
        private List<OptionMetadata> commandOptions = newArrayList();
        private List<ArgumentsMetadata> arguments = newArrayList();
        private List<Accessor> metadataInjections = newArrayList();

        // Merges duplicate option declarations and collapses multiple
        // @Arguments fields into one combined metadata entry.
        private void compact()
        {
            globalOptions = mergeOptionSet(globalOptions);
            groupOptions = mergeOptionSet(groupOptions);
            commandOptions = mergeOptionSet(commandOptions);

            if (arguments.size() > 1) {
                arguments = ImmutableList.of(new ArgumentsMetadata(arguments));
            }
        }
    }
}
| |
/*
* DBeaver - Universal Database Manager
* Copyright (C) 2010-2021 DBeaver Corp and others
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jkiss.dbeaver.ui.controls.resultset.handler;
import org.eclipse.core.commands.ExecutionEvent;
import org.eclipse.core.commands.ExecutionException;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.dialogs.IDialogConstants;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.*;
import org.eclipse.ui.commands.IElementUpdater;
import org.eclipse.ui.handlers.HandlerUtil;
import org.eclipse.ui.menus.UIElement;
import org.jkiss.dbeaver.model.data.DBDDisplayFormat;
import org.jkiss.dbeaver.ui.IActionConstants;
import org.jkiss.dbeaver.ui.UIUtils;
import org.jkiss.dbeaver.ui.controls.ValueFormatSelector;
import org.jkiss.dbeaver.ui.controls.resultset.IResultSetController;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetCopySettings;
import org.jkiss.dbeaver.ui.controls.resultset.ResultSetUtils;
import org.jkiss.dbeaver.ui.controls.resultset.internal.ResultSetMessages;
import org.jkiss.utils.CommonUtils;
import java.util.Map;
/**
 * Copy special handler.
 *
 * <p>Handles the "copy special" result-set command: shows a configuration
 * dialog (delimiters, quoting, format) and, if confirmed, copies the active
 * presentation's current selection to the clipboard with those settings.
 */
public class ResultSetHandlerCopySpecial extends ResultSetHandlerMain implements IElementUpdater {

    public static final String CMD_COPY_SPECIAL = IActionConstants.CMD_COPY_SPECIAL;

    @Override
    public Object execute(ExecutionEvent event) throws ExecutionException
    {
        // Resolve the result set of the active workbench part; nothing to do without one.
        IResultSetController resultSet = getActiveResultSet(HandlerUtil.getActivePart(event));
        if (resultSet == null) {
            return null;
        }
        switch (event.getCommand().getId()) {
            case CMD_COPY_SPECIAL:
                showAdvancedCopyDialog(resultSet, HandlerUtil.getActiveShell(event));
                break;
        }
        return null;
    }

    /**
     * Opens the advanced copy-configuration dialog and, on OK, copies the
     * presentation's current selection to the clipboard using the configured
     * {@link ResultSetCopySettings}.
     */
    public static void showAdvancedCopyDialog(IResultSetController resultSet, Shell shell) {
        AdvancedCopyConfigDialog configDialog = new AdvancedCopyConfigDialog(shell);
        if (configDialog.open() == IDialogConstants.OK_ID) {
            ResultSetUtils.copyToClipboard(
                resultSet.getActivePresentation().copySelection(configDialog.copySettings));
        }
    }

    // Sets the menu/toolbar label for this command contribution.
    @Override
    public void updateElement(UIElement element, Map parameters)
    {
        element.setText(ResultSetMessages.actions_spreadsheet_copy_special);
    }

    /**
     * Base copy-configuration dialog: column/row delimiters and quote string.
     * Values are persisted in dialog settings under the given {@code dialogId}.
     */
    public static class CopyConfigDialog extends Dialog {

        // Dialog-settings keys.
        static final String PARAM_COL_DELIMITER = "delimiter";
        static final String PARAM_ROW_DELIMITER = "rowDelimiter";
        static final String PARAM_QUOTE_STRING = "quoteString";

        protected final IDialogSettings settings;
        private Combo colDelimCombo;
        private Combo rowDelimCombo;
        private Combo quoteStringCombo;
        // Settings edited by this dialog; read by the caller after OK.
        protected ResultSetCopySettings copySettings;

        protected CopyConfigDialog(Shell shell, String dialogId)
        {
            super(shell);
            settings = UIUtils.getDialogSettings(dialogId);
            // Defaults: tab-separated columns, newline-separated rows, double quote.
            copySettings = new ResultSetCopySettings();
            copySettings.setColumnDelimiter("\t");
            copySettings.setRowDelimiter("\n");
            copySettings.setQuoteString("\"");
            // Previously persisted values, when present, override the defaults.
            if (settings.get(PARAM_COL_DELIMITER) != null) {
                copySettings.setColumnDelimiter(settings.get(PARAM_COL_DELIMITER));
            }
            if (settings.get(PARAM_ROW_DELIMITER) != null) {
                copySettings.setRowDelimiter(settings.get(PARAM_ROW_DELIMITER));
            }
            if (settings.get(PARAM_QUOTE_STRING) != null) {
                copySettings.setQuoteString(settings.get(PARAM_QUOTE_STRING));
            }
        }

        @Override
        protected void configureShell(Shell newShell) {
            super.configureShell(newShell);
            newShell.setText("Options");
        }

        @Override
        protected Control createDialogArea(Composite parent)
        {
            Composite group = (Composite)super.createDialogArea(parent);
            ((GridLayout)group.getLayout()).numColumns = 2;
            createControlsBefore(group);
            colDelimCombo = UIUtils.createDelimiterCombo(group, "Column Delimiter", new String[] {"\t", ";", ","}, copySettings.getColumnDelimiter(), false);
            rowDelimCombo = UIUtils.createDelimiterCombo(group, "Row Delimiter", new String[] {"\n", "|", "^"}, copySettings.getRowDelimiter(), false);
            quoteStringCombo = UIUtils.createDelimiterCombo(group, "Quote Character", new String[] {"\"", "'"}, copySettings.getQuoteString(), false);
            createControlsAfter(group);
            return group;
        }

        // Hook for subclasses: add controls below the delimiter combos.
        protected void createControlsAfter(Composite group) {
        }

        // Hook for subclasses: add controls above the delimiter combos.
        protected void createControlsBefore(Composite group) {
        }

        @Override
        protected void okPressed() {
            // Unescape the display strings ("\t" etc.) to real characters,
            // then persist the chosen values for the next invocation.
            copySettings.setColumnDelimiter(CommonUtils.unescapeDisplayString(colDelimCombo.getText()));
            copySettings.setRowDelimiter(CommonUtils.unescapeDisplayString(rowDelimCombo.getText()));
            copySettings.setQuoteString(CommonUtils.unescapeDisplayString(quoteStringCombo.getText()));
            settings.put(PARAM_COL_DELIMITER, copySettings.getColumnDelimiter());
            settings.put(PARAM_ROW_DELIMITER, copySettings.getRowDelimiter());
            settings.put(PARAM_QUOTE_STRING, copySettings.getQuoteString());
            super.okPressed();
        }
    }

    /**
     * Advanced copy dialog: adds header/row-number/quoting/HTML options and a
     * value-format selector on top of the base delimiter configuration.
     */
    private static class AdvancedCopyConfigDialog extends CopyConfigDialog {

        // Dialog-settings keys for the advanced options.
        static final String PARAM_COPY_HEADER = "copyHeader";
        static final String PARAM_COPY_ROWS = "copyRows";
        static final String PARAM_QUOTE_CELLS = "quoteCells";
        static final String PARAM_FORCE_QUOTES = "forceQuotes";
        static final String PARAM_FORMAT = "format";
        static final String PARAM_COPY_HTML = "copyHTML";

        private Button copyHeaderCheck;
        private Button copyRowsCheck;
        private Button quoteCellsCheck;
        private Button forceQuoteCheck;
        private Button copyHtmlCheck;
        private ValueFormatSelector formatSelector;

        protected AdvancedCopyConfigDialog(Shell shell)
        {
            super(shell, "AdvanceCopySettings");
            // Defaults for the advanced options.
            copySettings.setQuoteCells(true);
            copySettings.setCopyHeader(true);
            copySettings.setCopyRowNumbers(false);
            copySettings.setFormat(DBDDisplayFormat.UI);
            // Previously persisted values, when present, override the defaults.
            if (settings.get(PARAM_COPY_HEADER) != null) {
                copySettings.setCopyHeader(settings.getBoolean(PARAM_COPY_HEADER));
            }
            if (settings.get(PARAM_COPY_ROWS) != null) {
                copySettings.setCopyRowNumbers(settings.getBoolean(PARAM_COPY_ROWS));
            }
            if (settings.get(PARAM_QUOTE_CELLS) != null) {
                copySettings.setQuoteCells(settings.getBoolean(PARAM_QUOTE_CELLS));
            }
            if (settings.get(PARAM_FORCE_QUOTES) != null) {
                copySettings.setForceQuotes(settings.getBoolean(PARAM_FORCE_QUOTES));
            }
            if (settings.get(PARAM_FORMAT) != null) {
                // NOTE(review): valueOf throws IllegalArgumentException if the
                // persisted name no longer matches a DBDDisplayFormat constant.
                copySettings.setFormat(DBDDisplayFormat.valueOf(settings.get(PARAM_FORMAT)));
            }
            if (settings.get(PARAM_COPY_HTML) != null) {
                copySettings.setCopyHTML(settings.getBoolean(PARAM_COPY_HTML));
            }
        }

        @Override
        protected void createControlsBefore(Composite group) {
            copyHeaderCheck = UIUtils.createCheckbox(group, "Copy header", null, copySettings.isCopyHeader(), 2);
            copyRowsCheck = UIUtils.createCheckbox(group, "Copy row numbers", null, copySettings.isCopyRowNumbers(), 2);
            quoteCellsCheck = UIUtils.createCheckbox(group, "Quote cell values", "Place cell value in quotes if it contains column or row delimiter", copySettings.isQuoteCells(), 2);
            forceQuoteCheck = UIUtils.createCheckbox(group, "Always quote values", "Place all cell values in quotes", copySettings.isForceQuotes(), 2);
            copyHtmlCheck = UIUtils.createCheckbox(group, "Copy as HTML", "Copy as HTML (in addition to plaintext format)", copySettings.isCopyHTML(), 2);
            formatSelector = new ValueFormatSelector(group);
            formatSelector.select(copySettings.getFormat());
        }

        @Override
        protected void okPressed() {
            // Read the widget state into the settings object, then persist it.
            copySettings.setCopyHeader(copyHeaderCheck.getSelection());
            copySettings.setCopyRowNumbers(copyRowsCheck.getSelection());
            copySettings.setQuoteCells(quoteCellsCheck.getSelection());
            copySettings.setForceQuotes(forceQuoteCheck.getSelection());
            copySettings.setCopyHTML(copyHtmlCheck.getSelection());
            copySettings.setFormat(formatSelector.getSelection());
            settings.put(PARAM_COPY_HEADER, copySettings.isCopyHeader());
            settings.put(PARAM_COPY_ROWS, copySettings.isCopyRowNumbers());
            settings.put(PARAM_QUOTE_CELLS, copySettings.isQuoteCells());
            settings.put(PARAM_FORCE_QUOTES, copySettings.isForceQuotes());
            settings.put(PARAM_COPY_HTML, copySettings.isCopyHTML());
            settings.put(PARAM_FORMAT, copySettings.getFormat().name());
            super.okPressed();
        }
    }
}
| |
/*******************************************************************************
* Copyright 2016 Jalian Systems Pvt. Ltd.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package net.sourceforge.marathon.javaagent.components;
import java.awt.Component;
import java.awt.Rectangle;
import java.awt.event.MouseEvent;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Enumeration;
import java.util.List;
import java.util.Properties;
import java.util.StringTokenizer;
import java.util.concurrent.Callable;
import java.util.logging.Logger;
import javax.swing.JComponent;
import javax.swing.JTable;
import javax.swing.table.TableModel;
import net.sourceforge.marathon.javaagent.AbstractJavaElement;
import net.sourceforge.marathon.javaagent.EventQueueWait;
import net.sourceforge.marathon.javaagent.IJavaAgent;
import net.sourceforge.marathon.javaagent.IJavaElement;
import net.sourceforge.marathon.javaagent.JavaTargetLocator.JWindow;
import net.sourceforge.marathon.json.JSONArray;
import net.sourceforge.marathon.json.JSONObject;
import net.sourceforge.marathon.javaagent.NoSuchElementException;
public class JTableJavaElement extends AbstractJavaElement {
// Logger for this element type, named after the class.
public static final Logger LOGGER = Logger.getLogger(JTableJavaElement.class.getName());
/**
 * Matches a table cell when every property in the given {@link Properties}
 * equals the cell attribute of the same name.
 */
private static final class PropertyPredicate implements Predicate {

    private final Properties expected;

    private PropertyPredicate(Properties expected) {
        this.expected = expected;
    }

    @Override
    public boolean isValid(JTableCellJavaElement cell) {
        for (Enumeration<Object> keys = expected.keys(); keys.hasMoreElements();) {
            String key = (String) keys.nextElement();
            if (!expected.getProperty(key).equals(cell.getAttribute(key))) {
                return false;
            }
        }
        return true;
    }
}
/**
 * Matches a cell by its "row" and "column" attributes. The column may be
 * given either by name, or as a single capital letter ('A'..'Z') addressing
 * the view column in spreadsheet style ('A' = first view column).
 */
private static class RowColPropertyPredicate implements Predicate {

    private final String row;
    private final String column;

    public RowColPropertyPredicate(String row, String column) {
        this.row = row;
        this.column = column;
    }

    @Override
    public boolean isValid(JTableCellJavaElement cell) {
        String actualRow = cell.getAttribute("row");
        String actualColumn = cell.getAttribute("column");
        if (!row.equals(actualRow)) {
            return false;
        }
        if (column.equals(actualColumn)) {
            return true;
        }
        // Accept a single capital letter as a 0-based view-column index.
        if (column.length() == 1 && column.charAt(0) >= 'A' && column.charAt(0) <= 'Z') {
            return (cell.getViewColumn() - 1) == column.charAt(0) - 'A';
        }
        return false;
    }
}
private static interface Predicate {
public boolean isValid(JTableCellJavaElement e);
}
public JTableJavaElement(Component component, IJavaAgent driver, JWindow window) {
super(component, driver, window);
}
@Override
public List<IJavaElement> getByPseudoElement(String selector, Object[] params) {
if (selector.equals("header")) {
return Arrays.asList((IJavaElement) new JTableHeaderJavaElement(((JTable) getComponent()).getTableHeader(),
getDriver(), getWindow()));
} else if (selector.equals("mnth-cell")) {
return Arrays.asList((IJavaElement) new JTableCellJavaElement(this, ((Integer) params[0]).intValue() - 1,
((Integer) params[1]).intValue() - 1));
} else if (selector.equals("all-cells")) {
return collectCells(new ArrayList<IJavaElement>(), new Predicate() {
@Override
public boolean isValid(JTableCellJavaElement e) {
return true;
}
});
} else if (selector.equals("select-by-properties")) {
JSONObject o = new JSONObject((String) params[0]);
return selectByProperties(new ArrayList<IJavaElement>(), o);
}
return super.getByPseudoElement(selector, params);
}
public List<IJavaElement> collectCells(List<IJavaElement> r, Predicate p) {
try {
int rows = (Integer) EventQueueWait.call(getComponent(), "getRowCount");
int cols = (Integer) EventQueueWait.call(getComponent(), "getColumnCount");
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
JTableCellJavaElement e = new JTableCellJavaElement(this, i, j);
if (p.isValid(e)) {
r.add(e);
}
}
}
return r;
} catch (NoSuchMethodException e) {
return Collections.<IJavaElement>emptyList();
}
}
private List<IJavaElement> selectByProperties(List<IJavaElement> r, JSONObject o) {
final Properties p;
if (o.has("select")) {
p = PropertyHelper.fromString(o.getString("select"), new String[][] { { "row", "column" } });
return collectCells(r, new RowColPropertyPredicate(p.getProperty("row"), p.getProperty("column")));
} else {
p = PropertyHelper.asProperties(o);
return collectCells(r, new PropertyPredicate(p));
}
}
public Component getEditor(final int viewRow, final int viewCol) {
return EventQueueWait.exec(new Callable<Component>() {
@Override
public Component call() throws Exception {
validate(viewRow, viewCol);
JTable table = (JTable) getComponent();
Rectangle bounds = table.getCellRect(viewRow, viewCol, false);
MouseEvent me = new MouseEvent(table, MouseEvent.MOUSE_CLICKED, System.currentTimeMillis(), 0,
bounds.x + bounds.width / 2, bounds.y + bounds.height / 2, 2, false);
table.editCellAt(viewRow, viewCol, me);
Component c = table.getEditorComponent();
if (c instanceof JComponent) {
((JComponent) c).putClientProperty("marathon.celleditor", true);
((JComponent) c).putClientProperty("marathon.celleditor.parent", table);
}
return c;
}
});
}
@Override
public String _getText() {
JTable table = (JTable) getComponent();
int rows = table.getRowCount();
int cols = table.getColumnCount();
JSONArray r = new JSONArray();
for (int i = 0; i < rows; i++) {
JSONArray c = new JSONArray();
for (int j = 0; j < cols; j++) {
c.put(new JTableCellJavaElement(JTableJavaElement.this, i, j)._getText());
}
r.put(c);
}
return r.toString();
}
private void validate(int viewRow, int viewCol) {
JTable table = (JTable) getComponent();
try {
int row = table.convertRowIndexToModel(viewRow);
int col = table.convertColumnIndexToModel(viewCol);
TableModel model = table.getModel();
if (row >= 0 && row < model.getRowCount() && col >= 0 && col < model.getColumnCount()) {
if (table.isCellEditable(viewRow, viewCol)) {
return;
} else {
throw new NoSuchElementException(
"The cell is not editable on JTable: (" + viewRow + ", " + viewCol + ")", null);
}
}
} catch (IndexOutOfBoundsException e) {
}
throw new NoSuchElementException("Invalid row/col for JTable: (" + viewRow + ", " + viewCol + ")", null);
}
public String getContent() {
return new JSONArray(getContent((JTable) component)).toString();
}
public static String[][] getContent(JTable component) {
int rows = component.getRowCount();
int cols = component.getColumnCount();
String[][] content = new String[rows][cols];
for (int i = 0; i < rows; i++) {
for (int j = 0; j < cols; j++) {
Object valueAt = component.getValueAt(i, j);
if (valueAt == null) {
valueAt = "";
}
content[i][j] = valueAt.toString();
}
}
return content;
}
@Override
public boolean marathon_select(JSONArray jsonArray) {
List<IJavaElement> l = new ArrayList<IJavaElement>();
for (int i = 0; i < jsonArray.length(); i++) {
JSONObject o = jsonArray.getJSONObject(i);
selectByProperties(l, o);
}
int[] rows = new int[l.size()];
int[] cols = new int[l.size()];
int index = 0;
for (IJavaElement e : l) {
rows[index] = ((JTableCellJavaElement) e).getRow();
cols[index] = ((JTableCellJavaElement) e).getCol();
}
selectRowsColumns((JTable) component, rows, cols);
return true;
}
@Override
public boolean marathon_select(String text) {
JTable table = (JTable) component;
boolean cellEditing = table.isEditing();
if (cellEditing) {
return true;
}
if ("".equals(text)) {
table.clearSelection();
return true;
}
int[] rows;
int[] cols;
if ("all".equals(text)) {
int rowCount = table.getRowCount();
int columnCount = table.getColumnCount();
rows = new int[rowCount];
cols = new int[columnCount];
for (int i = 0; i < rowCount; i++) {
rows[i] = i;
}
for (int i = 0; i < columnCount; i++) {
cols[i] = i;
}
} else {
rows = parseRows(text);
String[] colNames = parseCols(text);
cols = new int[colNames.length];
for (int i = 0; i < colNames.length; i++) {
cols[i] = getColumnIndex(colNames[i]);
}
}
return selectRowsColumns(table, rows, cols);
}
private boolean selectRowsColumns(JTable table, int[] rows, int[] cols) {
int rowCount = table.getRowCount();
for (int r : rows) {
if (r >= rowCount) {
return false;
}
}
table.clearSelection();
for (int c : cols) {
table.addColumnSelectionInterval(c, c);
}
for (int r : rows) {
table.addRowSelectionInterval(r, r);
}
return true;
}
private int getColumnIndex(String columnName) {
JTable table = (JTable) component;
int ncolumns = table.getColumnCount();
for (int i = 0; i < ncolumns; i++) {
String column = getColumnName(i);
if (columnName.equals(escape(column))) {
return i;
}
}
if (columnName.length() == 1 && columnName.charAt(0) >= 'A' && columnName.charAt(0) <= 'Z')
return columnName.charAt(0) - 'A';
throw new RuntimeException("Could not find column " + columnName + " in table");
}
private String escape(String columnName) {
return columnName.replaceAll("#", "##").replaceAll(",", "#;");
}
private int[] parseRows(String s) {
String rowText = "";
int i = s.indexOf("rows:");
if (i != -1) {
int j = s.indexOf("columns:");
if (j == -1) {
rowText = s.substring(i + 5);
} else {
rowText = s.substring(i + 5, j);
}
int k = rowText.indexOf('[');
int l = rowText.indexOf(']');
rowText = rowText.substring(k + 1, l);
}
StringTokenizer tokenizer = new StringTokenizer(rowText, ", ");
List<String> rows = new ArrayList<String>();
while (tokenizer.hasMoreElements()) {
rows.add(tokenizer.nextToken());
}
int irows[] = new int[rows.size()];
for (int j = 0; j < irows.length; j++) {
try {
irows[j] = Integer.parseInt(rows.get(j));
} catch (Throwable t) {
return new int[0];
}
}
return irows;
}
private String[] parseCols(String s) {
String colText = "";
int i = s.indexOf("columns:");
if (i != -1) {
colText = s.substring(i + 8);
int k = colText.indexOf('[');
int l = colText.indexOf(']');
colText = colText.substring(k + 1, l);
}
List<String> cols = new ArrayList<String>();
StringTokenizer tokenizer = new StringTokenizer(colText, ",");
while (tokenizer.hasMoreElements()) {
cols.add(tokenizer.nextToken());
}
return cols.toArray(new String[cols.size()]);
}
}
| |
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.trino.operator;
import com.google.common.util.concurrent.ListenableFuture;
import io.trino.connector.CatalogName;
import io.trino.execution.buffer.PagesSerde;
import io.trino.execution.buffer.PagesSerdeFactory;
import io.trino.execution.buffer.SerializedPage;
import io.trino.metadata.Split;
import io.trino.spi.Page;
import io.trino.spi.connector.UpdatablePageSource;
import io.trino.split.RemoteSplit;
import io.trino.sql.planner.plan.PlanNodeId;
import java.net.URI;
import java.util.Optional;
import java.util.function.Supplier;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static java.util.Objects.requireNonNull;
/**
 * Source operator that consumes pages produced by remote tasks. Splits added
 * to this operator are {@link RemoteSplit}s naming task output buffers; a
 * single {@link ExchangeClient}, shared by all drivers created from the same
 * factory, fetches serialized pages which are deserialized and emitted
 * downstream.
 */
public class ExchangeOperator
        implements SourceOperator
{
    public static final CatalogName REMOTE_CONNECTOR_ID = new CatalogName("$remote");

    public static class ExchangeOperatorFactory
            implements SourceOperatorFactory
    {
        private final int operatorId;
        private final PlanNodeId sourceId;
        private final ExchangeClientSupplier exchangeClientSupplier;
        private final PagesSerdeFactory serdeFactory;
        private final RetryPolicy retryPolicy;
        // Lazily created on first createOperator() and shared by all operators
        // from this factory, so every driver polls the same exchange.
        private ExchangeClient exchangeClient;
        private boolean closed;

        public ExchangeOperatorFactory(
                int operatorId,
                PlanNodeId sourceId,
                ExchangeClientSupplier exchangeClientSupplier,
                PagesSerdeFactory serdeFactory,
                RetryPolicy retryPolicy)
        {
            this.operatorId = operatorId;
            // Null-check every reference argument up front (consistent with the
            // operator constructor) so failures point at the caller, not a
            // later dereference.
            this.sourceId = requireNonNull(sourceId, "sourceId is null");
            this.exchangeClientSupplier = requireNonNull(exchangeClientSupplier, "exchangeClientSupplier is null");
            this.serdeFactory = requireNonNull(serdeFactory, "serdeFactory is null");
            this.retryPolicy = requireNonNull(retryPolicy, "retryPolicy is null");
        }

        @Override
        public PlanNodeId getSourceId()
        {
            return sourceId;
        }

        @Override
        public SourceOperator createOperator(DriverContext driverContext)
        {
            checkState(!closed, "Factory is already closed");
            TaskContext taskContext = driverContext.getPipelineContext().getTaskContext();
            OperatorContext operatorContext = driverContext.addOperatorContext(operatorId, sourceId, ExchangeOperator.class.getSimpleName());
            if (exchangeClient == null) {
                exchangeClient = exchangeClientSupplier.get(driverContext.getPipelineContext().localSystemMemoryContext(), taskContext::sourceTaskFailed, retryPolicy);
            }
            return new ExchangeOperator(
                    operatorContext,
                    sourceId,
                    serdeFactory.createPagesSerde(),
                    exchangeClient);
        }

        @Override
        public void noMoreOperators()
        {
            closed = true;
        }
    }

    private final OperatorContext operatorContext;
    private final PlanNodeId sourceId;
    private final ExchangeClient exchangeClient;
    private final PagesSerde serde;

    private ListenableFuture<Void> isBlocked = NOT_BLOCKED;

    public ExchangeOperator(
            OperatorContext operatorContext,
            PlanNodeId sourceId,
            PagesSerde serde,
            ExchangeClient exchangeClient)
    {
        this.operatorContext = requireNonNull(operatorContext, "operatorContext is null");
        this.sourceId = requireNonNull(sourceId, "sourceId is null");
        this.exchangeClient = requireNonNull(exchangeClient, "exchangeClient is null");
        this.serde = requireNonNull(serde, "serde is null");
        operatorContext.setInfoSupplier(exchangeClient::getStatus);
    }

    @Override
    public PlanNodeId getSourceId()
    {
        return sourceId;
    }

    /**
     * Registers a remote task's output buffer location with the exchange
     * client. Only remote splits are legal here.
     */
    @Override
    public Supplier<Optional<UpdatablePageSource>> addSplit(Split split)
    {
        requireNonNull(split, "split is null");
        checkArgument(split.getCatalogName().equals(REMOTE_CONNECTOR_ID), "split is not a remote split");
        RemoteSplit remoteSplit = (RemoteSplit) split.getConnectorSplit();
        exchangeClient.addLocation(remoteSplit.getTaskId(), URI.create(remoteSplit.getLocation()));
        return Optional::empty;
    }

    @Override
    public void noMoreSplits()
    {
        exchangeClient.noMoreLocations();
    }

    @Override
    public OperatorContext getOperatorContext()
    {
        return operatorContext;
    }

    @Override
    public void finish()
    {
        close();
    }

    @Override
    public boolean isFinished()
    {
        return exchangeClient.isFinished();
    }

    @Override
    public ListenableFuture<Void> isBlocked()
    {
        // Avoid registering a new callback in the ExchangeClient when one is already pending
        if (isBlocked.isDone()) {
            isBlocked = exchangeClient.isBlocked();
            if (isBlocked.isDone()) {
                isBlocked = NOT_BLOCKED;
            }
        }
        return isBlocked;
    }

    @Override
    public boolean needsInput()
    {
        return false;
    }

    @Override
    public void addInput(Page page)
    {
        throw new UnsupportedOperationException(getClass().getName() + " cannot take input");
    }

    /**
     * Polls one serialized page from the exchange, records network/processed
     * input stats, and returns the deserialized page; null when none is
     * currently available.
     */
    @Override
    public Page getOutput()
    {
        SerializedPage page = exchangeClient.pollPage();
        if (page == null) {
            return null;
        }
        operatorContext.recordNetworkInput(page.getSizeInBytes(), page.getPositionCount());
        Page deserializedPage = serde.deserialize(page);
        operatorContext.recordProcessedInput(deserializedPage.getSizeInBytes(), page.getPositionCount());
        return deserializedPage;
    }

    @Override
    public void close()
    {
        exchangeClient.close();
    }
}
| |
/*
Copyright (C) 2016 Electronic Arts Inc. All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Electronic Arts, Inc. ("EA") nor the names of
its contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY ELECTRONIC ARTS AND ITS CONTRIBUTORS "AS IS" AND ANY
EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL ELECTRONIC ARTS OR ITS CONTRIBUTORS BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package cloud.orbit.actors.test;
import org.slf4j.Logger;
import org.slf4j.Marker;
public class LogInterceptor implements Logger
{
private volatile Logger delegate;
public LogInterceptor(final Logger delegate)
{
this.delegate = delegate;
}
protected Logger delegate()
{
return delegate;
}
@Override
public String getName()
{
return delegate.getName();
}
public boolean isTraceEnabled()
{
return delegate().isTraceEnabled();
}
public void trace(String msg)
{
delegate().trace(msg);
if (isTraceEnabled()) message("trace", msg);
}
public void trace(String format, Object arg)
{
delegate().trace(format, arg);
if (isTraceEnabled()) message("trace", format, arg);
}
public void trace(String format, Object arg1, Object arg2)
{
delegate().trace(format, arg1, arg2);
if (isTraceEnabled()) message("trace", format, arg1, arg2);
}
public void trace(String format, Object... arguments)
{
delegate().trace(format, arguments);
if (isTraceEnabled()) message("trace", format, arguments);
}
public void trace(String msg, Throwable t)
{
delegate().trace(msg, t);
if (isTraceEnabled()) message("trace", msg, t);
}
public boolean isTraceEnabled(Marker marker)
{
return delegate().isTraceEnabled(marker);
}
public void trace(Marker marker, String msg)
{
delegate().trace(marker, msg);
if (isTraceEnabled()) message("trace", marker, msg);
}
public void trace(Marker marker, String format, Object arg)
{
delegate().trace(marker, format, arg);
if (isTraceEnabled()) message("trace", marker, format, arg);
}
public void trace(Marker marker, String format, Object arg1, Object arg2)
{
delegate().trace(marker, format, arg1, arg2);
if (isTraceEnabled()) message("trace", marker, format, arg1, arg2);
}
public void trace(Marker marker, String format, Object... arguments)
{
delegate().trace(marker, format, arguments);
if (isTraceEnabled()) message("trace", marker, format, arguments);
}
public void trace(Marker marker, String msg, Throwable t)
{
delegate().trace(marker, msg, t);
if (isTraceEnabled()) message("trace", marker, msg, t);
}
public boolean isDebugEnabled()
{
return delegate().isDebugEnabled();
}
public void debug(String msg)
{
delegate().debug(msg);
if (isDebugEnabled()) message("debug", msg);
}
public void debug(String format, Object arg)
{
delegate().debug(format, arg);
if (isDebugEnabled()) message("debug", format, arg);
}
public void debug(String format, Object arg1, Object arg2)
{
delegate().debug(format, arg1, arg2);
if (isDebugEnabled()) message("debug", format, arg1, arg2);
}
public void debug(String format, Object... arguments)
{
delegate().debug(format, arguments);
if (isDebugEnabled()) message("debug", format, arguments);
}
public void debug(String msg, Throwable t)
{
delegate().debug(msg, t);
if (isDebugEnabled()) message("debug", msg, t);
}
public boolean isDebugEnabled(Marker marker)
{
return delegate().isDebugEnabled(marker);
}
public void debug(Marker marker, String msg)
{
delegate().debug(marker, msg);
if (isDebugEnabled()) message("debug", marker, msg);
}
public void debug(Marker marker, String format, Object arg)
{
delegate().debug(marker, format, arg);
if (isDebugEnabled()) message("debug", marker, format, arg);
}
public void debug(Marker marker, String format, Object arg1, Object arg2)
{
delegate().debug(marker, format, arg1, arg2);
if (isDebugEnabled()) message("debug", marker, format, arg1, arg2);
}
public void debug(Marker marker, String format, Object... arguments)
{
delegate().debug(marker, format, arguments);
if (isDebugEnabled()) message("debug", marker, format, arguments);
}
public void debug(Marker marker, String msg, Throwable t)
{
delegate().debug(marker, msg, t);
if (isDebugEnabled()) message("debug", marker, msg, t);
}
public boolean isInfoEnabled()
{
return delegate().isInfoEnabled();
}
public void info(String msg)
{
delegate().info(msg);
if (isInfoEnabled()) message("info", msg);
}
public void info(String format, Object arg)
{
delegate().info(format, arg);
if (isInfoEnabled()) message("info", format, arg);
}
public void info(String format, Object arg1, Object arg2)
{
delegate().info(format, arg1, arg2);
if (isInfoEnabled()) message("info", format, arg1, arg2);
}
public void info(String format, Object... arguments)
{
delegate().info(format, arguments);
if (isInfoEnabled()) message("info", format, arguments);
}
public void info(String msg, Throwable t)
{
delegate().info(msg, t);
if (isInfoEnabled()) message("info", msg, t);
}
public boolean isInfoEnabled(Marker marker)
{
return delegate().isInfoEnabled(marker);
}
public void info(Marker marker, String msg)
{
delegate().info(marker, msg);
if (isInfoEnabled()) message("info", marker, msg);
}
public void info(Marker marker, String format, Object arg)
{
delegate().info(marker, format, arg);
if (isInfoEnabled()) message("info", marker, format, arg);
}
public void info(Marker marker, String format, Object arg1, Object arg2)
{
delegate().info(marker, format, arg1, arg2);
if (isInfoEnabled()) message("info", marker, format, arg1, arg2);
}
public void info(Marker marker, String format, Object... arguments)
{
delegate().info(marker, format, arguments);
if (isInfoEnabled()) message("info", marker, format, arguments);
}
public void info(Marker marker, String msg, Throwable t)
{
delegate().info(marker, msg, t);
if (isInfoEnabled()) message("info", marker, msg, t);
}
public boolean isWarnEnabled()
{
return delegate().isWarnEnabled();
}
public void warn(String msg)
{
delegate().warn(msg);
if (isWarnEnabled()) message("warn", msg);
}
public void warn(String format, Object arg)
{
delegate().warn(format, arg);
if (isWarnEnabled()) message("warn", format, arg);
}
public void warn(String format, Object arg1, Object arg2)
{
delegate().warn(format, arg1, arg2);
if (isWarnEnabled()) message("warn", format, arg1, arg2);
}
public void warn(String format, Object... arguments)
{
delegate().warn(format, arguments);
if (isWarnEnabled()) message("warn", format, arguments);
}
public void warn(String msg, Throwable t)
{
delegate().warn(msg, t);
if (isWarnEnabled()) message("warn", msg, t);
}
public boolean isWarnEnabled(Marker marker)
{
return delegate().isWarnEnabled(marker);
}
public void warn(Marker marker, String msg)
{
delegate().warn(marker, msg);
if (isWarnEnabled()) message("warn", marker, msg);
}
public void warn(Marker marker, String format, Object arg)
{
delegate().warn(marker, format, arg);
if (isWarnEnabled()) message("warn", marker, format, arg);
}
public void warn(Marker marker, String format, Object arg1, Object arg2)
{
delegate().warn(marker, format, arg1, arg2);
if (isWarnEnabled()) message("warn", marker, format, arg1, arg2);
}
public void warn(Marker marker, String format, Object... arguments)
{
delegate().warn(marker, format, arguments);
if (isWarnEnabled()) message("warn", marker, format, arguments);
}
public void warn(Marker marker, String msg, Throwable t)
{
delegate().warn(marker, msg, t);
if (isWarnEnabled()) message("warn", marker, msg, t);
}
public boolean isErrorEnabled()
{
return delegate().isErrorEnabled();
}
public void error(String msg)
{
delegate().error(msg);
if (isErrorEnabled()) message("error", msg);
}
public void error(String format, Object arg)
{
delegate().error(format, arg);
if (isErrorEnabled()) message("error", format, arg);
}
public void error(String format, Object arg1, Object arg2)
{
delegate().error(format, arg1, arg2);
if (isErrorEnabled()) message("error", format, arg1, arg2);
}
public void error(String format, Object... arguments)
{
delegate().error(format, arguments);
if (isErrorEnabled()) message("error", format, arguments);
}
public void error(String msg, Throwable t)
{
delegate().error(msg, t);
if (isErrorEnabled()) message("error", msg, t);
}
public boolean isErrorEnabled(Marker marker)
{
return delegate().isErrorEnabled(marker);
}
public void error(Marker marker, String msg)
{
delegate().error(marker, msg);
if (isErrorEnabled()) message("error", marker, msg);
}
public void error(Marker marker, String format, Object arg)
{
delegate().error(marker, format, arg);
if (isErrorEnabled()) message("error", marker, format, arg);
}
public void error(Marker marker, String format, Object arg1, Object arg2)
{
delegate().error(marker, format, arg1, arg2);
if (isErrorEnabled()) message("error", marker, format, arg1, arg2);
}
public void error(Marker marker, String format, Object... arguments)
{
delegate().error(marker, format, arguments);
if (isErrorEnabled()) message("error", marker, format, arguments);
}
public void error(Marker marker, String msg, Throwable t)
{
delegate().error(marker, msg, t);
if (isErrorEnabled()) message("error", marker, msg, t);
}
protected void message(final String type, final Marker marker, final String format, final Object... arguments)
{
}
protected void message(final String type, final Marker marker, final String msg, final Throwable t)
{
}
protected void message(final String type, final String format, final Object... arguments)
{
message(type, null, format, arguments);
}
protected void message(final String type, final String msg, final Throwable t)
{
message(type, null, msg, t);
}
}
| |
/*
* Copyright 2016-present Open Networking Laboratory
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.onosproject.store.intent.impl;
import org.junit.Before;
import org.junit.Test;
import org.onlab.junit.NullScheduledExecutor;
import org.onlab.packet.IpAddress;
import org.onosproject.cluster.ClusterServiceAdapter;
import org.onosproject.cluster.ControllerNode;
import org.onosproject.cluster.DefaultControllerNode;
import org.onosproject.cluster.Leader;
import org.onosproject.cluster.Leadership;
import org.onosproject.cluster.LeadershipEvent;
import org.onosproject.cluster.LeadershipEventListener;
import org.onosproject.cluster.LeadershipService;
import org.onosproject.cluster.LeadershipServiceAdapter;
import org.onosproject.cluster.NodeId;
import org.onosproject.common.event.impl.TestEventDispatcher;
import org.onosproject.net.intent.Key;
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import static junit.framework.TestCase.assertFalse;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
import static org.easymock.EasyMock.verify;
import static org.junit.Assert.assertTrue;
/**
* Unit tests for the IntentPartitionManager class.
*/
public class IntentPartitionManagerTest {
private final LeadershipEvent event
= new LeadershipEvent(LeadershipEvent.Type.CANDIDATES_CHANGED,
new Leadership(ELECTION_PREFIX + "0",
new Leader(MY_NODE_ID, 0, 0),
Arrays.asList(MY_NODE_ID, OTHER_NODE_ID)));
private static final NodeId MY_NODE_ID = new NodeId("local");
private static final NodeId OTHER_NODE_ID = new NodeId("other");
private static final NodeId INACTIVE_NODE_ID = new NodeId("inactive");
private static final String ELECTION_PREFIX = "intent-partition-";
private LeadershipService leadershipService;
private LeadershipEventListener leaderListener;
private IntentPartitionManager partitionManager;
@Before
public void setUp() {
leadershipService = createMock(LeadershipService.class);
leadershipService.addListener(anyObject(LeadershipEventListener.class));
expectLastCall().andDelegateTo(new TestLeadershipService());
for (int i = 0; i < IntentPartitionManager.NUM_PARTITIONS; i++) {
expect(leadershipService.runForLeadership(ELECTION_PREFIX + i))
.andReturn(null)
.times(1);
}
partitionManager = new IntentPartitionManager()
.withScheduledExecutor(new NullScheduledExecutor());
partitionManager.clusterService = new TestClusterService();
partitionManager.localNodeId = MY_NODE_ID;
partitionManager.leadershipService = leadershipService;
partitionManager.eventDispatcher = new TestEventDispatcher();
}
/**
* Configures a mock leadership service to have the specified number of
* partitions owned by the local node and all other partitions owned by a
* (fake) remote node.
*
* @param numMine number of partitions that should be owned by the local node
*/
private void setUpLeadershipService(int numMine) {
List<NodeId> allNodes = Arrays.asList(MY_NODE_ID, OTHER_NODE_ID);
for (int i = 0; i < numMine; i++) {
expect(leadershipService.getLeadership(ELECTION_PREFIX + i))
.andReturn(new Leadership(ELECTION_PREFIX + i,
new Leader(MY_NODE_ID, 1, 1000),
allNodes))
.anyTimes();
}
for (int i = numMine; i < IntentPartitionManager.NUM_PARTITIONS; i++) {
expect(leadershipService.getLeadership(ELECTION_PREFIX + i))
.andReturn(new Leadership(ELECTION_PREFIX + i,
new Leader(OTHER_NODE_ID, 1, 1000),
allNodes))
.anyTimes();
}
}
/**
* Tests that the PartitionManager's activate method correctly runs for
* all the leader elections that it should.
*/
@Test
public void testActivate() {
reset(leadershipService);
leadershipService.addListener(anyObject(LeadershipEventListener.class));
for (int i = 0; i < IntentPartitionManager.NUM_PARTITIONS; i++) {
expect(leadershipService.runForLeadership(ELECTION_PREFIX + i))
.andReturn(null)
.times(1);
}
replay(leadershipService);
partitionManager.activate();
verify(leadershipService);
}
/**
* Tests that the isMine method returns the correct result based on the
* underlying leadership service data.
*/
@Test
public void testIsMine() {
// We'll own only the first partition
setUpLeadershipService(1);
replay(leadershipService);
Key myKey = new ControllableHashKey(0);
Key notMyKey = new ControllableHashKey(1);
assertTrue(partitionManager.isMine(myKey));
assertFalse(partitionManager.isMine(notMyKey));
// Make us the owner of 4 partitions now
reset(leadershipService);
setUpLeadershipService(4);
replay(leadershipService);
assertTrue(partitionManager.isMine(myKey));
// notMyKey is now my key because because we're in control of that
// partition now
assertTrue(partitionManager.isMine(notMyKey));
assertFalse(partitionManager.isMine(new ControllableHashKey(4)));
}
/**
* Tests sending in LeadershipServiceEvents in the case when we have
* too many partitions. The event will trigger the partition manager to
* schedule a rebalancing activity.
*/
@Test
public void testRebalanceScheduling() {
// We have all the partitions so we'll need to relinquish some
setUpLeadershipService(IntentPartitionManager.NUM_PARTITIONS);
replay(leadershipService);
partitionManager.activate();
// Send in the event
leaderListener.event(event);
assertTrue(partitionManager.rebalanceScheduled.get());
verify(leadershipService);
}
/**
* Tests rebalance will trigger the right now of leadership withdraw calls.
*/
@Test
public void testRebalance() {
// We have all the partitions so we'll need to relinquish some
setUpLeadershipService(IntentPartitionManager.NUM_PARTITIONS);
leadershipService.withdraw(anyString());
expectLastCall().times(7);
replay(leadershipService);
partitionManager.activate();
// trigger rebalance
partitionManager.doRebalance();
verify(leadershipService);
}
/**
* Tests that attempts to rebalance when the paritions are already
* evenly distributed does not result in any relinquish attempts.
*/
@Test
public void testNoRebalance() {
// Partitions are already perfectly balanced among the two active instances
setUpLeadershipService(IntentPartitionManager.NUM_PARTITIONS / 2);
replay(leadershipService);
partitionManager.activate();
// trigger rebalance
partitionManager.doRebalance();
verify(leadershipService);
reset(leadershipService);
// We have a smaller share than we should
setUpLeadershipService(IntentPartitionManager.NUM_PARTITIONS / 2 - 1);
replay(leadershipService);
// trigger rebalance
partitionManager.doRebalance();
verify(leadershipService);
}
/**
* LeadershipService that allows us to grab a reference to
* PartitionManager's LeadershipEventListener.
*/
public class TestLeadershipService extends LeadershipServiceAdapter {
@Override
public void addListener(LeadershipEventListener listener) {
leaderListener = listener;
}
}
/**
 * ClusterService backed by a fixed three-node cluster: the local node,
 * one other active node, and one inactive node.
 */
private class TestClusterService extends ClusterServiceAdapter {
    private final ControllerNode self =
        new DefaultControllerNode(MY_NODE_ID, IpAddress.valueOf(1));
    private final ControllerNode otherNode =
        new DefaultControllerNode(OTHER_NODE_ID, IpAddress.valueOf(2));
    private final ControllerNode inactiveNode =
        new DefaultControllerNode(INACTIVE_NODE_ID, IpAddress.valueOf(3));

    Set<ControllerNode> nodes;

    public TestClusterService() {
        nodes = new HashSet<>();
        nodes.add(self);
        nodes.add(otherNode);
        nodes.add(inactiveNode);
    }

    @Override
    public ControllerNode getLocalNode() {
        return self;
    }

    @Override
    public Set<ControllerNode> getNodes() {
        return nodes;
    }

    @Override
    public ControllerNode getNode(NodeId nodeId) {
        // Look up by id; an unknown id fails fast via the Optional,
        // which is acceptable inside these tests.
        return nodes.stream()
            .filter(node -> nodeId.equals(node.id()))
            .findFirst()
            .get();
    }

    @Override
    public ControllerNode.State getState(NodeId nodeId) {
        // Only the designated inactive node reports INACTIVE.
        if (nodeId.equals(INACTIVE_NODE_ID)) {
            return ControllerNode.State.INACTIVE;
        }
        return ControllerNode.State.ACTIVE;
    }
}
/**
 * A key that always hashes to a value provided to the constructor. This
 * allows us to control the hash of the key for unit tests.
 */
private class ControllableHashKey extends Key {

    /**
     * Creates a key with the given fixed hash.
     *
     * @param hash value to be returned by {@code hash()}
     */
    protected ControllableHashKey(long hash) {
        super(hash);
    }

    @Override
    public int hashCode() {
        // Long.hashCode avoids the boxing and varargs-array allocation
        // that Objects.hash(hash()) would incur per call.
        return Long.hashCode(hash());
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ControllableHashKey)) {
            return false;
        }
        ControllableHashKey that = (ControllableHashKey) obj;
        // Primitive comparison; Objects.equals would box both long values.
        return this.hash() == that.hash();
    }

    @Override
    public int compareTo(Key o) {
        // Long.compare is the idiomatic, allocation-free primitive compare
        // (the previous code boxed hash() into a Long just to compare).
        return Long.compare(hash(), o.hash());
    }
}
}
| |
package com.planet_ink.coffee_mud.Commands;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.CMClass.CMObjectType;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.io.*;
import java.util.*;
/*
Copyright 2004-2016 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings({"unchecked","rawtypes"})
/**
 * The LOAD command.  For ordinary players it loads ammunition into an
 * ammunition-using weapon (held or free-standing).  For archons with the
 * LOADUNLOAD security flag it (re)loads server-side artifacts instead:
 * RESOURCE files, FACTION files, or Java classes of any CMObjectType --
 * including on-the-fly compilation of .java sources for SysOps.
 */
public class Load extends StdCommand
{
	public Load()
	{
	}

	// Trigger word(s) for this command.
	private final String[] access = I(new String[] { "LOAD" });

	@Override
	public String[] getAccessWords()
	{
		return access;
	}

	/**
	 * Concatenates a string array with the string forms of the given
	 * CMObjectType values into a single new array (set1 first, then set2).
	 *
	 * @param set1 leading string entries
	 * @param set2 object types appended as their toString() values
	 * @return a new combined array of length set1.length + set2.length
	 */
	public final String[] combine(final String[] set1, final CMClass.CMObjectType[] set2)
	{
		final String[] fset=new String[set1.length+set2.length];
		for(int x=0;x<set1.length;x++)
			fset[x]=set1[x];
		for(int x=0;x<set2.length;x++)
			fset[set1.length+x]=set2[x].toString();
		return fset;
	}

	// Everything an archon may LOAD: RESOURCE, FACTION, plus every class type.
	// NOTE(review): this is an instance field rebuilt per command instance;
	// it looks like it could be static final -- confirm no subclass relies on it.
	public final String ARCHON_LIST[]=combine(new String[]{"RESOURCE","FACTION"},CMClass.CMObjectType.values());

	/**
	 * Returns the first usable ammunition of the given type from the list:
	 * not destroyed, with rounds remaining, and a case-insensitive type match.
	 *
	 * @param type the ammunition type required by the weapon
	 * @param ammos candidate ammunition items
	 * @return the first matching ammunition, or null if none qualifies
	 */
	public final Ammunition getNextAmmunition(String type, List<Ammunition> ammos)
	{
		for(final Ammunition ammo : ammos)
		{
			if((!ammo.amDestroyed())&&(ammo.ammunitionRemaining() > 0)&&(ammo.ammunitionType().equalsIgnoreCase(type)))
				return ammo;
		}
		return null;
	}

	/**
	 * Executes the LOAD command.
	 *
	 * @param mob the player or mob issuing the command
	 * @param commands the parsed command words, including the trigger word
	 * @param metaFlags meta-command flags
	 * @return true when mob is null or a class was successfully loaded;
	 *         false on every other path (errors, messages, weapon loading)
	 * @throws java.io.IOException declared for prompt/IO use by subclasses
	 */
	@Override
	public boolean execute(MOB mob, List<String> commands, int metaFlags)
		throws java.io.IOException
	{
		if(mob==null)
			return true;
		// Archon semantics only apply with the LOADUNLOAD security flag...
		boolean tryArchon=CMSecurity.isAllowed(mob,mob.location(),CMSecurity.SecFlag.LOADUNLOAD);
		if(commands.size()<3)
		{
			if(tryArchon)
				mob.tell(L("LOAD what? Try @x1 [CLASSNAME]",CMParms.toListString(ARCHON_LIST)));
			else
				mob.tell(L("Load what where?"));
			return false;
		}
		String what=commands.get(1);
		String name=CMParms.combine(commands,2);
		if(tryArchon)
		{
			// ...but even an archon gets weapon-loading semantics when the
			// target resolves to an ammunition-requiring weapon (wielded, or
			// a free-standing one in the room) -- unless the first argument
			// explicitly names an archon-loadable thing (RESOURCE/FACTION/type).
			Item I=mob.fetchWieldedItem();
			if((I instanceof AmmunitionWeapon)&&((AmmunitionWeapon)I).requiresAmmunition())
				tryArchon=false;
			else
			{
				I=mob.location().findItem(null, name);
				if((I instanceof AmmunitionWeapon)
				&&(((AmmunitionWeapon)I).requiresAmmunition())
				&&((AmmunitionWeapon)I).isFreeStanding())
					tryArchon=false;
			}
			for(final String aList : ARCHON_LIST)
			{
				if(what.equalsIgnoreCase(aList))
					tryArchon=true;
			}
		}
		if(!tryArchon)
		{
			// Player path: LOAD <ammo> <weapon...>
			// Drop the trigger word; "what" (commands[0] after removal) names the ammo.
			commands.remove(0);
			// NOTE(review): raw XVector here triggers the class-level
			// @SuppressWarnings; XVector<String> would be cleaner.
			final XVector ammoV=new XVector(what);
			final List<Item> baseAmmoItems=CMLib.english().fetchItemList(mob,mob,null,ammoV,Wearable.FILTER_UNWORNONLY,false);
			final List<Ammunition> ammos=new XVector<Ammunition>();
			for (Item I : baseAmmoItems)
			{
				if(I instanceof Ammunition)
				{
					ammos.add((Ammunition)I);
				}
			}
			if(baseAmmoItems.size()==0)
				mob.tell(L("You don't seem to have any ammunition like that."));
			else
			if((ammos.size()==0)&&(!what.equalsIgnoreCase("all")))
				mob.tell(L("You can't seem to use that as ammunition."));
			else
			{
				// Drop the ammo word; what remains names the weapon(s).
				commands.remove(0);
				final List<Item> baseItems=CMLib.english().fetchItemList(mob,mob,null,commands,Wearable.FILTER_ANY,false);
				baseItems.addAll(mob.location().findItems(null,CMParms.combine(commands,0)));
				// Keep only loadable targets: ammo-requiring weapons that are
				// either owned by the mob or free-standing in the room.
				final List<AmmunitionWeapon> items=new XVector<AmmunitionWeapon>();
				for (Item I : baseItems)
				{
					if((I instanceof AmmunitionWeapon)
					&&((AmmunitionWeapon)I).requiresAmmunition())
					{
						if(mob.isMine(I))
							items.add((AmmunitionWeapon)I);
						else
						if(((AmmunitionWeapon)I).isFreeStanding())
							items.add((AmmunitionWeapon)I);
					}
				}
				boolean doneOne=false;
				if(baseItems.size()==0)
					mob.tell(L("You don't seem to have that."));
				else
				if(items.size()==0)
					mob.tell(L("You can't seem to load that."));
				else
				for(final AmmunitionWeapon W : items)
				{
					Ammunition ammunition = getNextAmmunition(W.ammunitionType(),ammos);
					if(ammunition==null)
						mob.tell(L("You are all out of @x1.",W.ammunitionType()));
					else
					{
						// Keep firing MSG_RELOAD messages until the weapon is
						// full (the "||(!doneOne)" guarantees at least one
						// attempt per weapon) or the room vetoes the message.
						while((ammunition != null)
						&&((W.ammunitionRemaining() < W.ammunitionCapacity())||(!doneOne)))
						{
							final CMMsg newMsg=CMClass.getMsg(mob,W,ammunition,CMMsg.MSG_RELOAD,L("<S-NAME> reload(s) <T-NAME> with <O-NAME>."));
							if(mob.location().okMessage(mob,newMsg))
							{
								doneOne=true;
								mob.location().send(mob,newMsg);
								ammunition = getNextAmmunition(W.ammunitionType(),ammos);
							}
							else
								break;
						}
					}
				}
			}
		}
		else
		{
			// Archon path: LOAD FACTION|RESOURCE|<CMObjectType>|CLASS <name>
			if((what.equalsIgnoreCase("FACTION"))
			&&(CMSecurity.isAllowed(mob,mob.location(),CMSecurity.SecFlag.CMDFACTIONS)))
			{
				final Faction F=CMLib.factions().getFaction(name);
				if(F==null)
					mob.tell(L("Faction file '@x1' was not found.",name));
				else
					mob.tell(L("Faction '@x1' from file '@x2' was loaded.",F.name(),name));
				return false;
			}
			else
			if(what.equalsIgnoreCase("RESOURCE"))
			{
				final CMFile F=new CMFile(name,mob,CMFile.FLAG_LOGERRORS);
				if((!F.exists())||(!F.canRead()))
					mob.tell(L("File '@x1' could not be accessed.",name));
				else
				{
					final StringBuffer buf=Resources.getFileResource(name,true); // enforces its own security
					if((buf==null)||(buf.length()==0))
						mob.tell(L("Resource '@x1' was not found.",name));
					else
						mob.tell(L("Resource '@x1' was loaded.",name));
				}
			}
			else
			if(CMSecurity.isASysOp(mob))
			{
				try
				{
					if(name.toUpperCase().endsWith(".JAVA"))
					{
						// Compile the .java source in-process via the JDK's
						// javac entry point, reflectively so the mud still
						// boots without tools.jar on the classpath.
						while(name.startsWith("/"))
							name=name.substring(1);
						Class<?> C=null;
						Object CO=null;
						try
						{
							C=Class.forName("com.sun.tools.javac.Main", true, CMClass.instance());
							if(C!=null)
								CO=C.newInstance();
							// NOTE(review): Class.newInstance() is deprecated
							// since Java 9; getDeclaredConstructor().newInstance()
							// is the replacement -- confirm target JDK first.
						}catch(final Exception e)
						{
							Log.errOut("Load",e.getMessage());
						}
						final ByteArrayOutputStream bout=new ByteArrayOutputStream();
						final PrintWriter pout=new PrintWriter(new OutputStreamWriter(bout));
						if(CO==null)
						{
							mob.tell(L("Unable to instantiate compiler. You might try including your Java JDK's lib/tools.jar in your classpath next time you boot the mud."));
							return false;
						}
						final String[] args=new String[]{name};
						if(C!=null)
						{
							// compile(String[], PrintWriter) returns a non-zero
							// Integer on failure; compiler output is captured in
							// bout and echoed raw to the session.
							final java.lang.reflect.Method M=C.getMethod("compile",new Class[]{args.getClass(),PrintWriter.class});
							final Object returnVal=M.invoke(CO,new Object[]{args,pout});
							if((returnVal instanceof Integer)&&(((Integer)returnVal).intValue()!=0))
							{
								pout.flush();
								mob.tell(L("Compile failed:"));
								if(mob.session()!=null)
									mob.session().rawOut(bout.toString());
								return false;
							}
						}
						// Fall through to load the freshly built .class file.
						name=name.substring(0,name.length()-5)+".class";
					}
					// Derive the dotted class name from the path for unloading.
					String unloadClassName=name;
					if(unloadClassName.toUpperCase().endsWith(".CLASS"))
						unloadClassName=unloadClassName.substring(0,unloadClassName.length()-6);
					unloadClassName=unloadClassName.replace('\\','.');
					unloadClassName=unloadClassName.replace('/','.');
					if(what.equalsIgnoreCase("CLASS"))
					{
						// Generic CLASS keyword: infer the concrete object type
						// from an already-registered prototype, if any.
						final Object O=CMClass.getObjectOrPrototype(unloadClassName);
						if(O!=null)
						{
							final CMClass.CMObjectType x=CMClass.getObjectType(O);
							if(x!=null)
								what=x.toString();
						}
					}
					final CMObjectType whatType=CMClass.findObjectType(what);
					if(whatType==null)
						mob.tell(L("Don't know how to load a '@x1'. Try one of the following: @x2",what,CMParms.toListString(ARCHON_LIST)));
					else
					{
						// Unload any existing registration of the same class
						// before loading the new one.
						final Object O=CMClass.getObjectOrPrototype(unloadClassName);
						if((O instanceof CMObject)
						&&(name.toUpperCase().endsWith(".CLASS"))
						&&(CMClass.delClass(whatType,(CMObject)O)))
							mob.tell(L("@x1 was unloaded.",unloadClassName));
						if(CMClass.loadClass(whatType,name,false))
						{
							mob.tell(L("@x1 @x2 was successfully loaded.",CMStrings.capitalizeAndLower(what),name));
							return true;
						}
					}
				}
				catch(final java.lang.Error err)
				{
					// Errors (e.g. linkage problems) are reported to the user
					// rather than allowed to kill the command thread.
					mob.tell(err.getMessage());
				}
				catch(final Exception t)
				{
					Log.errOut("Load",t.getClass().getName()+": "+t.getMessage());
				}
				mob.tell(L("@x1 @x2 was not loaded.",CMStrings.capitalizeAndLower(what),name));
			}
		}
		return false;
	}

	@Override
	public boolean canBeOrdered()
	{
		return true;
	}

	// NOTE(review): delegates straight to super; the override appears redundant
	// unless kept as an explicit extension point -- confirm before removing.
	@Override
	public boolean securityCheck(MOB mob)
	{
		return super.securityCheck(mob);
	}

	@Override
	public double combatActionsCost(final MOB mob, final List<String> cmds)
	{
		return CMProps.getCommandCombatActionCost(ID());
	}

	@Override
	public double actionsCost(final MOB mob, final List<String> cmds)
	{
		return CMProps.getCommandActionCost(ID());
	}
}
| |
package com.krishagni.catissueplus.core.biospecimen.events;
import java.math.BigDecimal;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import org.codehaus.jackson.annotate.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonFilter;
import com.krishagni.catissueplus.core.administrative.domain.PermissibleValue;
import com.krishagni.catissueplus.core.biospecimen.domain.AliquotSpecimensRequirement;
import com.krishagni.catissueplus.core.biospecimen.domain.DerivedSpecimenRequirement;
import com.krishagni.catissueplus.core.biospecimen.domain.SpecimenRequirement;
import com.krishagni.catissueplus.core.common.events.UserSummary;
import com.krishagni.catissueplus.core.common.util.NumUtil;
import com.krishagni.catissueplus.core.common.util.Utility;
@JsonFilter("withoutId")
/**
 * DTO describing a specimen requirement: its identity, collection attributes,
 * label-printing configuration, and (optionally) its child requirements.
 * Provides converters to/from the {@link SpecimenRequirement} domain object
 * and to aliquot/derivative requirement commands.
 *
 * NOTE(review): {@code @JsonIgnore} below comes from the Jackson 1
 * (org.codehaus) package while {@code @JsonFilter} is Jackson 2 (fasterxml);
 * a Jackson 2 ObjectMapper will not honor the codehaus annotation -- confirm
 * which mapper serializes this class.
 */
public class SpecimenRequirementDetail implements Comparable<SpecimenRequirementDetail> {
	private Long id;

	private String name;

	private String code;

	private String lineage;

	private String specimenClass;

	private String type;

	private String anatomicSite;

	private String laterality;

	private String pathology;

	private String storageType;

	private BigDecimal initialQty;

	private BigDecimal concentration;

	private UserSummary collector;

	private String collectionProcedure;

	private String collectionContainer;

	private UserSummary receiver;

	private String labelFmt;

	private String labelAutoPrintMode;

	private Integer labelPrintCopies;

	private Integer sortOrder;

	private Long eventId;

	private List<SpecimenRequirementDetail> children;

	// Import/export-only attributes (not serialized to JSON, see getters).
	private String cpShortTitle;

	private String eventLabel;

	private String parentSrCode;

	private String activityStatus;

	public Long getId() {
		return id;
	}

	public void setId(Long id) {
		this.id = id;
	}

	public String getName() {
		return name;
	}

	public void setName(String name) {
		this.name = name;
	}

	public String getCode() {
		return code;
	}

	public void setCode(String code) {
		this.code = code;
	}

	public String getLineage() {
		return lineage;
	}

	public void setLineage(String lineage) {
		this.lineage = lineage;
	}

	public String getSpecimenClass() {
		return specimenClass;
	}

	public void setSpecimenClass(String specimenClass) {
		this.specimenClass = specimenClass;
	}

	public String getType() {
		return type;
	}

	public void setType(String type) {
		this.type = type;
	}

	public String getAnatomicSite() {
		return anatomicSite;
	}

	public void setAnatomicSite(String anatomicSite) {
		this.anatomicSite = anatomicSite;
	}

	public String getLaterality() {
		return laterality;
	}

	public void setLaterality(String laterality) {
		this.laterality = laterality;
	}

	public String getPathology() {
		return pathology;
	}

	public void setPathology(String pathology) {
		this.pathology = pathology;
	}

	public String getStorageType() {
		return storageType;
	}

	public void setStorageType(String storageType) {
		this.storageType = storageType;
	}

	public BigDecimal getInitialQty() {
		return initialQty;
	}

	public void setInitialQty(BigDecimal initialQty) {
		this.initialQty = initialQty;
	}

	public BigDecimal getConcentration() {
		return concentration;
	}

	public void setConcentration(BigDecimal concentration) {
		this.concentration = concentration;
	}

	public UserSummary getCollector() {
		return collector;
	}

	public void setCollector(UserSummary collector) {
		this.collector = collector;
	}

	public String getCollectionProcedure() {
		return collectionProcedure;
	}

	public void setCollectionProcedure(String collectionProcedure) {
		this.collectionProcedure = collectionProcedure;
	}

	public String getCollectionContainer() {
		return collectionContainer;
	}

	public void setCollectionContainer(String collectionContainer) {
		this.collectionContainer = collectionContainer;
	}

	public UserSummary getReceiver() {
		return receiver;
	}

	public void setReceiver(UserSummary receiver) {
		this.receiver = receiver;
	}

	public String getLabelFmt() {
		return labelFmt;
	}

	public void setLabelFmt(String labelFmt) {
		this.labelFmt = labelFmt;
	}

	public String getLabelAutoPrintMode() {
		return labelAutoPrintMode;
	}

	public void setLabelAutoPrintMode(String labelAutoPrintMode) {
		this.labelAutoPrintMode = labelAutoPrintMode;
	}

	public Integer getLabelPrintCopies() {
		return labelPrintCopies;
	}

	public void setLabelPrintCopies(Integer labelPrintCopies) {
		this.labelPrintCopies = labelPrintCopies;
	}

	public Integer getSortOrder() {
		return sortOrder;
	}

	public void setSortOrder(Integer sortOrder) {
		this.sortOrder = sortOrder;
	}

	public Long getEventId() {
		return eventId;
	}

	public void setEventId(Long eventId) {
		this.eventId = eventId;
	}

	public List<SpecimenRequirementDetail> getChildren() {
		return children;
	}

	public void setChildren(List<SpecimenRequirementDetail> children) {
		this.children = children;
	}

	@JsonIgnore
	public String getCpShortTitle() {
		return cpShortTitle;
	}

	public void setCpShortTitle(String cpShortTitle) {
		this.cpShortTitle = cpShortTitle;
	}

	@JsonIgnore
	public String getEventLabel() {
		return eventLabel;
	}

	public void setEventLabel(String eventLabel) {
		this.eventLabel = eventLabel;
	}

	@JsonIgnore
	public String getParentSrCode() {
		return parentSrCode;
	}

	public void setParentSrCode(String parentSrCode) {
		this.parentSrCode = parentSrCode;
	}

	public String getActivityStatus() {
		return activityStatus;
	}

	public void setActivityStatus(String activityStatus) {
		this.activityStatus = activityStatus;
	}

	/**
	 * Orders requirements by sortOrder, breaking ties by id.  NumUtil handles
	 * null-safe comparison of the boxed values.
	 * NOTE(review): compareTo is defined without an equals override -- fine
	 * for sorted lists, but inconsistent with equals for sorted sets/maps.
	 */
	@Override
	public int compareTo(SpecimenRequirementDetail other) {
		int cmp = NumUtil.compareTo(sortOrder, other.sortOrder);
		if (cmp != 0) {
			return cmp;
		}

		return NumUtil.compareTo(id, other.id);
	}

	/**
	 * Maps this detail onto an aliquot-creation requirement under the given
	 * parent; per-aliquot quantity is taken from this detail's initial quantity.
	 */
	public AliquotSpecimensRequirement toAliquotRequirement(Long parentSrId, int noOfAliquots) {
		AliquotSpecimensRequirement req = new AliquotSpecimensRequirement();
		req.setNoOfAliquots(noOfAliquots);
		req.setLabelFmt(getLabelFmt());
		req.setLabelAutoPrintMode(getLabelAutoPrintMode());
		req.setLabelPrintCopies(getLabelPrintCopies());
		req.setParentSrId(parentSrId);
		req.setQtyPerAliquot(getInitialQty());
		req.setStorageType(getStorageType());
		return req;
	}

	/**
	 * Maps this detail onto a derived-specimen requirement under the given
	 * parent, carrying over class/type, quantities and label settings.
	 */
	public DerivedSpecimenRequirement toDerivedRequirement(Long parentSrId) {
		DerivedSpecimenRequirement req = new DerivedSpecimenRequirement();
		req.setConcentration(getConcentration());
		req.setLabelFmt(getLabelFmt());
		req.setLabelAutoPrintMode(getLabelAutoPrintMode());
		req.setLabelPrintCopies(getLabelPrintCopies());
		req.setName(getName());
		req.setParentSrId(parentSrId);
		req.setQuantity(getInitialQty());
		req.setSpecimenClass(getSpecimenClass());
		req.setStorageType(getStorageType());
		req.setType(getType());
		req.setCode(getCode());
		req.setPathology(getPathology());
		return req;
	}

	/** Converts the domain object, including its child requirements. */
	public static SpecimenRequirementDetail from(SpecimenRequirement sr) {
		return from(sr, true);
	}

	/**
	 * Converts the domain object to a detail DTO.  Permissible-value fields
	 * are flattened to their string values; collector/receiver to summaries.
	 *
	 * @param sr source domain requirement
	 * @param incChildren when true, recursively converts child requirements
	 */
	public static SpecimenRequirementDetail from(SpecimenRequirement sr, boolean incChildren) {
		SpecimenRequirementDetail detail = new SpecimenRequirementDetail();
		detail.setId(sr.getId());
		detail.setName(sr.getName());
		detail.setCode(sr.getCode());
		detail.setLineage(sr.getLineage());
		detail.setSpecimenClass(PermissibleValue.getValue(sr.getSpecimenClass()));
		detail.setType(PermissibleValue.getValue(sr.getSpecimenType()));
		detail.setAnatomicSite(PermissibleValue.getValue(sr.getAnatomicSite()));
		detail.setLaterality(PermissibleValue.getValue(sr.getLaterality()));
		detail.setPathology(PermissibleValue.getValue(sr.getPathologyStatus()));
		detail.setStorageType(sr.getStorageType());
		detail.setInitialQty(sr.getInitialQuantity());
		detail.setConcentration(sr.getConcentration());
		detail.setCollector(sr.getCollector() == null ? null : UserSummary.from(sr.getCollector()));
		detail.setCollectionProcedure(PermissibleValue.getValue(sr.getCollectionProcedure()));
		detail.setCollectionContainer(PermissibleValue.getValue(sr.getCollectionContainer()));
		detail.setReceiver(sr.getReceiver() == null ? null : UserSummary.from(sr.getReceiver()));
		detail.setLabelFmt(sr.getLabelFormat());
		detail.setLabelAutoPrintMode(sr.getLabelAutoPrintMode() == null ? null : sr.getLabelAutoPrintMode().name());
		detail.setLabelPrintCopies(sr.getLabelPrintCopies());
		detail.setSortOrder(sr.getSortOrder());
		detail.setEventId(sr.getCollectionProtocolEvent().getId());
		detail.setActivityStatus(sr.getActivityStatus());
		if (incChildren) {
			detail.setChildren(from(sr.getChildSpecimenRequirements()));
		}

		return detail;
	}

	/** Converts a collection of domain objects, including children. */
	public static List<SpecimenRequirementDetail> from(Collection<SpecimenRequirement> srs) {
		return from(srs, true);
	}

	/**
	 * Null-safe bulk conversion; the result is sorted via
	 * {@link #compareTo(SpecimenRequirementDetail)} (sortOrder, then id).
	 */
	public static List<SpecimenRequirementDetail> from(Collection<SpecimenRequirement> srs, boolean incChildren) {
		return Utility.nullSafeStream(srs).map(sr -> from(sr, incChildren)).sorted().collect(Collectors.toList());
	}
}
| |
// Copyright 2014 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.rules.java;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Multimap;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.LabelConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsConverter;
import com.google.devtools.build.lib.analysis.config.BuildConfiguration.StrictDepsMode;
import com.google.devtools.build.lib.analysis.config.DefaultsPackage;
import com.google.devtools.build.lib.analysis.config.FragmentOptions;
import com.google.devtools.build.lib.cmdline.Label;
import com.google.devtools.build.lib.cmdline.LabelSyntaxException;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaClasspathMode;
import com.google.devtools.build.lib.rules.java.JavaConfiguration.JavaOptimizationMode;
import com.google.devtools.common.options.EnumConverter;
import com.google.devtools.common.options.Option;
import com.google.devtools.common.options.TriState;
import java.util.HashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Command-line options for building Java targets.
 *
 * Each {@code @Option} field documents itself via its {@code help} text; the
 * defaults below (option strings, expansions, categories) are part of the
 * user-visible contract and must not drift.
 */
public class JavaOptions extends FragmentOptions {
  /** Converter for the --java_classpath option. */
  public static class JavaClasspathModeConverter extends EnumConverter<JavaClasspathMode> {
    public JavaClasspathModeConverter() {
      super(JavaClasspathMode.class, "Java classpath reduction strategy");
    }
  }

  /**
   * Converter for the --java_optimization_mode option.
   */
  public static class JavaOptimizationModeConverter extends EnumConverter<JavaOptimizationMode> {
    public JavaOptimizationModeConverter() {
      super(JavaOptimizationMode.class, "Java optimization strategy");
    }
  }

  @Option(name = "javabase",
      defaultValue = "@bazel_tools//tools/jdk:jdk",
      category = "version",
      help = "JAVABASE used for the JDK invoked by Blaze. This is the "
          + "JAVABASE which will be used to execute external Java "
          + "commands.")
  public String javaBase;

  @Option(name = "java_toolchain",
      defaultValue = "@bazel_tools//tools/jdk:toolchain",
      category = "version",
      converter = LabelConverter.class,
      help = "The name of the toolchain rule for Java.")
  public Label javaToolchain;

  @Option(
    name = "host_java_toolchain",
    defaultValue = "@bazel_tools//tools/jdk:toolchain",
    category = "version",
    converter = LabelConverter.class,
    help = "The Java toolchain used to build tools that are executed during a build."
  )
  public Label hostJavaToolchain;

  @Option(name = "host_javabase",
      defaultValue = "@bazel_tools//tools/jdk:jdk",
      category = "version",
      help = "JAVABASE used for the host JDK. This is the JAVABASE which is used to execute "
          + " tools during a build.")
  public String hostJavaBase;

  @Option(name = "javacopt",
      allowMultiple = true,
      defaultValue = "",
      category = "flags",
      help = "Additional options to pass to javac.")
  public List<String> javacOpts;

  @Option(name = "jvmopt",
      allowMultiple = true,
      defaultValue = "",
      category = "flags",
      help = "Additional options to pass to the Java VM. These options will get added to the "
          + "VM startup options of each java_binary target.")
  public List<String> jvmOpts;

  @Option(name = "use_ijars",
      defaultValue = "true",
      category = "strategy",
      help = "If enabled, this option causes Java compilation to use interface jars. "
          + "This will result in faster incremental compilation, "
          + "but error messages can be different.")
  public boolean useIjars;

  @Deprecated
  @Option(name = "use_src_ijars",
      defaultValue = "false",
      category = "undocumented",
      help = "No-op. Kept here for backwards compatibility.")
  public boolean useSourceIjars;

  @Option(
    name = "java_header_compilation",
    defaultValue = "true",
    category = "semantics",
    help = "Compile ijars directly from source.",
    oldName = "experimental_java_header_compilation"
  )
  public boolean headerCompilation;

  // TODO(cushon): delete flag after removing from global .blazerc
  @Deprecated
  @Option(
    name = "experimental_optimize_header_compilation_annotation_processing",
    defaultValue = "false",
    category = "undocumented",
    help = "This flag is a noop and scheduled for removal."
  )
  public boolean optimizeHeaderCompilationAnnotationProcessing;

  @Option(name = "java_deps",
      defaultValue = "true",
      category = "strategy",
      help = "Generate dependency information (for now, compile-time classpath) per Java target.")
  public boolean javaDeps;

  @Option(
    name = "java_classpath",
    allowMultiple = false,
    defaultValue = "javabuilder",
    converter = JavaClasspathModeConverter.class,
    category = "semantics",
    help = "Enables reduced classpaths for Java compilations.",
    oldName = "experimental_java_classpath"
  )
  public JavaClasspathMode javaClasspath;

  // Pure expansion flag (type Void): it only rewrites other options.
  @Option(name = "java_debug",
      defaultValue = "null",
      category = "testing",
      expansion = {"--test_arg=--wrapper_script_flag=--debug", "--test_output=streamed",
                   "--test_strategy=exclusive", "--test_timeout=9999", "--nocache_test_results"},
      help = "Causes the Java virtual machine of a java test to wait for a connection from a "
          + "JDWP-compliant debugger (such as jdb) before starting the test. Implies "
          + "-test_output=streamed."
      )
  public Void javaTestDebug;

  @Option(
    name = "strict_java_deps",
    allowMultiple = false,
    defaultValue = "default",
    converter = StrictDepsConverter.class,
    category = "semantics",
    help =
        "If true, checks that a Java target explicitly declares all directly used "
            + "targets as dependencies.",
    oldName = "strict_android_deps"
  )
  public StrictDepsMode strictJavaDeps;

  @Option(
    name = "javabuilder_top",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javaBuilderTop;

  @Option(
    name = "singlejar_top",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String singleJarTop;

  @Option(
    name = "genclass_top",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String genClassTop;

  @Option(
    name = "ijar_top",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String iJarTop;

  @Option(
    name = "java_langtools",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javaLangtoolsJar;

  @Option(
    name = "javac_bootclasspath",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javacBootclasspath;

  @Option(
    name = "javac_extdir",
    defaultValue = "null",
    category = "undocumented",
    help = "No-op. Kept here for backwards compatibility."
  )
  public String javacExtdir;

  @Option(
    name = "host_java_launcher",
    defaultValue = "null",
    converter = LabelConverter.class,
    category = "semantics",
    help = "The Java launcher used by tools that are executed during a build."
  )
  public Label hostJavaLauncher;

  @Option(
    name = "java_launcher",
    defaultValue = "null",
    converter = LabelConverter.class,
    category = "semantics",
    help =
        "The Java launcher to use when building Java binaries. "
            + "The \"launcher\" attribute overrides this flag. "
  )
  public Label javaLauncher;

  @Option(name = "proguard_top",
      defaultValue = "null",
      category = "version",
      converter = LabelConverter.class,
      help = "Specifies which version of ProGuard to use for code removal when building a Java "
          + "binary.")
  public Label proguard;

  @Option(name = "extra_proguard_specs",
      allowMultiple = true,
      defaultValue = "", // Ignored
      converter = LabelConverter.class,
      category = "undocumented",
      help = "Additional Proguard specs that will be used for all Proguard invocations. Note that "
          + "using this option only has an effect when Proguard is used anyway.")
  public List<Label> extraProguardSpecs;

  @Option(name = "translations",
      defaultValue = "auto",
      category = "semantics",
      help = "Translate Java messages; bundle all translations into the jar "
          + "for each affected rule.")
  public TriState bundleTranslations;

  @Option(name = "message_translations",
      defaultValue = "",
      category = "semantics",
      allowMultiple = true,
      help = "The message translations used for translating messages in Java targets.")
  public List<String> translationTargets;

  @Option(name = "check_constraint",
      allowMultiple = true,
      defaultValue = "",
      category = "checking",
      help = "Check the listed constraint.")
  public List<String> checkedConstraints;

  @Option(name = "experimental_disable_jvm",
      defaultValue = "false",
      category = "undocumented",
      help = "Disables the Jvm configuration entirely.")
  public boolean disableJvm;

  @Option(name = "java_optimization_mode",
      defaultValue = "legacy",
      converter = JavaOptimizationModeConverter.class,
      category = "undocumented",
      help = "Applies desired link-time optimizations to Java binaries and tests.")
  public JavaOptimizationMode javaOptimizationMode;

  @Option(name = "legacy_bazel_java_test",
      defaultValue = "false",
      category = "undocumented",
      help = "Use the legacy mode of Bazel for java_test.")
  public boolean legacyBazelJavaTest;

  @Option(
    name = "strict_deps_java_protos",
    defaultValue = "false",
    category = "undocumented",
    help =
        "When 'strict-deps' is on, .java files that depend on classes not declared in their rule's "
            + "'deps' fail to build. In other words, it's forbidden to depend on classes obtained "
            + "transitively. When true, Java protos are strict regardless of their 'strict_deps' "
            + "attribute."
  )
  public boolean strictDepsJavaProtos;

  @Option(
    name = "experimental_java_header_compilation_direct_classpath",
    defaultValue = "false",
    category = "undocumented",
    help = "Experimental option to limit the header compilation classpath to direct deps."
  )
  public boolean headerCompilationDirectClasspath;

  @Option(
    name = "experimental_java_header_compilation_direct_classpath_fallback_error",
    defaultValue = "false",
    category = "undocumented",
    help =
        "If --experimental_java_header_compilation_direct_classpath is set, transitive classpath"
            + " fallback is an error"
  )
  public boolean headerCompilationDirectClasspathFallbackError;

  @Option(
    name = "experimental_one_version_enforcement",
    defaultValue = "false",
    category = "undocumented",
    help =
        "When enabled, enforce that a java_binary rule can't contain more than one version "
            + "of the same class file on the classpath"
  )
  public boolean enforceOneVersion;

  /**
   * Builds the host-configuration view of these options: starts from the
   * defaults, then maps the host_* flags onto their target-side counterparts
   * and carries over the flags that affect host tool builds.  Note that user
   * --jvmopt values are deliberately replaced with a fixed error-file flag.
   *
   * @param fallback unused here; part of the FragmentOptions contract
   */
  @Override
  public FragmentOptions getHost(boolean fallback) {
    JavaOptions host = (JavaOptions) getDefault();

    host.javaBase = hostJavaBase;
    host.jvmOpts = ImmutableList.of("-XX:ErrorFile=/dev/stderr");

    host.javacOpts = javacOpts;
    host.javaToolchain = hostJavaToolchain;

    host.javaLauncher = hostJavaLauncher;

    // Java builds often contain complicated code generators for which
    // incremental build performance is important.
    host.useIjars = useIjars;
    host.headerCompilation = headerCompilation;
    host.headerCompilationDirectClasspath = headerCompilationDirectClasspath;
    host.headerCompilationDirectClasspathFallbackError =
        headerCompilationDirectClasspathFallbackError;

    host.javaDeps = javaDeps;
    host.javaClasspath = javaClasspath;

    host.strictJavaDeps = strictJavaDeps;

    host.enforceOneVersion = enforceOneVersion;

    return host;
  }

  /**
   * Registers the labels implied by these options (JDKs, launcher, toolchain,
   * translations) so the build can depend on them.
   */
  @Override
  public void addAllLabels(Multimap<String, Label> labelMap) {
    addOptionalLabel(labelMap, "jdk", javaBase);
    addOptionalLabel(labelMap, "jdk", hostJavaBase);
    if (javaLauncher != null) {
      labelMap.put("java_launcher", javaLauncher);
    }
    labelMap.put("java_toolchain", javaToolchain);
    labelMap.putAll("translation", getTranslationLabels());
  }

  /**
   * Contributes the JDK and Java toolchain labels to the defaults package.
   */
  @Override
  public Map<String, Set<Label>> getDefaultsLabels(BuildConfiguration.Options commonOptions) {
    Set<Label> jdkLabels = new LinkedHashSet<>();
    DefaultsPackage.parseAndAdd(jdkLabels, javaBase);
    DefaultsPackage.parseAndAdd(jdkLabels, hostJavaBase);
    Map<String, Set<Label>> result = new HashMap<>();
    result.put("JDK", jdkLabels);
    result.put("JAVA_TOOLCHAIN", ImmutableSet.of(javaToolchain));
    return result;
  }

  /**
   * Parses --message_translations values into labels, silently dropping
   * malformed ones (they are reported later in the build).
   */
  private Set<Label> getTranslationLabels() {
    Set<Label> result = new LinkedHashSet<>();
    for (String s : translationTargets) {
      try {
        Label label = Label.parseAbsolute(s);
        result.add(label);
      } catch (LabelSyntaxException e) {
        // We ignore this exception here - it will cause an error message at a later time.
      }
    }
    return result;
  }
}
| |
/*
* SPDX-License-Identifier: Apache-2.0
*
* Copyright 2014-2021 The author and/or original authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.codehaus.griffon.runtime.datasource;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import com.zaxxer.hikari.util.DriverDataSource;
import griffon.annotations.core.Nonnull;
import griffon.core.Configuration;
import griffon.core.GriffonApplication;
import griffon.core.env.Environment;
import griffon.core.env.Metadata;
import griffon.exceptions.GriffonException;
import griffon.plugins.datasource.DataSourceFactory;
import griffon.plugins.datasource.events.DataSourceConnectEndEvent;
import griffon.plugins.datasource.events.DataSourceConnectStartEvent;
import griffon.plugins.datasource.events.DataSourceDisconnectEndEvent;
import griffon.plugins.datasource.events.DataSourceDisconnectStartEvent;
import griffon.plugins.monitor.MBeanManager;
import griffon.util.GriffonClassUtils;
import org.codehaus.griffon.runtime.core.storage.AbstractObjectFactory;
import org.codehaus.griffon.runtime.datasource.monitor.HikariPoolMonitor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import javax.inject.Named;
import javax.sql.DataSource;
import java.io.IOException;
import java.net.URL;
import java.sql.SQLException;
import java.sql.Statement;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Scanner;
import java.util.Set;
import static griffon.core.GriffonExceptionHandler.sanitize;
import static griffon.core.env.Environment.getEnvironmentShortName;
import static griffon.util.ConfigUtils.getConfigValue;
import static griffon.util.ConfigUtils.getConfigValueAsBoolean;
import static griffon.util.ConfigUtils.getConfigValueAsString;
import static griffon.util.GriffonNameUtils.requireNonBlank;
import static java.util.Arrays.asList;
import static java.util.Objects.requireNonNull;
/**
* @author Andres Almiray
*/
/**
 * Default {@link DataSourceFactory} implementation.
 * <p>
 * Creates HikariCP-backed pools when a data source is configured as pooled
 * (the default) and a plain {@code DriverDataSource} otherwise. Optionally
 * initializes the database schema from a DDL resource and registers a JMX
 * pool monitor for pooled data sources.
 *
 * @author Andres Almiray
 */
public class DefaultDataSourceFactory extends AbstractObjectFactory<DataSource> implements DataSourceFactory {
private static final Logger LOG = LoggerFactory.getLogger(DefaultDataSourceFactory.class);
private static final String ERROR_DATASOURCE_BLANK = "Argument 'dataSourceName' must not be blank";
// Names of all configured data sources; always contains KEY_DEFAULT plus any
// entries found under the plural "dataSources" configuration key.
private final Set<String> dataSourceNames = new LinkedHashSet<>();
@Inject
private MBeanManager mBeanManager;
@Inject
private Metadata metadata;
@Inject
private Environment environment;
/**
 * Builds the factory and eagerly collects the set of configured data source
 * names from the "datasource" configuration.
 */
@Inject
public DefaultDataSourceFactory(@Nonnull @Named("datasource") Configuration configuration, @Nonnull GriffonApplication application) {
super(configuration, application);
dataSourceNames.add(KEY_DEFAULT);
if (configuration.containsKey(getPluralKey())) {
// Unchecked cast: the configuration is expected to hold a map of
// per-data-source settings under the plural key.
Map<String, Object> datasources = (Map<String, Object>) configuration.get(getPluralKey());
dataSourceNames.addAll(datasources.keySet());
}
}
@Nonnull
@Override
public Set<String> getDataSourceNames() {
return dataSourceNames;
}
/** Returns the narrowed configuration subtree for the named data source. */
@Nonnull
@Override
public Map<String, Object> getConfigurationFor(@Nonnull String dataSourceName) {
requireNonBlank(dataSourceName, ERROR_DATASOURCE_BLANK);
return narrowConfig(dataSourceName);
}
@Nonnull
@Override
protected String getSingleKey() {
return "dataSource";
}
@Nonnull
@Override
protected String getPluralKey() {
return "dataSources";
}
/**
 * Creates the named data source, firing connect start/end events around the
 * whole setup (driver load, optional schema initialization, optional JMX
 * wrapping).
 */
@Nonnull
@Override
public DataSource create(@Nonnull String name) {
requireNonBlank(name, ERROR_DATASOURCE_BLANK);
Map<String, Object> config = narrowConfig(name);
event(DataSourceConnectStartEvent.of(name, config));
DataSource dataSource = createDataSource(config, name);
// NOTE(review): a config value of schema=true disables schema processing;
// confirm the key name and polarity are intended (variable reads 'skip').
boolean skipSchema = getConfigValueAsBoolean(config, "schema", false);
if (!skipSchema) {
processSchema(config, name, dataSource);
}
// JMX monitoring only applies to pooled (Hikari) data sources; the wrapper
// tracks registered MBean names so they can be unregistered on destroy.
if (getConfigValueAsBoolean(config, "jmx", true) && getConfigValueAsBoolean(config, "pooled", true)) {
dataSource = new JMXAwareDataSource(dataSource);
registerMBeans(name, (JMXAwareDataSource) dataSource);
}
event(DataSourceConnectEndEvent.of(name, config, dataSource));
return dataSource;
}
/**
 * Tears down JMX monitoring for the named data source, firing disconnect
 * start/end events.
 * <p>
 * NOTE(review): the underlying DataSource (e.g. HikariDataSource) is never
 * closed here, so pooled connections appear to leak on destroy — confirm
 * closing is handled elsewhere.
 */
@Override
public void destroy(@Nonnull String name, @Nonnull DataSource instance) {
requireNonBlank(name, ERROR_DATASOURCE_BLANK);
requireNonNull(instance, "Argument 'instance' must not be null");
Map<String, Object> config = narrowConfig(name);
event(DataSourceDisconnectStartEvent.of(name, config, instance));
if (getConfigValueAsBoolean(config, "jmx", true) && getConfigValueAsBoolean(config, "pooled", true)) {
unregisterMBeans((JMXAwareDataSource) instance);
}
event(DataSourceDisconnectEndEvent.of(name, config));
}
// Registers a Hikari pool monitor MBean and remembers its object name on the
// wrapper for later unregistration. Assumes the delegate is a HikariDataSource.
private void registerMBeans(@Nonnull String name, @Nonnull JMXAwareDataSource dataSource) {
HikariPoolMonitor poolMonitor = new HikariPoolMonitor(metadata, ((HikariDataSource) dataSource.getDelegate()).getHikariPoolMXBean(), name);
dataSource.addObjectName(mBeanManager.registerMBean(poolMonitor, true).getCanonicalName());
}
// Unregisters every MBean previously recorded on the wrapper.
private void unregisterMBeans(@Nonnull JMXAwareDataSource dataSource) {
for (String objectName : dataSource.getObjectNames()) {
mBeanManager.unregisterMBean(objectName);
}
dataSource.clearObjectNames();
}
/**
 * Builds the actual DataSource from configuration: HikariCP when pooled
 * (default), plain DriverDataSource otherwise. Requires non-blank
 * driverClassName and url; the driver class is preloaded via the
 * application class loader so connection failures surface early.
 */
@Nonnull
@SuppressWarnings("ConstantConditions")
private DataSource createDataSource(@Nonnull Map<String, Object> config, @Nonnull String name) {
String driverClassName = getConfigValueAsString(config, "driverClassName", "");
requireNonBlank(driverClassName, "Configuration for " + name + ".driverClassName must not be blank");
String url = getConfigValueAsString(config, "url", "");
requireNonBlank(url, "Configuration for " + name + ".url must not be blank");
try {
getApplication().getApplicationClassLoader().get().loadClass(driverClassName);
} catch (ClassNotFoundException e) {
throw new GriffonException(e);
}
String username = getConfigValueAsString(config, "username", "");
String password = getConfigValueAsString(config, "password", "");
boolean pooled = getConfigValueAsBoolean(config, "pooled", true);
if (pooled) {
HikariConfig hikariConfig = new HikariConfig();
hikariConfig.setJdbcUrl(url);
hikariConfig.setDriverClassName(driverClassName);
hikariConfig.setUsername(username);
hikariConfig.setPassword(password);
// Extra pool tuning properties are applied reflectively; unknown keys are
// ignored rather than failing the build of the pool.
Map<String, Object> pool = getConfigValue(config, "pool", Collections.<String, Object>emptyMap());
GriffonClassUtils.setPropertiesNoException(hikariConfig, pool);
return new HikariDataSource(hikariConfig);
}
// Non-pooled path: the entire config map is passed as driver properties.
Properties props = new Properties();
props.putAll(config);
return new DriverDataSource(url, driverClassName, props, username, password);
}
/**
 * Initializes the schema when dbCreate=create. Looks for the first matching
 * DDL resource in the classpath, most specific first:
 * {name}-schema-{env}.ddl, {name}-schema.ddl, schema-{env}.ddl, schema.ddl.
 * Statements are split on ';' and executed one by one; I/O or SQL failures
 * are logged (sanitized) but do not abort data source creation.
 */
private void processSchema(@Nonnull Map<String, Object> config, @Nonnull String name, @Nonnull DataSource dataSource) {
String dbCreate = getConfigValueAsString(config, "dbCreate", "skip");
if (!"create".equals(dbCreate)) {
return;
}
String env = getEnvironmentShortName(environment);
URL ddl = null;
for (String schemaName : asList(name + "-schema-" + env + ".ddl", name + "-schema.ddl", "schema-" + env + ".ddl", "schema.ddl")) {
ddl = getApplication().getResourceHandler().getResourceAsURL(schemaName);
if (ddl == null) {
LOG.warn("DataSource[{}].dbCreate was set to 'create' but {} was not found in classpath.", name, schemaName);
} else {
break;
}
}
if (ddl == null) {
LOG.error("DataSource[{}].dbCreate was set to 'create' but no suitable schema was found in classpath.", name);
return;
}
final URL url = ddl;
LOG.info("Initializing schema on '{}'", name);
DefaultDataSourceHandler.doWithConnection(name, dataSource, (dataSourceName, ds, connection) -> {
// NOTE(review): splitting on ';' breaks DDL containing literal semicolons
// (e.g. in string literals or stored procedures) — acceptable for simple
// schema files.
try (Scanner sc = new Scanner(url.openStream()); Statement statement = connection.createStatement()) {
sc.useDelimiter(";");
while (sc.hasNext()) {
String line = sc.next().trim();
statement.execute(line);
}
} catch (IOException | SQLException e) {
LOG.error("An error occurred when reading schema DDL from " + url, sanitize(e));
return null;
}
return null;
});
}
}
| |
package com.jcwhatever.nucleus.internal.managed.commands;
import static org.junit.Assert.assertEquals;
import com.jcwhatever.v1_8_R3.BukkitTester;
import com.jcwhatever.v1_8_R3.MockPlayer;
import com.jcwhatever.nucleus.NucleusTest;
import com.jcwhatever.nucleus.internal.managed.commands.CommandCollection.ICommandContainerFactory;
import com.jcwhatever.nucleus.internal.managed.commands.DummyRegisteredCommand.CommandInfoBuilder;
import com.jcwhatever.nucleus.managed.commands.CommandInfo;
import com.jcwhatever.nucleus.managed.commands.ICommand;
import com.jcwhatever.nucleus.managed.commands.arguments.ICommandArgument;
import com.jcwhatever.nucleus.managed.commands.exceptions.CommandException;
import com.jcwhatever.nucleus.managed.commands.exceptions.InvalidArgumentException;
import com.jcwhatever.nucleus.utils.ArrayUtils;
import com.jcwhatever.nucleus.utils.items.ItemStackBuilder;
import org.bukkit.Material;
import org.bukkit.inventory.ItemStack;
import org.bukkit.plugin.Plugin;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import java.util.Iterator;
/**
 * Tests for {@code Arguments}: parsing and typed access of static, floating
 * and flag command arguments.
 * <p>
 * Fixes two copy/paste slips in the original: {@code testGetDouble} asserted
 * the value via {@code getFloat} instead of {@code getDouble}, and
 * {@code testGetItemStack} checked the invalid-parameter path via
 * {@code getParams} instead of {@code getItemStack}.
 */
public class CommandArgumentsTest {

    /** Enum used to exercise the enum argument accessors. */
    public enum TestEnum {
        CONSTANT
    }

    @BeforeClass
    public static void testStartup() {
        NucleusTest.init();
    }

    /**
     * Builds an {@code Arguments} instance by registering a dummy command with
     * a fresh dispatcher, applying the given command info, then parsing the
     * supplied raw arguments.
     */
    private Arguments getArguments(CommandInfo info, String... args) throws CommandException {
        CommandDispatcher dispatcher = new CommandDispatcher(
                BukkitTester.mockPlugin("dummy"),
                new ICommandContainerFactory() {
                    @Override
                    public RegisteredCommand create(Plugin plugin, ICommand command) {
                        return new DummyRegisteredCommand(plugin, command, this);
                    }
                });

        dispatcher.registerCommand(DummyCommand.class);

        DummyRegisteredCommand command = (DummyRegisteredCommand) dispatcher.getCommand("dummy");
        assert command != null;

        command.setInfo(info);

        return new Arguments(command, args);
    }

    // Get arguments for a command that expects 1 static parameter "param1"
    private Arguments getParseArguments(String argument) throws CommandException {
        return getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1").build(),
                argument != null ? new String[]{argument} : new String[0]);
    }

    // Get arguments for a command that has 1 optional static parameter "param1"
    private Arguments getOptionalArguments(String argument) throws CommandException {
        return getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1=optional").build(),
                argument != null ? new String[]{argument} : new String[0]);
    }

    // Get arguments for a command that has 1 required static parameter "param1",
    // 1 optional static param "param2" and 1 flag "flag".
    private Arguments getMixedOptionalArguments(String argument1, String argument2, boolean flag)
            throws CommandException {
        return getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1", "param2=optional")
                        .flags("flag").build(),
                ArrayUtils.removeNull(new String[]{argument1, argument2, flag ? "--flag" : null}));
    }

    // Get arguments for a command that has 1 flag argument
    private Arguments getFlagArguments(boolean hasFlag) throws CommandException {
        return getArguments(new CommandInfoBuilder("dummy")
                        .flags("flag").build(),
                hasFlag ? new String[]{"--flag"} : new String[0]);
    }

    @Test
    public void testGetRawArguments() throws Exception {
        Arguments args = getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1", "param2").build(),
                "arg1", "arg2");

        Assert.assertArrayEquals(new String[]{"arg1", "arg2"}, args.getRawArguments());
    }

    @Test
    public void testStaticSize() throws Exception {
        Arguments args = getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1", "param2").build(),
                "arg1", "arg2");

        // returns the number of static parameters, not arguments
        assertEquals(2, args.staticSize());

        args = getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1", "param2=optional").build(),
                "arg1");

        // returns the number of static parameters, not arguments
        assertEquals(2, args.staticSize());
    }

    @Test
    public void testFloatingSize() throws Exception {
        Arguments args = getArguments(new CommandInfoBuilder("dummy")
                        .floatingParams("param1", "param2").build(),
                "-param1", "arg1", "-param2", "arg2");

        // returns the number of floating parameters, not arguments
        assertEquals(2, args.floatingSize());

        args = getArguments(new CommandInfoBuilder("dummy")
                        .floatingParams("param1", "param2=optional").build(),
                "-param1", "arg1");

        // returns the number of floating parameters, not arguments
        assertEquals(2, args.floatingSize());
    }

    @Test
    public void testGet() throws Exception {
        Arguments args = getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1")
                        .floatingParams("param2").build(),
                "arg1", "-param2", "arg2");

        assertEquals("arg1", args.get("param1").getValue());
        assertEquals("arg2", args.get("param2").getValue());
    }

    @Test
    public void testIterator() throws Exception {
        Arguments args = getArguments(new CommandInfoBuilder("dummy")
                        .staticParams("param1")
                        .floatingParams("param2").build(),
                "arg1", "-param2", "arg2");

        Iterator<ICommandArgument> iterator = args.iterator();

        assertEquals("arg1", iterator.next().getValue());
        assertEquals("arg2", iterator.next().getValue());
        assertEquals(false, iterator.hasNext());
    }

    @Test
    public void testGetName() throws Exception {
        Arguments args = getParseArguments("validName");

        // should not throw any exceptions
        assertEquals("validName", args.getName("param1"));

        // check for invalid argument, name has invalid character at beginning
        args = getParseArguments("#invalidName");
        try {
            args.getName("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, name is longer than the default 16 chars
        args = getParseArguments("NameLongerThan16Characters");
        try {
            args.getName("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, name is longer than an explicit limit
        try {
            args.getName("param1", 20); // name cannot be longer than 20 characters
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, name exceeds a smaller explicit limit
        args = getParseArguments("shortName");
        try {
            args.getName("param1", 8); // name cannot be longer than 8 characters
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetString() throws Exception {
        Arguments args = getParseArguments("string");

        assertEquals("string", args.getString("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getString("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testGetBoolean() throws Exception {
        Arguments args = null;

        // every recognized "true" alias parses to true
        for (String value : new String[]{"true", "yes", "allow", "on", "1"}) {
            args = getParseArguments(value);
            assertEquals(true, args.getBoolean("param1"));
        }

        // every recognized "false" alias parses to false
        for (String value : new String[]{"false", "no", "deny", "off", "0"}) {
            args = getParseArguments(value);
            assertEquals(false, args.getBoolean("param1"));
        }

        // check for runtime exception, invalid parameter name specified
        try {
            args.getBoolean("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("notABoolean");
        try {
            args.getBoolean("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // flags are true when present, false when absent
        args = getFlagArguments(true);
        assertEquals(true, args.getBoolean("flag"));

        args = getFlagArguments(false);
        assertEquals(false, args.getBoolean("flag"));
    }

    @Test
    public void testGetChar() throws Exception {
        Arguments args = getParseArguments("c");

        // should not throw any exceptions
        assertEquals('c', args.getChar("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getChar("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument, more than one character
        args = getParseArguments("invalidChar");
        try {
            args.getChar("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetByte() throws Exception {
        Arguments args = getParseArguments("10");

        // should not throw any exceptions
        assertEquals((byte) 10, args.getByte("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getByte("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidByte");
        try {
            args.getByte("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, number out of range
        args = getParseArguments(String.valueOf(Byte.MAX_VALUE + 1));
        try {
            args.getByte("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetShort() throws Exception {
        Arguments args = getParseArguments("10");

        // should not throw any exceptions
        assertEquals(10, args.getShort("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getShort("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidShort");
        try {
            args.getShort("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, number out of range
        args = getParseArguments(String.valueOf(Short.MAX_VALUE + 1));
        try {
            args.getShort("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetInteger() throws Exception {
        Arguments args = getParseArguments("10");

        // should not throw any exceptions
        assertEquals(10, args.getInteger("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getInteger("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidInteger");
        try {
            args.getInteger("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, number out of range
        args = getParseArguments(String.valueOf(Integer.MAX_VALUE + 1L));
        try {
            args.getInteger("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetLong() throws Exception {
        Arguments args = getParseArguments("10");

        // should not throw any exceptions
        assertEquals(10, args.getLong("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getLong("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidLong");
        try {
            args.getLong("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}

        // check for invalid argument, number out of range
        args = getParseArguments("99999999999999999999999999999999999999999999999999999999999");
        try {
            args.getLong("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetFloat() throws Exception {
        Arguments args = getParseArguments("10.0");

        // should not throw any exceptions
        assertEquals(10.0D, args.getFloat("param1"), 0.0D);

        // check for runtime exception, invalid parameter name specified
        try {
            args.getFloat("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidFloat");
        try {
            args.getFloat("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetDouble() throws Exception {
        Arguments args = getParseArguments("10.0");

        // should not throw any exceptions
        // FIX: was args.getFloat(...) — this test must exercise getDouble.
        assertEquals(10.0D, args.getDouble("param1"), 0.0D);

        // check for runtime exception, invalid parameter name specified
        try {
            args.getDouble("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidDouble");
        try {
            args.getDouble("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetPercent() throws Exception {
        Arguments args = getParseArguments("10.0%");

        // should not throw any exceptions
        assertEquals(10.0D, args.getPercent("param1"), 0.0D);

        // check for runtime exception, invalid parameter name specified
        try {
            args.getPercent("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidPercent");
        try {
            args.getPercent("param1");
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testGetParams() throws Exception {
        Arguments args = getParseArguments("1 2 3");

        // should not throw any exceptions
        Assert.assertArrayEquals(new String[]{"1", "2", "3"}, args.getParams("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getParams("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testGetItemStack() throws Exception {
        ItemStack wood = new ItemStackBuilder(Material.WOOD).build();
        ItemStack woodAmount = new ItemStackBuilder(Material.WOOD).amount(5).build();

        Arguments args;

        args = getParseArguments("wood");
        Assert.assertArrayEquals(new ItemStack[]{wood}, args.getItemStack(new MockPlayer("dummy"), "param1"));

        args = getParseArguments("wood,wood");
        Assert.assertArrayEquals(new ItemStack[]{wood, wood}, args.getItemStack(new MockPlayer("dummy"), "param1"));

        args = getParseArguments("wood,wood;5");
        Assert.assertArrayEquals(new ItemStack[]{wood, woodAmount}, args.getItemStack(new MockPlayer("dummy"), "param1"));

        // check for runtime exception, invalid parameter name specified
        // FIX: was args.getParams(...) — this test must exercise getItemStack.
        try {
            args.getItemStack(new MockPlayer("dummy"), "param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testGetLocation() throws Exception {
        // TODO: requires player to select a location
    }

    @Test
    public void testGetEnum() throws Exception {
        Arguments args = getParseArguments("constant");

        // should not throw any exceptions; matching is case-insensitive here
        assertEquals(TestEnum.CONSTANT, args.getEnum("param1", TestEnum.class));

        // check for runtime exception, invalid parameter name specified
        try {
            args.getEnum("param2", TestEnum.class);
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}

        // check for invalid argument
        args = getParseArguments("invalidConstant");
        try {
            args.getEnum("param1", TestEnum.class);
            throw new AssertionError("InvalidArgumentException expected.");
        }
        catch (InvalidArgumentException ignore) {}
    }

    @Test
    public void testIsDefaultValue() throws Exception {
        Arguments args;

        // a supplied value is not the default
        args = getOptionalArguments("value");
        assertEquals(false, args.isDefaultValue("param1"));

        // an omitted optional argument falls back to its default
        args = getOptionalArguments(null);
        assertEquals(true, args.isDefaultValue("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.isDefaultValue("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasBoolean() throws Exception {
        Arguments args = null;

        // every recognized boolean alias is reported as present
        for (String value : new String[]{"true", "allow", "yes", "on", "false", "deny", "0", "no", "off"}) {
            args = getParseArguments(value);
            assertEquals(true, args.hasBoolean("param1"));
        }

        args = getParseArguments("notABoolean");
        assertEquals(false, args.hasBoolean("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasBoolean("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasChar() throws Exception {
        Arguments args;

        args = getParseArguments("s");
        assertEquals(true, args.hasChar("param1"));

        args = getParseArguments("notAChar");
        assertEquals(false, args.hasChar("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasChar("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasByte() throws Exception {
        Arguments args;

        args = getParseArguments("10");
        assertEquals(true, args.hasByte("param1"));

        args = getParseArguments("notAByte");
        assertEquals(false, args.hasByte("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasByte("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasShort() throws Exception {
        Arguments args;

        args = getParseArguments("10");
        assertEquals(true, args.hasShort("param1"));

        args = getParseArguments("notAShort");
        assertEquals(false, args.hasShort("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasShort("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasInt() throws Exception {
        Arguments args;

        args = getParseArguments("10");
        assertEquals(true, args.hasInteger("param1"));

        args = getParseArguments("notAnInt");
        assertEquals(false, args.hasInteger("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasInteger("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasFloat() throws Exception {
        Arguments args;

        // both decimal and integer notation qualify as a float
        args = getParseArguments("10.0");
        assertEquals(true, args.hasFloat("param1"));

        args = getParseArguments("10");
        assertEquals(true, args.hasFloat("param1"));

        args = getParseArguments("notAFloat");
        assertEquals(false, args.hasFloat("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasFloat("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasDouble() throws Exception {
        Arguments args;

        // both decimal and integer notation qualify as a double
        args = getParseArguments("10.0");
        assertEquals(true, args.hasDouble("param1"));

        args = getParseArguments("10");
        assertEquals(true, args.hasDouble("param1"));

        args = getParseArguments("notADouble");
        assertEquals(false, args.hasDouble("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasDouble("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasItemStack() throws Exception {
        Arguments args;

        args = getParseArguments("wood");
        assertEquals(true, args.hasItemStack("param1"));

        args = getParseArguments("wood,wood");
        assertEquals(true, args.hasItemStack("param1"));

        args = getParseArguments("wood,wood;5");
        assertEquals(true, args.hasItemStack("param1"));

        args = getParseArguments("notAnItemStack");
        assertEquals(false, args.hasItemStack("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasItemStack("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasPercent() throws Exception {
        Arguments args;

        // percent sign is optional; plain numbers also qualify
        args = getParseArguments("10.0%");
        assertEquals(true, args.hasPercent("param1"));

        args = getParseArguments("10%");
        assertEquals(true, args.hasPercent("param1"));

        args = getParseArguments("10.0");
        assertEquals(true, args.hasPercent("param1"));

        args = getParseArguments("10");
        assertEquals(true, args.hasPercent("param1"));

        args = getParseArguments("notAPercent");
        assertEquals(false, args.hasPercent("param1"));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasPercent("param2");
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testHasEnum() throws Exception {
        Arguments args;

        args = getParseArguments("constant");
        assertEquals(true, args.hasEnum("param1", TestEnum.class));

        args = getParseArguments("notAValidConstant");
        assertEquals(false, args.hasEnum("param1", TestEnum.class));

        // check for runtime exception, invalid parameter name specified
        try {
            args.hasEnum("param2", TestEnum.class);
            throw new AssertionError("RuntimeException expected.");
        }
        catch (RuntimeException ignore) {}
    }

    @Test
    public void testMixedArgTypes() throws Exception {
        Arguments args;

        // required + optional + flag all supplied
        args = getMixedOptionalArguments("arg1", "arg2", true);
        assertEquals("arg1", args.getString("param1"));
        assertEquals("arg2", args.getString("param2"));
        assertEquals(true, args.getBoolean("flag"));

        // flag absent
        args = getMixedOptionalArguments("arg1", "arg2", false);
        assertEquals("arg1", args.getString("param1"));
        assertEquals("arg2", args.getString("param2"));
        assertEquals(false, args.getBoolean("flag"));

        // optional omitted: falls back to its declared default value
        args = getMixedOptionalArguments("arg1", null, false);
        assertEquals("arg1", args.getString("param1"));
        assertEquals("optional", args.getString("param2"));
        assertEquals(false, args.getBoolean("flag"));

        // optional omitted but flag supplied
        args = getMixedOptionalArguments("arg1", null, true);
        assertEquals("arg1", args.getString("param1"));
        assertEquals("optional", args.getString("param2"));
        assertEquals(true, args.getBoolean("flag"));
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package net.firejack.platform.api.filestore;
import com.sun.jersey.core.util.Base64;
import net.firejack.platform.api.AbstractServiceProxy;
import net.firejack.platform.api.OPFEngine;
import net.firejack.platform.api.authority.domain.AuthenticationToken;
import net.firejack.platform.api.content.domain.FileInfo;
import net.firejack.platform.api.filestore.domain.FileStoreInfo;
import net.firejack.platform.core.response.ServiceResponse;
import net.firejack.platform.core.utils.InstallUtils;
import net.firejack.platform.core.utils.OpenFlame;
import net.firejack.platform.web.handler.Builder;
import net.firejack.platform.web.security.x509.KeyUtils;
import org.springframework.beans.BeansException;
import java.io.File;
import java.io.InputStream;
import java.net.InetAddress;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import java.util.Map;
/**
 * Client-side proxy for the remote file-store REST service (endpoints mounted under
 * {@code /filestore}). Directory and file locations are transmitted as repeated
 * {@code "path"} query parameters.
 *
 * <p>Authentication: instead of a regular user session this proxy performs a one-time
 * STS certificate sign-in (see {@link #getSessionToken()}) using the key pair from the
 * locally installed key store, and caches the resulting token until
 * {@link #resetSessionToken()} is called.
 */
public class FileStoreServiceProxy extends AbstractServiceProxy implements IFileStoreService {
    // Cached STS token; null means "not signed in yet" (or explicitly reset).
    // NOTE(review): lazy init below is not synchronized — concurrent first calls may
    // each perform a sign-in; presumably harmless (last writer wins), but confirm.
    private String sessionToken;

    public FileStoreServiceProxy(Class[] classes) {
        super(classes);
    }

    /**
     * Returns the cached STS session token, performing a certificate-based sign-in
     * on first use.
     *
     * <p>Flow: load the key pair from the installed key store, self-generate an
     * X.509 certificate from it, Base64-encode it and exchange it for an
     * {@link AuthenticationToken} via the authority service. If no key store is
     * installed, falls back to the ordinary session token of the superclass.
     * On any failure the error is logged and {@code null} is returned.
     */
    @Override
    protected String getSessionToken() {
        if (sessionToken == null) {
            File keystore = InstallUtils.getKeyStore();
            // No local key store installed: behave like a plain user-session proxy.
            if (!keystore.exists()) return super.getSessionToken();
            try {
                String hostName = InetAddress.getLocalHost().getHostName();
                // String hostName = InetAddress.getLocalHost().getHostName() + "_slave"; //TODO [CLUSTER] don't commit "_slave"
                KeyPair keyPair = KeyUtils.load(keystore);
                if (keyPair == null) {
                    throw new IllegalStateException("Key not found");
                }
                // Short-lived (1 day) self-signed certificate used only for the STS exchange.
                X509Certificate certificate = KeyUtils.generateCertificate("", 1, keyPair);
                String cert = new String(Base64.encode(certificate.getEncoded()));
                ServiceResponse<AuthenticationToken> response = OPFEngine.AuthorityService.processSTSCertSignIn(OpenFlame.PACKAGE, hostName, cert);
                if (response.isSuccess()) {
                    AuthenticationToken authenticationToken = response.getItem();
                    sessionToken = authenticationToken.getToken();
                }
            } catch (Exception e) {
                // Best-effort sign-in: log and leave sessionToken null so callers can retry later.
                logger.error(e);
            }
        }
        return sessionToken;
    }

    /**
     * Suppresses the client-IP header once an STS token has been obtained.
     * NOTE(review): presumably the server derives the address itself for
     * certificate-authenticated peers — confirm against the handler chain.
     */
    @Override
    protected String getClientIp() {
        return sessionToken == null ? super.getClientIp() : null;
    }

    /** Drops the cached token so the next request triggers a fresh sign-in. */
    protected void resetSessionToken() {
        sessionToken = null;
    }

    /** No validation required for this proxy (intentionally empty). */
    @Override
    public void valid() throws BeansException {
    }

    /** @return the URL suffix under which the file-store service is mounted */
    @Override
    public String getServiceUrlSuffix() {
        return "/filestore";
    }

    /** Creates a directory at {@code path} inside the registry node {@code lookup}. */
    @Override
    public ServiceResponse createDirectory(String lookup, String... path) {
        return post("/directory/" + lookup, "path", path);
    }

    /** Deletes the directory at {@code path} inside the registry node {@code lookup}. */
    @Override
    public ServiceResponse deleteDirectory(String lookup, String... path) {
        return delete("/directory/" + lookup, "path", path);
    }

    /** Renames the directory at {@code path} to {@code name}. */
    @Override
    public ServiceResponse renameDirectory(String lookup, String name, String... path) {
        return put("/directory/" + lookup, "path", path, "name", name);
    }

    /** Searches the directory at {@code path} for entries matching {@code term}. */
    @Override
    public ServiceResponse<FileInfo> search(String lookup, String term, String... path) {
        return get("/directory/" + lookup, "path", path, "term", term);
    }

    /**
     * Downloads {@code filename} from {@code path}.
     * Caller is responsible for closing the returned stream.
     */
    @Override
    public InputStream download(String lookup, String filename, String... path) {
        return getStream("/file/" + lookup + "/" + filename, "path", path);
    }

    /** Uploads {@code inputStream} as {@code filename} under {@code path}. */
    @Override
    public ServiceResponse upload(String lookup, String filename, InputStream inputStream, String... path) {
        return upload1("/file/" + lookup + "/" + filename, inputStream, null, "path", path);
    }

    /** Deletes {@code filename} from {@code path}. */
    @Override
    public ServiceResponse deleteFile(String lookup, String filename, String... path) {
        return delete("/file/" + lookup + "/" + filename, "path", path);
    }

    /** Reads global file-store information (capacity, usage, ...). */
    @Override
    public ServiceResponse<FileStoreInfo> readFileStoreInfo() {
        return get("/file");
    }

    /** Reads metadata for the file/directory at {@code path}. */
    @Override
    public ServiceResponse<FileInfo> getInfo(String lookup, String... path) {
        return get("/file/" + lookup, "path", path);
    }

    /** Creates a zip archive on the server from the named streams (multi-part upload). */
    @Override
    public ServiceResponse zip(String lookup, Map<String, InputStream> stream, String... path) {
        return upload1("/zip/" + lookup, stream, "path", path);
    }

    /** Updates an existing server-side zip archive with the named streams. */
    @Override
    public ServiceResponse updatezip(String lookup, Map<String, InputStream> stream, String... path) {
        return upload2("/zip/" + lookup, stream, "path", path);
    }

    /** Uploads a zip stream and extracts it at {@code path}. */
    @Override
    public ServiceResponse unzip(String lookup, InputStream stream, String... path) {
        return upload1("/unzip/" + lookup, stream, null, "path", path);
    }

    /** Uploads a zip stream, extracts it into a temporary location and returns its file info. */
    @Override
    public ServiceResponse<FileInfo> unzipTemp(String lookup, InputStream stream, String... path) {
        return upload1("/unzip/temp/" + lookup, stream, FileInfo.class, "path", path);
    }

    // The do* helpers below route each HTTP verb through FileStoreServiceProxyExecutor,
    // which presumably adds retry/re-auth handling around the raw Builder call — confirm.

    protected <T> T doGet(Builder builder, final Class<T> clazz) {
        return new FileStoreServiceProxyExecutor<T>(this){
            @Override
            public T doRequest(Builder builder) {
                return builder.get(clazz);
            }
        }.request(builder);
    }

    protected <T> T doPost(Builder builder, final Class<T> clazz, final Object part) {
        return new FileStoreServiceProxyExecutor<T>(this){
            @Override
            public T doRequest(Builder builder) {
                return builder.post(clazz, part);
            }
        }.request(builder);
    }

    protected <T> T doPut(Builder builder, final Class<T> clazz, final Object part) {
        return new FileStoreServiceProxyExecutor<T>(this){
            @Override
            public T doRequest(Builder builder) {
                return builder.put(clazz, part);
            }
        }.request(builder);
    }

    protected <T> T doDelete(Builder builder, final Class<T> clazz, final Object part) {
        return new FileStoreServiceProxyExecutor<T>(this){
            @Override
            public T doRequest(Builder builder) {
                return builder.delete(clazz, part);
            }
        }.request(builder);
    }
}
| |
package org.zaproxy.zap.extension.cmss;
import java.net.URL;
import java.util.ArrayList;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.io.IOUtils;
/**
 * Fast CMS version fingerprinting helpers (Joomla based on ODZscanner,
 * WordPress based on tiny_mce.js MD5 checksums).
 *
 * <p>All remote probes are best-effort: network or parse failures are swallowed
 * and simply produce no match, never an exception to the caller.
 */
public class FastFingerprinter {

    /** {@code htaccess.txt} SVN revision signatures mapped to Joomla versions/ranges (ODZscanner data). */
    private static final String[][] HTACCESS_SIGNATURES = {
            { "47 2005-09-15 02:55:27Z rhuk", "[1.0.0 - 1.0.2]" },
            { "423 2005-10-09 18:23:50Z stingrey", "1.0.3" },
            { "1005 2005-11-13 17:33:59Z stingrey", "[1.0.4 - 1.0.5]" },
            { "1570 2005-12-29 05:53:33Z eddieajau", "[1.0.6 - 1.0.7]" },
            { "2368 2006-02-14 17:40:02Z stingrey", "[1.0.8 - 1.0.9]" },
            // NOTE(review): this signature looks like two revision strings fused together
            // ("...stingrey" + "7 2005-09-15 02:55:27Z rhuk"); kept verbatim from the
            // original code — verify against the ODZscanner signature list.
            { "44085 2006-06-21 16:03:54Z stingrey7 2005-09-15 02:55:27Z rhuk", "1.0.10" },
            { "4756 2006-08-25 16:07:11Z stingrey", "1.0.11" },
            { "5973 2006-12-11 01:26:33Z robs", "1.0.12" },
            { "5975 2006-12-11 01:26:33Z robs", "[1.0.13 - 1.0.15]" } };

    /** {@code configuration.php-dist} SVN revision signatures mapped to Joomla versions/ranges. */
    private static final String[][] DIST_SIGNATURES = {
            { "47 2005-09-15 02:55:27Z rhuk", "1.0.0" },
            { "217 2005-09-21 15:15:58Z stingrey", "[1.0.1 - 1.0.2]" },
            { "506 2005-10-13 05:49:24Z stingrey", "[1.0.3 - 1.0.7]" },
            { "2622 2006-02-26 04:16:09Z stingrey", "1.0.8" },
            { "3754 2006-05-31 12:08:37Z stingrey", "[1.0.9 - 1.0.10]" } };

    /**
     * MD5 checksums of {@code wp-includes/js/tinymce/tiny_mce.js} mapped to the
     * WordPress versions shipping that file (semicolon-separated). List from:
     * http://www.antoine-cervoise.fr/2012/10/20/wordpress-version-checker-new-md5-list/?lang=fr
     */
    private static final String[][] TINYMCE_MD5 = {
            { "a306a72ce0f250e5f67132dc6bcb2ccb", "2.0; 2.0.1; 2.0.4; 2.0.5; 2.0.6; 2.0.7; 2.0.8; 2.0.9; 2.0.10; 2.0.11" },
            { "4f04728cb4631a553c4266c14b9846aa", "2.1; 2.1.1; 2.1.2; 2.1.3" },
            { "25e1e78d5b0c221e98e14c6e8c62084f", "2.2; 2.2.1; 2.2.2; 2.2.3" },
            { "83c83d0f0a71bd57c320d93e59991c53", "2.3; 2.3.1; 2.3.2; 2.3.3" },
            { "7293453cf0ff5a9a4cfe8cebd5b5a71a", "2.5" },
            { "a3d05665b236944c590493e20860bcdb", "2.5.1" },
            { "61740709537bd19fb6e03b7e11eb8812", "2.6; 2.6.1; 2.6.2; 2.6.3; 2.6.5" },
            { "e6bbc53a727f3af003af272fd229b0b2", "2.7; 2.7.1" },
            { "56c606da29ea9b8f8d823eeab8038ee8", "2.8; 2.8.1; 2.8.2; 2.8.3; 2.8.4; 2.8.5; 2.8.6" },
            { "128e75ed19d49a94a771586bf83265ec", "2.9; 2.9.1; 2.9.2; 3.0; 3.0.1; 3.0.2; 3.0.3; 3.0.4; 3.0.5; 3.0.6" },
            { "82ac611e3da57fa3e9973c37491486ee", "3.1" },
            { "e52dfe5056683d653536324fee39ca08", "3.1.1; 3.1.2; 3.1.3; 3.1.4" },
            { "a57c0d7464527bc07b34d675d4bf0159", "3.2; 3.2.1" },
            { "9754385dabfc67c8b6d49ad4acba25c3", "3.3; 3.3.1; 3.3.2; 3.3.3" },
            { "7424043e0838819af942d2fc530e8469", "3.4; 3.4.1; 3.4.2" } };

    /**
     * Fingerprints a Joomla installation by probing well-known files and matching
     * version-specific signatures (based on ODZscanner).
     *
     * @param url base URL of the suspected Joomla site
     * @return matched version strings / version ranges, possibly empty
     */
    public static ArrayList<String> JoomlaFastFingerprint(URL url) {
        ArrayList<String> result = new ArrayList<String>();
        org.jsoup.nodes.Document doc = null;
        try {
            WebPage wp = new WebPage(url);
            doc = wp.getDocument();
        } catch (Exception e) {
            e.printStackTrace();
        }
        // Each probe independently degrades to "" on failure.
        String comEsi = fetchQuietly(url, "/index.php?option=com_esi");
        String readme = fetchQuietly(url, "/README.txt");
        String htaccess = fetchQuietly(url, "/htaccess.txt");
        String dist = fetchQuietly(url, "/configuration.php-dist");
        // 1.0.x: trailing HTML comment marker, com_esi error page, or known static files.
        try {
            if (searchByRegex("<\\/html> <!-- \\d{1,30} -->", doc.toString())
                    || searchByRegex("The page you are trying to access does not exist", comEsi)
                    || WebAppGuesser.checkIfExist(new URL(url.toString() + "/language/english.xml"))
                    || WebAppGuesser.checkIfExist(new URL(url.toString() + "/administrator/templates/joomla_admin/images/security.png")))
                result.add("1.0.x");
        } catch (Exception ignored) {
            // doc may be null or a probe may fail — treat as "no match"
        }
        // 1.5.x: generator meta text, 404 component page, or khepri admin template images.
        try {
            if (searchByRegex(" Joomla! 1.5 - Open Source Content Management", doc.toString())
                    || searchByRegex("404- Component not found", comEsi)
                    || WebAppGuesser.checkIfExist(new URL(url.toString() + "/administrator/templates/khepri/images/j_login_lock.jpg"))
                    || WebAppGuesser.checkIfExist(new URL(url.toString() + "/administrator/templates/khepri/images/j_button1_next.png")))
                result.add("1.5.x");
        } catch (Exception ignored) {
        }
        // 3.0.x: README hint or isis admin template sprite.
        try {
            if (searchByRegex("package to version 3.0.x", readme)
                    || WebAppGuesser.checkIfExist(new URL(url.toString()
                            + "/administrator/templates/isis/img/glyphicons-halflings.png")))
                result.add("3.0.x");
        } catch (Exception ignored) {
        }
        // Revision-string signatures in htaccess.txt, then configuration.php-dist.
        for (String[] signature : HTACCESS_SIGNATURES) {
            if (searchByRegex(signature[0], htaccess)) result.add(signature[1]);
        }
        for (String[] signature : DIST_SIGNATURES) {
            if (searchByRegex(signature[0], dist)) result.add(signature[1]);
        }
        return result;
    }

    /**
     * @return {@code true} if {@code regex} matches anywhere in {@code str}
     */
    public static boolean searchByRegex(String regex, String str) {
        return Pattern.compile(regex).matcher(str).find();
    }

    /**
     * Fingerprints a WordPress installation by MD5-checksumming
     * {@code wp-includes/js/tinymce/tiny_mce.js} and looking the digest up in
     * {@link #TINYMCE_MD5}. Method described at:
     * http://www.antoine-cervoise.fr/2012/10/20/wordpress-version-checker-new-md5-list/?lang=fr
     *
     * @param url base URL of the suspected WordPress site
     * @return candidate version strings, possibly empty
     */
    public static ArrayList<String> WordpressFastFingerprint(URL url) {
        ArrayList<String> result = new ArrayList<String>();
        try {
            URL indicFileUrl = new URL(url.toString() + "/wp-includes/js/tinymce/tiny_mce.js");
            if (!WebAppGuesser.checkIfExist(indicFileUrl)) return result;
            String content = IOUtils.toString(CMSSUtils.getFileFromUrl(indicFileUrl), "UTF-8");
            // Re-encode with UTF-8 explicitly: the platform default charset would make
            // the checksum machine-dependent and silently break the lookup.
            String chksum = CMSSUtils.checksum(content.getBytes("UTF-8"));
            for (String[] entry : TINYMCE_MD5) {
                if (entry[0].equals(chksum)) {
                    for (String version : entry[1].split(";")) {
                        result.add(version);
                    }
                }
            }
        } catch (Exception e) {
            // Best-effort probe: log and return whatever was matched so far.
            e.printStackTrace();
        }
        return result;
    }

    /**
     * Combines the results of Wappalyzer analysis and/or blind guessing.
     *
     * @param targetUrl          site to fingerprint
     * @param whatToFingerPrint  categories handed to Wappalyzer
     * @param POrAOption         1 = Wappalyzer only, 3 = both, anything else = blind guess only
     * @return merged application list
     * @throws Exception if either analysis fails
     */
    public static ArrayList<String> filterResults(URL targetUrl, ArrayList<String> whatToFingerPrint, int POrAOption) throws Exception {
        ArrayList<String> result = new ArrayList<String>();
        if (POrAOption == 1 || POrAOption == 3) {
            result.addAll(Wappalyzer.analyse(targetUrl, whatToFingerPrint));
            if (POrAOption == 3) {
                result.addAll(WebAppGuesser.guessApps(targetUrl));
            }
        } else {
            result.addAll(WebAppGuesser.guessApps(targetUrl));
        }
        return result;
    }

    /** Downloads {@code base + path} as UTF-8 text, returning {@code ""} on any failure. */
    private static String fetchQuietly(URL base, String path) {
        try {
            return IOUtils.toString(CMSSUtils.getFileFromUrl(new URL(base.toString() + path)), "UTF-8");
        } catch (Exception ignored) {
            return "";
        }
    }
}
| |
/*******************************************************************************
* Copyright 2015 Maximilian Stark | Dakror <mail@dakror.de>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package de.dakror.factory.game.entity.machine;
import java.awt.Color;
import java.awt.Graphics2D;
import java.awt.RenderingHints;
import java.awt.event.MouseEvent;
import java.awt.image.BufferedImage;
import java.util.ArrayList;
import de.dakror.factory.game.Game;
import de.dakror.factory.game.entity.Entity;
import de.dakror.factory.game.entity.item.Item;
import de.dakror.factory.game.entity.item.ItemType;
import de.dakror.factory.game.entity.item.Items;
import de.dakror.factory.game.entity.machine.tube.Tube;
import de.dakror.factory.game.world.Block;
import de.dakror.factory.game.world.World.Cause;
import de.dakror.factory.ui.ItemList;
import de.dakror.factory.util.Filter;
import de.dakror.factory.util.TubePoint;
import org.json.JSONArray;
import org.json.JSONObject;
import de.dakror.gamesetup.ui.ClickEvent;
import de.dakror.gamesetup.ui.Container.DefaultContainer;
/**
 * Base class for all machines placed in the world. A machine occupies a
 * rectangular block area, exposes tube connection points, consumes items that
 * match its input filters and produces items according to its output filters.
 *
 * @author Dakror
 */
public abstract class Machine extends Entity {
    /** Tick interval between attempts to push a finished item into the output tube. */
    public static final int REQUEST_SPEED = 40;
    protected String name;
    /** Tube connection points relative to the machine; drawn blue for inputs, red for outputs. */
    protected ArrayList<TubePoint> points = new ArrayList<>();
    /** Item filters this machine accepts as input. */
    protected ArrayList<Filter> inputFilters = new ArrayList<>();
    /** Item filters this machine produces as output. */
    protected ArrayList<Filter> outputFilters = new ArrayList<>();
    protected Items items;
    protected boolean running = true;
    protected boolean drawFrame = true;
    protected boolean working = false;
    // If true, produced items inherit the material of the consumed item (category-based output filters).
    protected boolean outputSameMaterial = true;
    public boolean forceGuiToStay = false;
    // speed: ticks per production cycle; tick: last seen world tick; startTick: tick the current cycle began.
    protected int speed, tick, startTick;
    public DefaultContainer container;

    /**
     * @param x      x position in block coordinates
     * @param y      y position in block coordinates
     * @param width  width in blocks
     * @param height height in blocks
     */
    public Machine(float x, float y, int width, int height) {
        super(x * Block.SIZE, y * Block.SIZE, width * Block.SIZE, height * Block.SIZE);
        items = new Items();
        container = new DefaultContainer();
        addClickEvent(new ClickEvent() {
            @Override
            public void trigger() {
                // Open this machine's GUI unless another machine's GUI is pinned open.
                if (Game.currentGame.worldActiveMachine == null || !Game.currentGame.worldActiveMachine.forceGuiToStay) Game.currentGame.worldActiveMachine = Machine.this;
            }
        });
    }

    @Override
    public void draw(Graphics2D g) {
        Color c = g.getColor();
        g.setColor(Color.black);
        if (drawFrame) g.drawRect(x, y, width, height);
        g.setColor(c);
        // Tubes draw their icon in the base pass; other machines draw it in drawAbove().
        if (this instanceof Tube) drawIcon(g);
    }

    /** Draws tube connection markers and the machine icon on top of the base layer. */
    public void drawAbove(Graphics2D g) {
        if (!(this instanceof Tube)) {
            Color c = g.getColor();
            for (TubePoint p : points) {
                g.setColor(Color.white);
                g.fillRect(x + p.x * Block.SIZE + 4, y + p.y * Block.SIZE + 4, Block.SIZE - 8, Block.SIZE - 8);
                // Blue marks an input point, red an output point.
                g.setColor(p.in ? Color.blue : Color.red);
                if (p.horizontal) g.fillRect(x + p.x * Block.SIZE + 8, y + p.y * Block.SIZE + (p.up ? 4 : Block.SIZE - 8), Block.SIZE - 16, 4);
                else g.fillRect(x + p.x * Block.SIZE + (p.up ? 4 : Block.SIZE - 8), y + p.y * Block.SIZE + 8, 4, Block.SIZE - 16);
            }
            g.setColor(c);
            drawIcon(g);
        }
        // state != 0 presumably marks a selected/highlighted entity (inherited field) — confirm.
        if (state != 0) {
            Color c = g.getColor();
            g.setColor(Color.darkGray);
            g.drawRect(x, y, width - 1, height - 1);
            g.setColor(c);
        }
    }

    public void drawGUI(Graphics2D g) {
        container.draw(g);
    }

    /** Renders this machine's icon at the origin into a new image (used for UI previews). */
    public BufferedImage getImage() {
        BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = (Graphics2D) bi.getGraphics();
        g.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BILINEAR);
        g.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON);
        g.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY);
        g.setRenderingHint(RenderingHints.KEY_TEXT_ANTIALIASING, RenderingHints.VALUE_TEXT_ANTIALIAS_ON);
        Machine m = (Machine) clone();
        m.x = 0;
        m.y = 0;
        // BUGFIX: previously called drawIcon(g) on `this`, drawing at world coordinates
        // and leaving the origin-positioned clone unused.
        m.drawIcon(g);
        return bi;
    }

    protected abstract void drawIcon(Graphics2D g);

    @Override
    protected void tick(int tick) {
        this.tick = tick;
        if (inputFilters.size() > 0) {
            if (!working) {
                // Periodically push one finished item into the tube below output point 1.
                // NOTE(review): assumes points.get(1) is the output connection — confirm for all subclasses.
                if (tick % REQUEST_SPEED == 0 && items.getLength(outputFilters) > 0 && Game.world.isTube(x + points.get(1).x * Block.SIZE, y + points.get(1).y * Block.SIZE + Block.SIZE)) {
                    ItemType it = items.getFilled().get(0);
                    Item item = new Item(x + points.get(1).x * Block.SIZE, y + points.get(1).y * Block.SIZE, it);
                    Game.world.addEntity(item);
                    items.add(it, -1);
                    if (items.getLength() == 0) Game.world.dispatchEntityUpdate(Cause.MACHINE_DONE, this);
                }
                // Start a production cycle once every input slot is filled.
                if (items.getLength(inputFilters) == inputFilters.size()) {
                    working = true;
                    startTick = tick;
                }
            }
            // Finish the cycle after `speed` ticks: consume inputs, create outputs.
            if (working && (tick - startTick) % speed == 0 && startTick != tick) {
                for (ItemType t : items.getFilled(inputFilters)) {
                    if (t.hasMaterial() && outputSameMaterial) {
                        for (Filter f : outputFilters) {
                            if (f.c != null) {
                                // Category filter: output the category item made of the input's material.
                                items.add(ItemType.getItemsByCategories(t.getMaterial(), f.c)[0], 1);
                            }
                        }
                    }
                    items.set(t, 0);
                }
                for (Filter f : outputFilters)
                    if (f.c == null) items.add(f.t, 1);
                working = false;
            }
        }
    }

    @Override
    protected void onReachTarget() {}

    /** Right-click removes the machine, unless items overlap it or a GUI is pinned. */
    @Override
    public void mouseReleased(MouseEvent e) {
        if (contains2(e.getPoint()) && e.getButton() == MouseEvent.BUTTON3 && (Game.currentGame.worldActiveMachine == null || !Game.currentGame.worldActiveMachine.forceGuiToStay)
                && Game.currentGame.activeMachine == null) {
            for (Entity e1 : Game.world.getEntities())
                if (e1 instanceof Item && getArea().intersects(e1.getArea())) return;
            dead = true;
        }
        if (!dead) super.mouseReleased(e);
    }

    public ArrayList<TubePoint> getTubePoints() {
        return points;
    }

    /** Instantiates a Tube entity at each connection point and starts the machine. */
    public void placeTubePoints() {
        for (TubePoint tp : points)
            Game.world.getEntities().add(new Tube(x / Block.SIZE + tp.x, y / Block.SIZE + tp.y));
        running = true;
    }

    public String getName() {
        return name;
    }

    public Items getItems() {
        return items;
    }

    /** Removes the tubes owned by this machine and closes its item list UI if open. */
    @Override
    public void onRemoval() {
        if (this instanceof Tube) return;
        for (Entity e : Game.world.getEntities()) {
            if (e instanceof Tube && getArea().contains(e.getArea())) e.setDead(true);
        }
        if (Game.currentGame.getActiveLayer() instanceof ItemList) Game.currentGame.removeLayer(Game.currentGame.getActiveLayer());
    }

    public boolean isRunning() {
        return running;
    }

    public boolean isWorking() {
        return working;
    }

    @Override
    public void onReachPathNode() {}

    /** @return true if {@code t} matches any input filter (or there are no filters). */
    public boolean matchesFilters(ItemType t) {
        if (inputFilters.size() == 0) return true;
        for (Filter f : inputFilters)
            if (t.matchesFilter(f)) return true;
        return false;
    }

    /** @return true if both item types match the same input filter. */
    public boolean matchSameFilters(ItemType t, ItemType t2) {
        if (inputFilters.size() == 0) return false;
        for (Filter f : inputFilters)
            if (t.matchesFilter(f) && t2.matchesFilter(f)) return true;
        return false;
    }

    /**
     * Decides whether this machine would accept one more item of type {@code t}:
     * not while working or holding finished output, and only up to the number of
     * matching input-filter slots.
     */
    public boolean wantsItem(ItemType t) {
        if (working || items.getLength(outputFilters) > 0) return false;
        if (inputFilters.size() == 0) return true;
        if (!matchesFilters(t)) return false;
        int amount = 0;
        Filter filter = null;
        for (Filter f : inputFilters) {
            if (t.matchesFilter(f) && (filter == null || (f.c == filter.c && f.t == filter.t))) {
                filter = f;
                amount++;
            }
        }
        return items.get(t) + 1 <= amount;
    }

    @Override
    public void onEntityUpdate(Cause cause, Object source) {}

    /** Serializes machine state (position, items, filters, work progress) for saving. */
    @Override
    public JSONObject getData() throws Exception {
        JSONObject o = new JSONObject();
        o.put("c", getClass().getName().replace("de.dakror.factory.game.entity.", ""));
        o.put("x", x / Block.SIZE);
        o.put("y", y / Block.SIZE);
        o.put("i", items.getData());
        o.put("w", working);
        o.put("r", running);
        JSONArray is = new JSONArray();
        for (Filter f : inputFilters)
            is.put(f.getData());
        o.put("is", is);
        JSONArray os = new JSONArray();
        for (Filter f : outputFilters)
            os.put(f.getData()); // BUGFIX: output filters were appended to "is", so "os" was always saved empty
        o.put("os", os);
        o.put("sT", tick - startTick);
        return o;
    }

    /** Restores machine state previously produced by {@link #getData()}. */
    @Override
    public void setData(JSONObject data) throws Exception {
        items = new Items(data.getJSONObject("i"));
        working = data.getBoolean("w");
        running = data.getBoolean("r");
        inputFilters.clear();
        JSONArray is = data.getJSONArray("is");
        for (int i = 0; i < is.length(); i++)
            inputFilters.add(new Filter(is.getJSONArray(i)));
        outputFilters.clear();
        JSONArray os = data.getJSONArray("os");
        for (int i = 0; i < os.length(); i++)
            outputFilters.add(new Filter(os.getJSONArray(i)));
        // Rebase work progress onto the current world tick.
        startTick = Game.currentGame.updater.tick - data.getInt("sT");
    }

    public boolean hasInputFilters() {
        return inputFilters.size() > 0;
    }
}
| |
/*
* This file is part of Cubic Chunks Mod, licensed under the MIT License (MIT).
*
* Copyright (c) 2015-2021 OpenCubicChunks
* Copyright (c) 2015-2021 contributors
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package io.github.opencubicchunks.cubicchunks.core.worldgen.generator.vanilla;
import io.github.opencubicchunks.cubicchunks.api.util.Box;
import io.github.opencubicchunks.cubicchunks.api.util.Coords;
import io.github.opencubicchunks.cubicchunks.api.world.ICube;
import io.github.opencubicchunks.cubicchunks.api.world.ICubicWorld;
import io.github.opencubicchunks.cubicchunks.api.worldgen.CubeGeneratorsRegistry;
import io.github.opencubicchunks.cubicchunks.api.worldgen.CubePrimer;
import io.github.opencubicchunks.cubicchunks.api.worldgen.ICubeGenerator;
import io.github.opencubicchunks.cubicchunks.core.CubicChunks;
import io.github.opencubicchunks.cubicchunks.core.CubicChunksConfig;
import io.github.opencubicchunks.cubicchunks.core.asm.mixin.ICubicWorldInternal;
import io.github.opencubicchunks.cubicchunks.core.asm.mixin.core.common.IGameRegistry;
import io.github.opencubicchunks.cubicchunks.core.util.CompatHandler;
import io.github.opencubicchunks.cubicchunks.core.world.IColumnInternal;
import io.github.opencubicchunks.cubicchunks.core.world.cube.Cube;
import io.github.opencubicchunks.cubicchunks.core.worldgen.WorldgenHangWatchdog;
import io.github.opencubicchunks.cubicchunks.core.worldgen.generator.WorldGenUtils;
import mcp.MethodsReturnNonnullByDefault;
import net.minecraft.block.state.IBlockState;
import net.minecraft.entity.EnumCreatureType;
import net.minecraft.init.Biomes;
import net.minecraft.init.Blocks;
import net.minecraft.util.math.BlockPos;
import net.minecraft.world.World;
import net.minecraft.world.WorldProvider;
import net.minecraft.world.biome.Biome;
import net.minecraft.world.biome.Biome.SpawnListEntry;
import net.minecraft.world.biome.BiomeProviderSingle;
import net.minecraft.world.chunk.Chunk;
import net.minecraft.world.chunk.ChunkPrimer;
import net.minecraft.world.chunk.IChunkProvider;
import net.minecraft.world.chunk.storage.ExtendedBlockStorage;
import net.minecraft.world.gen.ChunkGeneratorOverworld;
import net.minecraft.world.gen.ChunkGeneratorSettings;
import net.minecraft.world.gen.IChunkGenerator;
import net.minecraftforge.fml.common.IWorldGenerator;
import net.minecraftforge.fml.common.ObfuscationReflectionHelper;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Random;
import javax.annotation.Nonnull;
import javax.annotation.ParametersAreNonnullByDefault;
/**
* A cube generator that tries to mirror vanilla world generation. Cubes in the normal world range will be copied from a
* vanilla chunk generator, cubes above and below that will be filled with the most common block in the
* topmost/bottommost layers.
*/
@ParametersAreNonnullByDefault
@MethodsReturnNonnullByDefault
public class VanillaCompatibilityGenerator implements ICubeGenerator {
private boolean isInit = false;
private int worldHeightCubes;
@Nonnull private final IChunkGenerator vanilla;
@Nonnull private final World world;
/**
* Last chunk that was generated from the vanilla world gen
*/
private Chunk lastChunk;
/**
* We generate all the chunks in the vanilla range at once. This variable prevents infinite recursion
*/
private boolean optimizationHack;
private Biome[] biomes;
/**
* Detected block for filling cubes below the world
*/
@Nonnull private IBlockState extensionBlockBottom = Blocks.STONE.getDefaultState();
/**
* Detected block for filling cubes above the world
*/
@Nonnull private IBlockState extensionBlockTop = Blocks.AIR.getDefaultState();
private boolean hasTopBedrock = false, hasBottomBedrock = true;
/**
* Create a new VanillaCompatibilityGenerator
*
* @param vanilla The vanilla generator to mirror
* @param world The world in which cubes are being generated
*/
public VanillaCompatibilityGenerator(IChunkGenerator vanilla, World world) {
    // Only store references here; heavy setup is deferred to tryInit() to avoid
    // circular dependencies during world construction.
    this.vanilla = vanilla;
    this.world = world;
}
// Lazy initialization to avoid circular dependencies.
/**
 * One-time setup: generates the vanilla chunk at (0, 0) and scans its lowest and
 * highest three block layers to detect which block should fill cubes below/above
 * the vanilla height range, and whether bedrock is present at either end.
 * Bedrock itself is excluded from the histogram vote so walls of bedrock do not
 * become the filler block.
 */
private void tryInit(IChunkGenerator vanilla, World world) {
    if (isInit) {
        return;
    }
    isInit = true;
    // heuristics TODO: add a config that overrides this
    lastChunk = vanilla.generateChunk(0, 0); // lets scan the chunk at 0, 0
    int worldHeightBlocks = ((ICubicWorld) world).getMaxGenerationHeight();
    worldHeightCubes = worldHeightBlocks / Cube.SIZE;
    // Histograms of block states in the bottom/top three layers of the sample chunk.
    Map<IBlockState, Integer> blockHistogramBottom = new HashMap<>();
    Map<IBlockState, Integer> blockHistogramTop = new HashMap<>();
    ExtendedBlockStorage bottomEBS = lastChunk.getBlockStorageArray()[0];
    for (int x = 0; x < Cube.SIZE; x++) {
        for (int z = 0; z < Cube.SIZE; z++) {
            // Scan three layers top / bottom each to guard against bedrock walls
            for (int y = 0; y < 3; y++) {
                // A null storage section means the section is all air.
                IBlockState blockState = bottomEBS == null ?
                        Blocks.AIR.getDefaultState() : bottomEBS.get(x, y, z);
                int count = blockHistogramBottom.getOrDefault(blockState, 0);
                blockHistogramBottom.put(blockState, count + 1);
            }
            for (int y = worldHeightBlocks - 1; y > worldHeightBlocks - 4; y--) {
                int localY = Coords.blockToLocal(y);
                ExtendedBlockStorage ebs = lastChunk.getBlockStorageArray()[Coords.blockToCube(y)];
                IBlockState blockState = ebs == null ? Blocks.AIR.getDefaultState() : ebs.get(x, localY, z);
                int count = blockHistogramTop.getOrDefault(blockState, 0);
                blockHistogramTop.put(blockState, count + 1);
            }
        }
    }
    CubicChunks.LOGGER.debug("Block histograms: \nTop: " + blockHistogramTop + "\nBottom: " + blockHistogramBottom);
    // Pick the most common non-bedrock block as the bottom extension filler.
    int topcount = 0;
    for (Map.Entry<IBlockState, Integer> entry : blockHistogramBottom.entrySet()) {
        if (entry.getValue() > topcount && entry.getKey().getBlock() != Blocks.BEDROCK) {
            extensionBlockBottom = entry.getKey();
            topcount = entry.getValue();
        }
    }
    hasBottomBedrock = blockHistogramBottom.getOrDefault(Blocks.BEDROCK.getDefaultState(), 0) > 0;
    CubicChunks.LOGGER.info("Detected filler block " + extensionBlockBottom.getBlock().getTranslationKey() + " " +
            "from layers [0, 2], bedrock=" + hasBottomBedrock);
    // Same vote for the top extension filler.
    topcount = 0;
    for (Map.Entry<IBlockState, Integer> entry : blockHistogramTop.entrySet()) {
        if (entry.getValue() > topcount && entry.getKey().getBlock() != Blocks.BEDROCK) {
            extensionBlockTop = entry.getKey();
            topcount = entry.getValue();
        }
    }
    hasTopBedrock = blockHistogramTop.getOrDefault(Blocks.BEDROCK.getDefaultState(), 0) > 0;
    CubicChunks.LOGGER.info("Detected filler block " + extensionBlockTop.getBlock().getTranslationKey() + " from" +
            " layers [" + (worldHeightBlocks - 3) + ", " + (worldHeightBlocks - 1) + "], bedrock=" + hasTopBedrock);
}
@Override
public void generateColumn(Chunk column) {
    // Fetch the vanilla biome grid covering this column, then write the biome
    // ids into the column's biome array.
    biomes = world.getBiomeProvider().getBiomes(biomes,
            Coords.cubeToMinBlock(column.x),
            Coords.cubeToMinBlock(column.z),
            Cube.SIZE, Cube.SIZE);
    byte[] biomeIds = column.getBiomeArray();
    for (int index = 0; index < biomeIds.length; index++) {
        biomeIds[index] = (byte) Biome.getIdForBiome(biomes[index]);
    }
}
@Override
public void recreateStructures(Chunk column) {
    // Delegate structure regeneration for this column to the wrapped vanilla generator.
    vanilla.recreateStructures(column, column.x, column.z);
}
/**
 * Builds a deterministic per-cube RNG derived from the world seed.
 * The x -> z -> y re-seeding order must stay fixed: changing it would alter
 * population results in existing worlds.
 */
private Random getCubeSpecificRandom(int cubeX, int cubeY, int cubeZ) {
    Random rand = new Random(world.getSeed());
    rand.setSeed(rand.nextInt() ^ cubeX);
    rand.setSeed(rand.nextInt() ^ cubeZ);
    rand.setSeed(rand.nextInt() ^ cubeY);
    return rand;
}
/**
 * Generates a cube: outside the vanilla height range the cube is filled with the
 * detected extension block; inside the range the blocks are copied from a chunk
 * produced by the wrapped vanilla generator, with bedrock swapped for the
 * appropriate filler block.
 */
@Override
public CubePrimer generateCube(int cubeX, int cubeY, int cubeZ) {
    try {
        WorldgenHangWatchdog.startWorldGen();
        tryInit(vanilla, world);
        CubePrimer primer = new CubePrimer();
        // Column-scoped RNG (seeded from x/z only) so bedrock replacement is
        // consistent for every cube of the same column.
        Random rand = new Random(world.getSeed());
        rand.setSeed(rand.nextInt() ^ cubeX);
        rand.setSeed(rand.nextInt() ^ cubeZ);
        if (cubeY < 0 || cubeY >= worldHeightCubes) {
            // Outside the vanilla range: fill with the bottom or top extension block.
            for (int y = 0; y < Cube.SIZE; y++) {
                for (int z = 0; z < Cube.SIZE; z++) {
                    for (int x = 0; x < Cube.SIZE; x++) {
                        IBlockState state = cubeY < 0 ? extensionBlockBottom : extensionBlockTop;
                        int blockY = Coords.localToBlock(cubeY, y);
                        state = WorldGenUtils.getRandomBedrockReplacement(world, rand, state, blockY, 5,
                                hasTopBedrock, hasBottomBedrock);
                        primer.setBlockState(x, y, z, state);
                    }
                }
            }
        } else {
            // Make vanilla generate a chunk for us to copy, reusing lastChunk when possible.
            if (lastChunk.x != cubeX || lastChunk.z != cubeZ) {
                if (CubicChunksConfig.optimizedCompatibilityGenerator) {
                    try (ICubicWorldInternal.CompatGenerationScope ignored =
                            ((ICubicWorldInternal.Server) world).doCompatibilityGeneration()) {
                        lastChunk = vanilla.generateChunk(cubeX, cubeZ);
                        ChunkPrimer chunkPrimer = ((IColumnInternal) lastChunk).getCompatGenerationPrimer();
                        if (chunkPrimer == null) {
                            // Optimized path did not capture a primer: permanently fall back.
                            CubicChunks.LOGGER.error("Optimized compatibility generation failed, disabling...");
                            CubicChunksConfig.optimizedCompatibilityGenerator = false;
                        } else {
                            replaceBedrock(chunkPrimer, rand);
                        }
                    }
                } else {
                    lastChunk = vanilla.generateChunk(cubeX, cubeZ);
                }
            }
            if (!optimizationHack) {
                optimizationHack = true;
                // Recursive generation: force all other cubes of this column in the
                // vanilla range to be created now, while lastChunk is still valid.
                // optimizationHack guards against infinite recursion.
                for (int y = worldHeightCubes - 1; y >= 0; y--) {
                    if (y == cubeY) {
                        continue;
                    }
                    ((ICubicWorld) world).getCubeFromCubeCoords(cubeX, y, cubeZ);
                }
                optimizationHack = false;
            }
            // Copy from vanilla, replacing bedrock as appropriate
            ChunkPrimer chunkPrimer = ((IColumnInternal) lastChunk).getCompatGenerationPrimer();
            if (chunkPrimer != null) {
                // Optimized path: wrap the vanilla primer directly (bedrock already replaced above).
                return new CubePrimerWrapper(chunkPrimer, cubeY);
            }
            ExtendedBlockStorage storage = lastChunk.getBlockStorageArray()[cubeY];
            if (((ICubicWorld) world).getMaxHeight() == 16) {
                // Special case for 16-block-high worlds: vanilla stores the terrain in
                // section 4; only cube 0 receives it — presumably a cubelet remapping, confirm.
                if (cubeY != 0) {
                    storage = null;
                } else {
                    storage = lastChunk.getBlockStorageArray()[4];
                }
            }
            if (storage != null && !storage.isEmpty()) {
                for (int y = 0; y < Cube.SIZE; y++) {
                    int blockY = Coords.localToBlock(cubeY, y);
                    for (int z = 0; z < Cube.SIZE; z++) {
                        for (int x = 0; x < Cube.SIZE; x++) {
                            IBlockState state = storage.get(x, y, z);
                            if (state == Blocks.BEDROCK.getDefaultState()) {
                                // Bedrock in the lower half of the cube becomes bottom filler,
                                // in the upper half top filler.
                                if (y < Cube.SIZE / 2) {
                                    state = extensionBlockBottom;
                                } else {
                                    state = extensionBlockTop;
                                }
                                state = WorldGenUtils.getRandomBedrockReplacement(world, rand, state, blockY, 5,
                                        hasTopBedrock, hasBottomBedrock);
                                primer.setBlockState(x, y, z, state);
                            } else {
                                state = WorldGenUtils.getRandomBedrockReplacement(world, rand, state, blockY, 5,
                                        hasTopBedrock, hasBottomBedrock);
                                primer.setBlockState(x, y, z, state);
                            }
                        }
                    }
                }
            }
        }
        return primer;
    } finally {
        WorldgenHangWatchdog.endWorldGen();
    }
}
    /**
     * Replaces vanilla bedrock in a compatibility-generated chunk primer with the
     * configured extension blocks, scanning only the layers where vanilla can
     * place bedrock.
     *
     * @param chunkPrimer the vanilla primer whose blocks are rewritten in place
     * @param rand        random source forwarded to the probabilistic replacement
     */
    private void replaceBedrock(ChunkPrimer chunkPrimer, Random rand) {
        // Bottom band: the lowest 8 block layers of the vanilla world.
        for (int y = 0; y < 8; y++) {
            replaceBedrockAtLayer(chunkPrimer, rand, y);
        }
        // Top band: the upper half (local y 8..15) of the topmost vanilla cube.
        // NOTE(review): assumes Coords.localToBlock(cubeY, localY) == cubeY*16 + localY
        // and Coords.cubeToMinBlock(cubeY) == cubeY*16 — confirm against Coords.
        int startY = Coords.localToBlock(worldHeightCubes - 1, 8);
        int endY = Coords.cubeToMinBlock(worldHeightCubes);
        for (int y = startY; y < endY; y++) {
            replaceBedrockAtLayer(chunkPrimer, rand, y);
        }
    }
private void replaceBedrockAtLayer(ChunkPrimer chunkPrimer, Random rand, int y) {
for (int z = 0; z < Cube.SIZE; z++) {
for (int x = 0; x < Cube.SIZE; x++) {
IBlockState state = chunkPrimer.getBlockState(x, y, z);
if (state == Blocks.BEDROCK.getDefaultState()) {
if (y < 64) {
chunkPrimer.setBlockState(x, y, z,
WorldGenUtils.getRandomBedrockReplacement(world, rand, extensionBlockBottom, y, 5, hasTopBedrock, hasBottomBedrock));
} else {
chunkPrimer.setBlockState(x, y, z,
WorldGenUtils.getRandomBedrockReplacement(world, rand, extensionBlockTop, y, 5, hasTopBedrock, hasBottomBedrock));
}
}
}
}
}
@Override
public void populate(ICube cube) {
try {
WorldgenHangWatchdog.startWorldGen();
tryInit(vanilla, world);
Random rand = getCubeSpecificRandom(cube.getX(), cube.getY(), cube.getZ());
CubeGeneratorsRegistry.populateVanillaCubic(world, rand, cube);
if (cube.getY() < 0 || cube.getY() >= worldHeightCubes) {
return;
}
// Cubes outside this range are only filled with their respective block
// No population takes place
if (cube.getY() >= 0 && cube.getY() < worldHeightCubes) {
for (int y = worldHeightCubes - 1; y >= 0; y--) {
// normal populators would not do this... but we are populating more than one cube!
((ICubicWorldInternal) world).getCubeFromCubeCoords(cube.getX(), y, cube.getZ()).setPopulated(true);
}
try {
CompatHandler.beforePopulate(world, vanilla);
vanilla.populate(cube.getX(), cube.getZ());
} catch (IllegalArgumentException ex) {
StackTraceElement[] stack = ex.getStackTrace();
if (stack == null || stack.length < 1 ||
!stack[0].getClassName().equals(Random.class.getName()) ||
!stack[0].getMethodName().equals("nextInt")) {
throw ex;
} else {
CubicChunks.LOGGER.error("Error while populating. Likely known mod issue, ignoring...", ex);
}
} finally {
CompatHandler.afterPopulate(world);
}
applyModGenerators(cube.getX(), cube.getZ(), world, vanilla, world.getChunkProvider());
}
} finally {
WorldgenHangWatchdog.endWorldGen();
}
}
    /**
     * Runs all registered Forge {@link IWorldGenerator}s for the given chunk column,
     * reproducing FML's {@code GameRegistry.generateWorld} seeding exactly so that
     * mod worldgen matches what it would produce in a vanilla world.
     *
     * @param x          chunk X coordinate
     * @param z          chunk Z coordinate
     * @param world      the world being generated
     * @param vanillaGen the wrapped vanilla chunk generator
     * @param provider   the chunk provider passed through to generators
     */
    private void applyModGenerators(int x, int z, World world, IChunkGenerator vanillaGen, IChunkProvider provider) {
        List<IWorldGenerator> generators = IGameRegistry.getSortedGeneratorList();
        if (generators == null) {
            // Lazily (re)build the sorted generator list on first use.
            IGameRegistry.computeGenerators();
            generators = IGameRegistry.getSortedGeneratorList();
            assert generators != null;
        }
        long worldSeed = world.getSeed();
        Random fmlRandom = new Random(worldSeed);
        // NOTE(review): '+' binds tighter than '>>', so both lines are effectively
        // nextLong() >> 3. This replicates FML's GameRegistry.generateWorld verbatim —
        // do NOT "fix" the precedence, or mod worldgen seeds would diverge from Forge.
        long xSeed = fmlRandom.nextLong() >> 2 + 1L;
        long zSeed = fmlRandom.nextLong() >> 2 + 1L;
        long chunkSeed = (xSeed * x + zSeed * z) ^ worldSeed;
        for (IWorldGenerator generator : generators) {
            // Every generator starts from the same chunk-specific seed, as in FML.
            fmlRandom.setSeed(chunkSeed);
            try {
                CompatHandler.beforeGenerate(world, generator);
                generator.generate(fmlRandom, x, z, world, vanillaGen, provider);
            } finally {
                CompatHandler.afterGenerate(world);
            }
        }
    }
@Override
public Box getFullPopulationRequirements(ICube cube) {
if (cube.getY() >= 0 && cube.getY() < worldHeightCubes) {
return new Box(
-1, -cube.getY(), -1,
0, worldHeightCubes - cube.getY() - 1, 0
);
}
return NO_REQUIREMENT;
}
@Override
public Box getPopulationPregenerationRequirements(ICube cube) {
if (cube.getY() >= 0 && cube.getY() < worldHeightCubes) {
return new Box(
0, -cube.getY(), 0,
1, worldHeightCubes - cube.getY() - 1, 1
);
}
return NO_REQUIREMENT;
}
    /** No-op: this compatibility generator does not recreate structures per cube. */
    @Override
    public void recreateStructures(ICube cube) {
    }
    /** Delegates creature spawn lists directly to the wrapped vanilla generator. */
    @Override
    public List<SpawnListEntry> getPossibleCreatures(EnumCreatureType creatureType, BlockPos pos) {
        return vanilla.getPossibleCreatures(creatureType, pos);
    }
    /** Delegates structure lookup to the wrapped vanilla generator. */
    @Override
    public BlockPos getClosestStructure(String name, BlockPos pos, boolean findUnexplored) {
        return vanilla.getNearestStructurePos(world, name, pos, findUnexplored);
    }
    /**
     * A {@link CubePrimer} view over a full-height vanilla {@link ChunkPrimer},
     * exposing only the 16-block vertical slice belonging to a single cube.
     */
    private static class CubePrimerWrapper extends CubePrimer {
        // Backing full-column primer produced by the vanilla generator.
        private final ChunkPrimer chunkPrimer;
        // Minimum absolute block Y of the wrapped cube.
        private final int cubeYBase;
        public CubePrimerWrapper(ChunkPrimer chunkPrimer, int cubeY) {
            super(null);
            this.chunkPrimer = chunkPrimer;
            this.cubeYBase = Coords.cubeToMinBlock(cubeY);
        }
        @Override
        public IBlockState getBlockState(int x, int y, int z) {
            // NOTE(review): assumes cubeYBase is a multiple of 16 and y is a local
            // coordinate (0..15), making OR equivalent to addition — confirm Coords.
            return chunkPrimer.getBlockState(x, y | cubeYBase, z);
        }
        @Override
        public void setBlockState(int x, int y, int z, @Nonnull IBlockState state) {
            // Same OR-as-addition trick as getBlockState.
            chunkPrimer.setBlockState(x, y | cubeYBase, z, state);
        }
    }
}
| |
/*
* Copyright 2014, Stratio.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.stratio.deep.commons.utils;
import java.lang.reflect.Constructor;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.ArrayBlockingQueue;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.RejectedExecutionHandler;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import com.stratio.deep.commons.config.BaseConfig;
import com.stratio.deep.commons.config.DeepJobConfig;
import com.stratio.deep.commons.config.ExtractorConfig;
import com.stratio.deep.commons.entity.Cell;
import com.stratio.deep.commons.entity.Cells;
import com.stratio.deep.commons.entity.IDeepType;
import com.stratio.deep.commons.exception.DeepExtractorInitializationException;
import com.stratio.deep.commons.exception.DeepGenericException;
import com.stratio.deep.commons.exception.DeepIOException;
import com.stratio.deep.commons.rdd.IExtractor;
import org.apache.log4j.Logger;
import scala.Tuple2;
/**
 * Utility class providing useful methods to manipulate the conversion
 * between ByteBuffers maps coming from the underlying Cassandra API to
 * instances of a concrete javabean.
 *
 * @author Luca Rosellini <luca@strat.io>
 */
public final class Utils {
    /**
     * The Log.
     */
    private static transient final Logger LOG = Logger.getLogger(Utils.class);

    /**
     * Creates a new instance of the given class.
     *
     * @param <T>   the type parameter
     * @param clazz the class object for which a new instance should be created.
     * @return the new instance of class clazz.
     * @throws DeepGenericException wrapping any reflective instantiation failure.
     */
    public static <T extends IDeepType> T newTypeInstance(Class<T> clazz) {
        try {
            return clazz.newInstance();
        } catch (InstantiationException | IllegalAccessException e) {
            throw new DeepGenericException(e);
        }
    }

    /**
     * Creates a new instance of the given class name.
     *
     * @param <T>         the type parameter
     * @param className   the fully qualified name of the class to instantiate.
     * @param returnClass the return class (used for type inference at the call site;
     *                    the cast to it is unchecked and not validated here).
     * @return the new instance of class clazz.
     * @throws DeepGenericException wrapping lookup or instantiation failures.
     */
    @SuppressWarnings("unchecked")
    public static <T> T newTypeInstance(String className, Class<T> returnClass) {
        try {
            Class<T> clazz = (Class<T>) Class.forName(className);
            return clazz.newInstance();
        } catch (InstantiationException | IllegalAccessException | ClassNotFoundException e) {
            throw new DeepGenericException(e);
        }
    }

    /**
     * Double-quotes the identifier so case-sensitive (uppercase) names survive CQL.
     * Leading/trailing whitespace is trimmed; existing quotes are not duplicated.
     *
     * @param identifier the identifier
     * @return the quoted identifier, or the input unchanged if null/empty.
     */
    public static String quote(String identifier) {
        if (StringUtils.isEmpty(identifier)) {
            return identifier;
        }
        String res = identifier.trim();
        if (!res.startsWith("\"")) {
            res = "\"" + res;
        }
        if (!res.endsWith("\"")) {
            res = res + "\"";
        }
        return res;
    }

    /**
     * Single-quotes the identifier (CQL string-literal style). Leading/trailing
     * whitespace is trimmed; existing quotes are not duplicated.
     *
     * @param identifier the identifier
     * @return the quoted identifier, or the input unchanged if null/empty.
     */
    public static String singleQuote(String identifier) {
        if (StringUtils.isEmpty(identifier)) {
            return identifier;
        }
        String res = identifier.trim();
        if (!res.startsWith("'")) {
            res = "'" + res;
        }
        if (!res.endsWith("'")) {
            res = res + "'";
        }
        return res;
    }

    /**
     * Returns a CQL batch query wrapping the given statements.
     *
     * @param statements the list of statements to use to generate the batch statement.
     * @return the batch statement.
     */
    public static String batchQueryGenerator(List<String> statements) {
        StringBuilder sb = new StringBuilder("BEGIN BATCH \n");
        for (String statement : statements) {
            sb.append(statement).append("\n");
        }
        sb.append(" APPLY BATCH;");
        return sb.toString();
    }

    /**
     * Splits columns names and values as required by Datastax java driver to generate an Insert query.
     *
     * @param tuple an object containing the key Cell(s) as the first element and all the other
     *              columns as the second element.
     * @return an object containing an array of column names as the first element and an array of
     *         column values as the second element; keys come first, in index order.
     */
    public static Tuple2<String[], Object[]> prepareTuple4CqlDriver(Tuple2<Cells, Cells> tuple) {
        Cells keys = tuple._1();
        Cells columns = tuple._2();
        String[] names = new String[keys.size() + columns.size()];
        Object[] values = new Object[keys.size() + columns.size()];
        // Key cells occupy the leading slots of both arrays.
        for (int k = 0; k < keys.size(); k++) {
            Cell cell = keys.getCellByIdx(k);
            names[k] = quote(cell.getCellName());
            values[k] = cell.getCellValue();
        }
        // Remaining column cells follow, offset by the number of keys.
        for (int v = keys.size(); v < (keys.size() + columns.size()); v++) {
            Cell cell = columns.getCellByIdx(v - keys.size());
            names[v] = quote(cell.getCellName());
            values[v] = cell.getCellValue();
        }
        return new Tuple2<>(names, values);
    }

    /**
     * Resolves the setter name for the property whose name is 'propertyName' whose type is 'valueType'
     * in the entity bean whose class is 'entityClass'.
     * If we don't find a setter following Java's naming conventions, before throwing an exception we try to
     * resolve the setter following Scala's naming conventions (property_$eq).
     *
     * @param propertyName the field name of the property whose setter we want to resolve.
     * @param entityClass  the bean class object in which we want to search for the setter.
     * @param valueType    the class type of the object that we want to pass to the setter.
     * @return the resolved setter.
     * @throws DeepIOException if neither naming convention resolves a setter.
     */
    @SuppressWarnings("unchecked")
    public static Method findSetter(String propertyName, Class entityClass, Class valueType) {
        Method setter;
        String setterName = "set" + propertyName.substring(0, 1).toUpperCase() +
                propertyName.substring(1);
        try {
            setter = entityClass.getMethod(setterName, valueType);
        } catch (NoSuchMethodException e) {
            // let's try with scala setter name
            try {
                setter = entityClass.getMethod(propertyName + "_$eq", valueType);
            } catch (NoSuchMethodException e1) {
                throw new DeepIOException(e1);
            }
        }
        return setter;
    }

    /**
     * Sets a (possibly inherited) field on the given object via reflection,
     * walking up the class hierarchy until the field is found.
     *
     * @param object     target object.
     * @param fieldName  name of the field to set.
     * @param fieldValue value to assign.
     * @return true if the field was found and set, false if no such field exists.
     * @throws IllegalStateException wrapping any reflective failure other than a missing field.
     */
    public static boolean setFieldWithReflection(Object object, String fieldName, Object fieldValue) {
        Class<?> clazz = object.getClass();
        while (clazz != null) {
            try {
                Field field = clazz.getDeclaredField(fieldName);
                field.setAccessible(true);
                field.set(object, fieldValue);
                return true;
            } catch (NoSuchFieldException e) {
                // Not declared here: keep searching in the superclass.
                clazz = clazz.getSuperclass();
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
        }
        return false;
    }

    /**
     * Resolves the getter name for the property whose name is 'propertyName'
     * in the entity bean whose class is 'entityClass'.
     * If we don't find a getter following Java's naming conventions, before throwing an exception
     * we try to resolve the getter following Scala's naming conventions.
     *
     * @param propertyName the field name of the property whose getter we want to resolve.
     * @param entityClass  the bean class object in which we want to search for the getter.
     * @return the resolved getter.
     * @throws DeepIOException if neither naming convention resolves a getter.
     */
    @SuppressWarnings("unchecked")
    public static Method findGetter(String propertyName, Class entityClass) {
        Method getter;
        String getterName = "get" + propertyName.substring(0, 1).toUpperCase() +
                propertyName.substring(1);
        try {
            getter = entityClass.getMethod(getterName);
        } catch (NoSuchMethodException e) {
            // Scala compiles a getter named exactly like the property; the "_$eq"
            // suffix previously used here is Scala's *setter* name and could never
            // match a getter.
            try {
                getter = entityClass.getMethod(propertyName);
            } catch (NoSuchMethodException e1) {
                throw new DeepIOException(e1);
            }
        }
        return getter;
    }

    /**
     * Returns the inet address for the specified location.
     *
     * @param location the address as String
     * @return the InetAddress object associated to the provided address.
     * @throws DeepIOException wrapping an UnknownHostException for unresolvable hosts.
     */
    public static InetAddress inetAddressFromLocation(String location) {
        try {
            return InetAddress.getByName(location);
        } catch (UnknownHostException e) {
            throw new DeepIOException(e);
        }
    }

    /**
     * Return the set of fields declared at all level of class hierachy
     *
     * @param clazz the clazz
     * @return the field [ ]
     */
    public static Field[] getAllFields(Class clazz) {
        return getAllFieldsRec(clazz, new ArrayList<Field>());
    }

    /**
     * Recursive helper for {@link #getAllFields}: accumulates superclass fields
     * first (root-most class first), then this class's declared fields.
     *
     * @param clazz  the clazz
     * @param fields the accumulator
     * @return the accumulated fields as an array
     */
    private static Field[] getAllFieldsRec(Class clazz, List<Field> fields) {
        Class superClazz = clazz.getSuperclass();
        if (superClazz != null) {
            getAllFieldsRec(superClazz, fields);
        }
        fields.addAll(Arrays.asList(clazz.getDeclaredFields()));
        return fields.toArray(new Field[fields.size()]);
    }

    /**
     * private constructor: utility class, not instantiable.
     */
    private Utils() {
    }

    /**
     * Strips the ":port" suffix (if any) from each address in the list.
     *
     * @param stringList the string list
     * @return a new list of addresses without ports, in the original order.
     */
    public static List<String> removeAddressPort(List<String> stringList) {
        List<String> adresNoPort = new ArrayList<>();
        for (String s : stringList) {
            int index = s.indexOf(":");
            if (index > -1) {
                adresNoPort.add(s.substring(0, index));
                continue;
            }
            adresNoPort.add(s);
        }
        return adresNoPort;
    }

    /**
     * Joins the (trimmed) host names with commas.
     *
     * @param hosts the hosts
     * @return the comma-separated host string.
     */
    public static String splitListByComma(List<String> hosts) {
        boolean firstHost = true;
        StringBuilder hostConnection = new StringBuilder();
        for (String host : hosts) {
            if (!firstHost) {
                hostConnection.append(",");
            }
            hostConnection.append(host.trim());
            firstHost = false;
        }
        return hostConnection.toString();
    }

    /**
     * Gets extractor instance.
     *
     * <p>The extractor class is taken from the config (falling back to the class
     * name); entities of type {@code Cells} use the no-arg constructor, any other
     * entity type uses the single-{@code Class} constructor.
     *
     * @param config the config
     * @return the extractor instance
     * @throws DeepExtractorInitializationException wrapping any reflective failure.
     */
    public static <T, S extends BaseConfig> IExtractor<T, S> getExtractorInstance(S config) {
        try {
            Class<T> rdd = (Class<T>) config.getExtractorImplClass();
            if (rdd == null) {
                rdd = (Class<T>) Class.forName(config.getExtractorImplClassName());
            }
            Constructor<T> c;
            if (config.getEntityClass().isAssignableFrom(Cells.class)) {
                c = rdd.getConstructor();
                return (IExtractor<T, S>) c.newInstance();
            } else {
                c = rdd.getConstructor(Class.class);
                return (IExtractor<T, S>) c.newInstance(config.getEntityClass());
            }
        } catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException
                | InvocationTargetException | NoSuchMethodException | SecurityException e) {
            String message = "A exception happens and we wrap with DeepExtractorInitializationException" + e.getMessage();
            LOG.error(message);
            throw new DeepExtractorInitializationException(message, e);
        }
    }

    /**
     * Cast number type.
     *
     * @param object the object (must be a {@link Number})
     * @param clazz  the target numeric wrapper class
     * @return the value of {@code object} converted to {@code clazz}
     * @throws ClassCastException if {@code clazz} is not a supported Number subtype.
     */
    public static Object castNumberType(Object object, Class clazz) {
        if (Number.class.isAssignableFrom(clazz)) {
            // AtomicInteger, AtomicLong, BigDecimal, BigInteger, Byte, Double, Float, Integer, Long, Short
            if (Double.class.isAssignableFrom(clazz)) {
                return ((Number) object).doubleValue();
            } else if (Long.class.isAssignableFrom(clazz)) {
                return ((Number) object).longValue();
            } else if (Float.class.isAssignableFrom(clazz)) {
                return ((Number) object).floatValue();
            } else if (Integer.class.isAssignableFrom(clazz)) {
                return ((Number) object).intValue();
            } else if (Short.class.isAssignableFrom(clazz)) {
                return ((Number) object).shortValue();
            } else if (Byte.class.isAssignableFrom(clazz)) {
                return ((Number) object).byteValue();
            } else if (BigInteger.class.isAssignableFrom(clazz)) {
                return BigInteger.valueOf(((Number) object).longValue());
            } else if (BigDecimal.class.isAssignableFrom(clazz)) {
                // NOTE(review): fractional part is dropped here (longValue) — confirm intended.
                return BigDecimal.valueOf(((Number) object).longValue());
            } else if (AtomicLong.class.isAssignableFrom(clazz)) {
                return new AtomicLong(((Number) object).longValue());
            } else if (AtomicInteger.class.isAssignableFrom(clazz)) {
                return new AtomicInteger(((Number) object).intValue());
            }
        }
        throw new ClassCastException("it is not a Number Type" + object.getClass() + "|" + clazz);
    }

    /**
     * Parses the string value into the requested numeric or String type.
     *
     * @param value        the textual value.
     * @param classCasting the target class.
     * @return the parsed value, or null if the class is not recognized yet.
     */
    public static Object castingUtil(String value, Class classCasting) {
        Object object = value;
        //Numeric
        if (Number.class.isAssignableFrom(classCasting)) {
            if (classCasting.isAssignableFrom(Double.class)) {
                return Double.valueOf(value);
            } else if (classCasting.isAssignableFrom(Long.class)) {
                return Long.valueOf(value);
            } else if (classCasting.isAssignableFrom(Float.class)) {
                return Float.valueOf(value);
            } else if (classCasting.isAssignableFrom(Integer.class)) {
                return Integer.valueOf(value);
            } else if (classCasting.isAssignableFrom(Short.class)) {
                return Short.valueOf(value);
            } else if (classCasting.isAssignableFrom(Byte.class)) {
                return Byte.valueOf(value);
            }
        } else if (String.class.isAssignableFrom(classCasting)) {
            return object.toString();
        }
        //Class not recognized yet
        return null;
    }

    /**
     * Initializes the given job config from the provided base config, dispatching on
     * the concrete config type.
     *
     * @param config        source configuration.
     * @param deepJobConfig job configuration to initialize (may be replaced).
     * @return the initialized job configuration.
     */
    public static <S extends BaseConfig, W extends DeepJobConfig> W initConfig(S config, W deepJobConfig) {
        if (config instanceof ExtractorConfig) {
            deepJobConfig.initialize((ExtractorConfig) config);
        } else if (deepJobConfig.getClass().isAssignableFrom(config.getClass())) {
            deepJobConfig = (W) ((W) config).initialize();
        } else {
            deepJobConfig.initialize((DeepJobConfig) config);
        }
        return deepJobConfig;
    }

    /**
     * Returns an instance clone.
     * this method gets every class property by reflection, including its parents properties
     *
     * @param t   the instance to clone (its class needs a no-arg constructor).
     * @param <T> the type parameter
     * @return T object.
     * @throws IllegalAccessException if a field is inaccessible.
     * @throws InstantiationException if the class or a collection/map field type
     *                                cannot be instantiated reflectively.
     */
    public static <T> T cloneObjectWithParents(T t) throws IllegalAccessException, InstantiationException {
        T clone = (T) t.getClass().newInstance();
        List<Field> allFields = new ArrayList<>();
        // Collect inherited fields first, then the class's own declared fields.
        Class parentClass = t.getClass().getSuperclass();
        while (parentClass != null) {
            Collections.addAll(allFields, parentClass.getDeclaredFields());
            parentClass = parentClass.getSuperclass();
        }
        Collections.addAll(allFields, t.getClass().getDeclaredFields());
        for (Field field : allFields) {
            int modifiers = field.getModifiers();
            //We skip final and static fields
            if ((Modifier.FINAL & modifiers) != 0 || (Modifier.STATIC & modifiers) != 0) {
                continue;
            }
            field.setAccessible(true);
            Object value = field.get(t);
            // Shallow-copy collections and maps into fresh containers so the clone
            // does not share them with the source. Null values are copied as null
            // (previously this path threw a NullPointerException).
            if (value != null && Collection.class.isAssignableFrom(field.getType())) {
                Collection collection = (Collection) field.get(clone);
                if (collection == null) {
                    collection = (Collection) value.getClass().newInstance();
                }
                collection.addAll((Collection) value);
                value = collection;
            } else if (value != null && Map.class.isAssignableFrom(field.getType())) {
                Map clonMap = (Map) value.getClass().newInstance();
                clonMap.putAll((Map) value);
                value = clonMap;
            }
            field.set(clone, value);
        }
        return clone;
    }

    /**
     * Returns an instance of ThreadPoolExecutor using an bounded queue and blocking when the worker
     * queue is full.
     *
     * @param nThreads  thread pool size
     * @param queueSize workers queue size
     * @return thread pool executor
     */
    public static ExecutorService newBlockingFixedThreadPoolExecutor(int nThreads, int queueSize) {
        BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<>(queueSize);
        RejectedExecutionHandler blockingRejectedExecutionHandler = new RejectedExecutionHandler() {
            @Override
            public void rejectedExecution(Runnable task, ThreadPoolExecutor executor) {
                try {
                    // Block the submitter until space frees up in the queue.
                    executor.getQueue().put(task);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag so callers can observe the interruption
                    // (previously swallowed silently).
                    Thread.currentThread().interrupt();
                }
            }
        };
        return new ThreadPoolExecutor(nThreads, nThreads,
                0L, TimeUnit.MILLISECONDS, blockingQueue,
                blockingRejectedExecutionHandler);
    }
}
| |
/*
* Copyright 2000-2017 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.java.codeInspection.dataFlow.rangeSet;
import com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet;
import com.intellij.codeInspection.dataFlow.value.DfaRelationValue.RelationType;
import com.intellij.psi.PsiPrimitiveType;
import com.intellij.psi.PsiType;
import com.intellij.psi.util.TypeConversionUtil;
import org.jetbrains.annotations.NotNull;
import org.junit.Test;
import java.util.HashMap;
import java.util.Random;
import java.util.function.Function;
import java.util.function.LongBinaryOperator;
import java.util.function.LongPredicate;
import java.util.function.LongUnaryOperator;
import java.util.stream.Collectors;
import static com.intellij.codeInspection.dataFlow.rangeSet.LongRangeSet.*;
import static org.junit.Assert.*;
public class LongRangeSetTest {
  /** Like {@code LongRangeSet.fromType}, but asserts the result is non-null. */
  @NotNull
  private static LongRangeSet fromTypeStrict(PsiType type) {
    LongRangeSet range = fromType(type);
    assertNotNull(range);
    return range;
  }
  /** Like {@code LongRangeSet.fromConstant}, but asserts the result is non-null. */
  @NotNull
  private static LongRangeSet fromConstantStrict(Object constant) {
    LongRangeSet range = fromConstant(constant);
    assertNotNull(range);
    return range;
  }
  /** Verifies the string rendering of empty, single-point and interval sets. */
  @Test
  public void testToString() {
    assertEquals("{}", empty().toString());
    assertEquals("{10}", point(10).toString());
    assertEquals("{10}", range(10, 10).toString());
    assertEquals("{10, 11}", range(10, 11).toString());
    assertEquals("{10..100}", range(10, 100).toString());
  }
  /** Verifies the ranges derived from primitive PSI types (null for non-integral). */
  @Test
  public void testFromType() {
    assertNull(fromType(PsiType.FLOAT));
    assertNull(fromType(PsiType.NULL));
    assertEquals("{-128..127}", fromTypeStrict(PsiType.BYTE).toString());
    assertEquals("{0..65535}", fromTypeStrict(PsiType.CHAR).toString());
    assertEquals("{-32768..32767}", fromTypeStrict(PsiType.SHORT).toString());
    assertEquals("{Integer.MIN_VALUE..Integer.MAX_VALUE}", fromTypeStrict(PsiType.INT).toString());
    assertEquals("{0..Integer.MAX_VALUE}", indexRange().toString());
    assertEquals("{Long.MIN_VALUE..Long.MAX_VALUE}", fromTypeStrict(PsiType.LONG).toString());
  }
  /** Verifies equality across equivalent representations (point vs. 1-length range). */
  @Test
  public void testEquals() {
    assertEquals(empty(), empty());
    assertEquals(point(10), point(10));
    assertNotEquals(point(10), point(11));
    assertEquals(point(10), range(10, 10));
    assertNotEquals(point(10), range(10, 11));
    assertEquals(range(10, 11), range(10, 11));
    assertNotEquals(range(10, 11), range(10, 12));
  }
  /** Verifies set subtraction, including Long.MIN_VALUE/MAX_VALUE boundary cases. */
  @Test
  public void testDiff() {
    assertEquals(empty(), empty().subtract(point(10)));
    assertEquals(point(10), point(10).subtract(empty()));
    assertEquals(point(10), point(10).subtract(point(11)));
    assertEquals(empty(), point(10).subtract(point(10)));
    assertEquals(point(10), point(10).subtract(range(15, 20)));
    assertEquals(point(10), point(10).subtract(range(-10, -5)));
    assertTrue(point(10).subtract(range(10, 20)).isEmpty());
    assertTrue(point(10).subtract(range(-10, 20)).isEmpty());
    assertTrue(point(10).subtract(range(-10, 10)).isEmpty());
    assertEquals("{0..20}", range(0, 20).subtract(range(30, Long.MAX_VALUE)).toString());
    assertEquals("{0..19}", range(0, 20).subtract(range(20, Long.MAX_VALUE)).toString());
    assertEquals("{0..18}", range(0, 20).subtract(range(19, Long.MAX_VALUE)).toString());
    assertEquals("{0}", range(0, 20).subtract(range(1, Long.MAX_VALUE)).toString());
    assertTrue(range(0, 20).subtract(range(0, Long.MAX_VALUE)).isEmpty());
    assertEquals("{Long.MIN_VALUE}", all().subtract(range(Long.MIN_VALUE + 1, Long.MAX_VALUE)).toString());
    assertEquals("{Long.MAX_VALUE}", all().subtract(range(Long.MIN_VALUE, Long.MAX_VALUE - 1)).toString());
    assertTrue(all().subtract(range(Long.MIN_VALUE, Long.MAX_VALUE)).isEmpty());
    assertEquals(indexRange(), fromTypeStrict(PsiType.INT).subtract(range(Long.MIN_VALUE, (long)-1)));
    assertTrue(all().subtract(all()).isEmpty());
  }
  /** Verifies multi-interval sets built via without()/subtract() and their rendering. */
  @Test
  public void testSets() {
    assertEquals("{0..9, 11..20}", range(0, 20).without(10).toString());
    assertEquals("{0, 20}", range(0, 20).subtract(range(1, 19)).toString());
    assertEquals("{0, 1, 19, 20}", range(0, 20).subtract(range(2, 18)).toString());
    assertEquals("{0..9, 12..20}", range(0, 20).without(10).without(11).toString());
    assertEquals("{0..9, 12..14, 16..20}", range(0, 20).without(10).without(11).without(15).toString());
    assertEquals("{0, 4..20}", range(0, 20).without(3).without(2).without(1).toString());
    assertEquals("{4..20}", range(0, 20).without(3).without(2).without(1).without(0).toString());
    assertEquals("{0..2, 5..15, 19, 20}",
                 range(0, 20).subtract(range(3, 18).subtract(range(5, 15))).toString());
    // Character sets with several punched-out code points.
    LongRangeSet first = fromTypeStrict(PsiType.CHAR).without(45);
    LongRangeSet second =
      fromTypeStrict(PsiType.CHAR).without(32).without(40).without(44).without(45).without(46).without(58).without(59).without(61);
    assertEquals("{0..44, 46..65535}", first.toString());
    assertEquals("{0..31, 33..39, 41..43, 47..57, 60, 62..65535}", second.toString());
    assertEquals("{32, 40, 44, 46, 58, 59, 61}", first.subtract(second).toString());
  }
  /** Verifies hashCode/equals consistency by using sets as HashMap keys. */
  @Test
  public void testHash() {
    HashMap<LongRangeSet, String> map = new HashMap<>();
    map.put(empty(), "empty");
    map.put(point(10), "10");
    map.put(range(10, 10), "10-10");
    map.put(range(10, 11), "10-11");
    map.put(range(10, 12), "10-12");
    LongRangeSet longNotChar = fromTypeStrict(PsiType.LONG).subtract(fromTypeStrict(PsiType.CHAR));
    map.put(longNotChar, "longNotChar");
    // point(10) and range(10, 10) must collide (equal keys), overwriting to "10-10".
    assertEquals("empty", map.get(empty()));
    assertEquals("10-10", map.get(point(10)));
    assertEquals("10-11", map.get(range(10, 11)));
    assertEquals("10-12", map.get(range(10, 12)));
    assertNull(map.get(range(11, 11)));
    assertEquals("longNotChar", map.get(fromTypeStrict(PsiType.LONG).subtract(fromTypeStrict(PsiType.CHAR))));
  }
  /** Verifies the intersects() predicate for points, ranges and multi-interval sets. */
  @Test
  public void testIntersects() {
    assertFalse(empty().intersects(fromTypeStrict(PsiType.LONG)));
    assertTrue(point(Long.MIN_VALUE).intersects(fromTypeStrict(PsiType.LONG)));
    assertFalse(point(10).intersects(point(11)));
    assertTrue(point(10).intersects(point(10)));
    assertTrue(range(10, 100).intersects(point(10)));
    assertTrue(range(10, 100).intersects(point(100)));
    assertFalse(range(10, 100).intersects(point(101)));
    assertFalse(range(10, 100).intersects(point(9)));
    LongRangeSet range1020 = range(10, 20);
    assertTrue(range1020.intersects(range1020));
    assertTrue(range1020.intersects(range(10, 30)));
    assertTrue(range1020.intersects(range(20, 30)));
    assertTrue(range1020.intersects(range(0, 30)));
    assertTrue(range1020.intersects(range(0, 10)));
    assertTrue(range1020.intersects(range(0, 20)));
    assertFalse(range1020.intersects(range(0, 9)));
    assertFalse(range1020.intersects(range(21, 30)));
    // {10, 11, 14..16, 19, 20}: holes at 12-13 and 17-18.
    LongRangeSet rangeSet = range1020.subtract(range(12, 13)).subtract(range(17, 18));
    assertFalse(rangeSet.intersects(point(12)));
    assertFalse(point(12).intersects(rangeSet));
    assertFalse(rangeSet.intersects(empty()));
    assertFalse(rangeSet.intersects(range(12, 13)));
    assertFalse(range(12, 13).intersects(rangeSet));
    assertFalse(rangeSet.intersects(range(0, 9)));
    assertFalse(rangeSet.intersects(range(21, 30)));
    assertTrue(rangeSet.intersects(rangeSet));
    assertTrue(rangeSet.intersects(range1020));
    assertTrue(rangeSet.intersects(point(11)));
    // Complement within range1020 must not intersect the original set.
    LongRangeSet rangeSet2 = range1020.subtract(rangeSet);
    assertEquals("{12, 13, 17, 18}", rangeSet2.toString());
    assertFalse(rangeSet.intersects(rangeSet2));
  }
  /** Verifies intersect() results for overlapping, touching and disjoint sets. */
  @Test
  public void testIntersect() {
    assertEquals("{0..100}", range(0, 100).intersect(range(0, 100)).toString());
    assertEquals("{100}", range(0, 100).intersect(range(100, 200)).toString());
    assertTrue(range(0, 100).intersect(range(101, 200)).isEmpty());
    assertTrue(point(100).intersect(point(200)).isEmpty());
    assertFalse(point(100).intersect(range(99, 101)).isEmpty());
    // Multi-interval set: {-1000..-501, -99..99, 501..1000}.
    LongRangeSet rangeSet = range(-1000, 1000).subtract(range(100, 500)).subtract(range(-500, -100));
    assertEquals("{-1000..-501, -99..99, 501..1000}", rangeSet.toString());
    assertEquals(point(99), rangeSet.intersect(point(99)));
    assertTrue(rangeSet.intersect(point(100)).isEmpty());
    assertEquals("{0..99, 501..1000}", rangeSet.intersect(indexRange()).toString());
  }
  /**
   * Property-based check of algebraic identities between intersect() and subtract()
   * on randomly generated sets. Uses a fixed seed (1) so failures are reproducible.
   */
  @Test
  public void testIntersectSubtractRandomized() {
    Random r = new Random(1);
    LongRangeSet[] data = r.ints(1000, 0, 1000)
      .mapToObj(x -> range(x, x + r.nextInt((x % 20) * 100 + 1))).toArray(LongRangeSet[]::new);
    for (int i = 0; i < 2000; i++) {
      int idx = r.nextInt(data.length);
      LongRangeSet left = data[idx];
      LongRangeSet right = data[r.nextInt(data.length)];
      LongRangeSet lDiff = left.subtract(right);
      LongRangeSet rDiff = right.subtract(left);
      LongRangeSet intersection = left.intersect(right);
      String message = left + " & " + right + " = " + intersection;
      // Intersection is commutative and bounded by both operands.
      assertEquals(message, intersection, right.intersect(left));
      if (!intersection.isEmpty()) {
        assertTrue(message, intersection.min() >= Math.max(left.min(), right.min()));
        assertTrue(message, intersection.max() <= Math.min(left.max(), right.max()));
      }
      // A ∩ B == B \ (LONG \ A) == A \ (LONG \ B).
      assertEquals(message, intersection, right.subtract(fromTypeStrict(PsiType.LONG).subtract(left)));
      assertEquals(message, intersection, left.subtract(fromTypeStrict(PsiType.LONG).subtract(right)));
      // Element-wise membership checks (capped at 1000 elements per set).
      intersection.stream().limit(1000).forEach(e -> {
        assertTrue(left.contains(e));
        assertTrue(right.contains(e));
      });
      lDiff.stream().limit(1000).forEach(e -> {
        assertTrue(left.contains(e));
        assertFalse(right.contains(e));
      });
      rDiff.stream().limit(1000).forEach(e -> {
        assertFalse(left.contains(e));
        assertTrue(right.contains(e));
      });
      // Feed derived sets back into the pool to grow structural variety.
      switch (r.nextInt(3)) {
        case 0:
          data[idx] = lDiff;
          break;
        case 1:
          data[idx] = rDiff;
          break;
        case 2:
          data[idx] = intersection;
          break;
      }
    }
  }
  /** Verifies fromConstant() for integral constants and null for non-integral input. */
  @Test
  public void testFromConstant() {
    assertEquals("{0}", fromConstantStrict(0).toString());
    assertEquals("{0}", fromConstantStrict(0L).toString());
    assertEquals("{1}", fromConstantStrict((byte)1).toString());
    assertEquals("{97}", fromConstantStrict('a').toString());
    assertNull(fromConstant(null));
    assertNull(fromConstant(1.0));
  }
  /** Verifies the sets implied by relational operators applied to a base range. */
  @Test
  public void testFromRelation() {
    assertEquals(range(101, Long.MAX_VALUE), range(100, 200).fromRelation(RelationType.GT));
    assertEquals(range(100, Long.MAX_VALUE), range(100, 200).fromRelation(RelationType.GE));
    assertEquals(range(Long.MIN_VALUE, 199), range(100, 200).fromRelation(RelationType.LT));
    assertEquals(range(Long.MIN_VALUE, 200), range(100, 200).fromRelation(RelationType.LE));
    assertEquals(range(100, 200), range(100, 200).fromRelation(RelationType.EQ));
    assertNull(range(100, 200).fromRelation(RelationType.IS));
    // NE over a multi-value range cannot exclude anything: the full long range.
    assertEquals(fromTypeStrict(PsiType.LONG), range(100, 200).fromRelation(RelationType.NE));
    assertEquals("{Long.MIN_VALUE..99, 101..Long.MAX_VALUE}", point(100).fromRelation(RelationType.NE).toString());
  }
  /**
   * Verifies abs() in long (true) and int (false) modes, including the overflow
   * case where abs(MIN_VALUE) stays MIN_VALUE in the same width.
   */
  @Test
  public void testAbs() {
    assertTrue(empty().abs(true).isEmpty());
    assertEquals(point(Long.MAX_VALUE), point(Long.MIN_VALUE + 1).abs(true));
    assertEquals(point(Long.MIN_VALUE), point(Long.MIN_VALUE).abs(true));
    assertEquals(point(Integer.MIN_VALUE), point(Integer.MIN_VALUE).abs(false));
    assertEquals(point(Integer.MAX_VALUE + 1L), point(Integer.MIN_VALUE).abs(true));
    assertEquals(range(100, 200), range(100, 200).abs(true));
    assertEquals(range(0, 200), range(-1, 200).abs(true));
    assertEquals(range(0, 200), range(-200, 200).abs(false));
    assertEquals(range(0, 201), range(-201, 200).abs(false));
    assertEquals(range(0, Long.MAX_VALUE).unite(point(Long.MIN_VALUE)), all().abs(true));
    assertEquals(range(100, Integer.MAX_VALUE).unite(point(Integer.MIN_VALUE)), range(Integer.MIN_VALUE, -100).abs(false));
    assertEquals(range(100, Integer.MAX_VALUE + 1L), range(Integer.MIN_VALUE, -100).abs(true));
    LongRangeSet set = range(-900, 1000).subtract(range(-800, -600)).subtract(range(-300, 100)).subtract(range(500, 700));
    assertEquals("{-900..-801, -599..-301, 101..499, 701..1000}", set.toString());
    assertEquals("{101..599, 701..1000}", set.abs(false).toString());
  }
@Test
public void testNegate() {
// negate(isLong) mirrors Java's unary-minus overflow semantics for the chosen width.
assertTrue(empty().negate(true).isEmpty());
assertEquals(point(Long.MAX_VALUE), point(Long.MIN_VALUE + 1).negate(true));
// -Long.MIN_VALUE overflows back to Long.MIN_VALUE.
assertEquals(point(Long.MIN_VALUE), point(Long.MIN_VALUE).negate(true));
assertEquals(point(Integer.MIN_VALUE), point(Integer.MIN_VALUE).negate(false));
assertEquals(point(Integer.MAX_VALUE + 1L), point(Integer.MIN_VALUE).negate(true));
assertEquals(range(-200, -100), range(100, 200).negate(true));
assertEquals(range(-200, 1), range(-1, 200).negate(true));
assertEquals(range(-200, 200), range(-200, 200).negate(false));
assertEquals(range(-200, 201), range(-201, 200).negate(false));
assertEquals(all(), all().negate(true));
assertEquals(range(100, Integer.MAX_VALUE).unite(point(Integer.MIN_VALUE)), range(Integer.MIN_VALUE, -100).negate(false));
assertEquals(point(Long.MAX_VALUE).unite(point(Long.MIN_VALUE)), range(Long.MIN_VALUE, Long.MIN_VALUE + 1).negate(true));
assertEquals(range(100, Integer.MAX_VALUE + 1L), range(Integer.MIN_VALUE, -100).negate(true));
// Negating a multi-interval set mirrors each interval and reverses their order.
LongRangeSet set = range(-900, 1000).subtract(range(-800, -600)).subtract(range(-300, 100)).subtract(range(500, 700));
assertEquals("{-900..-801, -599..-301, 101..499, 701..1000}", set.toString());
assertEquals("{-1000..-701, -499..-101, 301..599, 801..900}", set.negate(false).toString());
}
@Test
public void testCastTo() {
PsiPrimitiveType[] types = {PsiType.BYTE, PsiType.SHORT, PsiType.CHAR, PsiType.INT, PsiType.LONG};
// Empty stays empty and zero stays zero under any primitive narrowing.
for (PsiPrimitiveType type : types) {
assertTrue(empty().castTo(type).isEmpty());
assertEquals(point(0), point(0).castTo(type));
}
// Narrowing a point keeps the low-order bits, matching JLS primitive conversion.
assertEquals(point(0x1234_5678_9ABC_DEF0L), point(0x1234_5678_9ABC_DEF0L).castTo(PsiType.LONG));
assertEquals(point(0x9ABC_DEF0), point(0x1234_5678_9ABC_DEF0L).castTo(PsiType.INT));
assertEquals(point(0xDEF0), point(0x1234_5678_9ABC_DEF0L).castTo(PsiType.CHAR));
assertEquals(point(-8464), point(0x1234_5678_9ABC_DEF0L).castTo(PsiType.SHORT));
assertEquals(point(-16), point(0x1234_5678_9ABC_DEF0L).castTo(PsiType.BYTE));
LongRangeSet longSet = fromTypeStrict(PsiType.LONG);
assertNotNull(longSet);
LongRangeSet byteSet = fromTypeStrict(PsiType.BYTE);
assertNotNull(byteSet);
// Casting a full type domain to itself (or narrowing the full long domain) yields the target domain.
for (PsiPrimitiveType type : types) {
LongRangeSet set = fromTypeStrict(type);
assertNotNull(set);
assertEquals(set, set.castTo(type));
assertEquals(set, longSet.castTo(type));
// char is unsigned, so negative bytes map to the high end of the char range.
assertEquals(type.equals(PsiType.CHAR) ? range(0, 127).unite(range(0xFF80, 0xFFFF)) : byteSet, byteSet.castTo(type));
}
checkCast(range(-10, 1000), "{-128..127}", PsiType.BYTE);
checkCast(range(-10, 200), "{-128..-56, -10..127}", PsiType.BYTE);
checkCast(range(-1, 255), "{0..255, 65535}", PsiType.CHAR);
checkCast(range(0, 100000), "{-32768..32767}", PsiType.SHORT);
checkCast(range(0, 50000), "{-32768..-15536, 0..32767}", PsiType.SHORT);
assertEquals(fromTypeStrict(PsiType.INT), range(Long.MIN_VALUE, Integer.MAX_VALUE-1).castTo(PsiType.INT));
}
@Test
public void testBitwiseAnd() {
// AND with empty is empty; AND with everything is the identity.
assertTrue(empty().bitwiseAnd(all()).isEmpty());
assertTrue(all().bitwiseAnd(empty()).isEmpty());
assertEquals(all(), all().bitwiseAnd(all()));
// AND with a single set bit can only yield 0 or that bit.
assertEquals("{0, 16}", all().bitwiseAnd(point(16)).toString());
assertEquals("{0, 1}", all().bitwiseAnd(point(1)).toString());
// For multi-bit masks the result is approximated by a contiguous range.
assertEquals(range(0, 24), all().bitwiseAnd(point(24)));
assertEquals(range(0, 31), all().bitwiseAnd(point(25)));
assertEquals(range(0, 15), all().bitwiseAnd(range(10, 15)));
assertEquals(range(0, 31), all().bitwiseAnd(range(16, 24)));
checkBitwiseAnd(range(0, 3), range(4, 7), "{0..3}");
checkBitwiseAnd(range(3, 4), range(3, 4), "{0..7}"); // 0,3,4,7 actually
checkBitwiseAnd(range(-20, 20), point(8), "{0, 8}");
checkBitwiseAnd(point(3).unite(point(5)), point(3).unite(point(5)), "{1, 3, 5}");
checkBitwiseAnd(range(-10, 10), range(-20, 5), "{-32..15}");
checkBitwiseAnd(range(-30, -20).unite(range(20, 33)), point(-10).unite(point(10)), "{-32..-26, 0..62}");
}
@Test
public void testMod() {
// mod with an empty operand on either side is empty.
assertEquals(empty(), empty().mod(all()));
assertEquals(empty(), all().mod(empty()));
assertEquals(empty(), point(1).mod(empty()));
assertEquals(empty(), point(1).unite(point(3)).mod(empty()));
assertEquals(point(10), point(110).mod(point(100)));
// If the dividend is smaller than every divisor, mod is the identity.
checkMod(range(10, 20), range(30, 40), "{10..20}");
checkMod(range(-10, 10), range(20, 30), "{-10..10}");
checkMod(point(0), range(-100, -50).unite(range(20, 80)), "{0}");
// The result keeps the sign of the dividend, per Java % semantics.
checkMod(point(30), range(10, 40), "{0..30}");
checkMod(point(-30), range(-10, 40), "{-30..0}");
checkMod(point(Long.MIN_VALUE), range(-10, 40), "{-39..0}");
checkMod(range(-10, 40), point(Long.MIN_VALUE), "{-10..40}");
checkMod(range(-30, -20), point(23), "{-22..0}");
checkMod(point(10), range(30, 40), "{10}");
// Divisor sets containing 0 only contribute their non-zero members (x % 0 throws).
checkMod(range(-10, 40), point(Long.MIN_VALUE).unite(point(70)), "{-10..40}");
checkMod(range(-10, 40), point(Long.MIN_VALUE).unite(point(0)), "{-10..40}");
checkMod(point(10), point(Long.MIN_VALUE).unite(point(0)), "{0, 10}");
checkMod(range(0, 10).unite(range(30, 50)), range(-20, -10).unite(range(15, 25)), "{0..24}");
// A guaranteed division by zero produces the empty set.
checkMod(point(10), point(0), "{}");
checkMod(range(0, 10), point(0), "{}");
checkMod(range(Long.MIN_VALUE, Long.MIN_VALUE + 3), point(Long.MIN_VALUE), "{-9223372036854775807..-9223372036854775805, 0}");
checkMod(range(Long.MAX_VALUE - 3, Long.MAX_VALUE), point(Long.MAX_VALUE), "{0..Long.MAX_VALUE-1}");
}
@Test
public void testDiv() {
// div with an empty operand on either side is empty.
assertEquals(empty(), empty().div(all(), true));
assertEquals(empty(), all().div(empty(), true));
assertEquals(empty(), point(1).div(empty(), true));
assertEquals(empty(), point(1).div(point(3), true).div(empty(), true));
assertEquals(all(), all().div(all(), true));
// Guaranteed division by zero yields the empty set.
assertEquals(empty(), all().div(point(0), true));
assertEquals(all(), all().div(point(1), true));
assertEquals(all(), all().div(point(-1), true));
assertEquals(point(11), point(110).div(point(10), true));
// Sign of the quotient follows the signs of the operands.
checkDiv(range(1, 20), range(1, 5), true, "{0..20}");
checkDiv(range(1, 20), range(-5, -1), true, "{-20..0}");
checkDiv(range(-20, -1), range(1, 5), true, "{-20..0}");
checkDiv(range(-20, -1), range(-5, -1), true, "{0..20}");
checkDiv(range(-10, 10), range(2, 4), true, "{-5..5}");
checkDiv(range(100, 120), range(-2, 2), true, "{-120..-50, 50..120}");
// In long mode MIN_VALUE / -1 is representable; in int mode it overflows
// back to Integer.MIN_VALUE, hence the differing expected sets below.
checkDiv(range(Integer.MIN_VALUE, Integer.MIN_VALUE + 20), range(-2, 2), true,
"{Integer.MIN_VALUE..-1073741814, 1073741814..2147483648}");
checkDiv(range(Integer.MIN_VALUE, Integer.MIN_VALUE + 20), range(-2, 2), false,
"{Integer.MIN_VALUE..-1073741814, 1073741814..Integer.MAX_VALUE}");
checkDiv(range(Integer.MIN_VALUE, Integer.MIN_VALUE + 20), range(-2, -1), true,
"{1073741814..2147483648}");
checkDiv(range(Integer.MIN_VALUE, Integer.MIN_VALUE + 20), range(-2, -1), false,
"{Integer.MIN_VALUE, 1073741814..Integer.MAX_VALUE}");
}
@Test
public void testShr() {
// Signed (arithmetic) right shift: empty propagates, all() stays all().
assertEquals(empty(), empty().shiftRight(all(), true));
assertEquals(empty(), all().shiftRight(empty(), true));
assertEquals(all(), all().shiftRight(all(), true));
// Shifting the full long domain right by 32 leaves exactly the int domain.
assertEquals(fromTypeStrict(PsiType.INT), all().shiftRight(point(32), true));
assertEquals(fromTypeStrict(PsiType.SHORT), fromTypeStrict(PsiType.INT).shiftRight(point(16), false));
assertEquals(fromTypeStrict(PsiType.BYTE), fromTypeStrict(PsiType.INT).shiftRight(point(24), false));
// Shifting an int by 31 keeps only the sign: -1 for negatives, 0 otherwise.
assertEquals(range(-1, 0), fromTypeStrict(PsiType.INT).shiftRight(point(31), false));
checkShr(range(-20, 20), point(31), false, "{-1, 0}");
checkShr(range(-20, 20), point(31), true, "{-1, 0}");
checkShr(range(-20, 20), range(1, 3), true, "{-10..10}");
checkShr(range(-20, 20), range(3, 5), true, "{-3..2}");
checkShr(range(1000000, 1000020), range(3, 5), true, "{31250..125002}");
}
@Test
public void testUShr() {
// Unsigned (logical) right shift: empty propagates, all() stays all().
assertEquals(empty(), empty().unsignedShiftRight(all(), true));
assertEquals(empty(), all().unsignedShiftRight(empty(), true));
assertEquals(all(), all().unsignedShiftRight(all(), true));
// >>> 32 of any long is a non-negative 32-bit value.
assertEquals(range(0, 4294967295L), all().unsignedShiftRight(point(32), true));
assertEquals(fromTypeStrict(PsiType.CHAR), fromTypeStrict(PsiType.INT).unsignedShiftRight(point(16), false));
assertEquals(range(0, 255), fromTypeStrict(PsiType.INT).unsignedShiftRight(point(24), false));
// >>> 31 of an int keeps only the sign bit: 0 or 1.
assertEquals(range(0, 1), fromTypeStrict(PsiType.INT).unsignedShiftRight(point(31), false));
checkUShr(range(-20, 20), point(31), false, "{0, 1}");
checkUShr(range(-20, 20), point(31), true, "{0, 8589934591}");
// Negative inputs become huge unsigned values, splitting the result in two bands.
checkUShr(range(-20, 20), range(1, 3), true, "{0..10, 2305843009213693949..Long.MAX_VALUE}");
checkUShr(range(-20, 20), range(1, 3), false, "{0..10, 536870909..Integer.MAX_VALUE}");
checkUShr(range(-20, 20), range(3, 5), true, "{0..2, 576460752303423487..2305843009213693951}");
checkUShr(range(-20, 20), range(3, 5), false, "{0..2, 134217727..536870911}");
checkUShr(range(1000000, 1000020), range(3, 5), true, "{31250..125002}");
}
@Test
public void testContains() {
// contains() covers both single values and whole subsets, including sets with gaps.
assertTrue(range(0, 10).contains(5));
assertTrue(range(0, 10).unite(range(13, 20)).contains(point(5)));
// The empty set is a subset of everything.
assertTrue(range(0, 10).unite(range(13, 20)).contains(empty()));
// 12 falls into the gap between the two intervals.
assertFalse(range(0, 10).unite(range(13, 20)).contains(point(12)));
// A range straddling the gap is not fully contained.
assertFalse(range(0, 10).unite(range(13, 20)).contains(range(9, 15)));
assertTrue(range(0, 10).unite(range(13, 20)).contains(range(2, 8).unite(range(15, 17))));
}
@Test
public void testAdd() {
// Adding anything to the empty set stays empty.
checkAdd(empty(), empty(), true, "{}");
checkAdd(empty(), point(0), true, "{}");
checkAdd(empty(), range(0, 10), true, "{}");
checkAdd(empty(), range(0, 10).unite(range(15, 20)), true, "{}");
checkAdd(point(5), point(10), false, "{15}");
// int addition wraps around, long addition does not (for these operands).
checkAdd(point(Integer.MAX_VALUE), point(Integer.MAX_VALUE), false, "{-2}");
checkAdd(point(Integer.MAX_VALUE), point(Integer.MAX_VALUE), true, "{" + 0xFFFF_FFFEL + "}");
checkAdd(range(0, 10), point(10), false, "{10..20}");
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), point(1), true, "{2147483638..2147483648}");
// In int mode the part that overflows wraps to the negative end of the domain.
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), point(1), false, "{Integer.MIN_VALUE, 2147483638..Integer.MAX_VALUE}");
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), point(10), false, "{Integer.MIN_VALUE..-2147483639, Integer.MAX_VALUE}");
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), point(11), false, "{Integer.MIN_VALUE..-2147483638}");
checkAdd(range(0, 10), range(20, 30), true, "{20..40}");
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), range(0, 10), true, "{2147483637..2147483657}");
checkAdd(range(Integer.MAX_VALUE - 10, Integer.MAX_VALUE), range(0, 10), false, "{Integer.MIN_VALUE..-2147483639, 2147483637..Integer.MAX_VALUE}");
checkAdd(range(10, 20).unite(range(40, 50)), range(0, 3).unite(range(5, 7)), true, "{10..27, 40..57}");
LongRangeSet intDomain = range(Integer.MIN_VALUE, Integer.MAX_VALUE);
// Shifting a punched-out domain by a constant shifts the hole as well.
assertEquals(intDomain, intDomain.plus(point(20), false));
assertEquals(intDomain.without(20), intDomain.without(0).plus(point(20), false));
assertEquals(all().without(20), all().without(0).plus(point(20), true));
// Adding the full domain saturates the result to the full domain.
assertEquals(intDomain, range(20, 30).unite(range(40, 50)).plus(intDomain, false));
assertEquals(intDomain, range(Integer.MIN_VALUE, 2).plus(range(-2, Integer.MAX_VALUE), false));
assertEquals(all(), range(Long.MIN_VALUE, 2).plus(range(-2, Long.MAX_VALUE), true));
}
/** Verifies range-set addition against brute-force enumeration of both operands. */
void checkAdd(LongRangeSet addend1, LongRangeSet addend2, boolean isLong, String expected) {
    LongRangeSet sum = addend1.plus(addend2, isLong);
    // Addition must be commutative regardless of operand order.
    assertEquals(sum, addend2.plus(addend1, isLong));
    // In int mode the reference operation truncates the exact sum to 32 bits.
    LongBinaryOperator reference = isLong ? Long::sum : (a, b) -> (int) (a + b);
    checkBinOp(addend1, addend2, sum, x -> true, reference, expected, "+");
}
/** Verifies range-set remainder against brute-force enumeration; zero divisors are skipped. */
void checkMod(LongRangeSet dividendRange, LongRangeSet divisorRange, String expected) {
    LongRangeSet remainder = dividendRange.mod(divisorRange);
    // x % 0 throws ArithmeticException, so exclude 0 from the enumerated divisors.
    checkBinOp(dividendRange, divisorRange, remainder, divisor -> divisor != 0, (a, b) -> a % b, expected, "%");
}
/** Verifies range-set division against brute-force enumeration; zero divisors are skipped. */
void checkDiv(LongRangeSet dividendRange, LongRangeSet divisorRange, boolean isLong, String expected) {
    LongRangeSet quotient = dividendRange.div(divisorRange, isLong);
    // In int mode the reference operation divides after truncating both operands to 32 bits.
    LongBinaryOperator reference = isLong ? (a, b) -> a / b : (a, b) -> (int) a / (int) b;
    checkBinOp(dividendRange, divisorRange, quotient, divisor -> divisor != 0, reference, expected, "/");
}
/** Verifies arithmetic (sign-extending) right shift against brute-force enumeration. */
void checkShr(LongRangeSet arg, LongRangeSet shiftSize, boolean isLong, String expected) {
    LongRangeSet shifted = arg.shiftRight(shiftSize, isLong);
    // int mode truncates both the value and the shift distance to 32 bits first.
    LongBinaryOperator reference = isLong ? (a, b) -> a >> b : (a, b) -> (int) a >> (int) b;
    checkBinOp(arg, shiftSize, shifted, x -> true, reference, expected, ">>");
}
/** Verifies logical (zero-filling) right shift against brute-force enumeration. */
void checkUShr(LongRangeSet arg, LongRangeSet shiftSize, boolean isLong, String expected) {
    LongRangeSet shifted = arg.unsignedShiftRight(shiftSize, isLong);
    // int mode truncates both the value and the shift distance to 32 bits first.
    LongBinaryOperator reference = isLong ? (a, b) -> a >>> b : (a, b) -> (int) a >>> (int) b;
    checkBinOp(arg, shiftSize, shifted, x -> true, reference, expected, ">>>");
}
/** Verifies bitwise AND of two range sets against brute-force enumeration. */
void checkBitwiseAnd(LongRangeSet range1, LongRangeSet range2, String expected) {
    LongRangeSet conjunction = range1.bitwiseAnd(range2);
    // Bitwise AND must be commutative regardless of operand order.
    assertEquals(conjunction, range2.bitwiseAnd(range1));
    checkBinOp(range1, range2, conjunction, x -> true, (a, b) -> a & b, expected, "&");
}
/**
 * Asserts that {@code result} has the expected textual form, then enumerates every
 * pair (a in op1, b in op2) passing {@code filter} and checks that the reference
 * {@code operator} applied to the pair lands inside {@code result}. All violations
 * are collected and reported in a single failure message.
 */
void checkBinOp(LongRangeSet op1,
                LongRangeSet op2,
                LongRangeSet result,
                LongPredicate filter,
                LongBinaryOperator operator,
                String expected,
                String sign) {
    assertEquals(expected, result.toString());
    String errors = op1.stream().boxed()
        .flatMap(a -> op2.stream()
            .filter(filter)
            .filter(b -> !result.contains(operator.applyAsLong(a, b)))
            .mapToObj(b -> a + " " + sign + " " + b + " = " + operator.applyAsLong(a, b)))
        .collect(Collectors.joining("\n"));
    if (!errors.isEmpty()) {
        fail("Expected range " + expected + " is not satisfied:\n" + errors);
    }
}
/** Verifies a primitive cast of a range set against the JLS narrowing conversion. */
void checkCast(LongRangeSet operand, String expected, PsiPrimitiveType castType) {
    LongRangeSet result = operand.castTo(castType);
    assertEquals(expected, result.toString());
    // A char cast result is not a Number, so it cannot go through the generic
    // computeCastTo path below; compute the char narrowing directly instead.
    LongUnaryOperator reference;
    if (castType.equals(PsiType.CHAR)) {
        reference = x -> (char) x;
    } else {
        reference = x -> ((Number) TypeConversionUtil.computeCastTo(x, castType)).longValue();
    }
    checkUnOp(operand, result, reference, expected, castType.getCanonicalText());
}
/**
 * Asserts that {@code result} has the expected textual form, then enumerates every
 * value of {@code operand} and checks the reference {@code operator} maps it into
 * {@code result}. All violations are collected into one failure message.
 */
void checkUnOp(LongRangeSet operand,
               LongRangeSet result,
               LongUnaryOperator operator,
               String expected,
               String sign) {
    assertEquals(expected, result.toString());
    String errors = operand.stream().boxed()
        .filter(arg -> !result.contains(operator.applyAsLong(arg)))
        .map(arg -> sign + " (" + arg + ") = " + operator.applyAsLong(arg))
        .collect(Collectors.joining("\n"));
    if (!errors.isEmpty()) {
        fail("Expected range " + expected + " is not satisfied:\n" + errors);
    }
}
}
| |
package com.fsck.k9.activity.compose;
import java.util.ArrayList;
import java.util.List;
import android.app.PendingIntent;
import com.fsck.k9.activity.compose.RecipientMvpView.CryptoSpecialModeDisplayType;
import com.fsck.k9.activity.compose.RecipientMvpView.CryptoStatusDisplayType;
import com.fsck.k9.activity.compose.RecipientPresenter.CryptoMode;
import com.fsck.k9.activity.compose.RecipientPresenter.CryptoProviderState;
import com.fsck.k9.message.AutocryptStatusInteractor.RecipientAutocryptStatus;
import com.fsck.k9.message.AutocryptStatusInteractor.RecipientAutocryptStatusType;
import com.fsck.k9.view.RecipientSelectView.Recipient;
/** This is an immutable object which contains all relevant metadata entered
* during e-mail composition to apply cryptographic operations before sending
* or saving as draft.
*/
public class ComposeCryptoStatus {
// All fields are assigned only by ComposeCryptoStatusBuilder.build() or
// withRecipientAutocryptStatus(); instances are never mutated afterwards.
private CryptoProviderState cryptoProviderState;
private Long openPgpKeyId;
private String[] recipientAddresses;
private boolean enablePgpInline;
private CryptoMode cryptoMode;
// Only set via withRecipientAutocryptStatus(); null until then.
private RecipientAutocryptStatus recipientAutocryptStatus;
public Long getOpenPgpKeyId() {
return openPgpKeyId;
}
/**
 * Maps the provider state, the recipients' autocrypt status and the selected crypto
 * mode to the icon/status shown in the recipient view.
 * Requires recipientAutocryptStatus to be set (see withRecipientAutocryptStatus);
 * otherwise throws IllegalStateException once the provider state is OK.
 */
CryptoStatusDisplayType getCryptoStatusDisplayType() {
switch (cryptoProviderState) {
case UNCONFIGURED:
return CryptoStatusDisplayType.UNCONFIGURED;
case UNINITIALIZED:
return CryptoStatusDisplayType.UNINITIALIZED;
case LOST_CONNECTION:
case ERROR:
return CryptoStatusDisplayType.ERROR;
case OK:
// provider status is ok -> return value is based on cryptoMode
break;
default:
throw new AssertionError("all CryptoProviderStates must be handled!");
}
if (recipientAutocryptStatus == null) {
throw new IllegalStateException("Display type must be obtained from provider!");
}
RecipientAutocryptStatusType recipientAutocryptStatusType = recipientAutocryptStatus.type;
if (recipientAutocryptStatusType == RecipientAutocryptStatusType.ERROR) {
return CryptoStatusDisplayType.ERROR;
}
// Every case below returns or throws, so no trailing return is needed.
switch (cryptoMode) {
case CHOICE_ENABLED:
if (recipientAutocryptStatusType.canEncrypt()) {
if (recipientAutocryptStatusType.isConfirmed()) {
return CryptoStatusDisplayType.CHOICE_ENABLED_TRUSTED;
} else {
return CryptoStatusDisplayType.CHOICE_ENABLED_UNTRUSTED;
}
} else {
// User explicitly enabled encryption but a recipient cannot encrypt.
return CryptoStatusDisplayType.CHOICE_ENABLED_ERROR;
}
case CHOICE_DISABLED:
if (recipientAutocryptStatusType.canEncrypt()) {
if (recipientAutocryptStatusType.isConfirmed()) {
return CryptoStatusDisplayType.CHOICE_DISABLED_TRUSTED;
} else {
return CryptoStatusDisplayType.CHOICE_DISABLED_UNTRUSTED;
}
} else {
return CryptoStatusDisplayType.CHOICE_DISABLED_UNAVAILABLE;
}
case NO_CHOICE:
if (recipientAutocryptStatusType == RecipientAutocryptStatusType.NO_RECIPIENTS) {
return CryptoStatusDisplayType.NO_CHOICE_EMPTY;
} else if (recipientAutocryptStatusType.canEncrypt() && recipientAutocryptStatusType.isMutual()) { // TODO check own "mutual" status
if (recipientAutocryptStatusType.isConfirmed()) {
return CryptoStatusDisplayType.NO_CHOICE_MUTUAL_TRUSTED;
} else {
return CryptoStatusDisplayType.NO_CHOICE_MUTUAL;
}
} else if (recipientAutocryptStatusType.canEncrypt()) {
if (recipientAutocryptStatusType.isConfirmed()) {
return CryptoStatusDisplayType.NO_CHOICE_AVAILABLE_TRUSTED;
} else {
return CryptoStatusDisplayType.NO_CHOICE_AVAILABLE;
}
}
return CryptoStatusDisplayType.NO_CHOICE_UNAVAILABLE;
case SIGN_ONLY:
return CryptoStatusDisplayType.SIGN_ONLY;
default:
throw new AssertionError("all CryptoModes must be handled!");
}
}
/**
 * Determines which special-mode indicator (sign-only and/or PGP/INLINE) to show.
 * Returns NONE whenever the crypto provider is not in the OK state.
 */
CryptoSpecialModeDisplayType getCryptoSpecialModeDisplayType() {
if (cryptoProviderState != CryptoProviderState.OK) {
return CryptoSpecialModeDisplayType.NONE;
}
if (isSignOnly() && isPgpInlineModeEnabled()) {
return CryptoSpecialModeDisplayType.SIGN_ONLY_PGP_INLINE;
}
if (isSignOnly()) {
return CryptoSpecialModeDisplayType.SIGN_ONLY;
}
if (canEncrypt() && isPgpInlineModeEnabled()) {
return CryptoSpecialModeDisplayType.PGP_INLINE;
}
return CryptoSpecialModeDisplayType.NONE;
}
public boolean shouldUsePgpMessageBuilder() {
// CryptoProviderState.ERROR will be handled as an actual error, see SendErrorState
return cryptoProviderState != CryptoProviderState.UNCONFIGURED && openPgpKeyId != null;
}
/**
 * Encryption is on if the user explicitly enabled it, or if all recipients support
 * mutual autocrypt encryption and the user did not explicitly disable it.
 */
public boolean isEncryptionEnabled() {
if (cryptoProviderState == CryptoProviderState.UNCONFIGURED) {
return false;
}
boolean isExplicitlyEnabled = (cryptoMode == CryptoMode.CHOICE_ENABLED);
boolean isMutualAndNotDisabled = (cryptoMode != CryptoMode.CHOICE_DISABLED && canEncryptAndIsMutual());
return isExplicitlyEnabled || isMutualAndNotDisabled;
}
boolean isSignOnly() {
return cryptoMode == CryptoMode.SIGN_ONLY;
}
// Signing happens either in explicit sign-only mode or implicitly with encryption.
public boolean isSigningEnabled() {
return cryptoMode == CryptoMode.SIGN_ONLY || isEncryptionEnabled();
}
public boolean isPgpInlineModeEnabled() {
return enablePgpInline;
}
public boolean isProviderStateOk() {
return cryptoProviderState == CryptoProviderState.OK;
}
// Null-safe: false until withRecipientAutocryptStatus() has supplied a status.
boolean canEncrypt() {
return recipientAutocryptStatus != null && recipientAutocryptStatus.type.canEncrypt();
}
public String[] getRecipientAddresses() {
return recipientAddresses;
}
public boolean hasRecipients() {
return recipientAddresses.length > 0;
}
boolean canEncryptAndIsMutual() {
return canEncrypt() && recipientAutocryptStatus.type.isMutual();
}
// True when encryption is requested but at least one recipient cannot encrypt.
boolean isEncryptionEnabledError() {
return isEncryptionEnabled() && !canEncrypt();
}
// NOTE(review): the next two methods dereference recipientAutocryptStatus without a
// null check — callers must only use them after withRecipientAutocryptStatus().
boolean hasAutocryptPendingIntent() {
return recipientAutocryptStatus.hasPendingIntent();
}
PendingIntent getAutocryptPendingIntent() {
return recipientAutocryptStatus.intent;
}
/** Step-by-step builder; build() validates that all mandatory fields were set. */
public static class ComposeCryptoStatusBuilder {
private CryptoProviderState cryptoProviderState;
private CryptoMode cryptoMode;
private Long openPgpKeyId;
private List<Recipient> recipients;
private Boolean enablePgpInline;
public ComposeCryptoStatusBuilder setCryptoProviderState(CryptoProviderState cryptoProviderState) {
this.cryptoProviderState = cryptoProviderState;
return this;
}
public ComposeCryptoStatusBuilder setCryptoMode(CryptoMode cryptoMode) {
this.cryptoMode = cryptoMode;
return this;
}
public ComposeCryptoStatusBuilder setOpenPgpKeyId(Long openPgpKeyId) {
this.openPgpKeyId = openPgpKeyId;
return this;
}
public ComposeCryptoStatusBuilder setRecipients(List<Recipient> recipients) {
this.recipients = recipients;
return this;
}
public ComposeCryptoStatusBuilder setEnablePgpInline(boolean cryptoEnableCompat) {
this.enablePgpInline = cryptoEnableCompat;
return this;
}
/**
 * Creates the immutable status object. Throws AssertionError if any mandatory
 * builder field is missing; openPgpKeyId is the only optional field.
 */
public ComposeCryptoStatus build() {
if (cryptoProviderState == null) {
throw new AssertionError("cryptoProviderState must be set!");
}
if (cryptoMode == null) {
throw new AssertionError("crypto mode must be set!");
}
if (recipients == null) {
throw new AssertionError("recipients must be set!");
}
if (enablePgpInline == null) {
throw new AssertionError("enablePgpInline must be set!");
}
// Flatten recipients into plain address strings for the immutable result.
ArrayList<String> recipientAddresses = new ArrayList<>();
for (Recipient recipient : recipients) {
recipientAddresses.add(recipient.address.getAddress());
}
ComposeCryptoStatus result = new ComposeCryptoStatus();
result.cryptoProviderState = cryptoProviderState;
result.cryptoMode = cryptoMode;
result.recipientAddresses = recipientAddresses.toArray(new String[0]);
result.openPgpKeyId = openPgpKeyId;
result.enablePgpInline = enablePgpInline;
return result;
}
}
/**
 * Copy-with semantics: returns a new instance identical to this one but with the
 * given recipient autocrypt status attached. The receiver is left unchanged.
 */
ComposeCryptoStatus withRecipientAutocryptStatus(RecipientAutocryptStatus recipientAutocryptStatusType) {
ComposeCryptoStatus result = new ComposeCryptoStatus();
result.cryptoProviderState = cryptoProviderState;
result.cryptoMode = cryptoMode;
result.recipientAddresses = recipientAddresses;
result.openPgpKeyId = openPgpKeyId;
result.enablePgpInline = enablePgpInline;
result.recipientAutocryptStatus = recipientAutocryptStatusType;
return result;
}
public enum SendErrorState {
PROVIDER_ERROR,
ENABLED_ERROR
}
/** Returns the error that should block sending, or null if sending may proceed. */
public SendErrorState getSendErrorStateOrNull() {
if (cryptoProviderState != CryptoProviderState.OK) {
// TODO: be more specific about this error
return SendErrorState.PROVIDER_ERROR;
}
if (isEncryptionEnabledError()) {
return SendErrorState.ENABLED_ERROR;
}
return null;
}
enum AttachErrorState {
IS_INLINE
}
/** Returns the error preventing attachments, or null if attaching is allowed. */
AttachErrorState getAttachErrorStateOrNull() {
if (cryptoProviderState == CryptoProviderState.UNCONFIGURED) {
return null;
}
// PGP/INLINE cannot carry attachments.
if (enablePgpInline) {
return AttachErrorState.IS_INLINE;
}
return null;
}
}
| |
/* This file is part of the db4o object database http://www.db4o.com
Copyright (C) 2004 - 2010 Versant Corporation http://www.versant.com
db4o is free software; you can redistribute it and/or modify it under
the terms of version 3 of the GNU General Public License as published
by the Free Software Foundation.
db4o is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
for more details.
You should have received a copy of the GNU General Public License along
with this program. If not, see http://www.gnu.org/licenses/. */
package com.db4o.foundation;
/**
* @exclude
*/
public class HashtableBase {
// Load factor: the table is grown once _size exceeds half the bucket count.
private static final float FILL = 0.5F;
// FIELDS ARE PUBLIC SO THEY CAN BE REFLECTED ON IN JDKs <= 1.1
public int _tableSize;
public int _mask;
public int _maximumSize;
public int _size;
public HashtableIntEntry[] _table;
/**
 * Creates a table able to hold {@code size} entries without resizing.
 * The bucket count is rounded up to a power of two so that index
 * computation can use a bit mask instead of modulo.
 */
public HashtableBase(int size) {
size = newSize(size); // legacy for .NET conversion
_tableSize = 1;
while (_tableSize < size) {
_tableSize = _tableSize << 1;
}
// _tableSize is a power of two, so (_tableSize - 1) is an all-ones index mask.
_mask = _tableSize - 1;
_maximumSize = (int) (_tableSize * FILL);
_table = new HashtableIntEntry[_tableSize];
}
public HashtableBase() {
this(1);
}
/** @param cloneOnlyCtor */
protected HashtableBase(DeepClone cloneOnlyCtor) {
}
/** Removes all entries; bucket array capacity is retained. */
public void clear() {
_size = 0;
Arrays4.fill(_table, null);
}
// Doubles the requested capacity so the table stays below the FILL factor.
private final int newSize(int size) {
return (int) (size / FILL);
}
public int size() {
return _size;
}
/**
 * Walks the collision chain of newEntry's bucket and returns the entry with
 * the same key, or null if the key is not present.
 */
protected HashtableIntEntry findWithSameKey(HashtableIntEntry newEntry) {
HashtableIntEntry existing = _table[entryIndex(newEntry)];
while (null != existing) {
if (existing.sameKeyAs(newEntry)) {
return existing;
}
existing = existing._next;
}
return null;
}
// Bucket index for an entry: hash key masked to the table size.
protected int entryIndex(HashtableIntEntry entry) {
return entry._key & _mask;
}
/** Inserts newEntry, replacing any existing entry with the same key. */
protected void putEntry(HashtableIntEntry newEntry) {
HashtableIntEntry existing = findWithSameKey(newEntry);
if (null != existing) {
replace(existing, newEntry);
} else {
insert(newEntry);
}
}
// Prepends newEntry to its bucket chain, growing the table first if needed.
private void insert(HashtableIntEntry newEntry) {
_size++;
if (_size > _maximumSize) {
increaseSize();
}
int index = entryIndex(newEntry);
newEntry._next = _table[index];
_table[index] = newEntry;
}
// Splices newEntry into the chain in place of existing; _size is unchanged.
private void replace(HashtableIntEntry existing, HashtableIntEntry newEntry) {
newEntry._next = existing._next;
HashtableIntEntry entry = _table[entryIndex(existing)];
if (entry == existing) {
// existing is the chain head: swap the bucket pointer directly.
_table[entryIndex(existing)] = newEntry;
} else {
// Otherwise find existing's predecessor and relink past it.
while (entry._next != existing) {
entry = entry._next;
}
entry._next = newEntry;
}
}
// Doubles the bucket array and rehashes every entry into the new table.
private void increaseSize() {
_tableSize = _tableSize << 1;
_maximumSize = _maximumSize << 1;
_mask = _tableSize - 1;
HashtableIntEntry[] temp = _table;
_table = new HashtableIntEntry[_tableSize];
for (int i = 0; i < temp.length; i++) {
reposition(temp[i]);
}
}
protected HashtableIterator hashtableIterator() {
return new HashtableIterator(_table);
}
// Re-links every entry of an old-bucket chain into the grown table; each entry
// is prepended to its new bucket, so chain order is reversed in the process.
private void reposition(HashtableIntEntry entry) {
HashtableIntEntry currentEntry = entry;
HashtableIntEntry nextEntry = null;
while (currentEntry != null)
{
nextEntry = currentEntry._next;
currentEntry._next = _table[entryIndex(currentEntry)];
_table[entryIndex(currentEntry)] = currentEntry;
currentEntry = nextEntry;
}
}
/** Iterates all keys via the entry iterator. */
public Iterator4 keys() {
return Iterators.map(hashtableIterator(), new Function4() {
public Object apply(Object current) {
return ((Entry4)current).key();
}
});
}
public Iterable4 values() {
return new Iterable4() {
public Iterator4 iterator() {
return valuesIterator();
}
};
}
/**
 * Iterates through all the values.
 *
 * @return value iterator
 */
public Iterator4 valuesIterator() {
return Iterators.map(hashtableIterator(), new Function4() {
public Object apply(Object current) {
return ((Entry4)current).value();
}
});
}
public String toString() {
return Iterators.join(hashtableIterator(), "{", "}", ", ");
}
/**
 * Unlinks entry from its bucket chain. predecessor is entry's direct
 * predecessor in the chain, or null if entry is the chain head.
 */
protected void removeEntry(HashtableIntEntry predecessor, HashtableIntEntry entry) {
if (predecessor != null) {
predecessor._next = entry._next;
} else {
_table[entryIndex(entry)] = entry._next;
}
_size--;
}
/**
 * Removes the entry matching both the hash (intKey) and the object key;
 * returns the removed value or null if no match was found.
 */
protected Object removeObjectEntry(int intKey, Object objectKey) {
HashtableObjectEntry entry = (HashtableObjectEntry) _table[intKey & _mask];
HashtableObjectEntry predecessor = null;
while (entry != null) {
if (entry._key == intKey && entry.hasKey(objectKey)) {
removeEntry(predecessor, entry);
return entry._object;
}
predecessor = entry;
entry = (HashtableObjectEntry) entry._next;
}
return null;
}
/**
 * Removes the entry matching both the hash (intKey) and the long key;
 * returns the removed value or null if no match was found.
 */
protected Object removeLongEntry(int intKey, long longKey) {
HashtableLongEntry entry = (HashtableLongEntry) _table[intKey & _mask];
HashtableLongEntry predecessor = null;
while (entry != null) {
if (entry._key == intKey && entry._longKey == longKey) {
removeEntry(predecessor, entry);
return entry._object;
}
predecessor = entry;
entry = (HashtableLongEntry) entry._next;
}
return null;
}
/** Removes the entry with the given int key; returns its value or null. */
protected Object removeIntEntry(int key) {
HashtableIntEntry entry = _table[key & _mask];
HashtableIntEntry predecessor = null;
while (entry != null) {
if (entry._key == key) {
removeEntry(predecessor, entry);
return entry._object;
}
predecessor = entry;
entry = entry._next;
}
return null;
}
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.cxx;
import static org.hamcrest.Matchers.allOf;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.not;
import static org.junit.Assert.assertThat;
import com.facebook.buck.model.BuildTarget;
import com.facebook.buck.model.BuildTargetFactory;
import com.facebook.buck.rules.BuildRuleParams;
import com.facebook.buck.rules.BuildRuleResolver;
import com.facebook.buck.rules.BuildTargetSourcePath;
import com.facebook.buck.rules.DefaultTargetNodeToBuildRuleTransformer;
import com.facebook.buck.rules.FakeBuildRule;
import com.facebook.buck.rules.FakeBuildRuleParamsBuilder;
import com.facebook.buck.rules.SourcePath;
import com.facebook.buck.rules.SourcePathResolver;
import com.facebook.buck.rules.TargetGraph;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMultimap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.Iterables;
import org.hamcrest.Matchers;
import org.junit.Test;
import java.nio.file.Paths;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
public class CxxDescriptionEnhancerTest {
@Test
public void libraryTestIncludesPrivateHeadersOfLibraryUnderTest() throws Exception {
SourcePathResolver pathResolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
BuildTarget libTarget = BuildTargetFactory.newInstance("//:lib");
BuildTarget testTarget = BuildTargetFactory.newInstance("//:test");
// A fake library exposing distinct public (//:symlink) and private
// (//:privatesymlink) header symlink trees.
BuildRuleParams libParams = new FakeBuildRuleParamsBuilder(libTarget).build();
FakeCxxLibrary libRule = new FakeCxxLibrary(
libParams,
pathResolver,
BuildTargetFactory.newInstance("//:header"),
BuildTargetFactory.newInstance("//:symlink"),
BuildTargetFactory.newInstance("//:privateheader"),
BuildTargetFactory.newInstance("//:privatesymlink"),
new FakeBuildRule("//:archive", pathResolver),
new FakeBuildRule("//:shared", pathResolver),
Paths.get("output/path/lib.so"),
"lib.so",
// Ensure the test is listed as a dep of the lib.
ImmutableSortedSet.of(testTarget)
);
BuildRuleParams testParams = new FakeBuildRuleParamsBuilder(testTarget)
.setDeclaredDeps(ImmutableSortedSet.of(libRule))
.build();
// Collect the preprocessor input the test rule would see from its deps.
ImmutableList<CxxPreprocessorInput> combinedInput =
CxxDescriptionEnhancer.collectCxxPreprocessorInput(
testParams,
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableMultimap.of(),
ImmutableList.of(),
ImmutableSet.of(),
CxxPreprocessables.getTransitiveCxxPreprocessorInput(
CxxPlatformUtils.DEFAULT_PLATFORM,
FluentIterable.from(testParams.getDeps())
.filter(CxxPreprocessorDep.class::isInstance)),
ImmutableList.of(),
Optional.empty());
// Gather the include roots from the collected preprocessor input.
Set<SourcePath> roots = new HashSet<>();
for (CxxHeaders headers : CxxPreprocessorInput.concat(combinedInput).getIncludes()) {
roots.add(headers.getRoot());
}
assertThat(
"Test of library should include both public and private headers",
roots,
Matchers.hasItems(
new BuildTargetSourcePath(BuildTargetFactory.newInstance("//:symlink")),
new BuildTargetSourcePath(BuildTargetFactory.newInstance("//:privatesymlink"))));
}
@Test
public void libraryTestIncludesPublicHeadersOfDependenciesOfLibraryUnderTest() throws Exception {
SourcePathResolver pathResolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
BuildTarget libTarget = BuildTargetFactory.newInstance("//:lib");
BuildTarget otherlibTarget = BuildTargetFactory.newInstance("//:otherlib");
BuildTarget testTarget = BuildTargetFactory.newInstance("//:test");
BuildRuleParams otherlibParams = new FakeBuildRuleParamsBuilder(otherlibTarget).build();
FakeCxxLibrary otherlibRule = new FakeCxxLibrary(
otherlibParams,
pathResolver,
BuildTargetFactory.newInstance("//:otherheader"),
BuildTargetFactory.newInstance("//:othersymlink"),
BuildTargetFactory.newInstance("//:otherprivateheader"),
BuildTargetFactory.newInstance("//:otherprivatesymlink"),
new FakeBuildRule("//:archive", pathResolver),
new FakeBuildRule("//:shared", pathResolver),
Paths.get("output/path/lib.so"),
"lib.so",
// This library has no tests.
ImmutableSortedSet.of()
);
BuildRuleParams libParams = new FakeBuildRuleParamsBuilder(libTarget)
.setDeclaredDeps(ImmutableSortedSet.of(otherlibRule)).build();
FakeCxxLibrary libRule = new FakeCxxLibrary(
libParams,
pathResolver,
BuildTargetFactory.newInstance("//:header"),
BuildTargetFactory.newInstance("//:symlink"),
BuildTargetFactory.newInstance("//:privateheader"),
BuildTargetFactory.newInstance("//:privatesymlink"),
new FakeBuildRule("//:archive", pathResolver),
new FakeBuildRule("//:shared", pathResolver),
Paths.get("output/path/lib.so"),
"lib.so",
// Ensure the test is listed as a dep of the lib.
ImmutableSortedSet.of(testTarget)
);
BuildRuleParams testParams = new FakeBuildRuleParamsBuilder(testTarget)
.setDeclaredDeps(ImmutableSortedSet.of(libRule))
.build();
ImmutableList<CxxPreprocessorInput> combinedInput =
CxxDescriptionEnhancer.collectCxxPreprocessorInput(
testParams,
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableMultimap.of(),
ImmutableList.of(),
ImmutableSet.of(),
CxxPreprocessables.getTransitiveCxxPreprocessorInput(
CxxPlatformUtils.DEFAULT_PLATFORM,
FluentIterable.from(testParams.getDeps())
.filter(CxxPreprocessorDep.class::isInstance)),
ImmutableList.of(),
Optional.empty());
Set<SourcePath> roots = new HashSet<>();
for (CxxHeaders headers : CxxPreprocessorInput.concat(combinedInput).getIncludes()) {
roots.add(headers.getRoot());
}
assertThat(
"Test of library should include public dependency headers",
Iterables.transform(
CxxPreprocessorInput.concat(combinedInput).getIncludes(),
CxxHeaders::getRoot),
allOf(
hasItem(new BuildTargetSourcePath(BuildTargetFactory.newInstance("//:othersymlink"))),
not(
hasItem(
new BuildTargetSourcePath(
BuildTargetFactory.newInstance("//:otherprivatesymlink"))))));
}
@Test
public void nonTestLibraryDepDoesNotIncludePrivateHeadersOfLibrary() throws Exception {
SourcePathResolver pathResolver = new SourcePathResolver(
new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
);
BuildTarget libTarget = BuildTargetFactory.newInstance("//:lib");
BuildRuleParams libParams = new FakeBuildRuleParamsBuilder(libTarget).build();
FakeCxxLibrary libRule = new FakeCxxLibrary(
libParams,
pathResolver,
BuildTargetFactory.newInstance("//:header"),
BuildTargetFactory.newInstance("//:symlink"),
BuildTargetFactory.newInstance("//:privateheader"),
BuildTargetFactory.newInstance("//:privatesymlink"),
new FakeBuildRule("//:archive", pathResolver),
new FakeBuildRule("//:shared", pathResolver),
Paths.get("output/path/lib.so"),
"lib.so",
// This library has no tests.
ImmutableSortedSet.of()
);
BuildTarget otherLibDepTarget = BuildTargetFactory.newInstance("//:other");
BuildRuleParams otherLibDepParams = new FakeBuildRuleParamsBuilder(otherLibDepTarget)
.setDeclaredDeps(ImmutableSortedSet.of(libRule))
.build();
ImmutableList<CxxPreprocessorInput> otherInput =
CxxDescriptionEnhancer.collectCxxPreprocessorInput(
otherLibDepParams,
CxxPlatformUtils.DEFAULT_PLATFORM,
ImmutableMultimap.of(),
ImmutableList.of(),
ImmutableSet.of(),
CxxPreprocessables.getTransitiveCxxPreprocessorInput(
CxxPlatformUtils.DEFAULT_PLATFORM,
FluentIterable.from(otherLibDepParams.getDeps())
.filter(CxxPreprocessorDep.class::isInstance)),
ImmutableList.of(),
Optional.empty());
Set<SourcePath> roots = new HashSet<>();
for (CxxHeaders headers : CxxPreprocessorInput.concat(otherInput).getIncludes()) {
roots.add(headers.getRoot());
}
assertThat(
"Non-test rule with library dep should include public and not private headers",
roots,
allOf(
hasItem(new BuildTargetSourcePath(BuildTargetFactory.newInstance("//:symlink"))),
not(hasItem(
new BuildTargetSourcePath(BuildTargetFactory.newInstance("//:privatesymlink"))))));
}
@Test
public void testSonameExpansion() {
assertThat(soname("libfoo.so", "dylib", "%s.dylib"), equalTo("libfoo.so"));
assertThat(soname("libfoo.$(ext)", "good", "%s.bad"), equalTo("libfoo.good"));
assertThat(soname("libfoo.$(ext 2.3)", "bad", "%s.good"), equalTo("libfoo.2.3.good"));
assertThat(soname("libfoo.$(ext 2.3)", "bad", "good.%s"), equalTo("libfoo.good.2.3"));
assertThat(soname("libfoo.$(ext 2.3)", "bad", "windows"), equalTo("libfoo.windows"));
}
/**
* Just a helper to make this shorter to write.
*/
private static String soname(String declared, String extension, String versionedFormat) {
return CxxDescriptionEnhancer.getNonDefaultSharedLibrarySoname(
declared, extension, versionedFormat);
}
}
| |
package com.psddev.dari.db;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.github.shyiko.mysql.binlog.event.WriteRowsEventData;
import com.google.common.base.Charsets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.github.shyiko.mysql.binlog.BinaryLogClient.EventListener;
import com.github.shyiko.mysql.binlog.event.DeleteRowsEventData;
import com.github.shyiko.mysql.binlog.event.Event;
import com.github.shyiko.mysql.binlog.event.EventData;
import com.github.shyiko.mysql.binlog.event.EventHeader;
import com.github.shyiko.mysql.binlog.event.EventType;
import com.github.shyiko.mysql.binlog.event.TableMapEventData;
import com.github.shyiko.mysql.binlog.event.UpdateRowsEventData;
import com.google.common.cache.Cache;
import com.psddev.dari.db.shyiko.DariQueryEventData;
import com.psddev.dari.util.ObjectUtils;
import com.psddev.dari.util.StringUtils;
import com.psddev.dari.util.UuidUtils;
/**
 * Binary-log event listener that keeps the {@link SqlDatabase} replication cache in sync with the
 * MySQL Record table. Row events and parsed QUERY statements inside a transaction are buffered in
 * {@link #events} and applied to the cache on COMMIT/XID; any statement that cannot be parsed
 * safely causes the whole cache to be flushed instead.
 *
 * <p>Not thread-safe: the binlog client delivers events from a single thread.
 */
class MySQLBinaryLogEventListener implements EventListener {

    private static final Logger LOGGER = LoggerFactory.getLogger(MySQLBinaryLogEventListener.class);

    // Matches "DELETE FROM <table> WHERE id = <hex/binary literal>" (or "id IN (...)").
    private static final Pattern DELETE_PATTERN = Pattern.compile("DELETE\\s+FROM\\s+`?(?<table>\\p{Alnum}+)`?\\s+WHERE\\s+`?id`?\\s*(?:(?:IN\\s*\\()|(?:=))\\s*(?<id>(?:(?:[^\']+'){2},?\\s*){1,})\\)?", Pattern.CASE_INSENSITIVE);

    // Matches "UPDATE <table> SET typeId = ..., data = ... WHERE id = ...".
    private static final Pattern UPDATE_PATTERN = Pattern.compile("UPDATE\\s+`?(?<table>\\p{Alnum}+)`?\\s+SET\\s+`?typeId`?\\s*=\\s*(?<typeId>(?:[^\']+'){2})\\s*,\\s*`?data`?\\s*=\\s*(?<data>.+)\\s*WHERE\\s+`?id`?\\s*(?:(?:IN\\s*\\()|(?:=))\\s*(?<id>(?:[^\']+'){2}).*", Pattern.CASE_INSENSITIVE);

    private final SqlDatabase database;
    private final Cache<UUID, Object[]> cache;
    private final String catalog;

    // True between a BEGIN query event and the matching COMMIT/XID event.
    private boolean transactionBegin = false;

    // Pending TABLE_MAP event for the Record table; consumed by the next row event.
    private TableMapEventData tableMapEventData;

    // Events buffered for the current transaction, applied on commit.
    private final List<Event> events = new ArrayList<Event>();

    // Set when a statement could not be parsed; the cache is flushed on commit instead.
    private boolean isFlushCache = false;

    /**
     * @param database the database whose replication cache is maintained
     * @param cache the replication cache keyed by record id
     * @param catalog the MySQL database (catalog) name to watch
     */
    public MySQLBinaryLogEventListener(SqlDatabase database, Cache<UUID, Object[]> cache, String catalog) {
        this.database = database;
        this.cache = cache;
        this.catalog = catalog;
    }

    /**
     * Makes sure length of the given {@code in} is 16.
     */
    private byte[] confirm16Bytes(byte[] in) {
        if (in == null) {
            return null;
        }
        byte[] bytes16 = new byte[16];
        if (in.length == 16) {
            return in;
        }
        // Copy (via OR into zeroed array) as many bytes as are available, zero-padding the rest.
        for (int i = 0; i < bytes16.length && i < in.length; i++) {
            bytes16[i] |= in[i];
        }
        return bytes16;
    }

    /**
     * Notifies the database of an updated record and, when the record is already cached,
     * refreshes the cached {@code [typeId bytes, raw data, unserialized map]} triple.
     */
    private void updateCache(byte[] id, byte[] typeId, byte[] data) {
        id = confirm16Bytes(id);
        if (id != null) {
            UUID bid = ObjectUtils.to(UUID.class, id);
            Object[] value = new Object[3];
            value[1] = data;
            Map<String, Object> jsonData = SqlDatabase.unserializeData(data);
            value[2] = jsonData;
            // The authoritative type id comes from the unserialized data, not the column value.
            value[0] = UuidUtils.toBytes(ObjectUtils.to(UUID.class, jsonData.get(StateValueUtils.TYPE_KEY)));
            database.notifyUpdate(database.createSavedObjectFromReplicationCache((byte[]) value[0], bid, (byte[]) value[1], jsonData, null));
            // populate cache
            if (cache.getIfPresent(bid) != null) {
                cache.put(bid, value);
                if (LOGGER.isDebugEnabled()) {
                    LOGGER.debug("[BINLOG] UPDATING CACHE: ID [{}]", StringUtils.hex(id));
                }
            }
        }
    }

    /**
     * Evicts the record with the given id (padded to 16 bytes) from the cache.
     */
    private void invalidateCache(byte[] id) {
        id = confirm16Bytes(id);
        if (id != null) {
            UUID bid = ObjectUtils.to(UUID.class, id);
            if (LOGGER.isDebugEnabled() && cache.getIfPresent(bid) != null) {
                LOGGER.debug("[BINLOG] DELETING CACHE: ID [{}]", StringUtils.hex(id));
            }
            cache.invalidate(bid);
        }
    }

    /**
     * Applies all buffered events of the current transaction to the cache.
     */
    private void commitTransaction() {
        for (Event event : events) {
            EventHeader eventHeader = event.getHeader();
            EventType eventType = eventHeader.getEventType();
            EventData eventData = event.getData();
            LOGGER.debug("BIN LOG TEST [{}] [{}]", event.getHeader().getEventType().toString(), event.getData().toString());
            if (eventType == EventType.WRITE_ROWS || eventType == EventType.EXT_WRITE_ROWS) {
                for (Serializable[] row : ((WriteRowsEventData) eventData).getRows()) {
                    // Column 2 (data) may arrive as bytes or as a String depending on the driver.
                    byte[] data = row[2] instanceof byte[] ? (byte[]) row[2]
                            : row[2] instanceof String ? ((String) row[2]).getBytes(Charsets.UTF_8)
                            : null;
                    updateCache((byte[]) row[0], (byte[]) row[1], data);
                    LOGGER.debug("InsertRow HEX [{}][{}]", StringUtils.hex((byte[]) row[0]), ((byte[]) row[0]).length);
                }
            } else if (eventType == EventType.UPDATE_ROWS || eventType == EventType.EXT_UPDATE_ROWS) {
                for (Map.Entry<Serializable[], Serializable[]> row : ((UpdateRowsEventData) eventData).getRows()) {
                    Serializable[] newValue = row.getValue();
                    byte[] data = newValue[2] instanceof byte[] ? (byte[]) newValue[2]
                            : newValue[2] instanceof String ? ((String) newValue[2]).getBytes(Charsets.UTF_8)
                            : null;
                    updateCache((byte[]) newValue[0], (byte[]) newValue[1], data);
                    LOGGER.debug("UpdateRow HEX [{}][{}]", StringUtils.hex((byte[]) newValue[0]), ((byte[]) newValue[0]).length);
                }
            } else if (eventType == EventType.DELETE_ROWS || eventType == EventType.EXT_DELETE_ROWS) {
                for (Serializable[] row : ((DeleteRowsEventData) eventData).getRows()) {
                    invalidateCache((byte[]) row[0]);
                    LOGGER.debug("DeleteRow HEX [{}][{}]", StringUtils.hex((byte[]) row[0]), ((byte[]) row[0]).length);
                }
            } else if (eventType == EventType.QUERY) {
                DariQueryEventData queryEventData = (DariQueryEventData) eventData;
                if (queryEventData.getAction() == DariQueryEventData.Action.UPDATE) {
                    updateCache(queryEventData.getId(), queryEventData.getTypeId(), queryEventData.getData());
                } else if (queryEventData.getAction() == DariQueryEventData.Action.DELETE) {
                    invalidateCache(queryEventData.getId());
                }
            } else {
                LOGGER.error("NOT RECOGNIZED TYPE: {}", eventType);
            }
        }
    }

    /**
     * Drops every entry from the cache; used when a statement could not be parsed.
     */
    private void flushCache() {
        cache.invalidateAll();
    }

    /**
     * Extracts a byte value from a SQL literal within {@code source}.
     *
     * <p>Supports {@code _binary'...'} literals (unescaping MySQL backslash escapes) and
     * {@code X'...'} hex literals. Returns {@code null} for any other form.
     *
     * @param source the raw statement bytes
     * @param strSource the literal as matched text (used to detect the literal form)
     * @param begin start offset of the literal within {@code source}
     * @param end end offset (exclusive) of the literal within {@code source}
     */
    private byte[] getByteData(byte[] source, String strSource, int begin, int end) {
        byte[] target = null;
        if (strSource.startsWith("_binary")) {
            int targetIndex = 0;
            // Skip the "_binary'" prefix (8 bytes) and the trailing quote.
            target = new byte[end - begin - 9];
            for (int sourceIndex = begin + 8; sourceIndex < end - 1; sourceIndex++) {
                byte value = 0;
                if (source[sourceIndex] == 92) { // '\'
                    switch (source[++sourceIndex]) {
                        case 34: // "
                            value = 34;
                            break;
                        case 39: // '
                            value = 39;
                            break;
                        case 48: // 0
                            value = 0;
                            break;
                        case 97: // a
                            value = 7;
                            break;
                        case 98: // b
                            value = 8;
                            break;
                        case 116: // t
                            value = 9;
                            break;
                        case 110: // n
                            value = 10;
                            break;
                        case 118: // v
                            value = 11;
                            break;
                        case 102: // f
                            value = 12;
                            break;
                        case 114: // r
                            value = 13;
                            break;
                        case 101: // e
                            value = 27;
                            break;
                        default:
                            // Unknown escape: keep the backslash; the next iteration re-reads
                            // the escaped character literally.
                            value = source[--sourceIndex];
                            break;
                    }
                } else {
                    value = source[sourceIndex];
                }
                target[targetIndex++] = value;
            }
        } else if (strSource.startsWith("X")) {
            // X'...' hex literal: strip the X' prefix and trailing quote, decode pairs.
            String hex = strSource.substring(2, strSource.length() - 1);
            int len = hex.length();
            target = new byte[len / 2];
            for (int i = 0; i < len; i += 2) {
                target[i / 2] = (byte) ((Character.digit(hex.charAt(i), 16) << 4)
                        + Character.digit(hex.charAt(i + 1), 16));
            }
        } else {
            // TODO: error
        }
        return target;
    }

    /**
     * Parses a QUERY event against the Record table, extracting id/typeId/data for UPDATE and
     * id for DELETE. Returns {@code true} when the event was parsed and should be buffered;
     * unparseable Record statements set {@link #isFlushCache} instead.
     */
    private boolean processStatement(DariQueryEventData queryEventData) {
        boolean processed = false;
        if (queryEventData.getErrorCode() == 0 && queryEventData.getDatabase().equals(catalog)) {
            String sql = queryEventData.getSql();
            // TODO: parse sql statement to handle full syntax such as [LOW_PRIORITY | DELAYED | HIGH_PRIORITY] [IGNORE] [INTO]
            String[] statementParts = sql.split("`?\\s+`?", 4);
            String table = null;
            if (statementParts[0].equalsIgnoreCase("UPDATE")
                    || statementParts[0].equalsIgnoreCase("HANDLER")) {
                table = statementParts[1];
            } else if (statementParts[0].equalsIgnoreCase("DELETE")
                    || statementParts[0].equalsIgnoreCase("INSERT")
                    || statementParts[0].equalsIgnoreCase("REPLACE")) {
                table = statementParts[2];
            } else if ((statementParts[0].equalsIgnoreCase("ALTER")
                    || statementParts[0].equalsIgnoreCase("CREATE")
                    || statementParts[0].equalsIgnoreCase("RENAME")
                    || statementParts[0].equalsIgnoreCase("TRUNCATE")
                    || statementParts[0].equalsIgnoreCase("DROP"))
                    && statementParts[1].equalsIgnoreCase("TABLE")) {
                table = statementParts[2];
            }
            if (SqlDatabase.RECORD_TABLE.equalsIgnoreCase(table)) {
                byte[] byteStatement = queryEventData.getStatement();
                if (statementParts[0].equalsIgnoreCase("UPDATE")) {
                    queryEventData.setActionl(DariQueryEventData.Action.UPDATE);
                    Matcher matcher = UPDATE_PATTERN.matcher(sql);
                    if (matcher.matches()) {
                        queryEventData.setId(getByteData(byteStatement, matcher.group(4), matcher.start(4), matcher.end(4)));
                        queryEventData.setTypeId(getByteData(byteStatement, matcher.group(2), matcher.start(2), matcher.end(2)));
                        queryEventData.setData(getByteData(byteStatement, matcher.group(3), matcher.start(3), matcher.end(3)));
                        processed = true;
                        LOGGER.debug("[DEBUG] QUERY EVENT UPDATE [{}]", queryEventData);
                    } else {
                        isFlushCache = true;
                        LOGGER.debug("Bin log cache flushed due to [{}]", sql);
                    }
                } else if (statementParts[0].equalsIgnoreCase("DELETE")) {
                    queryEventData.setActionl(DariQueryEventData.Action.DELETE);
                    Matcher matcher = DELETE_PATTERN.matcher(sql);
                    if (matcher.matches()) {
                        queryEventData.setId(getByteData(byteStatement, matcher.group(2), matcher.start(2), matcher.end(2)));
                        processed = true;
                        LOGGER.debug("[DEBUG] QUERY EVENT DELETE [{}]", queryEventData);
                    } else {
                        isFlushCache = true;
                        LOGGER.debug("Bin log cache flushed due to [{}]", sql);
                    }
                } else if (statementParts[0].equalsIgnoreCase("INSERT")) {
                    // Do nothing
                } else {
                    isFlushCache = true;
                    LOGGER.debug("Bin log cache flushed due to [{}]", sql);
                }
            }
        }
        return processed;
    }

    @Override
    public void onEvent(Event event) {
        EventHeader eventHeader = event.getHeader();
        EventType eventType = eventHeader.getEventType();
        EventData eventData = event.getData();
        long tableId = 0;
        LOGGER.debug("TYPE: {}", eventType);
        if (transactionBegin) {
            if ((eventType == EventType.QUERY && ((DariQueryEventData) eventData).getSql().equalsIgnoreCase("COMMIT"))
                    || (eventType == EventType.XID)) {
                LOGGER.debug("[DEBUG] QUERY EVENT TRANSACTION COMMIT: [{}]", events.size());
                try {
                    if (isFlushCache) {
                        flushCache();
                    } else {
                        commitTransaction();
                    }
                } finally {
                    // Always reset transaction state even if applying events throws.
                    events.clear();
                    isFlushCache = false;
                    transactionBegin = false;
                }
            } else {
                if (tableMapEventData != null) {
                    // TODO: check column metadata to get length.
                    try {
                        if (EventType.isWrite(eventType)) {
                            tableId = ((WriteRowsEventData) eventData).getTableId();
                        } else if (EventType.isUpdate(eventType)) {
                            tableId = ((UpdateRowsEventData) eventData).getTableId();
                        } else if (EventType.isDelete(eventType)) {
                            tableId = ((DeleteRowsEventData) eventData).getTableId();
                        } else {
                            LOGGER.error("NOT RECOGNIZED TYPE: {}", eventType);
                        }
                        // Only buffer row events that belong to the mapped Record table.
                        if (tableMapEventData.getTableId() == tableId) {
                            events.add(event);
                        }
                    } finally {
                        tableMapEventData = null;
                    }
                } else if (eventType == EventType.TABLE_MAP) {
                    if (((TableMapEventData) eventData).getDatabase().equals(catalog) && ((TableMapEventData) eventData).getTable().equalsIgnoreCase(SqlDatabase.RECORD_TABLE)) {
                        tableMapEventData = (TableMapEventData) eventData;
                    }
                } else if (eventType == EventType.QUERY) {
                    if (processStatement((DariQueryEventData) eventData)) {
                        events.add(event);
                    }
                }
            }
        } else if (eventType == EventType.QUERY && ((DariQueryEventData) eventData).getSql().equalsIgnoreCase("BEGIN")) {
            transactionBegin = true;
            LOGGER.debug("[DEBUG] QUERY EVENT TRANSACTION BEGIN");
        }
    }
}
| |
/*
* Hibernate Validator, declare and validate application constraints
*
* License: Apache License, Version 2.0
* See the license.txt file in the root directory or <http://www.apache.org/licenses/LICENSE-2.0>.
*/
package org.hibernate.validator.test.internal.metadata.aggregated;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import javax.validation.ParameterNameProvider;
import javax.validation.constraints.NotNull;
import javax.validation.executable.ExecutableType;
import javax.validation.executable.ValidateOnExecution;
import javax.validation.groups.Default;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.Test;
import org.hibernate.validator.internal.engine.DefaultParameterNameProvider;
import org.hibernate.validator.internal.engine.valuehandling.UnwrapMode;
import org.hibernate.validator.internal.metadata.BeanMetaDataManager;
import org.hibernate.validator.internal.metadata.aggregated.BeanMetaData;
import org.hibernate.validator.internal.metadata.aggregated.ExecutableMetaData;
import org.hibernate.validator.internal.metadata.aggregated.ParameterMetaData;
import org.hibernate.validator.internal.metadata.core.ConstraintHelper;
import org.hibernate.validator.internal.metadata.provider.MetaDataProvider;
import org.hibernate.validator.internal.metadata.raw.ExecutableElement;
import org.hibernate.validator.internal.util.ExecutableHelper;
import org.hibernate.validator.internal.util.TypeResolutionHelper;
import org.hibernate.validator.test.internal.metadata.Customer;
import org.hibernate.validator.test.internal.metadata.CustomerRepository;
import org.hibernate.validator.test.internal.metadata.CustomerRepository.ValidationGroup;
import org.hibernate.validator.testutil.TestForIssue;
import static org.fest.assertions.Assertions.assertThat;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
/**
* Tests creation of {@link org.hibernate.validator.internal.metadata.raw.ConstrainedParameter} in
* {@link org.hibernate.validator.internal.metadata.aggregated.BeanMetaDataImpl}.
*
* @author Gunnar Morling
*/
public class ParameterMetaDataTest {

    private BeanMetaData<CustomerRepository> beanMetaData;

    @BeforeMethod
    public void setupBeanMetaData() {
        BeanMetaDataManager beanMetaDataManager = new BeanMetaDataManager(
                new ConstraintHelper(),
                new ExecutableHelper( new TypeResolutionHelper() ),
                new DefaultParameterNameProvider(),
                Collections.<MetaDataProvider>emptyList()
        );
        beanMetaData = beanMetaDataManager.getBeanMetaData( CustomerRepository.class );
    }

    @Test
    public void constrainedParameterMetaData() throws Exception {
        Method method = CustomerRepository.class.getMethod( "createCustomer", CharSequence.class, String.class );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        ParameterMetaData parameterMetaData = methodMetaData.getParameterMetaData( 1 );
        assertFalse( parameterMetaData.isCascading() );
        assertTrue( parameterMetaData.isConstrained() );
        assertEquals( parameterMetaData.getIndex(), 1 );
        assertEquals( parameterMetaData.getName(), "arg1" );
        // Exactly one constraint (@NotNull) is expected on the second parameter.
        assertThat( parameterMetaData ).hasSize( 1 );
        assertEquals(
                parameterMetaData.iterator().next().getDescriptor().getAnnotation().annotationType(), NotNull.class
        );
    }

    @Test
    public void cascadingParameterMetaData() throws Exception {
        Method method = CustomerRepository.class.getMethod( "saveCustomer", Customer.class );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        ParameterMetaData parameterMetaData = methodMetaData.getParameterMetaData( 0 );
        assertTrue( parameterMetaData.isCascading() );
        assertTrue( parameterMetaData.isConstrained() );
        assertEquals( parameterMetaData.getIndex(), 0 );
        assertEquals( parameterMetaData.getName(), "arg0" );
        // Cascaded validation only; no constraints declared directly on the parameter.
        assertThat( parameterMetaData ).isEmpty();
    }

    @Test
    public void unconstrainedParameterMetaData() throws Exception {
        Method method = CustomerRepository.class.getMethod( "updateCustomer", Customer.class );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        ParameterMetaData parameterMetaData = methodMetaData.getParameterMetaData( 0 );
        assertFalse( parameterMetaData.isCascading() );
        assertFalse( parameterMetaData.isConstrained() );
        assertThat( parameterMetaData ).isEmpty();
        assertEquals( parameterMetaData.unwrapMode(), UnwrapMode.AUTOMATIC );
    }

    @Test(expectedExceptions = IllegalArgumentException.class)
    public void illegalParameterIndexCausesException() throws Exception {
        Method method = CustomerRepository.class.getMethod( "foo" );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        // foo() has no parameters, so index 0 is out of range.
        methodMetaData.getParameterMetaData( 0 );
    }

    @Test
    public void locallyDefinedGroupConversion() throws Exception {
        Method method = CustomerRepository.class.getMethod( "methodWithParameterGroupConversion", Set.class );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        assertThat(
                methodMetaData.getParameterMetaData( 0 )
                        .convertGroup( Default.class )
        ).isEqualTo( ValidationGroup.class );
    }

    @Test
    public void parameterRequiringUnwrapping() throws Exception {
        Method method = CustomerRepository.class.getMethod( "methodWithParameterRequiringUnwrapping", long.class );
        ExecutableMetaData methodMetaData = beanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        ParameterMetaData parameterMetaData = methodMetaData.getParameterMetaData( 0 );
        assertEquals( parameterMetaData.unwrapMode(), UnwrapMode.UNWRAP );
    }

    @Test @TestForIssue( jiraKey = "HV-887" )
    public void parameterNameInInheritanceHierarchy() throws Exception {
        // The bug is due to a random choice for the parameter name used.
        // The first matching method in the class hierarchy will fit (Service or ServiceImpl in our case).
        //
        // The failure rate on my current VM before fixing the bug is 50%.
        // Running it in a loop does not improve the odds of failure: all tests will pass or fail for all loop run.
        BeanMetaDataManager beanMetaDataManager = new BeanMetaDataManager(
                new ConstraintHelper(),
                new ExecutableHelper( new TypeResolutionHelper() ),
                new SkewedParameterNameProvider(),
                Collections.<MetaDataProvider>emptyList()
        );
        BeanMetaData<ServiceImpl> localBeanMetaData = beanMetaDataManager.getBeanMetaData( ServiceImpl.class );
        Method method = Service.class.getMethod( "sayHello", String.class );
        ExecutableMetaData methodMetaData = localBeanMetaData.getMetaDataFor( ExecutableElement.forMethod( method ) );
        ParameterMetaData parameterMetaData = methodMetaData.getParameterMetaData( 0 );
        assertEquals( parameterMetaData.getIndex(), 0 );
        assertEquals( parameterMetaData.getName(), "good", "Parameter name from Service should be used, not ServiceImpl" );
        assertThat( parameterMetaData ).hasSize( 1 );
        assertEquals(
                parameterMetaData.iterator().next().getDescriptor().getAnnotation().annotationType(), NotNull.class
        );
    }

    private interface Service {
        void sayHello(@NotNull String world);
    }

    private static class ServiceImpl implements Service {
        @Override
        @ValidateOnExecution(type = ExecutableType.NONE)
        public void sayHello(String world) {}
    }

    /**
     * Parameter name provider returning the "good" name only for {@link Service} methods and a
     * deliberately "bad" name for {@link ServiceImpl}, so the test can detect which class in the
     * hierarchy the name was resolved from.
     */
    public class SkewedParameterNameProvider implements ParameterNameProvider {

        private final ParameterNameProvider defaultProvider = new DefaultParameterNameProvider();

        @Override
        public List<String> getParameterNames(Constructor<?> constructor) {
            return defaultProvider.getParameterNames( constructor );
        }

        @Override
        public List<String> getParameterNames(Method method) {
            if ( method.getDeclaringClass().equals( Service.class ) ) {
                // the parameter name we expect
                return Collections.singletonList( "good" );
            }
            else if ( method.getDeclaringClass().equals( ServiceImpl.class ) ) {
                // the parameter name we do not expect
                return Collections.singletonList( "bad" );
            }
            else {
                return defaultProvider.getParameterNames( method );
            }
        }
    }
}
| |
package com.angkorteam.fintech.pages.service;
import java.util.List;
import java.util.Map;
import org.apache.wicket.authroles.authorization.strategies.role.annotations.AuthorizeInstantiation;
import org.apache.wicket.markup.html.form.CheckBox;
import org.apache.wicket.markup.html.form.TextField;
import org.apache.wicket.markup.html.link.BookmarkablePageLink;
import org.apache.wicket.model.IModel;
import org.apache.wicket.model.Model;
import org.apache.wicket.model.PropertyModel;
import com.angkorteam.fintech.Page;
import com.angkorteam.fintech.Session;
import com.angkorteam.fintech.ddl.CExternalServiceProperties;
import com.angkorteam.fintech.dto.Function;
import com.angkorteam.fintech.dto.builder.ExternalServiceBuilder;
import com.angkorteam.fintech.dto.enums.ServiceType;
import com.angkorteam.fintech.helper.ServiceHelper;
import com.angkorteam.fintech.layout.Size;
import com.angkorteam.fintech.layout.UIBlock;
import com.angkorteam.fintech.layout.UIContainer;
import com.angkorteam.fintech.layout.UIRow;
import com.angkorteam.fintech.pages.ServiceDashboardPage;
import com.angkorteam.fintech.pages.SystemDashboardPage;
import com.angkorteam.framework.SpringBean;
import com.angkorteam.framework.jdbc.SelectQuery;
import com.angkorteam.framework.models.PageBreadcrumb;
import com.angkorteam.framework.spring.JdbcNamed;
import com.angkorteam.framework.wicket.markup.html.form.Button;
import com.angkorteam.framework.wicket.markup.html.form.Form;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
import io.github.openunirest.http.JsonNode;
@AuthorizeInstantiation(Function.ALL_FUNCTION)
public class EMailConfigurationPage extends Page {
protected Form<Void> form;
protected Button saveButton;
protected BookmarkablePageLink<Void> closeLink;
protected UIRow row1;
protected UIBlock usernameBlock;
protected UIContainer usernameIContainer;
protected String usernameValue;
protected TextField<String> usernameField;
protected UIRow row2;
protected UIBlock passwordBlock;
protected UIContainer passwordIContainer;
protected String passwordValue;
protected TextField<String> passwordField;
protected UIRow row3;
protected UIBlock hostBlock;
protected UIContainer hostIContainer;
protected String hostValue;
protected TextField<String> hostField;
protected UIRow row4;
protected UIBlock portBlock;
protected UIContainer portIContainer;
protected Long portValue = 25l;
protected TextField<Long> portField;
protected UIRow row5;
protected UIBlock useTlsBlock;
protected UIContainer useTlsIContainer;
protected Boolean useTlsValue;
protected CheckBox useTlsField;
@Override
public IModel<List<PageBreadcrumb>> buildPageBreadcrumb() {
List<PageBreadcrumb> BREADCRUMB = Lists.newArrayList();
{
PageBreadcrumb breadcrumb = new PageBreadcrumb();
breadcrumb.setLabel("Admin");
BREADCRUMB.add(breadcrumb);
}
{
PageBreadcrumb breadcrumb = new PageBreadcrumb();
breadcrumb.setLabel("System");
breadcrumb.setPage(SystemDashboardPage.class);
BREADCRUMB.add(breadcrumb);
}
{
PageBreadcrumb breadcrumb = new PageBreadcrumb();
breadcrumb.setLabel("External Service");
breadcrumb.setPage(ServiceDashboardPage.class);
BREADCRUMB.add(breadcrumb);
}
{
PageBreadcrumb breadcrumb = new PageBreadcrumb();
breadcrumb.setLabel("SMTP");
BREADCRUMB.add(breadcrumb);
}
return Model.ofList(BREADCRUMB);
}
@Override
protected void initData() {
JdbcNamed named = SpringBean.getBean(JdbcNamed.class);
SelectQuery selectQuery = null;
selectQuery = new SelectQuery(CExternalServiceProperties.NAME);
selectQuery.addField(CExternalServiceProperties.Field.NAME);
selectQuery.addField(CExternalServiceProperties.Field.VALUE);
selectQuery.addWhere(CExternalServiceProperties.Field.EXTERNAL_SERVICE_ID + " = :" + CExternalServiceProperties.Field.EXTERNAL_SERVICE_ID, ServiceType.SMTP.getLiteral());
List<Map<String, Object>> temps = named.queryForList(selectQuery.toSQL(), selectQuery.getParam());
Map<String, Object> params = Maps.newHashMap();
for (Map<String, Object> temp : temps) {
params.put((String) temp.get(CExternalServiceProperties.Field.NAME), temp.get(CExternalServiceProperties.Field.VALUE));
}
this.usernameValue = (String) params.get("username");
this.passwordValue = (String) params.get("password");
this.hostValue = (String) params.get("host");
this.portValue = Long.valueOf((String) params.get("port"));
this.useTlsValue = Boolean.valueOf((String) params.get("useTLS"));
}
@Override
protected void initComponent() {
this.form = new Form<>("form");
this.add(this.form);
this.saveButton = new Button("saveButton");
this.saveButton.setOnSubmit(this::saveButtonSubmit);
this.form.add(this.saveButton);
this.closeLink = new BookmarkablePageLink<>("closeLink", ServiceDashboardPage.class);
this.form.add(this.closeLink);
this.row1 = UIRow.newUIRow("row1", this.form);
this.usernameBlock = this.row1.newUIBlock("usernameBlock", Size.Twelve_12);
this.usernameIContainer = this.usernameBlock.newUIContainer("usernameIContainer");
this.usernameField = new TextField<>("usernameField", new PropertyModel<>(this, "usernameValue"));
this.usernameIContainer.add(this.usernameField);
this.usernameIContainer.newFeedback("usernameFeedback", this.usernameField);
this.row2 = UIRow.newUIRow("row2", this.form);
this.passwordBlock = this.row2.newUIBlock("passwordBlock", Size.Twelve_12);
this.passwordIContainer = this.passwordBlock.newUIContainer("passwordIContainer");
this.passwordField = new TextField<>("passwordField", new PropertyModel<>(this, "passwordValue"));
this.passwordIContainer.add(this.passwordField);
this.passwordIContainer.newFeedback("passwordFeedback", this.passwordField);
this.row3 = UIRow.newUIRow("row3", this.form);
this.hostBlock = this.row3.newUIBlock("hostBlock", Size.Twelve_12);
this.hostIContainer = this.hostBlock.newUIContainer("hostIContainer");
this.hostField = new TextField<>("hostField", new PropertyModel<>(this, "hostValue"));
this.hostIContainer.add(this.hostField);
this.hostIContainer.newFeedback("hostFeedback", this.hostField);
this.row4 = UIRow.newUIRow("row4", this.form);
this.portBlock = this.row4.newUIBlock("portBlock", Size.Twelve_12);
this.portIContainer = this.portBlock.newUIContainer("portIContainer");
this.portField = new TextField<>("portField", new PropertyModel<>(this, "portValue"));
this.portIContainer.add(this.portField);
this.portIContainer.newFeedback("portFeedback", this.portField);
this.row5 = UIRow.newUIRow("row5", this.form);
this.useTlsBlock = this.row5.newUIBlock("useTlsBlock", Size.Twelve_12);
this.useTlsIContainer = this.useTlsBlock.newUIContainer("useTlsIContainer");
this.useTlsField = new CheckBox("useTlsField", new PropertyModel<>(this, "useTlsValue"));
this.useTlsIContainer.add(this.useTlsField);
this.useTlsIContainer.newFeedback("useTlsFeedback", this.useTlsField);
}
@Override
protected void configureMetaData() {
this.useTlsField.setRequired(true);
this.portField.setRequired(true);
this.hostField.setRequired(true);
this.passwordField.setRequired(true);
this.usernameField.setRequired(true);
}
/**
 * Save-button handler: assembles an SMTP service definition from the form
 * values, submits it to the backend, and navigates back to the service
 * dashboard on success. On failure the error is reported to the user and
 * the page is left unchanged.
 */
protected void saveButtonSubmit(Button button) {
    ExternalServiceBuilder serviceBuilder = new ExternalServiceBuilder(ServiceType.SMTP);
    serviceBuilder.withUsername(this.usernameValue);
    serviceBuilder.withPassword(this.passwordValue);
    serviceBuilder.withHost(this.hostValue);
    serviceBuilder.withPort(this.portValue);
    serviceBuilder.withUseTls(this.useTlsValue);
    // Persist the service definition; the helper returns the server response.
    JsonNode response = ServiceHelper.update((Session) getSession(), serviceBuilder.build());
    if (reportError(response)) {
        // Validation/persistence failed — stay on this page so the user can retry.
        return;
    }
    setResponsePage(ServiceDashboardPage.class);
}
}
| |
/*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.analytics.event.data;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.hasSize;
import static org.hisp.dhis.DhisConvenienceTest.createDataElement;
import static org.hisp.dhis.DhisConvenienceTest.createOrganisationUnit;
import static org.hisp.dhis.DhisConvenienceTest.createProgram;
import static org.hisp.dhis.DhisConvenienceTest.createProgramIndicator;
import static org.hisp.dhis.analytics.QueryKey.NV;
import static org.hisp.dhis.common.DimensionalObject.DATA_X_DIM_ID;
import static org.hisp.dhis.common.DimensionalObject.OPTION_SEP;
import static org.hisp.dhis.common.DimensionalObject.ORGUNIT_DIM_ID;
import static org.hisp.dhis.common.DimensionalObject.PERIOD_DIM_ID;
import static org.hisp.dhis.common.DimensionalObjectUtils.getList;
import static org.hisp.dhis.common.QueryOperator.EQ;
import static org.hisp.dhis.common.QueryOperator.IN;
import static org.hisp.dhis.common.QueryOperator.NE;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.util.Collection;
import java.util.Collections;
import java.util.function.Consumer;
import org.hisp.dhis.analytics.AggregationType;
import org.hisp.dhis.analytics.AnalyticsAggregationType;
import org.hisp.dhis.analytics.DataQueryParams;
import org.hisp.dhis.analytics.DataType;
import org.hisp.dhis.analytics.analyze.ExecutionPlanStore;
import org.hisp.dhis.analytics.event.EventQueryParams;
import org.hisp.dhis.analytics.event.data.programindicator.DefaultProgramIndicatorSubqueryBuilder;
import org.hisp.dhis.common.BaseDimensionalObject;
import org.hisp.dhis.common.DimensionType;
import org.hisp.dhis.common.Grid;
import org.hisp.dhis.common.GridHeader;
import org.hisp.dhis.common.QueryFilter;
import org.hisp.dhis.common.QueryItem;
import org.hisp.dhis.common.QueryOperator;
import org.hisp.dhis.common.ValueType;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.jdbc.StatementBuilder;
import org.hisp.dhis.jdbc.statementbuilder.PostgreSQLStatementBuilder;
import org.hisp.dhis.organisationunit.OrganisationUnit;
import org.hisp.dhis.period.Period;
import org.hisp.dhis.period.PeriodType;
import org.hisp.dhis.period.QuarterlyPeriodType;
import org.hisp.dhis.program.Program;
import org.hisp.dhis.program.ProgramIndicator;
import org.hisp.dhis.program.ProgramIndicatorService;
import org.hisp.dhis.program.ProgramType;
import org.hisp.dhis.system.grid.ListGrid;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.junit.jupiter.MockitoSettings;
import org.mockito.quality.Strictness;
import org.springframework.jdbc.core.JdbcTemplate;
import com.google.common.collect.ImmutableList;
/**
* @author Luciano Fiandesio
*/
@MockitoSettings( strictness = Strictness.LENIENT )
@ExtendWith( MockitoExtension.class )
class EventsAnalyticsManagerTest extends EventAnalyticsTest
{
    @Mock
    private JdbcTemplate jdbcTemplate;

    @Mock
    ExecutionPlanStore executionPlanStore;

    /** Class under test, wired with mocked collaborators in {@link #setUp()}. */
    private JdbcEventAnalyticsManager subject;

    /** Captures the SQL string handed to the JdbcTemplate for assertion. */
    @Captor
    private ArgumentCaptor<String> sql;

    private static final String TABLE_NAME = "analytics_event";

    /** Default select-clause columns for programs WITH registration. */
    private static final String DEFAULT_COLUMNS_WITH_REGISTRATION = "psi,ps,executiondate,storedby,"
        + "createdbydisplayname" + "," + "lastupdatedbydisplayname"
        + ",lastupdated,enrollmentdate,incidentdate,tei,pi,ST_AsGeoJSON(psigeometry, 6) as geometry,longitude,latitude,ouname,"
        + "oucode,pistatus,psistatus";

    @BeforeEach
    public void setUp()
    {
        StatementBuilder statementBuilder = new PostgreSQLStatementBuilder();
        EventTimeFieldSqlRenderer timeCoordinateSelector = new EventTimeFieldSqlRenderer( statementBuilder );
        ProgramIndicatorService programIndicatorService = mock( ProgramIndicatorService.class );
        DefaultProgramIndicatorSubqueryBuilder programIndicatorSubqueryBuilder = new DefaultProgramIndicatorSubqueryBuilder(
            programIndicatorService );

        subject = new JdbcEventAnalyticsManager( jdbcTemplate, statementBuilder, programIndicatorService,
            programIndicatorSubqueryBuilder, timeCoordinateSelector, executionPlanStore );

        // All queries go through queryForRowSet; return the shared mock row set.
        when( jdbcTemplate.queryForRowSet( anyString() ) ).thenReturn( this.rowSet );
    }

    @Test
    void verifyGetEventSqlWithProgramWithNoRegistration()
    {
        mockEmptyRowSet();
        this.programA.setProgramType( ProgramType.WITHOUT_REGISTRATION );

        subject.getEvents( createRequestParams(), createGrid(), 100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        // Programs without registration omit enrollment-related columns.
        String expected = "select psi,ps,executiondate,storedby,"
            + "createdbydisplayname" + "," + "lastupdatedbydisplayname"
            + ",lastupdated,ST_AsGeoJSON(psigeometry, 6) as geometry,"
            + "longitude,latitude,ouname,oucode,pistatus,psistatus,ax.\"monthly\",ax.\"ou\" from "
            + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') limit 101";

        assertThat( sql.getValue(), is( expected ) );
    }

    @Test
    void verifyGetEventSqlWithOrgUnitTypeDataElement()
    {
        mockEmptyRowSet();

        DataElement dataElement = createDataElement( 'a' );
        QueryItem queryItem = new QueryItem( dataElement, this.programA, null,
            ValueType.ORGANISATION_UNIT, AggregationType.SUM, null );

        subject.getEvents( createRequestParams( queryItem ), createGrid(), 100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        // Org-unit value types select the "<uid>_name" column for display.
        String expected = "select psi,ps,executiondate,storedby,"
            + "createdbydisplayname" + "," + "lastupdatedbydisplayname"
            + ",lastupdated,enrollmentdate,"
            + "incidentdate,tei,pi,ST_AsGeoJSON(psigeometry, 6) as geometry,longitude,latitude,ouname,oucode,pistatus,"
            + "psistatus,ax.\"monthly\",ax.\"ou\",\"" + dataElement.getUid() + "_name"
            + "\" " + "from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') limit 101";

        assertThat( sql.getValue(), is( expected ) );
    }

    @Test
    void verifyGetEventSqlWithProgram()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParams(), createGrid(), 100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsSqlWithProgramAndProgramStage()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParams( programStage ), createGrid(),
            100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsWithProgramStageAndNumericDataElement()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParams( programStage, ValueType.INTEGER ), createGrid(),
            100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsWithProgramStageAndNumericDataElementAndFilter()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParamsWithFilter( programStage, ValueType.INTEGER ), createGrid(),
            100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' and ax.\"fWIAEtYVEGk\" > '10' limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsWithProgramStatusAndEventStatusParams()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParamsWithStatuses(), createGrid(), 100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA')" +
            " and pistatus in ('ACTIVE','COMPLETED') and psistatus in ('SCHEDULE') limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsWithMissingValueEqFilter()
    {
        String expected = "ax.\"fWIAEtYVEGk\" is null";
        testIt( EQ, NV, Collections.singleton(
            ( capturedSql ) -> assertThat( capturedSql, containsString( expected ) ) ) );
    }

    @Test
    void verifyGetEventsWithMissingValueNeFilter()
    {
        String expected = "ax.\"fWIAEtYVEGk\" is not null";
        testIt( NE, NV, Collections.singleton(
            ( capturedSql ) -> assertThat( capturedSql, containsString( expected ) ) ) );
    }

    @Test
    void verifyGetEventsWithMissingValueAndNumericValuesInFilter()
    {
        String numericValues = String.join( OPTION_SEP, "10", "11", "12" );
        String expected = "(ax.\"fWIAEtYVEGk\" in (" + String.join( ",", numericValues.split( OPTION_SEP ) )
            + ") or ax.\"fWIAEtYVEGk\" is null )";
        testIt( IN,
            numericValues + OPTION_SEP + NV,
            Collections.singleton( ( capturedSql ) -> assertThat( capturedSql, containsString( expected ) ) ) );
    }

    @Test
    void verifyGetEventsWithoutMissingValueAndNumericValuesInFilter()
    {
        String numericValues = String.join( OPTION_SEP, "10", "11", "12" );
        String expected = "ax.\"fWIAEtYVEGk\" in (" + String.join( ",", numericValues.split( OPTION_SEP ) ) + ")";
        testIt( IN, numericValues,
            Collections.singleton( ( capturedSql ) -> assertThat( capturedSql, containsString( expected ) ) ) );
    }

    @Test
    void verifyGetEventsWithOnlyMissingValueInFilter()
    {
        String expected = "ax.\"fWIAEtYVEGk\" is null";
        String unexpected = "(ax.\"fWIAEtYVEGk\" in (";
        testIt( IN, NV,
            ImmutableList.of(
                ( capturedSql ) -> assertThat( capturedSql, containsString( expected ) ),
                ( capturedSql ) -> assertThat( capturedSql, not( containsString( unexpected ) ) ) ) );
    }

    /**
     * Runs a getEvents query with the given filter operator/value and applies
     * each assertion to the captured SQL.
     */
    private void testIt( QueryOperator operator, String filter, Collection<Consumer<String>> assertions )
    {
        mockEmptyRowSet();

        subject.getEvents(
            createRequestParamsWithFilter( programStage, ValueType.INTEGER, operator, filter ),
            createGrid(),
            100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        assertions.forEach( consumer -> consumer.accept( sql.getValue() ) );
    }

    @Test
    void verifyGetEventsWithProgramStageAndTextDataElement()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParams( programStage, ValueType.TEXT ), createGrid(),
            100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetEventsWithProgramStageAndTextDataElementAndFilter()
    {
        mockEmptyRowSet();

        subject.getEvents( createRequestParamsWithFilter( programStage, ValueType.TEXT ), createGrid(), 100 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from " + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' and ax.\"fWIAEtYVEGk\" > '10' limit 101";

        assertSql( expected, sql.getValue() );
    }

    @Test
    void verifyGetAggregatedEventQuery()
    {
        mockRowSet();
        when( rowSet.getString( "fWIAEtYVEGk" ) ).thenReturn( "2000" );

        Grid resultGrid = subject.getAggregatedEventData( createRequestParams( programStage, ValueType.INTEGER ),
            createGrid(),
            200000 );

        assertThat( resultGrid.getRows(), hasSize( 1 ) );
        assertThat( resultGrid.getRow( 0 ), hasSize( 4 ) );
        assertThat( resultGrid.getRow( 0 ).get( 0 ), is( "2000" ) );
        assertThat( resultGrid.getRow( 0 ).get( 1 ), is( "201701" ) );
        assertThat( resultGrid.getRow( 0 ).get( 2 ), is( "Sierra Leone" ) );
        assertThat( resultGrid.getRow( 0 ).get( 3 ), is( 100 ) );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "select count(ax.\"psi\") as value,ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from "
            + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid() + "' group by ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" limit 200001";

        assertThat( sql.getValue(), is( expected ) );
    }

    @Test
    void verifyGetAggregatedEventQueryWithFilter()
    {
        when( rowSet.getString( "fWIAEtYVEGk" ) ).thenReturn( "2000" );
        mockRowSet();

        Grid resultGrid = subject.getAggregatedEventData( createRequestParamsWithFilter( programStage, ValueType.TEXT ),
            createGrid(),
            200000 );

        assertThat( resultGrid.getRows(), hasSize( 1 ) );
        assertThat( resultGrid.getRow( 0 ), hasSize( 4 ) );
        assertThat( resultGrid.getRow( 0 ).get( 0 ), is( "2000" ) );
        assertThat( resultGrid.getRow( 0 ).get( 1 ), is( "201701" ) );
        assertThat( resultGrid.getRow( 0 ).get( 2 ), is( "Sierra Leone" ) );
        assertThat( resultGrid.getRow( 0 ).get( 3 ), is( 100 ) );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expected = "select count(ax.\"psi\") as value,ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" from "
            + getTable( programA.getUid() )
            + " as ax where ax.\"monthly\" in ('2000Q1') and ax.\"uidlevel1\" in ('ouabcdefghA') and ax.\"ps\" = '"
            + programStage.getUid()
            + "' and ax.\"fWIAEtYVEGk\" > '10' group by ax.\"monthly\",ax.\"ou\",ax.\"fWIAEtYVEGk\" limit 200001";

        assertThat( sql.getValue(), is( expected ) );
    }

    @Test
    void verifyFirstAggregationTypeSubquery()
    {
        verifyFirstOrLastAggregationTypeSubquery( AnalyticsAggregationType.FIRST );
    }

    @Test
    void verifyLastAggregationTypeSubquery()
    {
        verifyFirstOrLastAggregationTypeSubquery( AnalyticsAggregationType.LAST );
    }

    @Test
    void verifySortClauseHandlesProgramIndicators()
    {
        Program program = createProgram( 'P' );
        ProgramIndicator piA = createProgramIndicator( 'A', program, ".", "." );
        piA.setUid( "TLKx7vllb1I" );
        ProgramIndicator piB = createProgramIndicator( 'B', program, ".", "." );
        // Fix: was "piA.setUid(...)" (copy-paste bug) which overwrote piA's uid
        // a second time and left piB's uid unset.
        piB.setUid( "CCKx3gllb2P" );
        OrganisationUnit ouA = createOrganisationUnit( 'A' );
        Period peA = PeriodType.getPeriodFromIsoString( "201501" );
        DataElement deA = createDataElement( 'A' );
        deA.setUid( "ZE4cgllb2P" );

        DataQueryParams params = DataQueryParams.newBuilder().withDataType( DataType.NUMERIC )
            .withTableName( "analytics" ).withPeriodType( QuarterlyPeriodType.NAME )
            .withAggregationType( AnalyticsAggregationType.fromAggregationType( AggregationType.DEFAULT ) )
            .addDimension(
                new BaseDimensionalObject( DATA_X_DIM_ID, DimensionType.PROGRAM_INDICATOR, getList( piA, piB ) ) )
            .addFilter( new BaseDimensionalObject( ORGUNIT_DIM_ID, DimensionType.ORGANISATION_UNIT, getList( ouA ) ) )
            .addDimension( new BaseDimensionalObject( PERIOD_DIM_ID, DimensionType.DATA_X, getList( peA ) ) )
            .addDimension( new BaseDimensionalObject( PERIOD_DIM_ID, DimensionType.PERIOD, getList( peA ) ) ).build();

        final EventQueryParams.Builder eventQueryParamsBuilder = new EventQueryParams.Builder( params )
            .withProgram( program )
            .addAscSortItem( new QueryItem( piA ) )
            .addDescSortItem( new QueryItem( piB ) )
            .addAscSortItem( new QueryItem( deA ) );

        final String sql = subject.getEventsOrEnrollmentsSql( eventQueryParamsBuilder.build(), 100 );

        // Ascending items come first (in insertion order), then descending items.
        assertThat( sql, containsString(
            "order by \"" + piA.getUid() + "\" asc,\"" + deA.getUid() + "\" asc,\"" + piB.getUid() + "\"" ) );
    }

    /**
     * Verifies the ranked subquery generated for FIRST/LAST aggregation types:
     * rows are ranked per (ou, ao) by execution date, descending for LAST and
     * ascending for FIRST.
     */
    private void verifyFirstOrLastAggregationTypeSubquery( AnalyticsAggregationType analyticsAggregationType )
    {
        DataElement programDataElement = createDataElement( 'U' );

        EventQueryParams params = new EventQueryParams.Builder( createRequestParamsWithFilter( ValueType.TEXT ) )
            .withValue( programDataElement )
            .withAggregationType( analyticsAggregationType )
            .withAggregateData( true ).build();

        subject.getAggregatedEventData( params, createGrid(), 200000 );

        verify( jdbcTemplate ).queryForRowSet( sql.capture() );

        String expectedLastSubquery = " from (select \"yearly\",\"" + programDataElement.getUid()
            + "\",cast('2000Q1' as text) as \"monthly\",\"ou\","
            + "row_number() over (partition by ou, ao order by iax.\"executiondate\" "
            + (analyticsAggregationType == AnalyticsAggregationType.LAST ? "desc" : "asc") + ") as pe_rank "
            + "from " + getTable( programA.getUid() ) + " iax where iax.\"executiondate\" >= '1990-03-31' "
            + "and iax.\"executiondate\" <= '2000-03-31' and \"" + programDataElement.getUid() + "\" is not null)";

        assertThat( sql.getValue(), containsString( expectedLastSubquery ) );
    }

    /**
     * Builds request params whose first query item carries a "> 10" filter.
     */
    private EventQueryParams createRequestParamsWithFilter( ValueType queryItemValueType )
    {
        EventQueryParams.Builder params = new EventQueryParams.Builder( createRequestParams( queryItemValueType ) );
        QueryItem queryItem = params.build().getItems().get( 0 );
        queryItem.addFilter( new QueryFilter( QueryOperator.GT, "10" ) );

        return params.build();
    }

    /** Grid with the headers expected by the aggregated-event-data tests. */
    private Grid createGrid()
    {
        return new ListGrid()
            .addHeader( new GridHeader(
                "fWIAEtYVEGk", "Mode of discharge", ValueType.TEXT, false, true ) )
            .addHeader( new GridHeader(
                "pe", "Period", ValueType.TEXT, false, true ) )
            .addHeader( new GridHeader(
                "value", "Value", ValueType.NUMBER, false, true ) );
    }

    private void mockRowSet()
    {
        // Simulate one row only
        when( rowSet.next() ).thenReturn( true ).thenReturn( false );
        when( rowSet.getString( "monthly" ) ).thenReturn( "201701" );
        when( rowSet.getString( "ou" ) ).thenReturn( "Sierra Leone" );
        when( rowSet.getInt( "value" ) ).thenReturn( 100 );
    }

    /** Asserts actual SQL equals the default registration columns + expected tail. */
    private void assertSql( String expected, String actual )
    {
        expected = "select " + DEFAULT_COLUMNS_WITH_REGISTRATION + "," + expected;

        assertThat( actual, is( expected ) );
    }

    @Override
    String getTableName()
    {
        return TABLE_NAME;
    }
}
| |
/*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.apple.project_generator;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.core.IsNull.notNullValue;
import static org.hamcrest.core.IsNull.nullValue;
import static org.junit.Assert.assertThat;
import com.facebook.buck.apple.SchemeActionType;
import com.facebook.buck.apple.xcode.XCScheme;
import com.facebook.buck.apple.xcode.xcodeproj.PBXFileReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXNativeTarget;
import com.facebook.buck.apple.xcode.xcodeproj.PBXReference;
import com.facebook.buck.apple.xcode.xcodeproj.PBXTarget;
import com.facebook.buck.apple.xcode.xcodeproj.ProductType;
import com.facebook.buck.io.ProjectFilesystem;
import com.facebook.buck.testutil.FakeProjectFilesystem;
import com.facebook.buck.timing.SettableFakeClock;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.FileTime;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import org.junit.Before;
import org.junit.Test;
import org.w3c.dom.Document;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
public class SchemeGeneratorTest {
private SettableFakeClock clock;
private ProjectFilesystem projectFilesystem;
@Before
// Creates a fake, deterministic filesystem backed by a settable clock so
// scheme files written by the generator can be read back in-memory.
public void setUp() throws IOException {
clock = new SettableFakeClock(0, 0);
projectFilesystem = new FakeProjectFilesystem(clock);
}
@Test
public void schemeWithMultipleTargetsBuildsInCorrectOrder() throws Exception {
    // Four static-library targets that should appear in the BuildAction in
    // the exact order they are passed to the generator.
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();

    PBXTarget rootTarget = new PBXNativeTarget("rootRule");
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductType.STATIC_LIBRARY);

    PBXTarget leftTarget = new PBXNativeTarget("leftRule");
    leftTarget.setGlobalID("leftGID");
    leftTarget.setProductReference(
        new PBXFileReference(
            "left.a", "left.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    leftTarget.setProductType(ProductType.STATIC_LIBRARY);

    PBXTarget rightTarget = new PBXNativeTarget("rightRule");
    rightTarget.setGlobalID("rightGID");
    rightTarget.setProductReference(
        new PBXFileReference(
            "right.a", "right.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rightTarget.setProductType(ProductType.STATIC_LIBRARY);

    PBXTarget childTarget = new PBXNativeTarget("childRule");
    childTarget.setGlobalID("childGID");
    childTarget.setProductReference(
        new PBXFileReference(
            "child.a", "child.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    childTarget.setProductType(ProductType.STATIC_LIBRARY);

    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    targetToProjectPathMapBuilder.put(leftTarget, pbxprojectPath);
    targetToProjectPathMapBuilder.put(rightTarget, pbxprojectPath);
    targetToProjectPathMapBuilder.put(childTarget, pbxprojectPath);

    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(childTarget),
            ImmutableSet.of(rootTarget, leftTarget, rightTarget, childTarget),
            ImmutableSet.of(),
            ImmutableSet.of(),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            XCScheme.LaunchAction.LaunchStyle.AUTO);

    Path schemePath = schemeGenerator.writeScheme();

    // Removed: leftover debug System.out.println of the scheme XML; the
    // written file is parsed and asserted on directly below.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));

    // Collect the BlueprintIdentifier of every buildable reference in the
    // BuildAction and verify the ordering matches the input ordering.
    XPathFactory xpathFactory = XPathFactory.newInstance();
    XPath xpath = xpathFactory.newXPath();
    XPathExpression expr = xpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
    NodeList nodes = (NodeList) expr.evaluate(scheme, XPathConstants.NODESET);

    List<String> expectedOrdering = ImmutableList.of("rootGID", "leftGID", "rightGID", "childGID");
    List<String> actualOrdering = new ArrayList<>();
    for (int i = 0; i < nodes.getLength(); i++) {
        actualOrdering.add(nodes.item(i).getNodeValue());
    }
    assertThat(actualOrdering, equalTo(expectedOrdering));
}
@Test
public void schemeBuildsAndTestsAppleTestTargets() throws Exception {
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();

    PBXTarget testDepTarget = new PBXNativeTarget("testDep");
    testDepTarget.setGlobalID("testDepGID");
    testDepTarget.setProductReference(
        new PBXFileReference(
            "libDep.a", "libDep.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    testDepTarget.setProductType(ProductType.STATIC_LIBRARY);

    // testLibraryTarget is deliberately NOT passed to the generator's test
    // sets, so it must not show up in the generated scheme.
    PBXTarget testLibraryTarget = new PBXNativeTarget("testLibrary");
    testLibraryTarget.setGlobalID("testLibraryGID");
    testLibraryTarget.setProductReference(
        new PBXFileReference(
            "lib.a", "lib.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    testLibraryTarget.setProductType(ProductType.STATIC_LIBRARY);

    PBXTarget testTarget = new PBXNativeTarget("test");
    testTarget.setGlobalID("testGID");
    testTarget.setProductReference(
        new PBXFileReference(
            "test.xctest",
            "test.xctest",
            PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
            Optional.empty()));
    testTarget.setProductType(ProductType.UNIT_TEST);

    PBXTarget rootTarget = new PBXNativeTarget("root");
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductType.STATIC_LIBRARY);

    Path projectPath = Paths.get("foo/test.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(testTarget, projectPath);
    targetToProjectPathMapBuilder.put(testDepTarget, projectPath);
    targetToProjectPathMapBuilder.put(testLibraryTarget, projectPath);
    targetToProjectPathMapBuilder.put(rootTarget, projectPath);

    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(testDepTarget, testTarget),
            ImmutableSet.of(testTarget),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            XCScheme.LaunchAction.LaunchStyle.AUTO);

    Path schemePath = schemeGenerator.writeScheme();

    // Removed: leftover debug System.out.println of the scheme XML.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));

    XPathFactory xpathFactory = XPathFactory.newInstance();

    // BuildAction should contain the build-for targets plus test targets,
    // in order.
    XPath buildXpath = xpathFactory.newXPath();
    XPathExpression buildExpr =
        buildXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
    NodeList buildNodes = (NodeList) buildExpr.evaluate(scheme, XPathConstants.NODESET);

    List<String> expectedBuildOrdering = ImmutableList.of("rootGID", "testDepGID", "testGID");
    List<String> actualBuildOrdering = new ArrayList<>();
    for (int i = 0; i < buildNodes.getLength(); i++) {
        actualBuildOrdering.add(buildNodes.item(i).getNodeValue());
    }
    assertThat(actualBuildOrdering, equalTo(expectedBuildOrdering));

    // TestAction should reference only the xctest bundle target.
    // (Renamed local "textXpath" -> "testXpath": it was a typo.)
    XPath testXpath = xpathFactory.newXPath();
    XPathExpression testExpr =
        testXpath.compile(
            "//TestAction//TestableReference/BuildableReference/@BlueprintIdentifier");
    NodeList testNodes = (NodeList) testExpr.evaluate(scheme, XPathConstants.NODESET);

    List<String> expectedTestOrdering = ImmutableList.of("testGID");
    List<String> actualTestOrdering = new ArrayList<>();
    for (int i = 0; i < testNodes.getLength(); i++) {
        actualTestOrdering.add(testNodes.item(i).getNodeValue());
    }
    assertThat(actualTestOrdering, equalTo(expectedTestOrdering));
}
@Test
public void schemeIncludesAllExpectedActions() throws Exception {
    ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();

    PBXTarget rootTarget = new PBXNativeTarget("rootRule");
    rootTarget.setGlobalID("rootGID");
    rootTarget.setProductReference(
        new PBXFileReference(
            "root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    rootTarget.setProductType(ProductType.STATIC_LIBRARY);

    // testTarget is created but intentionally not registered as a test of the
    // scheme; only testBundleTarget is.
    PBXTarget testTarget = new PBXNativeTarget("testRule");
    testTarget.setGlobalID("testGID");
    testTarget.setProductReference(
        new PBXFileReference(
            "test.a", "test.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
    testTarget.setProductType(ProductType.STATIC_LIBRARY);

    PBXTarget testBundleTarget = new PBXNativeTarget("testBundleRule");
    testBundleTarget.setGlobalID("testBundleGID");
    testBundleTarget.setProductReference(
        new PBXFileReference(
            "test.xctest",
            "test.xctest",
            PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
            Optional.empty()));
    testBundleTarget.setProductType(ProductType.UNIT_TEST);

    Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
    targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
    targetToProjectPathMapBuilder.put(testTarget, pbxprojectPath);
    targetToProjectPathMapBuilder.put(testBundleTarget, pbxprojectPath);

    SchemeGenerator schemeGenerator =
        new SchemeGenerator(
            projectFilesystem,
            Optional.of(rootTarget),
            ImmutableSet.of(rootTarget),
            ImmutableSet.of(testBundleTarget),
            ImmutableSet.of(testBundleTarget),
            "TestScheme",
            Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
            false /* parallelizeBuild */,
            Optional.empty() /* runnablePath */,
            Optional.empty() /* remoteRunnablePath */,
            SchemeActionType.DEFAULT_CONFIG_NAMES,
            targetToProjectPathMapBuilder.build(),
            XCScheme.LaunchAction.LaunchStyle.AUTO);

    Path schemePath = schemeGenerator.writeScheme();

    // Removed: leftover debug System.out.println of the scheme XML.
    DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
    DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
    Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));

    XPathFactory xpathFactory = XPathFactory.newInstance();

    // BuildAction: root target followed by the test bundle.
    XPath buildActionXpath = xpathFactory.newXPath();
    XPathExpression buildActionExpr =
        buildActionXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
    NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);

    List<String> expectedOrdering = ImmutableList.of("rootGID", "testBundleGID");
    List<String> actualOrdering = new ArrayList<>();
    for (int i = 0; i < buildActionNodes.getLength(); i++) {
        actualOrdering.add(buildActionNodes.item(i).getNodeValue());
    }
    assertThat(actualOrdering, equalTo(expectedOrdering));

    // TestAction references the xctest bundle.
    XPath testActionXpath = xpathFactory.newXPath();
    XPathExpression testActionExpr =
        testActionXpath.compile("//TestAction//BuildableReference/@BlueprintIdentifier");
    String testActionBlueprintIdentifier =
        (String) testActionExpr.evaluate(scheme, XPathConstants.STRING);
    assertThat(testActionBlueprintIdentifier, equalTo("testBundleGID"));

    // LaunchAction and ProfileAction both reference the primary (root) target.
    XPath launchActionXpath = xpathFactory.newXPath();
    XPathExpression launchActionExpr =
        launchActionXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
    String launchActionBlueprintIdentifier =
        (String) launchActionExpr.evaluate(scheme, XPathConstants.STRING);
    assertThat(launchActionBlueprintIdentifier, equalTo("rootGID"));

    XPath profileActionXpath = xpathFactory.newXPath();
    XPathExpression profileActionExpr =
        profileActionXpath.compile("//ProfileAction//BuildableReference/@BlueprintIdentifier");
    String profileActionBlueprintIdentifier =
        (String) profileActionExpr.evaluate(scheme, XPathConstants.STRING);
    assertThat(profileActionBlueprintIdentifier, equalTo("rootGID"));
}
@Test
public void buildableReferenceShouldHaveExpectedProperties() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildableReferenceXPath = xpathFactory.newXPath();
XPathExpression buildableReferenceExpr =
buildableReferenceXPath.compile("//BuildableReference");
NodeList buildableReferences =
(NodeList) buildableReferenceExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(buildableReferences.getLength(), greaterThan(0));
for (int i = 0; i < buildableReferences.getLength(); i++) {
NamedNodeMap attributes = buildableReferences.item(i).getAttributes();
assertThat(attributes, notNullValue());
assertThat(attributes.getNamedItem("BlueprintIdentifier"), notNullValue());
assertThat(attributes.getNamedItem("BuildableIdentifier"), notNullValue());
assertThat(attributes.getNamedItem("ReferencedContainer"), notNullValue());
assertThat(attributes.getNamedItem("BlueprintName"), notNullValue());
assertThat(attributes.getNamedItem("BuildableName"), notNullValue());
}
}
@Test
public void allActionsShouldBePresentInSchemeWithDefaultBuildConfigurations() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath schemeChildrenXPath = xpathFactory.newXPath();
XPathExpression schemeChildrenExpr = schemeChildrenXPath.compile("/Scheme/node()");
NodeList actions = (NodeList) schemeChildrenExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(actions.getLength(), equalTo(6));
Node buildAction = actions.item(0);
assertThat(buildAction.getNodeName(), equalTo("BuildAction"));
assertThat(buildAction.getAttributes().getNamedItem("buildConfiguration"), nullValue());
Node testAction = actions.item(1);
assertThat(testAction.getNodeName(), equalTo("TestAction"));
assertThat(
testAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
equalTo("Debug"));
Node launchAction = actions.item(2);
assertThat(launchAction.getNodeName(), equalTo("LaunchAction"));
assertThat(
launchAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
equalTo("Debug"));
Node profileAction = actions.item(3);
assertThat(profileAction.getNodeName(), equalTo("ProfileAction"));
assertThat(
profileAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
equalTo("Release"));
Node analyzeAction = actions.item(4);
assertThat(analyzeAction.getNodeName(), equalTo("AnalyzeAction"));
assertThat(
analyzeAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
equalTo("Debug"));
Node archiveAction = actions.item(5);
assertThat(archiveAction.getNodeName(), equalTo("ArchiveAction"));
assertThat(
archiveAction.getAttributes().getNamedItem("buildConfiguration").getNodeValue(),
equalTo("Release"));
}
@Test
public void schemeIsRewrittenIfContentsHaveChanged() throws IOException {
{
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
clock.setCurrentTimeMillis(49152);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
assertThat(
projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
}
{
PBXTarget rootTarget = new PBXNativeTarget("rootRule2");
rootTarget.setGlobalID("root2GID");
rootTarget.setProductReference(
new PBXFileReference(
"root2.a", "root2.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
clock.setCurrentTimeMillis(64738);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
ImmutableMap.of(rootTarget, pbxprojectPath),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
assertThat(
projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(64738L)));
}
}
@Test
public void schemeIsNotRewrittenIfContentsHaveNotChanged() throws IOException {
{
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
clock.setCurrentTimeMillis(49152);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
ImmutableMap.of(rootTarget, pbxprojectPath),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
assertThat(
projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
}
{
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
clock.setCurrentTimeMillis(64738);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
ImmutableMap.of(rootTarget, pbxprojectPath),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
assertThat(
projectFilesystem.getLastModifiedTime(schemePath), equalTo(FileTime.fromMillis(49152L)));
}
}
@Test
public void schemeWithNoPrimaryRuleCanIncludeTests() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget testLibraryTarget = new PBXNativeTarget("testLibrary");
testLibraryTarget.setGlobalID("testLibraryGID");
testLibraryTarget.setProductReference(
new PBXFileReference(
"lib.a", "lib.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testLibraryTarget.setProductType(ProductType.STATIC_LIBRARY);
PBXTarget testTarget = new PBXNativeTarget("testRule");
testTarget.setGlobalID("testGID");
testTarget.setProductReference(
new PBXFileReference(
"test.a", "test.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
testTarget.setProductType(ProductType.STATIC_LIBRARY);
PBXTarget testBundleTarget = new PBXNativeTarget("testBundleRule");
testBundleTarget.setGlobalID("testBundleGID");
testBundleTarget.setProductReference(
new PBXFileReference(
"test.xctest",
"test.xctest",
PBXReference.SourceTree.BUILT_PRODUCTS_DIR,
Optional.empty()));
testBundleTarget.setProductType(ProductType.UNIT_TEST);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(testLibraryTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testTarget, pbxprojectPath);
targetToProjectPathMapBuilder.put(testBundleTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.empty(),
ImmutableSet.of(),
ImmutableSet.of(testBundleTarget),
ImmutableSet.of(testBundleTarget),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
String schemeXml = projectFilesystem.readFileIfItExists(schemePath).get();
System.out.println(schemeXml);
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildActionXpath = xpathFactory.newXPath();
XPathExpression buildActionExpr =
buildActionXpath.compile("//BuildAction//BuildableReference/@BlueprintIdentifier");
NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
List<String> expectedOrdering = ImmutableList.of("testBundleGID");
List<String> actualOrdering = new ArrayList<>();
for (int i = 0; i < buildActionNodes.getLength(); i++) {
actualOrdering.add(buildActionNodes.item(i).getNodeValue());
}
assertThat(actualOrdering, equalTo(expectedOrdering));
XPath testActionXpath = xpathFactory.newXPath();
XPathExpression testActionExpr =
testActionXpath.compile("//TestAction//BuildableReference/@BlueprintIdentifier");
String testActionBlueprintIdentifier =
(String) testActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(testActionBlueprintIdentifier, equalTo("testBundleGID"));
XPath launchActionXpath = xpathFactory.newXPath();
XPathExpression launchActionExpr =
launchActionXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
String launchActionBlueprintIdentifier =
(String) launchActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(launchActionBlueprintIdentifier, equalTo(""));
XPath launchActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression launchActionBuildConfigurationExpr =
launchActionBuildConfigurationXpath.compile("//LaunchAction//@buildConfiguration");
String launchActionBuildConfigurationBlueprintIdentifier =
(String) launchActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(launchActionBuildConfigurationBlueprintIdentifier, equalTo("Debug"));
XPath profileActionXpath = xpathFactory.newXPath();
XPathExpression profileActionExpr =
profileActionXpath.compile("//ProfileAction//BuildableReference/@BlueprintIdentifier");
String profileActionBlueprintIdentifier =
(String) profileActionExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(profileActionBlueprintIdentifier, equalTo(""));
XPath profileActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression profileActionBuildConfigurationExpr =
profileActionBuildConfigurationXpath.compile("//ProfileAction//@buildConfiguration");
String profileActionBuildConfigurationBlueprintIdentifier =
(String) profileActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(profileActionBuildConfigurationBlueprintIdentifier, equalTo("Release"));
XPath analyzeActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression analyzeActionBuildConfigurationExpr =
analyzeActionBuildConfigurationXpath.compile("//AnalyzeAction//@buildConfiguration");
String analyzeActionBuildConfigurationBlueprintIdentifier =
(String) analyzeActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(analyzeActionBuildConfigurationBlueprintIdentifier, equalTo("Debug"));
XPath archiveActionBuildConfigurationXpath = xpathFactory.newXPath();
XPathExpression archiveActionBuildConfigurationExpr =
archiveActionBuildConfigurationXpath.compile("//ArchiveAction//@buildConfiguration");
String archiveActionBuildConfigurationBlueprintIdentifier =
(String) archiveActionBuildConfigurationExpr.evaluate(scheme, XPathConstants.STRING);
assertThat(archiveActionBuildConfigurationBlueprintIdentifier, equalTo("Release"));
}
@Test
public void launchActionShouldNotContainRemoteRunnableWhenNotProvided() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath remoteRunnableLaunchActionXPath = xpathFactory.newXPath();
XPathExpression remoteRunnableLaunchActionExpr =
remoteRunnableLaunchActionXPath.compile("//LaunchAction/RemoteRunnable");
NodeList remoteRunnables =
(NodeList) remoteRunnableLaunchActionExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(remoteRunnables.getLength(), equalTo(0));
}
@Test
public void launchActionShouldContainRemoteRunnableWhenProvided() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
false /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.of("/RemoteApp") /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath remoteRunnableLaunchActionXPath = xpathFactory.newXPath();
XPathExpression remoteRunnableLaunchActionExpr =
remoteRunnableLaunchActionXPath.compile("//LaunchAction/RemoteRunnable");
NodeList remoteRunnables =
(NodeList) remoteRunnableLaunchActionExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(remoteRunnables.getLength(), equalTo(1));
Node remoteRunnable = remoteRunnables.item(0);
assertThat(
remoteRunnable.getAttributes().getNamedItem("runnableDebuggingMode").getNodeValue(),
equalTo("2"));
assertThat(
remoteRunnable.getAttributes().getNamedItem("BundleIdentifier").getNodeValue(),
equalTo("com.apple.springboard"));
assertThat(
remoteRunnable.getAttributes().getNamedItem("RemotePath").getNodeValue(),
equalTo("/RemoteApp"));
XPath buildXpath = xpathFactory.newXPath();
XPathExpression buildExpr =
buildXpath.compile("//LaunchAction//BuildableReference/@BlueprintIdentifier");
NodeList buildNodes = (NodeList) buildExpr.evaluate(scheme, XPathConstants.NODESET);
// Make sure both copies of the BuildableReference are present.
assertThat(buildNodes.getLength(), equalTo(2));
assertThat(buildNodes.item(0).getNodeValue(), equalTo("rootGID"));
assertThat(buildNodes.item(1).getNodeValue(), equalTo("rootGID"));
}
@Test
public void enablingParallelizeBuild() throws Exception {
ImmutableMap.Builder<PBXTarget, Path> targetToProjectPathMapBuilder = ImmutableMap.builder();
PBXTarget rootTarget = new PBXNativeTarget("rootRule");
rootTarget.setGlobalID("rootGID");
rootTarget.setProductReference(
new PBXFileReference(
"root.a", "root.a", PBXReference.SourceTree.BUILT_PRODUCTS_DIR, Optional.empty()));
rootTarget.setProductType(ProductType.STATIC_LIBRARY);
Path pbxprojectPath = Paths.get("foo/Foo.xcodeproj/project.pbxproj");
targetToProjectPathMapBuilder.put(rootTarget, pbxprojectPath);
SchemeGenerator schemeGenerator =
new SchemeGenerator(
projectFilesystem,
Optional.of(rootTarget),
ImmutableSet.of(rootTarget),
ImmutableSet.of(),
ImmutableSet.of(),
"TestScheme",
Paths.get("_gen/Foo.xcworkspace/scshareddata/xcshemes"),
true /* parallelizeBuild */,
Optional.empty() /* runnablePath */,
Optional.empty() /* remoteRunnablePath */,
SchemeActionType.DEFAULT_CONFIG_NAMES,
targetToProjectPathMapBuilder.build(),
XCScheme.LaunchAction.LaunchStyle.AUTO);
Path schemePath = schemeGenerator.writeScheme();
DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
DocumentBuilder dBuilder = dbFactory.newDocumentBuilder();
Document scheme = dBuilder.parse(projectFilesystem.newFileInputStream(schemePath));
XPathFactory xpathFactory = XPathFactory.newInstance();
XPath buildActionXpath = xpathFactory.newXPath();
XPathExpression buildActionExpr = buildActionXpath.compile("//BuildAction");
NodeList buildActionNodes = (NodeList) buildActionExpr.evaluate(scheme, XPathConstants.NODESET);
assertThat(buildActionNodes.getLength(), is(1));
Node buildActionNode = buildActionNodes.item(0);
assertThat(
buildActionNode.getAttributes().getNamedItem("buildImplicitDependencies").getNodeValue(),
equalTo("YES"));
assertThat(
buildActionNode.getAttributes().getNamedItem("parallelizeBuildables").getNodeValue(),
equalTo("YES"));
}
}
| |
//
// ========================================================================
// Copyright (c) 1995-2014 Mort Bay Consulting Pty. Ltd.
// ------------------------------------------------------------------------
// All rights reserved. This program and the accompanying materials
// are made available under the terms of the Eclipse Public License v1.0
// and Apache License v2.0 which accompanies this distribution.
//
// The Eclipse Public License is available at
// http://www.eclipse.org/legal/epl-v10.html
//
// The Apache License v2.0 is available at
// http://www.opensource.org/licenses/apache2.0.php
//
// You may elect to redistribute this code under either of these licenses.
// ========================================================================
//
package org.eclipse.jetty.util.component;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.concurrent.CopyOnWriteArrayList;
import org.eclipse.jetty.util.log.Log;
import org.eclipse.jetty.util.log.Logger;
/**
* An AggregateLifeCycle is an {@link LifeCycle} implementation for a collection of contained beans.
* <p>
* Beans can be added to the AggregateLifeCycle either as managed beans or as unmanaged beans. A managed bean is started, stopped and destroyed with the aggregate.
* An unmanaged bean is associated with the aggregate for the purposes of {@link #dump()}, but its lifecycle must be managed externally.
* <p>
* When a bean is added, if it is a {@link LifeCycle} and it is already started, then it is assumed to be an unmanaged bean.
* Otherwise the methods {@link #addBean(Object, boolean)}, {@link #manage(Object)} and {@link #unmanage(Object)} can be used to
* explicitly control the life cycle relationship.
* <p>
* If adding a bean that is shared between multiple {@link AggregateLifeCycle} instances, then it should be started before being added, so it is unmanaged, or
* the API must be used to explicitly set it as unmanaged.
* <p>
*/
public class AggregateLifeCycle extends AbstractLifeCycle implements Destroyable, Dumpable
{
    private static final Logger LOG = Log.getLogger(AggregateLifeCycle.class);

    // Contained beans in the order they were added. CopyOnWriteArrayList allows
    // safe iteration while beans are concurrently added or removed.
    private final List<Bean> _beans=new CopyOnWriteArrayList<Bean>();

    // Set true once doStart() has started the managed beans, so that managed
    // beans added afterwards are started immediately by addBean(); cleared at
    // the top of doStop().
    private boolean _started=false;

    /* Pairs a contained bean with its managed/unmanaged state. */
    private class Bean
    {
        Bean(Object b)
        {
            _bean=b;
        }

        final Object _bean;             // the contained bean itself
        volatile boolean _managed=true; // true if this aggregate controls the bean's lifecycle

        public String toString()
        {
            return "{"+_bean+","+_managed+"}";
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Start the managed lifecycle beans in the order they were added.
     * @see org.eclipse.jetty.util.component.AbstractLifeCycle#doStart()
     */
    @Override
    protected void doStart() throws Exception
    {
        for (Bean b:_beans)
        {
            if (b._managed && b._bean instanceof LifeCycle)
            {
                LifeCycle l=(LifeCycle)b._bean;
                if (!l.isRunning())
                    l.start();
            }
        }
        // indicate that we are started, so that addBean will start other beans added.
        _started=true;
        super.doStart();
    }

    /* ------------------------------------------------------------ */
    /**
     * Stop the managed lifecycle beans in the reverse order they were added.
     * @see org.eclipse.jetty.util.component.AbstractLifeCycle#doStop()
     */
    @Override
    protected void doStop() throws Exception
    {
        _started=false;
        super.doStop();
        // Stop over a reversed snapshot so beans stop in the opposite order to starting.
        List<Bean> reverse = new ArrayList<Bean>(_beans);
        Collections.reverse(reverse);
        for (Bean b:reverse)
        {
            if (b._managed && b._bean instanceof LifeCycle)
            {
                LifeCycle l=(LifeCycle)b._bean;
                if (l.isRunning())
                    l.stop();
            }
        }
    }

    /* ------------------------------------------------------------ */
    /**
     * Destroy the managed Destroyable beans in the reverse order they were added,
     * then forget all beans (managed and unmanaged alike).
     * @see org.eclipse.jetty.util.component.Destroyable#destroy()
     */
    public void destroy()
    {
        List<Bean> reverse = new ArrayList<Bean>(_beans);
        Collections.reverse(reverse);
        for (Bean b:reverse)
        {
            // Only managed beans are destroyed; unmanaged beans are owned externally.
            if (b._bean instanceof Destroyable && b._managed)
            {
                Destroyable d=(Destroyable)b._bean;
                d.destroy();
            }
        }
        _beans.clear();
    }

    /* ------------------------------------------------------------ */
    /** Is the bean contained in the aggregate.
     * @param bean the bean to look for (compared by identity, not equals)
     * @return True if the aggregate contains the bean
     */
    public boolean contains(Object bean)
    {
        for (Bean b:_beans)
            if (b._bean==bean)
                return true;
        return false;
    }

    /* ------------------------------------------------------------ */
    /** Is the bean managed by the aggregate.
     * @param bean the bean to look for (compared by identity, not equals)
     * @return True if the aggregate contains the bean and it is managed
     */
    public boolean isManaged(Object bean)
    {
        for (Bean b:_beans)
            if (b._bean==bean)
                return b._managed;
        return false;
    }

    /* ------------------------------------------------------------ */
    /**
     * Add an associated bean.
     * If the bean is a {@link LifeCycle}, then it will be managed if it is not
     * already started and unmanaged if it is already started. The {@link #addBean(Object, boolean)}
     * method should be used if this is not correct, or the {@link #manage(Object)} and {@link #unmanage(Object)}
     * methods may be used after an add to change the status.
     * @param o the bean object to add
     * @return true if the bean was added or false if it has already been added.
     */
    public boolean addBean(Object o)
    {
        // beans are joined unless they are started lifecycles
        return addBean(o,!((o instanceof LifeCycle)&&((LifeCycle)o).isStarted()));
    }

    /* ------------------------------------------------------------ */
    /** Add an associated lifecycle.
     * @param o The lifecycle to add
     * @param managed True if the LifeCycle is to be managed, otherwise its
     *   lifecycle must be driven externally.
     * @return true if bean was added, false if already present.
     */
    public boolean addBean(Object o, boolean managed)
    {
        if (contains(o))
            return false;
        Bean b = new Bean(o);
        b._managed=managed;
        _beans.add(b);
        if (o instanceof LifeCycle)
        {
            LifeCycle l=(LifeCycle)o;
            // Start the bean if we are started
            if (managed && _started)
            {
                try
                {
                    l.start();
                }
                catch(Exception e)
                {
                    // callers of addBean do not expect checked exceptions
                    throw new RuntimeException (e);
                }
            }
        }
        return true;
    }

    /* ------------------------------------------------------------ */
    /**
     * Manage a bean by this aggregate, so that it is started/stopped/destroyed with the
     * aggregate lifecycle.
     * @param bean The bean to manage (must already have been added).
     * @throws IllegalArgumentException if the bean was never added.
     */
    public void manage(Object bean)
    {
        for (Bean b :_beans)
        {
            if (b._bean==bean)
            {
                b._managed=true;
                return;
            }
        }
        throw new IllegalArgumentException();
    }

    /* ------------------------------------------------------------ */
    /**
     * Unmanage a bean by this aggregate, so that it is not started/stopped/destroyed with the
     * aggregate lifecycle.
     * @param bean The bean to unmanage (must already have been added).
     * @throws IllegalArgumentException if the bean was never added.
     */
    public void unmanage(Object bean)
    {
        for (Bean b :_beans)
        {
            if (b._bean==bean)
            {
                b._managed=false;
                return;
            }
        }
        throw new IllegalArgumentException();
    }

    /* ------------------------------------------------------------ */
    /** Get dependent beans
     * @return List of all contained beans (managed and unmanaged).
     */
    public Collection<Object> getBeans()
    {
        return getBeans(Object.class);
    }

    /* ------------------------------------------------------------ */
    /** Get dependent beans of a specific class
     * @see #addBean(Object)
     * @param clazz the class that returned beans must be instances of
     * @return List of beans.
     */
    public <T> List<T> getBeans(Class<T> clazz)
    {
        ArrayList<T> beans = new ArrayList<T>();
        for (Bean b:_beans)
        {
            if (clazz.isInstance(b._bean))
                beans.add((T)(b._bean)); // cast is safe: guarded by isInstance above
        }
        return beans;
    }

    /* ------------------------------------------------------------ */
    /** Get dependent beans of a specific class.
     * If more than one bean of the type exist, the first is returned.
     * @see #addBean(Object)
     * @param clazz the class that the returned bean must be an instance of
     * @return bean or null
     */
    public <T> T getBean(Class<T> clazz)
    {
        for (Bean b:_beans)
        {
            if (clazz.isInstance(b._bean))
                return (T)b._bean; // cast is safe: guarded by isInstance above
        }
        return null;
    }

    /* ------------------------------------------------------------ */
    /**
     * Remove all associated beans, without stopping or destroying them.
     */
    public void removeBeans ()
    {
        _beans.clear();
    }

    /* ------------------------------------------------------------ */
    /**
     * Remove an associated bean (compared by identity), without stopping it.
     * @param o the bean to remove
     * @return true if the bean was found and removed, false otherwise
     */
    public boolean removeBean (Object o)
    {
        Iterator<Bean> i = _beans.iterator();
        while(i.hasNext())
        {
            Bean b=i.next();
            if (b._bean==o)
            {
                // remove via the list: COW-list iterators do not support remove()
                _beans.remove(b);
                return true;
            }
        }
        return false;
    }

    /* ------------------------------------------------------------ */
    /** Dump this component's state tree to System.err. */
    public void dumpStdErr()
    {
        try
        {
            dump(System.err,"");
        }
        catch (IOException e)
        {
            LOG.warn(e);
        }
    }

    /* ------------------------------------------------------------ */
    /** @return this component's state tree rendered as a String */
    public String dump()
    {
        return dump(this);
    }

    /* ------------------------------------------------------------ */
    /** Render any Dumpable's state tree as a String.
     * @param dumpable the component to dump
     * @return the rendered dump (empty on IOException, which is only logged)
     */
    public static String dump(Dumpable dumpable)
    {
        StringBuilder b = new StringBuilder();
        try
        {
            dumpable.dump(b,"");
        }
        catch (IOException e)
        {
            LOG.warn(e);
        }
        return b.toString();
    }

    /* ------------------------------------------------------------ */
    /** Dump this component's state tree to the given Appendable with no indent. */
    public void dump(Appendable out) throws IOException
    {
        dump(out,"");
    }

    /* ------------------------------------------------------------ */
    /** Append a one line summary of this component (toString plus lifecycle state). */
    protected void dumpThis(Appendable out) throws IOException
    {
        out.append(String.valueOf(this)).append(" - ").append(getState()).append("\n");
    }

    /* ------------------------------------------------------------ */
    /** Append a one line summary of an arbitrary object, including its lifecycle
     * state if it is a LifeCycle. Failures while rendering are reported inline
     * rather than propagated, so a broken toString cannot abort the dump.
     */
    public static void dumpObject(Appendable out,Object o) throws IOException
    {
        try
        {
            if (o instanceof LifeCycle)
                out.append(String.valueOf(o)).append(" - ").append((AbstractLifeCycle.getState((LifeCycle)o))).append("\n");
            else
                out.append(String.valueOf(o)).append("\n");
        }
        catch(Throwable th)
        {
            out.append(" => ").append(th.toString()).append('\n');
        }
    }

    /* ------------------------------------------------------------ */
    /** Dump this component and its beans as an indented tree. Managed Dumpable
     * beans are recursed into; everything else gets a one line summary.
     */
    public void dump(Appendable out,String indent) throws IOException
    {
        dumpThis(out);
        int size=_beans.size();
        if (size==0)
            return;
        int i=0;
        for (Bean b : _beans)
        {
            i++;
            out.append(indent).append(" +- ");
            if (b._managed)
            {
                if (b._bean instanceof Dumpable)
                    ((Dumpable)b._bean).dump(out,indent+(i==size?" ":" | "));
                else
                    dumpObject(out,b._bean);
            }
            else
                dumpObject(out,b._bean);
        }
        // NOTE(review): i can only differ from size if _beans shrank mid-iteration
        // (the COW iterator sees the original snapshot) — presumably a continuation
        // marker for that case; confirm.
        if (i!=size)
            out.append(indent).append(" |\n");
    }

    /* ------------------------------------------------------------ */
    /** Dump the members of one or more collections as an indented tree, recursing
     * into Dumpable members. The total size across all collections is used to
     * pick the last-entry indent.
     */
    public static void dump(Appendable out,String indent,Collection<?>... collections) throws IOException
    {
        if (collections.length==0)
            return;
        int size=0;
        for (Collection<?> c : collections)
            size+=c.size();
        if (size==0)
            return;
        int i=0;
        for (Collection<?> c : collections)
        {
            for (Object o : c)
            {
                i++;
                out.append(indent).append(" +- ");
                if (o instanceof Dumpable)
                    ((Dumpable)o).dump(out,indent+(i==size?" ":" | "));
                else
                    dumpObject(out,o);
            }
            if (i!=size)
                out.append(indent).append(" |\n");
        }
    }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.search.type;
import org.apache.lucene.search.ScoreDoc;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.search.*;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.component.AbstractComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AtomicArray;
import org.elasticsearch.search.action.SearchServiceListener;
import org.elasticsearch.search.action.SearchServiceTransportAction;
import org.elasticsearch.search.controller.SearchPhaseController;
import org.elasticsearch.search.fetch.QueryFetchSearchResult;
import org.elasticsearch.search.internal.InternalSearchHits;
import org.elasticsearch.search.internal.InternalSearchResponse;
import org.elasticsearch.threadpool.ThreadPool;
import java.io.IOException;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import static org.elasticsearch.action.search.type.TransportSearchHelper.internalScrollSearchRequest;
/**
*
*/
public class TransportSearchScrollScanAction extends AbstractComponent {
private final ThreadPool threadPool;
private final ClusterService clusterService;
private final SearchServiceTransportAction searchService;
private final SearchPhaseController searchPhaseController;
@Inject
public TransportSearchScrollScanAction(Settings settings, ThreadPool threadPool, ClusterService clusterService,
SearchServiceTransportAction searchService, SearchPhaseController searchPhaseController) {
super(settings);
this.threadPool = threadPool;
this.clusterService = clusterService;
this.searchService = searchService;
this.searchPhaseController = searchPhaseController;
}
public void execute(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
new AsyncAction(request, scrollId, listener).start();
}
private class AsyncAction {
private final SearchScrollRequest request;
private final ActionListener<SearchResponse> listener;
private final ParsedScrollId scrollId;
private final DiscoveryNodes nodes;
private volatile AtomicArray<ShardSearchFailure> shardFailures;
private final AtomicArray<QueryFetchSearchResult> queryFetchResults;
private final AtomicInteger successfulOps;
private final AtomicInteger counter;
private final long startTime = System.currentTimeMillis();
private AsyncAction(SearchScrollRequest request, ParsedScrollId scrollId, ActionListener<SearchResponse> listener) {
this.request = request;
this.listener = listener;
this.scrollId = scrollId;
this.nodes = clusterService.state().nodes();
this.successfulOps = new AtomicInteger(scrollId.getContext().length);
this.counter = new AtomicInteger(scrollId.getContext().length);
this.queryFetchResults = new AtomicArray<QueryFetchSearchResult>(scrollId.getContext().length);
}
protected final ShardSearchFailure[] buildShardFailures() {
if (shardFailures == null) {
return ShardSearchFailure.EMPTY_ARRAY;
}
List<AtomicArray.Entry<ShardSearchFailure>> entries = shardFailures.asList();
ShardSearchFailure[] failures = new ShardSearchFailure[entries.size()];
for (int i = 0; i < failures.length; i++) {
failures[i] = entries.get(i).value;
}
return failures;
}
// we do our best to return the shard failures, but its ok if its not fully concurrently safe
// we simply try and return as much as possible
protected final void addShardFailure(final int shardIndex, ShardSearchFailure failure) {
if (shardFailures == null) {
shardFailures = new AtomicArray<ShardSearchFailure>(scrollId.getContext().length);
}
shardFailures.set(shardIndex, failure);
}
public void start() {
if (scrollId.getContext().length == 0) {
final InternalSearchResponse internalResponse = new InternalSearchResponse(new InternalSearchHits(InternalSearchHits.EMPTY, Long.parseLong(this.scrollId.getAttributes().get("total_hits")), 0.0f), null, null, null, false);
listener.onResponse(new SearchResponse(internalResponse, request.scrollId(), 0, 0, 0l, buildShardFailures()));
return;
}
int localOperations = 0;
Tuple<String, Long>[] context = scrollId.getContext();
for (int i = 0; i < context.length; i++) {
Tuple<String, Long> target = context[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null) {
if (nodes.localNodeId().equals(node.id())) {
localOperations++;
} else {
executePhase(i, node, target.v2());
}
} else {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
}
if (localOperations > 0) {
if (request.operationThreading() == SearchOperationThreading.SINGLE_THREAD) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
Tuple<String, Long> target = context1[i];
DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
executePhase(i, node, target.v2());
}
}
}
});
} else {
boolean localAsync = request.operationThreading() == SearchOperationThreading.THREAD_PER_SHARD;
Tuple<String, Long>[] context1 = scrollId.getContext();
for (int i = 0; i < context1.length; i++) {
final Tuple<String, Long> target = context1[i];
final int shardIndex = i;
final DiscoveryNode node = nodes.get(target.v1());
if (node != null && nodes.localNodeId().equals(node.id())) {
try {
if (localAsync) {
threadPool.executor(ThreadPool.Names.SEARCH).execute(new Runnable() {
@Override
public void run() {
executePhase(shardIndex, node, target.v2());
}
});
} else {
executePhase(shardIndex, node, target.v2());
}
} catch (Throwable t) {
onPhaseFailure(t, target.v2(), shardIndex);
}
}
}
}
}
for (Tuple<String, Long> target : scrollId.getContext()) {
DiscoveryNode node = nodes.get(target.v1());
if (node == null) {
if (logger.isDebugEnabled()) {
logger.debug("Node [" + target.v1() + "] not available for scroll request [" + scrollId.getSource() + "]");
}
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
} else {
}
}
}
void executePhase(final int shardIndex, DiscoveryNode node, final long searchId) {
searchService.sendExecuteScan(node, internalScrollSearchRequest(searchId, request), new SearchServiceListener<QueryFetchSearchResult>() {
@Override
public void onResult(QueryFetchSearchResult result) {
queryFetchResults.set(shardIndex, result);
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
@Override
public void onFailure(Throwable t) {
onPhaseFailure(t, searchId, shardIndex);
}
});
}
void onPhaseFailure(Throwable t, long searchId, int shardIndex) {
if (logger.isDebugEnabled()) {
logger.debug("[{}] Failed to execute query phase", t, searchId);
}
addShardFailure(shardIndex, new ShardSearchFailure(t));
successfulOps.decrementAndGet();
if (counter.decrementAndGet() == 0) {
finishHim();
}
}
private void finishHim() {
try {
innerFinishHim();
} catch (Throwable e) {
ReduceSearchPhaseException failure = new ReduceSearchPhaseException("fetch", "", e, buildShardFailures());
if (logger.isDebugEnabled()) {
logger.debug("failed to reduce search", failure);
}
listener.onFailure(failure);
}
}
private void innerFinishHim() throws IOException {
int numberOfHits = 0;
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
numberOfHits += entry.value.queryResult().topDocs().scoreDocs.length;
}
ScoreDoc[] docs = new ScoreDoc[numberOfHits];
int counter = 0;
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
ScoreDoc[] scoreDocs = entry.value.queryResult().topDocs().scoreDocs;
for (ScoreDoc scoreDoc : scoreDocs) {
scoreDoc.shardIndex = entry.index;
docs[counter++] = scoreDoc;
}
}
final InternalSearchResponse internalResponse = searchPhaseController.merge(docs, queryFetchResults, queryFetchResults);
((InternalSearchHits) internalResponse.hits()).totalHits = Long.parseLong(this.scrollId.getAttributes().get("total_hits"));
for (AtomicArray.Entry<QueryFetchSearchResult> entry : queryFetchResults.asList()) {
if (entry.value.queryResult().topDocs().scoreDocs.length < entry.value.queryResult().size()) {
// we found more than we want for this round, remove this from our scrolling
queryFetchResults.set(entry.index, null);
}
}
String scrollId = null;
if (request.scroll() != null) {
// we rebuild the scroll id since we remove shards that we finished scrolling on
scrollId = TransportSearchHelper.buildScrollId(this.scrollId.getType(), queryFetchResults, this.scrollId.getAttributes()); // continue moving the total_hits
}
listener.onResponse(new SearchResponse(internalResponse, scrollId, this.scrollId.getContext().length, successfulOps.get(),
System.currentTimeMillis() - startTime, buildShardFailures()));
}
}
}
| |
/*******************************************************************************
* Copyright 2002-2011, OpenNebula Project Leads (OpenNebula.org)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
import static org.junit.Assert.assertTrue;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.opennebula.client.Client;
import org.opennebula.client.OneResponse;
import org.opennebula.client.vnet.VirtualNetwork;
import org.opennebula.client.vnet.VirtualNetworkPool;
/**
 * Integration tests for {@link VirtualNetwork} and {@link VirtualNetworkPool}.
 * These run against a live OpenNebula front-end reachable through the default
 * {@link Client} configuration; each test gets a freshly allocated ranged
 * network (see {@link #setUp()}).
 */
public class VirtualNetworkTest
{
    private static VirtualNetwork vnet;
    private static VirtualNetworkPool vnetPool;

    private static Client client;

    // Response of the most recent XML-RPC call, shared across assertions.
    private static OneResponse res;

    private static String name = "new_test_vnet";

    // Ranged (class C) network allocated fresh before every test.
    private static String template =
        "NAME = " + name + "\n"+
        "TYPE = RANGED\n" +
        "PUBLIC = NO\n" +
        "BRIDGE = vbr0\n" +
        "NETWORK_SIZE = C\n" +
        "NETWORK_ADDRESS = 192.168.0.0\n";

    // Fixed network with a single predefined lease; used by the lease tests.
    private static String fixed_template =
        "NAME = \"Net number one\"\n" +
        "TYPE = FIXED\n" +
        "BRIDGE = br1\n" +
        "LEASES = [IP=130.10.0.1]";

    /**
     * @throws java.lang.Exception
     */
    @BeforeClass
    public static void setUpBeforeClass() throws Exception
    {
        client = new Client();
        vnetPool = new VirtualNetworkPool(client);
    }

    /**
     * @throws java.lang.Exception
     */
    @AfterClass
    public static void tearDownAfterClass() throws Exception
    {
        // Nothing to clean up at class level.
    }

    /**
     * Allocates a fresh ranged network for the test about to run.
     *
     * @throws java.lang.Exception
     */
    @Before
    public void setUp() throws Exception
    {
        res = VirtualNetwork.allocate(client, template);
        // On allocation success the message is the new network id.
        int vnid = !res.isError() ? Integer.parseInt(res.getMessage()) : -1;
        vnet = new VirtualNetwork(vnid, client);
    }

    /**
     * Removes the network allocated in setUp().
     *
     * @throws java.lang.Exception
     */
    @After
    public void tearDown() throws Exception
    {
        vnet.delete();
    }

    @Test
    public void allocate()
    {
        // Allocation itself happens in setUp(); verify the network shows up
        // in the pool listing.
        vnetPool.info();

        boolean found = false;
        for(VirtualNetwork vn : vnetPool)
        {
            found = found || vn.getName().equals(name);
        }

        assertTrue( found );
    }

    @Test
    public void update()
    {
        res = vnet.info();
        assertTrue( !res.isError() );
        assertTrue( vnet.getName().equals(name) );
    }

    @Test
    public void attributes()
    {
        res = vnet.info();
        assertTrue( !res.isError() );

        assertTrue( vnet.xpath("NAME").equals(name) );
        assertTrue( vnet.xpath("BRIDGE").equals("vbr0") );
        assertTrue( vnet.xpath("TEMPLATE/NETWORK_ADDRESS").equals("192.168.0.0") );
        assertTrue( vnet.xpath("TEMPLATE/TYPE").equals("RANGED") );
    }

    @Test
    public void delete()
    {
        res = vnet.delete();
        assertTrue( !res.isError() );

        // Once deleted, info() must report an error.
        res = vnet.info();
        assertTrue( res.isError() );
    }

    @Test
    public void publish()
    {
        res = vnet.info();
        assertTrue( !res.isError() );
        assertTrue( !vnet.isPublic() );

        // Publish it
        res = vnet.publish();
        assertTrue( !res.isError() );

        // FIX: the info() refresh results were previously unchecked; a failed
        // refresh would leave stale state and make the isPublic() assertions
        // meaningless.
        res = vnet.info();
        assertTrue( !res.isError() );
        assertTrue( vnet.isPublic() );

        // Unpublish it
        res = vnet.unpublish();
        assertTrue( !res.isError() );

        res = vnet.info();
        assertTrue( !res.isError() );
        assertTrue( !vnet.isPublic() );
    }

    @Test
    public void addLeases()
    {
        res = VirtualNetwork.allocate(client, fixed_template);
        assertTrue( !res.isError() );

        VirtualNetwork fixed_vnet =
            new VirtualNetwork(Integer.parseInt(res.getMessage()), client);

        res = fixed_vnet.addLeases("130.10.0.5");
        assertTrue( !res.isError() );

        res = fixed_vnet.addLeases("130.10.0.6", "50:20:20:20:20:20");
        assertTrue( !res.isError() );

        // Adding the same lease twice must fail.
        res = fixed_vnet.addLeases("130.10.0.6");
        assertTrue( res.isError() );

        fixed_vnet.delete();
    }

    @Test
    public void rmLeases()
    {
        res = VirtualNetwork.allocate(client, fixed_template);
        assertTrue( !res.isError() );

        VirtualNetwork fixed_vnet =
            new VirtualNetwork(Integer.parseInt(res.getMessage()), client);

        res = fixed_vnet.rmLeases("130.10.0.1");
        assertTrue( !res.isError() );

        // Removing a lease that does not exist must fail.
        res = fixed_vnet.rmLeases("130.10.0.5");
        assertTrue( res.isError() );

        fixed_vnet.delete();
    }
}
| |
/**
* $URL$
* $Id$
*
* Copyright (c) 2009 The Sakai Foundation
*
* Licensed under the Educational Community License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.opensource.org/licenses/ECL-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.sakaiproject.portlets;
import org.imsglobal.basiclti.BasicLTIUtil;
import java.lang.Integer;
import java.io.PrintWriter;
import java.io.IOException;
import java.net.URL;
import java.net.URI;
import java.util.UUID;
import java.util.Properties;
import java.util.Map;
import java.util.List;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.Date;
import java.text.SimpleDateFormat;
import javax.portlet.GenericPortlet;
import javax.portlet.RenderRequest;
import javax.portlet.ActionRequest;
import javax.portlet.ActionResponse;
import javax.portlet.RenderResponse;
import javax.portlet.PortletRequest;
import javax.portlet.PortletException;
import javax.portlet.PortletPreferences;
import javax.portlet.PortletContext;
import javax.portlet.PortletRequestDispatcher;
import javax.portlet.PortletConfig;
import javax.portlet.PortletMode;
import javax.portlet.PortletSession;
import javax.portlet.ReadOnlyException;
import javax.servlet.ServletRequest;
import org.sakaiproject.thread_local.cover.ThreadLocalManager;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.sakaiproject.portlet.util.PortletHelper;
// Sakai APIs
import org.sakaiproject.component.cover.ComponentManager;
import org.sakaiproject.tool.cover.ToolManager;
import org.sakaiproject.tool.api.Session;
import org.sakaiproject.tool.cover.SessionManager;
import org.sakaiproject.site.api.ToolConfiguration;
import org.sakaiproject.tool.api.Placement;
import org.sakaiproject.site.api.Site;
import org.sakaiproject.site.api.SitePage;
import org.sakaiproject.site.cover.SiteService;
import org.sakaiproject.exception.IdUnusedException;
import org.sakaiproject.component.cover.ServerConfigurationService;
import org.sakaiproject.util.ResourceLoader;
import org.sakaiproject.util.FormattedText;
import org.sakaiproject.event.api.Event;
import org.sakaiproject.event.api.NotificationService;
//import org.sakaiproject.event.cover.EventTrackingService;
import org.sakaiproject.basiclti.LocalEventTrackingService;
import org.sakaiproject.basiclti.util.SakaiBLTIUtil;
import org.sakaiproject.basiclti.util.SimpleEncryption;
import org.sakaiproject.service.gradebook.shared.Assignment;
import org.sakaiproject.service.gradebook.shared.AssignmentHasIllegalPointsException;
import org.sakaiproject.service.gradebook.shared.CategoryDefinition;
import org.sakaiproject.service.gradebook.shared.GradebookService;
import org.sakaiproject.service.gradebook.shared.GradebookExternalAssessmentService;
import org.sakaiproject.service.gradebook.shared.ConflictingAssignmentNameException;
import org.sakaiproject.service.gradebook.shared.ConflictingExternalIdException;
import org.sakaiproject.service.gradebook.shared.GradebookNotFoundException;
/**
* a simple IMSBLTIPortlet Portlet
*/
@SuppressWarnings("deprecation")
public class IMSBLTIPortlet extends GenericPortlet {
// Localized strings for this portlet (the "basiclti" bundle).
private static ResourceLoader rb = new ResourceLoader("basiclti");
// Portlet context, captured in init().
private PortletContext pContext;
// Names of the configuration fields managed by the edit screens; populated in init().
private ArrayList<String> fieldList = new ArrayList<String>();
/** Our log (commons). */
private static Log M_log = LogFactory.getLog(IMSBLTIPortlet.class);
// Event name posted when the tool configuration changes.
public static final String EVENT_BASICLTI_CONFIG = "basiclti.config";
// Sentinel shown in the edit form so the real secret is never round-tripped to the browser.
private static String LEAVE_SECRET_ALONE = "__dont_change_secret__";
/** To turn on really verbose debugging */
private static boolean verbosePrint = false;
// Timestamp format used when recording the placement-secret creation date.
public static final String ISO_8601_FORMAT = "yyyy-MM-dd'T'HH:mm:ssz";
// ThreadLocal key under which the request filter stores the current HttpServletRequest.
public final static String CURRENT_HTTP_REQUEST = "org.sakaiproject.util.RequestFilter.http_request";
// sakai.properties key for the server/site display name, with its default.
public static final String SITE_NAME = "ui.service";
public static final String SAKAI = "Sakai";
/**
 * Portlet initialization: records the portlet context and builds the list
 * of configuration field names this portlet reads and persists.
 *
 * @param config the portlet configuration supplied by the container
 * @throws PortletException propagated from GenericPortlet.init()
 */
public void init(PortletConfig config) throws PortletException {
    super.init(config);
    pContext = config.getPortletContext();

    // The configuration fields managed by the edit screens, in display order.
    // ("maximize" was deliberately left out of the configurable list.)
    String[] fields = {
        "launch", "secret", "key", "xml", "frameheight", "debug",
        "pagetitle", "tooltitle", "custom", "releasename", "releaseemail",
        "assignment", "newpage", "allowsettings", "allowroster",
        "allowlori", "contentlink", "splash", "fa_icon"
    };
    for (String field : fields) {
        fieldList.add(field);
    }
}
// Simple debug print mechanism: optionally echo to stdout, always trace-log.
public void dPrint(String str)
{
    if (verbosePrint) {
        System.out.println(str);
    }
    M_log.trace(str);
}
// If the property is final, the property wins. If it is not final,
// the portlet preferences take precedence.
/**
 * Resolves the configured tool title for this placement, or null when no
 * title is configured at any level (see getCorrectProperty for precedence).
 */
public String getTitleString(RenderRequest request)
{
    String title = getCorrectProperty(request, "tooltitle", null);
    return title;
}
// Render the portlet - this is not supposed to change the state of the portlet
// Render may be called many times so if it changes the state - that is tacky
// Render will be called when someone presses "refresh" or when another portlet
// on the same page is handed an Action.
public void doView(RenderRequest request, RenderResponse response)
throws PortletException, IOException {
dPrint("==== doView called ====");
response.setContentType("text/html; charset=UTF-8");
// Grab that underlying request to get a GET parameter
ServletRequest req = (ServletRequest) ThreadLocalManager.get(CURRENT_HTTP_REQUEST);
// When "sakai.popup" is present the auto-open script below is skipped
// (presumably the portal has already handled the popup -- confirm).
String popupDone = req.getParameter("sakai.popup");
PrintWriter out = response.getWriter();
String title = getTitleString(request);
if ( title != null ) response.setTitle(title);
String context = getContext();
Placement placement = ToolManager.getCurrentPlacement();
// Get the properties
Properties sakaiProperties = getSakaiProperties();
String placementSecret = getSakaiProperty(sakaiProperties,"imsti.placementsecret");
String allowOutcomes = getSakaiProperty(sakaiProperties,"imsti.allowoutcomes");
String allowSettings = getSakaiProperty(sakaiProperties,"imsti.allowsettings");
String allowRoster = getSakaiProperty(sakaiProperties,"imsti.allowroster");
String allowLORI = getSakaiProperty(sakaiProperties,"imsti.allowlori");
// NOTE(review): key is "imsti.assignent" (missing 'm') -- looks like a typo
// for "imsti.assignment". Confirm whether other code reads/writes the same
// misspelled key before changing it; the value is unused below.
String assignment = getSakaiProperty(sakaiProperties,"imsti.assignent");
String launch = getSakaiProperty(sakaiProperties,"imsti.launch");
// Lazily generate and persist a per-placement secret the first time any
// extension service (outcomes/settings/roster/LORI) is enabled.
if ( placementSecret == null &&
( "on".equals(allowOutcomes) || "on".equals(allowSettings) ||
"on".equals(allowRoster) || "on".equals(allowLORI) ) ) {
String uuid = UUID.randomUUID().toString();
Date date = new Date();
SimpleDateFormat sdf = new SimpleDateFormat(ISO_8601_FORMAT);
String date_secret = sdf.format(date);
placement.getPlacementConfig().setProperty("imsti.placementsecret", uuid);
placement.getPlacementConfig().setProperty("imsti.placementsecretdate", date_secret);
placement.save();
}
// Check to see if our launch will be successful
String[] retval = SakaiBLTIUtil.postLaunchHTML(placement.getId(), rb);
// More than one element indicates the tool is configured well enough to launch.
if ( retval.length > 1 ) {
String iframeUrl = "/access/basiclti/site/"+context+"/"+placement.getId();
String frameHeight = getCorrectProperty(request, "frameheight", null);
dPrint("fh="+frameHeight);
String newPage = getCorrectProperty(request, "newpage", null);
String serverUrl = ServerConfigurationService.getServerUrl();
// An http:// launch from an https:// portal is forced into a popup,
// presumably to avoid mixed-content blocking in the iframe -- confirm.
boolean forcePopup = false;
if ( request.isSecure() || ( serverUrl != null && serverUrl.startsWith("https://") ) ) {
if ( launch != null && launch.startsWith("http://") ) {
forcePopup = true;
}
}
// Change "newpage" if forcePopup so the portal will do our pop up next time
if ( forcePopup && ! "on".equals(newPage) ) {
placement.getPlacementConfig().setProperty("imsti.newpage","on");
placement.save();
}
String maximize = getCorrectProperty(request, "maximize", null);
StringBuffer text = new StringBuffer();
Session session = SessionManager.getCurrentSession();
session.setAttribute("sakai:maximized-url",iframeUrl);
dPrint("Setting sakai:maximized-url="+iframeUrl);
if ( "on".equals(newPage) || forcePopup ) {
// Pop-up / new-page launch: emit an auto-open script plus a manual launch button.
String windowOpen = "window.open('"+iframeUrl+"','BasicLTI');";
if ( popupDone == null ) {
text.append("<p>\n");
text.append("<script type=\"text/javascript\">\n");
text.append(windowOpen+"\n");
text.append("</script>\n");
}
String siteName = ServerConfigurationService.getString(SITE_NAME, SAKAI);
title = title!=null ? title : rb.getString("tool.name", "your tool");
String newPageLaunchText = rb.getFormattedMessage("new.page.launch", new Object[]{FormattedText.escapeHtml(title, false), FormattedText.escapeHtml(siteName, false)});
text.append(newPageLaunchText);
text.append("</p>\n");
text.append("<input type=\"submit\" onclick=\""+windowOpen+"\" target=\"BasicLTI\" value=\"Launch " + title + "\"/>");
} else {
// In-page launch: render the tool inside an iframe (with noframes fallback link).
if ( "on".equals(maximize) ) {
text.append("<script type=\"text/javascript\" language=\"JavaScript\">\n");
text.append("try { portalMaximizeTool(); } catch (err) { }\n");
text.append("</script>\n");
}
text.append("<iframe ");
if ( frameHeight == null ) frameHeight = "1200";
text.append("height=\""+frameHeight+"\" \n");
text.append("width=\"100%\" frameborder=\"0\" marginwidth=\"0\"\n");
text.append("marginheight=\"0\" scrolling=\"auto\"\n");
text.append("src=\""+iframeUrl+"\">\n");
text.append(rb.getString("noiframes"));
text.append("<br>");
text.append("<a href=\""+iframeUrl+"\">");
text.append(rb.getString("noiframe.press.here"));
text.append("</a>\n");
text.append("</iframe>");
}
out.println(text);
dPrint("==== doView complete ====");
return;
} else {
out.println(rb.getString("not.configured"));
}
clearErrorMessage(request);
dPrint("==== doView complete ====");
}
// Prepare the edit screen with data
public void prepareEdit(RenderRequest request)
{
// Hand up the tool properties
Placement placement = ToolManager.getCurrentPlacement();
Properties config = placement.getConfig();
dPrint("placement="+ placement.getId());
dPrint("placement.toolId="+ placement.getToolId());
dPrint("properties="+ config);
// Any field with a value in sakai.properties (<toolId>.<field>) is frozen:
// mark it final so the edit form treats it as read-only.
for (String element : fieldList) {
String propertyName = placement.getToolId() + "." + element;
String propValue = ServerConfigurationService.getString(propertyName,null);
if ( "splash".equals(element) && propValue == null ) {
// "splash" may alternatively be frozen via <toolId>.overridesplash
propValue = ServerConfigurationService.getString(placement.getToolId() + ".overridesplash",null);
}
if ( propValue != null && propValue.trim().length() > 0 ) {
dPrint("Forcing Final = "+propertyName);
config.setProperty("final."+element,"true");
}
}
request.setAttribute("imsti.properties", config);
// Hand up the old values
Properties oldValues = new Properties();
// Values from a previously failed save (if any) so the form can re-display them.
Map map = getErrorMap(request);
String errorMsg = getErrorMessage(request);
request.setAttribute("error.message", errorMsg);
addProperty(oldValues, request, "launch", "");
for (String element : fieldList) {
if ( "launch".equals(element) ) continue;
String propKey = "imsti."+element;
// addProperty(oldValues, request, element, null);
String propValue = getCorrectProperty(request, element, null);
// Prefer the previously-submitted value over the stored one.
// NOTE(review): raw Map plus an unchecked String[] cast -- presumably the
// error map holds request-parameter arrays; confirm before tightening types.
if ( map != null ) {
if ( map.containsKey(propKey) ) {
Object obj = null;
try {
String[] arr = (String []) map.get(propKey);
obj = arr[0];
} catch(Exception e) {
obj = null;
}
if ( obj instanceof String ) propValue = (String) obj;
}
}
if ( propValue != null ) {
if ( "xml".equals(element)) {
// Escape ampersands so the XML descriptor survives the HTML form round-trip.
propValue = propValue.replace("&","&amp;");
}
if ( "secret".equals(element)) {
// Never send the real secret to the browser; show a sentinel instead.
propValue = LEAVE_SECRET_ALONE;
}
oldValues.setProperty(propKey, FormattedText.escapeHtml(propValue,false));
}
}
request.setAttribute("imsti.oldvalues", oldValues);
// Feature toggles from server configuration, exposed to the JSP.
String allowSettings = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED, SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED_DEFAULT);
request.setAttribute("allowSettings", new Boolean("true".equals(allowSettings)));
String allowRoster = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED, SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED_DEFAULT);
request.setAttribute("allowRoster", new Boolean("true".equals(allowRoster)));
String allowContentLink = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_CONTENTLINK_ENABLED, SakaiBLTIUtil.BASICLTI_CONTENTLINK_ENABLED_DEFAULT);
request.setAttribute("allowContentLink", new Boolean("true".equals(allowContentLink)));
// For outcomes and LORI we check for tools in the site before offering the options
String allowOutcomes = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED, SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED_DEFAULT);
if ( "true".equals(allowOutcomes) ) {
String outcomeProp = getCorrectProperty(request, "allowoutcomes", "on");
allowOutcomes = "on".equals(outcomeProp) ? "true" : "false";
}
String allowLori = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_LORI_ENABLED, SakaiBLTIUtil.BASICLTI_LORI_ENABLED_DEFAULT);
if ( "true".equals(allowLori) ) {
String loriProp = getCorrectProperty(request, "allowlori", "on");
allowLori = "on".equals(loriProp) ? "true" : "false";
}
// Only offer LORI when Lessons is in the site, and outcomes when a gradebook is.
boolean foundLessons = false;
boolean foundGradebook = false;
ToolConfiguration toolConfig = SiteService.findTool(placement.getId());
try {
Site site = SiteService.getSite(toolConfig.getSiteId());
for (SitePage page : (List<SitePage>)site.getPages()) {
for(ToolConfiguration tool : (List<ToolConfiguration>) page.getTools()) {
String tid = tool.getToolId();
if ( "sakai.lessonbuildertool".equals(tid) ) foundLessons = true;
if ( "sakai.gradebook.tool".equals(tid) || "sakai.gradebook.gwt.rpc".equals(tid) ) foundGradebook = true;
}
}
} catch (IdUnusedException ex) {
M_log.warn("Could not load site.");
}
if ( ! foundLessons ) allowLori = "false";
if ( ! foundGradebook ) allowOutcomes = "false";
request.setAttribute("allowOutcomes", new Boolean("true".equals(allowOutcomes)));
request.setAttribute("allowLori", new Boolean("true".equals(allowLori)));
if ( "true".equals(allowOutcomes) ) {
List<String> assignments = getGradeBookAssignments();
if ( assignments != null && assignments.size() > 0 ) request.setAttribute("assignments", assignments);
}
clearErrorMessage(request);
}
/**
 * Resolves one configuration value and, when present, stores it in the
 * supplied Properties under the "imsti." prefix. Absent values are skipped.
 */
public void addProperty(Properties values, RenderRequest request,
String propName, String defaultValue)
{
    String resolved = getCorrectProperty(request, propName, defaultValue);
    if (resolved == null) {
        return;
    }
    values.setProperty("imsti." + propName, resolved);
}
// Get Property - Precedence is frozen server configuration, sakai tool properties,
// portlet preferences, sakai tool properties, and then default
/**
 * Resolves a configuration value for the current placement.
 *
 * Lookup order (first hit wins):
 *  1. sakai.properties entry "<toolId>.<propName>" (always wins when non-blank)
 *  2. placement (tool) property "imsti.<propName>" -- only when marked
 *     frozen via "final.<propName>" = "true"
 *  3. portlet preference "imsti.<propName>"
 *  4. placement (tool) property "imsti.<propName>" (non-final this time)
 *  5. the supplied defaultValue (may be null)
 *
 * NOTE(review): steps 2 and 4 repeat the same lookup on purpose -- the first
 * pass honors the "final" freeze ahead of portlet preferences; the second is
 * the ordinary fallback. This matches the precedence comment above.
 */
public String getCorrectProperty(PortletRequest request, String propName, String defaultValue)
{
Placement placement = ToolManager.getCurrentPlacement();
String propertyName = placement.getToolId() + "." + propName;
String propValue = ServerConfigurationService.getString(propertyName,null);
if ( propValue != null && propValue.trim().length() > 0 ) {
// System.out.println("Sakai.home "+propName+"="+propValue);
return propValue;
}
Properties config = placement.getConfig();
propValue = getSakaiProperty(config, "imsti."+propName);
if ( propValue != null && "true".equals(config.getProperty("final."+propName)) )
{
// System.out.println("Frozen "+propName+" ="+propValue);
return propValue;
}
PortletPreferences prefs = request.getPreferences();
propValue = prefs.getValue("imsti."+propName, null);
if ( propValue != null ) {
// System.out.println("Portlet "+propName+" ="+propValue);
return propValue;
}
propValue = getSakaiProperty(config, "imsti."+propName);
if ( propValue != null ) {
// System.out.println("Tool "+propName+" ="+propValue);
return propValue;
}
if ( defaultValue != null ) {
// System.out.println("Default "+propName+" ="+defaultValue);
return defaultValue;
}
// System.out.println("Fell through "+propName);
return null;
}
// isPropertyFinal() - true when the value is frozen: it either comes from
// the server configuration (sakai.properties) or the placement config marks
// it final via "final.<propName>".
public boolean isPropertyFinal(String propName)
{
    Placement placement = ToolManager.getCurrentPlacement();

    // Frozen at the server level?
    String serverValue = ServerConfigurationService.getString(placement.getToolId() + "." + propName, null);
    if (serverValue != null && serverValue.trim().length() > 0) {
        return true;
    }

    // Frozen at the placement level?
    Properties config = placement.getConfig();
    String toolValue = getSakaiProperty(config, "imsti." + propName);
    return toolValue != null && "true".equals(config.getProperty("final." + propName));
}
/**
 * Renders the edit (configuration) view: either the reset-confirmation page
 * or the main edit form, depending on the "sakai.view" session state.
 */
public void doEdit(RenderRequest request, RenderResponse response)
throws PortletException, IOException {
    response.setContentType("text/html");
    dPrint("==== doEdit called ====");
    PortletSession pSession = request.getPortletSession(true);

    String title = getTitleString(request);
    if (title != null) {
        response.setTitle(title);
    }

    // Debug dump of the relevant session state
    String inputData = (String) pSession.getAttribute("sakai.descriptor");
    if (inputData != null) dPrint("descriptor.length()="+inputData.length());
    String url = (String) pSession.getAttribute("sakai.url");
    dPrint("sakai.url="+url);
    String view = (String) pSession.getAttribute("sakai.view");
    dPrint("sakai.view="+view);

    boolean resetView = "edit.reset".equals(view);
    if (!resetView) {
        prepareEdit(request);
    }
    sendToJSP(request, response, resetView ? "/editreset.jsp" : "/edit.jsp");

    clearErrorMessage(request);
    dPrint("==== doEdit called ====");
}
/**
 * Renders the help view.
 */
public void doHelp(RenderRequest request, RenderResponse response)
throws PortletException, IOException {
    dPrint("==== doHelp called ====");

    String title = getTitleString(request);
    if (title != null) {
        response.setTitle(title);
    }

    sendToJSP(request, response, "/help.jsp");
    clearErrorMessage(request);
    dPrint("==== doHelp done ====");
}
/**
 * Dispatches a portlet action based on the "sakai.action" request parameter.
 * Unrecognized (or absent) actions are ignored.
 */
public void processAction(ActionRequest request, ActionResponse response)
throws PortletException, IOException {
    dPrint("==== processAction called ====");

    String action = request.getParameter("sakai.action");
    dPrint("sakai.action = "+action);
    PortletSession pSession = request.getPortletSession(true);

    // Clear any error left over from a previous action
    clearErrorMessage(request);

    String view = (String) pSession.getAttribute("sakai.view");
    dPrint("sakai.view="+view);

    if (action != null) {
        if ("main".equals(action)) {
            response.setPortletMode(PortletMode.VIEW);
        } else if ("edit".equals(action)) {
            pSession.setAttribute("sakai.view", "edit");
        } else if ("edit.reset".equals(action)) {
            pSession.setAttribute("sakai.view", "edit.reset");
        } else if ("edit.setup".equals(action)) {
            pSession.setAttribute("sakai.view", "edit.setup");
        } else if ("edit.clear".equals(action)) {
            clearSession(request);
            response.setPortletMode(PortletMode.VIEW);
            pSession.setAttribute("sakai.view", "main");
        } else if ("edit.do.reset".equals(action)) {
            processActionReset(action, request, response);
        } else if ("edit.save".equals(action)) {
            processActionSave(action, request, response);
        }
    }

    dPrint("==== End of ProcessAction ====");
}
/**
 * Remove all tool-related attributes from the portlet session:
 * the fixed "sakai.*" keys plus one "sakai.&lt;field&gt;" entry per
 * configured field.
 *
 * @param request the portlet request whose session is cleared
 */
private void clearSession(PortletRequest request)
{
    PortletSession session = request.getPortletSession(true);

    // Fixed, well-known session keys used by this portlet.
    String[] fixedKeys = {
        "sakai.url",
        "sakai.widget",
        "sakai.descriptor",
        "sakai.attemptdescriptor"
    };
    for (String key : fixedKeys) {
        session.removeAttribute(key);
    }

    // Per-field keys mirror the configurable field list.
    for (String field : fieldList) {
        session.removeAttribute("sakai." + field);
    }
}
/**
 * Reset the tool configuration: clear the session and remove all stored
 * portlet preferences for this placement, then return to the edit view.
 *
 * @param action the action name (unused here, kept for handler symmetry)
 * @param request the portlet action request
 * @param response the portlet action response
 * @throws PortletException on portlet container errors
 * @throws IOException if storing the (now emptied) preferences fails
 */
public void processActionReset(String action,ActionRequest request, ActionResponse response)
throws PortletException, IOException {
// TODO: Check Role
dPrint("Removing preferences....");
clearSession(request);
PortletSession pSession = request.getPortletSession(true);
PortletPreferences prefs = request.getPreferences();
try {
prefs.reset("sakai.descriptor");
// Reset both key namespaces; older placements used the "imsti." prefix,
// newer ones use "sakai:imsti.".
for (String element : fieldList) {
prefs.reset("imsti."+element);
prefs.reset("sakai:imsti."+element);
}
dPrint("Preference removed");
} catch (ReadOnlyException e) {
// A read-only preference means we cannot reset; surface the error and bail
// without calling store().
setErrorMessage(request, rb.getString("error.modify.prefs")) ;
return;
}
// Persist the resets only after every key was cleared successfully.
prefs.store();
// Go back to the main edit page
pSession.setAttribute("sakai.view", "edit");
}
/**
 * Handler stub for the "edit" action. Intentionally a no-op: switching to
 * the edit view is handled directly in processAction() via the
 * "sakai.view" session attribute.
 */
public void processActionEdit(String action,ActionRequest request, ActionResponse response)
throws PortletException, IOException {
}
/**
 * Return the configuration properties of the current tool placement.
 *
 * NOTE(review): assumes a current placement exists on this thread;
 * ToolManager.getCurrentPlacement() returning null would cause an NPE here
 * — confirm callers only invoke this inside a tool request.
 *
 * @return the placement configuration Properties
 */
public Properties getSakaiProperties()
{
Placement placement = ToolManager.getCurrentPlacement();
return placement.getConfig();
}
/**
 * Look up a configuration property, normalizing empty or all-whitespace
 * values to null so callers can treat "unset" and "blank" identically.
 *
 * @param config the placement configuration to read from
 * @param key the property key
 * @return the property value, or null if absent or blank
 */
public String getSakaiProperty(Properties config, String key)
{
    String value = config.getProperty(key);
    if (value == null) {
        return null;
    }
    // Blank (whitespace-only) values count as unset.
    return (value.trim().length() < 1) ? null : value;
}
// Ensure that if we have frozen properties - we never accept form data
/**
 * Read a configuration value, preferring a frozen ("final") property over
 * user-submitted form data.
 *
 * Only when no property value exists, or the property is not marked final,
 * is the corresponding "imsti.&lt;propName&gt;" request parameter consulted.
 * The result is trimmed.
 *
 * @param request the portlet action request carrying the form data
 * @param sakaiProperties the placement configuration (currently unused here;
 *        kept for signature compatibility with callers)
 * @param propName the short property name (without the "imsti." prefix)
 * @return the trimmed effective value, or null if none
 */
public String getFormParameter(ActionRequest request, Properties sakaiProperties, String propName)
{
String propValue = getCorrectProperty(request, propName, null);
// Fall back to the submitted form value unless an admin froze the property.
if ( propValue == null || ! isPropertyFinal(propName) )
{
propValue = request.getParameter("imsti."+propName);
}
dPrint("Form/Final imsti."+propName+"="+propValue);
if (propValue != null ) propValue = propValue.trim();
return propValue;
}
/**
 * Validate and persist the edit form: launch URL or XML descriptor,
 * gradebook assignment, frame height, page title and all per-field
 * preferences. On any validation failure an error message is set and the
 * method returns without storing anything further.
 *
 * The statement order is significant: the placement secret must exist
 * before outcomes/settings/roster services are usable, and prefs.store()
 * is only called when something actually changed.
 *
 * @param action the action name (unused, kept for handler symmetry)
 * @param request the portlet action request with the form data
 * @param response the portlet action response
 * @throws PortletException on portlet container errors
 * @throws IOException if storing preferences fails
 */
public void processActionSave(String action,ActionRequest request, ActionResponse response)
throws PortletException, IOException {
PortletSession pSession = request.getPortletSession(true);
Properties sakaiProperties = getSakaiProperties();
String imsType = getFormParameter(request,sakaiProperties,"type");
String imsTIUrl = getFormParameter(request,sakaiProperties,"launch");
if ( imsTIUrl != null && imsTIUrl.trim().length() < 1 ) imsTIUrl = null;
String imsTIXml = getFormParameter(request,sakaiProperties,"xml");
if ( imsTIXml != null && imsTIXml.trim().length() < 1 ) imsTIXml = null;
// imsType will be null if launch or xml is coming from final properties
// URL and XML descriptor are mutually exclusive; the selected type wins.
if ( imsType != null ) {
if ( imsType.equalsIgnoreCase("XML") ) {
if ( imsTIXml != null ) imsTIUrl = null;
} else {
if ( imsTIUrl != null ) imsTIXml = null;
}
}
// Derive the effective launch URL: either the URL directly, or the launch
// URL extracted from the validated XML descriptor.
String launch_url = imsTIUrl;
if ( imsTIXml != null ) {
launch_url = BasicLTIUtil.validateDescriptor(imsTIXml);
if ( launch_url == null ) {
setErrorMessage(request, rb.getString("error.xml.input"));
return;
}
} else if ( imsTIUrl == null ) {
setErrorMessage(request, rb.getString("error.no.input") );
return;
} else if ( imsTIUrl.startsWith("http://") || imsTIUrl.startsWith("https://") ) {
// Parse as both URL and URI purely as a syntax sanity check.
try {
URL testUrl = new URL(imsTIUrl);
URI testUri = new URI(imsTIUrl);
}
catch(Exception e) {
setErrorMessage(request, rb.getString("error.bad.url") );
return;
}
} else {
setErrorMessage(request, rb.getString("error.bad.url") );
return;
}
// Prepare to store preferences
PortletPreferences prefs = request.getPreferences();
boolean changed = false;
// Make Sure the Assignment is a legal one
String assignment = getFormParameter(request,sakaiProperties,"assignment");
String newAssignment = getFormParameter(request,sakaiProperties,"newassignment");
String oldPlacementSecret = getSakaiProperty(sakaiProperties,"imsti.placementsecret");
String allowOutcomes = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED, SakaiBLTIUtil.BASICLTI_OUTCOMES_ENABLED_DEFAULT);
String allowSettings = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED, SakaiBLTIUtil.BASICLTI_SETTINGS_ENABLED_DEFAULT);
String allowRoster = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED, SakaiBLTIUtil.BASICLTI_ROSTER_ENABLED_DEFAULT);
String allowLori = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_LORI_ENABLED, SakaiBLTIUtil.BASICLTI_LORI_ENABLED_DEFAULT);
// Create a brand-new gradebook item if one was requested.
if ( "true".equals(allowOutcomes) && newAssignment != null && newAssignment.trim().length() > 1 ) {
if ( addGradeBookItem(request, newAssignment) ) {
// System.out.println("Success!");
assignment = newAssignment;
}
}
// System.out.println("old placementsecret="+oldPlacementSecret);
// Any of the LTI extension services needs a per-placement secret; generate
// one (with its creation timestamp) the first time it is needed.
if ( oldPlacementSecret == null &&
("true".equals(allowOutcomes) || "true".equals(allowSettings) ||
"true".equals(allowRoster) || "true".equals(allowLori) ) ) {
try {
String uuid = UUID.randomUUID().toString();
Date date = new Date();
SimpleDateFormat sdf = new SimpleDateFormat(ISO_8601_FORMAT);
String date_secret = sdf.format(date);
prefs.setValue("sakai:imsti.placementsecret", uuid);
prefs.setValue("sakai:imsti.placementsecretdate", date_secret);
// System.out.println("placementsecret set to="+uuid+" data="+date_secret);
changed = true;
} catch (ReadOnlyException e) {
setErrorMessage(request, rb.getString("error.modify.prefs") );
return;
}
}
// Verify the selected assignment actually exists in the gradebook.
if ( "true".equals(allowOutcomes) && assignment != null && assignment.trim().length() > 1 ) {
List<String> assignments = getGradeBookAssignments();
boolean found = false;
if ( assignments != null ) for ( String assn : assignments ) {
if ( assn.equals(assignment) ) {
found = true;
break;
}
}
if ( ! found ) {
setErrorMessage(request, rb.getString("error.gradable.badassign") +
" " + FormattedText.escapeHtml(assignment,false));
return;
}
}
// Frame height must be a non-negative integer when present.
String imsTIHeight = getFormParameter(request,sakaiProperties,"frameheight");
if ( imsTIHeight != null && imsTIHeight.trim().length() < 1 ) imsTIHeight = null;
if ( imsTIHeight != null ) {
try {
int x = Integer.parseInt(imsTIHeight);
if ( x < 0 ) {
setErrorMessage(request, rb.getString("error.bad.height") );
return;
}
}
catch(Exception e) {
setErrorMessage(request, rb.getString("error.bad.height") );
return;
}
}
// Passed the sanity checks - time to save it all!
String context = getContext();
Placement placement = ToolManager.getCurrentPlacement();
// Update the Page Title (button text)
String imsTIPageTitle = getFormParameter(request,sakaiProperties,"pagetitle");
String prefsPageTitle = prefs.getValue("sakai:imsti.pagetitle", null);
imsTIPageTitle = imsTIPageTitle == null ? "" : imsTIPageTitle.trim();
prefsPageTitle = prefsPageTitle == null ? "" : prefsPageTitle.trim();
if ( ! imsTIPageTitle.equals(prefsPageTitle) ) {
try {
// Page titles are capped at 99 characters before being applied.
if ( imsTIPageTitle.length() > 99 ) imsTIPageTitle = imsTIPageTitle.substring(0,99);
ToolConfiguration toolConfig = SiteService.findTool(placement.getId());
Site site = SiteService.getSite(toolConfig.getSiteId());
SitePage page = site.getPage(toolConfig.getPageId());
if ( imsTIPageTitle.length() > 1 ) {
page.setTitle(imsTIPageTitle.trim());
page.setTitleCustom(true);
} else {
// An empty title reverts the page to its default (non-custom) title.
page.setTitle("");
page.setTitleCustom(false);
}
SiteService.save(site);
} catch (Exception e) {
setErrorMessage(request, rb.getString("error.page.title"));
return;
}
}
// Store preferences
for (String element : fieldList) {
String formParm = getFormParameter(request,sakaiProperties,element);
if ( "assignment".equals(element) ) formParm = assignment;
if ( "secret".equals(element) ) {
// The sentinel value means "keep the stored secret" - skip the write.
if ( LEAVE_SECRET_ALONE.equals(formParm) ) continue;
String key = ServerConfigurationService.getString(SakaiBLTIUtil.BASICLTI_ENCRYPTION_KEY, null);
if (key != null) {
try {
if ( formParm != null && formParm.trim().length() > 0 ) {
formParm = SimpleEncryption.encrypt(key, formParm);
// BLTI-195 convert old-style encrypted secrets
prefs.reset("sakai:imsti.encryptedsecret");
}
} catch (RuntimeException re) {
M_log.warn("Failed to encrypt secret, falling back to plaintext: "+ re.getMessage());
}
}
}
try {
prefs.setValue("sakai:imsti."+element, formParm);
changed = true;
} catch (ReadOnlyException e) {
setErrorMessage(request, rb.getString("error.modify.prefs") );
return;
}
}
// Clear out the other setting
if ( imsType != null ) {
if ( imsType.equalsIgnoreCase("XML") ) {
if ( imsTIXml != null ) {
prefs.reset("sakai:imsti.launch");
changed = true;
}
} else {
if ( imsTIUrl != null ) {
prefs.reset("sakai:imsti.xml");
changed = true;
}
}
}
// track event and store
if ( changed ) {
// 2.6 Event Tracking
Event event = LocalEventTrackingService.newEvent(EVENT_BASICLTI_CONFIG, launch_url, context, true, NotificationService.NOTI_OPTIONAL);
// 2.5 Event Tracking
// Event event = EventTrackingService.newEvent(EVENT_BASICLTI_CONFIG, launch_url, true);
LocalEventTrackingService.post(event);
prefs.store();
}
pSession.setAttribute("sakai.view", "main");
response.setPortletMode(PortletMode.VIEW);
}
/**
 * Get the current site page our current tool is placed on.
 *
 * @return the site page id on which our tool is placed, or null when the
 *         placement cannot be resolved to a tool configuration.
 */
protected String getCurrentSitePageId()
{
    Placement placement = ToolManager.getCurrentPlacement();
    ToolConfiguration toolConfig = SiteService.findTool(placement.getId());
    return (toolConfig == null) ? null : toolConfig.getPageId();
}
// TODO: Local cleverness ???
/**
 * Dispatch rendering to a JSP page inside this portlet's context.
 *
 * A null or empty page path is silently ignored.
 *
 * @param request the portlet render request
 * @param response the portlet render response
 * @param jspPage the context-relative JSP path, e.g. "/edit.jsp"
 * @throws PortletException if the dispatcher include fails with an IOException
 */
private void sendToJSP(RenderRequest request, RenderResponse response,
String jspPage) throws PortletException {
    response.setContentType(request.getResponseContentType());
    if (jspPage != null && jspPage.length() != 0) {
        try {
            PortletRequestDispatcher dispatcher = pContext
                .getRequestDispatcher(jspPage);
            dispatcher.include(request, response);
        } catch (IOException e) {
            // Fixed typo in the exception message ("unabble" -> "unable").
            throw new PortletException("Sakai Dispatch unable to use "
                + jspPage, e);
        }
    }
}
// Error Message
/** Clear any pending error message for this request (delegates to PortletHelper). */
public void clearErrorMessage(PortletRequest request)
{
PortletHelper.clearErrorMessage(request);
}
/** Return the current error map for this request (delegates to PortletHelper). */
public Map getErrorMap(PortletRequest request)
{
return PortletHelper.getErrorMap(request);
}
/** Return the renderable error output for this request (delegates to PortletHelper). */
public String getErrorOutput(PortletRequest request)
{
return PortletHelper.getErrorOutput(request);
}
/** Record an error message for this request (delegates to PortletHelper). */
public void setErrorMessage(PortletRequest request, String errorMsg)
{
PortletHelper.setErrorMessage(request,errorMsg);
}
/** Return the current error message for this request (delegates to PortletHelper). */
public String getErrorMessage(PortletRequest request)
{
return PortletHelper.getErrorMessage(request);
}
/** Record an error message with its causing Throwable (delegates to PortletHelper). */
public void setErrorMessage(PortletRequest request, String errorMsg, Throwable t)
{
PortletHelper.setErrorMessage(request,errorMsg,t);
}
/**
 * Return the context (site id) of the current tool placement.
 */
private String getContext()
{
    return ToolManager.getCurrentPlacement().getContext();
}
// Create an item in the Gradebook
/**
 * Create a 100-point, instructor-maintained, released, graded assignment in
 * the site's gradebook.
 *
 * @param request the action request (used only to report errors)
 * @param assignmentName the name of the assignment to create
 * @return true if the assignment was created or already exists with that
 *         name; false when the gradebook is missing, the user lacks
 *         permission, or creation fails for another reason
 */
protected boolean addGradeBookItem(ActionRequest request, String assignmentName)
{
try
{
GradebookService g = (GradebookService) ComponentManager.get("org.sakaiproject.service.gradebook.GradebookService");
String gradebookUid = getContext();
// Require a defined gradebook plus edit/grading and grade-all permission.
if ( ! (g.isGradebookDefined(gradebookUid) && (g.currentUserHasEditPerm(gradebookUid) || g.currentUserHasGradingPerm(gradebookUid)) && g.currentUserHasGradeAllPerm(gradebookUid) ) ) return false;
// add assignment to gradebook
Assignment asn = new Assignment();
asn.setPoints(Double.valueOf(100));
asn.setExternallyMaintained(false);
asn.setName(assignmentName);
asn.setReleased(true);
asn.setUngraded(false);
g.addAssignment(gradebookUid, asn);
return true;
}
catch (ConflictingAssignmentNameException e)
{
// An assignment with this name already exists - treat as success.
return true;
}
catch (Exception e)
{
dPrint("GradebookNotFoundException (may be because GradeBook has not yet been added to the Site) " + e.getMessage());
setErrorMessage(request, rb.getString("error.gradable.badcreate") + ":" + e.getMessage() );
M_log.warn(this + ":addGradeItem " + e.getMessage());
}
return false;
}
// get all assignments from the Gradebook
/**
 * List the names of all instructor-maintained (non-external) assignments in
 * the site's gradebook.
 *
 * @return the assignment names; null when the gradebook is missing, the
 *         user lacks permission, or the gradebook cannot be found
 */
protected List<String> getGradeBookAssignments()
{
    List<String> names = new ArrayList<String>();
    try
    {
        GradebookService g = (GradebookService) ComponentManager
            .get("org.sakaiproject.service.gradebook.GradebookService");
        String gradebookUid = getContext();
        if ( ! (g.isGradebookDefined(gradebookUid) && (g.currentUserHasEditPerm(gradebookUid) || g.currentUserHasGradingPerm(gradebookUid)) && g.currentUserHasGradeAllPerm(gradebookUid) ) ) return null;

        // filtering out anything externally provided
        for (Object entry : g.getAssignments(gradebookUid))
        {
            org.sakaiproject.service.gradebook.shared.Assignment gAssignment =
                (org.sakaiproject.service.gradebook.shared.Assignment) entry;
            if ( ! gAssignment.isExternallyMaintained() )
            {
                names.add(gAssignment.getName());
            }
        }
        return names;
    }
    catch (GradebookNotFoundException e)
    {
        dPrint("GradebookNotFoundException (may be because GradeBook has not yet been added to the Site) " + e.getMessage());
        return null;
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.entries;
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
import java.util.concurrent.atomic.AtomicLongFieldUpdater;
import org.apache.geode.internal.cache.RegionEntryContext;
import org.apache.geode.internal.cache.Token;
import org.apache.geode.internal.offheap.OffHeapRegionEntryHelper;
import org.apache.geode.internal.offheap.annotations.Released;
import org.apache.geode.internal.offheap.annotations.Retained;
import org.apache.geode.internal.offheap.annotations.Unretained;
import org.apache.geode.internal.util.concurrent.CustomEntryConcurrentHashMap.HashEntry;
/*
* macros whose definition changes this class:
*
* disk: DISK lru: LRU stats: STATS versioned: VERSIONED offheap: OFFHEAP
*
* One of the following key macros must be defined:
*
* key object: KEY_OBJECT key int: KEY_INT key long: KEY_LONG key uuid: KEY_UUID key string1:
* KEY_STRING1 key string2: KEY_STRING2
*/
/**
* Do not modify this class. It was generated. Instead modify LeafRegionEntry.cpp and then run
* ./dev-tools/generateRegionEntryClasses.sh (it must be run from the top level directory).
*/
// NOTE(review): this class is machine-generated from LeafRegionEntry.cpp;
// only comments are added here - any code change belongs in the template.
public class VMThinRegionEntryOffHeapStringKey2 extends VMThinRegionEntryOffHeap {
// --------------------------------------- common fields ----------------------------------------
private static final AtomicLongFieldUpdater<VMThinRegionEntryOffHeapStringKey2> LAST_MODIFIED_UPDATER =
AtomicLongFieldUpdater.newUpdater(VMThinRegionEntryOffHeapStringKey2.class, "lastModified");
protected int hash;
private HashEntry<Object, Object> nextEntry;
@SuppressWarnings("unused")
private volatile long lastModified;
// --------------------------------------- offheap fields ---------------------------------------
/**
 * All access done using OFF_HEAP_ADDRESS_UPDATER so it is used even though the compiler can not
 * tell it is.
 */
@SuppressWarnings("unused")
@Retained
@Released
private volatile long offHeapAddress;
/**
 * I needed to add this because I wanted clear to call setValue which normally can only be called
 * while the re is synced. But if I sync in that code it causes a lock ordering deadlock with the
 * disk regions because they also get a rw lock in clear. Some hardware platforms do not support
 * CAS on a long. If gemfire is run on one of those the AtomicLongFieldUpdater does a sync on the
 * RegionEntry and we will once again be deadlocked. I don't know if we support any of the
 * hardware platforms that do not have a 64bit CAS. If we do then we can expect deadlocks on disk
 * regions.
 */
private static final AtomicLongFieldUpdater<VMThinRegionEntryOffHeapStringKey2> OFF_HEAP_ADDRESS_UPDATER =
AtomicLongFieldUpdater.newUpdater(VMThinRegionEntryOffHeapStringKey2.class, "offHeapAddress");
// --------------------------------------- key fields -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
/**
 * strlen is encoded in lowest 6 bits (max strlen is 63)<br>
 * character encoding info is in bits 7 and 8<br>
 * The other bits are used to encoded character data.
 */
private final long bits1;
/**
 * bits2 encodes character data
 */
private final long bits2;
public VMThinRegionEntryOffHeapStringKey2(final RegionEntryContext context, final String key,
@Retained final Object value, final boolean byteEncode) {
super(context, value);
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// caller has already confirmed that key.length <= MAX_INLINE_STRING_KEY
long tempBits1 = 0L;
long tempBits2 = 0L;
if (byteEncode) {
// Pack one byte per character: the first 7 chars into bits1, the rest into bits2.
for (int i = key.length() - 1; i >= 0; i--) {
// Note: we know each byte is <= 0x7f so the "& 0xff" is not needed. But I added it in to
// keep findbugs happy.
if (i < 7) {
tempBits1 |= (byte) key.charAt(i) & 0xff;
tempBits1 <<= 8;
} else {
tempBits2 <<= 8;
tempBits2 |= (byte) key.charAt(i) & 0xff;
}
}
// Set the encoding flag (bit 7) marking byte-encoded character data.
tempBits1 |= 1 << 6;
} else {
// Pack one 16-bit char per character: the first 3 chars into bits1, the rest into bits2.
for (int i = key.length() - 1; i >= 0; i--) {
if (i < 3) {
tempBits1 |= key.charAt(i);
tempBits1 <<= 16;
} else {
tempBits2 <<= 16;
tempBits2 |= key.charAt(i);
}
}
}
// The key length occupies the lowest 6 bits of bits1.
tempBits1 |= key.length();
this.bits1 = tempBits1;
this.bits2 = tempBits2;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public Token getValueAsToken() {
return OffHeapRegionEntryHelper.getValueAsToken(this);
}
@Override
protected Object getValueField() {
return OffHeapRegionEntryHelper._getValue(this);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
@Unretained
protected void setValueField(@Unretained final Object value) {
OffHeapRegionEntryHelper.setValue(this, value);
}
@Override
@Retained
public Object getValueRetain(final RegionEntryContext context, final boolean decompress) {
return OffHeapRegionEntryHelper._getValueRetain(this, decompress, context);
}
@Override
public long getAddress() {
return OFF_HEAP_ADDRESS_UPDATER.get(this);
}
@Override
public boolean setAddress(final long expectedAddress, long newAddress) {
return OFF_HEAP_ADDRESS_UPDATER.compareAndSet(this, expectedAddress, newAddress);
}
@Override
@Released
public void release() {
OffHeapRegionEntryHelper.releaseEntry(this);
}
@Override
public void returnToPool() {
// never implemented
}
@Override
protected long getLastModifiedField() {
return LAST_MODIFIED_UPDATER.get(this);
}
@Override
protected boolean compareAndSetLastModifiedField(final long expectedValue, final long newValue) {
return LAST_MODIFIED_UPDATER.compareAndSet(this, expectedValue, newValue);
}
@Override
public int getEntryHash() {
return this.hash;
}
@Override
protected void setEntryHash(final int hash) {
this.hash = hash;
}
@Override
public HashEntry<Object, Object> getNextEntry() {
return this.nextEntry;
}
@Override
public void setNextEntry(final HashEntry<Object, Object> nextEntry) {
this.nextEntry = nextEntry;
}
// ----------------------------------------- key code -------------------------------------------
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
// Key length lives in the lowest 6 bits of bits1 (see bits1 javadoc above).
private int getKeyLength() {
return (int) (this.bits1 & 0x003fL);
}
private int getEncoding() {
// 0 means encoded as char
// 1 means encoded as bytes that are all <= 0x7f;
return (int) (this.bits1 >> 6) & 0x03;
}
@Override
public Object getKey() {
int keyLength = getKeyLength();
char[] chars = new char[keyLength];
long tempBits1 = this.bits1;
long tempBits2 = this.bits2;
// Decode in the same order the constructor packed the characters.
if (getEncoding() == 1) {
for (int i = 0; i < keyLength; i++) {
if (i < 7) {
tempBits1 >>= 8;
chars[i] = (char) (tempBits1 & 0x00ff);
} else {
chars[i] = (char) (tempBits2 & 0x00ff);
tempBits2 >>= 8;
}
}
} else {
for (int i = 0; i < keyLength; i++) {
if (i < 3) {
tempBits1 >>= 16;
chars[i] = (char) (tempBits1 & 0x00FFff);
} else {
chars[i] = (char) (tempBits2 & 0x00FFff);
tempBits2 >>= 16;
}
}
}
return new String(chars);
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
@Override
public boolean isKeyEqual(final Object key) {
if (key instanceof String) {
String stringKey = (String) key;
int keyLength = getKeyLength();
if (stringKey.length() == keyLength) {
long tempBits1 = this.bits1;
long tempBits2 = this.bits2;
// Compare character-by-character against the inline-encoded key,
// mirroring the decode logic in getKey() without allocating a String.
if (getEncoding() == 1) {
for (int i = 0; i < keyLength; i++) {
char character;
if (i < 7) {
tempBits1 >>= 8;
character = (char) (tempBits1 & 0x00ff);
} else {
character = (char) (tempBits2 & 0x00ff);
tempBits2 >>= 8;
}
if (stringKey.charAt(i) != character) {
return false;
}
}
} else {
for (int i = 0; i < keyLength; i++) {
char character;
if (i < 3) {
tempBits1 >>= 16;
character = (char) (tempBits1 & 0x00FFff);
} else {
character = (char) (tempBits2 & 0x00FFff);
tempBits2 >>= 16;
}
if (stringKey.charAt(i) != character) {
return false;
}
}
}
return true;
}
}
return false;
}
// DO NOT modify this class. It was generated from LeafRegionEntry.cpp
}
| |
/*
* Copyright 2005-2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.ldap.ldif.parser;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.NoSuchElementException;
import javax.naming.NamingException;
import javax.naming.directory.Attribute;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.ldap.core.DistinguishedName;
import org.springframework.ldap.core.LdapAttributes;
import org.springframework.ldap.ldif.InvalidRecordFormatException;
import org.springframework.ldap.ldif.support.AttributeValidationPolicy;
import org.springframework.ldap.ldif.support.DefaultAttributeValidationPolicy;
import org.springframework.ldap.ldif.support.LineIdentifier;
import org.springframework.ldap.ldif.support.SeparatorPolicy;
import org.springframework.ldap.schema.DefaultSchemaSpecification;
import org.springframework.ldap.schema.Specification;
import org.springframework.util.Assert;
/**
* The {@link LdifParser LdifParser} is the main class of the {@link org.springframework.ldap.ldif} package.
* This class reads lines from a resource and assembles them into an {@link LdapAttributes LdapAttributes} object.
 * The {@link LdifParser LdifParser} ignores <i>changetype</i> LDIF entries as their usefulness in the
* context of an application has yet to be determined.
* <p>
* <b>Design</b><br/>
* {@link LdifParser LdifParser} provides the main interface for operation but requires three supporting classes to
* enable operation:
* <ul>
* <li>{@link SeparatorPolicy SeparatorPolicy} - establishes the mechanism by which lines are assembled into attributes.</li>
* <li>{@link AttributeValidationPolicy AttributeValidationPolicy} - ensures that attributes are correctly structured prior to parsing.</li>
* <li>{@link Specification Specification} - provides a mechanism by which object structure can be validated after assembly.</li>
* </ul>
 * Together, the parser and these three supporting classes read from the resource line by line and translate the data into objects for use.
* <p>
* <b>Usage</b><br/>
* {@link #getRecord() getRecord()} reads the next available record from the resource. Lines are read and
* passed to the {@link SeparatorPolicy SeparatorPolicy} for interpretation. The parser continues to read
* lines and appends them to the buffer until it encounters the start of a new attribute or an end of record
* delimiter. When the new attribute or end of record is encountered, the buffer is passed to the
* {@link AttributeValidationPolicy AttributeValidationPolicy} which ensures the buffer conforms to a valid
* attribute definition as defined in RFC2849 and returns an {@link org.springframework.ldap.core.LdapAttribute LdapAttribute} object
* which is then added to the record, an {@link LdapAttributes LdapAttributes} object. Upon encountering the
* end of record, the record is validated by the {@link Specification Specification} policy and,
* if valid, returned to the requester.
* <p>
* <i>NOTE: By default, objects are not validated. If validation is required,
* an appropriate specification object must be set.</i>
* <p>
* The parser requires the resource to be {@link #open() open()} prior to an invocation of {@link #getRecord() getRecord()}.
* {@link #hasMoreRecords() hasMoreRecords()} can be used to loop over the resource until all records have been
* retrieved. Likewise, the {@link #reset() reset()} method will reset the resource.
* <p>
* Objects implementing the {@link javax.naming.directory.Attributes Attributes} interface are required to support a case sensitivity setting
* which controls whether or not the attribute IDs of the object are case sensitive. The {@link #caseInsensitive caseInsensitive}
* setting of the {@link LdifParser LdifParser} is passed to the constructor of any {@link javax.naming.directory.Attributes Attributes} created. The
* default value for this setting is true so that case insensitive objects are created.
*
* @author Keith Barlow
*
*/
public class LdifParser implements Parser, InitializingBean {
private static final Log log = LogFactory.getLog(LdifParser.class);
/**
* The resource to parse.
*/
private Resource resource;
/**
* A BufferedReader to read the file.
*/
private BufferedReader reader;
/**
* The SeparatorPolicy to use for interpreting attributes from the lines of the resource.
*/
private SeparatorPolicy separatorPolicy = new SeparatorPolicy();
/**
* The AttributeValidationPolicy to use to interpret attributes.
*/
private AttributeValidationPolicy attributePolicy = new DefaultAttributeValidationPolicy();
/**
* The RecordSpecification for validating records produced.
*/
private Specification<LdapAttributes> specification = new DefaultSchemaSpecification();
/**
* This setting is used to control the case sensitivity of LdapAttribute objects returned by the parser.
*/
private boolean caseInsensitive = true;
/**
* Default constructor.
*/
public LdifParser() {
}
/**
* Creates a LdifParser with the indicated case sensitivity setting.
*
* @param caseInsensitive Case sensitivity setting for LdapAttributes objects returned by the parser.
*/
public LdifParser(boolean caseInsensitive) {
this.caseInsensitive = caseInsensitive;
}
/**
* Creates an LdifParser for the specified resource with the provided case sensitivity setting.
*
* @param resource The resource to parse.
* @param caseInsensitive Case sensitivity setting for LdapAttributes objects returned by the parser.
*/
public LdifParser(Resource resource, boolean caseInsensitive) {
this.resource = resource;
this.caseInsensitive = caseInsensitive;
}
/**
* Convenience constructor for resource specification.
*
* @param resource The resource to parse.
*/
public LdifParser(Resource resource) {
this.resource = resource;
}
/**
* Convenience constructor: accepts a File object.
*
* @param file The file to parse.
*/
public LdifParser(File file) {
this.resource = new FileSystemResource(file);
}
/**
* Set the separator policy.
*
* The default separator policy should suffice for most needs.
*
* @param separatorPolicy Separator policy.
*/
public void setSeparatorPolicy(SeparatorPolicy separatorPolicy) {
this.separatorPolicy = separatorPolicy;
}
/**
* Policy object enforcing the rules for acceptable attributes.
*
* @param avPolicy Attribute validation policy.
*/
public void setAttributeValidationPolicy(AttributeValidationPolicy avPolicy) {
this.attributePolicy = avPolicy;
}
/**
* Policy object for enforcing rules to acceptable LDAP objects.
*
* This policy may be used to enforce schema restrictions.
* @param specification
*/
public void setRecordSpecification(Specification<LdapAttributes> specification) {
this.specification = specification;
}
public void setResource(Resource resource) {
this.resource = resource;
}
public void setCaseInsensitive(boolean caseInsensitive) {
this.caseInsensitive = caseInsensitive;
}
public void open() throws IOException {
Assert.notNull(resource, "Resource must be set.");
reader = new BufferedReader(new InputStreamReader(resource.getInputStream()));
}
public boolean isReady() throws IOException {
return reader.ready();
}
public void close() throws IOException {
if (resource.isOpen())
reader.close();
}
public void reset() throws IOException {
Assert.notNull(reader, "A reader has not been obtained.");
reader.reset();
}
public boolean hasMoreRecords() throws IOException {
return reader.ready();
}
/**
 * Parses and returns the next record from the LDIF stream.
 * <p>
 * Each line is classified by the separator policy and accumulated into a
 * buffer one logical attribute at a time; every completed buffer is flushed
 * into the record via {@code addAttributeToRecord}. Returns {@code null}
 * when no input is ready, when the record is discarded (LDIF change records
 * are not supported), or when an attribute fails to parse.
 *
 * @return the next parsed record, or {@code null} if none could be produced
 * @throws IOException if reading from the underlying stream fails
 */
public LdapAttributes getRecord() throws IOException {
    Assert.notNull(reader, "A reader must be obtained: parser not open.");
    // An un-ready reader is treated as "no more data" rather than an error.
    if (!reader.ready()) {
        log.debug("Reader not ready!");
        return null;
    }
    LdapAttributes record = new LdapAttributes(caseInsensitive);
    StringBuilder builder = new StringBuilder();
    String line = reader.readLine();
    // Loop terminates via the EndOfRecord case below, which always returns.
    while(true) {
        LineIdentifier identifier = separatorPolicy.assess(line);
        switch(identifier) {
        case NewRecord:
            log.trace("Starting new record.");
            //Start new record.
            builder = new StringBuilder(line);
            break;
        case Control:
            log.trace("'control' encountered.");
            //Log WARN and discard record: change records are unsupported.
            log.warn("LDIF change records have no implementation: record will be ignored.");
            builder = null;
            record = null;
            break;
        case ChangeType:
            log.trace("'changetype' encountered.");
            //Log WARN and discard record: change records are unsupported.
            log.warn("LDIF change records have no implementation: record will be ignored.");
            builder = null;
            record = null;
            break;
        case Attribute:
            // Flush the buffered (now complete) attribute into the record,
            // then start buffering the new one.
            // NOTE(review): if a Control/ChangeType line nulled the buffer, a
            // subsequent Attribute line would NPE on builder.toString() —
            // presumably the separator policy classifies the remainder of a
            // discarded record as "voided" (handled by default:); confirm.
            addAttributeToRecord(builder.toString(), record);
            log.trace("Starting new attribute.");
            //Start new attribute.
            builder = new StringBuilder(line);
            break;
        case Continuation:
            log.trace("...appending line to buffer.");
            // LDIF continuation lines start with a single space: strip it and append.
            builder.append(line.replaceFirst(" ", ""));
            break;
        case EndOfRecord:
            log.trace("...done parsing record. (EndOfRecord)");
            //Validate record and return. (No break needed: every path returns or throws.)
            if (record == null) return null; // record was discarded above
            else {
                try {
                    //flush the final buffered attribute.
                    addAttributeToRecord(builder.toString(), record);
                    if (specification.isSatisfiedBy(record)) {
                        log.debug("record parsed:\n" + record);
                        return record;
                    } else {
                        throw new InvalidRecordFormatException("Record [dn: " + record.getDN() + "] does not conform to specification.");
                    }
                } catch(NamingException e) {
                    log.error(e);
                    return null;
                }
            }
        default:
            //Take no action -- applies to VersionIdentifier, Comments, and voided records.
        }
        line = reader.readLine();
    }
}
/**
 * Parses one buffered raw attribute and folds it into the record.
 * <p>
 * A {@code dn} attribute (case-insensitive) sets the record's distinguished
 * name; any other attribute is either appended to an existing attribute of
 * the same ID or added as a new one. Empty buffers and {@code null} records
 * are ignored. Parse failures are logged, never propagated.
 *
 * @param buffer the raw attribute text accumulated by the parser
 * @param record the record under construction; may be {@code null} when discarded
 */
private void addAttributeToRecord(String buffer, LdapAttributes record) {
    // Nothing to do for a blank buffer or a discarded record.
    if (!StringUtils.isNotEmpty(buffer) || record == null) {
        return;
    }
    try {
        final Attribute parsed = attributePolicy.parse(buffer);
        if (parsed.getID().equalsIgnoreCase("dn")) {
            log.trace("...adding DN to record.");
            // The DN value may arrive base64-decoded as raw bytes.
            // NOTE(review): byte[]->String uses the platform default charset;
            // LDIF DNs are presumably UTF-8 — confirm before changing.
            final Object value = parsed.get();
            final String dn = (value instanceof byte[])
                    ? new String((byte[]) value)
                    : (String) value;
            record.setDN(new DistinguishedName(dn));
        } else {
            log.trace("...adding attribute to record.");
            final Attribute existing = record.get(parsed.getID());
            if (existing == null) {
                record.put(parsed);
            } else {
                // Same attribute ID seen before: accumulate as a multi-valued attribute.
                existing.add(parsed.get());
            }
        }
    } catch (NamingException e) {
        log.error(e);
    } catch (NoSuchElementException e) {
        log.error(e);
    }
}
/**
 * Spring {@code InitializingBean} callback: validates that a usable resource
 * has been configured before the parser is first used.
 *
 * @throws IllegalArgumentException if the resource is unset, does not exist,
 *         or is not readable
 */
public void afterPropertiesSet() throws Exception {
    Assert.notNull(resource, "A resource to parse is required.");
    Assert.isTrue(resource.exists(), resource.getDescription() + ": resource does not exist!");
    Assert.isTrue(resource.isReadable(), "Resource is not readable.");
}
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.search.aggregations.support;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.LeafReaderContext;
import org.apache.lucene.index.RandomAccessOrds;
import org.apache.lucene.index.SortedDocValues;
import org.apache.lucene.index.SortedNumericDocValues;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Scorer;
import org.apache.lucene.util.Bits;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.common.lucene.ScorerAware;
import org.elasticsearch.index.fielddata.AtomicOrdinalsFieldData;
import org.elasticsearch.index.fielddata.AtomicParentChildFieldData;
import org.elasticsearch.index.fielddata.IndexFieldData;
import org.elasticsearch.index.fielddata.IndexGeoPointFieldData;
import org.elasticsearch.index.fielddata.IndexNumericFieldData;
import org.elasticsearch.index.fielddata.IndexOrdinalsFieldData;
import org.elasticsearch.index.fielddata.IndexParentChildFieldData;
import org.elasticsearch.index.fielddata.MultiGeoPointValues;
import org.elasticsearch.index.fielddata.SortedBinaryDocValues;
import org.elasticsearch.index.fielddata.SortedNumericDoubleValues;
import org.elasticsearch.index.fielddata.SortingBinaryDocValues;
import org.elasticsearch.index.fielddata.SortingNumericDocValues;
import org.elasticsearch.index.fielddata.SortingNumericDoubleValues;
import org.elasticsearch.index.fielddata.plain.ParentChildIndexFieldData;
import org.elasticsearch.script.LeafSearchScript;
import org.elasticsearch.script.SearchScript;
import org.elasticsearch.search.aggregations.support.ValuesSource.WithScript.BytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptBytesValues;
import org.elasticsearch.search.aggregations.support.values.ScriptDoubleValues;
import org.elasticsearch.search.aggregations.support.values.ScriptLongValues;
import java.io.IOException;
/**
 * A source of per-document values for aggregations. Concrete subclasses are
 * grouped by value kind ({@link Bytes}, {@link Numeric}, {@link GeoPoint})
 * and by backing mechanism (field data, scripts, parent/child).
 */
public abstract class ValuesSource {
    /**
     * Returns a per-segment view of this source's values as binary terms.
     */
    public abstract SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException;

    /**
     * Returns a {@link Bits} marking which documents in the segment have at
     * least one value for this source.
     */
    public abstract Bits docsWithValue(LeafReaderContext context) throws IOException;

    /** Whether this values source needs scores. Only script-backed sources override this. */
    public boolean needsScores() {
        return false;
    }

    /** Values source producing arbitrary binary (bytes) terms. */
    public static abstract class Bytes extends ValuesSource {

        /** Shared singleton that produces no values for any document. */
        public static final WithOrdinals EMPTY = new WithOrdinals() {
            @Override
            public RandomAccessOrds ordinalsValues(LeafReaderContext context) {
                return DocValues.emptySortedSet();
            }
            @Override
            public RandomAccessOrds globalOrdinalsValues(LeafReaderContext context) {
                return DocValues.emptySortedSet();
            }
            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc());
            }
        };

        @Override
        public Bits docsWithValue(LeafReaderContext context) throws IOException {
            final SortedBinaryDocValues bytes = bytesValues(context);
            // Fast path: a single-valued wrapper exposes its live-docs bits directly.
            if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(bytes) != null) {
                return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(bytes);
            } else {
                return org.elasticsearch.index.fielddata.FieldData.docsWithValue(bytes, context.reader().maxDoc());
            }
        }

        /** Bytes source whose terms are backed by (segment and global) ordinals. */
        public static abstract class WithOrdinals extends Bytes {

            @Override
            public Bits docsWithValue(LeafReaderContext context) {
                final RandomAccessOrds ordinals = ordinalsValues(context);
                // Prefer the singleton view when available; cheaper than the multi-valued path.
                if (DocValues.unwrapSingleton(ordinals) != null) {
                    return DocValues.docsWithValue(DocValues.unwrapSingleton(ordinals), context.reader().maxDoc());
                } else {
                    return DocValues.docsWithValue(ordinals, context.reader().maxDoc());
                }
            }

            /** Per-segment ordinals for the current leaf. */
            public abstract RandomAccessOrds ordinalsValues(LeafReaderContext context);

            /** Index-wide (global) ordinals mapped onto the current leaf. */
            public abstract RandomAccessOrds globalOrdinalsValues(LeafReaderContext context);

            /**
             * Returns the number of distinct global ordinals in the index, or 0
             * for an empty index. Reads the count from the first leaf; global
             * ordinals are index-wide, so any leaf reports the same total.
             */
            public long globalMaxOrd(IndexSearcher indexSearcher) {
                IndexReader indexReader = indexSearcher.getIndexReader();
                if (indexReader.leaves().isEmpty()) {
                    return 0;
                } else {
                    LeafReaderContext atomicReaderContext = indexReader.leaves().get(0);
                    RandomAccessOrds values = globalOrdinalsValues(atomicReaderContext);
                    return values.getValueCount();
                }
            }

            /** Ordinal-based bytes source backed by field data. */
            public static class FieldData extends WithOrdinals {

                protected final IndexOrdinalsFieldData indexFieldData;

                public FieldData(IndexOrdinalsFieldData indexFieldData) {
                    this.indexFieldData = indexFieldData;
                }

                @Override
                public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
                    final AtomicOrdinalsFieldData atomicFieldData = indexFieldData.load(context);
                    return atomicFieldData.getBytesValues();
                }

                @Override
                public RandomAccessOrds ordinalsValues(LeafReaderContext context) {
                    final AtomicOrdinalsFieldData atomicFieldData = indexFieldData.load(context);
                    return atomicFieldData.getOrdinalsValues();
                }

                @Override
                public RandomAccessOrds globalOrdinalsValues(LeafReaderContext context) {
                    // Global ordinals are built over the top-level reader, then
                    // the per-leaf view is loaded from the global field data.
                    final IndexOrdinalsFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader());
                    final AtomicOrdinalsFieldData atomicFieldData = global.load(context);
                    return atomicFieldData.getOrdinalsValues();
                }
            }
        }

        /** Bytes source over parent/child join field data; ordinals are per child type. */
        public static class ParentChild extends Bytes {

            protected final ParentChildIndexFieldData indexFieldData;

            public ParentChild(ParentChildIndexFieldData indexFieldData) {
                this.indexFieldData = indexFieldData;
            }

            /**
             * Number of distinct global ordinals for the given type, or 0 for an
             * empty index. Like {@code WithOrdinals#globalMaxOrd}, reads the
             * count from the first leaf of the global field data.
             */
            public long globalMaxOrd(IndexSearcher indexSearcher, String type) {
                DirectoryReader indexReader = (DirectoryReader) indexSearcher.getIndexReader();
                if (indexReader.leaves().isEmpty()) {
                    return 0;
                } else {
                    LeafReaderContext atomicReaderContext = indexReader.leaves().get(0);
                    IndexParentChildFieldData globalFieldData = indexFieldData.loadGlobal(indexReader);
                    AtomicParentChildFieldData afd = globalFieldData.load(atomicReaderContext);
                    SortedDocValues values = afd.getOrdinalsValues(type);
                    return values.getValueCount();
                }
            }

            /** Global ordinals of the given type, mapped onto the current leaf. */
            public SortedDocValues globalOrdinalsValues(String type, LeafReaderContext context) {
                final IndexParentChildFieldData global = indexFieldData.loadGlobal((DirectoryReader)context.parent.reader());
                final AtomicParentChildFieldData atomicFieldData = global.load(context);
                return atomicFieldData.getOrdinalsValues(type);
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
                final AtomicParentChildFieldData atomicFieldData = indexFieldData.load(context);
                return atomicFieldData.getBytesValues();
            }
        }

        /** Plain bytes source backed by field data without ordinals. */
        public static class FieldData extends Bytes {

            protected final IndexFieldData<?> indexFieldData;

            public FieldData(IndexFieldData<?> indexFieldData) {
                this.indexFieldData = indexFieldData;
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
                return indexFieldData.load(context).getBytesValues();
            }
        }

        /** Bytes source whose values are produced entirely by a script. */
        public static class Script extends Bytes {

            private final SearchScript script;

            public Script(SearchScript script) {
                this.script = script;
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return new ScriptBytesValues(script.getLeafSearchScript(context));
            }

            @Override
            public boolean needsScores() {
                return script.needsScores();
            }
        }
    }

    /** Values source producing numeric values, exposed as both longs and doubles. */
    public static abstract class Numeric extends ValuesSource {

        /** Shared singleton that produces no values for any document. */
        public static final Numeric EMPTY = new Numeric() {
            @Override
            public boolean isFloatingPoint() {
                return false;
            }
            @Override
            public SortedNumericDocValues longValues(LeafReaderContext context) {
                return DocValues.emptySortedNumeric(context.reader().maxDoc());
            }
            @Override
            public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException {
                return org.elasticsearch.index.fielddata.FieldData.emptySortedNumericDoubles(context.reader().maxDoc());
            }
            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc());
            }
        };

        /** Whether the underlying data is floating-point or not. */
        public abstract boolean isFloatingPoint();

        /** Get the current {@link SortedNumericDocValues}. */
        public abstract SortedNumericDocValues longValues(LeafReaderContext context) throws IOException;

        /** Get the current {@link SortedNumericDoubleValues}. */
        public abstract SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException;

        @Override
        public Bits docsWithValue(LeafReaderContext context) throws IOException {
            // Pick the representation that matches the underlying data so no
            // lossy long<->double conversion is involved in the bits lookup.
            if (isFloatingPoint()) {
                final SortedNumericDoubleValues values = doubleValues(context);
                if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(values) != null) {
                    return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(values);
                } else {
                    return org.elasticsearch.index.fielddata.FieldData.docsWithValue(values, context.reader().maxDoc());
                }
            } else {
                final SortedNumericDocValues values = longValues(context);
                if (DocValues.unwrapSingleton(values) != null) {
                    return DocValues.unwrapSingletonBits(values);
                } else {
                    return DocValues.docsWithValue(values, context.reader().maxDoc());
                }
            }
        }

        /** Numeric source that post-processes another numeric source's values through a script. */
        public static class WithScript extends Numeric {

            private final Numeric delegate;
            private final SearchScript script;

            public WithScript(Numeric delegate, SearchScript script) {
                this.delegate = delegate;
                this.script = script;
            }

            @Override
            public boolean isFloatingPoint() {
                return true; // even if the underlying source produces longs, scripts can change them to doubles
            }

            @Override
            public boolean needsScores() {
                return script.needsScores();
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return new ValuesSource.WithScript.BytesValues(delegate.bytesValues(context), script.getLeafSearchScript(context));
            }

            @Override
            public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException {
                return new LongValues(delegate.longValues(context), script.getLeafSearchScript(context));
            }

            @Override
            public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException {
                return new DoubleValues(delegate.doubleValues(context), script.getLeafSearchScript(context));
            }

            /** Long view: runs the script on each delegate value, then re-sorts. */
            static class LongValues extends SortingNumericDocValues implements ScorerAware {

                private final SortedNumericDocValues longValues;
                private final LeafSearchScript script;

                public LongValues(SortedNumericDocValues values, LeafSearchScript script) {
                    this.longValues = values;
                    this.script = script;
                }

                @Override
                public void setDocument(int doc) {
                    longValues.setDocument(doc);
                    resize(longValues.count());
                    script.setDocument(doc);
                    // Each original value is exposed to the script as "_value".
                    for (int i = 0; i < count(); ++i) {
                        script.setNextVar("_value", longValues.valueAt(i));
                        values[i] = script.runAsLong();
                    }
                    // Script output order is arbitrary; restore the sorted order the interface requires.
                    sort();
                }

                @Override
                public void setScorer(Scorer scorer) {
                    script.setScorer(scorer);
                }
            }

            /** Double view: runs the script on each delegate value, then re-sorts. */
            static class DoubleValues extends SortingNumericDoubleValues implements ScorerAware {

                private final SortedNumericDoubleValues doubleValues;
                private final LeafSearchScript script;

                public DoubleValues(SortedNumericDoubleValues values, LeafSearchScript script) {
                    this.doubleValues = values;
                    this.script = script;
                }

                @Override
                public void setDocument(int doc) {
                    doubleValues.setDocument(doc);
                    resize(doubleValues.count());
                    script.setDocument(doc);
                    // Each original value is exposed to the script as "_value".
                    for (int i = 0; i < count(); ++i) {
                        script.setNextVar("_value", doubleValues.valueAt(i));
                        values[i] = script.runAsDouble();
                    }
                    // Script output order is arbitrary; restore the sorted order the interface requires.
                    sort();
                }

                @Override
                public void setScorer(Scorer scorer) {
                    script.setScorer(scorer);
                }
            }
        }

        /** Numeric source backed by field data. */
        public static class FieldData extends Numeric {

            protected final IndexNumericFieldData indexFieldData;

            public FieldData(IndexNumericFieldData indexFieldData) {
                this.indexFieldData = indexFieldData;
            }

            @Override
            public boolean isFloatingPoint() {
                return indexFieldData.getNumericType().isFloatingPoint();
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
                return indexFieldData.load(context).getBytesValues();
            }

            @Override
            public SortedNumericDocValues longValues(LeafReaderContext context) {
                return indexFieldData.load(context).getLongValues();
            }

            @Override
            public SortedNumericDoubleValues doubleValues(LeafReaderContext context) {
                return indexFieldData.load(context).getDoubleValues();
            }
        }

        /** Numeric source whose values are produced entirely by a script. */
        public static class Script extends Numeric {

            private final SearchScript script;
            private final ValueType scriptValueType;

            public Script(SearchScript script, ValueType scriptValueType) {
                this.script = script;
                this.scriptValueType = scriptValueType;
            }

            @Override
            public boolean isFloatingPoint() {
                // Unknown script value type: assume floating-point, the lossless default.
                return scriptValueType != null ? scriptValueType.isFloatingPoint() : true;
            }

            @Override
            public SortedNumericDocValues longValues(LeafReaderContext context) throws IOException {
                return new ScriptLongValues(script.getLeafSearchScript(context));
            }

            @Override
            public SortedNumericDoubleValues doubleValues(LeafReaderContext context) throws IOException {
                return new ScriptDoubleValues(script.getLeafSearchScript(context));
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return new ScriptBytesValues(script.getLeafSearchScript(context));
            }

            @Override
            public boolean needsScores() {
                return script.needsScores();
            }
        }
    }

    // No need to implement ReaderContextAware here, the delegate already takes care of updating data structures
    /** Bytes source that post-processes another source's values through a script. */
    public static class WithScript extends Bytes {

        private final ValuesSource delegate;
        private final SearchScript script;

        public WithScript(ValuesSource delegate, SearchScript script) {
            this.delegate = delegate;
            this.script = script;
        }

        @Override
        public boolean needsScores() {
            return script.needsScores();
        }

        @Override
        public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
            return new BytesValues(delegate.bytesValues(context), script.getLeafSearchScript(context));
        }

        /** Binary view: runs the script on each delegate term (as a UTF-8 string), then re-sorts. */
        static class BytesValues extends SortingBinaryDocValues implements ScorerAware {

            private final SortedBinaryDocValues bytesValues;
            private final LeafSearchScript script;

            public BytesValues(SortedBinaryDocValues bytesValues, LeafSearchScript script) {
                this.bytesValues = bytesValues;
                this.script = script;
            }

            @Override
            public void setDocument(int docId) {
                bytesValues.setDocument(docId);
                count = bytesValues.count();
                grow();
                // Each original term is exposed to the script as "_value" (a String).
                for (int i = 0; i < count; ++i) {
                    final BytesRef value = bytesValues.valueAt(i);
                    script.setNextVar("_value", value.utf8ToString());
                    values[i].copyChars(script.run().toString());
                }
                // Script output order is arbitrary; restore sorted term order.
                sort();
            }

            @Override
            public void setScorer(Scorer scorer) {
                script.setScorer(scorer);
            }
        }
    }

    /** Values source producing geo points. */
    public static abstract class GeoPoint extends ValuesSource {

        /** Shared singleton that produces no values for any document. */
        public static final GeoPoint EMPTY = new GeoPoint() {
            @Override
            public MultiGeoPointValues geoPointValues(LeafReaderContext context) {
                return org.elasticsearch.index.fielddata.FieldData.emptyMultiGeoPoints(context.reader().maxDoc());
            }
            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) throws IOException {
                return org.elasticsearch.index.fielddata.FieldData.emptySortedBinary(context.reader().maxDoc());
            }
        };

        @Override
        public Bits docsWithValue(LeafReaderContext context) {
            final MultiGeoPointValues geoPoints = geoPointValues(context);
            // Fast path: a single-valued wrapper exposes its live-docs bits directly.
            if (org.elasticsearch.index.fielddata.FieldData.unwrapSingleton(geoPoints) != null) {
                return org.elasticsearch.index.fielddata.FieldData.unwrapSingletonBits(geoPoints);
            } else {
                return org.elasticsearch.index.fielddata.FieldData.docsWithValue(geoPoints, context.reader().maxDoc());
            }
        }

        /** Per-segment geo-point values for the current leaf. */
        public abstract MultiGeoPointValues geoPointValues(LeafReaderContext context);

        /** Geo-point source backed by field data. */
        public static class Fielddata extends GeoPoint {

            protected final IndexGeoPointFieldData indexFieldData;

            public Fielddata(IndexGeoPointFieldData indexFieldData) {
                this.indexFieldData = indexFieldData;
            }

            @Override
            public SortedBinaryDocValues bytesValues(LeafReaderContext context) {
                return indexFieldData.load(context).getBytesValues();
            }

            public org.elasticsearch.index.fielddata.MultiGeoPointValues geoPointValues(LeafReaderContext context) {
                return indexFieldData.load(context).getGeoPointValues();
            }
        }
    }
}
| |
package com.planet_ink.coffee_mud.Abilities.Properties;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2000-2014 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
@SuppressWarnings("rawtypes")
/**
 * Property that gives a room/area/exit/item (or MOB) one or more smells.
 * <p>
 * The misc-text is a semicolon-separated list of smells. Each smell may be
 * prefixed with modifiers: {@code TICKS=n} (smell expires after n ticks),
 * {@code CHANCE=n} (selection weight, default 100), {@code EMOTE} (may be
 * spontaneously emoted on tick) and {@code BROADCAST} (an emoted smell also
 * drifts into adjacent open rooms). Sniffing the affected object always
 * reports a (weighted) smell.
 */
public class Prop_Smell extends Property
{
	@Override public String ID() { return "Prop_Smell"; }
	@Override public String name(){ return "A Smell";}
	@Override protected int canAffectCode(){return Ability.CAN_ROOMS|Ability.CAN_AREAS|Ability.CAN_EXITS|Ability.CAN_ITEMS;}

	// Parsed smell table, lazily built from text(): column 1 = smell text,
	// column 2 = chance (bits 0-8) plus flag bits, column 3 = remaining ticks.
	protected DVector smells=null;
	// Flag bits stored alongside the chance in column 2. Chance occupies the
	// low 9 bits (mask 511), so the flags start at 512.
	protected final static int FLAG_EMOTE=512;
	protected final static int FLAG_BROADCAST=1024;
	// Whether the smell last returned by selectSmell() carried FLAG_BROADCAST.
	protected boolean lastWasBroadcast=false;
	@Override public String accountForYourself(){ return ""; }

	/**
	 * Adjusts the smell list. A leading "+" appends a smell (if not already
	 * present), a leading "-" removes the first matching substring, anything
	 * else replaces the whole list. Invalidates the parsed cache.
	 */
	@Override
	public void setMiscText(String newStr)
	{
		if(newStr.startsWith("+"))
		{
			if(text().indexOf(newStr.substring(1).trim())>=0)
				return;
			super.setMiscText(text()+";"+newStr.substring(1).trim());
			smells=null;
		}
		else
		if(newStr.startsWith("-"))
		{
			final int x=text().indexOf(newStr.substring(1).trim());
			if(x>=0)
			{
				final int len=newStr.substring(1).trim().length();
				super.setMiscText(text().substring(0,x)+text().substring(x+len));
				smells=null;
			}
			else
				return;
		}
		else
		{
			super.setMiscText(newStr);
			smells=null;
		}
	}

	/**
	 * Lazily parses text() into the smell table (see field docs for columns).
	 * Modifier keywords are stripped from each smell; the remainder becomes
	 * the smell text.
	 *
	 * @return the parsed smell table, never null
	 */
	public DVector getSmells()
	{
		if(smells!=null) return smells;
		final List<String> allsmells=CMParms.parseSemicolons(text(),true);
		smells=new DVector(3);
		for(int i=0;i<allsmells.size();i++)
		{
			final String smell=allsmells.get(i);
			if(smell.length()>0)
			{
				int pct=100;
				int ticks=-1;
				final Vector parsedSmell=CMParms.parse(smell);
				for(int ii=parsedSmell.size()-1;ii>=0;ii--)
				{
					final String s=((String)parsedSmell.elementAt(ii)).toUpperCase();
					if(s.startsWith("TICKS="))
					{
						ticks=CMath.s_int(s.substring(6).trim());
						parsedSmell.removeElementAt(ii);
					}
					if(s.startsWith("CHANCE="))
					{
						// BUGFIX: "CHANCE=" is 7 characters; substring(5) left "E="
						// in front of the number so the chance always parsed as 0.
						// Flag bits already set on pct are preserved.
						pct=(pct&(FLAG_BROADCAST+FLAG_EMOTE))+CMath.s_int(s.substring(7).trim());
						parsedSmell.removeElementAt(ii);
					}
					if(s.equals("EMOTE"))
					{
						// BUGFIX: set the flag with OR; the previous AND wiped the
						// chance bits (and any other flag) instead of adding the flag.
						pct=pct|FLAG_EMOTE;
						parsedSmell.removeElementAt(ii);
					}
					if(s.equals("BROADCAST"))
					{
						// BUGFIX: was AND-ing FLAG_EMOTE, which both destroyed the
						// chance and set the wrong flag. BROADCAST sets FLAG_BROADCAST
						// (this matches how tick() serializes the flags back to text).
						pct=pct|FLAG_BROADCAST;
						parsedSmell.removeElementAt(ii);
					}
				}
				final String finalSmell=CMParms.combine(parsedSmell,0).trim();
				if(finalSmell.length()>0)
					smells.addElement(finalSmell,Integer.valueOf(pct),Integer.valueOf(ticks));
			}
		}
		return smells;
	}

	/**
	 * Picks one smell at random, weighted by each smell's chance (pct&511).
	 * Also records in lastWasBroadcast whether the chosen smell broadcasts.
	 *
	 * @param emoteOnly when true, only smells flagged EMOTE are considered
	 * @return the chosen smell text, or "" when nothing is eligible
	 */
	public String selectSmell(boolean emoteOnly)
	{
		lastWasBroadcast=false;
		getSmells();
		if((smells!=null)&&(smells.size()>0))
		{
			int total=0;
			for(int i=0;i<smells.size();i++)
			{
				final int pct=((Integer)smells.elementAt(i,2)).intValue();
				if((!emoteOnly)||(CMath.bset(pct,FLAG_EMOTE)))
					total+=pct&511;
			}
			if(total==0) return "";
			int draw=CMLib.dice().roll(1,total,0);
			for(int i=0;i<smells.size();i++)
			{
				final int pct=((Integer)smells.elementAt(i,2)).intValue();
				if((!emoteOnly)||(CMath.bset(pct,FLAG_EMOTE)))
				{
					draw-=pct&511;
					if(draw<=0)
					{
						lastWasBroadcast=CMath.bset(pct,FLAG_BROADCAST);
						return (String)smells.elementAt(i,1);
					}
				}
			}
		}
		return "";
	}

	/**
	 * Responds to a SNIFF of the affected object by telling the sniffer a
	 * weighted random smell (if the sniffer can smell at all).
	 */
	@Override
	public void executeMsg(final Environmental myHost, final CMMsg msg)
	{
		super.executeMsg(myHost,msg);
		if((msg.amITarget(affected))
		&&(msg.targetMinor()==CMMsg.TYP_SNIFF)
		&&(CMLib.flags().canSmell(msg.source())))
			msg.source().tell(msg.source(),affected,null,selectSmell(false));
	}

	/**
	 * Emotes the given smell text in a room, shown only to players who are
	 * present and able to smell.
	 */
	public void emoteHere(Room room, MOB emoter, String str)
	{
		final CMMsg msg=CMClass.getMsg(emoter,null,CMMsg.MSG_EMOTE,str);
		if(room.okMessage(emoter,msg))
		for(int i=0;i<room.numInhabitants();i++)
		{
			final MOB M=room.fetchInhabitant(i);
			if((M!=null)&&(!M.isMonster())&&(CMLib.flags().canSmell(M)))
				M.executeMsg(M,msg);
		}
	}

	/**
	 * Per-tick behavior: with a 20% chance, emote an EMOTE-flagged smell in the
	 * affected MOB's room (and, if flagged BROADCAST, into adjacent open rooms
	 * via a temporary factory MOB). Then ages TICKS= smells and rewrites the
	 * misc-text when any smell expires, removing this effect when none remain.
	 */
	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if((affected instanceof MOB)&&(CMLib.dice().rollPercentage()<=20))
		{
			final String emote=selectSmell(true);
			if((emote!=null)&&(emote.length()>0))
			{
				final Room room=CMLib.map().roomLocation(affected);
				if(room!=null)
				{
					emoteHere(room,(MOB)affected,emote);
					if(lastWasBroadcast)
					{
						final MOB emoter=CMClass.getFactoryMOB();
						for(int d=Directions.NUM_DIRECTIONS()-1;d>=0;d--)
						{
							final Room R=room.getRoomInDir(d);
							final Exit E=room.getExitInDir(d);
							if((R!=null)&&(E!=null)&&(E.isOpen()))
							{
								emoter.setLocation(R);
								// Name the phantom emoter by the direction the smell comes from.
								final String inDir=((R instanceof SpaceShip)||(R.getArea() instanceof SpaceShip))?
										Directions.getShipInDirectionName(Directions.getOpDirectionCode(d)):
										Directions.getInDirectionName(Directions.getOpDirectionCode(d));
								emoter.setName(_("something @x1",inDir));
								emoteHere(R,emoter,emote);
							}
						}
						emoter.destroy();
					}
				}
			}
			// Age timed smells: decrement, re-add survivors, drop expired ones.
			final DVector sm=getSmells();
			boolean redo=false;
			for(int i=sm.size()-1;i>=0;i--)
			{
				if(((Integer)sm.elementAt(i,3)).intValue()>0)
				{
					final Integer I=Integer.valueOf(((Integer)smells.elementAt(i,3)).intValue()-1);
					if(I.intValue()>0)
					{
						final String smell=(String)sm.elementAt(i,1);
						final Integer pct=(Integer)sm.elementAt(i,2);
						sm.addElement(smell,pct,I);
					}
					sm.removeElementAt(i);
					if(I.intValue()<=0) redo=true;
				}
			}
			if(redo)
			{
				// Serialize the surviving smells back into misc-text form.
				final StringBuffer newText=new StringBuffer("");
				for(int i=0;i<sm.size();i++)
				{
					final String smell=(String)sm.elementAt(i,1);
					final Integer pct=(Integer)sm.elementAt(i,2);
					final Integer ticks=(Integer)sm.elementAt(i,3);
					if(ticks.intValue()>0)
						newText.append("TICKS="+ticks+" ");
					if(CMath.bset(pct.intValue(),FLAG_EMOTE))
						newText.append("EMOTE ");
					if(CMath.bset(pct.intValue(),FLAG_BROADCAST))
						newText.append("BROADCAST ");
					if((pct.intValue()&511)!=100)
						newText.append("CHANCE="+(pct.intValue()&511)+" ");
					newText.append(smell+";");
				}
				if(newText.length()==0)
					affected.delEffect(this);
				else
					setMiscText(newText.toString());
			}
		}
		return super.tick(ticking,tickID);
	}
}
| |
/**
* Copyright (c) 2015-present, Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the BSD-style license found in the
* LICENSE file in the root directory of this source tree. An additional grant
* of patent rights can be found in the PATENTS file in the same directory.
*/
package com.facebook.react.views.view;
import javax.annotation.Nullable;
import java.util.Locale;
import java.util.Map;
import android.graphics.Color;
import android.os.Build;
import android.view.View;
import com.facebook.csslayout.CSSConstants;
import com.facebook.csslayout.Spacing;
import com.facebook.react.bridge.JSApplicationIllegalArgumentException;
import com.facebook.react.bridge.ReadableArray;
import com.facebook.react.bridge.ReadableMap;
import com.facebook.react.common.MapBuilder;
import com.facebook.react.common.annotations.VisibleForTesting;
import com.facebook.react.uimanager.CatalystStylesDiffMap;
import com.facebook.react.uimanager.PixelUtil;
import com.facebook.react.uimanager.PointerEvents;
import com.facebook.react.uimanager.ReactProp;
import com.facebook.react.uimanager.ReactPropGroup;
import com.facebook.react.uimanager.ThemedReactContext;
import com.facebook.react.uimanager.ViewGroupManager;
import com.facebook.react.uimanager.ViewProps;
/**
* View manager for AndroidViews (plain React Views).
*/
/**
 * View manager for AndroidViews (plain React Views). Maps JS props and UI
 * commands onto {@link ReactViewGroup}, and routes child management through
 * the subview-clipping APIs when "removeClippedSubviews" is enabled.
 */
public class ReactViewManager extends ViewGroupManager<ReactViewGroup> {

  @VisibleForTesting
  public static final String REACT_CLASS = ViewProps.VIEW_CLASS_NAME;

  // Index order must match the indices used by the border ReactPropGroups below.
  private static final int[] SPACING_TYPES = {
      Spacing.ALL, Spacing.LEFT, Spacing.RIGHT, Spacing.TOP, Spacing.BOTTOM,
  };
  private static final int CMD_HOTSPOT_UPDATE = 1;
  private static final int CMD_SET_PRESSED = 2;
  // Scratch buffer for getLocationOnScreen. NOTE(review): being static and
  // mutable, this assumes commands are dispatched from a single (UI) thread.
  private static final int[] sLocationBuf = new int[2];

  @ReactProp(name = "accessible")
  public void setAccessible(ReactViewGroup view, boolean accessible) {
    view.setFocusable(accessible);
  }

  @ReactProp(name = "borderRadius")
  public void setBorderRadius(ReactViewGroup view, float borderRadius) {
    view.setBorderRadius(PixelUtil.toPixelFromDIP(borderRadius));
  }

  @ReactProp(name = "borderStyle")
  public void setBorderStyle(ReactViewGroup view, @Nullable String borderStyle) {
    view.setBorderStyle(borderStyle);
  }

  @ReactProp(name = "pointerEvents")
  public void setPointerEvents(ReactViewGroup view, @Nullable String pointerEventsStr) {
    if (pointerEventsStr == null) {
      return;
    }
    // JS sends kebab-case (e.g. "box-none"); the enum constants are SNAKE_CASE.
    String enumName = pointerEventsStr.toUpperCase(Locale.US).replace("-", "_");
    view.setPointerEvents(PointerEvents.valueOf(enumName));
  }

  @ReactProp(name = "nativeBackgroundAndroid")
  public void setNativeBackground(ReactViewGroup view, @Nullable ReadableMap bg) {
    view.setTranslucentBackgroundDrawable(bg == null
        ? null
        : ReactDrawableHelper.createDrawableFromJSDescription(view.getContext(), bg));
  }

  @ReactProp(name = ViewProps.BORDER_WIDTH, defaultFloat = CSSConstants.UNDEFINED)
  public void setBorderWidth(ReactViewGroup view, float width) {
    // UNDEFINED means "unset"; only defined widths are converted from DIP to pixels.
    float widthPx = CSSConstants.isUndefined(width) ? width : PixelUtil.toPixelFromDIP(width);
    view.setBorderWidth(Spacing.ALL, widthPx);
  }

  @ReactProp(name = ReactClippingViewGroupHelper.PROP_REMOVE_CLIPPED_SUBVIEWS)
  public void setRemoveClippedSubviews(ReactViewGroup view, boolean removeClippedSubviews) {
    view.setRemoveClippedSubviews(removeClippedSubviews);
  }

  @ReactProp(name = ViewProps.NEEDS_OFFSCREEN_ALPHA_COMPOSITING)
  public void setNeedsOffscreenAlphaCompositing(
      ReactViewGroup view,
      boolean needsOffscreenAlphaCompositing) {
    view.setNeedsOffscreenAlphaCompositing(needsOffscreenAlphaCompositing);
  }

  @ReactPropGroup(names = {
      ViewProps.BORDER_WIDTH,
      ViewProps.BORDER_LEFT_WIDTH,
      ViewProps.BORDER_RIGHT_WIDTH,
      ViewProps.BORDER_TOP_WIDTH,
      ViewProps.BORDER_BOTTOM_WIDTH,
  }, defaultFloat = CSSConstants.UNDEFINED)
  public void setBorderWidth(ReactViewGroup view, int index, float width) {
    // Same DIP handling as the single-edge setter; index selects the edge.
    float widthPx = CSSConstants.isUndefined(width) ? width : PixelUtil.toPixelFromDIP(width);
    view.setBorderWidth(SPACING_TYPES[index], widthPx);
  }

  @ReactPropGroup(names = {
      "borderColor", "borderLeftColor", "borderRightColor", "borderTopColor", "borderBottomColor"
  }, customType = "Color")
  public void setBorderColor(ReactViewGroup view, int index, Integer color) {
    // A null color clears the border color for that edge (UNDEFINED sentinel).
    float colorValue = (color == null) ? CSSConstants.UNDEFINED : (float) color;
    view.setBorderColor(SPACING_TYPES[index], colorValue);
  }

  @Override
  public String getName() {
    return REACT_CLASS;
  }

  @Override
  public ReactViewGroup createViewInstance(ThemedReactContext context) {
    return new ReactViewGroup(context);
  }

  @Override
  public Map<String, Integer> getCommandsMap() {
    return MapBuilder.of("hotspotUpdate", CMD_HOTSPOT_UPDATE, "setPressed", CMD_SET_PRESSED);
  }

  @Override
  public void receiveCommand(ReactViewGroup root, int commandId, @Nullable ReadableArray args) {
    switch (commandId) {
      case CMD_HOTSPOT_UPDATE:
        handleHotspotUpdate(root, args);
        break;
      case CMD_SET_PRESSED:
        handleSetPressed(root, args);
        break;
    }
  }

  /** Applies a JS "hotspotUpdate" command; a no-op below API 21 (no hotspot support). */
  private static void handleHotspotUpdate(ReactViewGroup root, @Nullable ReadableArray args) {
    if (args == null || args.size() != 2) {
      throw new JSApplicationIllegalArgumentException(
          "Illegal number of arguments for 'updateHotspot' command");
    }
    if (Build.VERSION.SDK_INT >= 21) {
      // Args are screen coordinates in DIP; convert to view-local pixels.
      root.getLocationOnScreen(sLocationBuf);
      float x = PixelUtil.toPixelFromDIP(args.getDouble(0)) - sLocationBuf[0];
      float y = PixelUtil.toPixelFromDIP(args.getDouble(1)) - sLocationBuf[1];
      root.drawableHotspotChanged(x, y);
    }
  }

  /** Applies a JS "setPressed" command to the view's pressed state. */
  private static void handleSetPressed(ReactViewGroup root, @Nullable ReadableArray args) {
    if (args == null || args.size() != 1) {
      throw new JSApplicationIllegalArgumentException(
          "Illegal number of arguments for 'setPressed' command");
    }
    root.setPressed(args.getBoolean(0));
  }

  @Override
  public void addView(ReactViewGroup parent, View child, int index) {
    if (parent.getRemoveClippedSubviews()) {
      parent.addViewWithSubviewClippingEnabled(child, index);
    } else {
      parent.addView(child, index);
    }
  }

  @Override
  public int getChildCount(ReactViewGroup parent) {
    // With clipping on, clipped-out children are still counted.
    return parent.getRemoveClippedSubviews()
        ? parent.getAllChildrenCount()
        : parent.getChildCount();
  }

  @Override
  public View getChildAt(ReactViewGroup parent, int index) {
    return parent.getRemoveClippedSubviews()
        ? parent.getChildAtWithSubviewClippingEnabled(index)
        : parent.getChildAt(index);
  }

  @Override
  public void removeViewAt(ReactViewGroup parent, int index) {
    if (parent.getRemoveClippedSubviews()) {
      // The child may be clipped out (not attached); only detach when it has a parent.
      View child = getChildAt(parent, index);
      if (child.getParent() != null) {
        parent.removeView(child);
      }
      parent.removeViewWithSubviewClippingEnabled(child);
    } else {
      parent.removeViewAt(index);
    }
  }

  @Override
  public void removeAllViews(ReactViewGroup parent) {
    if (parent.getRemoveClippedSubviews()) {
      parent.removeAllViewsWithSubviewClippingEnabled();
    } else {
      parent.removeAllViews();
    }
  }
}
| |
/*
* Copyright 2013 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.sha1coin.script;
import com.google.sha1coin.core.Address;
import com.google.sha1coin.core.ECKey;
import com.google.sha1coin.core.Utils;
import com.google.sha1coin.crypto.TransactionSignature;
import com.google.common.collect.Lists;
import com.google.common.primitives.UnsignedBytes;
import javax.annotation.Nullable;
import java.util.*;
import static com.google.sha1coin.script.ScriptOpCodes.*;
import static com.google.common.base.Preconditions.checkArgument;
/**
 * <p>Tools for the construction of commonly used script types. You don't normally need this as it's hidden behind
 * convenience methods on {@link com.google.sha1coin.core.Transaction}, but they are useful when working with the
 * protocol at a lower level.</p>
 */
public class ScriptBuilder {
    /** Accumulated script chunks, in the order they will appear in the final program. */
    private final List<ScriptChunk> chunks;

    /** Creates an empty builder. */
    public ScriptBuilder() {
        chunks = Lists.newLinkedList();
    }

    /** Appends the given chunk and returns this builder, allowing call chaining. */
    public ScriptBuilder addChunk(ScriptChunk chunk) {
        chunks.add(chunk);
        return this;
    }

    /**
     * Appends the given opcode. The opcode must be a real operation, not one of
     * the push opcodes (all values at or below OP_PUSHDATA4 encode pushes).
     */
    public ScriptBuilder op(int opcode) {
        checkArgument(opcode > OP_PUSHDATA4);
        return addChunk(new ScriptChunk(opcode, null));
    }

    /**
     * Appends a push of the given data, choosing the shortest possible push
     * encoding as required by BIP62 (canonical pushes). The input array is
     * defensively copied.
     *
     * @throws RuntimeException if the data is 65536 bytes or longer
     *         (OP_PUSHDATA4 is not implemented).
     */
    public ScriptBuilder data(byte[] data) {
        // implements BIP62
        byte[] copy = Arrays.copyOf(data, data.length);
        int opcode;
        if (data.length == 0) {
            opcode = OP_0;
        } else if (data.length == 1) {
            byte b = data[0];
            if (b >= 1 && b <= 16)
                opcode = Script.encodeToOpN(b);
            else
                opcode = 1; // direct push of one byte: the opcode itself is the byte count
        } else if (data.length < OP_PUSHDATA1) {
            opcode = data.length; // direct push: opcode encodes the length
        } else if (data.length < 256) {
            opcode = OP_PUSHDATA1;
        } else if (data.length < 65536) {
            opcode = OP_PUSHDATA2;
        } else {
            throw new RuntimeException("Unimplemented");
        }
        return addChunk(new ScriptChunk(opcode, copy));
    }

    /** Appends the OP_N opcode encoding the given small number (0..16 inclusive). */
    public ScriptBuilder smallNum(int num) {
        checkArgument(num >= 0, "Cannot encode negative numbers with smallNum");
        checkArgument(num <= 16, "Cannot encode numbers larger than 16 with smallNum");
        return addChunk(new ScriptChunk(Script.encodeToOpN(num), null));
    }

    /** Builds a {@link Script} from the chunks appended so far. */
    public Script build() {
        return new Script(chunks);
    }

    /** Creates a scriptPubKey that encodes payment to the given address. */
    public static Script createOutputScript(Address to) {
        if (to.isP2SHAddress()) {
            // OP_HASH160 <scriptHash> OP_EQUAL
            return new ScriptBuilder()
                .op(OP_HASH160)
                .data(to.getHash160())
                .op(OP_EQUAL)
                .build();
        } else {
            // OP_DUP OP_HASH160 <pubKeyHash> OP_EQUALVERIFY OP_CHECKSIG
            return new ScriptBuilder()
                .op(OP_DUP)
                .op(OP_HASH160)
                .data(to.getHash160())
                .op(OP_EQUALVERIFY)
                .op(OP_CHECKSIG)
                .build();
        }
    }

    /** Creates a scriptPubKey that encodes payment to the given raw public key. */
    public static Script createOutputScript(ECKey key) {
        return new ScriptBuilder().data(key.getPubKey()).op(OP_CHECKSIG).build();
    }

    /** Creates a scriptSig that can redeem a pay-to-address output. */
    public static Script createInputScript(TransactionSignature signature, ECKey pubKey) {
        byte[] pubkeyBytes = pubKey.getPubKey();
        return new ScriptBuilder().data(signature.encodeToBitcoin()).data(pubkeyBytes).build();
    }

    /** Creates a scriptSig that can redeem a pay-to-pubkey output. */
    public static Script createInputScript(TransactionSignature signature) {
        return new ScriptBuilder().data(signature.encodeToBitcoin()).build();
    }

    /** Creates a program that requires at least N of the given keys to sign, using OP_CHECKMULTISIG. */
    public static Script createMultiSigOutputScript(int threshold, List<ECKey> pubkeys) {
        checkArgument(threshold > 0);
        checkArgument(threshold <= pubkeys.size());
        checkArgument(pubkeys.size() <= 16); // That's the max we can represent with a single opcode.
        ScriptBuilder builder = new ScriptBuilder();
        builder.smallNum(threshold);
        for (ECKey key : pubkeys) {
            builder.data(key.getPubKey());
        }
        builder.smallNum(pubkeys.size());
        builder.op(OP_CHECKMULTISIG);
        return builder.build();
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program. */
    public static Script createMultiSigInputScript(List<TransactionSignature> signatures) {
        return createP2SHMultiSigInputScript(signatures, null);
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program. */
    public static Script createMultiSigInputScript(TransactionSignature... signatures) {
        return createMultiSigInputScript(Arrays.asList(signatures));
    }

    /** Create a program that satisfies an OP_CHECKMULTISIG program, using pre-encoded signatures. */
    public static Script createMultiSigInputScriptBytes(List<byte[]> signatures) {
        return createMultiSigInputScriptBytes(signatures, null);
    }

    /** Create a program that satisfies a pay-to-script hashed OP_CHECKMULTISIG program. */
    public static Script createP2SHMultiSigInputScript(List<TransactionSignature> signatures,
                                                       byte[] multisigProgramBytes) {
        List<byte[]> sigs = new ArrayList<byte[]>(signatures.size());
        for (TransactionSignature signature : signatures)
            sigs.add(signature.encodeToBitcoin());
        return createMultiSigInputScriptBytes(sigs, multisigProgramBytes);
    }

    /**
     * Create a program that satisfies an OP_CHECKMULTISIG program, using pre-encoded signatures.
     * Optionally, appends the script program bytes if spending a P2SH output.
     */
    public static Script createMultiSigInputScriptBytes(List<byte[]> signatures, @Nullable byte[] multisigProgramBytes) {
        checkArgument(signatures.size() <= 16);
        ScriptBuilder builder = new ScriptBuilder();
        builder.smallNum(0); // Work around a bug in CHECKMULTISIG that is now a required part of the protocol.
        for (byte[] signature : signatures)
            builder.data(signature);
        if (multisigProgramBytes != null)
            builder.data(multisigProgramBytes);
        return builder.build();
    }

    /**
     * Creates a scriptPubKey that sends to the given script hash. Read
     * <a href="https://github.com/bitcoin/bips/blob/master/bip-0016.mediawiki">BIP 16</a> to learn more about this
     * kind of script.
     *
     * @param hash the 20-byte RIPEMD-160 hash of the redeem script.
     */
    public static Script createP2SHOutputScript(byte[] hash) {
        checkArgument(hash.length == 20);
        return new ScriptBuilder().op(OP_HASH160).data(hash).op(OP_EQUAL).build();
    }

    /**
     * Creates a scriptPubKey for the given redeem script.
     */
    public static Script createP2SHOutputScript(Script redeemScript) {
        byte[] hash = Utils.sha256hash160(redeemScript.getProgram());
        return ScriptBuilder.createP2SHOutputScript(hash);
    }

    /**
     * Creates a P2SH output script with given public keys and threshold. Given public keys will be placed in
     * redeem script in the lexicographical sorting order.
     */
    public static Script createP2SHOutputScript(int threshold, List<ECKey> pubkeys) {
        Script redeemScript = createRedeemScript(threshold, pubkeys);
        return createP2SHOutputScript(redeemScript);
    }

    /**
     * Creates redeem script with given public keys and threshold. Given public keys will be placed in
     * redeem script in the lexicographical sorting order.
     */
    public static Script createRedeemScript(int threshold, List<ECKey> pubkeys) {
        pubkeys = new ArrayList<ECKey>(pubkeys); // defensive copy: we sort the list below
        // Typed comparator (the original used a raw Comparator, an unchecked-warning hazard).
        final Comparator<byte[]> comparator = UnsignedBytes.lexicographicalComparator();
        Collections.sort(pubkeys, new Comparator<ECKey>() {
            @Override
            public int compare(ECKey k1, ECKey k2) {
                return comparator.compare(k1.getPubKey(), k2.getPubKey());
            }
        });
        return ScriptBuilder.createMultiSigOutputScript(threshold, pubkeys);
    }
}
| |
/* AbstractCdrOutput.java --
Copyright (C) 2005 Free Software Foundation, Inc.
This file is part of GNU Classpath.
GNU Classpath is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
GNU Classpath is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with GNU Classpath; see the file COPYING. If not, write to the
Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
conditions of the GNU General Public License cover the whole
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version. */
package gnu.CORBA.CDR;
import gnu.CORBA.BigDecimalHelper;
import gnu.CORBA.IOR;
import gnu.CORBA.IorProvider;
import gnu.CORBA.Minor;
import gnu.CORBA.TypeCodeHelper;
import gnu.CORBA.Unexpected;
import gnu.CORBA.Version;
import gnu.CORBA.GIOP.CharSets_OSF;
import gnu.CORBA.GIOP.CodeSetServiceContext;
import gnu.CORBA.typecodes.PrimitiveTypeCode;
import org.omg.CORBA.Any;
import org.omg.CORBA.BAD_OPERATION;
import org.omg.CORBA.Context;
import org.omg.CORBA.ContextList;
import org.omg.CORBA.DataInputStream;
import org.omg.CORBA.MARSHAL;
import org.omg.CORBA.NO_IMPLEMENT;
import org.omg.CORBA.ORB;
import org.omg.CORBA.TCKind;
import org.omg.CORBA.TypeCode;
import org.omg.CORBA.UserException;
import org.omg.CORBA.TypeCodePackage.BadKind;
import org.omg.CORBA.portable.Delegate;
import org.omg.CORBA.portable.ObjectImpl;
import org.omg.CORBA.portable.OutputStream;
import org.omg.CORBA.portable.Streamable;
import java.io.IOException;
import java.io.OutputStreamWriter;
import java.io.Serializable;
import java.math.BigDecimal;
/**
 * A simple CORBA CDR (common data representation)
 * output stream, writing data into the
 * given {@link java.io.OutputStream}.
 *
 * The same class also implements the {@link org.omg.CORBA.DataOutputStream},
 * providing support for writing the value type objects
 * in a user defined way.
 *
 * TODO This class uses 16 bits per Unicode character only, as it was until
 * jdk 1.4 inclusive.
 *
 * @author Audrius Meskauskas (AudriusA@Bioinformatics.org)
 */
public abstract class AbstractCdrOutput
  extends org.omg.CORBA_2_3.portable.OutputStream
  implements org.omg.CORBA.DataOutputStream
{
  /**
   * The runtime, associated with this stream. This field is only used when
   * reading and writing value types and filled-in in gnu.CORBA.CDR.Vio.
   */
  public transient gnuRuntime runtime;

  /**
   * This instance is used to convert primitive data types into the
   * byte sequences.
   */
  protected AbstractDataOutput b;

  /**
   * The associated orb, if any.
   */
  protected ORB orb;

  /**
   * The GIOP version. Defaults to 1.2.
   */
  protected Version giop = new Version(1, 2);

  /**
   * The code set information.
   */
  protected CodeSetServiceContext codeset;

  /**
   * The name of the currently used narrow charset.
   */
  private String narrow_charset;

  /**
   * The name of the currently used wide charset, null if
   * the native wide charset is used.
   */
  private String wide_charset;

  /**
   * True if the native code set is used for narrow characters.
   * If the set is native, the intermediate Writer object
   * is not instantiated when writing characters.
   */
  private boolean narrow_native;

  /**
   * True if the native code set is used for wide characters.
   * If the set is native, the intermediate Writer object
   * is not instantiated when writing characters.
   */
  private boolean wide_native;

  /**
   * If true, the Little Endian encoding is used to write the
   * data. Otherwise, the Big Endian encoding is used.
   */
  private boolean little_endian;

  /**
   * The stream where the data are actually written.
   */
  private java.io.OutputStream actual_stream;

  /**
   * Creates the stream.
   *
   * @param writeTo a stream to write CORBA output to.
   */
  public AbstractCdrOutput(java.io.OutputStream writeTo)
  {
    setOutputStream(writeTo);
    setCodeSet(CodeSetServiceContext.STANDARD);
  }

  /**
   * Creates the stream, requiring the subsequent call
   * of {@link #setOutputStream(java.io.OutputStream)}.
   */
  public AbstractCdrOutput()
  {
    setCodeSet(CodeSetServiceContext.STANDARD);
  }

  /**
   * Set the alignment offset, if the index of the first byte in the
   * stream is different from 0.
   */
  public abstract void setOffset(int an_offset);

  /**
   * Clone all important settings (endianness, code set, GIOP version and
   * orb) to another stream.
   */
  public void cloneSettings(AbstractCdrOutput stream)
  {
    stream.setBigEndian(!little_endian);
    stream.setCodeSet(getCodeSet());
    stream.setVersion(giop);
    stream.setOrb(orb);
  }

  /**
   * Set the current code set context and derive the charset names and
   * native-charset flags from it.
   */
  public void setCodeSet(CodeSetServiceContext a_codeset)
  {
    this.codeset = a_codeset;
    narrow_charset = CharSets_OSF.getName(codeset.char_data);
    wide_charset = CharSets_OSF.getName(codeset.wide_char_data);
    narrow_native = CharSets_OSF.NATIVE_CHARACTER == codeset.char_data;
    wide_native = CharSets_OSF.NATIVE_WIDE_CHARACTER == codeset.wide_char_data;
  }

  /**
   * Get the current code set context.
   */
  public CodeSetServiceContext getCodeSet()
  {
    return codeset;
  }

  /**
   * Set the orb, associated with this stream.
   * @param an_orb the orb to associate.
   */
  public void setOrb(ORB an_orb)
  {
    orb = an_orb;
  }

  /**
   * Set the output stream that receives the CORBA output.
   * The endianness chosen via {@link #setBigEndian} decides which
   * primitive-encoding wrapper is used.
   *
   * @param writeTo the stream.
   */
  public void setOutputStream(java.io.OutputStream writeTo)
  {
    if (little_endian)
      b = new LittleEndianOutputStream(writeTo);
    else
      b = new BigEndianOutputStream(writeTo);

    actual_stream = writeTo;
  }

  /**
   * Set the GIOP version. Some data types are written differently
   * for the different versions. The default version is 1.0 .
   */
  public void setVersion(Version giop_version)
  {
    giop = giop_version;
  }

  /**
   * Specify if the stream should use the Big Endian (usual for java)
   * or Little Endian encoding. The default is Big Endian.
   *
   * @param use_big_endian if true, use Big Endian, if false,
   * use Little Endian.
   */
  public void setBigEndian(boolean use_big_endian)
  {
    little_endian = !use_big_endian;
    // Re-wrap the underlying stream so that the new byte order takes effect.
    setOutputStream(actual_stream);
  }

  /**
   * Align the current position at the given natural boundary.
   */
  public abstract void align(int boundary);

  /**
   * Create the encapsulation stream, associated with the current
   * stream. The encapsulated stream must be closed. When being
   * closed, the encapsulation stream writes its buffer into
   * this stream using the CORBA CDR encapsulation rules.
   *
   * It is not allowed to write to the current stream directly
   * before the encapsulation stream is closed.
   *
   * The encoding (Big/Little Endian) inside the encapsulated
   * sequence is the same as used into the parent stream.
   *
   * @return the encapsulated stream.
   */
  public AbstractCdrOutput createEncapsulation()
  {
    return new EncapsulationStream(this, !little_endian);
  }

  /**
   * Return the associated {@link ORB}.
   * @return the associated {@link ORB} or null is no such is set.
   */
  public ORB orb()
  {
    return orb;
  }

  /**
   * Write a single byte.
   * @param n a byte to write (low 8 bits are written).
   */
  public void write(int n)
  {
    try
      {
        b.write(n);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Write bytes directly into the underlying stream.
   */
  public void write(byte[] x)
    throws java.io.IOException
  {
    b.write(x);
  }

  /**
   * Write bytes directly into the underlying stream.
   */
  public void write(byte[] x, int ofs, int len)
    throws java.io.IOException
  {
    b.write(x, ofs, len);
  }

  /**
   * Following the specification, this is not implemented.
   * Override to get the functionality.
   */
  public void write_Context(Context context, ContextList contexts)
  {
    throw new NO_IMPLEMENT();
  }

  /**
   * Write the CORBA object. The object is written as a plain (not a
   * string-encoded) IOR profile without the heading endian indicator. The
   * responsible method for reading such data is {@link IOR#write_no_endian}.
   *
   * The null value is written as defined in OMG specification (zero length
   * string, followed by an empty set of profiles).
   */
  public void write_Object(org.omg.CORBA.Object x)
  {
    ORB w_orb = orb;
    if (x instanceof IorProvider)
      {
        ((IorProvider) x).getIor()._write_no_endian(this);
        return;
      }
    else if (x == null)
      {
        IOR.write_null(this);
        return;
      }
    else if (x instanceof ObjectImpl)
      {
        Delegate d = ((ObjectImpl) x)._get_delegate();

        if (d instanceof IorProvider)
          {
            ((IorProvider) d).getIor()._write_no_endian(this);
            return;
          }
        else
          {
            ORB d_orb = d.orb(x);
            if (d_orb != null)
              w_orb = d_orb;
          }
      }

    // Either this is not an ObjectImpl or it has the
    // unexpected delegate. Try to convert via ORBs
    // object_to_string().
    if (w_orb != null)
      {
        IOR ior = IOR.parse(w_orb.object_to_string(x));
        ior._write_no_endian(this);
        return;
      }
    else
      throw new BAD_OPERATION(
        "Please set the ORB for this stream, cannot write "
          + x.getClass().getName());
  }

  /**
   * Write the TypeCode. This implementation delegates functionality
   * to {@link TypeCodeHelper}.
   *
   * @param x a TypeCode to write.
   */
  public void write_TypeCode(TypeCode x)
  {
    try
      {
        TypeCodeHelper.write(this, x);
      }
    catch (UserException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes an instance of the CORBA {@link Any}.
   * This method writes the typecode, followed
   * by value itself. If the Any contains null
   * (value not set), the {@link TCKind#tk_null}
   * is written.
   *
   * @param x the {@link Any} to write.
   */
  public void write_any(Any x)
  {
    Streamable value = x.extract_Streamable();
    if (value != null)
      {
        write_TypeCode(x.type());
        value._write(this);
      }
    else
      {
        PrimitiveTypeCode p = new PrimitiveTypeCode(TCKind.tk_null);
        write_TypeCode(p);
      }
  }

  /**
   * Writes a single byte, 0 for <code>false</code>,
   * 1 for <code>true</code>.
   *
   * @param x the value to write
   */
  public void write_boolean(boolean x)
  {
    try
      {
        b.write(x ? 1 : 0);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the boolean array, one byte per element (0 or 1).
   *
   * @param x array
   * @param ofs offset
   * @param len length.
   */
  public void write_boolean_array(boolean[] x, int ofs, int len)
  {
    try
      {
        for (int i = ofs; i < ofs + len; i++)
          {
            b.write(x [ i ] ? 1 : 0);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the lower byte of the passed parameter.
   * @param x the char to write
   *
   * It is effective to write more characters at once.
   */
  public void write_char(char x)
  {
    try
      {
        if (narrow_native)
          b.write(x);
        else
          {
            // Convert through the configured narrow charset.
            OutputStreamWriter ow =
              new OutputStreamWriter((OutputStream) b, narrow_charset);
            ow.write(x);
            ow.flush();
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the lower bytes of the passed array members.
   *
   * @param chars an array
   * @param offset offset
   * @param length length
   */
  public void write_char_array(char[] chars, int offset, int length)
  {
    try
      {
        if (narrow_native)
          {
            for (int i = offset; i < offset + length; i++)
              {
                b.write(chars [ i ]);
              }
          }
        else
          {
            // Convert through the configured narrow charset.
            OutputStreamWriter ow =
              new OutputStreamWriter((OutputStream) b, narrow_charset);
            ow.write(chars, offset, length);
            ow.flush();
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the double value (IEEE 754 format), aligned at the 8 byte
   * boundary.
   */
  public void write_double(double x)
  {
    try
      {
        align(8);
        b.writeDouble(x);
      }
    catch (Exception ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the array of double values. Only the first element is aligned;
   * the subsequent 8-byte elements stay aligned automatically.
   */
  public void write_double_array(double[] x, int ofs, int len)
  {
    try
      {
        align(8);
        for (int i = ofs; i < ofs + len; i++)
          {
            b.writeDouble(x [ i ]);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes CORBA fixed, storing all digits but not the scale.
   * The end of the record on <code>fixed</code> can
   * be determined from its last byte.
   */
  public void write_fixed(BigDecimal fixed)
  {
    try
      {
        BigDecimalHelper.write(this, fixed);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
    catch (BadKind ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Write the float value (IEEE 754 format), aligned at the 4 byte boundary.
   */
  public void write_float(float x)
  {
    try
      {
        align(4);
        b.writeFloat(x);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes an array of the float values.
   */
  public void write_float_array(float[] x, int ofs, int len)
  {
    try
      {
        align(4);
        for (int i = ofs; i < ofs + len; i++)
          {
            b.writeFloat(x [ i ]);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the integer value (CORBA long, four bytes, high byte first).
   * @param x the value to write.
   */
  public void write_long(int x)
  {
    try
      {
        align(4);
        b.writeInt(x);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the array of integer (CORBA long) values.
   *
   * @param x value
   * @param ofs offset
   * @param len length
   */
  public void write_long_array(int[] x, int ofs, int len)
  {
    try
      {
        align(4);
        for (int i = ofs; i < ofs + len; i++)
          {
            b.writeInt(x [ i ]);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the long (CORBA long long) value, 8 bytes,
   * high byte first.
   *
   * @param x the value to write.
   */
  public void write_longlong(long x)
  {
    try
      {
        align(8);
        b.writeLong(x);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the array of longs (CORBA long longs) values.
   *
   * @param x value
   * @param ofs offset
   * @param len length
   */
  public void write_longlong_array(long[] x, int ofs, int len)
  {
    try
      {
        align(8);
        for (int i = ofs; i < ofs + len; i++)
          {
            b.writeLong(x [ i ]);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes this byte.
   * @param x the byte (CORBA octet) to write.
   */
  public void write_octet(byte x)
  {
    try
      {
        b.writeByte(x);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the array of bytes (CORBA octets) values.
   *
   * @param x value
   * @param ofs offset
   * @param len length
   */
  public void write_octet_array(byte[] x, int ofs, int len)
  {
    try
      {
        b.write(x, ofs, len);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes first the size of array, and then the byte array using
   * the {@link java.io.OutputStream#write(byte[]) }. The sequence
   * being written is preceded by the int, representing the array
   * length.
   */
  public void write_sequence(byte[] buf)
  {
    try
      {
        write_long(buf.length);
        write(buf);
      }
    catch (IOException ex)
      {
        MARSHAL t = new MARSHAL();
        t.minor = Minor.CDR;
        t.initCause(ex);
        throw t;
      }
  }

  /**
   * Writes the contents of the provided stream.
   * The sequence being written is preceded by the int,
   * representing the stream buffer length (the number of
   * bytes being subsequently written).
   */
  public void write_sequence(BufferedCdrOutput from)
  {
    try
      {
        write_long(from.buffer.size());
        from.buffer.writeTo(this);
      }
    catch (IOException ex)
      {
        MARSHAL t = new MARSHAL();
        t.minor = Minor.CDR;
        t.initCause(ex);
        throw t;
      }
  }

  /**
   * Writes the two byte integer (short), high byte first.
   *
   * @param x the integer to write.
   */
  public void write_short(short x)
  {
    try
      {
        align(2);
        b.writeShort(x);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the array of short (two byte integer) values.
   *
   * @param x value
   * @param ofs offset
   * @param len length
   */
  public void write_short_array(short[] x, int ofs, int len)
  {
    try
      {
        align(2);
        for (int i = ofs; i < ofs + len; i++)
          {
            b.writeShort(x [ i ]);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the string. This implementation first calls
   * String.getBytes() and then writes the length of the returned
   * array (as CORBA ulong) and the returned array itself.
   * The written length includes the trailing null terminator byte.
   *
   * The encoding information, if previously set, is taken
   * into consideration.
   *
   * @param x the string to write.
   */
  public void write_string(String x)
  {
    try
      {
        byte[] ab = x.getBytes(narrow_charset);
        write_long(ab.length + 1);
        write(ab);

        // write null terminator.
        write(0);
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the CORBA unsigned long in the same way as CORBA long.
   */
  public void write_ulong(int x)
  {
    write_long(x);
  }

  /**
   * Writes the array of CORBA unsigned longs in the same way as
   * array of ordinary longs.
   */
  public void write_ulong_array(int[] x, int ofs, int len)
  {
    write_long_array(x, ofs, len);
  }

  /**
   * Write the unsigned long long in the same way as an ordinary long long.
   *
   * @param x a value to write.
   */
  public void write_ulonglong(long x)
  {
    write_longlong(x);
  }

  /**
   * Write the array of unsigned long longs in the same way
   * as an array of the ordinary long longs.
   */
  public void write_ulonglong_array(long[] x, int ofs, int len)
  {
    write_longlong_array(x, ofs, len);
  }

  /**
   * Write the unsigned short in the same way as an ordinary short.
   */
  public void write_ushort(short x)
  {
    write_short(x);
  }

  /**
   * Write an array of unsigned short integers in the same way
   * as an array of ordinary short integers.
   */
  public void write_ushort_array(short[] x, int ofs, int len)
  {
    write_short_array(x, ofs, len);
  }

  /**
   * Writes the character as two byte short integer (Unicode value), high byte
   * first. Writes in Big Endian, but never writes the endian indicator.
   *
   * For GIOP 1.2 and later the character is preceded by a one-byte length
   * indicator; for GIOP 1.1 and earlier it is aligned at the 2 byte boundary.
   */
  public void write_wchar(char x)
  {
    try
      {
        if (giop.until_inclusive(1, 1))
          {
            align(2);

            if (wide_native)
              b.writeShort(x);
            else
              {
                OutputStreamWriter ow = new OutputStreamWriter(
                  (OutputStream) b, wide_charset);
                ow.write(x);
                ow.flush();
              }
          }
        else if (wide_native)
          {
            // GIOP 1.2+: one length octet (2 bytes follow), then the char.
            b.writeByte(2);
            b.writeChar(x);
          }
        else
          {
            String encoded = new String(new char[] { x });
            byte[] bytes = encoded.getBytes(wide_charset);
            // NOTE(review): the length octet is written as bytes.length + 2
            // while only bytes.length bytes follow — confirm this against the
            // GIOP 1.2 wchar encoding rules.
            b.write(bytes.length + 2);
            b.write(bytes);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Write the array of wide chars.
   *
   * @param chars the array of wide chars
   * @param offset offset
   * @param length length
   *
   * In the native wide encoding each char is written as a two byte short;
   * otherwise the configured wide charset is used.
   */
  public void write_wchar_array(char[] chars, int offset, int length)
  {
    try
      {
        if (giop.until_inclusive(1, 1))
          align(2);

        if (wide_native)
          {
            for (int i = offset; i < offset + length; i++)
              {
                b.writeShort(chars [ i ]);
              }
          }
        else
          {
            OutputStreamWriter ow =
              new OutputStreamWriter((OutputStream) b, wide_charset);
            ow.write(chars, offset, length);
            ow.flush();
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /**
   * Writes the length of the string in bytes (not characters) and
   * then all characters. For GIOP 1.2 and later the configured wide charset
   * is used without a terminator; for earlier versions the characters are
   * written as two byte unicode chars with a null wide char at the end.
   *
   * @param x the string to write.
   */
  public void write_wstring(String x)
  {
    try
      {
        if (giop.since_inclusive(1, 2))
          {
            byte[] bytes = x.getBytes(wide_charset);
            write_sequence(bytes);
          }
        else
          {
            // Encoding with null terminator always in UTF-16.
            // The wide null terminator needs extra two bytes.
            write_long(2 * x.length() + 2);

            for (int i = 0; i < x.length(); i++)
              {
                b.writeShort(x.charAt(i));
              }

            // Write null terminator.
            b.writeShort(0);
          }
      }
    catch (IOException ex)
      {
        Unexpected.error(ex);
      }
  }

  /** {@inheritDoc} */
  public void write_any_array(Any[] anys, int offset, int length)
  {
    for (int i = offset; i < offset + length; i++)
      {
        write_any(anys [ i ]);
      }
  }

  public String[] _truncatable_ids()
  {
    /**@todo Implement this org.omg.CORBA.portable.ValueBase abstract method*/
    throw new java.lang.UnsupportedOperationException("Method _truncatable_ids() not yet implemented.");
  }

  /** {@inheritDoc} */
  public void write_Abstract(java.lang.Object value)
  {
    write_abstract_interface(value);
  }

  /** {@inheritDoc} */
  public void write_Value(Serializable value)
  {
    write_value(value);
  }
}
| |
/*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.discovery.ec2;
import com.amazonaws.util.IOUtils;
import com.sun.net.httpserver.Headers;
import com.sun.net.httpserver.HttpServer;
import org.apache.http.NameValuePair;
import org.apache.http.client.utils.URLEncodedUtils;
import org.elasticsearch.cloud.aws.AwsEc2Service;
import org.elasticsearch.common.SuppressForbidden;
import org.elasticsearch.common.io.FileSystemUtils;
import org.elasticsearch.common.logging.Loggers;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.plugin.discovery.ec2.Ec2DiscoveryPlugin;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import javax.xml.XMLConstants;
import javax.xml.stream.XMLOutputFactory;
import javax.xml.stream.XMLStreamException;
import javax.xml.stream.XMLStreamWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoTimeout;
import static org.hamcrest.Matchers.equalTo;
@ESIntegTestCase.SuppressLocalMode
@ESIntegTestCase.ClusterScope(supportsDedicatedMasters = false, numDataNodes = 2, numClientNodes = 0)
@SuppressForbidden(reason = "use http server")
// TODO this should be a IT but currently all ITs in this project run against a real cluster
public class Ec2DiscoveryClusterFormationTests extends ESIntegTestCase {
// Mock EC2 endpoint answering DescribeInstances calls; created in startHttpd().
private static HttpServer httpServer;
// Temporary directory holding one log sub-directory per node; created in startHttpd().
private static Path logDir;
/** Registers the EC2 discovery plugin on every test node. */
@Override
protected Collection<Class<? extends Plugin>> nodePlugins() {
return pluginList(Ec2DiscoveryPlugin.class);
}
/**
 * Builds per-node settings: EC2 discovery enabled, dummy AWS credentials, and
 * the EC2 endpoint pointed at the local mock HTTP server.
 */
@Override
protected Settings nodeSettings(int nodeOrdinal) {
// Give each node its own log directory; the node writes a "transport.ports"
// file there which the mock endpoint reads back.
Path resolve = logDir.resolve(Integer.toString(nodeOrdinal));
try {
Files.createDirectory(resolve);
} catch (IOException e) {
throw new RuntimeException(e);
}
return Settings.builder().put(super.nodeSettings(nodeOrdinal))
.put("discovery.type", "ec2")
.put("path.logs", resolve)
// Port 0 = pick a free port; portsfile makes the node publish it on disk.
.put("transport.tcp.port", 0)
.put("node.portsfile", "true")
.put("cloud.aws.access_key", "some_access")
.put("cloud.aws.secret_key", "some_key")
.put(AwsEc2Service.CLOUD_EC2.ENDPOINT_SETTING.getKey(), "http://" + httpServer.getAddress().getHostName() + ":" +
httpServer.getAddress().getPort())
.build();
}
/**
* Creates mock EC2 endpoint providing the list of started nodes to the DescribeInstances API call
*/
@BeforeClass
public static void startHttpd() throws Exception {
logDir = createTempDir();
httpServer = HttpServer.create(new InetSocketAddress(InetAddress.getLoopbackAddress().getHostAddress(), 0), 0);
httpServer.createContext("/", (s) -> {
Headers headers = s.getResponseHeaders();
headers.add("Content-Type", "text/xml; charset=UTF-8");
String action = null;
for (NameValuePair parse : URLEncodedUtils.parse(IOUtils.toString(s.getRequestBody()), StandardCharsets.UTF_8)) {
if ("Action".equals(parse.getName())) {
action = parse.getValue();
break;
}
}
assertThat(action, equalTo("DescribeInstances"));
XMLOutputFactory xmlOutputFactory = XMLOutputFactory.newFactory();
xmlOutputFactory.setProperty(XMLOutputFactory.IS_REPAIRING_NAMESPACES, true);
StringWriter out = new StringWriter();
XMLStreamWriter sw;
try {
sw = xmlOutputFactory.createXMLStreamWriter(out);
sw.writeStartDocument();
String namespace = "http://ec2.amazonaws.com/doc/2013-02-01/";
sw.setDefaultNamespace(namespace);
sw.writeStartElement(XMLConstants.DEFAULT_NS_PREFIX, "DescribeInstancesResponse", namespace);
{
sw.writeStartElement("requestId");
sw.writeCharacters(UUID.randomUUID().toString());
sw.writeEndElement();
sw.writeStartElement("reservationSet");
{
Path[] files = FileSystemUtils.files(logDir);
for (int i = 0; i < files.length; i++) {
Path resolve = files[i].resolve("transport.ports");
if (Files.exists(resolve)) {
List<String> addresses = Files.readAllLines(resolve);
Collections.shuffle(addresses, random());
sw.writeStartElement("item");
{
sw.writeStartElement("reservationId");
sw.writeCharacters(UUID.randomUUID().toString());
sw.writeEndElement();
sw.writeStartElement("instancesSet");
{
sw.writeStartElement("item");
{
sw.writeStartElement("instanceId");
sw.writeCharacters(UUID.randomUUID().toString());
sw.writeEndElement();
sw.writeStartElement("imageId");
sw.writeCharacters(UUID.randomUUID().toString());
sw.writeEndElement();
sw.writeStartElement("instanceState");
{
sw.writeStartElement("code");
sw.writeCharacters("16");
sw.writeEndElement();
sw.writeStartElement("name");
sw.writeCharacters("running");
sw.writeEndElement();
}
sw.writeEndElement();
sw.writeStartElement("privateDnsName");
sw.writeCharacters(addresses.get(0));
sw.writeEndElement();
sw.writeStartElement("dnsName");
sw.writeCharacters(addresses.get(0));
sw.writeEndElement();
sw.writeStartElement("instanceType");
sw.writeCharacters("m1.medium");
sw.writeEndElement();
sw.writeStartElement("placement");
{
sw.writeStartElement("availabilityZone");
sw.writeCharacters("use-east-1e");
sw.writeEndElement();
sw.writeEmptyElement("groupName");
sw.writeStartElement("tenancy");
sw.writeCharacters("default");
sw.writeEndElement();
}
sw.writeEndElement();
sw.writeStartElement("privateIpAddress");
sw.writeCharacters(addresses.get(0));
sw.writeEndElement();
sw.writeStartElement("ipAddress");
sw.writeCharacters(addresses.get(0));
sw.writeEndElement();
}
sw.writeEndElement();
}
sw.writeEndElement();
}
sw.writeEndElement();
}
}
}
sw.writeEndElement();
}
sw.writeEndElement();
sw.writeEndDocument();
sw.flush();
final byte[] responseAsBytes = out.toString().getBytes(StandardCharsets.UTF_8);
s.sendResponseHeaders(200, responseAsBytes.length);
OutputStream responseBody = s.getResponseBody();
responseBody.write(responseAsBytes);
responseBody.close();
} catch (XMLStreamException e) {
Loggers.getLogger(Ec2DiscoveryClusterFormationTests.class).error("Failed serializing XML", e);
throw new RuntimeException(e);
}
});
httpServer.start();
}
@AfterClass
public static void stopHttpd() throws IOException {
for (int i = 0; i < internalCluster().size(); i++) {
// shut them all down otherwise we get spammed with connection refused exceptions
internalCluster().stopRandomDataNode();
}
httpServer.stop(0);
httpServer = null;
logDir = null;
}
public void testJoin() throws ExecutionException, InterruptedException {
// only wait for the cluster to form
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(2)).get());
// add one more node and wait for it to join
internalCluster().startDataOnlyNodeAsync().get();
assertNoTimeout(client().admin().cluster().prepareHealth().setWaitForNodes(Integer.toString(3)).get());
}
}
| |
/*
* Copyright 2012-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.devicefarm.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* Represents the result of a list tests request.
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/devicefarm-2015-06-23/ListTests" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTestsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** Information about the tests. */
    private java.util.List<Test> tests;

    /**
     * Pagination token: returned when the result set is large, and passed to a
     * subsequent call to retrieve the next set of items.
     */
    private String nextToken;

    /**
     * Returns information about the tests.
     *
     * @return the tests, or {@code null} if none were set
     */
    public java.util.List<Test> getTests() {
        return tests;
    }

    /**
     * Replaces the stored tests with a copy of the given collection.
     *
     * @param tests information about the tests; {@code null} clears the list
     */
    public void setTests(java.util.Collection<Test> tests) {
        this.tests = (tests == null) ? null : new java.util.ArrayList<Test>(tests);
    }

    /**
     * Appends the given tests to the existing list, creating the list if absent.
     * Use {@link #setTests(java.util.Collection)} or {@link #withTests(java.util.Collection)}
     * to replace the existing values instead.
     *
     * @param tests information about the tests
     * @return this object, for call chaining
     */
    public ListTestsResult withTests(Test... tests) {
        if (this.tests == null) {
            this.tests = new java.util.ArrayList<Test>(tests.length);
        }
        java.util.Collections.addAll(this.tests, tests);
        return this;
    }

    /**
     * Replaces the stored tests with a copy of the given collection.
     *
     * @param tests information about the tests
     * @return this object, for call chaining
     */
    public ListTestsResult withTests(java.util.Collection<Test> tests) {
        setTests(tests);
        return this;
    }

    /**
     * Sets the pagination token used to fetch the next set of items.
     *
     * @param nextToken identifier returned by a previous call when more items remain
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the pagination token used to fetch the next set of items.
     *
     * @return the identifier returned when more items remain, or {@code null}
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the pagination token used to fetch the next set of items.
     *
     * @param nextToken identifier returned by a previous call when more items remain
     * @return this object, for call chaining
     */
    public ListTestsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object; useful for testing and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getTests() != null) {
            sb.append("Tests: ").append(getTests()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListTestsResult)) {
            return false;
        }
        ListTestsResult that = (ListTestsResult) obj;
        // Objects.equals gives the same null-safe field comparison as the generated code.
        return java.util.Objects.equals(that.getTests(), this.getTests())
                && java.util.Objects.equals(that.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getTests() == null ? 0 : getTests().hashCode());
        result = prime * result + (getNextToken() == null ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public ListTestsResult clone() {
        try {
            return (ListTestsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Copyright 2016 Red Hat, Inc. and/or its affiliates
* and other contributors as indicated by the @author tags.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.keycloak.subsystem.as7;
import org.jboss.as.controller.AttributeDefinition;
import org.jboss.as.controller.PathAddress;
import org.jboss.as.controller.PathElement;
import org.jboss.as.controller.SimpleAttributeDefinition;
import org.jboss.as.controller.descriptions.ModelDescriptionConstants;
import org.jboss.as.controller.parsing.ParseUtils;
import org.jboss.as.controller.persistence.SubsystemMarshallingContext;
import org.jboss.dmr.ModelNode;
import org.jboss.dmr.Property;
import org.jboss.staxmapper.XMLElementReader;
import org.jboss.staxmapper.XMLElementWriter;
import org.jboss.staxmapper.XMLExtendedStreamReader;
import org.jboss.staxmapper.XMLExtendedStreamWriter;
import javax.xml.stream.XMLStreamConstants;
import javax.xml.stream.XMLStreamException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
/**
* The subsystem parser, which uses stax to read and write to and from xml
*/
/**
 * The subsystem parser, which uses stax to read and write to and from xml.
 *
 * Reading produces a list of ADD operations ({@link ModelNode}s) for the subsystem,
 * its realms, secure deployments and credentials; writing marshals the model back
 * into the subsystem XML.
 */
class KeycloakSubsystemParser implements XMLStreamConstants, XMLElementReader<List<ModelNode>>, XMLElementWriter<SubsystemMarshallingContext> {

    /**
     * {@inheritDoc}
     */
    @Override
    public void readElement(final XMLExtendedStreamReader reader, final List<ModelNode> list) throws XMLStreamException {
        // Require no attributes
        ParseUtils.requireNoAttributes(reader);
        // The subsystem ADD operation always comes first, followed by its children.
        ModelNode addKeycloakSub = Util.createAddOperation(PathAddress.pathAddress(KeycloakExtension.PATH_SUBSYSTEM));
        list.add(addKeycloakSub);
        while (reader.hasNext() && nextTag(reader) != END_ELEMENT) {
            if (reader.getLocalName().equals(RealmDefinition.TAG_NAME)) {
                readRealm(reader, list);
            }
            else if (reader.getLocalName().equals(SecureDeploymentDefinition.TAG_NAME)) {
                readDeployment(reader, list);
            }
        }
    }

    // used for debugging
    private int nextTag(XMLExtendedStreamReader reader) throws XMLStreamException {
        return reader.nextTag();
    }

    /**
     * Reads one &lt;realm&gt; element into an ADD operation and appends it to {@code list}.
     * Each child tag must match a known realm attribute; unknown tags are an error.
     */
    private void readRealm(XMLExtendedStreamReader reader, List<ModelNode> list) throws XMLStreamException {
        String realmName = readNameAttribute(reader);
        ModelNode addRealm = new ModelNode();
        addRealm.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD);
        PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, KeycloakExtension.SUBSYSTEM_NAME),
                PathElement.pathElement(RealmDefinition.TAG_NAME, realmName));
        addRealm.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode());
        while (reader.hasNext() && nextTag(reader) != END_ELEMENT) {
            String tagName = reader.getLocalName();
            SimpleAttributeDefinition def = RealmDefinition.lookup(tagName);
            if (def == null) throw new XMLStreamException("Unknown realm tag " + tagName);
            def.parseAndSetParameter(reader.getElementText(), addRealm, reader);
        }
        // Cross-attribute validation: a truststore is mandatory under certain SSL settings.
        if (!SharedAttributeDefinitons.validateTruststoreSetIfRequired(addRealm)) {
            //TODO: externalize the message
            throw new XMLStreamException("truststore and truststore-password must be set if ssl-required is not none and disable-trust-manager is false.");
        }
        list.add(addRealm);
    }

    /**
     * Reads one &lt;secure-deployment&gt; element into an ADD operation (plus ADD
     * operations for any nested credentials) and appends them to {@code resourcesToAdd}.
     */
    private void readDeployment(XMLExtendedStreamReader reader, List<ModelNode> resourcesToAdd) throws XMLStreamException {
        String name = readNameAttribute(reader);
        ModelNode addSecureDeployment = new ModelNode();
        addSecureDeployment.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD);
        PathAddress addr = PathAddress.pathAddress(PathElement.pathElement(ModelDescriptionConstants.SUBSYSTEM, KeycloakExtension.SUBSYSTEM_NAME),
                PathElement.pathElement(SecureDeploymentDefinition.TAG_NAME, name));
        addSecureDeployment.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode());
        // Credentials are collected separately so they can be appended AFTER the
        // deployment ADD operation (their parent resource must exist first).
        List<ModelNode> credentialsToAdd = new ArrayList<>();
        while (reader.hasNext() && nextTag(reader) != END_ELEMENT) {
            String tagName = reader.getLocalName();
            if (tagName.equals(CredentialDefinition.TAG_NAME)) {
                readCredential(reader, addr, credentialsToAdd);
                continue;
            }
            SimpleAttributeDefinition def = SecureDeploymentDefinition.lookup(tagName);
            if (def == null) throw new XMLStreamException("Unknown secure-deployment tag " + tagName);
            def.parseAndSetParameter(reader.getElementText(), addSecureDeployment, reader);
        }
        /**
         * TODO need to check realm-ref first.
        if (!SharedAttributeDefinitons.validateTruststoreSetIfRequired(addSecureDeployment)) {
            //TODO: externalize the message
            throw new XMLStreamException("truststore and truststore-password must be set if ssl-required is not none and disable-trust-manager is false.");
        }
        */
        // Must add credentials after the deployment is added.
        resourcesToAdd.add(addSecureDeployment);
        resourcesToAdd.addAll(credentialsToAdd);
    }

    /**
     * Reads one &lt;credential&gt; element. A credential is either a plain text value
     * (e.g. "secret") producing a single ADD, or a set of nested key/value elements,
     * each producing an ADD named "&lt;credential-name&gt;.&lt;key&gt;".
     */
    public void readCredential(XMLExtendedStreamReader reader, PathAddress parent, List<ModelNode> credentialsToAdd) throws XMLStreamException {
        String name = readNameAttribute(reader);

        Map<String, String> values = new HashMap<>();
        String textValue = null;
        while (reader.hasNext()) {
            int next = reader.next();
            if (next == CHARACTERS) {
                // text value of credential element (like for "secret" )
                String text = reader.getText();
                if (text == null || text.trim().isEmpty()) {
                    continue;
                }
                textValue = text;
            } else if (next == START_ELEMENT) {
                // Nested element form: <key>value</key>; consume the text and the end tag.
                String key = reader.getLocalName();
                reader.next();
                String value = reader.getText();
                reader.next();
                values.put(key, value);
            } else if (next == END_ELEMENT) {
                break;
            }
        }

        if (textValue != null) {
            ModelNode addCredential = getCredentialToAdd(parent, name, textValue);
            credentialsToAdd.add(addCredential);
        } else {
            for (Map.Entry<String, String> entry : values.entrySet()) {
                ModelNode addCredential = getCredentialToAdd(parent, name + "." + entry.getKey(), entry.getValue());
                credentialsToAdd.add(addCredential);
            }
        }
    }

    /** Builds the ADD operation for a single credential resource under {@code parent}. */
    private ModelNode getCredentialToAdd(PathAddress parent, String name, String value) {
        ModelNode addCredential = new ModelNode();
        addCredential.get(ModelDescriptionConstants.OP).set(ModelDescriptionConstants.ADD);
        PathAddress addr = PathAddress.pathAddress(parent, PathElement.pathElement(CredentialDefinition.TAG_NAME, name));
        addCredential.get(ModelDescriptionConstants.OP_ADDR).set(addr.toModelNode());
        addCredential.get(CredentialDefinition.VALUE.getName()).set(value);
        return addCredential;
    }

    // expects that the current tag will have one single attribute called "name"
    private String readNameAttribute(XMLExtendedStreamReader reader) throws XMLStreamException {
        String name = null;
        for (int i = 0; i < reader.getAttributeCount(); i++) {
            String attr = reader.getAttributeLocalName(i);
            if (attr.equals("name")) {
                name = reader.getAttributeValue(i);
                continue;
            }
            throw ParseUtils.unexpectedAttribute(reader, i);
        }
        if (name == null) {
            throw ParseUtils.missingRequired(reader, Collections.singleton("name"));
        }
        return name;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public void writeContent(final XMLExtendedStreamWriter writer, final SubsystemMarshallingContext context) throws XMLStreamException {
        context.startSubsystemElement(KeycloakExtension.NAMESPACE, false);
        writeRealms(writer, context);
        writeSecureDeployments(writer, context);
        writer.writeEndElement();
    }

    /** Marshals every realm in the model as a &lt;realm name="..."&gt; element. */
    private void writeRealms(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException {
        if (!context.getModelNode().get(RealmDefinition.TAG_NAME).isDefined()) {
            return;
        }
        for (Property realm : context.getModelNode().get(RealmDefinition.TAG_NAME).asPropertyList()) {
            writer.writeStartElement(RealmDefinition.TAG_NAME);
            writer.writeAttribute("name", realm.getName());
            ModelNode realmElements = realm.getValue();
            for (AttributeDefinition element : RealmDefinition.ALL_ATTRIBUTES) {
                element.marshallAsElement(realmElements, writer);
            }
            writer.writeEndElement();
        }
    }

    /** Marshals every secure deployment, including its nested credentials, if any. */
    private void writeSecureDeployments(XMLExtendedStreamWriter writer, SubsystemMarshallingContext context) throws XMLStreamException {
        if (!context.getModelNode().get(SecureDeploymentDefinition.TAG_NAME).isDefined()) {
            return;
        }
        for (Property deployment : context.getModelNode().get(SecureDeploymentDefinition.TAG_NAME).asPropertyList()) {
            writer.writeStartElement(SecureDeploymentDefinition.TAG_NAME);
            writer.writeAttribute("name", deployment.getName());
            ModelNode deploymentElements = deployment.getValue();
            for (AttributeDefinition element : SecureDeploymentDefinition.ALL_ATTRIBUTES) {
                element.marshallAsElement(deploymentElements, writer);
            }
            ModelNode credentials = deploymentElements.get(CredentialDefinition.TAG_NAME);
            if (credentials.isDefined()) {
                writeCredentials(writer, credentials);
            }
            writer.writeEndElement();
        }
    }

    /**
     * Marshals credentials, reversing the "provider.key" naming used on read: names
     * containing a dot are regrouped into one &lt;credential&gt; per provider with one
     * nested element per key; plain names become a single text-valued element.
     */
    private void writeCredentials(XMLExtendedStreamWriter writer, ModelNode credentials) throws XMLStreamException {
        // Values are either String (simple credential) or Map<String, String>
        // (dotted, multi-part credential). LinkedHashMap preserves model order.
        Map<String, Object> parsed = new LinkedHashMap<>();
        for (Property credential : credentials.asPropertyList()) {
            String credName = credential.getName();
            String credValue = credential.getValue().get(CredentialDefinition.VALUE.getName()).asString();
            if (credName.indexOf('.') > -1) {
                String[] parts = credName.split("\\.");
                String provider = parts[0];
                String propKey = parts[1];
                // NOTE(review): cast assumes a dotted and a plain credential never share
                // the same provider prefix — a mix would throw ClassCastException here.
                Map<String, String> currentProviderMap = (Map<String, String>) parsed.get(provider);
                if (currentProviderMap == null) {
                    currentProviderMap = new LinkedHashMap<>();
                    parsed.put(provider, currentProviderMap);
                }
                currentProviderMap.put(propKey, credValue);
            } else {
                parsed.put(credName, credValue);
            }
        }
        for (Map.Entry<String, Object> entry : parsed.entrySet()) {
            writer.writeStartElement(CredentialDefinition.TAG_NAME);
            writer.writeAttribute("name", entry.getKey());
            Object value = entry.getValue();
            if (value instanceof String) {
                writeCharacters(writer, (String) value);
            } else {
                Map<String, String> credentialProps = (Map<String, String>) value;
                for (Map.Entry<String, String> prop : credentialProps.entrySet()) {
                    writer.writeStartElement(prop.getKey());
                    writeCharacters(writer, prop.getValue());
                    writer.writeEndElement();
                }
            }
            writer.writeEndElement();
        }
    }

    // code taken from org.jboss.as.controller.AttributeMarshaller
    private void writeCharacters(XMLExtendedStreamWriter writer, String content) throws XMLStreamException {
        if (content.indexOf('\n') > -1) {
            // Multiline content. Use the overloaded variant that staxmapper will format
            writer.writeCharacters(content);
        } else {
            // Staxmapper will just output the chars without adding newlines if this is used
            char[] chars = content.toCharArray();
            writer.writeCharacters(chars, 0, chars.length);
        }
    }
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.datasync.model;
import java.io.Serializable;
import javax.annotation.Generated;
/**
* <p>
* ListTaskExecutionsResponse
* </p>
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/datasync-2018-11-09/ListTaskExecutions" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class ListTaskExecutionsResult extends com.amazonaws.AmazonWebServiceResult<com.amazonaws.ResponseMetadata> implements Serializable, Cloneable {

    /** A list of executed tasks. */
    private java.util.List<TaskExecutionListEntry> taskExecutions;

    /**
     * Opaque pagination token indicating where the next list of executed tasks begins.
     */
    private String nextToken;

    /**
     * Returns the list of executed tasks.
     *
     * @return the executed tasks, or {@code null} if none were set
     */
    public java.util.List<TaskExecutionListEntry> getTaskExecutions() {
        return taskExecutions;
    }

    /**
     * Replaces the stored task executions with a copy of the given collection.
     *
     * @param taskExecutions a list of executed tasks; {@code null} clears the list
     */
    public void setTaskExecutions(java.util.Collection<TaskExecutionListEntry> taskExecutions) {
        this.taskExecutions = (taskExecutions == null) ? null : new java.util.ArrayList<TaskExecutionListEntry>(taskExecutions);
    }

    /**
     * Appends the given task executions to the existing list, creating the list if
     * absent. Use {@link #setTaskExecutions(java.util.Collection)} or
     * {@link #withTaskExecutions(java.util.Collection)} to replace the existing values.
     *
     * @param taskExecutions a list of executed tasks
     * @return this object, for call chaining
     */
    public ListTaskExecutionsResult withTaskExecutions(TaskExecutionListEntry... taskExecutions) {
        if (this.taskExecutions == null) {
            this.taskExecutions = new java.util.ArrayList<TaskExecutionListEntry>(taskExecutions.length);
        }
        java.util.Collections.addAll(this.taskExecutions, taskExecutions);
        return this;
    }

    /**
     * Replaces the stored task executions with a copy of the given collection.
     *
     * @param taskExecutions a list of executed tasks
     * @return this object, for call chaining
     */
    public ListTaskExecutionsResult withTaskExecutions(java.util.Collection<TaskExecutionListEntry> taskExecutions) {
        setTaskExecutions(taskExecutions);
        return this;
    }

    /**
     * Sets the opaque token indicating where the next list of executed tasks begins.
     *
     * @param nextToken the pagination token
     */
    public void setNextToken(String nextToken) {
        this.nextToken = nextToken;
    }

    /**
     * Returns the opaque token indicating where the next list of executed tasks begins.
     *
     * @return the pagination token, or {@code null}
     */
    public String getNextToken() {
        return this.nextToken;
    }

    /**
     * Sets the opaque token indicating where the next list of executed tasks begins.
     *
     * @param nextToken the pagination token
     * @return this object, for call chaining
     */
    public ListTaskExecutionsResult withNextToken(String nextToken) {
        setNextToken(nextToken);
        return this;
    }

    /**
     * Returns a string representation of this object. This is useful for testing and
     * debugging. Sensitive data will be redacted from this string using a placeholder value.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getTaskExecutions() != null) {
            sb.append("TaskExecutions: ").append(getTaskExecutions()).append(",");
        }
        if (getNextToken() != null) {
            sb.append("NextToken: ").append(getNextToken());
        }
        return sb.append("}").toString();
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof ListTaskExecutionsResult)) {
            return false;
        }
        ListTaskExecutionsResult that = (ListTaskExecutionsResult) obj;
        // Objects.equals gives the same null-safe field comparison as the generated code.
        return java.util.Objects.equals(that.getTaskExecutions(), this.getTaskExecutions())
                && java.util.Objects.equals(that.getNextToken(), this.getNextToken());
    }

    @Override
    public int hashCode() {
        final int prime = 31;
        int result = 1;
        result = prime * result + (getTaskExecutions() == null ? 0 : getTaskExecutions().hashCode());
        result = prime * result + (getNextToken() == null ? 0 : getNextToken().hashCode());
        return result;
    }

    @Override
    public ListTaskExecutionsResult clone() {
        try {
            return (ListTaskExecutionsResult) super.clone();
        } catch (CloneNotSupportedException e) {
            throw new IllegalStateException("Got a CloneNotSupportedException from Object.clone() even though we're Cloneable!", e);
        }
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.github.jmscomponents.kafka.jms.message.wrapper;
import javax.jms.Destination;
import javax.jms.JMSException;
import javax.jms.Message;
import java.util.Enumeration;
public abstract class DelegatingMessage<T extends Message> implements Message {
public abstract T delegate();
@Override
public String getJMSMessageID() throws JMSException
{
return delegate().getJMSMessageID();
}
@Override
public void setJMSMessageID(String id) throws JMSException
{
delegate().setJMSMessageID(id);
}
@Override
public long getJMSTimestamp() throws JMSException
{
return delegate().getJMSTimestamp();
}
@Override
public void setJMSTimestamp(long timestamp) throws JMSException
{
delegate().setJMSTimestamp(timestamp);
}
@Override
public byte[] getJMSCorrelationIDAsBytes() throws JMSException
{
return delegate().getJMSCorrelationIDAsBytes();
}
@Override
public void setJMSCorrelationIDAsBytes(byte[] correlationID) throws JMSException
{
delegate().setJMSCorrelationIDAsBytes(correlationID);
}
@Override
public void setJMSCorrelationID(String correlationID) throws JMSException
{
delegate().setJMSCorrelationID(correlationID);
}
@Override
public String getJMSCorrelationID() throws JMSException
{
return delegate().getJMSCorrelationID();
}
@Override
public Destination getJMSReplyTo() throws JMSException
{
return delegate().getJMSReplyTo();
}
@Override
public void setJMSReplyTo(Destination replyTo) throws JMSException
{
delegate().setJMSReplyTo(replyTo);
}
@Override
public Destination getJMSDestination() throws JMSException
{
return delegate().getJMSDestination();
}
@Override
public void setJMSDestination(Destination destination) throws JMSException
{
delegate().setJMSDestination(destination);
}
@Override
public int getJMSDeliveryMode() throws JMSException
{
return delegate().getJMSDeliveryMode();
}
@Override
public void setJMSDeliveryMode(int deliveryMode) throws JMSException
{
delegate().setJMSDeliveryMode(deliveryMode);
}
@Override
public boolean getJMSRedelivered() throws JMSException
{
return delegate().getJMSRedelivered();
}
@Override
public void setJMSRedelivered(boolean redelivered) throws JMSException
{
delegate().setJMSRedelivered(redelivered);
}
@Override
public String getJMSType() throws JMSException
{
return delegate().getJMSType();
}
@Override
public void setJMSType(String type) throws JMSException
{
delegate().setJMSType(type);
}
@Override
public long getJMSExpiration() throws JMSException
{
return delegate().getJMSExpiration();
}
@Override
public void setJMSExpiration(long expiration) throws JMSException
{
delegate().setJMSExpiration(expiration);
}
@Override
public int getJMSPriority() throws JMSException
{
return delegate().getJMSPriority();
}
@Override
public void setJMSPriority(int priority) throws JMSException
{
delegate().setJMSPriority(priority);
}
@Override
public void clearProperties() throws JMSException
{
delegate().clearProperties();
}
@Override
public boolean propertyExists(String name) throws JMSException
{
return delegate().propertyExists(name);
}
@Override
public boolean getBooleanProperty(String name) throws JMSException
{
return delegate().getBooleanProperty(name);
}
@Override
public byte getByteProperty(String name) throws JMSException
{
return delegate().getByteProperty(name);
}
@Override
public short getShortProperty(String name) throws JMSException
{
return delegate().getShortProperty(name);
}
@Override
public int getIntProperty(String name) throws JMSException
{
return delegate().getIntProperty(name);
}
@Override
public long getLongProperty(String name) throws JMSException
{
return delegate().getLongProperty(name);
}
@Override
public float getFloatProperty(String name) throws JMSException
{
return delegate().getFloatProperty(name);
}
@Override
public double getDoubleProperty(String name) throws JMSException
{
return delegate().getDoubleProperty(name);
}
@Override
public String getStringProperty(String name) throws JMSException
{
return delegate().getStringProperty(name);
}
@Override
public Object getObjectProperty(String name) throws JMSException
{
return delegate().getObjectProperty(name);
}
@Override
public Enumeration getPropertyNames() throws JMSException
{
return delegate().getPropertyNames();
}
// --- Typed property setters -------------------------------------------
// Mirrors the getter block above; each setter forwards name/value
// unchanged to the wrapped message.
@Override
public void setBooleanProperty(String name, boolean value) throws JMSException
{
delegate().setBooleanProperty(name, value);
}
@Override
public void setByteProperty(String name, byte value) throws JMSException
{
delegate().setByteProperty(name, value);
}
@Override
public void setShortProperty(String name, short value) throws JMSException
{
delegate().setShortProperty(name, value);
}
@Override
public void setIntProperty(String name, int value) throws JMSException
{
delegate().setIntProperty(name, value);
}
@Override
public void setLongProperty(String name, long value) throws JMSException
{
delegate().setLongProperty(name, value);
}
@Override
public void setFloatProperty(String name, float value) throws JMSException
{
delegate().setFloatProperty(name, value);
}
@Override
public void setDoubleProperty(String name, double value) throws JMSException
{
delegate().setDoubleProperty(name, value);
}
@Override
public void setStringProperty(String name, String value) throws JMSException
{
delegate().setStringProperty(name, value);
}
@Override
public void setObjectProperty(String name, Object value) throws JMSException
{
delegate().setObjectProperty(name, value);
}
// --- Acknowledgement, body access and delivery-time delegates ---------
@Override
public void acknowledge() throws JMSException
{
delegate().acknowledge();
}
@Override
public void clearBody() throws JMSException
{
delegate().clearBody();
}
@Override
public long getJMSDeliveryTime() throws JMSException
{
return delegate().getJMSDeliveryTime();
}
@Override
public void setJMSDeliveryTime(long deliveryTime) throws JMSException
{
delegate().setJMSDeliveryTime(deliveryTime);
}
@Override
public <T> T getBody(Class<T> c) throws JMSException
{
return delegate().getBody(c);
}
// Raw Class parameter matches the javax.jms.Message (JMS 2.0)
// interface signature; kept raw so the override remains valid.
@Override
public boolean isBodyAssignableTo(Class c) throws JMSException
{
return delegate().isBodyAssignableTo(c);
}
}
| |
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.transfer.model;
import java.io.Serializable;
import javax.annotation.Generated;
import com.amazonaws.AmazonWebServiceRequest;
/**
*
* @see <a href="http://docs.aws.amazon.com/goto/WebAPI/transfer-2018-11-05/TagResource" target="_top">AWS API
* Documentation</a>
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class TagResourceRequest extends com.amazonaws.AmazonWebServiceRequest implements Serializable, Cloneable {

    /**
     * Amazon Resource Name (ARN) of the specific Amazon Web Services resource
     * (such as a server, user, or role) that the tags apply to.
     */
    private String arn;

    /**
     * Key-value pairs assigned to the ARN; usable for grouping and searching
     * resources by type, or as free-form metadata on user accounts.
     */
    private java.util.List<Tag> tags;

    /**
     * Sets the ARN of the resource to tag.
     *
     * @param arn
     *        An Amazon Resource Name (ARN) for a specific Amazon Web Services resource, such as a server, user, or
     *        role.
     */
    public void setArn(String arn) {
        this.arn = arn;
    }

    /**
     * Returns the ARN of the resource to tag.
     *
     * @return An Amazon Resource Name (ARN) for a specific Amazon Web Services resource, such as a server, user, or
     *         role.
     */
    public String getArn() {
        return this.arn;
    }

    /**
     * Fluent variant of {@link #setArn(String)}.
     *
     * @param arn
     *        An Amazon Resource Name (ARN) for a specific Amazon Web Services resource, such as a server, user, or
     *        role.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withArn(String arn) {
        this.arn = arn;
        return this;
    }

    /**
     * Returns the tags to attach to the resource (the live internal list, not a copy).
     *
     * @return Key-value pairs assigned to ARNs that you can use to group and search for resources by type.
     */
    public java.util.List<Tag> getTags() {
        return this.tags;
    }

    /**
     * Replaces the tag list with a defensive copy of the given collection
     * ({@code null} clears the list).
     *
     * @param tags
     *        Key-value pairs assigned to ARNs that you can use to group and search for resources by type.
     */
    public void setTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
    }

    /**
     * Appends the given tags to the existing list (creating it on first use).
     * <p>
     * <b>NOTE:</b> this appends; use {@link #setTags(java.util.Collection)} or
     * {@link #withTags(java.util.Collection)} to replace the existing values.
     * </p>
     *
     * @param tags
     *        Key-value pairs assigned to ARNs that you can use to group and search for resources by type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(Tag... tags) {
        if (this.tags == null) {
            this.tags = new java.util.ArrayList<Tag>(tags.length);
        }
        java.util.Collections.addAll(this.tags, tags);
        return this;
    }

    /**
     * Fluent variant of {@link #setTags(java.util.Collection)} — replaces the whole list.
     *
     * @param tags
     *        Key-value pairs assigned to ARNs that you can use to group and search for resources by type.
     * @return Returns a reference to this object so that method calls can be chained together.
     */
    public TagResourceRequest withTags(java.util.Collection<Tag> tags) {
        this.tags = (tags == null) ? null : new java.util.ArrayList<Tag>(tags);
        return this;
    }

    /**
     * Returns a string representation of this object, omitting unset fields.
     * Useful for testing and debugging.
     *
     * @return A string representation of this object.
     * @see java.lang.Object#toString()
     */
    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder("{");
        if (getArn() != null) {
            sb.append("Arn: ").append(getArn()).append(",");
        }
        if (getTags() != null) {
            sb.append("Tags: ").append(getTags());
        }
        return sb.append("}").toString();
    }

    /** Two requests are equal when both their ARNs and their tag lists are equal. */
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!(obj instanceof TagResourceRequest)) {
            return false;
        }
        TagResourceRequest other = (TagResourceRequest) obj;
        return java.util.Objects.equals(getArn(), other.getArn())
                && java.util.Objects.equals(getTags(), other.getTags());
    }

    /** Hash code consistent with {@link #equals(Object)} (31-based, null-safe). */
    @Override
    public int hashCode() {
        int result = 31 + java.util.Objects.hashCode(getArn());
        return 31 * result + java.util.Objects.hashCode(getTags());
    }

    /** Shallow clone via {@link Object#clone()}, narrowed to this type. */
    @Override
    public TagResourceRequest clone() {
        return (TagResourceRequest) super.clone();
    }
}
| |
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.ocs.dynamo.domain;
import java.math.BigDecimal;
import java.time.LocalDate;
import java.time.LocalTime;
import java.time.ZonedDateTime;
import java.util.HashSet;
import java.util.Set;
import javax.persistence.CascadeType;
import javax.persistence.ElementCollection;
import javax.persistence.Entity;
import javax.persistence.GeneratedValue;
import javax.persistence.Id;
import javax.persistence.Lob;
import javax.persistence.OneToMany;
import javax.persistence.Table;
import javax.validation.constraints.AssertTrue;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import org.apache.commons.lang3.builder.ReflectionToStringBuilder;
import com.ocs.dynamo.domain.model.AttributeTextFieldMode;
import com.ocs.dynamo.domain.model.annotation.Attribute;
import com.ocs.dynamo.domain.model.annotation.Model;
import com.ocs.dynamo.domain.model.annotation.SearchMode;
/**
* Entity used for testing purposes - has to be included in src/main/java
* because otherwise QueryDSL code generation fails
*
* @author bas.rutten
*/
@Entity
@Table(name = "test_entity")
@Model(displayProperty = "name", sortOrder = "name,age")
public class TestEntity extends AbstractTreeEntity<Integer, TestEntity> {
private static final long serialVersionUID = 5557043276302609211L;
// Small enum used to exercise enum-valued attribute handling.
public enum TestEnum {
A, B, C
}
@Id
@GeneratedValue
private Integer id;
// Main display attribute (always searchable, max 25 characters, required).
@Size(max = 25)
@Attribute(main = true, searchable = SearchMode.ALWAYS)
@NotNull
private String name;
@Attribute(searchable = SearchMode.ALWAYS)
private Long age;
@Attribute(searchable = SearchMode.ALWAYS)
private BigDecimal discount;
// Displayed as a percentage.
@Attribute(percentage = true, searchable = SearchMode.ALWAYS)
private BigDecimal rate;
@Attribute(displayFormat = "dd/MM/yyyy", searchable = SearchMode.ALWAYS)
private LocalDate birthDate;
private LocalTime registrationTime;
// Date edited/displayed as a week number (week = true).
@Attribute(week = true)
private LocalDate birthWeek;
@Attribute(searchable = SearchMode.ALWAYS)
private TestEnum someEnum;
// LOB column used to test binary attribute handling.
@Lob
private byte[] someBytes;
private Boolean someBoolean;
private String someString;
private Integer someInt;
// Rendered as a text area rather than a single-line field.
@Attribute(textFieldMode = AttributeTextFieldMode.TEXTAREA)
private String someTextArea;
// Boolean with custom true/false captions ("On"/"Off").
@Attribute(trueRepresentation = "On", falseRepresentation = "Off")
private Boolean someBoolean2;
// Bidirectional one-to-many detail collection; all operations cascade.
@OneToMany(mappedBy = "testEntity", cascade = CascadeType.ALL)
@Attribute(searchable = SearchMode.ALWAYS)
private Set<TestEntity2> testEntities = new HashSet<>();
@Attribute(displayFormat = "HH:mm:ss")
private LocalTime someTime;
// Element collections of basic types, used to test collection attributes.
@ElementCollection
@Attribute(maxLength = 25)
private Set<String> tags = new HashSet<>();
// String attribute rendered as a hyperlink.
@Attribute(url = true)
private String url;
// Entity reference supporting quick-add and navigation in the UI.
@Attribute(quickAddPropertyName = "name", navigable = true)
private TestDomain testDomain;
@ElementCollection
private Set<Integer> intTags = new HashSet<>();
@ElementCollection
@Attribute(minValue = 34)
private Set<Long> longTags = new HashSet<>();
private ZonedDateTime zoned;
private Double someDouble;
public TestEntity() {
// default constructor
}
public TestEntity(int id, String name, Long age) {
this.id = id;
this.name = name;
this.age = age;
}
public TestEntity(String name, Long age) {
this.name = name;
this.age = age;
}
@Override
public Integer getId() {
return id;
}
@Override
public void setId(Integer id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public Long getAge() {
return age;
}
public void setAge(Long age) {
this.age = age;
}
public BigDecimal getDiscount() {
return discount;
}
public void setDiscount(BigDecimal discount) {
this.discount = discount;
}
public LocalDate getBirthDate() {
return birthDate;
}
public void setBirthDate(LocalDate birthDate) {
this.birthDate = birthDate;
}
public LocalDate getBirthWeek() {
return birthWeek;
}
public void setBirthWeek(LocalDate birthWeek) {
this.birthWeek = birthWeek;
}
public TestEnum getSomeEnum() {
return someEnum;
}
public void setSomeEnum(TestEnum someEnum) {
this.someEnum = someEnum;
}
// Reflection-based dump of all fields; for debugging only.
@Override
public String toString() {
return ReflectionToStringBuilder.toString(this);
}
public byte[] getSomeBytes() {
return someBytes;
}
public void setSomeBytes(byte[] someBytes) {
this.someBytes = someBytes;
}
public BigDecimal getRate() {
return rate;
}
public void setRate(BigDecimal rate) {
this.rate = rate;
}
public Boolean getSomeBoolean() {
return someBoolean;
}
public void setSomeBoolean(Boolean someBoolean) {
this.someBoolean = someBoolean;
}
public String getSomeString() {
return someString;
}
public void setSomeString(String someString) {
this.someString = someString;
}
public Boolean getSomeBoolean2() {
return someBoolean2;
}
public void setSomeBoolean2(Boolean someBoolean2) {
this.someBoolean2 = someBoolean2;
}
public Integer getSomeInt() {
return someInt;
}
public void setSomeInt(Integer someInt) {
this.someInt = someInt;
}
public Set<TestEntity2> getTestEntities() {
return testEntities;
}
public void setTestEntities(Set<TestEntity2> testEntities) {
this.testEntities = testEntities;
}
// Convenience method that wires both sides of the bidirectional association.
public void addTestEntity2(TestEntity2 entity2) {
this.testEntities.add(entity2);
entity2.setTestEntity(this);
}
public LocalTime getSomeTime() {
return someTime;
}
public void setSomeTime(LocalTime someTime) {
this.someTime = someTime;
}
public String getSomeTextArea() {
return someTextArea;
}
public void setSomeTextArea(String someTextArea) {
this.someTextArea = someTextArea;
}
public Set<String> getTags() {
return tags;
}
public void setTags(Set<String> tags) {
this.tags = tags;
}
public String getUrl() {
return url;
}
public void setUrl(String url) {
this.url = url;
}
// Class-level bean-validation rule: the entity is invalid when name is "bogus".
@AssertTrue
public boolean isAssertSomething() {
return !"bogus".equals(name);
}
public TestDomain getTestDomain() {
return testDomain;
}
public void setTestDomain(TestDomain testDomain) {
this.testDomain = testDomain;
}
public Set<Integer> getIntTags() {
return intTags;
}
public void setIntTags(Set<Integer> intTags) {
this.intTags = intTags;
}
public Set<Long> getLongTags() {
return longTags;
}
public void setLongTags(Set<Long> longTags) {
this.longTags = longTags;
}
public LocalTime getRegistrationTime() {
return registrationTime;
}
public void setRegistrationTime(LocalTime registrationTime) {
this.registrationTime = registrationTime;
}
public ZonedDateTime getZoned() {
return zoned;
}
public void setZoned(ZonedDateTime zoned) {
this.zoned = zoned;
}
public Double getSomeDouble() {
return someDouble;
}
public void setSomeDouble(Double someDouble) {
this.someDouble = someDouble;
}
}
| |
package ezvcard.io.scribe;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import org.junit.Test;
import ezvcard.io.json.JCardValue;
import ezvcard.io.scribe.Sensei.Check;
import ezvcard.property.StructuredName;
/*
Copyright (c) 2012-2015, Michael Angstadt
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this
list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
The views and conclusions contained in the software and documentation are those
of the authors and should not be interpreted as representing official policies,
either expressed or implied, of the FreeBSD Project.
*/
/**
* @author Michael Angstadt
*/
public class StructuredNameScribeTest {
private final StructuredNameScribe scribe = new StructuredNameScribe();
private final Sensei<StructuredName> sensei = new Sensei<StructuredName>(scribe);
// Fixture with every component populated; the additional name
// "Joh;nny," deliberately contains ';' and ',' to exercise escaping.
private final StructuredName withAllValues = new StructuredName();
{
withAllValues.setGiven("Jonathan");
withAllValues.setFamily("Doe");
withAllValues.addAdditional("Joh;nny,");
withAllValues.addAdditional("John");
withAllValues.addPrefix("Mr.");
withAllValues.addSuffix("III");
}
// Fixture with a null family name and no prefixes/suffixes, to verify
// how empty components are (de)serialized.
private final StructuredName withEmptyValues = new StructuredName();
{
withEmptyValues.setGiven("Jonathan");
withEmptyValues.setFamily(null);
withEmptyValues.addAdditional("Joh;nny,");
withEmptyValues.addAdditional("John");
}
// Completely empty name.
private final StructuredName empty = new StructuredName();
// Plain-text serialization: ';' and ',' inside values are
// backslash-escaped; empty components become empty ';'-separated slots.
@Test
public void writeText() {
sensei.assertWriteText(withAllValues).run("Doe;Jonathan;Joh\\;nny\\,,John;Mr.;III");
sensei.assertWriteText(withEmptyValues).run(";Jonathan;Joh\\;nny\\,,John;;");
sensei.assertWriteText(empty).run(";;;;");
}
// xCard (XML) serialization: empty components become self-closing elements.
@Test
public void writeXml() {
//@formatter:off
sensei.assertWriteXml(withAllValues).run(
"<surname>Doe</surname>" +
"<given>Jonathan</given>" +
"<additional>Joh;nny,</additional>" +
"<additional>John</additional>" +
"<prefix>Mr.</prefix>" +
"<suffix>III</suffix>"
);
sensei.assertWriteXml(withEmptyValues).run(
"<surname/>" +
"<given>Jonathan</given>" +
"<additional>Joh;nny,</additional>" +
"<additional>John</additional>" +
"<prefix/>" +
"<suffix/>"
);
sensei.assertWriteXml(empty).run(
"<surname/>" +
"<given/>" +
"<additional/>" +
"<prefix/>" +
"<suffix/>"
);
//@formatter:on
}
// jCard (JSON) serialization: multi-valued components become arrays,
// missing components become empty strings.
@Test
public void writeJson() {
sensei.assertWriteJson(withAllValues).run(JCardValue.structured("Doe", "Jonathan", Arrays.asList("Joh;nny,", "John"), "Mr.", "III"));
sensei.assertWriteJson(withEmptyValues).run(JCardValue.structured("", "Jonathan", Arrays.asList("Joh;nny,", "John"), "", ""));
sensei.assertWriteJson(empty).run(JCardValue.structured("", "", "", "", ""));
}
// Plain-text parsing, including the round-trip of escaped characters
// and degenerate inputs (";;;;" and "").
@Test
public void parseText() {
sensei.assertParseText("Doe;Jonathan;Joh\\;nny\\,,John;Mr.;III").run(is(withAllValues));
sensei.assertParseText(";Jonathan;Joh\\;nny\\,,John;;").run(is(withEmptyValues));
sensei.assertParseText(";;;;").run(is(empty));
sensei.assertParseText("").run(is(empty));
}
// xCard (XML) parsing of the same three shapes written above.
@Test
public void parseXml() {
//@formatter:off
sensei.assertParseXml(
"<surname>Doe</surname>" +
"<given>Jonathan</given>" +
"<additional>Joh;nny,</additional>" +
"<additional>John</additional>" +
"<prefix>Mr.</prefix>" +
"<suffix>III</suffix>"
).run(is(withAllValues));
sensei.assertParseXml(
"<surname/>" +
"<given>Jonathan</given>" +
"<additional>Joh;nny,</additional>" +
"<additional>John</additional>" +
"<prefix/>" +
"<suffix/>"
).run(is(withEmptyValues));
sensei.assertParseXml(
"<surname/>" +
"<given/>" +
"<additional/>" +
"<prefix/>" +
"<suffix/>"
).run(is(empty));
//@formatter:on
}
// hCard (HTML) parsing using the microformat class names.
@Test
public void parseHtml() {
//@formatter:off
sensei.assertParseHtml(
"<div>" +
"<span class=\"family-name\">Doe</span>" +
"<span class=\"given-name\">Jonathan</span>" +
"<span class=\"additional-name\">Joh;nny,</span>" +
"<span class=\"additional-name\">John</span>" +
"<span class=\"honorific-prefix\">Mr.</span>" +
"<span class=\"honorific-suffix\">III</span>" +
"</div>"
).run(is(withAllValues));
sensei.assertParseHtml(
"<div>" +
"<span class=\"given-name\">Jonathan</span>" +
"<span class=\"additional-name\">Joh;nny,</span>" +
"<span class=\"additional-name\">John</span>" +
"</div>"
).run(is(withEmptyValues));
sensei.assertParseHtml(
"<div>" +
"<span class=\"given-name\"></span>" +
"</div>"
).run(is(empty));
//@formatter:on
}
// jCard (JSON) parsing, including null/short-array component handling.
@Test
public void parseJson() {
JCardValue value = JCardValue.structured("Doe", "Jonathan", Arrays.asList("Joh;nny,", "John"), "Mr.", "III");
sensei.assertParseJson(value).run(is(withAllValues));
value = JCardValue.structured(null, "Jonathan", Arrays.asList("Joh;nny,", "John"), "", null);
sensei.assertParseJson(value).run(is(withEmptyValues));
value = JCardValue.structured(null, "Jonathan", Arrays.asList("Joh;nny,", "John"));
sensei.assertParseJson(value).run(is(withEmptyValues));
value = JCardValue.structured(null, null, "", null, null);
sensei.assertParseJson(value).run(is(empty));
sensei.assertParseJson("").run(is(empty));
}
// Builds a Check that compares each name component of the parsed
// StructuredName against the expected fixture.
private Check<StructuredName> is(final StructuredName expected) {
return new Check<StructuredName>() {
public void check(StructuredName actual) {
assertEquals(expected.getFamily(), actual.getFamily());
assertEquals(expected.getGiven(), actual.getGiven());
assertEquals(expected.getAdditional(), actual.getAdditional());
assertEquals(expected.getPrefixes(), actual.getPrefixes());
assertEquals(expected.getSuffixes(), actual.getSuffixes());
}
};
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.