repo_name stringlengths 5 108 | path stringlengths 6 333 | size stringlengths 1 6 | content stringlengths 4 977k | license stringclasses 15 values |
|---|---|---|---|---|
jacksonic/vjlofvhjfgm | src/foam/box/socket/SocketWebAgent.java | 901 | /**
* @license
* Copyright 2020 The FOAM Authors. All Rights Reserved.
* http://www.apache.org/licenses/LICENSE-2.0
*/
package foam.box.socket;
import foam.core.X;
import foam.core.FObject;
import foam.box.Box;
import foam.box.Message;
import foam.box.SessionServerBox;
import foam.nanos.logger.Logger;
import foam.nanos.http.ServiceWebAgent;
import java.io.InputStream;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
/**
 * WebAgent that dispatches a socket-delivered request {@code Message} to the
 * wrapped service skeleton through a {@link SessionServerBox}, applying
 * session-based authentication when {@code authenticate} is set.
 */
public class SocketWebAgent
  extends ServiceWebAgent
{
  /**
   * @param skeleton     box implementing the service to dispatch to
   * @param authenticate whether the session must be authenticated before dispatch
   */
  public SocketWebAgent(Box skeleton, boolean authenticate) {
    super(skeleton, authenticate);
  }

  @Override
  public void execute(X x) {
    try {
      // The socket layer stores the decoded request under "requestMessage".
      Message msg = (Message) x.get("requestMessage");
      new SessionServerBox(x, skeleton_, authenticate_).send(msg);
    } catch ( Throwable t ) {
      // Preserve the cause: t.getMessage() alone may be null and always
      // discards the original stack trace, making failures undiagnosable.
      throw new RuntimeException("SocketWebAgent failed to dispatch request message", t);
    }
  }
}
| apache-2.0 |
siosio/intellij-community | platform/platform-impl/src/com/intellij/ide/actionsOnSave/ActionOnSaveContext.java | 1634 | // Copyright 2000-2021 JetBrains s.r.o. and contributors. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.actionsOnSave;
import com.intellij.openapi.options.ex.Settings;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.UserDataHolderBase;
import org.jetbrains.annotations.ApiStatus;
import org.jetbrains.annotations.NotNull;
/**
* This object is created when {@link ActionsOnSaveConfigurable#reset()} is called. It happens in two cases: when the 'Actions on Save'
* page is opened for the first time during the current Settings (Preferences) dialog session, and also when the 'Reset' link in the
* top-right corner of the 'Actions on Save' page is clicked.
* <br/><br/>
* {@link ActionOnSaveInfo} implementations use this object to understand their state at the moment of creation. Instances of
* <code>ActionOnSaveInfo</code> have a different lifecycle, see {@link ActionOnSaveInfoProvider#getActionOnSaveInfos(Project, ActionOnSaveContext)}.
*
* @see ActionOnSaveInfoProvider#getActionOnSaveInfos(Project, ActionOnSaveContext)
* @see ActionOnSaveInfo
*/
@ApiStatus.Experimental
public final class ActionOnSaveContext extends UserDataHolderBase {

  private final @NotNull Project project;
  private final @NotNull Settings settings;

  /** Created by the 'Actions on Save' configurable; see the class-level javadoc. */
  ActionOnSaveContext(@NotNull Project project, @NotNull Settings settings) {
    this.project = project;
    this.settings = settings;
  }

  /** The project whose 'Actions on Save' page is being shown. */
  public @NotNull Project getProject() {
    return project;
  }

  /** The Settings (Preferences) dialog session this context belongs to. */
  public @NotNull Settings getSettings() {
    return settings;
  }
}
| apache-2.0 |
IvanNikolaychuk/pentaho-kettle | plugins/elasticsearch-bulk-insert/src/org/pentaho/di/trans/steps/elasticsearchbulk/ElasticSearchBulkMeta.java | 28785 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2015 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.elasticsearchbulk;
import java.net.InetAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import org.apache.commons.lang.StringUtils;
import org.elasticsearch.common.transport.InetSocketTransportAddress;
import org.pentaho.di.core.CheckResult;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.annotations.Step;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleStepException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.row.RowMetaInterface;
import org.pentaho.di.core.row.ValueMeta;
import org.pentaho.di.core.row.ValueMetaInterface;
import org.pentaho.di.core.variables.VariableSpace;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStepMeta;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
import org.pentaho.metastore.api.IMetaStore;
import org.w3c.dom.Node;
@Step( id = "ElasticSearchBulk", i18nPackageName = "org.pentaho.di.trans.steps.elasticsearch",
name = "ElasticSearchBulk.TypeLongDesc.ElasticSearchBulk",
description = "ElasticSearchBulk.TypeTooltipDesc.ElasticSearchBulk",
categoryDescription = "i18n:org.pentaho.di.trans.step:BaseStep.Category.Bulk", image = "ESB.svg",
documentationUrl = "http://wiki.pentaho.com/display/EAI/ElasticSearch+Bulk+Insert" )
public class ElasticSearchBulkMeta extends BaseStepMeta implements StepMetaInterface {
private static Class<?> PKG = ElasticSearchBulkMeta.class; // for i18n purposes
/**
* Serialization aids
*/
private static class Dom {
static final String TAG_GENERAL = "general";
static final String TAG_INDEX = "index";
static final String TAG_TYPE = "type";
static final String TAG_IS_JSON = "isJson";
static final String TAG_JSON_FIELD = "jsonField";
static final String TAG_ID_IN_FIELD = "idField";
static final String TAG_OVERWRITE_IF_EXISTS = "overwriteIfExists";
static final String TAG_ID_OUT_FIELD = "idOutputField";
static final String TAG_USE_OUTPUT = "useOutput";
static final String TAG_STOP_ON_ERROR = "stopOnError";
static final String TAG_TIMEOUT = "timeout";
static final String TAG_TIMEOUT_UNIT = "timeoutUnit";
static final String TAG_BATCH_SIZE = "batchSize";
static final String TAG_FIELDS = "fields";
static final String TAG_FIELD = "field";
static final String TAG_NAME = "columnName";
static final String TAG_TARGET = "targetName";
static final String TAG_SERVERS = "servers";
static final String TAG_SERVER = "server";
static final String TAG_SERVER_ADDRESS = "address";
static final String TAG_SERVER_PORT = "port";
public static final String TAG_SETTINGS = "settings";
public static final String TAG_SETTING = "setting";
public static final String TAG_SETTING_NAME = "name";
public static final String TAG_SETTING_VALUE = "value";
static final String INDENT = " ";
}
public static final int DEFAULT_BATCH_SIZE = 50000;
public static final Long DEFAULT_TIMEOUT = 10L;
public static final TimeUnit DEFAULT_TIMEOUT_UNIT = TimeUnit.SECONDS;
public static final int DEFAULT_PORT = 9300;
// ///////////
// FIELDS //
// /////////
private String index = null;
private String type = null;
private boolean isJsonInsert = false;
private String jsonField = null;
private String idInField = null;
private boolean overWriteIfSameId = false;
private String idOutField = null;
private boolean useOutput = false;
private boolean stopOnError = true;
private String batchSize;
private String timeout;
private TimeUnit timeoutUnit;
private List<InetSocketTransportAddress> servers = new ArrayList<InetSocketTransportAddress>();
/** fields to use in json generation */
private Map<String, String> fields = new HashMap<String, String>();
private Map<String, String> settings = new HashMap<String, String>();
// ////////////////////
// GETTERS/SETTERS //
// //////////////////
public String getJsonField() {
return jsonField;
}
public void setJsonField( String jsonField ) {
this.jsonField = StringUtils.isBlank( jsonField ) ? null : jsonField;
}
public String getIdOutField() {
return idOutField;
}
public void setIdOutField( String idField ) {
this.idOutField = StringUtils.isBlank( idField ) ? null : idField;
}
public boolean isJsonInsert() {
return isJsonInsert;
}
public void setJsonInsert( boolean isJsonInsert ) {
this.isJsonInsert = isJsonInsert;
}
public String getIndex() {
return index;
}
public void setIndex( String index ) {
this.index = index;
}
public String getType() {
return type;
}
public void setType( String type ) {
this.type = type;
}
public void setIdInField( String idInField ) {
this.idInField = idInField;
}
public String getIdInField() {
return idInField;
}
public void setOverWriteIfSameId( boolean overWriteIfSameId ) {
this.overWriteIfSameId = overWriteIfSameId;
}
public boolean isOverWriteIfSameId() {
return overWriteIfSameId;
}
public boolean isUseOutput() {
return useOutput;
}
public void setUseOutput( boolean value ) {
useOutput = value;
}
public boolean isStopOnError() {
return stopOnError;
}
public void setStopOnError( boolean stopOnError ) {
this.stopOnError = stopOnError;
}
public Map<String, String> getFields() {
return fields;
}
public void setFields( Map<String, String> fields ) {
this.fields = fields;
}
public void clearFields() {
this.fields.clear();
}
public void addField( String inputName, String nameInJson ) {
this.fields.put( inputName, StringUtils.isBlank( nameInJson ) ? inputName : nameInJson );
}
public InetSocketTransportAddress[] getServers() {
return servers.toArray( new InetSocketTransportAddress[servers.size()] );
}
public void setServers( InetSocketTransportAddress[] servers ) {
this.servers = Arrays.asList( servers );
}
public void clearServers() {
servers.clear();
}
  /**
   * Resolves {@code addr} and appends it to the server list with the given port.
   * <p>
   * NOTE(review): resolution failures (e.g. unknown host) are silently swallowed,
   * so a bad address simply never appears in the list — the caller gets no
   * feedback. loadXML/readRep rely on this lenient behavior; consider at least
   * logging the failure. TODO confirm intended policy with maintainers.
   */
  public void addServer( String addr, int port ) {
    try {
      InetAddress address = InetAddress.getByName( addr );
      servers.add( new InetSocketTransportAddress( address, port ) );
    } catch ( Exception exception ) {
      // TODO What do we do here?
    }
  }
public Map<String, String> getSettings() {
return this.settings;
}
public void setSettings( Map<String, String> settings ) {
this.settings = settings;
}
public void clearSettings() {
settings.clear();
}
public void addSetting( String property, String value ) {
if ( StringUtils.isNotBlank( property ) ) {
settings.put( property, value );
}
}
/**
* @param value
* The batch size to set
*/
public void setBatchSize( String value ) {
this.batchSize = value;
}
/**
* @return Returns the batchSize.
*/
public String getBatchSize() {
return this.batchSize;
}
public int getBatchSizeInt( VariableSpace vars ) {
return Const.toInt( vars.environmentSubstitute( this.batchSize ), DEFAULT_BATCH_SIZE );
}
/**
* @return Returns the TimeOut.
*/
public String getTimeOut() {
return timeout;
}
/**
* @param TimeOut
* The TimeOut to set.
*/
public void setTimeOut( String TimeOut ) {
this.timeout = TimeOut;
}
public TimeUnit getTimeoutUnit() {
return timeoutUnit != null ? timeoutUnit : DEFAULT_TIMEOUT_UNIT;
}
public void setTimeoutUnit( TimeUnit timeoutUnit ) {
this.timeoutUnit = timeoutUnit;
}
// ////////////////
// CONSTRUCTOR //
// //////////////
public ElasticSearchBulkMeta() {
super(); // allocate BaseStepMeta
}
public Object clone() {
ElasticSearchBulkMeta retval = (ElasticSearchBulkMeta) super.clone();
return retval;
}
public void setDefault() {
batchSize = "" + DEFAULT_BATCH_SIZE;
timeoutUnit = DEFAULT_TIMEOUT_UNIT;
index = "twitter";
type = "tweet";
isJsonInsert = false;
jsonField = null;
idOutField = null;
useOutput = false;
stopOnError = true;
}
/* This function adds meta data to the rows being pushed out */
public void getFields( RowMetaInterface r, String name, RowMetaInterface[] info, StepMeta nextStep,
VariableSpace space, Repository repository, IMetaStore metaStore ) throws KettleStepException {
if ( StringUtils.isNotBlank( this.getIdOutField() ) ) {
ValueMetaInterface valueMeta =
new ValueMeta( space.environmentSubstitute( this.getIdOutField() ), ValueMetaInterface.TYPE_STRING );
valueMeta.setOrigin( name );
// add if doesn't exist
if ( !r.exists( valueMeta ) ) {
r.addValueMeta( valueMeta );
}
}
}
public void loadXML( Node stepnode, List<DatabaseMeta> databases, IMetaStore metaStore ) throws KettleXMLException {
try {
Node general = XMLHandler.getSubNode( stepnode, Dom.TAG_GENERAL );
batchSize = XMLHandler.getTagValue( general, Dom.TAG_BATCH_SIZE );
timeout = XMLHandler.getTagValue( general, Dom.TAG_TIMEOUT );
String timeoutStr = XMLHandler.getTagValue( general, Dom.TAG_TIMEOUT_UNIT );
try {
timeoutUnit = TimeUnit.valueOf( timeoutStr );
} catch ( Exception e ) {
timeoutUnit = DEFAULT_TIMEOUT_UNIT;
}
setIndex( XMLHandler.getTagValue( general, Dom.TAG_INDEX ) );
setType( XMLHandler.getTagValue( general, Dom.TAG_TYPE ) );
setJsonInsert( parseBool( XMLHandler.getTagValue( general, Dom.TAG_IS_JSON ) ) );
setJsonField( XMLHandler.getTagValue( general, Dom.TAG_JSON_FIELD ) );
setIdInField( XMLHandler.getTagValue( general, Dom.TAG_ID_IN_FIELD ) );
setOverWriteIfSameId( parseBool( XMLHandler.getTagValue( general, Dom.TAG_OVERWRITE_IF_EXISTS ) ) );
setIdOutField( XMLHandler.getTagValue( general, Dom.TAG_ID_OUT_FIELD ) );
setUseOutput( parseBool( XMLHandler.getTagValue( general, Dom.TAG_USE_OUTPUT ) ) );
setStopOnError( parseBool( XMLHandler.getTagValue( general, Dom.TAG_STOP_ON_ERROR ) ) );
// Fields
Node fields = XMLHandler.getSubNode( stepnode, Dom.TAG_FIELDS );
int nrFields = XMLHandler.countNodes( fields, Dom.TAG_FIELD );
this.clearFields();
for ( int i = 0; i < nrFields; i++ ) {
Node fNode = XMLHandler.getSubNodeByNr( fields, Dom.TAG_FIELD, i );
String colName = XMLHandler.getTagValue( fNode, Dom.TAG_NAME );
String targetName = XMLHandler.getTagValue( fNode, Dom.TAG_TARGET );
this.addField( colName, targetName );
}
// Servers
Node servers = XMLHandler.getSubNode( stepnode, Dom.TAG_SERVERS );
int nrServers = XMLHandler.countNodes( servers, Dom.TAG_SERVER );
this.clearServers();
for ( int i = 0; i < nrServers; i++ ) {
Node sNode = XMLHandler.getSubNodeByNr( servers, Dom.TAG_SERVER, i );
String addr = XMLHandler.getTagValue( sNode, Dom.TAG_SERVER_ADDRESS );
String portStr = XMLHandler.getTagValue( sNode, Dom.TAG_SERVER_PORT );
int port = DEFAULT_PORT;
try {
port = Integer.parseInt( portStr );
} catch ( NumberFormatException nfe ) {
// use default
}
this.addServer( addr, port );
}
// Settings
Node settings = XMLHandler.getSubNode( stepnode, Dom.TAG_SETTINGS );
int nrSettings = XMLHandler.countNodes( settings, Dom.TAG_SETTING );
this.clearSettings();
for ( int i = 0; i < nrSettings; i++ ) {
Node sNode = XMLHandler.getSubNodeByNr( settings, Dom.TAG_SETTING, i );
String name = XMLHandler.getTagValue( sNode, Dom.TAG_SETTING_NAME );
String value = XMLHandler.getTagValue( sNode, Dom.TAG_SETTING_VALUE );
this.addSetting( name, value );
}
} catch ( Exception e ) {
throw new KettleXMLException( "Unable to load step info from XML", e );
}
}
private static boolean parseBool( String val ) {
return "Y".equals( val );
}
public String getXML() {
StringBuffer retval = new StringBuffer();
Indentation indent = new Indentation();
indent.incr().incr();
// General
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_GENERAL ) ).append( Const.CR );
indent.incr();
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_INDEX, getIndex() ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_TYPE, getType() ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_BATCH_SIZE, batchSize ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_TIMEOUT, timeout ) );
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_TIMEOUT_UNIT, timeoutUnit.toString() ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_IS_JSON, isJsonInsert() ) );
if ( getJsonField() != null ) {
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_JSON_FIELD, getJsonField() ) );
}
if ( getIdOutField() != null ) {
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_ID_OUT_FIELD, getIdOutField() ) );
}
if ( getIdInField() != null ) {
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_ID_IN_FIELD, getIdInField() ) );
}
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_OVERWRITE_IF_EXISTS, isOverWriteIfSameId() ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_USE_OUTPUT, useOutput ) );
retval.append( indent.toString() + XMLHandler.addTagValue( Dom.TAG_STOP_ON_ERROR, stopOnError ) );
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_GENERAL ) ).append( Const.CR );
// Fields
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_FIELDS ) ).append( Const.CR );
indent.incr();
for ( String colName : this.getFields().keySet() ) {
String targetName = this.getFields().get( colName );
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_FIELD ) ).append( Const.CR );
indent.incr();
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_NAME, colName ) );
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_TARGET, targetName ) );
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_FIELD ) ).append( Const.CR );
}
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_FIELDS ) ).append( Const.CR );
// Servers
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_SERVERS ) ).append( Const.CR );
indent.incr();
for ( InetSocketTransportAddress istAddr : this.getServers() ) {
String address = istAddr.address().getAddress().getHostAddress();
int port = istAddr.address().getPort();
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_SERVER ) ).append( Const.CR );
indent.incr();
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_SERVER_ADDRESS, address ) );
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_SERVER_PORT, port ) );
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_SERVER ) ).append( Const.CR );
}
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_SERVERS ) ).append( Const.CR );
// Settings
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_SETTINGS ) ).append( Const.CR );
indent.incr();
for ( String settingName : this.getSettings().keySet() ) {
String settingValue = this.getSettings().get( settingName );
retval.append( indent.toString() ).append( XMLHandler.openTag( Dom.TAG_SETTING ) ).append( Const.CR );
indent.incr();
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_SETTING_NAME, settingName ) );
retval.append( indent.toString() ).append( XMLHandler.addTagValue( Dom.TAG_SETTING_VALUE, settingValue ) );
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_SETTING ) ).append( Const.CR );
}
indent.decr();
retval.append( indent.toString() ).append( XMLHandler.closeTag( Dom.TAG_SETTINGS ) ).append( Const.CR );
return retval.toString();
}
private static class Indentation {
private static String indentUnit = Dom.INDENT;
private String indent = "";
private int indentLevel = 0;
public Indentation incr() {
indentLevel++;
indent += indentUnit;
return this;
}
public Indentation decr() {
if ( --indentLevel >= 0 ) {
indent = indent.substring( 0, indent.length() - indentUnit.length() );
}
return this;
}
public String toString() {
return indent;
}
}
private static String joinRepAttr( String... args ) {
return StringUtils.join( args, "_" );
}
public void readRep( Repository rep, IMetaStore metaStore, ObjectId id_step, List<DatabaseMeta> databases )
throws KettleException {
try {
setIndex( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_INDEX ) ) );
setType( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TYPE ) ) );
setBatchSize( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_BATCH_SIZE ) ) );
setTimeOut( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TIMEOUT ) ) );
String timeoutStr = rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TIMEOUT_UNIT ) );
try {
timeoutUnit = TimeUnit.valueOf( timeoutStr );
} catch ( Exception e ) {
timeoutUnit = DEFAULT_TIMEOUT_UNIT;
}
setJsonInsert( rep.getStepAttributeBoolean( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_IS_JSON ) ) );
setJsonField( ( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_JSON_FIELD ) ) ) );
setIdInField( ( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_ID_IN_FIELD ) ) ) );
setOverWriteIfSameId( rep.getStepAttributeBoolean( id_step, joinRepAttr( Dom.TAG_GENERAL,
Dom.TAG_OVERWRITE_IF_EXISTS ) ) );
setIdOutField( ( rep.getStepAttributeString( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_ID_OUT_FIELD ) ) ) );
setUseOutput( rep.getStepAttributeBoolean( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_USE_OUTPUT ) ) );
setStopOnError( rep.getStepAttributeBoolean( id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_STOP_ON_ERROR ) ) );
// Fields
clearFields();
int fieldsNr = rep.countNrStepAttributes( id_step, joinRepAttr( Dom.TAG_FIELD, Dom.TAG_NAME ) );
for ( int i = 0; i < fieldsNr; i++ ) {
String name = rep.getStepAttributeString( id_step, i, joinRepAttr( Dom.TAG_FIELD, Dom.TAG_NAME ) );
String target = rep.getStepAttributeString( id_step, i, joinRepAttr( Dom.TAG_FIELD, Dom.TAG_TARGET ) );
addField( name, target );
}
// Servers
clearServers();
int serversNr = rep.countNrStepAttributes( id_step, joinRepAttr( Dom.TAG_SERVER, Dom.TAG_SERVER_ADDRESS ) );
for ( int i = 0; i < serversNr; i++ ) {
String addr = rep.getStepAttributeString( id_step, i, joinRepAttr( Dom.TAG_SERVER, Dom.TAG_SERVER_ADDRESS ) );
int port = (int) rep.getStepAttributeInteger( id_step, i, joinRepAttr( Dom.TAG_SERVER, Dom.TAG_SERVER_PORT ) );
addServer( addr, port );
}
// Settings
clearSettings();
int settingsNr = rep.countNrStepAttributes( id_step, joinRepAttr( Dom.TAG_SETTING, Dom.TAG_SETTING_NAME ) );
for ( int i = 0; i < settingsNr; i++ ) {
String name = rep.getStepAttributeString( id_step, i, joinRepAttr( Dom.TAG_SETTING, Dom.TAG_SETTING_NAME ) );
String value = rep.getStepAttributeString( id_step, i, joinRepAttr( Dom.TAG_SETTING, Dom.TAG_SETTING_VALUE ) );
addSetting( name, value );
}
} catch ( Exception e ) {
throw new KettleException(
BaseMessages.getString( PKG, "ElasticSearchBulkMeta.Exception.ErrorReadingRepository" ), e );
}
}
public void saveRep( Repository rep, IMetaStore metaStore, ObjectId id_transformation, ObjectId id_step )
throws KettleException {
try {
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_INDEX ), getIndex() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TYPE ), getType() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_BATCH_SIZE ), batchSize );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TIMEOUT ), getTimeOut() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_TIMEOUT_UNIT ),
getTimeoutUnit().toString() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_IS_JSON ),
isJsonInsert() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_JSON_FIELD ),
getJsonField() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_ID_IN_FIELD ),
getIdInField() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_OVERWRITE_IF_EXISTS ),
isOverWriteIfSameId() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_ID_OUT_FIELD ),
getIdOutField() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_USE_OUTPUT ),
isUseOutput() );
rep.saveStepAttribute( id_transformation, id_step, joinRepAttr( Dom.TAG_GENERAL, Dom.TAG_STOP_ON_ERROR ),
isStopOnError() );
// Fields
String[] fieldNames = getFields().keySet().toArray( new String[getFields().keySet().size()] );
for ( int i = 0; i < fieldNames.length; i++ ) {
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_FIELD, Dom.TAG_NAME ), fieldNames[i] );
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_FIELD, Dom.TAG_TARGET ), getFields()
.get( fieldNames[i] ) );
}
// Servers
for ( int i = 0; i < getServers().length; i++ ) {
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_SERVER, Dom.TAG_SERVER_ADDRESS ),
getServers()[i].address().getAddress().getHostAddress() );
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_SERVER, Dom.TAG_SERVER_PORT ),
getServers()[i].address().getPort() );
}
// Settings
String[] settingNames = getSettings().keySet().toArray( new String[getSettings().keySet().size()] );
for ( int i = 0; i < settingNames.length; i++ ) {
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_SETTING, Dom.TAG_SETTING_NAME ),
settingNames[i] );
rep.saveStepAttribute( id_transformation, id_step, i, joinRepAttr( Dom.TAG_SETTING, Dom.TAG_SETTING_VALUE ),
getSettings().get( settingNames[i] ) );
}
} catch ( Exception e ) {
throw new KettleException( BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.Exception.ErrorSavingToRepository", "" + id_step ), e );
}
}
public void check( List<CheckResultInterface> remarks, TransMeta transMeta, StepMeta stepMeta, RowMetaInterface prev,
String[] input, String[] output, RowMetaInterface info, VariableSpace space, Repository repository,
IMetaStore metaStore ) {
checkBasicRequiredFields( remarks, stepMeta );
checkInputFields( remarks, prev, stepMeta );
}
private void checkBasicRequiredFields( List<CheckResultInterface> remarks, StepMeta stepMeta ) {
checkRequiredString( remarks, stepMeta, getIndex(), BaseMessages.getString( PKG,
"ElasticSearchBulkDialog.Index.Label" ) );
checkRequiredString( remarks, stepMeta, getType(), BaseMessages.getString( PKG,
"ElasticSearchBulkDialog.Type.Label" ) );
checkRequiredString( remarks, stepMeta, getBatchSize(), BaseMessages.getString( PKG,
"ElasticSearchBulkDialog.BatchSize.Label" ) );
}
private void checkRequiredString( List<CheckResultInterface> remarks, StepMeta stepMeta, String value,
String fieldName ) {
if ( StringUtils.isBlank( value ) ) {
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.MissingRequired", fieldName ), stepMeta ) );
} else {
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_OK, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.RequiredOK", fieldName, value ), stepMeta ) );
}
}
private void checkInputFields( List<CheckResultInterface> remarks, RowMetaInterface prev, StepMeta stepMeta ) {
if ( prev != null && prev.size() > 0 ) {
if ( isJsonInsert() ) { // JSON
if ( StringUtils.isBlank( getJsonField() ) ) { // jsonField not set
String jsonFieldLabel = BaseMessages.getString( PKG, "ElasticSearchBulkDialog.JsonField.Label" );
String isJsonLabel = BaseMessages.getString( PKG, "ElasticSearchBulkDialog.IsJson.Label" );
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.MissingRequiredDependent", jsonFieldLabel, isJsonLabel ), stepMeta ) );
} else if ( prev.indexOfValue( getJsonField() ) < 0 ) { // jsonField not in input
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.MissingInput", getJsonField() ), stepMeta ) );
}
} else { // not JSON
for ( String fieldName : getFields().keySet() ) {
if ( prev.indexOfValue( fieldName ) < 0 ) { // fields not found
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.MissingInput", fieldName ), stepMeta ) );
}
}
}
} else { // no input
remarks.add( new CheckResult( CheckResultInterface.TYPE_RESULT_ERROR, BaseMessages.getString( PKG,
"ElasticSearchBulkMeta.CheckResult.NoInput" ), stepMeta ) );
}
}
public StepInterface getStep( StepMeta stepMeta, StepDataInterface stepDataInterface, int cnr, TransMeta transMeta,
Trans trans ) {
return new ElasticSearchBulk( stepMeta, stepDataInterface, cnr, transMeta, trans );
}
public StepDataInterface getStepData() {
return new ElasticSearchBulkData();
}
public boolean supportsErrorHandling() {
return true;
}
}
| apache-2.0 |
gstevey/gradle | subprojects/core/src/main/java/org/gradle/groovy/scripts/ScriptSource.java | 1517 | /*
* Copyright 2010 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.groovy.scripts;
import org.gradle.internal.resource.TextResource;
import org.gradle.internal.scan.UsedByScanPlugin;
import java.io.Serializable;
/**
* The source for the text of a script, with some meta-info about the script.
*/
@UsedByScanPlugin
public interface ScriptSource extends Serializable {
/**
 * Returns the name to use for the compiled class for this script. Never returns null.
 *
 * @return the fully qualified or simple class name used when compiling this script
 */
String getClassName();
/**
 * Returns the source for this script. Never returns null.
 *
 * @return the text resource backing this script
 */
TextResource getResource();
/**
 * Returns the file name that is inserted into the class during compilation. For a script with a source
 * file this is the path to the file. Never returns null.
 *
 * @return the file name compiled into the generated class (used in stack traces)
 */
String getFileName();
/**
 * Returns the description for this script. Never returns null.
 *
 * @return a human-readable description, e.g. for error reporting
 */
String getDisplayName();
}
| apache-2.0 |
apache/oodt | commons/src/test/java/org/apache/oodt/commons/AvroMultiServerTest.java | 4006 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.oodt.commons;
import junit.framework.TestCase;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import javax.xml.parsers.ParserConfigurationException;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.rmi.server.RemoteObject;
// Parses the test-multiserver.xml fixture and verifies MultiServer's parsed
// configuration: app name, server count, binding behaviors, and system
// properties set by the config.
public class AvroMultiServerTest extends TestCase {
    // Stream over the XML fixture on the test classpath; opened in setUp, closed in tearDown.
    private InputStream testConfig;
    public AvroMultiServerTest(String name) {
        super(name);
    }
    protected void setUp() throws Exception {
        super.setUp();
        testConfig = getClass().getResourceAsStream("/test-multiserver.xml");
        if (testConfig == null) throw new IOException("Cannot find `test-multiserver.xml'");
        // Pre-set so we can verify below that the config does NOT override existing properties.
        System.setProperty("my.other.setting", "Don't override");
    }
    public void tearDown() throws Exception {
        // Best-effort close; a failure to close the classpath stream should not fail the test.
        if (testConfig != null) try {
            testConfig.close();
        } catch (IOException ignore) {}
        // Remove properties this test (and the parsed config) may have set, to avoid leaking
        // state into other tests in the same JVM.
        System.getProperties().remove("my.setting");
        System.getProperties().remove("my.other.setting");
        super.tearDown();
    }
    // Minimal server stubs: MultiServer instantiates servers reflectively with an ExecServer arg.
    public static class Svr1 extends RemoteObject {
        public Svr1(ExecServer e) {}
    }
    public static class Svr2 extends RemoteObject {
        public Svr2(ExecServer e) {}
    }
    public static class Svr3 extends RemoteObject {
        public Svr3(ExecServer e) {}
    }
    public static class Svr4 extends RemoteObject {
        public Svr4(ExecServer e) {}
    }
    public void testParsing() throws ParserConfigurationException, SAXException, IOException, ClassNotFoundException,
        NoSuchMethodException, InstantiationException, IllegalAccessException, InvocationTargetException {
        InputSource is = new InputSource(testConfig);
        MultiServer.parseConfig(is);
        assertEquals("test.app", MultiServer.getAppName());
        assertEquals(4, MultiServer.getServers().size());
        // NOTE(review): the expected class names below reference MultiServerTest$SvrN, not this
        // class's own Svr1..Svr4 inner classes — presumably the shared test-multiserver.xml
        // fixture names MultiServerTest's stubs; confirm against the fixture file.
        MultiServer.Server server = (MultiServer.Server) MultiServer.getServers().get("urn:eda:rmi:Test1");
        assertEquals("org.apache.oodt.commons.MultiServerTest$Svr1", server.getClassName());
        assertEquals(MultiServer.BINDING, server.getBindingBehavior());
        server = (MultiServer.Server) MultiServer.getServers().get("urn:eda:rmi:Test2");
        assertEquals("org.apache.oodt.commons.MultiServerTest$Svr2", server.getClassName());
        assertEquals(MultiServer.NONBINDING, server.getBindingBehavior());
        server = (MultiServer.Server) MultiServer.getServers().get("urn:eda:rmi:Test3");
        assertEquals("org.apache.oodt.commons.MultiServerTest$Svr3", server.getClassName());
        assertEquals(MultiServer.REBINDING, server.getBindingBehavior());
        MultiServer.AutobindingServer s = (MultiServer.AutobindingServer) MultiServer.getServers().get("urn:eda:rmi:Test4");
        assertEquals("org.apache.oodt.commons.MultiServerTest$Svr4", s.getClassName());
        assertEquals(MultiServer.AUTO, s.getBindingBehavior());
        assertEquals(360000L, s.getPeriod());
        // Properties from the config: new ones are set, pre-existing ones are left alone.
        assertEquals("My Value", System.getProperty("my.setting"));
        assertEquals("Don't override", System.getProperty("my.other.setting"));
    }
}
| apache-2.0 |
asarazan/bazel | src/main/java/com/google/devtools/build/lib/profiler/statistics/SkylarkStatistics.java | 4191 | // Copyright 2015 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.profiler.statistics;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Multimaps;
import com.google.devtools.build.lib.profiler.ProfileInfo;
import com.google.devtools.build.lib.profiler.ProfileInfo.Task;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Map.Entry;
/**
 * Computes various statistics for Skylark and built-in function usage from profiler data.
 *
 * <p>All statistics are computed eagerly at construction time; this class is immutable
 * afterwards.
 */
public final class SkylarkStatistics {

  /** Maps each user-defined function description (file:line#function) to its profiled tasks. */
  private final ListMultimap<String, Task> userFunctionTasks;
  /** Maps each built-in function description (package.class#method) to its profiled tasks. */
  private final ListMultimap<String, Task> builtinFunctionTasks;
  /** Per-function aggregated statistics for user-defined functions. */
  private final List<TasksStatistics> userFunctionStats;
  /** Per-function aggregated statistics for built-in functions. */
  private final List<TasksStatistics> builtinFunctionStats;
  /** Total time spent in all user-defined function calls, in nanoseconds. */
  private final long userTotalNanos;
  /** Total time spent in all built-in function calls, in nanoseconds. */
  private final long builtinTotalNanos;

  public SkylarkStatistics(ProfileInfo info) {
    userFunctionTasks = info.getSkylarkUserFunctionTasks();
    builtinFunctionTasks = info.getSkylarkBuiltinFunctionTasks();
    userFunctionStats = new ArrayList<>();
    builtinFunctionStats = new ArrayList<>();
    // Compute totals directly in the constructor so the total fields can be final.
    userTotalNanos = computeStatistics(userFunctionTasks, userFunctionStats);
    builtinTotalNanos = computeStatistics(builtinFunctionTasks, builtinFunctionStats);
  }

  /**
   * @return the total time taken by all calls to built-in Skylark functions
   */
  public long getBuiltinTotalNanos() {
    return builtinTotalNanos;
  }

  /**
   * @return the total time taken by all calls to user-defined Skylark functions
   */
  public long getUserTotalNanos() {
    return userTotalNanos;
  }

  /**
   * @return a map from user-defined function descriptions of the form file:line#function to all
   *    corresponding {@link com.google.devtools.build.lib.profiler.ProfileInfo.Task}s.
   */
  public ListMultimap<String, Task> getUserFunctionTasks() {
    return userFunctionTasks;
  }

  /**
   * @return a map from built-in function descriptions of the form package.class#method to all
   *    corresponding {@link com.google.devtools.build.lib.profiler.ProfileInfo.Task}s.
   */
  public ListMultimap<String, Task> getBuiltinFunctionTasks() {
    return builtinFunctionTasks;
  }

  public List<TasksStatistics> getBuiltinFunctionStats() {
    return builtinFunctionStats;
  }

  public List<TasksStatistics> getUserFunctionStats() {
    return userFunctionStats;
  }

  /**
   * For each Skylark function compute a {@link TasksStatistics} object from the execution times of
   * all corresponding {@link Task}s and add it to the list.
   *
   * @param tasks Map from function name to all corresponding tasks.
   * @param stats The list to which {@link TasksStatistics} are to be added.
   * @return The sum of the execution times of all {@link Task} values in the map.
   */
  private static long computeStatistics(
      ListMultimap<String, Task> tasks, List<TasksStatistics> stats) {
    long total = 0L;
    for (Entry<String, List<Task>> entry : Multimaps.asMap(tasks).entrySet()) {
      TasksStatistics functionStats = TasksStatistics.create(entry.getKey(), entry.getValue());
      stats.add(functionStats);
      total += functionStats.totalNanos;
    }
    return total;
  }
}
| apache-2.0 |
ecarm002/incubator-asterixdb | hyracks-fullstack/hyracks/hyracks-ipc/src/main/java/org/apache/hyracks/ipc/impl/HandleState.java | 947 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.hyracks.ipc.impl;
/**
 * Lifecycle states of an IPC handle. The constant names suggest a connect
 * handshake progression (initial -> connect sent/received -> connected ->
 * closed); the exact transitions are driven by the IPC implementation
 * elsewhere in this package.
 */
enum HandleState {
    // Handle newly created; no connect exchange has happened yet.
    INITIAL,
    // A connect request has been sent and is awaiting a response.
    CONNECT_SENT,
    // A connect request has been received from the peer.
    CONNECT_RECEIVED,
    // Handshake complete; the handle is usable.
    CONNECTED,
    // Handle has been closed and can no longer be used.
    CLOSED,
}
| apache-2.0 |
twitter/bookkeeper | bookkeeper-server/src/main/java/org/apache/bookkeeper/net/NetworkTopologyImpl.java | 32170 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.bookkeeper.net;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * The class represents a cluster of computers with a tree hierarchical
 * network topology.
 * For example, a cluster may consist of many data centers filled
 * with racks of computers.
 * In a network topology, leaves represent data nodes (computers) and inner
 * nodes represent switches/routers that manage traffic in/out of data centers
 * or racks.
 */
public class NetworkTopologyImpl implements NetworkTopology {

    public final static int DEFAULT_HOST_LEVEL = 2;
    public static final Logger LOG = LoggerFactory.getLogger(NetworkTopologyImpl.class);

    /** Thrown when an addition would make leaf depths inconsistent (rack and non-rack at same level). */
    public static class InvalidTopologyException extends RuntimeException {
        private static final long serialVersionUID = 1L;

        public InvalidTopologyException(String msg) {
            super(msg);
        }
    }

    /** InnerNode represents a switch/router of a data center or rack.
     * Different from a leaf node, it has non-null children.
     */
    static class InnerNode extends NodeBase {
        protected List<Node> children = new ArrayList<Node>();
        private int numOfLeaves;

        /** Construct an InnerNode from a path-like string */
        InnerNode(String path) {
            super(path);
        }

        /** Construct an InnerNode from its name and its network location */
        InnerNode(String name, String location) {
            super(name, location);
        }

        /** Construct an InnerNode
         * from its name, its network location, its parent, and its level */
        InnerNode(String name, String location, InnerNode parent, int level) {
            super(name, location, parent, level);
        }

        /** @return its children */
        List<Node> getChildren() {
            return children;
        }

        /** @return the number of children this node has */
        int getNumOfChildren() {
            return children.size();
        }

        /** Judge if this node represents a rack
         * @return true if it has no child or its children are not InnerNodes
         */
        boolean isRack() {
            if (children.isEmpty()) {
                return true;
            }
            Node firstChild = children.get(0);
            if (firstChild instanceof InnerNode) {
                return false;
            }
            return true;
        }

        /** Judge if this node is an ancestor of node <i>n</i>
         *
         * @param n a node
         * @return true if this node is an ancestor of <i>n</i>
         */
        boolean isAncestor(Node n) {
            return getPath(this).equals(NodeBase.PATH_SEPARATOR_STR)
                    || (n.getNetworkLocation() + NodeBase.PATH_SEPARATOR_STR).startsWith(getPath(this)
                            + NodeBase.PATH_SEPARATOR_STR);
        }

        /** Judge if this node is the parent of node <i>n</i>
         *
         * @param n a node
         * @return true if this node is the parent of <i>n</i>
         */
        boolean isParent(Node n) {
            return n.getNetworkLocation().equals(getPath(this));
        }

        /* Return a child name of this node who is an ancestor of node <i>n</i> */
        private String getNextAncestorName(Node n) {
            if (!isAncestor(n)) {
                throw new IllegalArgumentException(this + "is not an ancestor of " + n);
            }
            String name = n.getNetworkLocation().substring(getPath(this).length());
            if (name.charAt(0) == PATH_SEPARATOR) {
                name = name.substring(1);
            }
            int index = name.indexOf(PATH_SEPARATOR);
            if (index != -1)
                name = name.substring(0, index);
            return name;
        }

        /** Add node <i>n</i> to the subtree of this node
         * @param n node to be added
         * @return true if the node is added; false otherwise
         */
        boolean add(Node n) {
            if (!isAncestor(n))
                throw new IllegalArgumentException(n.getName() + ", which is located at " + n.getNetworkLocation()
                        + ", is not a decendent of " + getPath(this));
            if (isParent(n)) {
                // this node is the parent of n; add n directly
                n.setParent(this);
                n.setLevel(this.level + 1);
                for (int i = 0; i < children.size(); i++) {
                    if (children.get(i).getName().equals(n.getName())) {
                        // replace an existing child with the same name; leaf count unchanged
                        children.set(i, n);
                        return false;
                    }
                }
                children.add(n);
                numOfLeaves++;
                return true;
            } else {
                // find the next ancestor node
                String parentName = getNextAncestorName(n);
                InnerNode parentNode = null;
                for (int i = 0; i < children.size(); i++) {
                    if (children.get(i).getName().equals(parentName)) {
                        parentNode = (InnerNode) children.get(i);
                        break;
                    }
                }
                if (parentNode == null) {
                    // create a new InnerNode
                    parentNode = createParentNode(parentName);
                    children.add(parentNode);
                }
                // add n to the subtree of the next ancestor node
                if (parentNode.add(n)) {
                    numOfLeaves++;
                    return true;
                } else {
                    return false;
                }
            }
        }

        /**
         * Creates a parent node to be added to the list of children.
         * Creates a node using the InnerNode four argument constructor specifying
         * the name, location, parent, and level of this node.
         *
         * <p>To be overridden in subclasses for specific InnerNode implementations,
         * as alternative to overriding the full {@link #add(Node)} method.
         *
         * @param parentName The name of the parent node
         * @return A new inner node
         * @see InnerNode#InnerNode(String, String, InnerNode, int)
         */
        protected InnerNode createParentNode(String parentName) {
            return new InnerNode(parentName, getPath(this), this, this.getLevel() + 1);
        }

        /** Remove node <i>n</i> from the subtree of this node
         * @param n node to be deleted
         * @return true if the node is deleted; false otherwise
         */
        boolean remove(Node n) {
            String parent = n.getNetworkLocation();
            String currentPath = getPath(this);
            if (!isAncestor(n))
                throw new IllegalArgumentException(n.getName() + ", which is located at " + parent
                        + ", is not a descendent of " + currentPath);
            if (isParent(n)) {
                // this node is the parent of n; remove n directly
                for (int i = 0; i < children.size(); i++) {
                    if (children.get(i).getName().equals(n.getName())) {
                        children.remove(i);
                        numOfLeaves--;
                        n.setParent(null);
                        return true;
                    }
                }
                return false;
            } else {
                // find the next ancestor node: the parent node
                String parentName = getNextAncestorName(n);
                InnerNode parentNode = null;
                int i;
                for (i = 0; i < children.size(); i++) {
                    if (children.get(i).getName().equals(parentName)) {
                        parentNode = (InnerNode) children.get(i);
                        break;
                    }
                }
                if (parentNode == null) {
                    return false;
                }
                // remove n from the parent node
                boolean isRemoved = parentNode.remove(n);
                // if the parent node has no children, remove the parent node too
                if (isRemoved) {
                    if (parentNode.getNumOfChildren() == 0) {
                        children.remove(i);
                    }
                    numOfLeaves--;
                }
                return isRemoved;
            }
        } // end of remove

        /** Given a node's string representation, return a reference to the node
         * @param loc string location of the form /rack/node
         * @return null if the node is not found or the childnode is there but
         * not an instance of {@link InnerNode}
         */
        private Node getLoc(String loc) {
            if (loc == null || loc.length() == 0)
                return this;
            String[] path = loc.split(PATH_SEPARATOR_STR, 2);
            Node childnode = null;
            for (int i = 0; i < children.size(); i++) {
                if (children.get(i).getName().equals(path[0])) {
                    childnode = children.get(i);
                }
            }
            if (childnode == null)
                return null; // non-existing node
            if (path.length == 1)
                return childnode;
            if (childnode instanceof InnerNode) {
                return ((InnerNode) childnode).getLoc(path[1]);
            } else {
                return null;
            }
        }

        /** get <i>leafIndex</i> leaf of this subtree
         * if it is not in the <i>excludedNode</i>
         *
         * @param leafIndex an indexed leaf of the node
         * @param excludedNode an excluded node (can be null)
         * @return the leaf node, or null if the index is out of range
         */
        Node getLeaf(int leafIndex, Node excludedNode) {
            int count = 0;
            // check if the excluded node a leaf
            boolean isLeaf = excludedNode == null || !(excludedNode instanceof InnerNode);
            // calculate the total number of excluded leaf nodes
            int numOfExcludedLeaves = isLeaf ? 1 : ((InnerNode) excludedNode).getNumOfLeaves();
            if (isLeafParent()) { // children are leaves
                if (isLeaf) { // excluded node is a leaf node
                    int excludedIndex = children.indexOf(excludedNode);
                    if (excludedIndex != -1 && leafIndex >= 0) {
                        // excluded node is one of the children so adjust the leaf index
                        leafIndex = leafIndex >= excludedIndex ? leafIndex + 1 : leafIndex;
                    }
                }
                // range check
                if (leafIndex < 0 || leafIndex >= this.getNumOfChildren()) {
                    return null;
                }
                return children.get(leafIndex);
            } else {
                for (int i = 0; i < children.size(); i++) {
                    InnerNode child = (InnerNode) children.get(i);
                    if (excludedNode == null || excludedNode != child) {
                        // not the excludedNode
                        int numOfLeaves = child.getNumOfLeaves();
                        if (excludedNode != null && child.isAncestor(excludedNode)) {
                            numOfLeaves -= numOfExcludedLeaves;
                        }
                        if (count + numOfLeaves > leafIndex) {
                            // the leaf is in the child subtree
                            return child.getLeaf(leafIndex - count, excludedNode);
                        } else {
                            // go to the next child
                            count = count + numOfLeaves;
                        }
                    } else { // it is the excluededNode
                        // skip it and set the excludedNode to be null
                        excludedNode = null;
                    }
                }
                return null;
            }
        }

        protected boolean isLeafParent() {
            return isRack();
        }

        /**
         * Determine if children a leaves, default implementation calls {@link #isRack()}
         * <p>To be overridden in subclasses for specific InnerNode implementations,
         * as alternative to overriding the full {@link #getLeaf(int, Node)} method.
         *
         * @return true if children are leaves, false otherwise
         */
        protected boolean areChildrenLeaves() {
            return isRack();
        }

        /**
         * Get number of leaves.
         */
        int getNumOfLeaves() {
            return numOfLeaves;
        }
    } // end of InnerNode

    /**
     * the root cluster map
     */
    InnerNode clusterMap;
    /** Depth of all leaf nodes */
    private int depthOfAllLeaves = -1;
    /** rack counter */
    protected int numOfRacks = 0;
    /** the lock used to manage access */
    protected ReadWriteLock netlock = new ReentrantReadWriteLock();

    public NetworkTopologyImpl() {
        clusterMap = new InnerNode(InnerNode.ROOT);
    }

    /** Add a leaf node
     * Update node counter & rack counter if necessary
     * @param node node to be added; can be null
     * @exception IllegalArgumentException if add a node to a leave
     or node to be added is not a leaf
     */
    public void add(Node node) {
        if (node == null)
            return;
        // snapshot topology string before mutation, for error reporting
        String oldTopoStr = this.toString();
        if (node instanceof InnerNode) {
            throw new IllegalArgumentException("Not allow to add an inner node: " + NodeBase.getPath(node));
        }
        int newDepth = NodeBase.locationToDepth(node.getNetworkLocation()) + 1;
        netlock.writeLock().lock();
        try {
            if ((depthOfAllLeaves != -1) && (depthOfAllLeaves != newDepth)) {
                LOG.error("Error: can't add leaf node at depth " + newDepth + " to topology:\n" + oldTopoStr);
                throw new InvalidTopologyException("Invalid network topology. "
                        + "You cannot have a rack and a non-rack node at the same level of the network topology.");
            }
            Node rack = getNodeForNetworkLocation(node);
            if (rack != null && !(rack instanceof InnerNode)) {
                throw new IllegalArgumentException("Unexpected data node " + node.toString()
                        + " at an illegal network location");
            }
            if (clusterMap.add(node)) {
                LOG.info("Adding a new node: " + NodeBase.getPath(node));
                if (rack == null) {
                    numOfRacks++;
                }
                if (!(node instanceof InnerNode)) {
                    if (depthOfAllLeaves == -1) {
                        depthOfAllLeaves = node.getLevel();
                    }
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("NetworkTopology became:\n" + this.toString());
            }
        } finally {
            netlock.writeLock().unlock();
        }
    }

    /**
     * Return a reference to the node given its string representation.
     * Default implementation delegates to {@link #getNode(String)}.
     *
     * <p>To be overridden in subclasses for specific NetworkTopology
     * implementations, as alternative to overriding the full {@link #add(Node)}
     * method.
     *
     * @param node The string representation of this node's network location is
     *          used to retrieve a Node object.
     * @return a reference to the node; null if the node is not in the tree
     *
     * @see #add(Node)
     * @see #getNode(String)
     */
    protected Node getNodeForNetworkLocation(Node node) {
        return getNode(node.getNetworkLocation());
    }

    /**
     * Given a string representation of a rack, return its children
     * @param loc a path-like string representation of a rack
     * @return a newly allocated list with all the node's children
     */
    public List<Node> getDatanodesInRack(String loc) {
        netlock.readLock().lock();
        try {
            loc = NodeBase.normalize(loc);
            if (!NodeBase.ROOT.equals(loc)) {
                loc = loc.substring(1);
            }
            InnerNode rack = (InnerNode) clusterMap.getLoc(loc);
            if (rack == null) {
                return null;
            }
            return new ArrayList<Node>(rack.getChildren());
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** Remove a node
     * Update node counter and rack counter if necessary
     * @param node node to be removed; can be null
     */
    @Override
    public void remove(Node node) {
        if (node == null)
            return;
        if (node instanceof InnerNode) {
            throw new IllegalArgumentException("Not allow to remove an inner node: " + NodeBase.getPath(node));
        }
        LOG.info("Removing a node: " + NodeBase.getPath(node));
        netlock.writeLock().lock();
        try {
            if (clusterMap.remove(node)) {
                // if the node's rack disappeared with it, decrement the rack count
                InnerNode rack = (InnerNode) getNode(node.getNetworkLocation());
                if (rack == null) {
                    numOfRacks--;
                }
            }
            if (LOG.isDebugEnabled()) {
                LOG.debug("NetworkTopology became:\n" + this.toString());
            }
        } finally {
            netlock.writeLock().unlock();
        }
    }

    /** Check if the tree contains node <i>node</i>
     *
     * @param node a node
     * @return true if <i>node</i> is already in the tree; false otherwise
     */
    @Override
    public boolean contains(Node node) {
        if (node == null)
            return false;
        netlock.readLock().lock();
        try {
            // walk up the parent chain; the node is in this topology iff the chain reaches our root
            Node parent = node.getParent();
            for (int level = node.getLevel(); parent != null && level > 0; parent = parent.getParent(), level--) {
                if (parent == clusterMap) {
                    return true;
                }
            }
        } finally {
            netlock.readLock().unlock();
        }
        return false;
    }

    /** Given a string representation of a node, return its reference
     *
     * @param loc
     *          a path-like string representation of a node
     * @return a reference to the node; null if the node is not in the tree
     */
    @Override
    public Node getNode(String loc) {
        netlock.readLock().lock();
        try {
            loc = NodeBase.normalize(loc);
            if (!NodeBase.ROOT.equals(loc))
                loc = loc.substring(1);
            return clusterMap.getLoc(loc);
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** Given a string representation of a rack for a specific network
     * location
     *
     * To be overridden in subclasses for specific NetworkTopology
     * implementations, as alternative to overriding the full
     * {@link #getRack(String)} method.
     * @param loc
     *          a path-like string representation of a network location
     * @return a rack string
     */
    public String getRack(String loc) {
        return loc;
    }

    /** @return the total number of racks */
    @Override
    public int getNumOfRacks() {
        netlock.readLock().lock();
        try {
            return numOfRacks;
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** @return the total number of leaf nodes */
    public int getNumOfLeaves() {
        netlock.readLock().lock();
        try {
            return clusterMap.getNumOfLeaves();
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** Return the distance between two nodes
     * It is assumed that the distance from one node to its parent is 1
     * The distance between two nodes is calculated by summing up their distances
     * to their closest common ancestor.
     * @param node1 one node
     * @param node2 another node
     * @return the distance between node1 and node2 which is zero if they are the same
     *  or {@link Integer#MAX_VALUE} if node1 or node2 do not belong to the cluster
     */
    public int getDistance(Node node1, Node node2) {
        if (node1 == node2) {
            return 0;
        }
        Node n1 = node1, n2 = node2;
        int dis = 0;
        netlock.readLock().lock();
        try {
            // first bring both nodes to the same level, then climb in lockstep
            // until they share a parent
            int level1 = node1.getLevel(), level2 = node2.getLevel();
            while (n1 != null && level1 > level2) {
                n1 = n1.getParent();
                level1--;
                dis++;
            }
            while (n2 != null && level2 > level1) {
                n2 = n2.getParent();
                level2--;
                dis++;
            }
            while (n1 != null && n2 != null && n1.getParent() != n2.getParent()) {
                n1 = n1.getParent();
                n2 = n2.getParent();
                dis += 2;
            }
        } finally {
            netlock.readLock().unlock();
        }
        if (n1 == null) {
            LOG.warn("The cluster does not contain node: {}", NodeBase.getPath(node1));
            return Integer.MAX_VALUE;
        }
        if (n2 == null) {
            LOG.warn("The cluster does not contain node: {}", NodeBase.getPath(node2));
            return Integer.MAX_VALUE;
        }
        // +2 accounts for the final hop from each node to the common parent
        return dis + 2;
    }

    /** Check if two nodes are on the same rack
     * @param node1 one node (can be null)
     * @param node2 another node (can be null)
     * @return true if node1 and node2 are on the same rack; false otherwise
     * @exception IllegalArgumentException when either node1 or node2 is null, or
     *              node1 or node2 do not belong to the cluster
     */
    public boolean isOnSameRack(Node node1, Node node2) {
        if (node1 == null || node2 == null) {
            return false;
        }
        netlock.readLock().lock();
        try {
            return isSameParents(node1, node2);
        } finally {
            netlock.readLock().unlock();
        }
    }

    /**
     * Check if network topology is aware of NodeGroup
     */
    public boolean isNodeGroupAware() {
        return false;
    }

    /**
     * Return false directly as not aware of NodeGroup, to be override in sub-class
     */
    public boolean isOnSameNodeGroup(Node node1, Node node2) {
        return false;
    }

    /**
     * Compare the parents of each node for equality
     *
     * <p>To be overridden in subclasses for specific NetworkTopology
     * implementations, as alternative to overriding the full
     * {@link #isOnSameRack(Node, Node)} method.
     *
     * @param node1 the first node to compare
     * @param node2 the second node to compare
     * @return true if their parents are equal, false otherwise
     *
     * @see #isOnSameRack(Node, Node)
     */
    protected boolean isSameParents(Node node1, Node node2) {
        return node1.getParent() == node2.getParent();
    }

    final protected static Random r = new Random();

    /** randomly choose one node from <i>scope</i>
     * if scope starts with ~, choose one from the all nodes except for the
     * ones in <i>scope</i>; otherwise, choose one from <i>scope</i>
     * @param scope range of nodes from which a node will be chosen
     * @return the chosen node, or null if no node is available
     */
    public Node chooseRandom(String scope) {
        netlock.readLock().lock();
        try {
            if (scope.startsWith("~")) {
                return chooseRandom(NodeBase.ROOT, scope.substring(1));
            } else {
                return chooseRandom(scope, null);
            }
        } finally {
            netlock.readLock().unlock();
        }
    }

    private Node chooseRandom(String scope, String excludedScope) {
        if (excludedScope != null) {
            if (scope.startsWith(excludedScope)) {
                return null;
            }
            if (!excludedScope.startsWith(scope)) {
                excludedScope = null;
            }
        }
        Node node = getNode(scope);
        if (!(node instanceof InnerNode)) {
            return node;
        }
        InnerNode innerNode = (InnerNode) node;
        int numOfDatanodes = innerNode.getNumOfLeaves();
        if (excludedScope == null) {
            node = null;
        } else {
            node = getNode(excludedScope);
            if (!(node instanceof InnerNode)) {
                numOfDatanodes -= 1;
            } else {
                numOfDatanodes -= ((InnerNode) node).getNumOfLeaves();
            }
        }
        // Guard against an empty or fully-excluded scope: Random#nextInt
        // throws IllegalArgumentException for a non-positive bound.
        if (numOfDatanodes <= 0) {
            return null;
        }
        int leaveIndex = r.nextInt(numOfDatanodes);
        return innerNode.getLeaf(leaveIndex, node);
    }

    /** return leaves in <i>scope</i>
     * @param scope a path string
     * @return leaves nodes under specific scope
     */
    private Set<Node> doGetLeaves(String scope) {
        Node node = getNode(scope);
        Set<Node> leafNodes = new HashSet<Node>();
        if (!(node instanceof InnerNode)) {
            leafNodes.add(node);
        } else {
            InnerNode innerNode = (InnerNode) node;
            for (int i = 0; i < innerNode.getNumOfLeaves(); i++) {
                leafNodes.add(innerNode.getLeaf(i, null));
            }
        }
        return leafNodes;
    }

    @Override
    public Set<Node> getLeaves(String scope) {
        netlock.readLock().lock();
        try {
            if (scope.startsWith("~")) {
                Set<Node> allNodes = doGetLeaves(NodeBase.ROOT);
                Set<Node> excludeNodes = doGetLeaves(scope.substring(1));
                allNodes.removeAll(excludeNodes);
                return allNodes;
            } else {
                return doGetLeaves(scope);
            }
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** return the number of leaves in <i>scope</i> but not in <i>excludedNodes</i>
     * if scope starts with ~, return the number of nodes that are not
     * in <i>scope</i> and <i>excludedNodes</i>;
     * @param scope a path string that may start with ~
     * @param excludedNodes a list of nodes
     * @return number of available nodes
     */
    public int countNumOfAvailableNodes(String scope, Collection<Node> excludedNodes) {
        boolean isExcluded = false;
        if (scope.startsWith("~")) {
            isExcluded = true;
            scope = scope.substring(1);
        }
        scope = NodeBase.normalize(scope);
        int count = 0; // the number of nodes in both scope & excludedNodes
        netlock.readLock().lock();
        try {
            for (Node node : excludedNodes) {
                if ((NodeBase.getPath(node) + NodeBase.PATH_SEPARATOR_STR).startsWith(scope
                        + NodeBase.PATH_SEPARATOR_STR)) {
                    count++;
                }
            }
            // NOTE(review): if the scope does not exist in the tree, getNode returns null
            // and the scope is counted as a single leaf — confirm this is intended.
            Node n = getNode(scope);
            int scopeNodeCount = 1;
            if (n instanceof InnerNode) {
                scopeNodeCount = ((InnerNode) n).getNumOfLeaves();
            }
            if (isExcluded) {
                return clusterMap.getNumOfLeaves() - scopeNodeCount - excludedNodes.size() + count;
            } else {
                return scopeNodeCount - count;
            }
        } finally {
            netlock.readLock().unlock();
        }
    }

    /** convert a network tree to a string */
    @Override
    public String toString() {
        // print the number of racks
        StringBuilder tree = new StringBuilder();
        tree.append("Number of racks: ");
        tree.append(numOfRacks);
        tree.append("\n");
        // print the number of leaves
        int numOfLeaves = getNumOfLeaves();
        tree.append("Expected number of leaves:");
        tree.append(numOfLeaves);
        tree.append("\n");
        // print nodes
        for (int i = 0; i < numOfLeaves; i++) {
            tree.append(NodeBase.getPath(clusterMap.getLeaf(i, null)));
            tree.append("\n");
        }
        return tree.toString();
    }

    /**
     * Divide networklocation string into two parts by last separator, and get
     * the first part here.
     *
     * @param networkLocation the location to split
     * @return everything before the last path separator
     */
    public static String getFirstHalf(String networkLocation) {
        int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
        return networkLocation.substring(0, index);
    }

    /**
     * Divide networklocation string into two parts by last separator, and get
     * the second part here.
     *
     * @param networkLocation the location to split
     * @return the last path separator and everything after it
     */
    public static String getLastHalf(String networkLocation) {
        int index = networkLocation.lastIndexOf(NodeBase.PATH_SEPARATOR_STR);
        return networkLocation.substring(index);
    }

    /** swap two array items */
    static protected void swap(Node[] nodes, int i, int j) {
        Node tempNode;
        tempNode = nodes[j];
        nodes[j] = nodes[i];
        nodes[i] = tempNode;
    }

    /** Sort nodes array by their distances to <i>reader</i>
     * It linearly scans the array, if a local node is found, swap it with
     * the first element of the array.
     * If a local rack node is found, swap it with the first element following
     * the local node.
     * If neither local node or local rack node is found, put a random replica
     * location at position 0.
     * It leaves the rest nodes untouched.
     * @param reader the node that wishes to read a block from one of the nodes
     * @param nodes the list of nodes containing data for the reader
     */
    public void pseudoSortByDistance(Node reader, Node[] nodes) {
        int tempIndex = 0;
        int localRackNode = -1;
        if (reader != null) {
            //scan the array to find the local node & local rack node
            for (int i = 0; i < nodes.length; i++) {
                if (tempIndex == 0 && reader == nodes[i]) { //local node
                    //swap the local node and the node at position 0
                    if (i != 0) {
                        swap(nodes, tempIndex, i);
                    }
                    tempIndex = 1;
                    if (localRackNode != -1) {
                        if (localRackNode == 0) {
                            // the earlier swap moved the rack-local node to position i
                            localRackNode = i;
                        }
                        break;
                    }
                } else if (localRackNode == -1 && isOnSameRack(reader, nodes[i])) {
                    //local rack
                    localRackNode = i;
                    if (tempIndex != 0)
                        break;
                }
            }
            // swap the local rack node and the node at position tempIndex
            if (localRackNode != -1 && localRackNode != tempIndex) {
                swap(nodes, tempIndex, localRackNode);
                tempIndex++;
            }
        }
        // put a random node at position 0 if it is not a local/local-rack node
        if (tempIndex == 0 && localRackNode == -1 && nodes.length != 0) {
            swap(nodes, 0, r.nextInt(nodes.length));
        }
    }
}
| apache-2.0 |
kavin256/Derby | java/engine/org/apache/derby/impl/sql/compile/GetCurrentConnectionNode.java | 6534 | /*
Derby - Class org.apache.derby.impl.sql.compile.GetCurrentConnectionNode
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.sql.compile;
import org.apache.derby.iapi.services.context.ContextManager;
import org.apache.derby.iapi.sql.compile.CompilerContext;
import org.apache.derby.iapi.services.compiler.MethodBuilder;
import org.apache.derby.iapi.services.sanity.SanityManager;
import org.apache.derby.iapi.jdbc.ConnectionContext;
import org.apache.derby.iapi.error.StandardException;
import org.apache.derby.iapi.sql.dictionary.DataDictionary;
import org.apache.derby.iapi.types.TypeId;
import org.apache.derby.iapi.types.DataValueFactory;
import org.apache.derby.iapi.services.classfile.VMOpcode;
import org.apache.derby.iapi.store.access.Qualifier;
import org.apache.derby.impl.sql.compile.ExpressionClassBuilder;
import org.apache.derby.impl.sql.execute.BaseActivation;
import org.apache.derby.iapi.util.JBitSet;
import org.apache.derby.catalog.TypeDescriptor;
import org.apache.derby.iapi.reference.ClassName;
import java.sql.SQLException;
import java.sql.Types;
import java.util.Vector;
/**
* This node represents a unary getCurrentConnection operator
* RESOLVE - parameter will always be null for now. Someday
* we may want to allow user to specify which of their connections
* they want. Assume that we will use a String.
*
*/
public final class GetCurrentConnectionNode extends JavaValueNode
{
	/**
	 * Constructor for a GetCurrentConnectionNode.
	 *
	 * The result type of getCurrentConnection() is always
	 * java.sql.Connection, so it is fixed here; there is nothing to bind.
	 */
	public GetCurrentConnectionNode()
	{
		/*
		** The result type of getCurrentConnection is
		** java.sql.Connection
		*/
		setJavaTypeName("java.sql.Connection");
	}

	/**
	 * Bind this operator. No binding work is required: the result type was
	 * already fixed in the constructor and the operator takes no operands.
	 *
	 * @param fromList		The query's FROM list
	 * @param subqueryList		The subquery list being built as we find SubqueryNodes
	 * @param aggregateVector	The aggregate vector being built as we find AggregateNodes
	 *
	 * @return this node, unchanged
	 *
	 * @exception StandardException		Thrown on error
	 */
	public JavaValueNode bindExpression(
		FromList fromList, SubqueryList subqueryList,
		Vector	aggregateVector)
			throws StandardException
	{
		return this;
	}

	/**
	 * Preprocess an expression tree. We do a number of transformations
	 * here (including subqueries, IN lists, LIKE and BETWEEN) plus
	 * subquery flattening. This node has no children, so nothing to do.
	 * NOTE: This is done before the outer ResultSetNode is preprocessed.
	 *
	 * @param	numTables			Number of tables in the DML Statement
	 * @param	outerFromList		FromList from outer query block
	 * @param	outerSubqueryList	SubqueryList from outer query block
	 * @param	outerPredicateList	PredicateList from outer query block
	 *
	 * @exception StandardException		Thrown on error
	 */
	public void preprocess(int numTables,
							FromList outerFromList,
							SubqueryList outerSubqueryList,
							PredicateList outerPredicateList)
					throws StandardException
	{
	}

	/**
	 * Categorize this predicate. Initially, this means
	 * building a bit map of the referenced tables for each predicate.
	 * If the source of this ColumnReference (at the next underlying level)
	 * is not a ColumnReference or a VirtualColumnNode then this predicate
	 * will not be pushed down.
	 *
	 * For example, in:
	 *		select * from (select 1 from s) a (x) where x = 1
	 * we will not push down x = 1.
	 * NOTE: It would be easy to handle the case of a constant, but if the
	 * inner SELECT returns an arbitrary expression, then we would have to copy
	 * that tree into the pushed predicate, and that tree could contain
	 * subqueries and method calls.
	 * RESOLVE - revisit this issue once we have views.
	 *
	 * @param referencedTabs	JBitSet with bit map of referenced FromTables
	 * @param simplePredsOnly	Whether or not to consider method
	 *							calls, field references and conditional nodes
	 *							when building bit map
	 *
	 * @return boolean		Always false: this node is never a ColumnReference
	 *						or a VirtualColumnNode, so it blocks predicate pushdown.
	 */
	public boolean categorize(JBitSet referencedTabs, boolean simplePredsOnly)
	{
		return false;
	}

	/**
	 * Remap all ColumnReferences in this tree to be clones of the
	 * underlying expression. There are no ColumnReferences under this node,
	 * so it returns itself unchanged.
	 *
	 * @return JavaValueNode			The remapped expression tree.
	 *
	 */
	public JavaValueNode remapColumnReferencesToExpressions()
	{
		return this;
	}

	/**
	 * Bind a ? parameter operand. getCurrentConnection() takes no
	 * parameters, so this is intentionally a no-op.
	 */
	void bindParameter()
	{
	}

	/**
	 * Return the variant type for the underlying expression.
	 * The variant type can be:
	 *		VARIANT				- variant within a scan
	 *							  (method calls and non-static field access)
	 *		SCAN_INVARIANT		- invariant within a scan
	 *							  (column references from outer tables)
	 *		QUERY_INVARIANT		- invariant within the life of a query
	 *							  (constant expressions)
	 *
	 * @return	QUERY_INVARIANT: the current connection does not change
	 *			during the execution of a single query.
	 */
	protected int getOrderableVariantType()
	{
		return Qualifier.QUERY_INVARIANT;
	}

	/**
	 * Generate the bytecode for this expression: a virtual call to
	 * BaseActivation.getCurrentConnection() on the activation ("this").
	 *
	 * @see ConstantNode#generateExpression
	 *
	 * @param acb	The ExpressionClassBuilder for the class being built
	 * @param mb	The method the code to place the code
	 *
	 * @exception StandardException		Thrown on error
	 */
	public void generateExpression(ExpressionClassBuilder acb,
											MethodBuilder mb)
									throws StandardException
	{
		// Push the activation, then invoke getCurrentConnection() on it.
		// The order of these two calls is significant for stack correctness.
		mb.pushThis();
		mb.callMethod(VMOpcode.INVOKEVIRTUAL, ClassName.BaseActivation, "getCurrentConnection", getJavaTypeName(), 0);
	}

	/**
		Check the reliability type of this java value: getCurrentConnection()
		is illegal in some SQL contexts, and the compiler context enforces that.

		@exception StandardException		Thrown on error

		@see org.apache.derby.iapi.sql.compile.CompilerContext
	*/
	public void checkReliability(ValueNode sqlNode)
		throws StandardException {
		sqlNode.checkReliability("getCurrentConnection()",
								 CompilerContext.CURRENT_CONNECTION_ILLEGAL);
	}
}
| apache-2.0 |
gmile/elasticsearch | src/main/java/org/elasticsearch/action/bench/BenchmarkService.java | 31500 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.bench;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ElasticsearchIllegalStateException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.support.master.MasterNodeOperationRequest;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.ClusterService;
import org.elasticsearch.cluster.ClusterState;
import org.elasticsearch.cluster.ProcessedClusterStateUpdateTask;
import org.elasticsearch.cluster.TimeoutClusterStateUpdateTask;
import org.elasticsearch.cluster.metadata.BenchmarkMetaData;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.node.DiscoveryNodes;
import org.elasticsearch.common.collect.ImmutableOpenMap;
import org.elasticsearch.common.component.AbstractLifecycleComponent;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.regex.Regex;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.util.concurrent.CountDown;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.*;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.CopyOnWriteArrayList;
/**
* Service component for running benchmarks
*/
/**
 * Service component for running benchmarks.
 *
 * Coordinates benchmark execution across the cluster: benchmark lifecycle
 * state is tracked in cluster-state metadata ({@link BenchmarkMetaData}),
 * while the actual work is fanned out over the transport layer to nodes
 * that carry the {@code bench=true} node attribute.
 */
public class BenchmarkService extends AbstractLifecycleComponent<BenchmarkService> {

    private final ThreadPool threadPool;
    private final ClusterService clusterService;
    private final TransportService transportService;
    protected final BenchmarkExecutor executor;

    public static final String ABORT_ACTION_NAME = "indices:data/benchmark/executor/abort";
    public static final String STATUS_ACTION_NAME = "indices:data/benchmark/executor/status";
    public static final String START_ACTION_NAME = "indices:data/benchmark/executor/start";

    /**
     * Constructs a service component for running benchmarks
     *
     * @param settings         Settings
     * @param clusterService   Cluster service
     * @param threadPool       Thread pool
     * @param client           Client
     * @param transportService Transport service
     */
    @Inject
    public BenchmarkService(Settings settings, ClusterService clusterService, ThreadPool threadPool,
                            Client client, TransportService transportService) {
        super(settings);
        this.threadPool = threadPool;
        this.executor = new BenchmarkExecutor(client, clusterService);
        this.clusterService = clusterService;
        this.transportService = transportService;
        transportService.registerHandler(START_ACTION_NAME, new BenchExecutionHandler());
        transportService.registerHandler(ABORT_ACTION_NAME, new AbortExecutionHandler());
        transportService.registerHandler(STATUS_ACTION_NAME, new StatusExecutionHandler());
    }

    @Override
    protected void doStart() throws ElasticsearchException { }

    @Override
    protected void doStop() throws ElasticsearchException { }

    @Override
    protected void doClose() throws ElasticsearchException { }

    /**
     * Lists actively running benchmarks on the cluster
     *
     * @param request  Status request
     * @param listener Response listener
     */
    public void listBenchmarks(final BenchmarkStatusRequest request, final ActionListener<BenchmarkStatusResponse> listener) {
        final List<DiscoveryNode> nodes = availableBenchmarkNodes();
        if (nodes.size() == 0) {
            // No benchmark-capable nodes: report an empty status rather than failing.
            listener.onResponse(new BenchmarkStatusResponse());
        } else {
            BenchmarkStatusAsyncHandler async = new BenchmarkStatusAsyncHandler(nodes.size(), request, listener);
            for (DiscoveryNode node : nodes) {
                assert isBenchmarkNode(node);
                transportService.sendRequest(node, STATUS_ACTION_NAME, new NodeStatusRequest(request), async);
            }
        }
    }

    /**
     * Aborts actively running benchmarks on the cluster
     *
     * @param benchmarkNames Benchmark name(s) to abort; supports simple wildcard patterns
     * @param listener       Response listener
     */
    public void abortBenchmark(final String[] benchmarkNames, final ActionListener<AbortBenchmarkResponse> listener) {
        final List<DiscoveryNode> nodes = availableBenchmarkNodes();
        if (nodes.size() == 0) {
            listener.onFailure(new BenchmarkNodeMissingException("No available nodes for executing benchmarks"));
        } else {
            BenchmarkStateListener benchmarkStateListener = new BenchmarkStateListener() {
                @Override
                public void onResponse(final ClusterState newState, final List<BenchmarkMetaData.Entry> changed) {
                    if (!changed.isEmpty()) {
                        // Fan the abort out to every node that participates in any matched benchmark.
                        threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() {
                            @Override
                            public void run() {
                                Set<String> names = new HashSet<>();
                                Set<String> nodeNames = new HashSet<>();
                                final ImmutableOpenMap<String, DiscoveryNode> nodes = newState.nodes().nodes();
                                for (BenchmarkMetaData.Entry e : changed) {
                                    names.add(e.benchmarkId());
                                    nodeNames.addAll(Arrays.asList(e.nodes()));
                                }
                                BenchmarkAbortAsyncHandler asyncHandler = new BenchmarkAbortAsyncHandler(nodeNames.size(), listener);
                                String[] benchmarkNames = names.toArray(new String[names.size()]);
                                for (String nodeId : nodeNames) {
                                    final DiscoveryNode node = nodes.get(nodeId);
                                    if (node != null) {
                                        transportService.sendRequest(node, ABORT_ACTION_NAME, new NodeAbortRequest(benchmarkNames), asyncHandler);
                                    } else {
                                        // Node left the cluster; count it down so the handler can still complete.
                                        asyncHandler.countDown.countDown();
                                        logger.debug("Node for ID [" + nodeId + "] not found in cluster state - skipping");
                                    }
                                }
                            }
                        });
                    } else {
                        listener.onFailure(new BenchmarkMissingException("No benchmarks found for " + Arrays.toString(benchmarkNames)));
                    }
                }

                @Override
                public void onFailure(Throwable t) {
                    listener.onFailure(t);
                }
            };
            clusterService.submitStateUpdateTask("abort_benchmark", new AbortBenchmarkTask(benchmarkNames, benchmarkStateListener));
        }
    }

    /**
     * Executes benchmarks on the cluster
     *
     * @param request  Benchmark request
     * @param listener Response listener
     */
    public void startBenchmark(final BenchmarkRequest request, final ActionListener<BenchmarkResponse> listener) {
        final List<DiscoveryNode> nodes = availableBenchmarkNodes();
        if (nodes.size() == 0) {
            listener.onFailure(new BenchmarkNodeMissingException("No available nodes for executing benchmark [" +
                    request.benchmarkName() + "]"));
        } else {
            final BenchmarkStateListener benchListener = new BenchmarkStateListener() {
                @Override
                public void onResponse(final ClusterState newState, final List<BenchmarkMetaData.Entry> entries) {
                    threadPool.executor(ThreadPool.Names.GENERIC).execute(new Runnable() {
                        @Override
                        public void run() {
                            assert entries.size() == 1;
                            BenchmarkMetaData.Entry entry = entries.get(0);
                            final ImmutableOpenMap<String, DiscoveryNode> nodes = newState.nodes().nodes();
                            final BenchmarkSearchAsyncHandler async = new BenchmarkSearchAsyncHandler(entry.nodes().length, request, listener);
                            for (String nodeId : entry.nodes()) {
                                final DiscoveryNode node = nodes.get(nodeId);
                                if (node == null) {
                                    async.handleExceptionInternal(
                                            new ElasticsearchIllegalStateException("Node for ID [" + nodeId + "] not found in cluster state - skipping"));
                                } else {
                                    logger.debug("Starting benchmark [{}] node [{}]", request.benchmarkName(), node.name());
                                    transportService.sendRequest(node, START_ACTION_NAME, new NodeBenchRequest(request), async);
                                }
                            }
                        }
                    });
                }

                @Override
                public void onFailure(Throwable t) {
                    listener.onFailure(t);
                }
            };
            clusterService.submitStateUpdateTask("start_benchmark", new StartBenchmarkTask(request, benchListener));
        }
    }

    /**
     * Marks a benchmark as finished in cluster state and relays the final
     * response to the caller once the state update has been processed.
     */
    private void finishBenchmark(final BenchmarkResponse benchmarkResponse, final String benchmarkId, final ActionListener<BenchmarkResponse> listener) {
        clusterService.submitStateUpdateTask("finish_benchmark", new FinishBenchmarkTask("finish_benchmark", benchmarkId, new BenchmarkStateListener() {
            @Override
            public void onResponse(ClusterState newClusterState, List<BenchmarkMetaData.Entry> changed) {
                listener.onResponse(benchmarkResponse);
            }

            @Override
            public void onFailure(Throwable t) {
                listener.onFailure(t);
            }
        }, (benchmarkResponse.state() != BenchmarkResponse.State.ABORTED) &&
           (benchmarkResponse.state() != BenchmarkResponse.State.FAILED)));
    }

    /** Returns true when the node advertises the {@code bench=true} attribute. */
    private final boolean isBenchmarkNode(DiscoveryNode node) {
        ImmutableMap<String, String> attributes = node.getAttributes();
        if (attributes.containsKey("bench")) {
            String bench = attributes.get("bench");
            return Boolean.parseBoolean(bench);
        }
        return false;
    }

    /**
     * Selects up to {@code request.numExecutorNodes()} benchmark-capable nodes,
     * preferring the local node when it qualifies.
     */
    private List<DiscoveryNode> findNodes(BenchmarkRequest request) {
        final int numNodes = request.numExecutorNodes();
        final DiscoveryNodes nodes = clusterService.state().nodes();
        DiscoveryNode localNode = nodes.localNode();
        List<DiscoveryNode> benchmarkNodes = new ArrayList<DiscoveryNode>();
        if (isBenchmarkNode(localNode)) {
            benchmarkNodes.add(localNode);
        }
        for (DiscoveryNode node : nodes) {
            if (benchmarkNodes.size() >= numNodes) {
                return benchmarkNodes;
            }
            if (node != localNode && isBenchmarkNode(node)) {
                benchmarkNodes.add(node);
            }
        }
        return benchmarkNodes;
    }

    /** Transport handler that runs a benchmark on this node. */
    private class BenchExecutionHandler extends BaseTransportRequestHandler<NodeBenchRequest> {

        @Override
        public NodeBenchRequest newInstance() {
            return new NodeBenchRequest();
        }

        @Override
        public void messageReceived(NodeBenchRequest request, TransportChannel channel) throws Exception {
            BenchmarkResponse response = executor.benchmark(request.request);
            channel.sendResponse(response);
        }

        @Override
        public String executor() {
            return ThreadPool.Names.BENCH;
        }
    }

    /** Transport handler that reports the status of benchmarks running on this node. */
    private class StatusExecutionHandler extends BaseTransportRequestHandler<NodeStatusRequest> {

        @Override
        public NodeStatusRequest newInstance() {
            return new NodeStatusRequest();
        }

        @Override
        public void messageReceived(NodeStatusRequest request, TransportChannel channel) throws Exception {
            BenchmarkStatusNodeResponse nodeResponse = executor.benchmarkStatus();
            nodeResponse.nodeName(clusterService.localNode().name());
            channel.sendResponse(nodeResponse);
        }

        @Override
        public String executor() {
            // Perform management tasks on GENERIC so as not to block pending acquisition of a thread from BENCH.
            return ThreadPool.Names.GENERIC;
        }
    }

    /** Transport handler that aborts benchmarks running on this node. */
    private class AbortExecutionHandler extends BaseTransportRequestHandler<NodeAbortRequest> {

        @Override
        public NodeAbortRequest newInstance() {
            return new NodeAbortRequest();
        }

        @Override
        public void messageReceived(NodeAbortRequest request, TransportChannel channel) throws Exception {
            AbortBenchmarkResponse nodeResponse = executor.abortBenchmark(request.benchmarkNames);
            channel.sendResponse(nodeResponse);
        }

        @Override
        public String executor() {
            // Perform management tasks on GENERIC so as not to block pending acquisition of a thread from BENCH.
            return ThreadPool.Names.GENERIC;
        }
    }

    /** Wire request carrying the benchmark name patterns to abort on a node. */
    public static class NodeAbortRequest extends TransportRequest {
        private String[] benchmarkNames;

        public NodeAbortRequest(String[] benchmarkNames) {
            this.benchmarkNames = benchmarkNames;
        }

        public NodeAbortRequest() {
            this(null);
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            benchmarkNames = in.readStringArray();
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            out.writeStringArray(benchmarkNames);
        }
    }

    /** Wire request carrying a status query to a node. */
    public static class NodeStatusRequest extends TransportRequest {
        final BenchmarkStatusRequest request;

        public NodeStatusRequest(BenchmarkStatusRequest request) {
            this.request = request;
        }

        public NodeStatusRequest() {
            this(new BenchmarkStatusRequest());
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            request.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            request.writeTo(out);
        }
    }

    /** Wire request carrying a benchmark definition to an executor node. */
    public static class NodeBenchRequest extends TransportRequest {
        final BenchmarkRequest request;

        public NodeBenchRequest(BenchmarkRequest request) {
            this.request = request;
        }

        public NodeBenchRequest() {
            this(new BenchmarkRequest());
        }

        @Override
        public void readFrom(StreamInput in) throws IOException {
            super.readFrom(in);
            request.readFrom(in);
        }

        @Override
        public void writeTo(StreamOutput out) throws IOException {
            super.writeTo(out);
            request.writeTo(out);
        }
    }

    /**
     * Collects per-node responses and invokes {@link #sendResponse()} exactly
     * once, after the expected number of responses/failures has arrived.
     */
    private abstract class CountDownAsyncHandler<T extends TransportResponse> implements TransportResponseHandler<T> {

        protected final CountDown countDown;
        protected final CopyOnWriteArrayList<T> responses = new CopyOnWriteArrayList<T>();
        protected final CopyOnWriteArrayList<Throwable> failures = new CopyOnWriteArrayList<Throwable>();

        protected CountDownAsyncHandler(int size) {
            countDown = new CountDown(size);
        }

        public abstract T newInstance();

        protected abstract void sendResponse();

        @Override
        public void handleResponse(T t) {
            responses.add(t);
            if (countDown.countDown()) {
                sendResponse();
            }
        }

        @Override
        public void handleException(TransportException t) {
            failures.add(t);
            logger.error(t.getMessage(), t);
            if (countDown.countDown()) {
                sendResponse();
            }
        }

        public String executor() {
            return ThreadPool.Names.SAME;
        }
    }

    /** Aggregates per-node abort acknowledgements into a single acked response. */
    private class BenchmarkAbortAsyncHandler extends CountDownAsyncHandler<AbortBenchmarkResponse> {

        private final ActionListener<AbortBenchmarkResponse> listener;

        public BenchmarkAbortAsyncHandler(int size, ActionListener<AbortBenchmarkResponse> listener) {
            super(size);
            this.listener = listener;
        }

        @Override
        public AbortBenchmarkResponse newInstance() {
            return new AbortBenchmarkResponse();
        }

        @Override
        protected void sendResponse() {
            // Acknowledge only if every node acknowledged.
            boolean acked = true;
            for (AbortBenchmarkResponse nodeResponse : responses) {
                if (!nodeResponse.isAcknowledged()) {
                    acked = false;
                    break;
                }
            }
            listener.onResponse(new AbortBenchmarkResponse(acked));
        }
    }

    /** Aggregates per-node status responses into a single consolidated response. */
    private class BenchmarkStatusAsyncHandler extends CountDownAsyncHandler<BenchmarkStatusNodeResponse> {

        private final BenchmarkStatusRequest request;
        private final ActionListener<BenchmarkStatusResponse> listener;

        public BenchmarkStatusAsyncHandler(int nodeCount, final BenchmarkStatusRequest request, ActionListener<BenchmarkStatusResponse> listener) {
            super(nodeCount);
            this.request = request;
            this.listener = listener;
        }

        @Override
        public BenchmarkStatusNodeResponse newInstance() {
            return new BenchmarkStatusNodeResponse();
        }

        @Override
        protected void sendResponse() {
            int activeBenchmarks = 0;
            BenchmarkStatusResponse consolidatedResponse = new BenchmarkStatusResponse();
            Map<String, List<BenchmarkResponse>> nameNodeResponseMap = new HashMap<>();

            // Group node responses by benchmark name
            for (BenchmarkStatusNodeResponse nodeResponse : responses) {
                for (BenchmarkResponse benchmarkResponse : nodeResponse.benchResponses()) {
                    List<BenchmarkResponse> benchmarkResponses = nameNodeResponseMap.get(benchmarkResponse.benchmarkName());
                    if (benchmarkResponses == null) {
                        benchmarkResponses = new ArrayList<>();
                        nameNodeResponseMap.put(benchmarkResponse.benchmarkName(), benchmarkResponses);
                    }
                    benchmarkResponses.add(benchmarkResponse);
                }
                activeBenchmarks += nodeResponse.activeBenchmarks();
            }

            for (Map.Entry<String, List<BenchmarkResponse>> entry : nameNodeResponseMap.entrySet()) {
                BenchmarkResponse consolidated = consolidateBenchmarkResponses(entry.getValue());
                consolidatedResponse.addBenchResponse(consolidated);
            }

            consolidatedResponse.totalActiveBenchmarks(activeBenchmarks);
            listener.onResponse(consolidatedResponse);
        }
    }

    /**
     * Merges per-node responses for a single benchmark into one response,
     * accumulating competition results, errors, and the overall state.
     */
    private BenchmarkResponse consolidateBenchmarkResponses(List<BenchmarkResponse> responses) {
        BenchmarkResponse response = new BenchmarkResponse();

        // Merge node responses into a single consolidated response
        List<String> errors = new ArrayList<>();
        for (BenchmarkResponse r : responses) {
            for (Map.Entry<String, CompetitionResult> entry : r.competitionResults.entrySet()) {
                if (!response.competitionResults.containsKey(entry.getKey())) {
                    response.competitionResults.put(entry.getKey(),
                            new CompetitionResult(
                                    entry.getKey(), entry.getValue().concurrency(), entry.getValue().multiplier(),
                                    false, entry.getValue().percentiles()));
                }
                CompetitionResult cr = response.competitionResults.get(entry.getKey());
                cr.nodeResults().addAll(entry.getValue().nodeResults());
            }
            if (r.hasErrors()) {
                for (String error : r.errors()) {
                    errors.add(error);
                }
            }
            if (response.benchmarkName() == null) {
                response.benchmarkName(r.benchmarkName());
            }
            assert response.benchmarkName().equals(r.benchmarkName());
            if (!errors.isEmpty()) {
                response.errors(errors.toArray(new String[errors.size()]));
            }
            response.mergeState(r.state());
            assert errors.isEmpty() || response.state() != BenchmarkResponse.State.COMPLETE : "Response can't be complete since it has errors";
        }
        return response;
    }

    /** Aggregates per-node benchmark results, then finalizes the benchmark in cluster state. */
    private class BenchmarkSearchAsyncHandler extends CountDownAsyncHandler<BenchmarkResponse> {

        private final ActionListener<BenchmarkResponse> listener;
        private final BenchmarkRequest request;

        public BenchmarkSearchAsyncHandler(int size, BenchmarkRequest request, ActionListener<BenchmarkResponse> listener) {
            super(size);
            this.listener = listener;
            this.request = request;
        }

        @Override
        public BenchmarkResponse newInstance() {
            return new BenchmarkResponse();
        }

        @Override
        protected void sendResponse() {
            BenchmarkResponse response = consolidateBenchmarkResponses(responses);
            response.benchmarkName(request.benchmarkName());
            response.verbose(request.verbose());
            finishBenchmark(response, request.benchmarkName(), listener);
        }

        public void handleExceptionInternal(Throwable t) {
            failures.add(t);
            if (countDown.countDown()) {
                sendResponse();
            }
        }
    }

    /** Callback invoked after a benchmark-related cluster state update has been applied. */
    public static interface BenchmarkStateListener {

        void onResponse(ClusterState newClusterState, List<BenchmarkMetaData.Entry> changed);

        void onFailure(Throwable t);
    }

    /** Cluster state update task that registers a new benchmark as STARTED. */
    public final class StartBenchmarkTask extends BenchmarkStateChangeAction<BenchmarkRequest> {

        private final BenchmarkStateListener stateListener;
        private List<BenchmarkMetaData.Entry> newBenchmark = new ArrayList<>();

        public StartBenchmarkTask(BenchmarkRequest request, BenchmarkStateListener stateListener) {
            super(request);
            this.stateListener = stateListener;
        }

        @Override
        public ClusterState execute(ClusterState currentState) {
            MetaData metaData = currentState.getMetaData();
            BenchmarkMetaData bmd = metaData.custom(BenchmarkMetaData.TYPE);
            MetaData.Builder mdBuilder = MetaData.builder(metaData);
            ImmutableList.Builder<BenchmarkMetaData.Entry> builder = ImmutableList.builder();

            if (bmd != null) {
                for (BenchmarkMetaData.Entry entry : bmd.entries()) {
                    if (request.benchmarkName().equals(entry.benchmarkId())) {
                        if (entry.state() != BenchmarkMetaData.State.SUCCESS && entry.state() != BenchmarkMetaData.State.FAILED) {
                            throw new ElasticsearchException("A benchmark with ID [" + request.benchmarkName() + "] is already running in state [" + entry.state() + "]");
                        }
                        // just drop the entry it it has finished successfully or it failed!
                    } else {
                        builder.add(entry);
                    }
                }
            }

            List<DiscoveryNode> nodes = findNodes(request);
            String[] nodeIds = new String[nodes.size()];
            int i = 0;
            for (DiscoveryNode node : nodes) {
                nodeIds[i++] = node.getId();
            }
            BenchmarkMetaData.Entry entry = new BenchmarkMetaData.Entry(request.benchmarkName(), BenchmarkMetaData.State.STARTED, nodeIds);
            newBenchmark.add(entry);
            bmd = new BenchmarkMetaData(builder.add(entry).build());
            mdBuilder.putCustom(BenchmarkMetaData.TYPE, bmd);
            return ClusterState.builder(currentState).metaData(mdBuilder).build();
        }

        @Override
        public void onFailure(String source, Throwable t) {
            logger.warn("Failed to start benchmark: [{}]", t, request.benchmarkName());
            newBenchmark = null;
            stateListener.onFailure(t);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, final ClusterState newState) {
            if (newBenchmark != null) {
                stateListener.onResponse(newState, newBenchmark);
            }
        }

        @Override
        public TimeValue timeout() {
            return request.masterNodeTimeout();
        }
    }

    /** Moves a STARTED/ABORTED benchmark entry to SUCCESS or FAILED. */
    public final class FinishBenchmarkTask extends UpdateBenchmarkStateTask {

        private final boolean success;

        public FinishBenchmarkTask(String reason, String benchmarkId, BenchmarkStateListener listener, boolean success) {
            super(reason, benchmarkId, listener);
            this.success = success;
        }

        @Override
        protected BenchmarkMetaData.Entry process(BenchmarkMetaData.Entry entry) {
            BenchmarkMetaData.State state = entry.state();
            assert state == BenchmarkMetaData.State.STARTED || state == BenchmarkMetaData.State.ABORTED : "Expected state: STARTED or ABORTED but was: " + entry.state();
            if (success) {
                return new BenchmarkMetaData.Entry(entry, BenchmarkMetaData.State.SUCCESS);
            } else {
                return new BenchmarkMetaData.Entry(entry, BenchmarkMetaData.State.FAILED);
            }
        }
    }

    /** Moves STARTED benchmark entries whose IDs match the given patterns to ABORTED. */
    public final class AbortBenchmarkTask extends UpdateBenchmarkStateTask {
        private final String[] patterns;

        public AbortBenchmarkTask(String[] patterns, BenchmarkStateListener listener) {
            super("abort_benchmark", null, listener);
            this.patterns = patterns;
        }

        @Override
        protected boolean match(BenchmarkMetaData.Entry entry) {
            // BUGFIX: match each entry's own ID against the requested patterns.
            // Previously this compared the patterns against the (always null)
            // inherited benchmarkId field, and the base task's null-id
            // short-circuit meant match() was never even consulted - so an
            // abort request aborted every running benchmark regardless of name.
            return entry.state() == BenchmarkMetaData.State.STARTED && Regex.simpleMatch(this.patterns, entry.benchmarkId());
        }

        @Override
        protected BenchmarkMetaData.Entry process(BenchmarkMetaData.Entry entry) {
            return new BenchmarkMetaData.Entry(entry, BenchmarkMetaData.State.ABORTED);
        }
    }

    /**
     * Base cluster state update task that rewrites the state of matching
     * benchmark entries via {@link #process} and drops finished entries
     * (SUCCESS/ABORTED/FAILED) from cluster state.
     */
    public abstract class UpdateBenchmarkStateTask implements ProcessedClusterStateUpdateTask {

        private final String reason;
        protected final String benchmarkId;
        private final BenchmarkStateListener listener;
        private final List<BenchmarkMetaData.Entry> instances = new ArrayList<>();

        protected UpdateBenchmarkStateTask(String reason, String benchmarkId, BenchmarkStateListener listener) {
            this.reason = reason;
            this.listener = listener;
            this.benchmarkId = benchmarkId;
        }

        /**
         * Returns true for entries this task should process. Subclasses with a
         * null {@code benchmarkId} (e.g. pattern-based abort) must override.
         */
        protected boolean match(BenchmarkMetaData.Entry entry) {
            return entry.benchmarkId().equals(this.benchmarkId);
        }

        @Override
        public ClusterState execute(ClusterState currentState) {
            MetaData metaData = currentState.getMetaData();
            BenchmarkMetaData bmd = metaData.custom(BenchmarkMetaData.TYPE);
            MetaData.Builder mdBuilder = MetaData.builder(metaData);
            if (bmd != null && !bmd.entries().isEmpty()) {
                ImmutableList.Builder<BenchmarkMetaData.Entry> builder = new ImmutableList.Builder<BenchmarkMetaData.Entry>();
                for (BenchmarkMetaData.Entry e : bmd.entries()) {
                    // BUGFIX: always delegate entry selection to match().
                    // The old condition "benchmarkId == null || match(e)"
                    // bypassed subclass filtering whenever the id was null,
                    // which made pattern-based aborts hit every benchmark.
                    if (match(e)) {
                        e = process(e);
                        instances.add(e);
                    }
                    // Don't keep finished benchmarks around in cluster state
                    if (e != null && (e.state() != BenchmarkMetaData.State.SUCCESS &&
                                      e.state() != BenchmarkMetaData.State.ABORTED &&
                                      e.state() != BenchmarkMetaData.State.FAILED)) {
                        builder.add(e);
                    }
                }
                if (instances.isEmpty()) {
                    throw new ElasticsearchException("No Benchmark found for id: [" + benchmarkId + "]");
                }
                bmd = new BenchmarkMetaData(builder.build());
            }
            if (bmd != null) {
                mdBuilder.putCustom(BenchmarkMetaData.TYPE, bmd);
            }
            return ClusterState.builder(currentState).metaData(mdBuilder).build();
        }

        protected abstract BenchmarkMetaData.Entry process(BenchmarkMetaData.Entry entry);

        @Override
        public void onFailure(String source, Throwable t) {
            logger.warn("Failed updating benchmark state for ID [{}] triggered by: [{}]", t, benchmarkId, reason);
            listener.onFailure(t);
        }

        @Override
        public void clusterStateProcessed(String source, ClusterState oldState, final ClusterState newState) {
            listener.onResponse(newState, instances);
        }

        public String reason() {
            return reason;
        }
    }

    /** Base class for master-level benchmark state changes carrying the originating request. */
    public abstract class BenchmarkStateChangeAction<R extends MasterNodeOperationRequest> implements TimeoutClusterStateUpdateTask {

        protected final R request;

        public BenchmarkStateChangeAction(R request) {
            this.request = request;
        }

        @Override
        public TimeValue timeout() {
            return request.masterNodeTimeout();
        }
    }

    /** Returns all nodes in the current cluster state that can run benchmarks. */
    private List<DiscoveryNode> availableBenchmarkNodes() {
        DiscoveryNodes nodes = clusterService.state().nodes();
        List<DiscoveryNode> benchmarkNodes = new ArrayList<>(nodes.size());
        for (DiscoveryNode node : nodes) {
            if (isBenchmarkNode(node)) {
                benchmarkNodes.add(node);
            }
        }
        return benchmarkNodes;
    }
}
| apache-2.0 |
apache/rampart | modules/rampart-policy/src/main/java/org/apache/ws/secpolicy11/builders/X509TokenBuilder.java | 6352 | /*
* Copyright 2001-2004 The Apache Software Foundation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ws.secpolicy11.builders;
import java.util.Iterator;
import java.util.List;
import javax.xml.namespace.QName;
import org.apache.axiom.om.OMAttribute;
import org.apache.axiom.om.OMElement;
import org.apache.neethi.Assertion;
import org.apache.neethi.AssertionBuilderFactory;
import org.apache.neethi.Constants;
import org.apache.neethi.Policy;
import org.apache.neethi.PolicyEngine;
import org.apache.neethi.builders.AssertionBuilder;
import org.apache.ws.secpolicy.SP11Constants;
import org.apache.ws.secpolicy.SPConstants;
import org.apache.ws.secpolicy.model.X509Token;
public class X509TokenBuilder implements AssertionBuilder<OMElement> {
public final static String USER_CERT_ALIAS_LN = "userCertAlias";
public final static String ENCRYPTION_USER_LN = "encryptionUser";
public static final QName RAMPART_CONFIG = new QName("http://ws.apache.org/rampart/policy",
"RampartConfig");
public static final QName USER_CERT_ALIAS = new QName("http://ws.apache.org/rampart/policy",
USER_CERT_ALIAS_LN);
public static final QName ENCRYPTION_USER = new QName("http://ws.apache.org/rampart/policy",
ENCRYPTION_USER_LN);
    /**
     * Builds an {@link X509Token} assertion from the given WS-SecurityPolicy 1.1
     * X509Token element: reads the IncludeToken and wsp:Optional attributes,
     * then normalizes the nested policy and processes its (single) alternative,
     * and finally picks up Rampart-specific configuration overrides
     * (userCertAlias / encryptionUser) if present under the nested policy.
     */
    public Assertion build(OMElement element, AssertionBuilderFactory factory)
            throws IllegalArgumentException {
        X509Token x509Token = new X509Token(SPConstants.SP_V11);
        OMElement policyElement = element.getFirstElement();
        //Process token inclusion
        OMAttribute includeAttr = element.getAttribute(SP11Constants.INCLUDE_TOKEN);
        if(includeAttr != null) {
            int inclusion = SP11Constants.getInclusionFromAttributeValue(includeAttr.getAttributeValue());
            x509Token.setInclusion(inclusion);
        }
        // wsp:Optional="true" marks the whole token assertion as optional
        OMAttribute isOptional = element.getAttribute(Constants.Q_ELEM_OPTIONAL_ATTR);
        if (isOptional != null) {
            x509Token.setOptional(Boolean.valueOf(isOptional.getAttributeValue())
                    .booleanValue());
        }
        if (policyElement != null) {
            if (policyElement.getFirstChildWithName(SP11Constants.REQUIRE_DERIVED_KEYS) != null) {
                x509Token.setDerivedKeys(true);
            }
            // Normalize the nested policy so its assertions appear as alternatives
            Policy policy = PolicyEngine.getPolicy(element.getFirstElement());
            policy = (Policy) policy.normalize(false);
            for (Iterator<List<Assertion>> iterator = policy.getAlternatives(); iterator
                    .hasNext();) {
                processAlternative(iterator.next(), x509Token);
                /*
                 * since there should be only one alternative
                 */
                break;
            }
        }
        // Rampart-specific overrides nested inside the token's policy element.
        // NOTE(review): x509Token can never be null here (constructed above),
        // so only the policyElement check is effective - confirm before simplifying.
        if (x509Token != null && policyElement != null) {
            OMElement ramp = null;
            ramp = policyElement.getFirstChildWithName(RAMPART_CONFIG);
            if (ramp != null) {
                OMElement child = null;
                if ((child = ramp.getFirstChildWithName(USER_CERT_ALIAS)) != null) {
                    x509Token.setUserCertAlias(child.getText());
                }
                if ((child = ramp.getFirstChildWithName(ENCRYPTION_USER)) != null) {
                    x509Token.setEncryptionUser(child.getText());
                }
            }
        }
        return x509Token;
    }
private void processAlternative(List<Assertion> assertions, X509Token parent) {
Assertion assertion;
QName name;
for (Iterator<Assertion> iterator = assertions.iterator(); iterator.hasNext();) {
assertion = iterator.next();
name = assertion.getName();
if (SP11Constants.REQUIRE_KEY_IDENTIFIRE_REFERENCE.equals(name)) {
parent.setRequireKeyIdentifierReference(true);
} else if (SP11Constants.REQUIRE_ISSUER_SERIAL_REFERENCE.equals(name)) {
parent.setRequireIssuerSerialReference(true);
} else if (SP11Constants.REQUIRE_EMBEDDED_TOKEN_REFERENCE.equals(name)) {
parent.setRequireEmbeddedTokenReference(true);
} else if (SP11Constants.REQUIRE_THUMBPRINT_REFERENCE.equals(name)) {
parent.setRequireThumbprintReference(true);
} else if (SP11Constants.WSS_X509_V1_TOKEN_10.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_V1_TOKEN10);
} else if (SP11Constants.WSS_X509_V1_TOKEN_11.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_V1_TOKEN11);
} else if (SP11Constants.WSS_X509_V3_TOKEN_10.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_V3_TOKEN10);
} else if (SP11Constants.WSS_X509_V3_TOKEN_11.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_V3_TOKEN11);
} else if (SP11Constants.WSS_X509_PKCS7_TOKEN_10.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_PKCS7_TOKEN10);
} else if (SP11Constants.WSS_X509_PKCS7_TOKEN_11.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_PKCS7_TOKEN11);
} else if (SP11Constants.WSS_X509_PKI_PATH_V1_TOKEN_10.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_PKI_PATH_V1_TOKEN10);
} else if (SP11Constants.WSS_X509_PKI_PATH_V1_TOKEN_11.equals(name)) {
parent.setTokenVersionAndType(SPConstants.WSS_X509_PKI_PATH_V1_TOKEN11);
}
}
}
public QName[] getKnownElements() {
return new QName[] {SP11Constants.X509_TOKEN};
}
}
| apache-2.0 |
Yaliang/presto | presto-main/src/test/java/com/facebook/presto/sql/planner/optimizations/TestExpressionEquivalence.java | 9137 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.facebook.presto.sql.planner.optimizations;
import com.facebook.presto.metadata.MetadataManager;
import com.facebook.presto.spi.type.Type;
import com.facebook.presto.spi.type.TypeSignature;
import com.facebook.presto.sql.parser.ParsingOptions;
import com.facebook.presto.sql.parser.SqlParser;
import com.facebook.presto.sql.planner.Symbol;
import com.facebook.presto.sql.planner.TypeProvider;
import com.facebook.presto.sql.tree.Expression;
import com.google.common.base.Splitter;
import com.google.common.collect.ImmutableList;
import org.intellij.lang.annotations.Language;
import org.testng.annotations.Test;
import java.util.Set;
import static com.facebook.presto.SessionTestUtils.TEST_SESSION;
import static com.facebook.presto.sql.ExpressionUtils.rewriteIdentifiersToSymbolReferences;
import static com.facebook.presto.sql.parser.ParsingOptions.DecimalLiteralTreatment.AS_DOUBLE;
import static com.facebook.presto.sql.planner.SymbolsExtractor.extractUnique;
import static java.util.function.Function.identity;
import static java.util.stream.Collectors.toMap;
import static org.testng.Assert.assertFalse;
import static org.testng.Assert.assertTrue;
public class TestExpressionEquivalence
{
private static final SqlParser SQL_PARSER = new SqlParser();
private static final MetadataManager METADATA = MetadataManager.createTestMetadataManager();
private static final ExpressionEquivalence EQUIVALENCE = new ExpressionEquivalence(METADATA, SQL_PARSER);
@Test
public void testEquivalent()
{
assertEquivalent("CAST(null AS BIGINT)", "CAST(null as BIGINT)");
assertEquivalent("a_bigint < b_double", "b_double > a_bigint");
assertEquivalent("true", "true");
assertEquivalent("4", "4");
assertEquivalent("4.4", "4.4");
assertEquivalent("'foo'", "'foo'");
assertEquivalent("4 = 5", "5 = 4");
assertEquivalent("4.4 = 5.5", "5.5 = 4.4");
assertEquivalent("'foo' = 'bar'", "'bar' = 'foo'");
assertEquivalent("4 <> 5", "5 <> 4");
assertEquivalent("4 is distinct from 5", "5 is distinct from 4");
assertEquivalent("4 < 5", "5 > 4");
assertEquivalent("4 <= 5", "5 >= 4");
assertEquivalent("mod(4, 5)", "mod(4, 5)");
assertEquivalent("a_bigint", "a_bigint");
assertEquivalent("a_bigint = b_bigint", "b_bigint = a_bigint");
assertEquivalent("a_bigint < b_bigint", "b_bigint > a_bigint");
assertEquivalent("a_bigint < b_double", "b_double > a_bigint");
assertEquivalent("true and false", "false and true");
assertEquivalent("4 <= 5 and 6 < 7", "7 > 6 and 5 >= 4");
assertEquivalent("4 <= 5 or 6 < 7", "7 > 6 or 5 >= 4");
assertEquivalent("a_bigint <= b_bigint and c_bigint < d_bigint", "d_bigint > c_bigint and b_bigint >= a_bigint");
assertEquivalent("a_bigint <= b_bigint or c_bigint < d_bigint", "d_bigint > c_bigint or b_bigint >= a_bigint");
assertEquivalent("4 <= 5 and 4 <= 5", "4 <= 5");
assertEquivalent("4 <= 5 and 6 < 7", "7 > 6 and 5 >= 4 and 5 >= 4");
assertEquivalent("2 <= 3 and 4 <= 5 and 6 < 7", "7 > 6 and 5 >= 4 and 3 >= 2");
assertEquivalent("4 <= 5 or 4 <= 5", "4 <= 5");
assertEquivalent("4 <= 5 or 6 < 7", "7 > 6 or 5 >= 4 or 5 >= 4");
assertEquivalent("2 <= 3 or 4 <= 5 or 6 < 7", "7 > 6 or 5 >= 4 or 3 >= 2");
assertEquivalent("a_boolean and b_boolean and c_boolean", "c_boolean and b_boolean and a_boolean");
assertEquivalent("(a_boolean and b_boolean) and c_boolean", "(c_boolean and b_boolean) and a_boolean");
assertEquivalent("a_boolean and (b_boolean or c_boolean)", "a_boolean and (c_boolean or b_boolean) and a_boolean");
assertEquivalent(
"(a_boolean or b_boolean or c_boolean) and (d_boolean or e_boolean) and (f_boolean or g_boolean or h_boolean)",
"(h_boolean or g_boolean or f_boolean) and (b_boolean or a_boolean or c_boolean) and (e_boolean or d_boolean)");
assertEquivalent(
"(a_boolean and b_boolean and c_boolean) or (d_boolean and e_boolean) or (f_boolean and g_boolean and h_boolean)",
"(h_boolean and g_boolean and f_boolean) or (b_boolean and a_boolean and c_boolean) or (e_boolean and d_boolean)");
assertEquivalent(
"reduce(ARRAY [b_boolean], false, (s, x) -> s AND x, s -> s)",
"reduce(ARRAY [b_boolean], false, (s, x) -> x AND s, s -> s)");
}
private static void assertEquivalent(@Language("SQL") String left, @Language("SQL") String right)
{
ParsingOptions parsingOptions = new ParsingOptions(AS_DOUBLE /* anything */);
Expression leftExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(left, parsingOptions));
Expression rightExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(right, parsingOptions));
Set<Symbol> symbols = extractUnique(ImmutableList.of(leftExpression, rightExpression));
TypeProvider types = TypeProvider.copyOf(symbols.stream()
.collect(toMap(identity(), TestExpressionEquivalence::generateType)));
assertTrue(
EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, leftExpression, rightExpression, types),
String.format("Expected (%s) and (%s) to be equivalent", left, right));
assertTrue(
EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, rightExpression, leftExpression, types),
String.format("Expected (%s) and (%s) to be equivalent", right, left));
}
@Test
public void testNotEquivalent()
{
assertNotEquivalent("CAST(null AS BOOLEAN)", "false");
assertNotEquivalent("false", "CAST(null AS BOOLEAN)");
assertNotEquivalent("true", "false");
assertNotEquivalent("4", "5");
assertNotEquivalent("4.4", "5.5");
assertNotEquivalent("'foo'", "'bar'");
assertNotEquivalent("4 = 5", "5 = 6");
assertNotEquivalent("4 <> 5", "5 <> 6");
assertNotEquivalent("4 is distinct from 5", "5 is distinct from 6");
assertNotEquivalent("4 < 5", "5 > 6");
assertNotEquivalent("4 <= 5", "5 >= 6");
assertNotEquivalent("mod(4, 5)", "mod(5, 4)");
assertNotEquivalent("a_bigint", "b_bigint");
assertNotEquivalent("a_bigint = b_bigint", "b_bigint = c_bigint");
assertNotEquivalent("a_bigint < b_bigint", "b_bigint > c_bigint");
assertNotEquivalent("a_bigint < b_double", "b_double > c_bigint");
assertNotEquivalent("4 <= 5 and 6 < 7", "7 > 6 and 5 >= 6");
assertNotEquivalent("4 <= 5 or 6 < 7", "7 > 6 or 5 >= 6");
assertNotEquivalent("a_bigint <= b_bigint and c_bigint < d_bigint", "d_bigint > c_bigint and b_bigint >= c_bigint");
assertNotEquivalent("a_bigint <= b_bigint or c_bigint < d_bigint", "d_bigint > c_bigint or b_bigint >= c_bigint");
assertNotEquivalent(
"reduce(ARRAY [b_boolean], false, (s, x) -> s AND x, s -> s)",
"reduce(ARRAY [b_boolean], false, (s, x) -> s OR x, s -> s)");
}
private static void assertNotEquivalent(@Language("SQL") String left, @Language("SQL") String right)
{
ParsingOptions parsingOptions = new ParsingOptions(AS_DOUBLE /* anything */);
Expression leftExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(left, parsingOptions));
Expression rightExpression = rewriteIdentifiersToSymbolReferences(SQL_PARSER.createExpression(right, parsingOptions));
Set<Symbol> symbols = extractUnique(ImmutableList.of(leftExpression, rightExpression));
TypeProvider types = TypeProvider.copyOf(symbols.stream()
.collect(toMap(identity(), TestExpressionEquivalence::generateType)));
assertFalse(
EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, leftExpression, rightExpression, types),
String.format("Expected (%s) and (%s) to not be equivalent", left, right));
assertFalse(
EQUIVALENCE.areExpressionsEquivalent(TEST_SESSION, rightExpression, leftExpression, types),
String.format("Expected (%s) and (%s) to not be equivalent", right, left));
}
private static Type generateType(Symbol symbol)
{
String typeName = Splitter.on('_').limit(2).splitToList(symbol.getName()).get(1);
return METADATA.getType(new TypeSignature(typeName, ImmutableList.of()));
}
}
| apache-2.0 |
a186/pentaho-kettle | ui/src/org/pentaho/di/ui/repository/repositoryexplorer/controllers/BrowseController.java | 33505 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.ui.repository.repositoryexplorer.controllers;
import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Callable;
import org.eclipse.swt.widgets.Shell;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.repository.RepositoryDirectoryInterface;
import org.pentaho.di.ui.repository.repositoryexplorer.ContextChangeVetoer;
import org.pentaho.di.ui.repository.repositoryexplorer.ContextChangeVetoer.TYPE;
import org.pentaho.di.ui.repository.repositoryexplorer.ContextChangeVetoerCollection;
import org.pentaho.di.ui.repository.repositoryexplorer.ControllerInitializationException;
import org.pentaho.di.ui.repository.repositoryexplorer.IUISupportController;
import org.pentaho.di.ui.repository.repositoryexplorer.RepositoryExplorer;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIObjectCreationException;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIObjectRegistry;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryContent;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryDirectory;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryObject;
import org.pentaho.di.ui.repository.repositoryexplorer.model.UIRepositoryObjects;
import org.pentaho.di.ui.spoon.Spoon;
import org.pentaho.ui.xul.XulComponent;
import org.pentaho.ui.xul.XulException;
import org.pentaho.ui.xul.binding.Binding;
import org.pentaho.ui.xul.binding.BindingConvertor;
import org.pentaho.ui.xul.binding.BindingFactory;
import org.pentaho.ui.xul.components.XulConfirmBox;
import org.pentaho.ui.xul.components.XulMessageBox;
import org.pentaho.ui.xul.components.XulPromptBox;
import org.pentaho.ui.xul.containers.XulTree;
import org.pentaho.ui.xul.dnd.DropEvent;
import org.pentaho.ui.xul.impl.AbstractXulEventHandler;
import org.pentaho.ui.xul.swt.SwtBindingFactory;
import org.pentaho.ui.xul.swt.custom.DialogConstant;
import org.pentaho.ui.xul.util.XulDialogCallback;
/**
*
* This is the XulEventHandler for the browse panel of the repository explorer. It sets up the bindings for browse
* functionality.
*
*/
public class BrowseController extends AbstractXulEventHandler implements IUISupportController, IBrowseController {
  private static Class<?> PKG = RepositoryExplorer.class; // for i18n purposes, needed by Translator2!!
  // Directory most recently acted on by folder-level operations (export/delete).
  protected UIRepositoryDirectory repoDir;
  // Left-hand tree of repository folders ("folder-tree" in the XUL document).
  protected XulTree folderTree;
  // Right-hand table listing the selected folder's contents ("file-table").
  protected XulTree fileTable;
  // Root of the repository directory tree shown in the browser.
  protected UIRepositoryDirectory repositoryDirectory;
  protected ContextChangeVetoerCollection contextChangeVetoers;
  // Binding factory used to create all XUL bindings for this controller.
  protected BindingFactory bf;
  // Bindings refreshed when folders/files change; fired explicitly after mutations.
  protected Binding directoryBinding, selectedItemsBinding;
  protected List<UIRepositoryDirectory> selectedFolderItems;
  protected List<UIRepositoryObject> selectedFileItems;
  protected List<UIRepositoryDirectory> repositoryDirectories;
  // Repository being browsed; set in init().
  protected Repository repository;
  List<UIRepositoryObject> repositoryObjects;
  List<UIRepositoryObject> repositoryItems;
  private MainController mainController;
  // Reusable dialog elements created from the XUL document.
  protected XulMessageBox messageBox;
  protected XulConfirmBox confirmBox;

  /**
   * Allows for lookup of a UIRepositoryDirectory by ObjectId. This allows the reuse of instances that are inside a UI
   * tree.
   */
  protected Map<ObjectId, UIRepositoryDirectory> dirMap;
  // Re-fires the derived "selectedRepoDirChildren" property whenever the observed
  // directory's children change, so the file table binding refreshes.
  private PropertyChangeListener fileChildrenListener = new PropertyChangeListener() {
    public void propertyChange( PropertyChangeEvent arg0 ) {
      try {
        firePropertyChange( "selectedRepoDirChildren", null, getSelectedRepoDirChildren() );
      } catch ( Exception e ) {
        // Convert to runtime exception so it bubbles up through the UI.
        throw new RuntimeException( e );
      }
    }
  };
  // No-arg constructor; all setup is performed later in init( Repository ).
  public BrowseController() {
  }
  // begin PDI-3326 hack
  // Re-fires the "repositoryDirectory" property so the folder tree binding refreshes.
  private void fireRepositoryDirectoryChange() {
    firePropertyChange( "repositoryDirectory", null, repositoryDirectory );
  }
  // Re-fires both the folder list and the selected-folder-contents properties so
  // both panes of the browser refresh against the given previous values.
  private void fireFoldersAndItemsChange( List<UIRepositoryDirectory> previousValue,
      UIRepositoryObjects previousRepoObjects ) {
    firePropertyChange( "repositoryDirectories", previousValue, getRepositoryDirectories() );
    firePropertyChange( "selectedRepoDirChildren", previousRepoObjects, getSelectedRepoDirChildren() );
  }
  // end PDI-3326 hack
  /**
   * Wires this controller to the given repository: loads the full directory tree (falling back
   * to a plain UIRepositoryDirectory when the registered UI class cannot be constructed),
   * indexes all directories by object id, prepares the binding factory and message box, and
   * creates the XUL bindings.
   *
   * @param repository the repository to browse
   * @throws ControllerInitializationException if any part of the setup fails
   */
  public void init( Repository repository ) throws ControllerInitializationException {
    try {
      this.repository = repository;
      mainController = (MainController) this.getXulDomContainer().getEventHandler( "mainController" );
      try {
        this.repositoryDirectory =
            UIObjectRegistry.getInstance().constructUIRepositoryDirectory( repository.loadRepositoryDirectoryTree(),
                null, repository );
      } catch ( UIObjectCreationException uoe ) {
        // Fall back to the default directory implementation when the registered one cannot be built.
        this.repositoryDirectory =
            new UIRepositoryDirectory( repository.loadRepositoryDirectoryTree(), null, repository );
      }
      dirMap = new HashMap<ObjectId, UIRepositoryDirectory>();
      populateDirMap( repositoryDirectory );
      bf = new SwtBindingFactory();
      bf.setDocument( this.getXulDomContainer().getDocumentRoot() );
      messageBox = (XulMessageBox) document.createElement( "messagebox" );
      createBindings();
    } catch ( Exception e ) {
      throw new ControllerInitializationException( e );
    }
  }
  /**
   * Creates all XUL bindings for the browse panel: the folder tree, the file table, the
   * rename-menu enablement, and the selection bindings; then fires the initial population
   * and selects the user's home directory in the tree.
   */
  protected void createBindings() {
    folderTree = (XulTree) document.getElementById( "folder-tree" );
    fileTable = (XulTree) document.getElementById( "file-table" );
    // Hide the synthetic root node when the repository root itself is not visible.
    if ( !repositoryDirectory.isVisible() ) {
      folderTree.setHiddenrootnode( true );
    } else {
      folderTree.setHiddenrootnode( false );
    }
    // Enables "rename" only when exactly one non-null item is selected.
    BindingConvertor<List<?>, Boolean> checkIfMultipleItemsAreSelected = new BindingConvertor<List<?>, Boolean>() {
      @Override
      public Boolean sourceToTarget( List<?> value ) {
        return value != null && value.size() == 1 && value.get( 0 ) != null;
      }
      @Override
      public List<?> targetToSource( Boolean value ) {
        return null;
      }
    };
    bf.setBindingType( Binding.Type.ONE_WAY );
    bf.createBinding( fileTable, "selectedItems", "file-context-rename", "!disabled", checkIfMultipleItemsAreSelected );
    bf.createBinding( fileTable, "selectedItems", this, "selectedFileItems" );
    // begin PDI-3326 hack
    PropertyChangeListener childrenListener = new PropertyChangeListener() {
      public void propertyChange( PropertyChangeEvent evt ) {
        fireRepositoryDirectoryChange();
      }
    };
    repositoryDirectory.addPropertyChangeListener( "children", childrenListener );
    // end PDI-3326 hack
    directoryBinding = createDirectoryBinding();
    // Bind the selected index from the folder tree to the list of repository objects in the file table.
    bf.setBindingType( Binding.Type.ONE_WAY );
    bf.createBinding( folderTree, "selectedItems", this, "selectedFolderItems" );
    bf.setBindingType( Binding.Type.ONE_WAY );
    selectedItemsBinding = bf.createBinding( this, "selectedRepoDirChildren", fileTable, "elements" );
    // bindings can be added here in subclasses
    doCreateBindings();
    try {
      // Fires the population of the repository tree of folders.
      directoryBinding.fireSourceChanged();
    } catch ( Exception e ) {
      // convert to runtime exception so it bubbles up through the UI
      throw new RuntimeException( e );
    }
    try {
      // Set the initial selected directory as the users home directory
      RepositoryDirectoryInterface homeDir = repository.getUserHomeDirectory();
      int currentDir = 0;
      String[] homePath = homeDir == null ? null : homeDir.getPathArray();
      if ( homePath != null ) {
        UIRepositoryDirectory tempRoot = repositoryDirectory;
        // Check to see if the first item in homePath is the root directory
        if ( homePath.length > 0 && tempRoot.getName().equalsIgnoreCase( homePath[currentDir] ) ) {
          if ( homePath.length == 1 ) {
            // The home directory is home root
            setSelectedFolderItems( Arrays.asList( tempRoot ) );
          }
          // We have used the first element. Increment to the next
          currentDir++;
        }
        // Traverse the tree until we find our destination
        for ( ; currentDir < homePath.length; currentDir++ ) {
          for ( UIRepositoryObject uiObj : tempRoot ) {
            if ( uiObj instanceof UIRepositoryDirectory ) {
              if ( uiObj.getName().equalsIgnoreCase( homePath[currentDir] ) ) {
                // We have a match. Let's move on to the next
                tempRoot = (UIRepositoryDirectory) uiObj;
                break;
              }
            }
          }
        }
        // If we have traversed as many directories as there are in the path, we have found the directory
        if ( homePath.length == currentDir ) {
          setSelectedFolderItems( Arrays.asList( tempRoot ) );
          folderTree.setSelectedItems( this.selectedFolderItems );
        }
      }
    } catch ( Exception e ) {
      throw new RuntimeException( e );
    }
  }
  // Extension hook: subclasses may add additional bindings here; called from createBindings().
  protected void doCreateBindings() {
  }
  // Creates the one-way binding that feeds this controller's repositoryDirectory
  // into the folder tree's elements. Overridable by subclasses.
  protected Binding createDirectoryBinding() {
    bf.setBindingType( Binding.Type.ONE_WAY );
    return bf.createBinding( this, "repositoryDirectory", folderTree, "elements" );
  }
  // XUL event-handler id under which this controller is registered in the document.
  public String getName() {
    return "browseController";
  }
  // Returns the root of the directory tree currently shown in the browser.
  public UIRepositoryDirectory getRepositoryDirectory() {
    return repositoryDirectory;
  }
protected void populateDirMap( UIRepositoryDirectory repDir ) {
dirMap.put( repDir.getObjectId(), repDir );
for ( UIRepositoryObject obj : repDir ) {
if ( obj instanceof UIRepositoryDirectory ) {
populateDirMap( (UIRepositoryDirectory) obj );
}
}
}
  // Expands every node of the folder tree (toolbar action).
  public void expandAllFolders() {
    folderTree.expandAll();
  }
  // Collapses every node of the folder tree (toolbar action).
  public void collapseAllFolders() {
    folderTree.collapseAll();
  }
public void openContent() {
Collection<UIRepositoryObject> content = fileTable.getSelectedItems();
openContent( content.toArray() );
}
public void openContent( Object[] items ) {
if ( ( items != null ) && ( items.length > 0 ) ) {
for ( Object o : items ) {
if ( o instanceof UIRepositoryDirectory ) {
( (UIRepositoryDirectory) o ).toggleExpanded();
List<Object> selectedFolder = new ArrayList<Object>();
selectedFolder.add( o );
folderTree.setSelectedItems( selectedFolder );
} else if ( ( mainController != null && mainController.getCallback() != null )
&& ( o instanceof UIRepositoryContent ) ) {
mainController.getCallback().open( (UIRepositoryContent) o, null );
// TODO: fire request to close dialog
}
}
}
}
  /**
   * Prompts for and applies a new name for the single selected item in the file table, then
   * refreshes the affected bindings. Errors are shown to the user in a message box.
   * NOTE(review): the throwable's localized message is passed to BaseMessages.getString as if
   * it were an i18n key — this pattern recurs in this class; confirm it renders as intended.
   */
  public void renameContent() throws Exception {
    try {
      Collection<UIRepositoryContent> content = fileTable.getSelectedItems();
      UIRepositoryObject contentToRename = content.iterator().next();
      renameRepositoryObject( contentToRename );
      // A renamed directory also changes the folder tree, so refresh it too.
      if ( contentToRename instanceof UIRepositoryDirectory ) {
        directoryBinding.fireSourceChanged();
      }
      selectedItemsBinding.fireSourceChanged();
    } catch ( Throwable th ) {
      messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
      messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
      messageBox.setMessage( BaseMessages.getString( PKG, th.getLocalizedMessage() ) );
      messageBox.open();
    }
  }
  /**
   * Shows an OK/Cancel confirmation dialog and runs {@code onAccept} when the user accepts.
   * Failures of the callback are reported in a message box.
   *
   * @param title    i18n key of the dialog title
   * @param message  i18n key of the dialog message
   * @param onAccept action to run on acceptance
   * @throws XulException if the confirmbox element cannot be created
   */
  protected void confirm( String title, String message, final Callable<Void> onAccept ) throws XulException {
    confirmBox = (XulConfirmBox) document.createElement( "confirmbox" );
    confirmBox.setTitle( BaseMessages.getString( PKG, title ) );
    confirmBox.setMessage( BaseMessages.getString( PKG, message ) );
    confirmBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
    confirmBox.setCancelLabel( BaseMessages.getString( PKG, "Dialog.Cancel" ) );
    confirmBox.addDialogCallback( new XulDialogCallback<Object>() {
      public void onClose( XulComponent sender, Status returnCode, Object retVal ) {
        if ( returnCode == Status.ACCEPT ) {
          try {
            onAccept.call();
          } catch ( Exception e ) {
            messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
            messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
            messageBox.setMessage( BaseMessages.getString( PKG, e.getLocalizedMessage() ) );
            messageBox.open();
          }
        }
      }
      public void onError( XulComponent sender, Throwable t ) {
        throw new RuntimeException( t );
      }
    } );
    confirmBox.open();
  }
  /**
   * Asks for confirmation and then deletes each selected item in the file table.
   * Folders get a dedicated warning (all contents will be lost) before deletion.
   */
  public void deleteContent() throws Exception {
    for ( Object object : fileTable.getSelectedItems() ) {
      if ( object instanceof UIRepositoryObject ) {
        final UIRepositoryObject repoObject = (UIRepositoryObject) object;
        Callable<Void> deleteCallable = new Callable<Void>() {
          @Override
          public Void call() throws Exception {
            deleteContent( repoObject );
            return null;
          }
        };
        // If content to be deleted is a folder we will display a warning message
        // notwithstanding the folder is empty or not. If you choose to delete this folder, all its
        // item(s) will be lost. If the user accept this, then we will delete that folder
        // otherwise we will end this method call
        if ( repoObject instanceof UIRepositoryDirectory ) {
          confirm( "BrowseController.DeleteNonEmptyFolderWarningTitle",
              "BrowseController.DeleteFolderWarningMessage", deleteCallable );
        } else {
          confirm( "BrowseController.DeleteFileWarningTitle",
              "BrowseController.DeleteFileWarningMessage", deleteCallable );
        }
      }
    }
  }
  // Holds the name entered in the prompt dialog callbacks; reset to null after use in createFolder().
  private String newName = null;
  /**
   * Deletes the given repository object and refreshes the bindings; directory deletions
   * additionally refresh the folder tree and the last-used folder ({@code repoDir}).
   *
   * @param repoObject the object to delete
   */
  protected void deleteContent( UIRepositoryObject repoObject ) throws Exception {
    repoObject.delete();
    if ( repoObject instanceof UIRepositoryDirectory ) {
      directoryBinding.fireSourceChanged();
      if ( repoDir != null ) {
        repoDir.refresh();
      }
    }
    selectedItemsBinding.fireSourceChanged();
  }
  /**
   * Prompts for a folder name and creates the folder under the currently selected directory
   * (falling back to the root when none is selected). Guards against duplicates both in the
   * UI tree and in the repository (PDI-5202), registers the new folder in {@code dirMap},
   * refreshes the bindings and re-selects the parent. Errors are shown in a message box.
   */
  public void createFolder() throws Exception {
    try {
      Collection<UIRepositoryDirectory> directories = folderTree.getSelectedItems();
      if ( directories == null || directories.size() == 0 ) {
        return;
      }
      UIRepositoryDirectory selectedFolder = directories.iterator().next();
      // First, ask for a name for the folder
      XulPromptBox prompt = promptForName( null );
      prompt.addDialogCallback( new XulDialogCallback<String>() {
        public void onClose( XulComponent component, Status status, String value ) {
          // Captured in the field because the callback runs before open() returns.
          newName = value;
        }
        public void onError( XulComponent component, Throwable err ) {
          throw new RuntimeException( err );
        }
      } );
      prompt.open();
      if ( newName != null ) {
        if ( selectedFolder == null ) {
          selectedFolder = repositoryDirectory;
        }
        //Do an explicit check here to see if the folder already exists in the ui
        //This is to prevent a double message being sent in case the folder does
        //not exist in the ui but does exist in the repo (PDI-5202)
        boolean folderExistsInUI = selectedFolder.contains( newName );
        if ( folderExistsInUI ) {
          throw new Exception(
            BaseMessages.getString(
              PKG,
              "BrowserController.DirAlreadyExistsInUI",
              newName
            )
          );
        }
        //PDI-5202
        String newNameInRepo = selectedFolder.checkDirNameExistsInRepo( newName );
        if ( newNameInRepo != null ) {
          messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Warning" ) );
          messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
          messageBox.setMessage(
            BaseMessages.getString(
              PKG,
              "BrowserController.DirAlreadyExistsInRepository",
              newNameInRepo
            )
          );
          messageBox.open();
          newName = newNameInRepo;
        }
        UIRepositoryDirectory newDir = selectedFolder.createFolder( newName );
        dirMap.put( newDir.getObjectId(), newDir );
        directoryBinding.fireSourceChanged();
        selectedItemsBinding.fireSourceChanged();
        this.folderTree.setSelectedItems( Collections.singletonList( selectedFolder ) );
      }
      newName = null;
    } catch ( Exception e ) {
      messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
      messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
      messageBox.setMessage( BaseMessages.getString( PKG, e.getLocalizedMessage() ) );
      messageBox.open();
    }
  }
public void exportFolder() throws Exception {
for ( Object object : folderTree.getSelectedItems() ) {
if ( object instanceof UIRepositoryDirectory ) {
repoDir = (UIRepositoryDirectory) object;
// Export the directories one by one...
//
if ( Spoon.getInstance().exportRepositoryDirectory( repoDir.getDirectory() ) ) {
return;
}
}
}
}
  /**
   * Asks for confirmation and deletes the selected folder in the folder tree, then selects
   * the deleted folder's parent. Errors during deletion are shown in a message box.
   */
  public void deleteFolder() throws Exception {
    UIRepositoryDirectory newSelectedItem = null;
    for ( Object object : folderTree.getSelectedItems() ) {
      if ( object instanceof UIRepositoryDirectory ) {
        repoDir = (UIRepositoryDirectory) object;
        newSelectedItem = repoDir.getParent();
        // If content to be deleted is a folder we will display a warning message
        // notwithstanding the folder is empty or not. If you choose to delete this folder, all its
        // item(s) will be lost. If the user accept this, then we will delete that folder
        // otherwise we will end this method call
        confirmBox = (XulConfirmBox) document.createElement( "confirmbox" );
        confirmBox.setTitle( BaseMessages.getString( PKG, "BrowseController.DeleteNonEmptyFolderWarningTitle" ) );
        confirmBox.setMessage( BaseMessages
          .getString( PKG, "BrowseController.DeleteFolderWarningMessage" ) );
        confirmBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
        confirmBox.setCancelLabel( BaseMessages.getString( PKG, "Dialog.Cancel" ) );
        confirmBox.addDialogCallback( new XulDialogCallback<Object>() {
          public void onClose( XulComponent sender, Status returnCode, Object retVal ) {
            if ( returnCode == Status.ACCEPT ) {
              try {
                deleteFolder( repoDir );
              } catch ( Exception e ) {
                messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
                messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
                messageBox.setMessage( BaseMessages.getString( PKG, e.getLocalizedMessage() ) );
                messageBox.open();
              }
            }
          }
          public void onError( XulComponent sender, Throwable t ) {
            throw new RuntimeException( t );
          }
        } );
        confirmBox.open();
        break;
      } else {
        // NOTE(review): this branch runs when a selected item is NOT a directory yet deletes
        // 'repoDir', which at this point holds a value from a previous iteration (or null).
        // Looks like a latent bug — confirm whether non-directory selections can occur here.
        deleteFolder( repoDir );
      }
    }
    // since old selected item is the now deleted one, set the parent as the selected item
    if ( newSelectedItem != null ) {
      folderTree.setSelectedItems( Arrays.asList( newSelectedItem ) );
    }
  }
  /**
   * Deletes the given directory, refreshes the folder/file bindings and the directory itself.
   *
   * @param repoDir the directory to delete (shadows the field of the same name)
   */
  protected void deleteFolder( UIRepositoryDirectory repoDir ) throws Exception {
    repoDir.delete();
    directoryBinding.fireSourceChanged();
    selectedItemsBinding.fireSourceChanged();
    repoDir.refresh();
  }
  /**
   * Prompts for and applies a new name for the selected folder in the folder tree, then
   * refreshes both bindings. Errors are shown to the user in a message box.
   */
  public void renameFolder() throws Exception {
    try {
      Collection<UIRepositoryDirectory> directory = folderTree.getSelectedItems();
      final UIRepositoryDirectory toRename = directory.iterator().next();
      renameRepositoryObject( toRename );
      directoryBinding.fireSourceChanged();
      selectedItemsBinding.fireSourceChanged();
    } catch ( Throwable th ) {
      messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
      messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
      messageBox.setMessage( BaseMessages.getString( PKG, th.getLocalizedMessage() ) );
      messageBox.open();
    }
  }
  /**
   * Opens a name prompt for the given object and, on acceptance, applies the entered name
   * via {@code setName}. Failures are rethrown as runtime exceptions so they surface in the UI.
   *
   * @param object the repository object to rename
   * @throws XulException if the prompt cannot be created
   */
  protected void renameRepositoryObject( final UIRepositoryObject object ) throws XulException {
    XulPromptBox prompt = promptForName( object );
    prompt.addDialogCallback( new XulDialogCallback<String>() {
      public void onClose( XulComponent component, Status status, String value ) {
        if ( status == Status.ACCEPT ) {
          try {
            object.setName( value );
          } catch ( Exception e ) {
            // convert to runtime exception so it bubbles up through the UI
            throw new RuntimeException( e );
          }
        }
      }
      public void onError( XulComponent component, Throwable err ) {
        throw new RuntimeException( err );
      }
    } );
    prompt.open();
  }
protected XulPromptBox promptForName( final UIRepositoryObject object ) throws XulException {
XulPromptBox prompt = (XulPromptBox) document.createElement( "promptbox" );
String currentName =
( object == null ) ? BaseMessages.getString( PKG, "BrowserController.NewFolder" ) : object.getName();
prompt.setTitle( BaseMessages.getString( PKG, "BrowserController.Name" ).concat( currentName ) );
prompt.setButtons( new DialogConstant[] { DialogConstant.OK, DialogConstant.CANCEL } );
prompt.setMessage( BaseMessages.getString( PKG, "BrowserController.NameLabel" ).concat( currentName ) );
prompt.setValue( currentName );
return prompt;
}
  // Object being dragged from the hierarchical folder tree
  // Accepts every drag started from the folder tree.
  public void onDragFromGlobalTree( DropEvent event ) {
    event.setAccepted( true );
  }
  // Object being dragged from the file listing table
  // Accepts every drag started from the file table.
  public void onDragFromLocalTable( DropEvent event ) {
    event.setAccepted( true );
  }
  /**
   * Handles a drop onto the directory tree. The drop is accepted only when the
   * drop parent is a {@link UIRepositoryDirectory} and at least one dragged item
   * is a repository object. A directory-on-directory name clash aborts the whole
   * move with an error dialog; file-on-file clashes collect into a list and the
   * user is prompted once whether to overwrite them all.
   */
  public void onDrop( DropEvent event ) {
    boolean result = false;
    try {
      List<Object> dirList = new ArrayList<Object>();
      List<UIRepositoryObject> moveList = new ArrayList<UIRepositoryObject>();
      UIRepositoryDirectory targetDirectory = null;
      if ( event.getDropParent() != null && event.getDropParent() instanceof UIRepositoryDirectory ) {
        targetDirectory = (UIRepositoryDirectory) event.getDropParent();
        if ( event.getDataTransfer().getData().size() > 0 ) {
          for ( Object o : event.getDataTransfer().getData() ) {
            if ( o instanceof UIRepositoryObject ) {
              moveList.add( (UIRepositoryObject) o );
              // Make sure only Folders are copied to the Directory Tree
              if ( o instanceof UIRepositoryDirectory ) {
                dirList.add( o );
              }
              result = true;
            }
          }
        }
      }
      if ( result == true ) {
        List<UIRepositoryObject> collisionObjects = new ArrayList<UIRepositoryObject>();
        // Check for overwriting
        for ( UIRepositoryObject newChild : moveList ) {
          for ( UIRepositoryObject currChild : targetDirectory.getRepositoryObjects() ) {
            // Folder name clash (case-insensitive): abort the move entirely.
            if ( ( currChild instanceof UIRepositoryDirectory ) && ( newChild instanceof UIRepositoryDirectory )
                && ( currChild.getName().equalsIgnoreCase( newChild.getName() ) ) ) {
              messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
              messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
              messageBox.setMessage( BaseMessages.getString( PKG,
                  "BrowseController.UnableToMove.DirectoryAlreadyExists", currChild.getPath() ) );
              messageBox.open();
              result = false;
              break;
            } else if ( !( currChild instanceof UIRepositoryDirectory )
                && ( currChild.getType().equalsIgnoreCase( newChild.getType() ) )
                && ( currChild.getName().equalsIgnoreCase( newChild.getName() ) ) ) {
              // Same-typed, same-named file already present: candidate for overwrite prompt.
              collisionObjects.add( currChild );
            }
          }
          if ( !result ) {
            break;
          }
        }
        // Prompt to overwrite
        if ( result && collisionObjects.size() > 0 ) {
          FileOverwriteDialogController fileOverwriteDialog =
              FileOverwriteDialogController.getInstance( getXulDomContainer().getOuterContext() instanceof Shell
                  ? (Shell) getXulDomContainer().getOuterContext() : null, collisionObjects );
          fileOverwriteDialog.show();
          if ( fileOverwriteDialog.isOverwriteFiles() ) {
            // Delete the files before moving
            for ( UIRepositoryObject o : collisionObjects ) {
              o.delete();
            }
          } else {
            // We are not moving the files
            result = false;
          }
        }
        // Make sure we are still moving the files
        if ( result ) {
          moveFiles( moveList, targetDirectory );
          // Set UI objects to appear in folder directory
          event.getDataTransfer().setData( dirList );
        }
      }
    } catch ( Exception e ) {
      result = false;
      event.setAccepted( false );
      messageBox.setTitle( BaseMessages.getString( PKG, "Dialog.Error" ) );
      messageBox.setAcceptLabel( BaseMessages.getString( PKG, "Dialog.Ok" ) );
      messageBox.setMessage( BaseMessages.getString( PKG, "BrowseController.UnableToMove", e.getLocalizedMessage() ) );
      messageBox.open();
    }
    event.setAccepted( result );
  }
  /**
   * Moves each repository object into the target directory, one at a time.
   * Not transactional: a failure part-way leaves earlier objects already moved.
   *
   * @param objects         the objects to relocate
   * @param targetDirectory the destination directory
   * @throws Exception if any individual move fails
   */
  protected void moveFiles( List<UIRepositoryObject> objects, UIRepositoryDirectory targetDirectory ) throws Exception {
    // Perform move
    for ( UIRepositoryObject o : objects ) {
      o.move( targetDirectory );
    }
  }
  /** Double-click in the file table opens the selected items for editing. */
  public void onDoubleClick( Object[] selectedItems ) {
    openContent( selectedItems );
  }
  /** @return the directories currently selected in the folder tree */
  public List<UIRepositoryDirectory> getSelectedFolderItems() {
    return selectedFolderItems;
  }
  /**
   * Updates the folder-tree selection, first polling registered
   * {@link ContextChangeVetoer}s. If any vetoer answers CANCEL, the previous
   * selection is restored in both the tree and the file table; otherwise the new
   * selection is adopted and the directory listing is refreshed.
   *
   * @param selectedFolderItems the newly selected directories
   */
  public void setSelectedFolderItems( List<UIRepositoryDirectory> selectedFolderItems ) {
    if ( !compareFolderList( selectedFolderItems, this.selectedFolderItems ) ) {
      List<TYPE> pollResults = pollContextChangeVetoResults();
      if ( !contains( TYPE.CANCEL, pollResults ) ) {
        this.selectedFolderItems = selectedFolderItems;
        setRepositoryDirectories( selectedFolderItems );
      } else if ( contains( TYPE.CANCEL, pollResults ) ) {
        // Veto: roll the UI back to the previous selection.
        folderTree.setSelectedItems( this.selectedFolderItems );
        fileTable.setSelectedItems( this.selectedFileItems );
      }
    } else {
      // Same selection as before: still refresh so bindings pick up changes.
      setRepositoryDirectories( selectedFolderItems );
    }
  }
  /** @return the repository objects currently selected in the file table */
  public List<UIRepositoryObject> getSelectedFileItems() {
    return selectedFileItems;
  }
  /**
   * Updates the file-table selection, first polling registered
   * {@link ContextChangeVetoer}s. A CANCEL vote restores the previous table
   * selection; otherwise the new selection is adopted and propagated to the
   * bound repository-object lists.
   *
   * @param selectedFileItems the newly selected repository objects
   */
  public void setSelectedFileItems( List<UIRepositoryObject> selectedFileItems ) {
    if ( !compareFileList( selectedFileItems, this.selectedFileItems ) ) {
      List<TYPE> pollResults = pollContextChangeVetoResults();
      if ( !contains( TYPE.CANCEL, pollResults ) ) {
        this.selectedFileItems = selectedFileItems;
        setRepositoryObjects( selectedFileItems );
        setRepositoryItems( selectedFileItems );
      } else if ( contains( TYPE.CANCEL, pollResults ) ) {
        // Veto: roll the table back to the previous selection.
        fileTable.setSelectedItems( this.selectedFileItems );
      }
    } else {
      // Same selection as before: still fire so bindings pick up changes.
      setRepositoryItems( selectedFileItems );
    }
  }
  /** @return the binding that mirrors the current table selection */
  public Binding getSelectedItemsBinding() {
    return selectedItemsBinding;
  }
  /** @param selectedItemsBinding the binding that mirrors the current table selection */
  public void setSelectedItemsBinding( Binding selectedItemsBinding ) {
    this.selectedItemsBinding = selectedItemsBinding;
  }
  /**
   * Stores the selected file items and fires a "repositoryObjects" property
   * change so bound UI widgets refresh. Old value is reported as null so the
   * event always fires even when the list content is unchanged.
   */
  public void setRepositoryObjects( List<UIRepositoryObject> selectedFileItems ) {
    this.repositoryObjects = selectedFileItems;
    firePropertyChange( "repositoryObjects", null, selectedFileItems );
  }
  /** @return the repository objects last published via {@link #setRepositoryObjects(List)} */
  public List<UIRepositoryObject> getRepositoryObjects() {
    return repositoryObjects;
  }
  /**
   * Stores the selected items and fires a "repositoryItems" property change.
   * Old value is reported as null so the event always fires.
   */
  public void setRepositoryItems( List<UIRepositoryObject> selectedItems ) {
    this.repositoryItems = selectedItems;
    firePropertyChange( "repositoryItems", null, repositoryItems );
  }
  /** @return the repository items last published via {@link #setRepositoryItems(List)} */
  public List<UIRepositoryObject> getRepositoryItems() {
    return repositoryItems;
  }
public List<UIRepositoryDirectory> getRepositoryDirectories() {
if ( repositoryDirectories != null && repositoryDirectories.size() == 0 ) {
return null;
}
return repositoryDirectories;
}
  /**
   * Replaces the tracked directory selection. Snapshots the previous selection
   * and its children (for the change event fired at the end), detaches the
   * children listener from the old first directory, and re-attaches it to the
   * new first directory so file-list updates keep flowing.
   *
   * @param selectedFolderItems the newly selected directories; may be null/empty
   */
  public void setRepositoryDirectories( List<UIRepositoryDirectory> selectedFolderItems ) {
    List<UIRepositoryDirectory> previousVal = null;
    UIRepositoryObjects previousRepoObjects = null;
    try {
      // Snapshot the old selection so listeners can see old vs. new.
      if ( repositoryDirectories != null && repositoryDirectories.size() > 0 ) {
        previousVal = new ArrayList<UIRepositoryDirectory>();
        previousVal.addAll( repositoryDirectories );
        previousRepoObjects = getSelectedRepoDirChildren();
      }
      // Remove children listener
      if ( this.repositoryDirectories != null && this.repositoryDirectories.size() > 0 ) {
        this.repositoryDirectories.get( 0 ).getRepositoryObjects().removePropertyChangeListener( fileChildrenListener );
      }
      this.repositoryDirectories = selectedFolderItems;
      // Add children Listener
      if ( this.repositoryDirectories != null && this.repositoryDirectories.size() > 0 ) {
        this.repositoryDirectories.get( 0 ).getRepositoryObjects().addPropertyChangeListener( "children",
            fileChildrenListener );
      }
    } catch ( KettleException e ) {
      // convert to runtime exception so it bubbles up through the UI
      throw new RuntimeException( e );
    }
    fireFoldersAndItemsChange( previousVal, previousRepoObjects );
  }
  /**
   * Returns the child objects of the first selected directory, or null when
   * nothing is selected.
   * NOTE(review): the guard checks {@code selectedFolderItems} but dereferences
   * {@code repositoryDirectories.get(0)} — assumes the two lists are kept in
   * sync by the setters above; confirm before relying on this with mismatched state.
   */
  public UIRepositoryObjects getSelectedRepoDirChildren() {
    UIRepositoryObjects repoObjects = null;
    if ( selectedFolderItems != null && selectedFolderItems.size() > 0 ) {
      try {
        repoObjects = repositoryDirectories.get( 0 ).getRepositoryObjects();
      } catch ( KettleException e ) {
        // convert to runtime exception so it bubbles up through the UI
        throw new RuntimeException( e );
      }
    }
    return repoObjects;
  }
  /**
   * Registers a vetoer that gets a vote before selection context changes.
   * The backing collection is created lazily on first registration.
   */
  public void addContextChangeVetoer( ContextChangeVetoer listener ) {
    if ( contextChangeVetoers == null ) {
      contextChangeVetoers = new ContextChangeVetoerCollection();
    }
    contextChangeVetoers.add( listener );
  }
  /** Unregisters a previously added vetoer; a no-op when none were ever registered. */
  public void removeContextChangeVetoer( ContextChangeVetoer listener ) {
    if ( contextChangeVetoers != null ) {
      contextChangeVetoers.remove( listener );
    }
  }
private boolean contains( TYPE type, List<TYPE> typeList ) {
for ( TYPE t : typeList ) {
if ( t.equals( type ) ) {
return true;
}
}
return false;
}
  /**
   * Fire all current {@link ContextChangeVetoer}. Everyone who has added themselves as a vetoer has a chance to vote
   * on what should happen.
   *
   * @return the collected votes; a single NO_OP when no vetoers are registered
   */
  List<TYPE> pollContextChangeVetoResults() {
    if ( contextChangeVetoers != null ) {
      return contextChangeVetoers.fireContextChange();
    } else {
      // No vetoers registered: treat the context change as unopposed.
      List<TYPE> returnValue = new ArrayList<TYPE>();
      returnValue.add( TYPE.NO_OP );
      return returnValue;
    }
  }
boolean compareFolderList( List<UIRepositoryDirectory> rd1, List<UIRepositoryDirectory> rd2 ) {
if ( rd1 != null && rd2 != null ) {
if ( rd1.size() != rd2.size() ) {
return false;
}
for ( int i = 0; i < rd1.size(); i++ ) {
if ( rd1.get( i ) != null && rd2.get( i ) != null ) {
if ( !rd1.get( i ).getName().equals( rd2.get( i ).getName() ) ) {
return false;
}
}
}
} else {
return false;
}
return true;
}
boolean compareFileList( List<UIRepositoryObject> ro1, List<UIRepositoryObject> ro2 ) {
if ( ro1 != null && ro2 != null ) {
if ( ro1.size() != ro2.size() ) {
return false;
}
for ( int i = 0; i < ro1.size(); i++ ) {
if ( ro1.get( i ) != null && ro2.get( i ) != null ) {
if ( !ro1.get( i ).getName().equals( ro2.get( i ).getName() ) ) {
return false;
}
}
}
} else {
return false;
}
return true;
}
}
| apache-2.0 |
LatencyUtils/cassandra-stress2 | src/java/org/apache/cassandra/config/Schema.java | 17822 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.config;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.*;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Sets;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.cassandra.cql3.functions.Functions;
import org.apache.cassandra.cql3.functions.UDAggregate;
import org.apache.cassandra.cql3.functions.UDFunction;
import org.apache.cassandra.db.*;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.commitlog.CommitLog;
import org.apache.cassandra.db.compaction.CompactionManager;
import org.apache.cassandra.db.marshal.UserType;
import org.apache.cassandra.io.sstable.Descriptor;
import org.apache.cassandra.schema.LegacySchemaTables;
import org.apache.cassandra.service.MigrationManager;
import org.apache.cassandra.utils.ConcurrentBiMap;
import org.apache.cassandra.utils.Pair;
import org.cliffc.high_scale_lib.NonBlockingHashMap;
public class Schema
{
    private static final Logger logger = LoggerFactory.getLogger(Schema.class);
    // Process-wide singleton; all schema reads and mutations funnel through this instance.
    public static final Schema instance = new Schema();
    /**
     * longest permissible KS or CF name. Our main concern is that filename not be more than 255 characters;
     * the filename will contain both the KS and CF names. Since non-schema-name components only take up
     * ~64 characters, we could allow longer names than this, but on Windows, the entire path should be not greater than
     * 255 characters, so a lower limit here helps avoid problems. See CASSANDRA-4110.
     */
    public static final int NAME_LENGTH = 48;
    /* metadata map for faster keyspace lookup */
    private final Map<String, KSMetaData> keyspaces = new NonBlockingHashMap<>();
    /* Keyspace objects, one per keyspace. Only one instance should ever exist for any given keyspace. */
    private final Map<String, Keyspace> keyspaceInstances = new NonBlockingHashMap<>();
    /* metadata map for faster ColumnFamily lookup */
    private final ConcurrentBiMap<Pair<String, String>, UUID> cfIdMap = new ConcurrentBiMap<>();
    // Content-based digest of the current schema; recomputed by updateVersion().
    private volatile UUID version;
    // 59adb24e-f3cd-3e02-97f0-5b395827453f
    public static final UUID emptyVersion;
    static
    {
        try
        {
            // Digest of zero bytes: the well-known version advertised before any schema is loaded.
            emptyVersion = UUID.nameUUIDFromBytes(MessageDigest.getInstance("MD5").digest());
        }
        catch (NoSuchAlgorithmException e)
        {
            // MD5 is guaranteed by the JCA spec, so this is unreachable on a conforming JRE.
            throw new AssertionError();
        }
    }
    /**
     * Initialize empty schema object and load the hardcoded system tables
     */
    public Schema()
    {
        load(SystemKeyspace.definition());
    }
    /** load keyspace (keyspace) definitions, but do not initialize the keyspace instances. */
    public Schema loadFromDisk()
    {
        load(LegacySchemaTables.readSchemaFromSystemTables());
        updateVersion();
        return this;
    }
    /**
     * Load up non-system keyspaces
     *
     * @param keyspaceDefs The non-system keyspace definitions
     *
     * @return self to support chaining calls
     */
    public Schema load(Collection<KSMetaData> keyspaceDefs)
    {
        for (KSMetaData def : keyspaceDefs)
            load(def);
        return this;
    }
    /**
     * Load specific keyspace into Schema
     *
     * @param keyspaceDef The keyspace to load up
     *
     * @return self to support chaining calls
     */
    public Schema load(KSMetaData keyspaceDef)
    {
        for (CFMetaData cfm : keyspaceDef.cfMetaData().values())
            load(cfm);
        setKeyspaceDefinition(keyspaceDef);
        return this;
    }
    /**
     * Get keyspace instance by name
     *
     * @param keyspaceName The name of the keyspace
     *
     * @return Keyspace object or null if keyspace was not found
     */
    public Keyspace getKeyspaceInstance(String keyspaceName)
    {
        return keyspaceInstances.get(keyspaceName);
    }
    /**
     * Resolve a live ColumnFamilyStore from a table id, or null if the table
     * has been dropped or its keyspace has not been opened yet.
     */
    public ColumnFamilyStore getColumnFamilyStoreInstance(UUID cfId)
    {
        Pair<String, String> pair = cfIdMap.inverse().get(cfId);
        if (pair == null)
            return null;
        Keyspace instance = getKeyspaceInstance(pair.left);
        if (instance == null)
            return null;
        return instance.getColumnFamilyStore(cfId);
    }
    /**
     * Store given Keyspace instance to the schema
     *
     * @param keyspace The Keyspace instance to store
     *
     * @throws IllegalArgumentException if Keyspace is already stored
     */
    public void storeKeyspaceInstance(Keyspace keyspace)
    {
        if (keyspaceInstances.containsKey(keyspace.getName()))
            throw new IllegalArgumentException(String.format("Keyspace %s was already initialized.", keyspace.getName()));
        keyspaceInstances.put(keyspace.getName(), keyspace);
    }
    /**
     * Remove keyspace from schema
     *
     * @param keyspaceName The name of the keyspace to remove
     *
     * @return removed keyspace instance or null if it wasn't found
     */
    public Keyspace removeKeyspaceInstance(String keyspaceName)
    {
        return keyspaceInstances.remove(keyspaceName);
    }
    /**
     * Remove keyspace definition from system
     *
     * @param ksm The keyspace definition to remove
     */
    public void clearKeyspaceDefinition(KSMetaData ksm)
    {
        keyspaces.remove(ksm.name);
    }
    /**
     * Given a keyspace name & column family name, get the column family
     * meta data. If the keyspace name or column family name is not valid
     * this function returns null.
     *
     * @param keyspaceName The keyspace name
     * @param cfName The ColumnFamily name
     *
     * @return ColumnFamily Metadata object or null if it wasn't found
     */
    public CFMetaData getCFMetaData(String keyspaceName, String cfName)
    {
        assert keyspaceName != null;
        KSMetaData ksm = keyspaces.get(keyspaceName);
        return (ksm == null) ? null : ksm.cfMetaData().get(cfName);
    }
    /**
     * Get ColumnFamily metadata by its identifier
     *
     * @param cfId The ColumnFamily identifier
     *
     * @return metadata about ColumnFamily
     */
    public CFMetaData getCFMetaData(UUID cfId)
    {
        Pair<String,String> cf = getCF(cfId);
        return (cf == null) ? null : getCFMetaData(cf.left, cf.right);
    }
    /** Convenience lookup keyed by an SSTable descriptor's keyspace/table names. */
    public CFMetaData getCFMetaData(Descriptor descriptor)
    {
        return getCFMetaData(descriptor.ksname, descriptor.cfname);
    }
    /**
     * Get metadata about keyspace by its name
     *
     * @param keyspaceName The name of the keyspace
     *
     * @return The keyspace metadata or null if it wasn't found
     */
    public KSMetaData getKSMetaData(String keyspaceName)
    {
        assert keyspaceName != null;
        return keyspaces.get(keyspaceName);
    }
    /**
     * @return collection of the non-system keyspaces
     */
    public List<String> getNonSystemKeyspaces()
    {
        return ImmutableList.copyOf(Sets.difference(keyspaces.keySet(), Collections.singleton(SystemKeyspace.NAME)));
    }
    /**
     * Get metadata about keyspace inner ColumnFamilies
     *
     * @param keyspaceName The name of the keyspace
     *
     * @return metadata about ColumnFamilies the belong to the given keyspace
     */
    public Map<String, CFMetaData> getKeyspaceMetaData(String keyspaceName)
    {
        assert keyspaceName != null;
        KSMetaData ksm = keyspaces.get(keyspaceName);
        assert ksm != null;
        return ksm.cfMetaData();
    }
    /**
     * @return collection of the all keyspace names registered in the system (system and non-system)
     */
    public Set<String> getKeyspaces()
    {
        return keyspaces.keySet();
    }
    /**
     * @return collection of the metadata about all keyspaces registered in the system (system and non-system)
     */
    public Collection<KSMetaData> getKeyspaceDefinitions()
    {
        return keyspaces.values();
    }
    /**
     * Update (or insert) new keyspace definition
     *
     * @param ksm The metadata about keyspace
     */
    public void setKeyspaceDefinition(KSMetaData ksm)
    {
        assert ksm != null;
        keyspaces.put(ksm.name, ksm);
    }
    /* ColumnFamily query/control methods */
    /**
     * @param cfId The identifier of the ColumnFamily to lookup
     * @return The (ksname,cfname) pair for the given id, or null if it has been dropped.
     */
    public Pair<String,String> getCF(UUID cfId)
    {
        return cfIdMap.inverse().get(cfId);
    }
    /**
     * @param cfId The identifier of the ColumnFamily to lookup
     * @return true if the CF id is a known one, false otherwise.
     */
    public boolean hasCF(UUID cfId)
    {
        return cfIdMap.containsValue(cfId);
    }
    /**
     * Lookup keyspace/ColumnFamily identifier
     *
     * @param ksName The keyspace name
     * @param cfName The ColumnFamily name
     *
     * @return The id for the given (ksname,cfname) pair, or null if it has been dropped.
     */
    public UUID getId(String ksName, String cfName)
    {
        return cfIdMap.get(Pair.create(ksName, cfName));
    }
    /**
     * Load individual ColumnFamily Definition to the schema
     * (to make ColumnFamily lookup faster)
     *
     * @param cfm The ColumnFamily definition to load
     */
    public void load(CFMetaData cfm)
    {
        Pair<String, String> key = Pair.create(cfm.ksName, cfm.cfName);
        if (cfIdMap.containsKey(key))
            throw new RuntimeException(String.format("Attempting to load already loaded table %s.%s", cfm.ksName, cfm.cfName));
        logger.debug("Adding {} to cfIdMap", cfm);
        cfIdMap.put(key, cfm.cfId);
    }
    /**
     * Used for ColumnFamily data eviction out from the schema
     *
     * @param cfm The ColumnFamily Definition to evict
     */
    public void purge(CFMetaData cfm)
    {
        cfIdMap.remove(Pair.create(cfm.ksName, cfm.cfName));
        cfm.markPurged();
    }
    /* Version control */
    /**
     * @return current schema version
     */
    public UUID getVersion()
    {
        return version;
    }
    /**
     * Read schema from system keyspace and calculate MD5 digest of every row, resulting digest
     * will be converted into UUID which would act as content-based version of the schema.
     */
    public void updateVersion()
    {
        version = LegacySchemaTables.calculateSchemaDigest();
        SystemKeyspace.updateSchemaVersion(version);
    }
    /*
     * Like updateVersion, but also announces via gossip
     */
    public void updateVersionAndAnnounce()
    {
        updateVersion();
        MigrationManager.passiveAnnounce(version);
    }
    /**
     * Clear all KS/CF metadata and reset version.
     */
    public synchronized void clear()
    {
        for (String keyspaceName : getNonSystemKeyspaces())
        {
            KSMetaData ksm = getKSMetaData(keyspaceName);
            for (CFMetaData cfm : ksm.cfMetaData().values())
                purge(cfm);
            clearKeyspaceDefinition(ksm);
        }
        updateVersionAndAnnounce();
    }
    /** Register a brand-new keyspace, open it, and notify migration listeners. */
    public void addKeyspace(KSMetaData ksm)
    {
        assert getKSMetaData(ksm.name) == null;
        load(ksm);
        Keyspace.open(ksm.name);
        MigrationManager.instance.notifyCreateKeyspace(ksm);
    }
    /**
     * Re-read a keyspace's definition from the schema tables, preserving its
     * existing tables and user types, and refresh its replication strategy.
     */
    public void updateKeyspace(String ksName)
    {
        KSMetaData oldKsm = getKSMetaData(ksName);
        assert oldKsm != null;
        KSMetaData newKsm = LegacySchemaTables.createKeyspaceFromName(ksName).cloneWith(oldKsm.cfMetaData().values(), oldKsm.userTypes);
        setKeyspaceDefinition(newKsm);
        Keyspace.open(ksName).createReplicationStrategy(newKsm);
        MigrationManager.instance.notifyUpdateKeyspace(newKsm);
    }
    /**
     * Drop a keyspace and every table in it: interrupts compactions, snapshots
     * (when auto-snapshot is on), purges metadata, and recycles commit log
     * segments that reference the dropped tables.
     */
    public void dropKeyspace(String ksName)
    {
        KSMetaData ksm = Schema.instance.getKSMetaData(ksName);
        String snapshotName = Keyspace.getTimestampedSnapshotName(ksName);
        CompactionManager.instance.interruptCompactionFor(ksm.cfMetaData().values(), true);
        Keyspace keyspace = Keyspace.open(ksm.name);
        // remove all cfs from the keyspace instance.
        List<UUID> droppedCfs = new ArrayList<>();
        for (CFMetaData cfm : ksm.cfMetaData().values())
        {
            ColumnFamilyStore cfs = keyspace.getColumnFamilyStore(cfm.cfName);
            purge(cfm);
            if (DatabaseDescriptor.isAutoSnapshot())
                cfs.snapshot(snapshotName);
            Keyspace.open(ksm.name).dropCf(cfm.cfId);
            droppedCfs.add(cfm.cfId);
        }
        // remove the keyspace from the static instances.
        Keyspace.clear(ksm.name);
        clearKeyspaceDefinition(ksm);
        keyspace.writeOrder.awaitNewBarrier();
        // force a new segment in the CL
        CommitLog.instance.forceRecycleAllSegments(droppedCfs);
        MigrationManager.instance.notifyDropKeyspace(ksm);
    }
    /** Register and initialize a new table inside an existing keyspace. */
    public void addTable(CFMetaData cfm)
    {
        assert getCFMetaData(cfm.ksName, cfm.cfName) == null;
        KSMetaData ksm = getKSMetaData(cfm.ksName).cloneWithTableAdded(cfm);
        logger.info("Loading {}", cfm);
        load(cfm);
        // make sure it's init-ed w/ the old definitions first,
        // since we're going to call initCf on the new one manually
        Keyspace.open(cfm.ksName);
        setKeyspaceDefinition(ksm);
        Keyspace.open(ksm.name).initCf(cfm.cfId, cfm.cfName, true);
        MigrationManager.instance.notifyCreateColumnFamily(cfm);
    }
    /** Reload a table's metadata from the schema tables and refresh its store. */
    public void updateTable(String ksName, String tableName)
    {
        CFMetaData cfm = getCFMetaData(ksName, tableName);
        assert cfm != null;
        boolean columnsDidChange = cfm.reload();
        Keyspace keyspace = Keyspace.open(cfm.ksName);
        keyspace.getColumnFamilyStore(cfm.cfName).reload();
        MigrationManager.instance.notifyUpdateColumnFamily(cfm, columnsDidChange);
    }
    /**
     * Drop a single table: interrupts its compactions, snapshots (when
     * auto-snapshot is on), purges metadata, and recycles commit log segments.
     */
    public void dropTable(String ksName, String tableName)
    {
        KSMetaData ksm = getKSMetaData(ksName);
        assert ksm != null;
        ColumnFamilyStore cfs = Keyspace.open(ksName).getColumnFamilyStore(tableName);
        assert cfs != null;
        // reinitialize the keyspace.
        CFMetaData cfm = ksm.cfMetaData().get(tableName);
        purge(cfm);
        setKeyspaceDefinition(ksm.cloneWithTableRemoved(cfm));
        CompactionManager.instance.interruptCompactionFor(Arrays.asList(cfm), true);
        if (DatabaseDescriptor.isAutoSnapshot())
            cfs.snapshot(Keyspace.getTimestampedSnapshotName(cfs.name));
        Keyspace.open(ksm.name).dropCf(cfm.cfId);
        MigrationManager.instance.notifyDropColumnFamily(cfm);
        CommitLog.instance.forceRecycleAllSegments(Collections.singleton(cfm.cfId));
    }
    /** Register a new user-defined type with its keyspace and notify listeners. */
    public void addType(UserType ut)
    {
        KSMetaData ksm = getKSMetaData(ut.keyspace);
        assert ksm != null;
        logger.info("Loading {}", ut);
        ksm.userTypes.addType(ut);
        MigrationManager.instance.notifyCreateUserType(ut);
    }
    /** Replace an existing user-defined type (addType overwrites by name). */
    public void updateType(UserType ut)
    {
        KSMetaData ksm = getKSMetaData(ut.keyspace);
        assert ksm != null;
        logger.info("Updating {}", ut);
        ksm.userTypes.addType(ut);
        MigrationManager.instance.notifyUpdateUserType(ut);
    }
    /** Remove a user-defined type from its keyspace and notify listeners. */
    public void dropType(UserType ut)
    {
        KSMetaData ksm = getKSMetaData(ut.keyspace);
        assert ksm != null;
        ksm.userTypes.removeType(ut);
        MigrationManager.instance.notifyDropUserType(ut);
    }
    /** Register a new user-defined function and notify listeners. */
    public void addFunction(UDFunction udf)
    {
        logger.info("Loading {}", udf);
        Functions.addFunction(udf);
        MigrationManager.instance.notifyCreateFunction(udf);
    }
    /** Replace an existing user-defined function and notify listeners. */
    public void updateFunction(UDFunction udf)
    {
        logger.info("Updating {}", udf);
        Functions.replaceFunction(udf);
        MigrationManager.instance.notifyUpdateFunction(udf);
    }
    /** Remove a user-defined function and notify listeners. */
    public void dropFunction(UDFunction udf)
    {
        logger.info("Drop {}", udf);
        // TODO: this is kind of broken as this remove all overloads of the function name
        Functions.removeFunction(udf.name(), udf.argTypes());
        MigrationManager.instance.notifyDropFunction(udf);
    }
    /** Register a new user-defined aggregate and notify listeners. */
    public void addAggregate(UDAggregate udf)
    {
        logger.info("Loading {}", udf);
        Functions.addFunction(udf);
        MigrationManager.instance.notifyCreateAggregate(udf);
    }
    /** Replace an existing user-defined aggregate and notify listeners. */
    public void updateAggregate(UDAggregate udf)
    {
        logger.info("Updating {}", udf);
        Functions.replaceFunction(udf);
        MigrationManager.instance.notifyUpdateAggregate(udf);
    }
    /** Remove a user-defined aggregate and notify listeners. */
    public void dropAggregate(UDAggregate udf)
    {
        logger.info("Drop {}", udf);
        // TODO: this is kind of broken as this remove all overloads of the function name
        Functions.removeFunction(udf.name(), udf.argTypes());
        MigrationManager.instance.notifyDropAggregate(udf);
    }
}
| apache-2.0 |
WillJiang/WillJiang | src/core/src/test/java/org/apache/struts2/views/jsp/ui/OptionTransferSelectTagTest.java | 16199 | /*
* $Id: OptionTransferSelectTagTest.java 651946 2008-04-27 13:41:38Z apetrelli $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.views.jsp.ui;
import java.util.ArrayList;
import java.util.List;
import org.apache.struts2.TestAction;
import org.apache.struts2.views.jsp.AbstractUITagTest;
/**
*
*/
public class OptionTransferSelectTagTest extends AbstractUITagTest {
    /**
     * Renders the tag with every option on both sides selected and all transfer
     * buttons/labels enabled, then compares against the golden file
     * optiontransferselect-1.txt.
     */
    public void testWithAllSelected() throws Exception {
        List left = new ArrayList();
        left.add("Left1");
        left.add("Left2");
        List right = new ArrayList();
        right.add("Right1");
        right.add("Right2");
        TestAction testaction = (TestAction) action;
        testaction.setCollection(left);
        testaction.setList2(right);
        OptionTransferSelectTag tag = new OptionTransferSelectTag();
        tag.setPageContext(pageContext);
        tag.setName("collection");
        tag.setId("id");
        tag.setList("collection");
        tag.setSize("20");
        tag.setMultiple("true");
        tag.setEmptyOption("true");
        tag.setDoubleName("list2");
        tag.setDoubleList("list2");
        tag.setDoubleId("doubleId");
        tag.setDoubleSize("20");
        tag.setMultiple("true");
        tag.setDoubleEmptyOption("true");
        tag.setDoubleCssClass("c2");
        tag.setDoubleCssStyle("s2");
        tag.setAllowAddAllToLeft("true");
        tag.setAllowAddAllToRight("true");
        tag.setAllowAddToLeft("true");
        tag.setAllowAddToRight("true");
        tag.setAllowSelectAll("true");
        tag.setAddAllToLeftLabel("All Left");
        tag.setAddAllToRightLabel("All Right");
        tag.setAddToLeftLabel("Left");
        tag.setAddToRightLabel("Right");
        tag.setSelectAllLabel("Select All");
        tag.setLeftTitle("Title Left");
        tag.setRightTitle("Title Right");
        tag.setButtonCssClass("buttonCssClass");
        tag.setButtonCssStyle("buttonCssStyle");
        tag.setHeaderKey("Header Key");
        tag.setHeaderValue("Header Value");
        tag.setDoubleHeaderKey("Double Header Key");
        tag.setDoubleHeaderValue("Double Header Value");
        tag.doStartTag();
        tag.doEndTag();
        //System.out.println(writer.toString());
        verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-1.txt"));
    }
    /**
     * Renders the tag with only one of three options selected on each side
     * (selection lists differ from the option-source lists), then compares
     * against the golden file optiontransferselect-2.txt.
     */
    public void testWithPartialSelectedOnBothSides() throws Exception {
        List left = new ArrayList();
        left.add("Left2");
        List right = new ArrayList();
        right.add("Right2");
        List leftVal = new ArrayList();
        leftVal.add("Left1");
        leftVal.add("Left2");
        leftVal.add("Left3");
        List rightVal = new ArrayList();
        rightVal.add("Right1");
        rightVal.add("Right2");
        rightVal.add("Right3");
        TestAction testaction = (TestAction) action;
        testaction.setCollection(left);
        testaction.setList2(right);
        testaction.setCollection2(leftVal);
        testaction.setList3(rightVal);
        OptionTransferSelectTag tag = new OptionTransferSelectTag();
        tag.setPageContext(pageContext);
        tag.setName("collection");
        tag.setId("id");
        tag.setList("collection2");
        tag.setSize("20");
        tag.setMultiple("true");
        tag.setEmptyOption("true");
        tag.setDoubleName("list2");
        tag.setDoubleList("list3");
        tag.setDoubleId("doubleId");
        tag.setDoubleSize("20");
        tag.setMultiple("true");
        tag.setDoubleEmptyOption("true");
        tag.setAllowAddAllToLeft("true");
        tag.setAllowAddAllToRight("true");
        tag.setAllowAddToLeft("true");
        tag.setAllowAddToRight("true");
        tag.setAllowSelectAll("true");
        tag.setAddAllToLeftLabel("All Left");
        tag.setAddAllToRightLabel("All Right");
        tag.setAddToLeftLabel("Left");
        tag.setAddToRightLabel("Right");
        tag.setSelectAllLabel("Select All");
        tag.setLeftTitle("Title Left");
        tag.setRightTitle("Title Right");
        tag.setButtonCssClass("buttonCssClass");
        tag.setButtonCssStyle("buttonCssStyle");
        tag.setHeaderKey("Header Key");
        tag.setHeaderValue("Header Value");
        tag.setDoubleHeaderKey("Double Header Key");
        tag.setDoubleHeaderValue("Double Header Value");
        tag.doStartTag();
        tag.doEndTag();
        //System.out.println(writer.toString());
        verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-2.txt"));
    }
    /**
     * Renders the tag with NO header key/value on either select box, then
     * compares against the golden file optiontransferselect-3.txt.
     */
    public void testWithoutHeaderOnBothSides() throws Exception {
        List left = new ArrayList();
        left.add("Left2");
        List right = new ArrayList();
        right.add("Right2");
        List leftVal = new ArrayList();
        leftVal.add("Left1");
        leftVal.add("Left2");
        leftVal.add("Left3");
        List rightVal = new ArrayList();
        rightVal.add("Right1");
        rightVal.add("Right2");
        rightVal.add("Right3");
        TestAction testaction = (TestAction) action;
        testaction.setCollection(left);
        testaction.setList2(right);
        testaction.setCollection2(leftVal);
        testaction.setList3(rightVal);
        OptionTransferSelectTag tag = new OptionTransferSelectTag();
        tag.setPageContext(pageContext);
        tag.setName("collection");
        tag.setId("id");
        tag.setList("collection2");
        tag.setSize("20");
        tag.setMultiple("true");
        tag.setEmptyOption("true");
        tag.setDoubleName("list2");
        tag.setDoubleList("list3");
        tag.setDoubleId("doubleId");
        tag.setDoubleSize("20");
        tag.setMultiple("true");
        tag.setDoubleEmptyOption("true");
        tag.setAllowAddAllToLeft("true");
        tag.setAllowAddAllToRight("true");
        tag.setAllowAddToLeft("true");
        tag.setAllowAddToRight("true");
        tag.setAllowSelectAll("true");
        tag.setAddAllToLeftLabel("All Left");
        tag.setAddAllToRightLabel("All Right");
        tag.setAddToLeftLabel("Left");
        tag.setAddToRightLabel("Right");
        tag.setSelectAllLabel("Select All");
        tag.setLeftTitle("Title Left");
        tag.setRightTitle("Title Right");
        tag.setButtonCssClass("buttonCssClass");
        tag.setButtonCssStyle("buttonCssStyle");
        tag.doStartTag();
        tag.doEndTag();
        //System.out.println(writer.toString());
        verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-3.txt"));
    }
    /**
     * Renders the tag with a header on the left select box only (no double
     * header), then compares against the golden file optiontransferselect-4.txt.
     */
    public void testWithoutHeaderOnOneSide() throws Exception {
        List left = new ArrayList();
        left.add("Left2");
        List right = new ArrayList();
        right.add("Right2");
        List leftVal = new ArrayList();
        leftVal.add("Left1");
        leftVal.add("Left2");
        leftVal.add("Left3");
        List rightVal = new ArrayList();
        rightVal.add("Right1");
        rightVal.add("Right2");
        rightVal.add("Right3");
        TestAction testaction = (TestAction) action;
        testaction.setCollection(left);
        testaction.setList2(right);
        testaction.setCollection2(leftVal);
        testaction.setList3(rightVal);
        OptionTransferSelectTag tag = new OptionTransferSelectTag();
        tag.setPageContext(pageContext);
        tag.setName("collection");
        tag.setId("id");
        tag.setList("collection2");
        tag.setSize("20");
        tag.setMultiple("true");
        tag.setEmptyOption("true");
        tag.setDoubleName("list2");
        tag.setDoubleList("list3");
        tag.setDoubleId("doubleId");
        tag.setDoubleSize("20");
        tag.setMultiple("true");
        tag.setDoubleEmptyOption("true");
        tag.setAllowAddAllToLeft("true");
        tag.setAllowAddAllToRight("true");
        tag.setAllowAddToLeft("true");
        tag.setAllowAddToRight("true");
        tag.setAllowSelectAll("true");
        tag.setAddAllToLeftLabel("All Left");
        tag.setAddAllToRightLabel("All Right");
        tag.setAddToLeftLabel("Left");
        tag.setAddToRightLabel("Right");
        tag.setSelectAllLabel("Select All");
        tag.setLeftTitle("Title Left");
        tag.setRightTitle("Title Right");
        tag.setButtonCssClass("buttonCssClass");
        tag.setButtonCssStyle("buttonCssStyle");
        tag.setHeaderKey("Header Key");
        tag.setHeaderValue("Header Value");
        tag.doStartTag();
        tag.doEndTag();
        //System.out.println(writer.toString());
        verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-4.txt"));
    }
/**
 * Renders the transfer-select with the empty option disabled on both the
 * left and the right select (both sides still carry header entries).
 * Output is compared against the golden file optiontransferselect-5.txt.
 */
public void testWithoutEmptyOptionOnBothSides() throws Exception {
    // Pre-selected entries for each side.
    List left = new ArrayList();
    left.add("Left2");
    List right = new ArrayList();
    right.add("Right2");
    // Full option lists backing each side.
    List leftVal = new ArrayList();
    leftVal.add("Left1");
    leftVal.add("Left2");
    leftVal.add("Left3");
    List rightVal = new ArrayList();
    rightVal.add("Right1");
    rightVal.add("Right2");
    rightVal.add("Right3");
    // Expose the lists through the backing action the tag reads from.
    TestAction testaction = (TestAction) action;
    testaction.setCollection(left);
    testaction.setList2(right);
    testaction.setCollection2(leftVal);
    testaction.setList3(rightVal);
    // Configure the tag under test; emptyOption is "false" on BOTH sides.
    OptionTransferSelectTag tag = new OptionTransferSelectTag();
    tag.setPageContext(pageContext);
    tag.setName("collection");
    tag.setId("id");
    tag.setList("collection2");
    tag.setSize("20");
    tag.setMultiple("true");
    tag.setEmptyOption("false");
    tag.setDoubleName("list2");
    tag.setDoubleList("list3");
    tag.setDoubleId("doubleId");
    tag.setDoubleSize("20");
    // NOTE(review): second setMultiple call — probably intended as
    // setDoubleMultiple("true"); verify against the expected output file.
    tag.setMultiple("true");
    tag.setDoubleEmptyOption("false");
    tag.setAllowAddAllToLeft("true");
    tag.setAllowAddAllToRight("true");
    tag.setAllowAddToLeft("true");
    tag.setAllowAddToRight("true");
    tag.setAllowSelectAll("true");
    tag.setAddAllToLeftLabel("All Left");
    tag.setAddAllToRightLabel("All Right");
    tag.setAddToLeftLabel("Left");
    tag.setAddToRightLabel("Right");
    tag.setSelectAllLabel("Select All");
    tag.setLeftTitle("Title Left");
    tag.setRightTitle("Title Right");
    tag.setButtonCssClass("buttonCssClass");
    tag.setButtonCssStyle("buttonCssStyle");
    tag.setHeaderKey("Header Key");
    tag.setHeaderValue("Header Value");
    tag.setDoubleHeaderKey("Double Header Key");
    tag.setDoubleHeaderValue("Double Header Value");
    tag.doStartTag();
    tag.doEndTag();
    //System.out.println(writer.toString());
    verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-5.txt"));
}
/**
 * Renders the transfer-select with the empty option enabled on the left
 * select but disabled on the right (double) select. Output is compared
 * against the golden file optiontransferselect-6.txt.
 */
public void testWithoutEmptyOptionOnOneSide() throws Exception {
    // Pre-selected entries for each side.
    List left = new ArrayList();
    left.add("Left2");
    List right = new ArrayList();
    right.add("Right2");
    // Full option lists backing each side.
    List leftVal = new ArrayList();
    leftVal.add("Left1");
    leftVal.add("Left2");
    leftVal.add("Left3");
    List rightVal = new ArrayList();
    rightVal.add("Right1");
    rightVal.add("Right2");
    rightVal.add("Right3");
    // Expose the lists through the backing action the tag reads from.
    TestAction testaction = (TestAction) action;
    testaction.setCollection(left);
    testaction.setList2(right);
    testaction.setCollection2(leftVal);
    testaction.setList3(rightVal);
    // Configure the tag under test; only doubleEmptyOption is "false".
    OptionTransferSelectTag tag = new OptionTransferSelectTag();
    tag.setPageContext(pageContext);
    tag.setName("collection");
    tag.setId("id");
    tag.setList("collection2");
    tag.setSize("20");
    tag.setMultiple("true");
    tag.setEmptyOption("true");
    tag.setDoubleName("list2");
    tag.setDoubleList("list3");
    tag.setDoubleId("doubleId");
    tag.setDoubleSize("20");
    // NOTE(review): second setMultiple call — probably intended as
    // setDoubleMultiple("true"); verify against the expected output file.
    tag.setMultiple("true");
    tag.setDoubleEmptyOption("false");
    tag.setAllowAddAllToLeft("true");
    tag.setAllowAddAllToRight("true");
    tag.setAllowAddToLeft("true");
    tag.setAllowAddToRight("true");
    tag.setAllowSelectAll("true");
    tag.setAddAllToLeftLabel("All Left");
    tag.setAddAllToRightLabel("All Right");
    tag.setAddToLeftLabel("Left");
    tag.setAddToRightLabel("Right");
    tag.setSelectAllLabel("Select All");
    tag.setLeftTitle("Title Left");
    tag.setRightTitle("Title Right");
    tag.setButtonCssClass("buttonCssClass");
    tag.setButtonCssStyle("buttonCssStyle");
    tag.setHeaderKey("Header Key");
    tag.setHeaderValue("Header Value");
    tag.setDoubleHeaderKey("Double Header Key");
    tag.setDoubleHeaderValue("Double Header Value");
    tag.doStartTag();
    tag.doEndTag();
    //System.out.println(writer.toString());
    verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-6.txt"));
}
/**
 * Renders the transfer-select with the add-all and select-all buttons
 * disabled while keeping the single add-left/add-right buttons enabled,
 * and attaches onclick handlers to those buttons. Output is compared
 * against the golden file optiontransferselect-7.txt.
 */
public void testDisableSomeButtons() throws Exception {
    // Pre-selected entries for each side.
    List left = new ArrayList();
    left.add("Left2");
    List right = new ArrayList();
    right.add("Right2");
    // Full option lists backing each side.
    List leftVal = new ArrayList();
    leftVal.add("Left1");
    leftVal.add("Left2");
    leftVal.add("Left3");
    List rightVal = new ArrayList();
    rightVal.add("Right1");
    rightVal.add("Right2");
    rightVal.add("Right3");
    // Expose the lists through the backing action the tag reads from.
    TestAction testaction = (TestAction) action;
    testaction.setCollection(left);
    testaction.setList2(right);
    testaction.setCollection2(leftVal);
    testaction.setList3(rightVal);
    // Configure the tag under test.
    OptionTransferSelectTag tag = new OptionTransferSelectTag();
    tag.setPageContext(pageContext);
    tag.setName("collection");
    tag.setId("id");
    tag.setList("collection2");
    tag.setSize("20");
    tag.setMultiple("true");
    tag.setEmptyOption("true");
    tag.setDoubleName("list2");
    tag.setDoubleList("list3");
    tag.setDoubleId("doubleId");
    tag.setDoubleSize("20");
    // NOTE(review): second setMultiple call — probably intended as
    // setDoubleMultiple("true"); verify against the expected output file.
    tag.setMultiple("true");
    tag.setDoubleEmptyOption("true");
    // Disable the "move all" buttons and the select-all button.
    tag.setAllowAddAllToLeft("false");
    tag.setAllowAddAllToRight("false");
    tag.setAllowAddToLeft("true");
    tag.setAllowAddToRight("true");
    tag.setAllowSelectAll("false");
    tag.setAddAllToLeftLabel("All Left");
    tag.setAddAllToRightLabel("All Right");
    tag.setAddToLeftLabel("Left");
    tag.setAddToRightLabel("Right");
    tag.setSelectAllLabel("Select All");
    tag.setLeftTitle("Title Left");
    tag.setRightTitle("Title Right");
    tag.setButtonCssClass("buttonCssClass");
    tag.setButtonCssStyle("buttonCssStyle");
    tag.setHeaderKey("Header Key");
    tag.setHeaderValue("Header Value");
    tag.setDoubleHeaderKey("Double Header Key");
    tag.setDoubleHeaderValue("Double Header Value");
    // Custom onclick handlers on the remaining (enabled) move buttons.
    tag.setAddToLeftOnclick("alert('Moving Left')");
    tag.setAddToRightOnclick("alert('Moving Right')");
    tag.doStartTag();
    tag.doEndTag();
    //System.out.println(writer.toString());
    verify(OptionTransferSelectTagTest.class.getResource("optiontransferselect-7.txt"));
}
}
| apache-2.0 |
apache/kylin | core-metadata/src/main/java/org/apache/kylin/source/adhocquery/IPushDownRunner.java | 2212 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.source.adhocquery;
import java.util.List;
import org.apache.kylin.common.KylinConfig;
import org.apache.kylin.metadata.querymeta.SelectedColumnMeta;
public interface IPushDownRunner {

    /**
     * Initializes the runner with the Kylin configuration. Called once before
     * any query or update is pushed down.
     *
     * @param config the Kylin configuration to read connection settings from
     */
    void init(KylinConfig config);

    /**
     * Runs a pushdown query in the source database in case Kylin cannot serve
     * it using a cube.
     *
     * @param query the query statement
     * @param returnRows an empty list to collect returning rows
     * @param returnColumnMeta an empty list to collect metadata of returning columns
     * @throws Exception if running the pushdown query fails
     */
    void executeQuery(String query, List<List<String>> returnRows, List<SelectedColumnMeta> returnColumnMeta) throws Exception;

    /**
     * Runs a pushdown non-query SQL statement.
     *
     * @param sql the sql statement
     * @throws Exception if running pushdown fails
     */
    void executeUpdate(String sql) throws Exception;

    /**
     * Converts a SQL statement according to the SQL dialect of the underlying
     * engine.
     *
     * @param kylinConfig the Kylin configuration
     * @param sql the original SQL text
     * @param project the project the query belongs to
     * @param defaultSchema the schema used to resolve unqualified table names
     * @param isPrepare whether the SQL is being prepared rather than executed directly
     * @return the converted SQL text
     */
    String convertSql(KylinConfig kylinConfig, String sql, String project, String defaultSchema, boolean isPrepare);
}
| apache-2.0 |
xhoong/incubator-calcite | core/src/main/java/org/apache/calcite/sql/validate/SqlMonikerImpl.java | 2583 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.calcite.sql.validate;
import org.apache.calcite.sql.SqlIdentifier;
import org.apache.calcite.sql.parser.SqlParserPos;
import org.apache.calcite.util.Util;
import com.google.common.collect.ImmutableList;
import java.util.List;
import java.util.Objects;
/**
* A generic implementation of {@link SqlMoniker}.
*/
public class SqlMonikerImpl implements SqlMoniker {
  //~ Instance fields --------------------------------------------------------

  // Immutable component names of the moniker (defensively copied on construction).
  private final ImmutableList<String> names;
  // The kind of object this moniker identifies; never null.
  private final SqlMonikerType type;

  //~ Constructors -----------------------------------------------------------

  /**
   * Creates a moniker with an array of names.
   *
   * @param names component names; copied, so later mutation of the argument has no effect
   * @param type kind of object identified; must not be null
   */
  public SqlMonikerImpl(List<String> names, SqlMonikerType type) {
    this.names = ImmutableList.copyOf(names);
    this.type = Objects.requireNonNull(type);
  }

  /**
   * Creates a moniker with a single name.
   */
  public SqlMonikerImpl(String name, SqlMonikerType type) {
    this(ImmutableList.of(name), type);
  }

  //~ Methods ----------------------------------------------------------------

  // Two monikers are equal iff both their type and their component names match.
  @Override public boolean equals(Object obj) {
    return this == obj
        || obj instanceof SqlMonikerImpl
        && type == ((SqlMonikerImpl) obj).type
        && names.equals(((SqlMonikerImpl) obj).names);
  }

  @Override public int hashCode() {
    return Objects.hash(type, names);
  }

  /** Returns the kind of object this moniker identifies. */
  public SqlMonikerType getType() {
    return type;
  }

  /** Returns the component names of this moniker, as an immutable list. */
  public List<String> getFullyQualifiedNames() {
    return names;
  }

  /** Converts this moniker to a {@link SqlIdentifier} with a zero parser position. */
  public SqlIdentifier toIdentifier() {
    return new SqlIdentifier(names, SqlParserPos.ZERO);
  }

  // Joins the component names with "." via Util.sepList.
  public String toString() {
    return Util.sepList(names, ".");
  }

  /** Returns a string combining the type and the joined names, e.g. {@code TYPE(a.b)}. */
  public String id() {
    return type + "(" + this + ")";
  }
}
// End SqlMonikerImpl.java
| apache-2.0 |
arenadata/ambari | ambari-server/src/test/java/org/apache/ambari/server/checks/HostMaintenanceModeCheckTest.java | 5435 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ambari.server.checks;
import java.util.ArrayList;
import java.util.List;
import org.apache.ambari.server.controller.PrereqCheckRequest;
import org.apache.ambari.server.state.Cluster;
import org.apache.ambari.server.state.Clusters;
import org.apache.ambari.server.state.Host;
import org.apache.ambari.server.state.MaintenanceState;
import org.apache.ambari.server.state.StackId;
import org.apache.ambari.server.state.stack.PrereqCheckStatus;
import org.apache.ambari.server.state.stack.PrerequisiteCheck;
import org.apache.ambari.server.state.stack.upgrade.UpgradeType;
import org.junit.Assert;
import org.junit.Test;
import org.mockito.Mockito;
import com.google.inject.Provider;
/**
* Tests {@link HostMaintenanceModeCheck}.
*/
public class HostMaintenanceModeCheckTest {

  // Shared mock of the cluster registry the check resolves hosts through.
  private final Clusters clusters = Mockito.mock(Clusters.class);

  /**
   * For a default (non-HOST_ORDERED) upgrade the check passes while every host
   * is out of maintenance mode, and downgrades to WARNING once a host enters it.
   *
   * @throws Exception
   */
  @Test
  public void testPerform() throws Exception {
    final HostMaintenanceModeCheck hostMaintenanceModeCheck = new HostMaintenanceModeCheck();
    // Inject the mocked Clusters into the check.
    hostMaintenanceModeCheck.clustersProvider = new Provider<Clusters>() {
      @Override
      public Clusters get() {
        return clusters;
      }
    };

    final Cluster cluster = Mockito.mock(Cluster.class);
    Mockito.when(cluster.getClusterId()).thenReturn(1L);
    Mockito.when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP", "2.2"));
    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);

    // Three hosts, all initially out of maintenance mode.
    final List<Host> hosts = new ArrayList<>();
    final Host host1 = Mockito.mock(Host.class);
    final Host host2 = Mockito.mock(Host.class);
    final Host host3 = Mockito.mock(Host.class);
    Mockito.when(host1.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    Mockito.when(host2.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    hosts.add(host1);
    hosts.add(host2);
    hosts.add(host3);
    Mockito.when(cluster.getHosts()).thenReturn(hosts);

    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
    hostMaintenanceModeCheck.perform(check, new PrereqCheckRequest("cluster"));
    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());

    // put a host into MM in order to trigger the warning
    check = new PrerequisiteCheck(null, null);
    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.ON);
    hostMaintenanceModeCheck.perform(check, new PrereqCheckRequest("cluster"));
    Assert.assertEquals(PrereqCheckStatus.WARNING, check.getStatus());
  }

  /**
   * For a HOST_ORDERED upgrade maintenance mode is not tolerated at all: the
   * same scenario fails outright and the failure reason names the host.
   */
  @Test
  public void testPerformHostOrdered() throws Exception {
    final HostMaintenanceModeCheck hostMaintenanceModeCheck = new HostMaintenanceModeCheck();
    // Inject the mocked Clusters into the check.
    hostMaintenanceModeCheck.clustersProvider = new Provider<Clusters>() {
      @Override
      public Clusters get() {
        return clusters;
      }
    };

    final Cluster cluster = Mockito.mock(Cluster.class);
    Mockito.when(cluster.getClusterId()).thenReturn(1L);
    Mockito.when(cluster.getCurrentStackVersion()).thenReturn(new StackId("HDP", "2.2"));
    Mockito.when(clusters.getCluster("cluster")).thenReturn(cluster);

    // Three named hosts, all initially out of maintenance mode.
    final List<Host> hosts = new ArrayList<>();
    final Host host1 = Mockito.mock(Host.class);
    final Host host2 = Mockito.mock(Host.class);
    final Host host3 = Mockito.mock(Host.class);
    Mockito.when(host1.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    Mockito.when(host2.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.OFF);
    Mockito.when(host1.getHostName()).thenReturn("h1");
    Mockito.when(host2.getHostName()).thenReturn("h2");
    Mockito.when(host3.getHostName()).thenReturn("h3");
    hosts.add(host1);
    hosts.add(host2);
    hosts.add(host3);
    Mockito.when(cluster.getHosts()).thenReturn(hosts);

    PrerequisiteCheck check = new PrerequisiteCheck(null, null);
    hostMaintenanceModeCheck.perform(check, new PrereqCheckRequest("cluster"));
    Assert.assertEquals(PrereqCheckStatus.PASS, check.getStatus());

    // put a host into MM in order to trigger the warning
    check = new PrerequisiteCheck(null, null);
    Mockito.when(host3.getMaintenanceState(1L)).thenReturn(MaintenanceState.ON);
    hostMaintenanceModeCheck.perform(check, new PrereqCheckRequest("cluster", UpgradeType.HOST_ORDERED));
    Assert.assertEquals(PrereqCheckStatus.FAIL, check.getStatus());
    Assert.assertEquals("The following hosts cannot be in Maintenance Mode: h3.", check.getFailReason());
  }
}
| apache-2.0 |
futur/usergrid-stack | core/src/main/java/org/usergrid/utils/ExceptionUtils.java | 1112 | /*******************************************************************************
* Copyright 2012 Apigee Corporation
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
******************************************************************************/
package org.usergrid.utils;
public class ExceptionUtils {

    /**
     * Builds a single human-readable description of a throwable and its entire
     * cause chain.
     *
     * <p>Each link in the chain contributes its fully-qualified class name,
     * followed by {@code ": message"} when the throwable carries a non-null
     * message; links are separated by {@code "; "}. A null argument yields the
     * empty string.
     *
     * @param e the throwable to describe; may be null
     * @return the concatenated description, never null
     */
    public static String getMessages(Throwable e) {
        // StringBuilder avoids O(n^2) string concatenation while walking the chain.
        StringBuilder msg = new StringBuilder();
        while (e != null) {
            if (msg.length() > 0) {
                msg.append("; ");
            }
            msg.append(e.getClass().getName());
            String s = e.getMessage();
            if (s != null) {
                msg.append(": ").append(s);
            }
            e = e.getCause();
        }
        return msg.toString();
    }
}
| apache-2.0 |
DenverM80/ds3_autogen | ds3-autogen-net/src/main/java/com/spectralogic/ds3autogen/net/generators/parsers/element/NullableListElement.java | 1574 | /*
* ******************************************************************************
* Copyright 2014-2017 Spectra Logic Corporation. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use
* this file except in compliance with the License. A copy of the License is located at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* or in the "license" file accompanying this file.
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
* ****************************************************************************
*/
package com.spectralogic.ds3autogen.net.generators.parsers.element;
import static com.spectralogic.ds3autogen.utils.Helper.capFirst;
/**
* Represents an element within a .net model that contains
* a list of elements that do not have an encapsulating tag
*/
public class NullableListElement extends BaseNullableElement {

    public NullableListElement(
            final String name,
            final String xmlTag,
            final String parserName) {
        super(name, xmlTag, parserName);
    }

    /**
     * Renders the .net statement that parses this list of elements: selects
     * every child element matching the (capitalized) xml tag, maps each one
     * through the element parser, and collects the results into a list.
     */
    @Override
    public String printParseElement() {
        final StringBuilder statement = new StringBuilder();
        statement.append(getName())
                .append(" = element")
                .append(".Elements(\"")
                .append(capFirst(getXmlTag()))
                .append("\")")
                .append(".Select(")
                .append(getParserName())
                .append(").ToList()");
        return statement.toString();
    }
}
| apache-2.0 |
darkforestzero/buck | test/com/facebook/buck/rules/ToolTest.java | 6191 | /*
* Copyright 2014-present Facebook, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License. You may obtain
* a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package com.facebook.buck.rules;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assume.assumeTrue;
import com.facebook.buck.rules.keys.DefaultRuleKeyFactory;
import com.facebook.buck.rules.keys.RuleKeyBuilder;
import com.facebook.buck.rules.keys.RuleKeyResult;
import com.facebook.buck.testutil.FakeFileHashCache;
import com.facebook.buck.util.environment.Platform;
import com.google.common.base.Strings;
import com.google.common.collect.ImmutableMap;
import org.junit.Test;
import java.nio.file.Path;
import java.nio.file.Paths;
public class ToolTest {

  /**
   * Rule keys for {@link HashedFileTool}s must depend on the content hash, not
   * on tool object identity: same path/hash gives equal keys, while a different
   * path or a different hash changes the key.
   */
  @Test
  public void hashFileToolsCreatedWithTheSamePathAreEqual() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
    );
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    // Fake hash cache: "path" and "same" share a hash, "other-path" differs.
    DefaultRuleKeyFactory ruleKeyFactory =
        new DefaultRuleKeyFactory(
            0,
            FakeFileHashCache.createFromStrings(
                ImmutableMap.<String, String>builder()
                    .put("path", Strings.repeat("a", 40))
                    .put("other-path", Strings.repeat("b", 40))
                    .put("same", Strings.repeat("a", 40))
                    .build()),
            pathResolver,
            ruleFinder);

    Path path = Paths.get("path");
    Path otherPath = Paths.get("other-path");
    Path same = Paths.get("same");

    Tool tool1 = new HashedFileTool(path);
    RuleKey tool1RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool1)
            .build()
            .result;

    Tool tool2 = new HashedFileTool(path);
    RuleKey tool2RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool2)
            .build()
            .result;

    // Same name, same sha1
    assertEquals(tool1RuleKey, tool2RuleKey);

    Tool tool3 = new HashedFileTool(otherPath);
    RuleKey tool3RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool3)
            .build()
            .result;

    // Different name, different sha1
    assertNotEquals(tool1RuleKey, tool3RuleKey);

    Tool tool4 = new HashedFileTool(same);
    RuleKey tool4RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool4)
            .build()
            .result;

    // Different name, same sha1
    assertNotEquals(tool1RuleKey, tool4RuleKey);
  }

  /**
   * A {@link VersionedTool}'s rule key is determined by its name and version
   * string, not by the path it lives at.
   */
  @Test
  public void customVersion() {
    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
    );
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    DefaultRuleKeyFactory ruleKeyFactory =
        new DefaultRuleKeyFactory(
            0,
            FakeFileHashCache.createFromStrings(
                ImmutableMap.of()),
            pathResolver,
            ruleFinder);

    String tool = "tool";
    String version = "version";

    // Same tool/version at two different paths must produce the same key.
    Tool tool1 =
        VersionedTool.of(
            Paths.get("something"),
            tool,
            version);
    RuleKey tool1RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool1)
            .build()
            .result;

    Tool tool2 =
        VersionedTool.of(
            Paths.get("something-else"),
            tool,
            version);
    RuleKey tool2RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool2)
            .build()
            .result;

    assertEquals(tool1RuleKey, tool2RuleKey);
  }

  /**
   * Tools with the same file name and the same content hash but different
   * absolute locations are treated as the same tool (POSIX platforms only).
   */
  @Test
  public void shouldAssumeThatToolsInDifferentAbsoluteLocationsWithTheSameNameAreTheSame() {
    assumeTrue(Platform.detect() == Platform.MACOS || Platform.detect() == Platform.LINUX);
    // Note: both file names are the same
    HashedFileTool tool1 = new HashedFileTool(Paths.get("/usr/local/bin/python2.7"));
    HashedFileTool tool2 = new HashedFileTool(Paths.get("/opt/bin/python2.7"));

    SourcePathRuleFinder ruleFinder = new SourcePathRuleFinder(
        new BuildRuleResolver(TargetGraph.EMPTY, new DefaultTargetNodeToBuildRuleTransformer())
    );
    SourcePathResolver pathResolver = new SourcePathResolver(ruleFinder);
    DefaultRuleKeyFactory ruleKeyFactory =
        new DefaultRuleKeyFactory(
            0,
            FakeFileHashCache.createFromStrings(
                ImmutableMap.<String, String>builder()
                    // Note: the hashes of both files are the same
                    .put("/usr/local/bin/python2.7", Strings.repeat("a", 40))
                    .put("/opt/bin/python2.7", Strings.repeat("a", 40))
                    .build()),
            pathResolver,
            ruleFinder);

    RuleKey tool1RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool1)
            .build()
            .result;

    RuleKey tool2RuleKey =
        createRuleKeyBuilder(ruleKeyFactory, pathResolver)
            .setReflectively("tool", tool2)
            .build()
            .result;

    assertEquals(tool1RuleKey, tool2RuleKey);
  }

  // Helper: a rule-key builder anchored to a throwaway fake build rule.
  private RuleKeyBuilder<RuleKeyResult<RuleKey>> createRuleKeyBuilder(
      DefaultRuleKeyFactory factory,
      SourcePathResolver resolver) {
    return factory.newBuilderForTesting(new FakeBuildRule("//:test", resolver));
  }
}
| apache-2.0 |
googlesamples/io2014-codelabs | play-games/end/Squash/app/src/main/java/com/google/example/squash/SquashActivity.java | 10160 | package com.google.example.squash;
import java.io.UnsupportedEncodingException;
import java.util.ArrayList;
import java.util.HashMap;
import android.app.Activity;
import android.content.Intent;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.Bundle;
import android.view.Menu;
import android.view.MenuItem;
import android.view.View;
import android.widget.Toast;
import com.google.android.gms.common.api.PendingResult;
import com.google.android.gms.common.api.ResultCallback;
import com.google.android.gms.games.Games;
import com.google.android.gms.games.GamesActivityResultCodes;
import com.google.android.gms.games.request.GameRequest;
import com.google.android.gms.games.request.Requests;
import com.google.example.games.basegameutils.BaseGameActivity;
public class SquashActivity extends BaseGameActivity {

    // Request codes used to correlate startActivityForResult round trips.
    public static final int REQUEST_ACHIEVEMENTS = 1001;
    public static final int REQUEST_LEADERBOARD = 1002;
    public static final int SEND_GIFT = 1003;
    public static final int SHOW_INBOX = 1004;

    public SquashActivity() {
        // Request both the Games and Google+ API clients from BaseGameActivity.
        super(CLIENT_GAMES | CLIENT_PLUS);
    }

    /**
     * Routes menu selections to the corresponding Play Games UI: leaderboard,
     * achievements, the gift-sending flow, or the gift inbox. Actions that
     * need the API client are guarded by isSignedIn().
     */
    @Override
    public boolean onOptionsItemSelected(MenuItem item) {
        switch (item.getItemId()) {
        case R.id.menu_leaderboard:
            startActivityForResult(
                    Games.Leaderboards.getLeaderboardIntent( getApiClient(),
                            getResources().getString(
                                    R.string.leaderboard_bounces)),
                    REQUEST_LEADERBOARD);
            return true;
        case R.id.menu_reset:
            // Consumed but currently a no-op.
            return true;
        case R.id.menu_achievements:
            if (isSignedIn()) {
                startActivityForResult(
                        Games.Achievements.getAchievementsIntent( getApiClient() ),
                        REQUEST_ACHIEVEMENTS);
            }
            return true;
        case R.id.menu_send_gift:
            if (isSignedIn()) {
                Bitmap bm = BitmapFactory.decodeResource(getResources(), R.drawable.ic_launcher);
                // NOTE(review): "Gold".getBytes() uses the platform default
                // charset while the inbox handler decodes with UTF-8 — consider
                // passing an explicit charset on both sides.
                Intent intent = Games.Requests.getSendIntent(getApiClient(), GameRequest.TYPE_GIFT,
                        "Gold".getBytes(), Requests.REQUEST_DEFAULT_LIFETIME_DAYS, bm, "A treasure chest!");
                startActivityForResult(intent, SEND_GIFT);
            }
            return true;
        case R.id.menu_gift_inbox:
            if (isSignedIn()) {
                startActivityForResult(Games.Requests.getInboxIntent(getApiClient()), SHOW_INBOX);
            }
            return true;
        }
        return super.onOptionsItemSelected(item);
    }

    /**
     * Wires the sign-in, sign-out and quick-match buttons to their handlers.
     */
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_squash);

        // This sets up the click listener.
        findViewById(R.id.sign_in_button).setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        // start the asynchronous sign in flow
                        beginUserInitiatedSignIn();
                    }
                });

        findViewById(R.id.sign_out_button).setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(View v) {
                        signOut();
                        setViewVisibility();
                    }
                });

        findViewById(R.id.quick_match_button).setOnClickListener(
                new View.OnClickListener() {
                    @Override
                    public void onClick(View view) {
                        startQuickGame();
                    }
                });
    }

    // Hides the menu buttons and shows the game view, then starts play.
    private void startQuickGame() {
        ((SquashView) findViewById(R.id.squashView)).start();
        findViewById(R.id.sign_out_button).setVisibility(View.GONE);
        findViewById(R.id.sign_in_button).setVisibility(View.GONE);
        findViewById(R.id.quick_match_button).setVisibility(View.GONE);
        findViewById(R.id.squashView).setVisibility(View.VISIBLE);
    }

    @Override
    public boolean onCreateOptionsMenu(Menu menu) {
        // Inflate the menu; this adds items to the action bar if it is present.
        getMenuInflater().inflate(R.menu.squash, menu);
        return true;
    }

    @Override
    public void onResume() {
        super.onResume();
        // Resume the game animation when the activity comes to the foreground.
        ((SquashView) findViewById(R.id.squashView)).setAnimating(true);
    }

    @Override
    public void onStop() {
        super.onStop();
        // Halt the animation so the view does not keep drawing in the background.
        ((SquashView) findViewById(R.id.squashView)).setAnimating(false);
    }

    // Called whenever the Squash game starts.
    public void onGameStart(SquashView v) {
        if (isSignedIn()) {
            // Unlock the "first game" achievement on any game start.
            Games.Achievements.unlock( getApiClient(),
                    getResources().getString(R.string.achievement_first) );
        }
    }

    // Called whenever the Squash game stops.
    public void onGameStop(SquashView v) {
        setViewVisibility();
        findViewById(R.id.squashView).setVisibility(View.GONE);
        findViewById(R.id.quick_match_button).setVisibility(View.VISIBLE);
        if (isSignedIn() && v.mScore > 0) {
            // Report the bounce count to the achievement and the leaderboard.
            Games.Achievements.increment( getApiClient(),
                    getResources().getString(R.string.achievement_20),
                    v.mScore);
            Games.Leaderboards.submitScore(getApiClient(),
                    getResources().getString(R.string.leaderboard_bounces),
                    v.mScore);
        }
    }

    // Set the login button visible or not
    public void setViewVisibility() {
        if (isSignedIn()) {
            findViewById(R.id.sign_out_button).setVisibility(View.VISIBLE);
            findViewById(R.id.sign_in_button).setVisibility(View.GONE);
        } else {
            findViewById(R.id.sign_out_button).setVisibility(View.GONE);
            findViewById(R.id.sign_in_button).setVisibility(View.VISIBLE);
        }
    }

    @Override
    public void onSignInFailed() {
        setViewVisibility();
    }

    @Override
    public void onSignInSucceeded() {
        setViewVisibility();
        // Process any gift/wish requests that arrived while signed out.
        ArrayList<GameRequest> requests = getGameHelper().getRequests();
        if (requests != null) {
            handleGiftRequest(requests);
        }
    }

    /**
     * Handles results from the gift-send and inbox activities; other request
     * codes fall through to the superclass via the initial super call.
     */
    @Override
    public void onActivityResult(int request, int response, Intent intent) {
        super.onActivityResult(request, response, intent);
        if (request == SEND_GIFT) {
            if (response == GamesActivityResultCodes.RESULT_SEND_REQUEST_FAILED) {
                Toast.makeText(this, "Failed to send gift!", Toast.LENGTH_LONG).show();
            }
        }
        else if (request == SHOW_INBOX) {
            if (response == Activity.RESULT_OK && intent != null) {
                handleGiftRequest(Games.Requests.getGameRequestsFromInboxResponse(intent));
            } else {
                Toast.makeText(this, "Error receiving gift!", Toast.LENGTH_LONG).show();
            }
        }
    }

    /**
     * Accepts a batch of incoming game requests and, for each successfully
     * accepted one, rewards the player (gift) or acknowledges the wish.
     *
     * @param requests the requests to accept; null is tolerated and ignored
     */
    private void handleGiftRequest(ArrayList<GameRequest> requests) {
        if (requests == null) {
            return;
        }

        // Attempt to accept these requests.
        ArrayList<String> requestIds = new ArrayList<String>();
        final HashMap<String, GameRequest> gameRequestMap
                = new HashMap<String, GameRequest>();

        // Cache the requests.
        for (GameRequest request : requests) {
            String requestId = request.getRequestId();
            requestIds.add(requestId);
            gameRequestMap.put(requestId, request);
        }

        // Accept the requests.
        PendingResult<Requests.UpdateRequestsResult> pendingResults = Games.Requests.acceptRequests(getApiClient(), requestIds);
        pendingResults.setResultCallback(
                new ResultCallback<Requests.UpdateRequestsResult>() {
                    @Override
                    public void onResult(Requests.UpdateRequestsResult result) {
                        // Scan each result outcome and process accordingly.
                        for (String requestId : result.getRequestIds()) {
                            // We must have a local cached copy of the request
                            // and the request needs to be a
                            // success in order to continue.
                            if (!gameRequestMap.containsKey(requestId)
                                    || result.getRequestOutcome(requestId)
                                    != Requests.REQUEST_UPDATE_OUTCOME_SUCCESS) {
                                continue;
                            }

                            // Update succeeded here. Find the type of request
                            // and act accordingly. For wishes, a
                            // responding gift will be automatically sent.
                            switch (gameRequestMap.get(requestId).getType()) {
                            case GameRequest.TYPE_GIFT:
                                // Reward the player here
                                try {
                                    Toast.makeText(getApplicationContext(), "Accepted a gift! It is... " + new String(gameRequestMap.get(requestId).getData(), "UTF-8"), Toast.LENGTH_LONG).show();
                                } catch (UnsupportedEncodingException e) {
                                    Toast.makeText(getApplicationContext(), "Accepted a gift!", Toast.LENGTH_LONG).show();
                                }
                                break;
                            case GameRequest.TYPE_WISH:
                                // Process the wish request
                                Toast.makeText(getApplicationContext(), "Accepted wish!", Toast.LENGTH_LONG).show();
                                break;
                            }
                        }
                    }
                }
        );
    }
}
| apache-2.0 |
IvanNikolaychuk/pentaho-kettle | engine/src/org/pentaho/di/trans/steps/checksum/CheckSum.java | 8616 | /*! ******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2013 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.trans.steps.checksum;
import java.security.MessageDigest;
import java.util.zip.Adler32;
import java.util.zip.CRC32;
import org.apache.commons.codec.binary.Hex;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.row.RowDataUtil;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.trans.Trans;
import org.pentaho.di.trans.TransMeta;
import org.pentaho.di.trans.step.BaseStep;
import org.pentaho.di.trans.step.StepDataInterface;
import org.pentaho.di.trans.step.StepInterface;
import org.pentaho.di.trans.step.StepMeta;
import org.pentaho.di.trans.step.StepMetaInterface;
/**
* Caculate a checksum for each row.
*
* @author Samatar Hassan
* @since 30-06-2008
*/
public class CheckSum extends BaseStep implements StepInterface {
  private static Class<?> PKG = CheckSumMeta.class; // for i18n purposes, needed by Translator2!!

  private CheckSumMeta meta;
  private CheckSumData data;

  public CheckSum( StepMeta stepMeta, StepDataInterface stepDataInterface, int copyNr, TransMeta transMeta,
    Trans trans ) {
    super( stepMeta, stepDataInterface, copyNr, transMeta, trans );
  }

  /**
   * Reads one input row, computes the configured checksum (CRC32, Adler-32, MD5 or SHA-1) over
   * the selected fields and passes the row on with the checksum appended as one extra value.
   *
   * @param smi step metadata, expected to be a {@link CheckSumMeta}
   * @param sdi step runtime data, expected to be a {@link CheckSumData}
   * @return true while more rows may follow, false once input is exhausted or a fatal error occurred
   * @throws KettleException if a configured field cannot be found or digest creation fails
   */
  public boolean processRow( StepMetaInterface smi, StepDataInterface sdi ) throws KettleException {
    meta = (CheckSumMeta) smi;
    data = (CheckSumData) sdi;

    Object[] r = getRow(); // get row, set busy!
    if ( r == null ) {
      // no more input to be expected...
      setOutputDone();
      return false;
    }

    if ( first ) {
      first = false;

      data.outputRowMeta = getInputRowMeta().clone();
      data.nrInfields = data.outputRowMeta.size();
      meta.getFields( data.outputRowMeta, getStepname(), null, null, this, repository, metaStore );

      // BUGFIX: the original condition "meta.getFieldName() == null || meta.getFieldName().length > 0"
      // dereferenced the null array right away when no field names were configured. When the field
      // list is null or empty we now checksum every input field instead.
      if ( meta.getFieldName() != null && meta.getFieldName().length > 0 ) {
        data.fieldnrs = new int[meta.getFieldName().length];

        for ( int i = 0; i < meta.getFieldName().length; i++ ) {
          data.fieldnrs[i] = getInputRowMeta().indexOfValue( meta.getFieldName()[i] );
          if ( data.fieldnrs[i] < 0 ) {
            logError( BaseMessages.getString( PKG, "CheckSum.Log.CanNotFindField", meta.getFieldName()[i] ) );
            throw new KettleException( BaseMessages.getString( PKG, "CheckSum.Log.CanNotFindField", meta
              .getFieldName()[i] ) );
          }
        }
      } else {
        // No explicit field selection: use all input fields.
        data.fieldnrs = new int[r.length];
        for ( int i = 0; i < r.length; i++ ) {
          data.fieldnrs[i] = i;
        }
      }
      data.fieldnr = data.fieldnrs.length;

      try {
        // Only the digest-based types need a MessageDigest; CRC32/Adler-32 use java.util.zip.
        if ( meta.getCheckSumType().equals( CheckSumMeta.TYPE_MD5 )
          || meta.getCheckSumType().equals( CheckSumMeta.TYPE_SHA1 ) ) {
          data.digest = MessageDigest.getInstance( meta.getCheckSumType() );
        }
      } catch ( Exception e ) {
        throw new KettleException( BaseMessages.getString( PKG, "CheckSum.Error.Digest" ), e );
      }
    } // end if first

    Object[] outputRowData = null;

    try {
      if ( meta.getCheckSumType().equals( CheckSumMeta.TYPE_ADLER32 )
        || meta.getCheckSumType().equals( CheckSumMeta.TYPE_CRC32 ) ) {
        // get checksum
        Long checksum = calculateCheckSum( r );
        outputRowData = RowDataUtil.addValueData( r, data.nrInfields, checksum );
      } else {
        // get checksum
        byte[] o = createCheckSum( r );

        switch ( meta.getResultType() ) {
          case CheckSumMeta.result_TYPE_BINARY:
            outputRowData = RowDataUtil.addValueData( r, data.nrInfields, o );
            break;
          case CheckSumMeta.result_TYPE_HEXADECIMAL:
            String hex =
              meta.isCompatibilityMode() ? byteToHexEncode_compatible( o ) : new String( Hex.encodeHex( o ) );
            outputRowData = RowDataUtil.addValueData( r, data.nrInfields, hex );
            break;
          default:
            outputRowData = RowDataUtil.addValueData( r, data.nrInfields, getStringFromBytes( o ) );
            break;
        }
      }

      if ( checkFeedback( getLinesRead() ) ) {
        if ( log.isDetailed() ) {
          logDetailed( BaseMessages.getString( PKG, "CheckSum.Log.LineNumber", Long.toString( getLinesRead() ) ) );
        }
      }

      // add new values to the row.
      putRow( data.outputRowMeta, outputRowData ); // copy row to output
      // rowset(s);
    } catch ( Exception e ) {
      boolean sendToErrorRow = false;
      String errorMessage = null;

      if ( getStepMeta().isDoingErrorHandling() ) {
        sendToErrorRow = true;
        errorMessage = e.toString();
      } else {
        logError( BaseMessages.getString( PKG, "CheckSum.ErrorInStepRunning" ) + e.getMessage() );
        setErrors( 1 );
        stopAll();
        setOutputDone(); // signal end to receiver(s)
        return false;
      }
      if ( sendToErrorRow ) {
        // Simply add this row to the error row
        putError( getInputRowMeta(), r, 1, errorMessage, meta.getResultFieldName(), "CheckSum001" );
      }
    }
    return true;
  }

  /**
   * Computes an MD5/SHA-1 digest over the concatenated string representation of the selected
   * fields. NOTE(review): bytes are produced with the platform default charset — kept as-is for
   * backward compatibility with existing checksums, but results differ across platforms for
   * non-ASCII data.
   */
  private byte[] createCheckSum( Object[] r ) throws Exception {
    StringBuilder buffer = new StringBuilder();

    // Loop through fields
    for ( int i = 0; i < data.fieldnr; i++ ) {
      String fieldvalue = getInputRowMeta().getString( r, data.fieldnrs[i] );
      buffer.append( fieldvalue );
    }

    // Updates the digest using the specified array of bytes
    data.digest.update( buffer.toString().getBytes() );

    // Completes the hash computation by performing final operations such as padding
    byte[] hash = data.digest.digest();
    // After digest has been called, the MessageDigest object is reset to its initialized state
    return hash;
  }

  /** Renders digest bytes as unsigned decimal values joined with '-', e.g. "12-255-0". */
  private static String getStringFromBytes( byte[] bytes ) {
    StringBuilder sb = new StringBuilder();
    for ( int i = 0; i < bytes.length; i++ ) {
      byte b = bytes[i];
      sb.append( 0x00FF & b );
      if ( i + 1 < bytes.length ) {
        sb.append( "-" );
      }
    }
    return sb.toString();
  }

  /**
   * Legacy hex encoding kept for compatibility mode: converts the bytes via a String round-trip
   * (chars, not raw bytes) before hex-encoding each char's low byte. Do not "fix" this to a
   * straight byte-to-hex conversion — existing checksums depend on it.
   */
  public String byteToHexEncode_compatible( byte[] in ) {
    if ( in == null ) {
      return null;
    }
    final char[] hexDigits = { '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F' };

    String hex = new String( in );

    char[] s = hex.toCharArray();
    StringBuilder hexString = new StringBuilder( 2 * s.length );

    for ( int i = 0; i < s.length; i++ ) {
      hexString.append( hexDigits[( s[i] & 0x00F0 ) >> 4] ); // hi nibble
      hexString.append( hexDigits[s[i] & 0x000F] ); // lo nibble
    }

    return hexString.toString();
  }

  /**
   * Computes a CRC32 (or, for any other configured type, Adler-32) checksum over the concatenated
   * string representation of the selected fields.
   */
  private Long calculateCheckSum( Object[] r ) throws Exception {
    Long retval;
    StringBuilder buffer = new StringBuilder();

    // Loop through fields
    for ( int i = 0; i < data.fieldnr; i++ ) {
      String fieldvalue = getInputRowMeta().getString( r, data.fieldnrs[i] );
      buffer.append( fieldvalue );
    }

    if ( meta.getCheckSumType().equals( CheckSumMeta.TYPE_CRC32 ) ) {
      CRC32 crc32 = new CRC32();
      crc32.update( buffer.toString().getBytes() );
      // Long.valueOf instead of the deprecated new Long(..): allows boxed-value caching.
      retval = Long.valueOf( crc32.getValue() );
    } else {
      Adler32 adler32 = new Adler32();
      adler32.update( buffer.toString().getBytes() );
      retval = Long.valueOf( adler32.getValue() );
    }

    return retval;
  }

  /**
   * Validates the step configuration; fails fast when no result field name was configured.
   *
   * @return true when initialization succeeded
   */
  public boolean init( StepMetaInterface smi, StepDataInterface sdi ) {
    meta = (CheckSumMeta) smi;
    data = (CheckSumData) sdi;

    if ( super.init( smi, sdi ) ) {
      if ( Const.isEmpty( meta.getResultFieldName() ) ) {
        logError( BaseMessages.getString( PKG, "CheckSum.Error.ResultFieldMissing" ) );
        return false;
      }
      return true;
    }
    return false;
  }
}
| apache-2.0 |
paweld2/rest-assured | rest-assured/src/main/java/io/restassured/response/ResponseBodyExtractionOptions.java | 5765 | /*
* Copyright 2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.restassured.response;
import io.restassured.mapper.ObjectMapper;
import io.restassured.mapper.ObjectMapperType;
import io.restassured.path.json.JsonPath;
import io.restassured.path.json.config.JsonPathConfig;
import io.restassured.path.xml.XmlPath;
import io.restassured.path.xml.config.XmlPathConfig;
public interface ResponseBodyExtractionOptions extends ResponseBodyData {
    /**
     * Get the body and map it to a Java object. For JSON responses this requires that you have either
     * <ol>
     * <li>Jackson, or</li>
     * <li>Gson</li>
     * </ol>
     * in the classpath or for XML responses it requires JAXB to be in the classpath.
     * <br/>
     * It also requires that the response content-type is either JSON or XML or that a default parser has been set.
     * You can also force a specific object mapper using {@link #as(Class, ObjectMapper)}.
     *
     * @return The object
     */
    <T> T as(Class<T> cls);

    /**
     * Get the body and map it to a Java object using a specific object mapper type. It will use the supplied
     * mapper regardless of the response content-type.
     *
     * @return The object
     */
    <T> T as(Class<T> cls, ObjectMapperType mapperType);

    /**
     * Get the body and map it to a Java object using a specific object mapper. It will use the supplied
     * mapper regardless of the response content-type.
     *
     * @return The object
     */
    <T> T as(Class<T> cls, ObjectMapper mapper);

    /**
     * Get a JsonPath view of the response body. This will let you use the JsonPath syntax to get values from the response.
     * Example:
     * <p>
     * Assume that the GET request (to <tt>http://localhost:8080/lotto</tt>) returns JSON as:
     * <pre>
     * {
     * "lotto":{
     *   "lottoId":5,
     *   "winning-numbers":[2,45,34,23,7,5,3],
     *   "winners":[{
     *     "winnerId":23,
     *     "numbers":[2,45,34,23,3,5]
     *   },{
     *     "winnerId":54,
     *     "numbers":[52,3,12,11,18,22]
     *   }]
     *  }
     * }
     * </pre>
     * </p>
     * You can then make the request and get the winner ids by using JsonPath:
     * <pre>
     * List&lt;Integer&gt; winnerIds = get("/lotto").jsonPath().getList("lotto.winners.winnerId");
     * </pre>
     */
    JsonPath jsonPath();

    /**
     * Get a JsonPath view of the response body using the specified configuration.
     *
     * @param config The configuration to use
     * @see #jsonPath()
     */
    JsonPath jsonPath(JsonPathConfig config);

    /**
     * Get an XmlPath view of the response body. This will let you use the XmlPath syntax to get values from the response.
     * Example:
     * <p>
     * Imagine that a POST request to <tt>http://localhost:8080/greetXML</tt> returns:
     * <pre>
     * &lt;greeting&gt;
     *    &lt;firstName&gt;John&lt;/firstName&gt;
     *    &lt;lastName&gt;Doe&lt;/lastName&gt;
     * &lt;/greeting&gt;
     * </pre>
     * </p>
     * You can then make the request and extract the first name by using XmlPath:
     * <pre>
     * String firstName = get("/greetXML").xmlPath().getString("greeting.firstName");
     * </pre>
     */
    XmlPath xmlPath();

    /**
     * Get an XmlPath view of the response body with a given configuration.
     *
     * @param config The configuration of the XmlPath
     * @see #xmlPath()
     */
    XmlPath xmlPath(XmlPathConfig config);

    /**
     * Get an XmlPath view of the response body but also pass in a {@link XmlPath.CompatibilityMode}.
     * This is mainly useful if you want to parse HTML documents.
     *
     * @param compatibilityMode The compatibility mode to use
     * @see #htmlPath()
     * @see #xmlPath()
     */
    XmlPath xmlPath(XmlPath.CompatibilityMode compatibilityMode);

    /**
     * Get an XmlPath view of the response body that uses {@link XmlPath.CompatibilityMode} <code>HTML</code>.
     * This is mainly useful when parsing HTML documents.
     * <p>
     * Note that this is the same as calling {@link #xmlPath(XmlPath.CompatibilityMode)} with <code>CompatibilityMode</code> <code>HTML</code>.
     * </p>
     */
    XmlPath htmlPath();

    /**
     * Get a value from the response body using the JsonPath or XmlPath syntax. REST Assured will
     * automatically determine whether to use JsonPath or XmlPath based on the content-type of the response.
     * If no content-type is defined then REST Assured will try to look at the "default parser" if defined (RestAssured.defaultParser).
     * <p>
     * Note that you can also supply arguments, for example:
     * <pre>
     * String z = get("/x").path("x.y.%s", "z");
     * </pre>
     *
     * The path and arguments follows the standard <a href="https://docs.oracle.com/javase/8/docs/api/java/util/Formatter.html#syntax">formatting syntax</a> of Java.
     * </p>
     *
     * @param path The json- or xml path
     * @param <T> The return type
     * @param arguments Options arguments
     * @return The value returned by the path
     * @see #jsonPath()
     * @see #xmlPath()
     */
    <T> T path(String path, String... arguments);
}
| apache-2.0 |
berndhopp/guava | android/guava/src/com/google/common/collect/RegularImmutableSet.java | 2802 | /*
* Copyright (C) 2007 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.collect;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.VisibleForTesting;
import org.checkerframework.checker.nullness.compatqual.NullableDecl;
/**
* Implementation of {@link ImmutableSet} with two or more elements.
*
* @author Kevin Bourrillion
*/
@GwtCompatible(serializable = true, emulated = true)
@SuppressWarnings("serial") // uses writeReplace(), not default serialization
final class RegularImmutableSet<E> extends ImmutableSet<E> {
  // Shared empty instance; its null table makes contains() trivially false.
  static final RegularImmutableSet<Object> EMPTY =
      new RegularImmutableSet<>(new Object[0], 0, null, 0, 0);

  // Elements in iteration order; backs createAsList() and copyIntoArray().
  @VisibleForTesting final transient Object[] elements;
  // the same elements in hashed positions (plus nulls)
  @VisibleForTesting final transient Object[] table;
  // 'and' with an int to get a valid table index.
  private final transient int mask;
  // Precomputed sum of element hash codes (see isHashCodeFast()).
  private final transient int hashCode;
  private final transient int size;

  RegularImmutableSet(Object[] elements, int hashCode, Object[] table, int mask, int size) {
    this.elements = elements;
    this.table = table;
    this.mask = mask;
    this.hashCode = hashCode;
    this.size = size;
  }

  @Override
  public boolean contains(@NullableDecl Object target) {
    Object[] table = this.table;
    if (target == null || table == null) {
      return false;
    }
    // Open-addressed linear probe: start at the smeared hash bucket and walk forward
    // (wrapping via mask) until we hit the element or an empty slot. Terminates because
    // the table is never full.
    for (int i = Hashing.smearedHash(target); ; i++) {
      i &= mask;
      Object candidate = table[i];
      if (candidate == null) {
        return false;
      } else if (candidate.equals(target)) {
        return true;
      }
    }
  }

  @Override
  public int size() {
    return size;
  }

  @Override
  public UnmodifiableIterator<E> iterator() {
    return asList().iterator();
  }

  @Override
  int copyIntoArray(Object[] dst, int offset) {
    // Bulk copy of the ordered element array into dst starting at offset.
    System.arraycopy(elements, 0, dst, offset, size);
    return offset + size;
  }

  @Override
  ImmutableList<E> createAsList() {
    return ImmutableList.asImmutableList(elements, size);
  }

  @Override
  boolean isPartialView() {
    // This set owns its arrays; it is never a view over a larger structure.
    return false;
  }

  @Override
  public int hashCode() {
    // Cached at construction time; see isHashCodeFast().
    return hashCode;
  }

  @Override
  boolean isHashCodeFast() {
    return true;
  }
}
| apache-2.0 |
allotria/intellij-community | java/compiler/impl/src/com/intellij/packaging/impl/artifacts/ArtifactVirtualFileListener.java | 4376 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.packaging.impl.artifacts;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.MultiValuesMap;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.vfs.VirtualFile;
import com.intellij.openapi.vfs.newvfs.BulkFileListener;
import com.intellij.openapi.vfs.newvfs.events.VFileEvent;
import com.intellij.openapi.vfs.newvfs.events.VFileMoveEvent;
import com.intellij.openapi.vfs.newvfs.events.VFilePropertyChangeEvent;
import com.intellij.packaging.artifacts.Artifact;
import com.intellij.packaging.artifacts.ModifiableArtifactModel;
import com.intellij.packaging.impl.elements.FileOrDirectoryCopyPackagingElement;
import com.intellij.psi.util.CachedValue;
import com.intellij.psi.util.CachedValueProvider;
import com.intellij.psi.util.CachedValuesManager;
import com.intellij.util.PathUtil;
import org.jetbrains.annotations.NotNull;
import java.util.Collection;
import java.util.List;
final class ArtifactVirtualFileListener implements BulkFileListener {
  // Lazily-computed map from every ancestor path of a file/directory-copy source to the
  // artifacts that reference it; invalidated whenever the artifact model changes.
  private final CachedValue<MultiValuesMap<String, Artifact>> myParentPathsToArtifacts;
  private final ArtifactManagerImpl myArtifactManager;

  ArtifactVirtualFileListener(@NotNull Project project, @NotNull ArtifactManagerImpl artifactManager) {
    myArtifactManager = artifactManager;
    myParentPathsToArtifacts =
      CachedValuesManager.getManager(project).createCachedValue(() -> {
        MultiValuesMap<String, Artifact> result = computeParentPathToArtifactMap();
        // Single dependency on the artifact modification tracker: the cached map is
        // recomputed after any artifact change.
        return CachedValueProvider.Result.createSingleDependency(result, artifactManager.getModificationTracker());
      }, false);
  }

  // Builds the path -> artifacts index: for each file/directory-copy element of every artifact,
  // registers the element's path and all of its ancestor paths, so a rename/move of any
  // ancestor can be mapped back to the affected artifacts.
  private MultiValuesMap<String, Artifact> computeParentPathToArtifactMap() {
    final MultiValuesMap<String, Artifact> result = new MultiValuesMap<>();
    for (final Artifact artifact : myArtifactManager.getArtifacts()) {
      ArtifactUtil.processFileOrDirectoryCopyElements(artifact, new PackagingElementProcessor<>() {
        @Override
        public boolean process(@NotNull FileOrDirectoryCopyPackagingElement<?> element, @NotNull PackagingElementPath pathToElement) {
          String path = element.getFilePath();
          // Register the path and every ancestor, walking up until the root is reached.
          while (path.length() > 0) {
            result.put(path, artifact);
            path = PathUtil.getParentPath(path);
          }
          return true;
        }
      }, myArtifactManager.getResolvingContext(), false);
    }
    return result;
  }

  @Override
  public void after(@NotNull List<? extends @NotNull VFileEvent> events) {
    // React to moves and renames only; both change the path that packaging elements point at.
    for (VFileEvent event : events) {
      if (event instanceof VFileMoveEvent) {
        filePathChanged(((VFileMoveEvent)event).getOldPath(), event.getPath());
      }
      else if (event instanceof VFilePropertyChangeEvent) {
        propertyChanged((VFilePropertyChangeEvent)event);
      }
    }
  }

  // Rewrites the file paths of all copy elements (in all affected artifacts) that start with
  // oldPath so they point at newPath instead, committing the change through a modifiable model.
  private void filePathChanged(@NotNull final String oldPath, @NotNull final String newPath) {
    final Collection<Artifact> artifacts = myParentPathsToArtifacts.getValue().get(oldPath);
    if (artifacts != null) {
      final ModifiableArtifactModel model = myArtifactManager.createModifiableModel();
      for (Artifact artifact : artifacts) {
        final Artifact copy = model.getOrCreateModifiableArtifact(artifact);
        ArtifactUtil.processFileOrDirectoryCopyElements(copy, new PackagingElementProcessor<>() {
          @Override
          public boolean process(@NotNull FileOrDirectoryCopyPackagingElement<?> element, @NotNull PackagingElementPath pathToElement) {
            final String path = element.getFilePath();
            if (FileUtil.startsWith(path, oldPath)) {
              // Replace the oldPath prefix, keeping the path suffix below it.
              element.setFilePath(newPath + path.substring(oldPath.length()));
            }
            return true;
          }
        }, myArtifactManager.getResolvingContext(), false);
      }
      model.commit();
    }
  }

  // A rename is a name-property change: reconstruct the old and new full paths from the parent
  // directory and delegate to filePathChanged.
  private void propertyChanged(@NotNull VFilePropertyChangeEvent event) {
    if (VirtualFile.PROP_NAME.equals(event.getPropertyName())) {
      final VirtualFile parent = event.getFile().getParent();
      if (parent != null) {
        String parentPath = parent.getPath();
        filePathChanged(parentPath + "/" + event.getOldValue(), parentPath + "/" + event.getNewValue());
      }
    }
  }
}
| apache-2.0 |
agileowl/tapestry-5 | tapestry-core/src/test/java/org/apache/tapestry5/integration/app1/services/Reloadable.java | 717 | // Copyright 2010 The Apache Software Foundation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package org.apache.tapestry5.integration.app1.services;
public interface Reloadable
{
    /**
     * @return a status string describing the current state of this service
     */
    String getStatus();
}
| apache-2.0 |
kchilton2/incubator-rya | sail/src/main/java/org/apache/rya/rdftriplestore/inference/ReflexivePropertyVisitor.java | 3520 | package org.apache.rya.rdftriplestore.inference;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import org.apache.rya.api.RdfCloudTripleStoreConfiguration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.query.algebra.StatementPattern;
import org.eclipse.rdf4j.query.algebra.Var;
import org.eclipse.rdf4j.query.algebra.ZeroLengthPath;
/**
* Expands the query tree to account for any relevant reflexive properties
* known to the {@link InferenceEngine}.
*
* A reflexive property is a property for which any node can be inferred to have
* reflexively: If :p is a reflexive property, then <?x :p ?x> is true for all ?x.
*
* Applies to any statement pattern whose predicate is defined (not a variable)
* and is a reflexive property according to the InferenceEngine. If the property
* is reflexive, then the statement pattern should match when the subject equals
* the object. Therefore, replace the statement pattern with a union of itself
* and a ZeroLengthPath between the subject and object. This union is similar to
* the ZeroOrOnePath property path expression in SPARQL: <?x :p? ?y> matches if
* ?x and ?y are connected via :p or if ?x and ?y are equal.
*/
public class ReflexivePropertyVisitor extends AbstractInferVisitor {
    /**
     * Creates a new {@link ReflexivePropertyVisitor}.
     * @param conf The {@link RdfCloudTripleStoreConfiguration}.
     * @param inferenceEngine The InferenceEngine containing the relevant ontology.
     */
    public ReflexivePropertyVisitor(RdfCloudTripleStoreConfiguration conf, InferenceEngine inferenceEngine) {
        super(conf, inferenceEngine);
        include = conf.isInferReflexiveProperty();
    }

    /**
     * Rewrites a {@link StatementPattern} whose predicate is a known reflexive property into a
     * union of the original pattern and a {@link ZeroLengthPath} between its subject and object,
     * so that the pattern also matches when subject and object are equal.
     */
    @Override
    protected void meetSP(StatementPattern node) throws Exception {
        final Var predicate = node.getPredicateVar();
        // A variable predicate cannot be checked against the ontology; leave the pattern alone.
        if (predicate.getValue() == null) {
            return;
        }
        // Nothing to expand unless the ontology declares the predicate reflexive.
        if (!inferenceEngine.isReflexiveProperty((IRI) predicate.getValue())) {
            return;
        }
        final StatementPattern original = node.clone();
        // The reflexive half of the union: subject and object unify with each other,
        // whether they are constants or variables.
        final ZeroLengthPath selfMatch = new ZeroLengthPath(node.getSubjectVar(), node.getObjectVar());
        node.replaceWith(new InferUnion(original, selfMatch));
    }
}
| apache-2.0 |
WillJiang/WillJiang | src/plugins/dojo/src/main/java/org/apache/struts2/dojo/views/velocity/components/DivDirective.java | 1523 | /*
* $Id: DivDirective.java 651946 2008-04-27 13:41:38Z apetrelli $
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.struts2.dojo.views.velocity.components;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.struts2.components.Component;
import org.apache.struts2.dojo.components.Div;
import com.opensymphony.xwork2.util.ValueStack;
/**
* @see Div
*/
public class DivDirective extends DojoAbstractDirective {
    /**
     * @return the name under which the corresponding tag bean is registered
     */
    public String getBeanName() {
        return "div";
    }

    /**
     * Creates the {@link Div} component that renders this directive.
     */
    protected Component getBean(ValueStack stack, HttpServletRequest req, HttpServletResponse res) {
        return new Div(stack, req, res);
    }

    /**
     * @return {@code BLOCK} — this directive wraps body content
     */
    public int getType() {
        return BLOCK;
    }
}
| apache-2.0 |
Deepnekroz/kaa | server/node/src/main/java/org/kaaproject/kaa/server/admin/client/mvp/view/schema/BaseCtlSchemasGrid.java | 6465 | /**
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.admin.client.mvp.view.schema;
import java.util.Comparator;
import org.kaaproject.avro.ui.gwt.client.widget.grid.cell.ActionsButtonCell;
import org.kaaproject.avro.ui.gwt.client.widget.grid.event.RowActionEvent;
import org.kaaproject.kaa.common.dto.BaseSchemaDto;
import org.kaaproject.kaa.server.admin.client.mvp.view.grid.AbstractKaaGrid;
import org.kaaproject.kaa.server.admin.client.mvp.view.grid.KaaRowAction;
import org.kaaproject.kaa.server.admin.client.util.Utils;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.safehtml.shared.SafeHtml;
import com.google.gwt.safehtml.shared.SafeHtmlUtils;
import com.google.gwt.user.cellview.client.Column;
import com.google.gwt.user.cellview.client.DataGrid;
import com.google.gwt.user.cellview.client.Header;
import com.google.gwt.user.cellview.client.SafeHtmlHeader;
public class BaseCtlSchemasGrid<T extends BaseSchemaDto> extends AbstractKaaGrid<T, String> {

    private static final int DEFAULT_PAGE_SIZE = 12;

    // Lazily created "export" column; guarded against double insertion in constructActions().
    private Column<T, T> downloadSchemaColumn;

    public BaseCtlSchemasGrid() {
        super(Unit.PX, false, DEFAULT_PAGE_SIZE);
    }

    /**
     * Builds the data columns (version, name, author, creation date) and returns their
     * accumulated preferred width in pixels.
     */
    @Override
    protected float constructColumnsImpl(DataGrid<T> table) {
        float prefWidth = 0;

        prefWidth += constructStringColumn(table,
                Utils.constants.version(),
                new StringValueProvider<T>() {
                    @Override
                    public String getValue(T item) {
                        return item.getVersion() + "";
                    }
                },
                new Comparator<T>() {
                    @Override
                    public int compare(T o1, T o2) {
                        return o1.compareTo(o2);
                    }
                },
                Boolean.FALSE,
                80);

        prefWidth += constructStringColumn(table,
                Utils.constants.name(),
                new StringValueProvider<T>() {
                    @Override
                    public String getValue(T item) {
                        return item.getName();
                    }
                },
                new Comparator<T>() {
                    @Override
                    public int compare(T o1, T o2) {
                        return o1.getName().compareToIgnoreCase(o2.getName());
                    }
                },
                null,
                true,
                80);

        prefWidth += constructStringColumn(table,
                Utils.constants.author(),
                new StringValueProvider<T>() {
                    @Override
                    public String getValue(T item) {
                        return item.getCreatedUsername();
                    }
                }, 60);

        prefWidth += constructStringColumn(table,
                Utils.constants.dateCreated(),
                new StringValueProvider<T>() {
                    @Override
                    public String getValue(T item) {
                        return Utils.millisecondsToDateString(item.getCreatedTime());
                    }
                }, 40);

        return prefWidth;
    }

    /**
     * Adds the "download record schema" actions column (once) and returns the extra width used.
     */
    @Override
    protected float constructActions(DataGrid<T> table, float prefWidth) {
        float result = 0;
        if (downloadSchemaColumn == null || table.getColumnIndex(downloadSchemaColumn) == -1) {
            Header<SafeHtml> downloadRecordSchemaHeader = new SafeHtmlHeader(
                    SafeHtmlUtils.fromSafeConstant(Utils.constants.downloadRecordSchema()));
            downloadSchemaColumn = constructDownloadSchemaColumn();
            table.addColumn(downloadSchemaColumn, downloadRecordSchemaHeader);
            table.setColumnWidth(downloadSchemaColumn, 60, Unit.PX);
            result += 60;
        }
        return result;
    }

    /**
     * Builds the export actions column with one menu entry per CTL export flavor
     * (shallow, deep, flat, java library).
     */
    protected Column<T, T> constructDownloadSchemaColumn() {
        ActionsButtonCell<T> cell = new ActionsButtonCell<>(Utils.resources.export(), Utils.constants.export());

        // The four entries only differ in label and row action; build them through one helper
        // instead of four copy-pasted anonymous listeners.
        addExportMenuItem(cell, Utils.constants.shallow(), KaaRowAction.CTL_EXPORT_SHALLOW);
        addExportMenuItem(cell, Utils.constants.deep(), KaaRowAction.CTL_EXPORT_DEEP);
        addExportMenuItem(cell, Utils.constants.flat(), KaaRowAction.CTL_EXPORT_FLAT);
        addExportMenuItem(cell, Utils.constants.javaLibrary(), KaaRowAction.CTL_EXPORT_LIBRARY);

        Column<T, T> column = new Column<T, T>(cell) {
            @Override
            public T getValue(T item) {
                return item;
            }
        };
        return column;
    }

    /**
     * Registers one export menu item that fires a {@link RowActionEvent} carrying the row's
     * CTL schema id and the given row action code.
     */
    private void addExportMenuItem(ActionsButtonCell<T> cell, String label, final int rowAction) {
        cell.addMenuItem(label, new ActionsButtonCell.ActionMenuItemListener<T>() {
            @Override
            public void onMenuItemSelected(T value) {
                fireEvent(new RowActionEvent<String>(value.getCtlSchemaId(), rowAction));
            }
        });
    }
}
| apache-2.0 |
throughsky/lywebank | hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftServerRunner.java | 55050 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.thrift;
import static org.apache.hadoop.hbase.util.Bytes.getBytes;
import java.io.IOException;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.TreeMap;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionGroup;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Increment;
import org.apache.hadoop.hbase.client.OperationWithAttributes;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.filter.ParseFilter;
import org.apache.hadoop.hbase.filter.PrefixFilter;
import org.apache.hadoop.hbase.filter.WhileMatchFilter;
import org.apache.hadoop.hbase.thrift.CallQueue.Call;
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
import org.apache.hadoop.hbase.thrift.generated.BatchMutation;
import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
import org.apache.hadoop.hbase.thrift.generated.Hbase;
import org.apache.hadoop.hbase.thrift.generated.IOError;
import org.apache.hadoop.hbase.thrift.generated.IllegalArgument;
import org.apache.hadoop.hbase.thrift.generated.Mutation;
import org.apache.hadoop.hbase.thrift.generated.TCell;
import org.apache.hadoop.hbase.thrift.generated.TIncrement;
import org.apache.hadoop.hbase.thrift.generated.TRegionInfo;
import org.apache.hadoop.hbase.thrift.generated.TRowResult;
import org.apache.hadoop.hbase.thrift.generated.TScan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.thrift.TException;
import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TProtocolFactory;
import org.apache.thrift.server.THsHaServer;
import org.apache.thrift.server.TNonblockingServer;
import org.apache.thrift.server.TServer;
import org.apache.thrift.server.TThreadedSelectorServer;
import org.apache.thrift.transport.TFramedTransport;
import org.apache.thrift.transport.TNonblockingServerSocket;
import org.apache.thrift.transport.TNonblockingServerTransport;
import org.apache.thrift.transport.TServerSocket;
import org.apache.thrift.transport.TServerTransport;
import org.apache.thrift.transport.TTransportFactory;
import com.google.common.base.Joiner;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
/**
* ThriftServerRunner - this class starts up a Thrift server which implements
* the Hbase API specified in the Hbase.thrift IDL file.
*/
@InterfaceAudience.Private
public class ThriftServerRunner implements Runnable {
  private static final Log LOG = LogFactory.getLog(ThriftServerRunner.class);

  // Configuration keys selecting the server implementation and its
  // protocol/transport/network behavior.
  static final String SERVER_TYPE_CONF_KEY =
    "hbase.regionserver.thrift.server.type";
  static final String BIND_CONF_KEY = "hbase.regionserver.thrift.ipaddress";
  static final String COMPACT_CONF_KEY = "hbase.regionserver.thrift.compact";
  static final String FRAMED_CONF_KEY = "hbase.regionserver.thrift.framed";
  static final String PORT_CONF_KEY = "hbase.regionserver.thrift.port";
  static final String COALESCE_INC_KEY = "hbase.regionserver.thrift.coalesceIncrement";

  // Defaults used when the configuration does not supply an address/port.
  private static final String DEFAULT_BIND_ADDR = "0.0.0.0";
  public static final int DEFAULT_LISTEN_PORT = 9090;

  private final int listenPort;
  private Configuration conf;
  // Written by setupServer()/shutdown(); volatile so other threads observe
  // the current server instance.
  volatile TServer tserver;
  private final Hbase.Iface handler;
  private final ThriftMetrics metrics;
/**
 * An enum of server implementation selections. Each value carries the
 * command-line option that selects it, whether the implementation requires
 * the framed transport, the concrete {@code TServer} class, and whether a
 * bind address may be specified for it.
 */
enum ImplType {
  HS_HA("hsha", true, THsHaServer.class, true),
  NONBLOCKING("nonblocking", true, TNonblockingServer.class, true),
  THREAD_POOL("threadpool", false, TBoundedThreadPoolServer.class, true),
  THREADED_SELECTOR(
      "threadedselector", true, TThreadedSelectorServer.class, true);

  /** Implementation used when no option is given. */
  public static final ImplType DEFAULT = THREAD_POOL;

  final String option;
  final boolean isAlwaysFramed;
  final Class<? extends TServer> serverClass;
  final boolean canSpecifyBindIP;

  ImplType(String option, boolean isAlwaysFramed,
      Class<? extends TServer> serverClass, boolean canSpecifyBindIP) {
    this.option = option;
    this.isAlwaysFramed = isAlwaysFramed;
    this.serverClass = serverClass;
    this.canSpecifyBindIP = canSpecifyBindIP;
  }

  /**
   * @return <code>-option</code> so we can get the list of options from
   * {@link #values()}
   */
  @Override
  public String toString() {
    return "-" + option;
  }

  /** @return human-readable description used for command-line help output. */
  String getDescription() {
    StringBuilder sb = new StringBuilder("Use the " +
        serverClass.getSimpleName());
    if (isAlwaysFramed) {
      sb.append(" This implies the framed transport.");
    }
    if (this == DEFAULT) {
      // Leading space keeps this sentence separated from the preceding text
      // (without it the help read "...ServerThis is the default.").
      sb.append(" This is the default.");
    }
    return sb.toString();
  }

  /** @return a mutually-exclusive option group covering every server type. */
  static OptionGroup createOptionGroup() {
    OptionGroup group = new OptionGroup();
    for (ImplType t : values()) {
      group.addOption(new Option(t.option, t.getDescription()));
    }
    return group;
  }

  /**
   * Resolves the configured server type, defaulting to {@link #THREAD_POOL}.
   * @throws AssertionError if the configured value matches no known type
   */
  static ImplType getServerImpl(Configuration conf) {
    String confType = conf.get(SERVER_TYPE_CONF_KEY, THREAD_POOL.option);
    for (ImplType t : values()) {
      if (confType.equals(t.option)) {
        return t;
      }
    }
    throw new AssertionError("Unknown server ImplType.option:" + confType);
  }

  /**
   * Records the command-line-selected server type into the configuration.
   * At most one type option may be present; none selects the default.
   */
  static void setServerImpl(CommandLine cmd, Configuration conf) {
    ImplType chosenType = null;
    int numChosen = 0;
    for (ImplType t : values()) {
      if (cmd.hasOption(t.option)) {
        chosenType = t;
        ++numChosen;
      }
    }
    if (numChosen < 1) {
      LOG.info("Using default thrift server type");
      chosenType = DEFAULT;
    } else if (numChosen > 1) {
      throw new AssertionError("Exactly one option out of " +
          Arrays.toString(values()) + " has to be specified");
    }
    LOG.info("Using thrift server type " + chosenType.option);
    conf.set(SERVER_TYPE_CONF_KEY, chosenType.option);
  }

  public String simpleClassName() {
    return serverClass.getSimpleName();
  }

  /** @return simple class names of implementations that ignore a bind IP. */
  public static List<String> serversThatCannotSpecifyBindIP() {
    List<String> l = new ArrayList<String>();
    for (ImplType t : values()) {
      if (!t.canSpecifyBindIP) {
        l.add(t.simpleClassName());
      }
    }
    return l;
  }
}
/**
 * Creates a runner with a freshly constructed {@link HBaseHandler} built
 * from the given configuration.
 * @throws IOException if the handler cannot be constructed
 */
public ThriftServerRunner(Configuration conf) throws IOException {
  this(conf, new ThriftServerRunner.HBaseHandler(conf));
}
/**
 * Creates a runner around the supplied handler. The handler is wrapped in a
 * metrics proxy so every Thrift call is instrumented.
 */
public ThriftServerRunner(Configuration conf, HBaseHandler handler) {
  // Copy the configuration so later external mutation does not affect us.
  this.conf = HBaseConfiguration.create(conf);
  this.listenPort = conf.getInt(PORT_CONF_KEY, DEFAULT_LISTEN_PORT);
  this.metrics = new ThriftMetrics(conf, ThriftMetrics.ThriftServerType.ONE);
  handler.initMetrics(metrics);
  this.handler = HbaseHandlerMetricsProxy.newInstance(handler, metrics, conf);
}
/**
 * Runs the Thrift server: builds the configured {@code TServer} and then
 * blocks in its serve loop until stopped.
 */
@Override
public void run() {
  try {
    setupServer();
    tserver.serve();
  } catch (Exception e) {
    LOG.fatal("Cannot run ThriftServer", e);
    // Crash the process if the ThriftServer is not running
    System.exit(-1);
  }
}
/** Stops the running Thrift server, if any, and clears the reference. */
public void shutdown() {
  TServer server = tserver;
  if (server == null) {
    return;
  }
  server.stop();
  tserver = null;
}
/**
 * Setting up the thrift TServer: chooses the protocol (binary vs compact),
 * the transport (framed where the implementation requires it), and the
 * configured server implementation, then assigns the result to
 * {@link #tserver}.
 */
private void setupServer() throws Exception {
  // Construct correct ProtocolFactory
  TProtocolFactory protocolFactory;
  if (conf.getBoolean(COMPACT_CONF_KEY, false)) {
    LOG.debug("Using compact protocol");
    protocolFactory = new TCompactProtocol.Factory();
  } else {
    LOG.debug("Using binary protocol");
    protocolFactory = new TBinaryProtocol.Factory();
  }
  Hbase.Processor<Hbase.Iface> processor =
      new Hbase.Processor<Hbase.Iface>(handler);
  ImplType implType = ImplType.getServerImpl(conf);
  // Construct correct TransportFactory
  TTransportFactory transportFactory;
  if (conf.getBoolean(FRAMED_CONF_KEY, false) || implType.isAlwaysFramed) {
    transportFactory = new TFramedTransport.Factory();
    LOG.debug("Using framed transport");
  } else {
    transportFactory = new TTransportFactory();
  }
  // A bind address is only honored by some implementations; fail fast
  // rather than silently ignoring the setting.
  if (conf.get(BIND_CONF_KEY) != null && !implType.canSpecifyBindIP) {
    LOG.error("Server types " + Joiner.on(", ").join(
        ImplType.serversThatCannotSpecifyBindIP()) + " don't support IP " +
        "address binding at the moment. See " +
        "https://issues.apache.org/jira/browse/HBASE-2155 for details.");
    throw new RuntimeException(
        "-" + BIND_CONF_KEY + " not supported with " + implType);
  }
  if (implType == ImplType.HS_HA || implType == ImplType.NONBLOCKING ||
      implType == ImplType.THREADED_SELECTOR) {
    // All non-blocking variants listen through a TNonblockingServerSocket.
    InetAddress listenAddress = getBindAddress(conf);
    TNonblockingServerTransport serverTransport = new TNonblockingServerSocket(
        new InetSocketAddress(listenAddress, listenPort));
    if (implType == ImplType.NONBLOCKING) {
      TNonblockingServer.Args serverArgs =
          new TNonblockingServer.Args(serverTransport);
      serverArgs.processor(processor)
                .transportFactory(transportFactory)
                .protocolFactory(protocolFactory);
      tserver = new TNonblockingServer(serverArgs);
    } else if (implType == ImplType.HS_HA) {
      THsHaServer.Args serverArgs = new THsHaServer.Args(serverTransport);
      // Instrumented queue so worker-pool depth is visible in metrics.
      CallQueue callQueue =
          new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
      ExecutorService executorService = createExecutor(
          callQueue, serverArgs.getWorkerThreads());
      serverArgs.executorService(executorService)
                .processor(processor)
                .transportFactory(transportFactory)
                .protocolFactory(protocolFactory);
      tserver = new THsHaServer(serverArgs);
    } else { // THREADED_SELECTOR
      TThreadedSelectorServer.Args serverArgs =
          new HThreadedSelectorServerArgs(serverTransport, conf);
      CallQueue callQueue =
          new CallQueue(new LinkedBlockingQueue<Call>(), metrics);
      ExecutorService executorService = createExecutor(
          callQueue, serverArgs.getWorkerThreads());
      serverArgs.executorService(executorService)
                .processor(processor)
                .transportFactory(transportFactory)
                .protocolFactory(protocolFactory);
      tserver = new TThreadedSelectorServer(serverArgs);
    }
    LOG.info("starting HBase " + implType.simpleClassName() +
        " server on " + Integer.toString(listenPort));
  } else if (implType == ImplType.THREAD_POOL) {
    // Thread pool server. Get the IP address to bind to.
    InetAddress listenAddress = getBindAddress(conf);
    TServerTransport serverTransport = new TServerSocket(
        new InetSocketAddress(listenAddress, listenPort));
    TBoundedThreadPoolServer.Args serverArgs =
        new TBoundedThreadPoolServer.Args(serverTransport, conf);
    serverArgs.processor(processor)
              .transportFactory(transportFactory)
              .protocolFactory(protocolFactory);
    LOG.info("starting " + ImplType.THREAD_POOL.simpleClassName() + " on "
        + listenAddress + ":" + Integer.toString(listenPort)
        + "; " + serverArgs);
    TBoundedThreadPoolServer tserver =
        new TBoundedThreadPoolServer(serverArgs, metrics);
    this.tserver = tserver;
  } else {
    throw new AssertionError("Unsupported Thrift server implementation: " +
        implType.simpleClassName());
  }
  // A sanity check that we instantiated the right type of server.
  if (tserver.getClass() != implType.serverClass) {
    throw new AssertionError("Expected to create Thrift server class " +
        implType.serverClass.getName() + " but got " +
        tserver.getClass().getName());
  }
  registerFilters(conf);
}
/**
 * Builds a fixed-size pool of daemon worker threads (named
 * "thrift-worker-N") that drain the supplied call queue.
 */
ExecutorService createExecutor(BlockingQueue<Runnable> callQueue,
    int workerThreads) {
  ThreadFactoryBuilder factoryBuilder = new ThreadFactoryBuilder()
      .setDaemon(true)
      .setNameFormat("thrift-worker-%d");
  return new ThreadPoolExecutor(workerThreads, workerThreads,
      Long.MAX_VALUE, TimeUnit.SECONDS, callQueue, factoryBuilder.build());
}
/** Resolves the configured bind address, defaulting to 0.0.0.0. */
private InetAddress getBindAddress(Configuration conf)
    throws UnknownHostException {
  return InetAddress.getByName(conf.get(BIND_CONF_KEY, DEFAULT_BIND_ADDR));
}
/**
 * Pairs an open ResultScanner with the flag saying whether its rows should
 * be returned with columns sorted.
 */
protected static class ResultScannerWrapper {
  private final ResultScanner scanner;
  private final boolean sortColumns;

  public ResultScannerWrapper(ResultScanner resultScanner,
      boolean sortResultColumns) {
    this.scanner = resultScanner;
    this.sortColumns = sortResultColumns;
  }

  /** @return the wrapped scanner */
  public ResultScanner getScanner() {
    return scanner;
  }

  /** @return true if results should be returned with columns sorted */
  public boolean isColumnSorted() {
    return sortColumns;
  }
}
/**
 * The HBaseHandler is a glue object that connects Thrift RPC calls to the
 * HBase client API primarily defined in the HBaseAdmin and HTable objects.
 */
public static class HBaseHandler implements Hbase.Iface {
  protected Configuration conf;
  // Lazily created by getHBaseAdmin(); volatile for double-checked locking.
  protected volatile HBaseAdmin admin = null;
  protected final Log LOG = LogFactory.getLog(this.getClass().getName());

  // nextScannerId and scannerMap are used to manage scanner state;
  // both are guarded by synchronization on this handler.
  protected int nextScannerId = 0;
  protected HashMap<Integer, ResultScannerWrapper> scannerMap = null;
  private ThriftMetrics metrics = null;

  // One HTable cache per handler thread, keyed by table name string.
  private static ThreadLocal<Map<String, HTable>> threadLocalTables =
      new ThreadLocal<Map<String, HTable>>() {
        @Override
        protected Map<String, HTable> initialValue() {
          return new TreeMap<String, HTable>();
        }
      };

  IncrementCoalescer coalescer = null;
/**
 * Returns a list of all the column families for a given htable, each
 * family name suffixed with the column-family delimiter.
 *
 * @param table table whose descriptor is consulted
 * @throws IOException if the table descriptor cannot be fetched
 */
byte[][] getAllColumns(HTable table) throws IOException {
  HColumnDescriptor[] families = table.getTableDescriptor().getColumnFamilies();
  byte[][] columns = new byte[families.length][];
  int idx = 0;
  for (HColumnDescriptor family : families) {
    columns[idx++] = Bytes.add(family.getName(),
        KeyValue.COLUMN_FAMILY_DELIM_ARRAY);
  }
  return columns;
}
/**
 * Creates and returns an HTable instance from a given table name, caching
 * one instance per table per thread (instances are not closed here).
 *
 * @param tableName
 *          name of table
 * @return HTable object
 * @throws IOException if the table cannot be opened
 * @throws IOError
 */
public HTable getTable(final byte[] tableName) throws
    IOException {
  // Decode via HBase's canonical UTF-8 conversion instead of
  // new String(byte[]), which depends on the platform default charset.
  String table = Bytes.toString(tableName);
  Map<String, HTable> tables = threadLocalTables.get();
  if (!tables.containsKey(table)) {
    tables.put(table, new HTable(conf, tableName));
  }
  return tables.get(table);
}
/** ByteBuffer overload; unwraps the buffer and delegates to {@link #getTable(byte[])}. */
public HTable getTable(final ByteBuffer tableName) throws IOException {
  return getTable(getBytes(tableName));
}
/**
 * Assigns a unique ID to the scanner and adds the mapping to an internal
 * hash-map.
 *
 * @param scanner scanner to register
 * @param sortColumns whether results should be returned column-sorted
 * @return integer scanner id
 */
protected synchronized int addScanner(ResultScanner scanner, boolean sortColumns) {
  int id = nextScannerId++;
  scannerMap.put(id, new ResultScannerWrapper(scanner, sortColumns));
  return id;
}
/**
 * Returns the scanner associated with the specified ID.
 *
 * @param id scanner id previously returned by {@link #addScanner}
 * @return a Scanner, or null if ID was invalid.
 */
protected synchronized ResultScannerWrapper getScanner(int id) {
  return scannerMap.get(id);
}
/**
 * Removes the scanner associated with the specified ID from the internal
 * id->scanner hash-map.
 *
 * @param id scanner id previously returned by {@link #addScanner}
 * @return a Scanner, or null if ID was invalid.
 */
protected synchronized ResultScannerWrapper removeScanner(int id) {
  return scannerMap.remove(id);
}
/**
 * Constructs an HBaseHandler object using a default configuration.
 * @throws IOException
 */
protected HBaseHandler()
throws IOException {
  this(HBaseConfiguration.create());
}
/** Constructs an HBaseHandler around the given configuration. */
protected HBaseHandler(final Configuration c) throws IOException {
  this.conf = c;
  scannerMap = new HashMap<Integer, ResultScannerWrapper>();
  // Batches increments issued through increment-coalescing RPCs.
  this.coalescer = new IncrementCoalescer(this);
}
/**
 * Obtain HBaseAdmin. Creates the instance if it is not already created.
 * Uses double-checked locking on the volatile {@code admin} field so the
 * instance is built at most once across threads.
 */
private HBaseAdmin getHBaseAdmin() throws IOException {
  if (admin == null) {
    synchronized (this) {
      if (admin == null) {
        admin = new HBaseAdmin(conf);
      }
    }
  }
  return admin;
}
/** Enables the named table, translating IO failures into Thrift IOError. */
@Override
public void enableTable(ByteBuffer tableName) throws IOError {
  byte[] name = getBytes(tableName);
  try {
    getHBaseAdmin().enableTable(name);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Disables the named table, translating IO failures into Thrift IOError. */
@Override
public void disableTable(ByteBuffer tableName) throws IOError {
  byte[] name = getBytes(tableName);
  try {
    getHBaseAdmin().disableTable(name);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** @return whether the named table is currently enabled */
@Override
public boolean isTableEnabled(ByteBuffer tableName) throws IOError {
  byte[] name = getBytes(tableName);
  try {
    return HTable.isTableEnabled(this.conf, name);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Requests a minor compaction of the given table or region. */
@Override
public void compact(ByteBuffer tableNameOrRegionName) throws IOError {
  try{
    getHBaseAdmin().compact(getBytes(tableNameOrRegionName));
  } catch (InterruptedException e) {
    // Log (matching majorCompact) and restore the interrupt status instead
    // of swallowing it, so callers up the stack can observe the interrupt.
    LOG.warn(e.getMessage(), e);
    Thread.currentThread().interrupt();
    throw new IOError(e.getMessage());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Requests a major compaction of the given table or region. */
@Override
public void majorCompact(ByteBuffer tableNameOrRegionName) throws IOError {
  try{
    getHBaseAdmin().majorCompact(getBytes(tableNameOrRegionName));
  } catch (InterruptedException e) {
    LOG.warn(e.getMessage(), e);
    // Restore the interrupt status so the interruption is not lost.
    Thread.currentThread().interrupt();
    throw new IOError(e.getMessage());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** @return names of all user tables, each wrapped in a ByteBuffer */
@Override
public List<ByteBuffer> getTableNames() throws IOError {
  try {
    TableName[] tableNames = this.getHBaseAdmin().listTableNames();
    ArrayList<ByteBuffer> list = new ArrayList<ByteBuffer>(tableNames.length);
    for (TableName name : tableNames) {
      list.add(ByteBuffer.wrap(name.getName()));
    }
    return list;
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/**
 * @return the list of regions in the given table, or an empty list if the table does not exist
 */
@Override
public List<TRegionInfo> getTableRegions(ByteBuffer tableName)
throws IOError {
  try {
    HTable table;
    try {
      table = getTable(tableName);
    } catch (TableNotFoundException ex) {
      // Opening the table failed because it does not exist: no regions.
      return new ArrayList<TRegionInfo>();
    }
    Map<HRegionInfo, ServerName> regionLocations =
        table.getRegionLocations();
    List<TRegionInfo> results = new ArrayList<TRegionInfo>();
    for (Map.Entry<HRegionInfo, ServerName> entry :
        regionLocations.entrySet()) {
      HRegionInfo info = entry.getKey();
      ServerName serverName = entry.getValue();
      // Translate each HRegionInfo + its location into the Thrift struct.
      TRegionInfo region = new TRegionInfo();
      region.serverName = ByteBuffer.wrap(
          Bytes.toBytes(serverName.getHostname()));
      region.port = serverName.getPort();
      region.startKey = ByteBuffer.wrap(info.getStartKey());
      region.endKey = ByteBuffer.wrap(info.getEndKey());
      region.id = info.getRegionId();
      region.name = ByteBuffer.wrap(info.getRegionName());
      region.version = info.getVersion();
      results.add(region);
    }
    return results;
  } catch (TableNotFoundException e) {
    // Return empty list for non-existing table
    // NOTE(review): this path returns an immutable empty list while the
    // path above returns a mutable ArrayList — confirm callers never mutate.
    return Collections.emptyList();
  } catch (IOException e){
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/**
 * Fetches cells for one row/column. The column may be "family" or
 * "family:qualifier"; anything else is rejected.
 */
@Deprecated
@Override
public List<TCell> get(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  switch (famAndQf.length) {
    case 1:
      return get(tableName, row, famAndQf[0], null, attributes);
    case 2:
      return get(tableName, row, famAndQf[0], famAndQf[1], attributes);
    default:
      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
  }
}
/**
 * Note: this internal interface is slightly different from public APIs in
 * regard to handling of the qualifier. Here we differ from the public Java
 * API in that null != byte[0]: qual == null requests the entire column
 * family. The caller ({@link #get(ByteBuffer, ByteBuffer, ByteBuffer, Map)})
 * interface IS consistent in that the column is parsed like normal.
 */
protected List<TCell> get(ByteBuffer tableName,
    ByteBuffer row,
    byte[] family,
    byte[] qualifier,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    HTable table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (qualifier != null) {
      get.addColumn(family, qualifier);
    } else {
      get.addFamily(family);
    }
    return ThriftUtilities.cellFromHBase(table.get(get).rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Fetches up to numVersions cells for one row/column spec. */
@Deprecated
@Override
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  switch (famAndQf.length) {
    case 1:
      return getVer(tableName, row, famAndQf[0], null, numVersions, attributes);
    case 2:
      return getVer(tableName, row, famAndQf[0], famAndQf[1], numVersions, attributes);
    default:
      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
  }
}
/**
 * Note: this public interface is slightly different from public Java APIs
 * in regard to handling of the qualifier: null != byte[0], and qual == null
 * requests the entire column family. To access the whole family via the
 * Thrift API, use {@link #getVer(ByteBuffer, ByteBuffer, ByteBuffer, int, Map)}
 * with a {@code column} value that lacks a {@code ':'}.
 */
public List<TCell> getVer(ByteBuffer tableName, ByteBuffer row, byte[] family,
    byte[] qualifier, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    HTable table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (qualifier != null) {
      get.addColumn(family, qualifier);
    } else {
      get.addFamily(family);
    }
    get.setMaxVersions(numVersions);
    return ThriftUtilities.cellFromHBase(table.get(get).rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Fetches up to numVersions cells at or before the given timestamp. */
@Deprecated
@Override
public List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  switch (famAndQf.length) {
    case 1:
      return getVerTs(tableName, row, famAndQf[0], null, timestamp, numVersions, attributes);
    case 2:
      return getVerTs(tableName, row, famAndQf[0], famAndQf[1], timestamp, numVersions,
          attributes);
    default:
      throw new IllegalArgumentException("Invalid familyAndQualifier provided.");
  }
}
/**
 * Note: this internal interface differs from public APIs in its qualifier
 * handling: null != byte[0], and qual == null requests the entire column
 * family. The caller
 * ({@link #getVerTs(ByteBuffer, ByteBuffer, ByteBuffer, long, int, Map)})
 * interface IS consistent in that the column is parsed like normal.
 */
protected List<TCell> getVerTs(ByteBuffer tableName, ByteBuffer row, byte[] family,
    byte[] qualifier, long timestamp, int numVersions, Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  try {
    HTable table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    if (qualifier != null) {
      get.addColumn(family, qualifier);
    } else {
      get.addFamily(family);
    }
    // Restrict to cells at or before the requested timestamp.
    get.setTimeRange(0, timestamp);
    get.setMaxVersions(numVersions);
    return ThriftUtilities.cellFromHBase(table.get(get).rawCells());
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Fetches an entire row at the latest timestamp. */
@Override
public List<TRowResult> getRow(ByteBuffer tableName, ByteBuffer row,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  return getRowWithColumnsTs(tableName, row, null,
                             HConstants.LATEST_TIMESTAMP,
                             attributes);
}
/** Fetches the requested columns of a row at the latest timestamp. */
@Override
public List<TRowResult> getRowWithColumns(ByteBuffer tableName,
                                          ByteBuffer row,
    List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  return getRowWithColumnsTs(tableName, row, columns,
                             HConstants.LATEST_TIMESTAMP,
                             attributes);
}
/** Fetches an entire row at or before the given timestamp. */
@Override
public List<TRowResult> getRowTs(ByteBuffer tableName, ByteBuffer row,
    long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  return getRowWithColumnsTs(tableName, row, null,
                             timestamp, attributes);
}
/**
 * Fetches a row, optionally restricted to the given columns, at or before
 * the given timestamp. A null column list means "fetch the whole row".
 */
@Override
public List<TRowResult> getRowWithColumnsTs(
    ByteBuffer tableName, ByteBuffer row, List<ByteBuffer> columns,
    long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    HTable table = getTable(tableName);
    Get get = new Get(getBytes(row));
    addAttributes(get, attributes);
    // Previously the null-columns case duplicated this whole sequence;
    // now only the column restriction is conditional.
    if (columns != null) {
      for (ByteBuffer column : columns) {
        byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
        if (famAndQf.length == 1) {
          get.addFamily(famAndQf[0]);
        } else {
          get.addColumn(famAndQf[0], famAndQf[1]);
        }
      }
    }
    get.setTimeRange(0, timestamp);
    Result result = table.get(get);
    return ThriftUtilities.rowResultFromHBase(result);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Fetches multiple whole rows at the latest timestamp. */
@Override
public List<TRowResult> getRows(ByteBuffer tableName,
                                List<ByteBuffer> rows,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  return getRowsWithColumnsTs(tableName, rows, null,
                              HConstants.LATEST_TIMESTAMP,
                              attributes);
}
/** Fetches the requested columns of multiple rows at the latest timestamp. */
@Override
public List<TRowResult> getRowsWithColumns(ByteBuffer tableName,
                                           List<ByteBuffer> rows,
    List<ByteBuffer> columns,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  return getRowsWithColumnsTs(tableName, rows, columns,
                              HConstants.LATEST_TIMESTAMP,
                              attributes);
}
/** Fetches multiple whole rows at or before the given timestamp. */
@Override
public List<TRowResult> getRowsTs(ByteBuffer tableName,
                                  List<ByteBuffer> rows,
    long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  return getRowsWithColumnsTs(tableName, rows, null,
                              timestamp, attributes);
}
/**
 * Fetches multiple rows in one batched Get, optionally restricted to the
 * given columns, at or before the given timestamp. A null column list
 * fetches whole rows.
 */
@Override
public List<TRowResult> getRowsWithColumnsTs(ByteBuffer tableName,
    List<ByteBuffer> rows,
    List<ByteBuffer> columns, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    HTable table = getTable(tableName);
    if (metrics != null) {
      metrics.incNumRowKeysInBatchGet(rows.size());
    }
    List<Get> gets = new ArrayList<Get>(rows.size());
    for (ByteBuffer row : rows) {
      Get get = new Get(getBytes(row));
      addAttributes(get, attributes);
      if (columns != null) {
        for (ByteBuffer column : columns) {
          byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
          if (famAndQf.length == 1) {
            get.addFamily(famAndQf[0]);
          } else {
            get.addColumn(famAndQf[0], famAndQf[1]);
          }
        }
      }
      get.setTimeRange(0, timestamp);
      gets.add(get);
    }
    return ThriftUtilities.rowResultFromHBase(table.get(gets));
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Deletes all versions of a column (or whole family) at the latest timestamp. */
@Override
public void deleteAll(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError {
  deleteAllTs(tableName, row, column, HConstants.LATEST_TIMESTAMP,
              attributes);
}
/**
 * Deletes all versions at or before the given timestamp. A family-only
 * column spec deletes the whole family; "family:qualifier" deletes that
 * single column.
 */
@Override
public void deleteAllTs(ByteBuffer tableName,
    ByteBuffer row,
    ByteBuffer column,
    long timestamp, Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    Delete delete = new Delete(getBytes(row));
    addAttributes(delete, attributes);
    byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
    if (famAndQf.length == 1) {
      delete.deleteFamily(famAndQf[0], timestamp);
    } else {
      delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
    }
    getTable(tableName).delete(delete);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Deletes an entire row at the latest timestamp. */
@Override
public void deleteAllRow(
    ByteBuffer tableName, ByteBuffer row,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  deleteAllRowTs(tableName, row, HConstants.LATEST_TIMESTAMP, attributes);
}
/** Deletes an entire row at or before the given timestamp. */
@Override
public void deleteAllRowTs(
    ByteBuffer tableName, ByteBuffer row, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
  try {
    Delete delete = new Delete(getBytes(row), timestamp);
    addAttributes(delete, attributes);
    getTable(tableName).delete(delete);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/**
 * Creates a table with the given column families.
 *
 * @throws AlreadyExists if a table of that name already exists
 * @throws IllegalArgument if a column descriptor is invalid
 * @throws IOError on other failures
 */
@Override
public void createTable(ByteBuffer in_tableName,
    List<ColumnDescriptor> columnFamilies) throws IOError,
    IllegalArgument, AlreadyExists {
  byte[] tableName = getBytes(in_tableName);
  try {
    HBaseAdmin hbaseAdmin = getHBaseAdmin();
    if (hbaseAdmin.tableExists(tableName)) {
      throw new AlreadyExists("table name already in use");
    }
    HTableDescriptor desc = new HTableDescriptor(TableName.valueOf(tableName));
    for (ColumnDescriptor col : columnFamilies) {
      desc.addFamily(ThriftUtilities.colDescFromThrift(col));
    }
    hbaseAdmin.createTable(desc);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  } catch (IllegalArgumentException e) {
    LOG.warn(e.getMessage(), e);
    throw new IllegalArgument(e.getMessage());
  }
}
/** Drops the named table; fails if it does not exist. */
@Override
public void deleteTable(ByteBuffer in_tableName) throws IOError {
  byte[] tableName = getBytes(in_tableName);
  if (LOG.isDebugEnabled()) {
    LOG.debug("deleteTable: table=" + Bytes.toString(tableName));
  }
  try {
    HBaseAdmin hbaseAdmin = getHBaseAdmin();
    if (!hbaseAdmin.tableExists(tableName)) {
      throw new IOException("table does not exist");
    }
    hbaseAdmin.deleteTable(tableName);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  }
}
/** Applies a list of mutations to one row at the latest timestamp. */
@Override
public void mutateRow(ByteBuffer tableName, ByteBuffer row,
    List<Mutation> mutations, Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, IllegalArgument {
  mutateRowTs(tableName, row, mutations, HConstants.LATEST_TIMESTAMP,
              attributes);
}
/**
 * Applies a list of mutations to a single row at the given timestamp.
 * Mutations are folded into one batched Delete and one batched Put; the
 * Delete is sent before the Put.
 *
 * NOTE(review): delete-then-put is two separate RPCs, not atomic — a
 * concurrent reader may observe the intermediate state; confirm intended.
 */
@Override
public void mutateRowTs(ByteBuffer tableName, ByteBuffer row,
    List<Mutation> mutations, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, IllegalArgument {
  HTable table = null;
  try {
    table = getTable(tableName);
    Put put = new Put(getBytes(row), timestamp);
    addAttributes(put, attributes);
    Delete delete = new Delete(getBytes(row));
    addAttributes(delete, attributes);
    if (metrics != null) {
      metrics.incNumRowKeysInBatchMutate(mutations.size());
    }
    // I apologize for all this mess :)
    for (Mutation m : mutations) {
      byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
      if (m.isDelete) {
        // Family-only spec deletes the whole family at the timestamp;
        // otherwise delete all versions of the single column up to it.
        if (famAndQf.length == 1) {
          delete.deleteFamily(famAndQf[0], timestamp);
        } else {
          delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
        }
        delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
            : Durability.SKIP_WAL);
      } else {
        if(famAndQf.length == 1) {
          // Puts require an explicit qualifier; family-only specs are
          // only warned about, not applied.
          LOG.warn("No column qualifier specified. Delete is the only mutation supported "
              + "over the whole column family.");
        } else {
          put.addImmutable(famAndQf[0], famAndQf[1],
              m.value != null ? getBytes(m.value)
                  : HConstants.EMPTY_BYTE_ARRAY);
        }
        put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
      }
    }
    if (!delete.isEmpty())
      table.delete(delete);
    if (!put.isEmpty())
      table.put(put);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  } catch (IllegalArgumentException e) {
    LOG.warn(e.getMessage(), e);
    throw new IllegalArgument(e.getMessage());
  }
}
/** Applies batched row mutations at the latest timestamp. */
@Override
public void mutateRows(ByteBuffer tableName, List<BatchMutation> rowBatches,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, IllegalArgument, TException {
  mutateRowsTs(tableName, rowBatches, HConstants.LATEST_TIMESTAMP, attributes);
}
/**
 * Applies batched mutations to multiple rows at the given timestamp.
 * Each batch becomes one Delete and one Put; all Puts are sent before all
 * Deletes (two separate batched RPCs, not atomic).
 */
@Override
public void mutateRowsTs(
    ByteBuffer tableName, List<BatchMutation> rowBatches, long timestamp,
    Map<ByteBuffer, ByteBuffer> attributes)
    throws IOError, IllegalArgument, TException {
  List<Put> puts = new ArrayList<Put>();
  List<Delete> deletes = new ArrayList<Delete>();

  for (BatchMutation batch : rowBatches) {
    byte[] row = getBytes(batch.row);
    List<Mutation> mutations = batch.mutations;
    Delete delete = new Delete(row);
    addAttributes(delete, attributes);
    Put put = new Put(row, timestamp);
    addAttributes(put, attributes);
    for (Mutation m : mutations) {
      byte[][] famAndQf = KeyValue.parseColumn(getBytes(m.column));
      if (m.isDelete) {
        // no qualifier, family only.
        if (famAndQf.length == 1) {
          delete.deleteFamily(famAndQf[0], timestamp);
        } else {
          delete.deleteColumns(famAndQf[0], famAndQf[1], timestamp);
        }
        delete.setDurability(m.writeToWAL ? Durability.SYNC_WAL
            : Durability.SKIP_WAL);
      } else {
        if (famAndQf.length == 1) {
          // NOTE(review): a family-only Put spec logs this warning and then
          // falls into the throw below — confirm warn-then-throw is intended
          // (mutateRowTs only warns for the same input).
          LOG.warn("No column qualifier specified. Delete is the only mutation supported "
              + "over the whole column family.");
        }
        if (famAndQf.length == 2) {
          put.addImmutable(famAndQf[0], famAndQf[1],
              m.value != null ? getBytes(m.value)
                  : HConstants.EMPTY_BYTE_ARRAY);
        } else {
          throw new IllegalArgumentException("Invalid famAndQf provided.");
        }
        put.setDurability(m.writeToWAL ? Durability.SYNC_WAL : Durability.SKIP_WAL);
      }
    }
    if (!delete.isEmpty())
      deletes.add(delete);
    if (!put.isEmpty())
      puts.add(put);
  }

  HTable table = null;
  try {
    table = getTable(tableName);
    if (!puts.isEmpty())
      table.put(puts);
    if (!deletes.isEmpty())
      table.delete(deletes);
  } catch (IOException e) {
    LOG.warn(e.getMessage(), e);
    throw new IOError(e.getMessage());
  } catch (IllegalArgumentException e) {
    LOG.warn(e.getMessage(), e);
    throw new IllegalArgument(e.getMessage());
  }
}
/**
 * Atomically increments a column. A family-only column spec increments
 * the column with an empty qualifier.
 */
@Deprecated
@Override
public long atomicIncrement(
    ByteBuffer tableName, ByteBuffer row, ByteBuffer column, long amount)
    throws IOError, IllegalArgument, TException {
  byte[][] famAndQf = KeyValue.parseColumn(getBytes(column));
  byte[] qualifier =
      (famAndQf.length == 1) ? HConstants.EMPTY_BYTE_ARRAY : famAndQf[1];
  return atomicIncrement(tableName, row, famAndQf[0], qualifier, amount);
}
protected long atomicIncrement(ByteBuffer tableName, ByteBuffer row,
byte [] family, byte [] qualifier, long amount)
throws IOError, IllegalArgument, TException {
HTable table;
try {
table = getTable(tableName);
return table.incrementColumnValue(
getBytes(row), family, qualifier, amount);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(e.getMessage());
}
}
@Override
public void scannerClose(int id) throws IOError, IllegalArgument {
LOG.debug("scannerClose: id=" + id);
ResultScannerWrapper resultScannerWrapper = getScanner(id);
if (resultScannerWrapper == null) {
String message = "scanner ID is invalid";
LOG.warn(message);
throw new IllegalArgument("scanner ID is invalid");
}
resultScannerWrapper.getScanner().close();
removeScanner(id);
}
@Override
public List<TRowResult> scannerGetList(int id,int nbRows)
throws IllegalArgument, IOError {
LOG.debug("scannerGetList: id=" + id);
ResultScannerWrapper resultScannerWrapper = getScanner(id);
if (null == resultScannerWrapper) {
String message = "scanner ID is invalid";
LOG.warn(message);
throw new IllegalArgument("scanner ID is invalid");
}
Result [] results = null;
try {
results = resultScannerWrapper.getScanner().next(nbRows);
if (null == results) {
return new ArrayList<TRowResult>();
}
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(e.getMessage());
}
return ThriftUtilities.rowResultFromHBase(results, resultScannerWrapper.isColumnSorted());
}
@Override
public List<TRowResult> scannerGet(int id) throws IllegalArgument, IOError {
return scannerGetList(id,1);
}
  /**
   * Opens a scanner configured from the Thrift {@code TScan} (start/stop row,
   * time range, caching, batch size, column list, optional filter string),
   * registers it and returns its scanner id.
   */
  @Override
  public int scannerOpenWithScan(ByteBuffer tableName, TScan tScan,
      Map<ByteBuffer, ByteBuffer> attributes)
      throws IOError {
    try {
      HTable table = getTable(tableName);
      Scan scan = new Scan();
      addAttributes(scan, attributes);
      if (tScan.isSetStartRow()) {
        scan.setStartRow(tScan.getStartRow());
      }
      if (tScan.isSetStopRow()) {
        scan.setStopRow(tScan.getStopRow());
      }
      if (tScan.isSetTimestamp()) {
        // Time range [0, timestamp): only cells strictly older than the stamp.
        scan.setTimeRange(0, tScan.getTimestamp());
      }
      if (tScan.isSetCaching()) {
        scan.setCaching(tScan.getCaching());
      }
      if (tScan.isSetBatchSize()) {
        scan.setBatch(tScan.getBatchSize());
      }
      if (tScan.isSetColumns() && tScan.getColumns().size() != 0) {
        for(ByteBuffer column : tScan.getColumns()) {
          // "family" selects the whole family, "family:qualifier" one column.
          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
          if(famQf.length == 1) {
            scan.addFamily(famQf[0]);
          } else {
            scan.addColumn(famQf[0], famQf[1]);
          }
        }
      }
      if (tScan.isSetFilterString()) {
        ParseFilter parseFilter = new ParseFilter();
        scan.setFilter(
            parseFilter.parseFilterString(tScan.getFilterString()));
      }
      return addScanner(table.getScanner(scan), tScan.sortColumns);
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /**
   * Opens an unbounded scanner starting at {@code startRow} over the given
   * packed column names and returns its scanner id.
   */
  @Override
  public int scannerOpen(ByteBuffer tableName, ByteBuffer startRow,
      List<ByteBuffer> columns,
      Map<ByteBuffer, ByteBuffer> attributes) throws IOError {
    try {
      HTable table = getTable(tableName);
      Scan scan = new Scan(getBytes(startRow));
      addAttributes(scan, attributes);
      if(columns != null && columns.size() != 0) {
        for(ByteBuffer column : columns) {
          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
          if(famQf.length == 1) {
            scan.addFamily(famQf[0]);
          } else {
            scan.addColumn(famQf[0], famQf[1]);
          }
        }
      }
      // Scanner ids returned by the non-TScan overloads never sort columns.
      return addScanner(table.getScanner(scan), false);
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /**
   * Opens a scanner over the half-open row interval [startRow, stopRow) and
   * returns its scanner id.
   */
  @Override
  public int scannerOpenWithStop(ByteBuffer tableName, ByteBuffer startRow,
      ByteBuffer stopRow, List<ByteBuffer> columns,
      Map<ByteBuffer, ByteBuffer> attributes)
      throws IOError, TException {
    try {
      HTable table = getTable(tableName);
      Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
      addAttributes(scan, attributes);
      if(columns != null && columns.size() != 0) {
        for(ByteBuffer column : columns) {
          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
          if(famQf.length == 1) {
            scan.addFamily(famQf[0]);
          } else {
            scan.addColumn(famQf[0], famQf[1]);
          }
        }
      }
      return addScanner(table.getScanner(scan), false);
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /**
   * Opens a scanner that starts at {@code startAndPrefix} and keeps returning
   * rows only while they share that prefix (WhileMatchFilter + PrefixFilter
   * stops the scan at the first non-matching row).
   */
  @Override
  public int scannerOpenWithPrefix(ByteBuffer tableName,
                                   ByteBuffer startAndPrefix,
                                   List<ByteBuffer> columns,
                                   Map<ByteBuffer, ByteBuffer> attributes)
      throws IOError, TException {
    try {
      HTable table = getTable(tableName);
      Scan scan = new Scan(getBytes(startAndPrefix));
      addAttributes(scan, attributes);
      Filter f = new WhileMatchFilter(
          new PrefixFilter(getBytes(startAndPrefix)));
      scan.setFilter(f);
      if (columns != null && columns.size() != 0) {
        for(ByteBuffer column : columns) {
          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
          if(famQf.length == 1) {
            scan.addFamily(famQf[0]);
          } else {
            scan.addColumn(famQf[0], famQf[1]);
          }
        }
      }
      return addScanner(table.getScanner(scan), false);
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /**
   * Opens an unbounded scanner starting at {@code startRow}, restricted to
   * cells with timestamps in [0, timestamp), and returns its scanner id.
   */
  @Override
  public int scannerOpenTs(ByteBuffer tableName, ByteBuffer startRow,
      List<ByteBuffer> columns, long timestamp,
      Map<ByteBuffer, ByteBuffer> attributes) throws IOError, TException {
    try {
      HTable table = getTable(tableName);
      Scan scan = new Scan(getBytes(startRow));
      addAttributes(scan, attributes);
      scan.setTimeRange(0, timestamp);
      if (columns != null && columns.size() != 0) {
        for (ByteBuffer column : columns) {
          byte [][] famQf = KeyValue.parseColumn(getBytes(column));
          if(famQf.length == 1) {
            scan.addFamily(famQf[0]);
          } else {
            scan.addColumn(famQf[0], famQf[1]);
          }
        }
      }
      return addScanner(table.getScanner(scan), false);
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
@Override
public int scannerOpenWithStopTs(ByteBuffer tableName, ByteBuffer startRow,
ByteBuffer stopRow, List<ByteBuffer> columns, long timestamp,
Map<ByteBuffer, ByteBuffer> attributes)
throws IOError, TException {
try {
HTable table = getTable(tableName);
Scan scan = new Scan(getBytes(startRow), getBytes(stopRow));
addAttributes(scan, attributes);
scan.setTimeRange(0, timestamp);
if (columns != null && columns.size() != 0) {
for (ByteBuffer column : columns) {
byte [][] famQf = KeyValue.parseColumn(getBytes(column));
if(famQf.length == 1) {
scan.addFamily(famQf[0]);
} else {
scan.addColumn(famQf[0], famQf[1]);
}
}
}
scan.setTimeRange(0, timestamp);
return addScanner(table.getScanner(scan), false);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(e.getMessage());
}
}
@Override
public Map<ByteBuffer, ColumnDescriptor> getColumnDescriptors(
ByteBuffer tableName) throws IOError, TException {
try {
TreeMap<ByteBuffer, ColumnDescriptor> columns =
new TreeMap<ByteBuffer, ColumnDescriptor>();
HTable table = getTable(tableName);
HTableDescriptor desc = table.getTableDescriptor();
for (HColumnDescriptor e : desc.getFamilies()) {
ColumnDescriptor col = ThriftUtilities.colDescFromHbase(e);
columns.put(col.name, col);
}
return columns;
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(e.getMessage());
}
}
  /**
   * Returns the cells of the row matching {@code row} exactly or, failing
   * that, of the closest row sorting before it, restricted to one family.
   */
  @Override
  public List<TCell> getRowOrBefore(ByteBuffer tableName, ByteBuffer row,
      ByteBuffer family) throws IOError {
    try {
      HTable table = getTable(getBytes(tableName));
      Result result = table.getRowOrBefore(getBytes(row), getBytes(family));
      // NOTE(review): result may be null when no row sorts at or before
      // 'row'; rawCells() would then NPE — confirm upstream guarantees.
      return ThriftUtilities.cellFromHBase(result.rawCells());
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /**
   * Looks up, in the hbase:meta table, the region containing (or nearest
   * before) {@code searchRow} and returns its keys, id, name, version and —
   * when assigned — the hosting server and port.
   */
  @Override
  public TRegionInfo getRegionInfo(ByteBuffer searchRow) throws IOError {
    try {
      HTable table = getTable(TableName.META_TABLE_NAME.getName());
      byte[] row = getBytes(searchRow);
      // Meta rows are keyed by region start key; take the closest row at or
      // before the search row.
      Result startRowResult = table.getRowOrBefore(
        row, HConstants.CATALOG_FAMILY);
      if (startRowResult == null) {
        throw new IOException("Cannot find row in "+ TableName.META_TABLE_NAME+", row="
                              + Bytes.toStringBinary(row));
      }
      // find region start and end keys
      HRegionInfo regionInfo = HRegionInfo.getHRegionInfo(startRowResult);
      if (regionInfo == null) {
        throw new IOException("HRegionInfo REGIONINFO was null or " +
                              " empty in Meta for row="
                              + Bytes.toStringBinary(row));
      }
      TRegionInfo region = new TRegionInfo();
      region.setStartKey(regionInfo.getStartKey());
      region.setEndKey(regionInfo.getEndKey());
      region.id = regionInfo.getRegionId();
      region.setName(regionInfo.getRegionName());
      region.version = regionInfo.getVersion();
      // find region assignment to server
      ServerName serverName = HRegionInfo.getServerName(startRowResult);
      if (serverName != null) {
        region.setServerName(Bytes.toBytes(serverName.getHostname()));
        region.port = serverName.getPort();
      }
      return region;
    } catch (IOException e) {
      LOG.warn(e.getMessage(), e);
      throw new IOError(e.getMessage());
    }
  }
  /** Stores the Thrift metrics sink used by this handler instance. */
  private void initMetrics(ThriftMetrics metrics) {
    this.metrics = metrics;
  }
@Override
public void increment(TIncrement tincrement) throws IOError, TException {
if (tincrement.getRow().length == 0 || tincrement.getTable().length == 0) {
throw new TException("Must supply a table and a row key; can't increment");
}
if (conf.getBoolean(COALESCE_INC_KEY, false)) {
this.coalescer.queueIncrement(tincrement);
return;
}
try {
HTable table = getTable(tincrement.getTable());
Increment inc = ThriftUtilities.incrementFromThrift(tincrement);
table.increment(inc);
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
throw new IOError(e.getMessage());
}
}
@Override
public void incrementRows(List<TIncrement> tincrements) throws IOError, TException {
if (conf.getBoolean(COALESCE_INC_KEY, false)) {
this.coalescer.queueIncrements(tincrements);
return;
}
for (TIncrement tinc : tincrements) {
increment(tinc);
}
}
}
/**
* Adds all the attributes into the Operation object
*/
private static void addAttributes(OperationWithAttributes op,
Map<ByteBuffer, ByteBuffer> attributes) {
if (attributes == null || attributes.size() == 0) {
return;
}
for (Map.Entry<ByteBuffer, ByteBuffer> entry : attributes.entrySet()) {
String name = Bytes.toStringBinary(getBytes(entry.getKey()));
byte[] value = getBytes(entry.getValue());
op.setAttribute(name, value);
}
}
public static void registerFilters(Configuration conf) {
String[] filters = conf.getStrings("hbase.thrift.filters");
if(filters != null) {
for(String filterClass: filters) {
String[] filterPart = filterClass.split(":");
if(filterPart.length != 2) {
LOG.warn("Invalid filter specification " + filterClass + " - skipping");
} else {
ParseFilter.registerFilter(filterPart[0], filterPart[1]);
}
}
}
}
}
| apache-2.0 |
blucas/netty | common/src/test/java/io/netty/util/concurrent/NonStickyEventExecutorGroupTest.java | 5189 | /*
* Copyright 2016 The Netty Project
*
* The Netty Project licenses this file to you under the Apache License,
* version 2.0 (the "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*/
package io.netty.util.concurrent;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReference;
@RunWith(Parameterized.class)
public class NonStickyEventExecutorGroupTest {
    // Cap on how many queued tasks a non-sticky executor may run per pass.
    private final int maxTaskExecutePerRun;
    /**
     * Wrapping a group that is already ordered must be rejected: the
     * constructor is expected to throw IllegalArgumentException for
     * DefaultEventExecutorGroup.
     */
    @Test(expected = IllegalArgumentException.class)
    public void testInvalidGroup() {
        EventExecutorGroup group = new DefaultEventExecutorGroup(1);
        try {
            new NonStickyEventExecutorGroup(group);
        } finally {
            group.shutdownGracefully();
        }
    }
    /** Runs every test with several per-run task execution limits. */
    @Parameterized.Parameters(name = "{index}: maxTaskExecutePerRun = {0}")
    public static Collection<Object[]> data() throws Exception {
        List<Object[]> params = new ArrayList<Object[]>();
        params.add(new Object[] {64});
        params.add(new Object[] {256});
        params.add(new Object[] {1024});
        params.add(new Object[] {Integer.MAX_VALUE});
        return params;
    }
    public NonStickyEventExecutorGroupTest(int maxTaskExecutePerRun) {
        this.maxTaskExecutePerRun = maxTaskExecutePerRun;
    }
    /**
     * Hammers the non-sticky group from several threads at once and fails if
     * any of them observes an ordering violation.
     */
    @Test(timeout = 10000)
    public void testOrdering() throws Throwable {
        final int threads = Runtime.getRuntime().availableProcessors() * 2;
        final EventExecutorGroup group = new UnorderedThreadPoolEventExecutor(threads);
        final NonStickyEventExecutorGroup nonStickyGroup = new NonStickyEventExecutorGroup(group, maxTaskExecutePerRun);
        try {
            final CountDownLatch startLatch = new CountDownLatch(1);
            // First failure wins; later failures are ignored by compareAndSet.
            final AtomicReference<Throwable> error = new AtomicReference<Throwable>();
            List<Thread> threadList = new ArrayList<Thread>(threads);
            for (int i = 0 ; i < threads; i++) {
                Thread thread = new Thread(new Runnable() {
                    @Override
                    public void run() {
                        try {
                            execute(nonStickyGroup, startLatch);
                        } catch (Throwable cause) {
                            error.compareAndSet(null, cause);
                        }
                    }
                });
                threadList.add(thread);
                thread.start();
            }
            // Release all worker threads at once to maximize contention.
            startLatch.countDown();
            for (Thread t: threadList) {
                t.join();
            }
            Throwable cause = error.get();
            if (cause != null) {
                throw cause;
            }
        } finally {
            nonStickyGroup.shutdownGracefully();
        }
    }
    /**
     * Submits 10000 numbered tasks to a single executor of the group and
     * verifies they run strictly in submission order and never concurrently.
     */
    private static void execute(EventExecutorGroup group, CountDownLatch startLatch) throws Throwable {
        EventExecutor executor = group.next();
        Assert.assertTrue(executor instanceof OrderedEventExecutor);
        final AtomicReference<Throwable> cause = new AtomicReference<Throwable>();
        // Id of the last task seen; must only ever increase.
        final AtomicInteger last = new AtomicInteger();
        int tasks = 10000;
        List<Future<?>> futures = new ArrayList<Future<?>>(tasks);
        final CountDownLatch latch = new CountDownLatch(tasks);
        startLatch.await();
        for (int i = 1 ; i <= tasks; i++) {
            final int id = i;
            futures.add(executor.submit(new Runnable() {
                @Override
                public void run() {
                    try {
                        if (cause.get() == null) {
                            int lastId = last.get();
                            // Seeing an id <= the last one means out-of-order
                            // execution; a failed CAS means two tasks ran at
                            // the same time.
                            if (lastId >= id) {
                                cause.compareAndSet(null, new AssertionError(
                                        "Out of order execution id(" + id + ") >= lastId(" + lastId + ')'));
                            }
                            if (!last.compareAndSet(lastId, id)) {
                                cause.compareAndSet(null, new AssertionError("Concurrent execution of tasks"));
                            }
                        }
                    } finally {
                        latch.countDown();
                    }
                }
            }));
        }
        latch.await();
        for (Future<?> future: futures) {
            future.syncUninterruptibly();
        }
        Throwable error = cause.get();
        if (error != null) {
            throw error;
        }
    }
}
| apache-2.0 |
TheRealRasu/arx | src/main/org/deidentifier/arx/algorithm/FLASHPhaseConfiguration.java | 3037 | /*
* ARX: Powerful Data Anonymization
* Copyright 2012 - 2015 Florian Kohlmayer, Fabian Prasser
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.deidentifier.arx.algorithm;
import org.deidentifier.arx.framework.lattice.NodeAction;
/**
* This class parameterizes a phase the interwoven two-phase Flash algorithm.
*
* @author Fabian Prasser
* @author Florian Kohlmayer
*/
public class FLASHPhaseConfiguration {
    /** The main anonymity property maintained during this phase. */
    private final int anonymityProperty;
    /** Decides which nodes get tagged in this phase. */
    private final NodeAction tagTrigger;
    /** Decides which nodes get checked in this phase. */
    private final NodeAction checkTrigger;
    /** Decides which nodes get evaluated in this phase. */
    private final NodeAction evaluateTrigger;
    /** Decides which nodes get skipped in this phase. */
    private final NodeAction skipTrigger;
    /**
     * Creates an immutable configuration for an active phase.
     *
     * @param anonymityProperty the main anonymity property
     * @param triggerTag trigger for tagging nodes
     * @param triggerCheck trigger for checking nodes
     * @param triggerEvaluate trigger for evaluating nodes
     * @param triggerSkip trigger for skipping nodes
     */
    public FLASHPhaseConfiguration(int anonymityProperty,
                                   NodeAction triggerTag,
                                   NodeAction triggerCheck,
                                   NodeAction triggerEvaluate,
                                   NodeAction triggerSkip) {
        this.anonymityProperty = anonymityProperty;
        this.tagTrigger = triggerTag;
        this.checkTrigger = triggerCheck;
        this.evaluateTrigger = triggerEvaluate;
        this.skipTrigger = triggerSkip;
    }
    /** Returns the main anonymity property. */
    public int getAnonymityProperty() {
        return anonymityProperty;
    }
    /** Returns the trigger for checking nodes in this phase. */
    public NodeAction getTriggerCheck() {
        return checkTrigger;
    }
    /** Returns the trigger for evaluating nodes in this phase. */
    public NodeAction getTriggerEvaluate() {
        return evaluateTrigger;
    }
    /** Returns the trigger for skipping nodes in this phase. */
    public NodeAction getTriggerSkip() {
        return skipTrigger;
    }
    /** Returns the trigger for tagging nodes in this phase. */
    public NodeAction getTriggerTag() {
        return tagTrigger;
    }
}
| apache-2.0 |
pubudu08/carbon-deployment | components/webapp-mgt/org.wso2.carbon.tomcat.patch/src/main/java/org/apache/tomcat/util/http/mapper/Mapper.java | 53045 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.tomcat.util.http.mapper;
import javax.naming.NamingException;
import javax.naming.directory.DirContext;
import org.apache.tomcat.util.buf.Ascii;
import org.apache.tomcat.util.buf.CharChunk;
import org.apache.tomcat.util.buf.MessageBytes;
import org.apache.tomcat.util.res.StringManager;
/**
* Mapper, which implements the servlet API mapping rules (which are derived
* from the HTTP rules).
*
* @author Remy Maucherat
*/
public final class Mapper {
    private static final org.apache.juli.logging.Log log =
        org.apache.juli.logging.LogFactory.getLog(Mapper.class);
    protected static final StringManager sm =
        StringManager.getManager(Mapper.class.getPackage().getName());
    // ----------------------------------------------------- Instance Variables
    /**
     * Array containing the virtual hosts definitions.
     * Updated copy-on-write: mutators build a new array under
     * synchronization and swap it in, so unsynchronized readers always see a
     * consistent snapshot.
     */
    protected Host[] hosts = new Host[0];
    /**
     * Default host name.
     */
    protected String defaultHostName = null;
    /**
     * Context associated with this wrapper, used for wrapper mapping.
     */
    protected ContextVersion context = new ContextVersion();
    // --------------------------------------------------------- Public Methods
    /**
     * Set default host.
     *
     * @param defaultHostName Default host name
     */
    public void setDefaultHostName(String defaultHostName) {
        this.defaultHostName = defaultHostName;
    }
    /**
     * Add a new host to the mapper. Every alias is registered as a separate
     * map entry that shares the same Host object and context list.
     *
     * @param name Virtual host name
     * @param aliases Alias names for the host
     * @param host Host object
     */
    public synchronized void addHost(String name, String[] aliases,
                                     Object host) {
        Host[] newHosts = new Host[hosts.length + 1];
        Host newHost = new Host();
        ContextList contextList = new ContextList();
        newHost.name = name;
        newHost.contextList = contextList;
        newHost.object = host;
        // insertMap returns false on duplicates, in which case the new array
        // is discarded and the field keeps its previous snapshot.
        if (insertMap(hosts, newHosts, newHost)) {
            hosts = newHosts;
        }
        for (int i = 0; i < aliases.length; i++) {
            newHosts = new Host[hosts.length + 1];
            newHost = new Host();
            newHost.name = aliases[i];
            newHost.contextList = contextList;
            newHost.object = host;
            if (insertMap(hosts, newHosts, newHost)) {
                hosts = newHosts;
            }
        }
    }
    /**
     * Remove a host from the mapper, together with all aliases that map to
     * the same host object.
     *
     * @param name Virtual host name
     */
    public synchronized void removeHost(String name) {
        // Find and remove the old host
        int pos = find(hosts, name);
        if (pos < 0) {
            return;
        }
        Object host = hosts[pos].object;
        Host[] newHosts = new Host[hosts.length - 1];
        if (removeMap(hosts, newHosts, name)) {
            hosts = newHosts;
        }
        // Remove all aliases (they will map to the same host object)
        // NOTE(review): this loop iterates the snapshot taken after the first
        // removal while 'hosts' keeps shrinking — verify alias entries cannot
        // be skipped when several aliases are adjacent.
        for (int i = 0; i < newHosts.length; i++) {
            if (newHosts[i].object == host) {
                Host[] newHosts2 = new Host[hosts.length - 1];
                if (removeMap(hosts, newHosts2, newHosts[i].name)) {
                    hosts = newHosts2;
                }
            }
        }
    }
    /**
     * Add an alias to an existing host. The alias shares the host's context
     * list and host object.
     *
     * @param name The name of the host
     * @param alias The alias to add
     */
    public synchronized void addHostAlias(String name, String alias) {
        int pos = find(hosts, name);
        if (pos < 0) {
            // Should not be adding an alias for a host that doesn't exist but
            // just in case...
            return;
        }
        Host realHost = hosts[pos];
        Host[] newHosts = new Host[hosts.length + 1];
        Host newHost = new Host();
        newHost.name = alias;
        newHost.contextList = realHost.contextList;
        newHost.object = realHost.object;
        if (insertMap(hosts, newHosts, newHost)) {
            hosts = newHosts;
        }
    }
    /**
     * Remove a host alias
     * @param alias The alias to remove
     */
    public synchronized void removeHostAlias(String alias) {
        // Find and remove the alias
        int pos = find(hosts, alias);
        if (pos < 0) {
            return;
        }
        Host[] newHosts = new Host[hosts.length - 1];
        if (removeMap(hosts, newHosts, alias)) {
            hosts = newHosts;
        }
    }
    /**
     * Set context, used for wrapper mapping (request dispatcher).
     *
     * @param path Context path of this mapper's context
     * @param welcomeResources Welcome files defined for this context
     * @param resources Static resources of the context
     */
    public void setContext(String path, String[] welcomeResources,
                           javax.naming.Context resources) {
        context.path = path;
        context.welcomeResources = welcomeResources;
        context.resources = resources;
    }
/**
* Add a new Context to an existing Host.
*
* @param hostName Virtual host name this context belongs to
* @param host Host object
* @param path Context path
* @param version Context version
* @param context Context object
* @param welcomeResources Welcome files defined for this context
* @param resources Static resources of the context
*/
public void addContextVersion(String hostName, Object host, String path,
String version, Object context, String[] welcomeResources,
javax.naming.Context resources) {
Host[] hosts = this.hosts;
int pos = find(hosts, hostName);
if( pos <0 ) {
addHost(hostName, new String[0], host);
hosts = this.hosts;
pos = find(hosts, hostName);
}
if (pos < 0) {
log.error("No host found: " + hostName);
}
Host mappedHost = hosts[pos];
if (mappedHost.name.equals(hostName)) {
int slashCount = slashCount(path);
synchronized (mappedHost) {
Context[] contexts = mappedHost.contextList.contexts;
// Update nesting
if (slashCount > mappedHost.contextList.nesting) {
mappedHost.contextList.nesting = slashCount;
}
int pos2 = find(contexts, path);
if (pos2 < 0 || !path.equals(contexts[pos2].name)) {
Context newContext = new Context();
newContext.name = path;
Context[] newContexts = new Context[contexts.length + 1];
if (insertMap(contexts, newContexts, newContext)) {
mappedHost.contextList.contexts = newContexts;
}
pos2 = find(newContexts, path);
}
Context mappedContext = mappedHost.contextList.contexts[pos2];
ContextVersion[] contextVersions = mappedContext.versions;
ContextVersion[] newContextVersions =
new ContextVersion[contextVersions.length + 1];
ContextVersion newContextVersion = new ContextVersion();
newContextVersion.path = path;
newContextVersion.name = version;
newContextVersion.object = context;
newContextVersion.welcomeResources = welcomeResources;
newContextVersion.resources = resources;
if (insertMap(contextVersions, newContextVersions, newContextVersion)) {
mappedContext.versions = newContextVersions;
}
}
}
}
    /**
     * Remove a context from an existing host. When the last version of a
     * context is removed, the context entry itself is dropped and the host's
     * nesting level is recomputed.
     *
     * @param hostName Virtual host name this context belongs to
     * @param path Context path
     * @param version Context version
     */
    public void removeContextVersion(String hostName, String path,
            String version) {
        Host[] hosts = this.hosts;
        int pos = find(hosts, hostName);
        if (pos < 0) {
            return;
        }
        Host host = hosts[pos];
        if (host.name.equals(hostName)) {
            synchronized (host) {
                Context[] contexts = host.contextList.contexts;
                if (contexts.length == 0 ){
                    return;
                }
                int pos2 = find(contexts, path);
                if (pos2 < 0 || !path.equals(contexts[pos2].name)) {
                    return;
                }
                Context context = contexts[pos2];
                ContextVersion[] contextVersions = context.versions;
                ContextVersion[] newContextVersions =
                    new ContextVersion[contextVersions.length - 1];
                if (removeMap(contextVersions, newContextVersions, version)) {
                    context.versions = newContextVersions;
                    if (context.versions.length == 0) {
                        // Remove the context
                        Context[] newContexts = new Context[contexts.length -1];
                        if (removeMap(contexts, newContexts, path)) {
                            host.contextList.contexts = newContexts;
                            // Recalculate nesting
                            host.contextList.nesting = 0;
                            for (int i = 0; i < newContexts.length; i++) {
                                int slashCount = slashCount(newContexts[i].name);
                                if (slashCount > host.contextList.nesting) {
                                    host.contextList.nesting = slashCount;
                                }
                            }
                        }
                    }
                }
            }
        }
    }
    /**
     * Add a wrapper to the context identified by host name, context path and
     * version; logs and returns when any lookup step fails.
     */
    public void addWrapper(String hostName, String contextPath, String version,
                           String path, Object wrapper, boolean jspWildCard,
                           boolean resourceOnly) {
        Host[] hosts = this.hosts;
        int pos = find(hosts, hostName);
        if (pos < 0) {
            return;
        }
        Host host = hosts[pos];
        if (host.name.equals(hostName)) {
            Context[] contexts = host.contextList.contexts;
            int pos2 = find(contexts, contextPath);
            if (pos2 < 0) {
                log.error("No context found: " + contextPath );
                return;
            }
            Context context = contexts[pos2];
            if (context.name.equals(contextPath)) {
                ContextVersion[] contextVersions = context.versions;
                int pos3 = find(contextVersions, version);
                if( pos3<0 ) {
                    log.error("No context version found: " + contextPath + " " +
                            version);
                    return;
                }
                ContextVersion contextVersion = contextVersions[pos3];
                if (contextVersion.name.equals(version)) {
                    addWrapper(contextVersion, path, wrapper, jspWildCard,
                            resourceOnly);
                }
            }
        }
    }
    /**
     * Add a wrapper to the context associated with this mapper (request
     * dispatcher use).
     */
    public void addWrapper(String path, Object wrapper, boolean jspWildCard,
                           boolean resourceOnly) {
        addWrapper(context, path, wrapper, jspWildCard, resourceOnly);
    }
    /**
     * Adds a wrapper to the given context. The mapping path decides the
     * wrapper kind: trailing "/*" is a wildcard wrapper, leading "*." an
     * extension wrapper, exactly "/" the default wrapper, anything else an
     * exact wrapper.
     *
     * @param context The context to which to add the wrapper
     * @param path Wrapper mapping
     * @param wrapper The Wrapper object
     * @param jspWildCard true if the wrapper corresponds to the JspServlet
     * @param resourceOnly true if this wrapper always expects a physical
     *                     resource to be present (such as a JSP)
     *                     and the mapping path contains a wildcard; false otherwise
     */
    protected void addWrapper(ContextVersion context, String path,
            Object wrapper, boolean jspWildCard, boolean resourceOnly) {
        synchronized (context) {
            Wrapper newWrapper = new Wrapper();
            newWrapper.object = wrapper;
            newWrapper.jspWildCard = jspWildCard;
            newWrapper.resourceOnly = resourceOnly;
            if (path.endsWith("/*")) {
                // Wildcard wrapper
                newWrapper.name = path.substring(0, path.length() - 2);
                Wrapper[] oldWrappers = context.wildcardWrappers;
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length + 1];
                if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                    context.wildcardWrappers = newWrappers;
                    // Track the deepest wildcard mapping for the map phase.
                    int slashCount = slashCount(newWrapper.name);
                    if (slashCount > context.nesting) {
                        context.nesting = slashCount;
                    }
                }
            } else if (path.startsWith("*.")) {
                // Extension wrapper
                newWrapper.name = path.substring(2);
                Wrapper[] oldWrappers = context.extensionWrappers;
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length + 1];
                if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                    context.extensionWrappers = newWrappers;
                }
            } else if (path.equals("/")) {
                // Default wrapper
                newWrapper.name = "";
                context.defaultWrapper = newWrapper;
            } else {
                // Exact wrapper
                if (path.length() == 0) {
                    // Special case for the Context Root mapping which is
                    // treated as an exact match
                    newWrapper.name = "/";
                } else {
                    newWrapper.name = path;
                }
                Wrapper[] oldWrappers = context.exactWrappers;
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length + 1];
                if (insertMap(oldWrappers, newWrappers, newWrapper)) {
                    context.exactWrappers = newWrappers;
                }
            }
        }
    }
    /**
     * Remove a wrapper from the context associated with this wrapper.
     *
     * @param path Wrapper mapping
     */
    public void removeWrapper(String path) {
        removeWrapper(context, path);
    }
    /**
     * Remove a wrapper from an existing context, identified by host name,
     * context path and version; silently returns when any lookup fails.
     *
     * @param hostName Virtual host name this wrapper belongs to
     * @param contextPath Context path this wrapper belongs to
     * @param version Context version this wrapper belongs to
     * @param path Wrapper mapping
     */
    public void removeWrapper
        (String hostName, String contextPath, String version, String path) {
        Host[] hosts = this.hosts;
        int pos = find(hosts, hostName);
        if (pos < 0) {
            return;
        }
        Host host = hosts[pos];
        if (host.name.equals(hostName)) {
            Context[] contexts = host.contextList.contexts;
            int pos2 = find(contexts, contextPath);
            if (pos2 < 0) {
                return;
            }
            Context context = contexts[pos2];
            if (context.name.equals(contextPath)) {
                ContextVersion[] contextVersions = context.versions;
                int pos3 = find(contextVersions, version);
                if( pos3<0 ) {
                    return;
                }
                ContextVersion contextVersion = contextVersions[pos3];
                if (contextVersion.name.equals(version)) {
                    removeWrapper(contextVersion, path);
                }
            }
        }
    }
    /**
     * Removes the wrapper with the given mapping from the context, undoing
     * the classification performed by
     * {@link #addWrapper(ContextVersion, String, Object, boolean, boolean)}.
     */
    protected void removeWrapper(ContextVersion context, String path) {
        if (log.isDebugEnabled()) {
            log.debug(sm.getString("mapper.removeWrapper", context.name, path));
        }
        synchronized (context) {
            if (path.endsWith("/*")) {
                // Wildcard wrapper
                String name = path.substring(0, path.length() - 2);
                Wrapper[] oldWrappers = context.wildcardWrappers;
                if (oldWrappers.length == 0) {
                    return;
                }
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length - 1];
                if (removeMap(oldWrappers, newWrappers, name)) {
                    // Recalculate nesting
                    context.nesting = 0;
                    for (int i = 0; i < newWrappers.length; i++) {
                        int slashCount = slashCount(newWrappers[i].name);
                        if (slashCount > context.nesting) {
                            context.nesting = slashCount;
                        }
                    }
                    context.wildcardWrappers = newWrappers;
                }
            } else if (path.startsWith("*.")) {
                // Extension wrapper
                String name = path.substring(2);
                Wrapper[] oldWrappers = context.extensionWrappers;
                if (oldWrappers.length == 0) {
                    return;
                }
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length - 1];
                if (removeMap(oldWrappers, newWrappers, name)) {
                    context.extensionWrappers = newWrappers;
                }
            } else if (path.equals("/")) {
                // Default wrapper
                context.defaultWrapper = null;
            } else {
                // Exact wrapper
                String name = path;
                Wrapper[] oldWrappers = context.exactWrappers;
                if (oldWrappers.length == 0) {
                    return;
                }
                Wrapper[] newWrappers =
                    new Wrapper[oldWrappers.length - 1];
                if (removeMap(oldWrappers, newWrappers, name)) {
                    context.exactWrappers = newWrappers;
                }
            }
        }
    }
    /**
     * Add a welcome file to the given context version, identified by host
     * name, context path and version; logs and returns when a lookup fails.
     *
     * @param hostName Virtual host name the context belongs to
     * @param contextPath Context path
     * @param version Context version
     * @param welcomeFile Welcome file to append
     */
    public void addWelcomeFile(String hostName, String contextPath,
            String version, String welcomeFile) {
        Host[] hosts = this.hosts;
        int pos = find(hosts, hostName);
        if (pos < 0) {
            return;
        }
        Host host = hosts[pos];
        if (host.name.equals(hostName)) {
            Context[] contexts = host.contextList.contexts;
            int pos2 = find(contexts, contextPath);
            if (pos2 < 0) {
                log.error("No context found: " + contextPath );
                return;
            }
            Context context = contexts[pos2];
            if (context.name.equals(contextPath)) {
                ContextVersion[] contextVersions = context.versions;
                int pos3 = find(contextVersions, version);
                if( pos3<0 ) {
                    log.error("No context version found: " + contextPath + " " +
                            version);
                    return;
                }
                ContextVersion contextVersion = contextVersions[pos3];
                if (contextVersion.name.equals(version)) {
                    // Grow the welcome-resource array by one and append.
                    int len = contextVersion.welcomeResources.length + 1;
                    String[] newWelcomeResources = new String[len];
                    System.arraycopy(contextVersion.welcomeResources, 0,
                            newWelcomeResources, 0, len - 1);
                    newWelcomeResources[len - 1] = welcomeFile;
                    contextVersion.welcomeResources = newWelcomeResources;
                }
            }
        }
    }
    /**
     * Remove a welcome file from the given context version; a no-op when the
     * file is not currently registered.
     *
     * @param hostName Virtual host name the context belongs to
     * @param contextPath Context path
     * @param version Context version
     * @param welcomeFile Welcome file to remove
     */
    public void removeWelcomeFile(String hostName, String contextPath,
            String version, String welcomeFile) {
        Host[] hosts = this.hosts;
        int pos = find(hosts, hostName);
        if (pos < 0) {
            return;
        }
        Host host = hosts[pos];
        if (host.name.equals(hostName)) {
            Context[] contexts = host.contextList.contexts;
            int pos2 = find(contexts, contextPath);
            if (pos2 < 0) {
                log.error("No context found: " + contextPath );
                return;
            }
            Context context = contexts[pos2];
            if (context.name.equals(contextPath)) {
                ContextVersion[] contextVersions = context.versions;
                int pos3 = find(contextVersions, version);
                if( pos3<0 ) {
                    log.error("No context version found: " + contextPath + " " +
                            version);
                    return;
                }
                ContextVersion contextVersion = contextVersions[pos3];
                if (contextVersion.name.equals(version)) {
                    int match = -1;
                    for (int i = 0; i < contextVersion.welcomeResources.length; i++) {
                        if (welcomeFile.equals(contextVersion.welcomeResources[i])) {
                            match = i;
                            break;
                        }
                    }
                    if (match > -1) {
                        // Shrink the array, closing the gap left by the match.
                        int len = contextVersion.welcomeResources.length - 1;
                        String[] newWelcomeResources = new String[len];
                        System.arraycopy(contextVersion.welcomeResources, 0,
                                newWelcomeResources, 0, match);
                        if (match < len) {
                            System.arraycopy(contextVersion.welcomeResources, match + 1,
                                    newWelcomeResources, match, len - match);
                        }
                        contextVersion.welcomeResources = newWelcomeResources;
                    }
                }
            }
        }
    }
/**
 * Clear all welcome files for the given context version.
 * If the host, context or context version cannot be resolved the mapper
 * is left unchanged (missing context / version is logged).
 *
 * @param hostName    name of the virtual host holding the context
 * @param contextPath path of the context to update
 * @param version     version of the context to update
 */
public void clearWelcomeFiles(String hostName, String contextPath,
        String version) {
    Host[] currentHosts = this.hosts;
    int hostPos = find(currentHosts, hostName);
    if (hostPos < 0) {
        return;
    }
    Host mappedHost = currentHosts[hostPos];
    // find() returns the closest inferior-or-equal slot; require exact name
    if (!mappedHost.name.equals(hostName)) {
        return;
    }
    Context[] contexts = mappedHost.contextList.contexts;
    int contextPos = find(contexts, contextPath);
    if (contextPos < 0) {
        log.error("No context found: " + contextPath );
        return;
    }
    Context mappedContext = contexts[contextPos];
    if (!mappedContext.name.equals(contextPath)) {
        return;
    }
    ContextVersion[] contextVersions = mappedContext.versions;
    int versionPos = find(contextVersions, version);
    if (versionPos < 0) {
        log.error("No context version found: " + contextPath + " " +
                version);
        return;
    }
    ContextVersion contextVersion = contextVersions[versionPos];
    if (contextVersion.name.equals(version)) {
        // Drop every configured welcome file for this version.
        contextVersion.welcomeResources = new String[0];
    }
}
/**
 * Map the specified host name and URI, mutating the given mapping data.
 * A null host is replaced by the configured default host name before
 * mapping takes place.
 *
 * @param host Virtual host name
 * @param uri URI to map
 * @param version context version requested, or null for the latest
 * @param mappingData This structure will contain the result of the mapping
 *                    operation
 */
public void map(MessageBytes host, MessageBytes uri, String version,
MappingData mappingData)
throws Exception {
if (host.isNull()) {
host.getCharChunk().append(defaultHostName);
}
// Ensure both buffers have a char[] representation before mapping
host.toChars();
uri.toChars();
internalMap(host.getCharChunk(), uri.getCharChunk(), version,
mappingData);
}
/**
 * Map the specified URI relative to the context,
 * mutating the given mapping data. Only wrapper (servlet) mapping is
 * performed; host and context are taken as already resolved.
 *
 * @param uri URI to map
 * @param mappingData This structure will contain the result of the mapping
 *                    operation
 */
public void map(MessageBytes uri, MappingData mappingData)
throws Exception {
uri.toChars();
CharChunk uricc = uri.getCharChunk();
// Reset the limit so the chunk can grow while welcome files are appended
uricc.setLimit(-1);
// NOTE(review): 'context' is a field declared outside this chunk;
// presumably the single ContextVersion this mapper is bound to — confirm.
internalMapWrapper(context, uricc, mappingData);
}
// -------------------------------------------------------- Private Methods
/**
 * Map the specified URI, mutating mappingData.
 * Three stages: virtual-host lookup (case-insensitive, falling back to
 * the default host), longest-prefix context lookup, and finally wrapper
 * (servlet) mapping within the selected context version.
 * Stages already populated in mappingData are skipped.
 */
private final void internalMap(CharChunk host, CharChunk uri,
String version, MappingData mappingData) throws Exception {
// Reset the limit so the chunk end can be moved freely below
uri.setLimit(-1);
Context[] contexts = null;
Context context = null;
ContextVersion contextVersion = null;
int nesting = 0;
// Virtual host mapping
if (mappingData.host == null) {
Host[] hosts = this.hosts;
int pos = findIgnoreCase(hosts, host);
if ((pos != -1) && (host.equalsIgnoreCase(hosts[pos].name))) {
mappingData.host = hosts[pos].object;
contexts = hosts[pos].contextList.contexts;
nesting = hosts[pos].contextList.nesting;
} else {
// Unknown host: fall back to the default host, if configured
if (defaultHostName == null) {
return;
}
pos = find(hosts, defaultHostName);
if ((pos != -1) && (defaultHostName.equals(hosts[pos].name))) {
mappingData.host = hosts[pos].object;
contexts = hosts[pos].contextList.contexts;
nesting = hosts[pos].contextList.nesting;
} else {
return;
}
}
}
// Context mapping
if (mappingData.context == null) {
int pos = find(contexts, uri);
if (pos == -1) {
return;
}
int lastSlash = -1;
int uriEnd = uri.getEnd();
int length = -1;
boolean found = false;
// Shorten the URI one path segment at a time (by moving its end)
// until a context whose name is a path prefix of the URI is found.
while (pos >= 0) {
if (uri.startsWith(contexts[pos].name)) {
length = contexts[pos].name.length();
if (uri.getLength() == length) {
found = true;
break;
} else if (uri.startsWithIgnoreCase("/", length)) {
// Prefix must end at a path-segment boundary
found = true;
break;
}
}
if (lastSlash == -1) {
lastSlash = nthSlash(uri, nesting + 1);
} else {
lastSlash = lastSlash(uri);
}
uri.setEnd(lastSlash);
pos = find(contexts, uri);
}
// Restore the full URI before continuing
uri.setEnd(uriEnd);
if (!found) {
// Fall back to the root context, if one is deployed
if (contexts[0].name.equals("")) {
context = contexts[0];
}
} else {
context = contexts[pos];
}
if (context != null) {
mappingData.contextPath.setString(context.name);
}
}
if (context != null) {
ContextVersion[] contextVersions = context.versions;
int versionCount = contextVersions.length;
if (versionCount > 1) {
// Expose all paused/parallel versions to the caller
Object[] contextObjects = new Object[contextVersions.length];
for (int i = 0; i < contextObjects.length; i++) {
contextObjects[i] = contextVersions[i].object;
}
mappingData.contexts = contextObjects;
}
if (version == null) {
// Return the latest version
contextVersion = contextVersions[versionCount - 1];
} else {
int pos = find(contextVersions, version);
if (pos < 0 || !contextVersions[pos].name.equals(version)) {
// Return the latest version
contextVersion = contextVersions[versionCount - 1];
} else {
contextVersion = contextVersions[pos];
}
}
mappingData.context = contextVersion.object;
}
// Wrapper mapping
if ((contextVersion != null) && (mappingData.wrapper == null)) {
internalMapWrapper(contextVersion, uri, mappingData);
}
}
/**
 * Wrapper (servlet) mapping within a context version, following the
 * Servlet specification's matching order: exact match, longest prefix
 * (wildcard) match, extension match, welcome files, then the default
 * servlet. Works by sliding the offset/end of the path chunk; both are
 * restored before returning (except on the empty-path redirect).
 */
private final void internalMapWrapper(ContextVersion contextVersion,
CharChunk path,
MappingData mappingData)
throws Exception {
int pathOffset = path.getOffset();
int pathEnd = path.getEnd();
int servletPath = pathOffset;
boolean noServletPath = false;
int length = contextVersion.path.length();
if (length != (pathEnd - pathOffset)) {
servletPath = pathOffset + length;
} else {
// Request URI equals the context path exactly: treat the servlet
// path as "/" by appending a slash (triggers a redirect below)
noServletPath = true;
path.append('/');
pathOffset = path.getOffset();
pathEnd = path.getEnd();
servletPath = pathOffset+length;
}
// From here on the chunk's offset marks the start of the servlet path
path.setOffset(servletPath);
// Rule 1 -- Exact Match
Wrapper[] exactWrappers = contextVersion.exactWrappers;
internalMapExactWrapper(exactWrappers, path, mappingData);
// Rule 2 -- Prefix Match
boolean checkJspWelcomeFiles = false;
Wrapper[] wildcardWrappers = contextVersion.wildcardWrappers;
if (mappingData.wrapper == null) {
internalMapWildcardWrapper(wildcardWrappers, contextVersion.nesting,
path, mappingData);
if (mappingData.wrapper != null && mappingData.jspWildCard) {
char[] buf = path.getBuffer();
if (buf[pathEnd - 1] == '/') {
/*
 * Path ending in '/' was mapped to JSP servlet based on
 * wildcard match (e.g., as specified in url-pattern of a
 * jsp-property-group.
 * Force the context's welcome files, which are interpreted
 * as JSP files (since they match the url-pattern), to be
 * considered. See Bugzilla 27664.
 */
mappingData.wrapper = null;
checkJspWelcomeFiles = true;
} else {
// See Bugzilla 27704
mappingData.wrapperPath.setChars(buf, path.getStart(),
path.getLength());
mappingData.pathInfo.recycle();
}
}
}
if(mappingData.wrapper == null && noServletPath) {
// The path is empty, redirect to "/"
mappingData.redirectPath.setChars
(path.getBuffer(), pathOffset, pathEnd-pathOffset);
// Undo the earlier append('/') before returning
path.setEnd(pathEnd - 1);
return;
}
// Rule 3 -- Extension Match
Wrapper[] extensionWrappers = contextVersion.extensionWrappers;
if (mappingData.wrapper == null && !checkJspWelcomeFiles) {
internalMapExtensionWrapper(extensionWrappers, path, mappingData,
true);
}
// Rule 4 -- Welcome resources processing for servlets
if (mappingData.wrapper == null) {
boolean checkWelcomeFiles = checkJspWelcomeFiles;
if (!checkWelcomeFiles) {
char[] buf = path.getBuffer();
// Welcome files only apply to directory requests (trailing '/')
checkWelcomeFiles = (buf[pathEnd - 1] == '/');
}
if (checkWelcomeFiles) {
for (int i = 0; (i < contextVersion.welcomeResources.length)
&& (mappingData.wrapper == null); i++) {
// Append the welcome file name to the request path, then
// retry exact / prefix / extension mapping against it
path.setOffset(pathOffset);
path.setEnd(pathEnd);
path.append(contextVersion.welcomeResources[i], 0,
contextVersion.welcomeResources[i].length());
path.setOffset(servletPath);
// Rule 4a -- Welcome resources processing for exact match
internalMapExactWrapper(exactWrappers, path, mappingData);
// Rule 4b -- Welcome resources processing for prefix match
if (mappingData.wrapper == null) {
internalMapWildcardWrapper
(wildcardWrappers, contextVersion.nesting,
path, mappingData);
}
// Rule 4c -- Welcome resources processing
// for physical folder
if (mappingData.wrapper == null
&& contextVersion.resources != null) {
Object file = null;
String pathStr = path.toString();
try {
file = contextVersion.resources.lookup(pathStr);
} catch(NamingException nex) {
// Swallow not found, since this is normal
}
// Only map if a physical (non-directory) resource exists
if (file != null && !(file instanceof DirContext) ) {
internalMapExtensionWrapper(extensionWrappers, path,
mappingData, true);
if (mappingData.wrapper == null
&& contextVersion.defaultWrapper != null) {
mappingData.wrapper =
contextVersion.defaultWrapper.object;
mappingData.requestPath.setChars
(path.getBuffer(), path.getStart(),
path.getLength());
mappingData.wrapperPath.setChars
(path.getBuffer(), path.getStart(),
path.getLength());
mappingData.requestPath.setString(pathStr);
mappingData.wrapperPath.setString(pathStr);
}
}
}
}
path.setOffset(servletPath);
path.setEnd(pathEnd);
}
}
/* welcome file processing - take 2
 * Now that we have looked for welcome files with a physical
 * backing, now look for an extension mapping listed
 * but may not have a physical backing to it. This is for
 * the case of index.jsf, index.do, etc.
 * A watered down version of rule 4
 */
if (mappingData.wrapper == null) {
boolean checkWelcomeFiles = checkJspWelcomeFiles;
if (!checkWelcomeFiles) {
char[] buf = path.getBuffer();
checkWelcomeFiles = (buf[pathEnd - 1] == '/');
}
if (checkWelcomeFiles) {
for (int i = 0; (i < contextVersion.welcomeResources.length)
&& (mappingData.wrapper == null); i++) {
path.setOffset(pathOffset);
path.setEnd(pathEnd);
path.append(contextVersion.welcomeResources[i], 0,
contextVersion.welcomeResources[i].length());
path.setOffset(servletPath);
// resourceExpected == false: match even without a backing file
internalMapExtensionWrapper(extensionWrappers, path,
mappingData, false);
}
path.setOffset(servletPath);
path.setEnd(pathEnd);
}
}
// Rule 7 -- Default servlet
if (mappingData.wrapper == null && !checkJspWelcomeFiles) {
if (contextVersion.defaultWrapper != null) {
mappingData.wrapper = contextVersion.defaultWrapper.object;
mappingData.requestPath.setChars
(path.getBuffer(), path.getStart(), path.getLength());
mappingData.wrapperPath.setChars
(path.getBuffer(), path.getStart(), path.getLength());
}
// Redirection to a folder
char[] buf = path.getBuffer();
if (contextVersion.resources != null && buf[pathEnd -1 ] != '/') {
Object file = null;
String pathStr = path.toString();
try {
file = contextVersion.resources.lookup(pathStr);
} catch(NamingException nex) {
// Swallow, since someone else handles the 404
}
if (file != null && file instanceof DirContext) {
// Note: this mutates the path: do not do any processing
// after this (since we set the redirectPath, there
// shouldn't be any)
path.setOffset(pathOffset);
path.append('/');
mappingData.redirectPath.setChars
(path.getBuffer(), path.getStart(), path.getLength());
} else {
mappingData.requestPath.setString(pathStr);
mappingData.wrapperPath.setString(pathStr);
}
}
}
// Restore the original chunk window for the caller
path.setOffset(pathOffset);
path.setEnd(pathEnd);
}
/**
 * Exact mapping (Rule 1): map the path only if a wrapper is registered
 * under that exact name. "/" gets the special context-root treatment
 * mandated by the Servlet specification.
 */
private final void internalMapExactWrapper
(Wrapper[] wrappers, CharChunk path, MappingData mappingData) {
int pos = find(wrappers, path);
// find() returns the closest inferior-or-equal slot; require exact match
if ((pos != -1) && (path.equals(wrappers[pos].name))) {
mappingData.requestPath.setString(wrappers[pos].name);
mappingData.wrapper = wrappers[pos].object;
if (path.equals("/")) {
// Special handling for Context Root mapped servlet
mappingData.pathInfo.setString("/");
mappingData.wrapperPath.setString("");
// This seems wrong but it is what the spec says...
mappingData.contextPath.setString("");
} else {
mappingData.wrapperPath.setString(wrappers[pos].name);
}
}
}
/**
 * Wildcard (prefix) mapping (Rule 2): find the longest registered prefix
 * that matches the path at a path-segment boundary. The remainder of the
 * path after the matched prefix becomes pathInfo.
 */
private final void internalMapWildcardWrapper
(Wrapper[] wrappers, int nesting, CharChunk path,
MappingData mappingData) {
int pathEnd = path.getEnd();
int lastSlash = -1;
int length = -1;
int pos = find(wrappers, path);
if (pos != -1) {
boolean found = false;
// Shorten the path one segment at a time (by moving its end)
// until a wrapper whose name is a segment-aligned prefix is found.
while (pos >= 0) {
if (path.startsWith(wrappers[pos].name)) {
length = wrappers[pos].name.length();
if (path.getLength() == length) {
found = true;
break;
} else if (path.startsWithIgnoreCase("/", length)) {
found = true;
break;
}
}
if (lastSlash == -1) {
lastSlash = nthSlash(path, nesting + 1);
} else {
lastSlash = lastSlash(path);
}
path.setEnd(lastSlash);
pos = find(wrappers, path);
}
// Restore the full path before populating the result
path.setEnd(pathEnd);
if (found) {
mappingData.wrapperPath.setString(wrappers[pos].name);
if (path.getLength() > length) {
// Everything after the matched prefix is pathInfo
mappingData.pathInfo.setChars
(path.getBuffer(),
path.getOffset() + length,
path.getLength() - length);
}
mappingData.requestPath.setChars
(path.getBuffer(), path.getOffset(), path.getLength());
mappingData.wrapper = wrappers[pos].object;
mappingData.jspWildCard = wrappers[pos].jspWildCard;
}
}
}
/**
 * Extension mappings (Rule 3): map on the extension of the last path
 * segment (text after the final '.' that follows the final '/').
 *
 * @param wrappers Set of wrappers to check for matches
 * @param path Path to map
 * @param mappingData Mapping data for result
 * @param resourceExpected Is this mapping expecting to find a resource;
 *        when false, wrappers flagged resourceOnly are skipped
 */
private final void internalMapExtensionWrapper(Wrapper[] wrappers,
CharChunk path, MappingData mappingData, boolean resourceExpected) {
char[] buf = path.getBuffer();
int pathEnd = path.getEnd();
int servletPath = path.getOffset();
int slash = -1;
// Find the start of the last path segment
for (int i = pathEnd - 1; i >= servletPath; i--) {
if (buf[i] == '/') {
slash = i;
break;
}
}
if (slash >= 0) {
int period = -1;
// Find the extension separator within that segment
for (int i = pathEnd - 1; i > slash; i--) {
if (buf[i] == '.') {
period = i;
break;
}
}
if (period >= 0) {
// Temporarily narrow the chunk to just the extension
path.setOffset(period + 1);
path.setEnd(pathEnd);
int pos = find(wrappers, path);
if ((pos != -1) && (path.equals(wrappers[pos].name)) &&
(resourceExpected || !wrappers[pos].resourceOnly)) {
mappingData.wrapperPath.setChars
(buf, servletPath, pathEnd - servletPath);
mappingData.requestPath.setChars
(buf, servletPath, pathEnd - servletPath);
mappingData.wrapper = wrappers[pos].object;
}
// Restore the chunk window
path.setOffset(servletPath);
path.setEnd(pathEnd);
}
}
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array. Convenience overload covering the chunk's full region.
 */
private static final int find(MapElement[] map, CharChunk name) {
return find(map, name, name.getStart(), name.getEnd());
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array, or -1 when the name sorts before every element (or the
 * array is empty). Binary search over [start, end) of the chunk.
 */
private static final int find(MapElement[] map, CharChunk name,
int start, int end) {
int a = 0;
int b = map.length - 1;
// Special cases: -1 and 0
if (b == -1) {
return -1;
}
if (compare(name, start, end, map[0].name) < 0 ) {
return -1;
}
if (b == 0) {
return 0;
}
// Invariant: map[a] <= name; b shrinks toward the answer
int i = 0;
while (true) {
i = (b + a) / 2;
int result = compare(name, start, end, map[i].name);
if (result == 1) {
a = i;
} else if (result == 0) {
return i;
} else {
b = i;
}
if ((b - a) == 1) {
// Decide between the two remaining candidates
int result2 = compare(name, start, end, map[b].name);
if (result2 < 0) {
return a;
} else {
return b;
}
}
}
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array. Case-insensitive variant covering the chunk's full region.
 */
private static final int findIgnoreCase(MapElement[] map, CharChunk name) {
return findIgnoreCase(map, name, name.getStart(), name.getEnd());
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in the
 * given array, or -1 when the name sorts before every element (or the
 * array is empty). Case-insensitive binary search over [start, end).
 */
private static final int findIgnoreCase(MapElement[] map, CharChunk name,
int start, int end) {
int a = 0;
int b = map.length - 1;
// Special cases: -1 and 0
if (b == -1) {
return -1;
}
if (compareIgnoreCase(name, start, end, map[0].name) < 0 ) {
return -1;
}
if (b == 0) {
return 0;
}
// Invariant: map[a] <= name; b shrinks toward the answer
int i = 0;
while (true) {
i = (b + a) / 2;
int result = compareIgnoreCase(name, start, end, map[i].name);
if (result == 1) {
a = i;
} else if (result == 0) {
return i;
} else {
b = i;
}
if ((b - a) == 1) {
// Decide between the two remaining candidates
int result2 = compareIgnoreCase(name, start, end, map[b].name);
if (result2 < 0) {
return a;
} else {
return b;
}
}
}
}
/**
 * Find a map element given its name in a sorted array of map elements.
 * This will return the index for the closest inferior or equal item in
 * the given array, or -1 when the name sorts before every element (or
 * the array is empty).
 */
private static final int find(MapElement[] map, String name) {
    int low = 0;
    int high = map.length - 1;
    // Empty array, or the name sorts before every element.
    if (high == -1 || name.compareTo(map[0].name) < 0) {
        return -1;
    }
    if (high == 0) {
        return 0;
    }
    // Binary search maintaining map[low] <= name at all times.
    while (high - low > 1) {
        int mid = (low + high) / 2;
        int cmp = name.compareTo(map[mid].name);
        if (cmp == 0) {
            return mid;
        } else if (cmp > 0) {
            low = mid;
        } else {
            high = mid;
        }
    }
    // Two candidates left: pick the closest inferior-or-equal one.
    return (name.compareTo(map[high].name) < 0) ? low : high;
}
/**
 * Compare the given char chunk region with a String.
 * Returns -1, 0 or +1 if the region is inferior, equal, or superior to
 * the String; characters in the common prefix are compared first, and
 * on a tie the shorter operand sorts first.
 */
private static final int compare(CharChunk name, int start, int end,
        String compareTo) {
    char[] buf = name.getBuffer();
    int regionLen = end - start;
    int limit = Math.min(regionLen, compareTo.length());
    // First differing character in the common prefix decides the order.
    for (int i = 0; i < limit; i++) {
        char a = buf[start + i];
        char b = compareTo.charAt(i);
        if (a != b) {
            return (a > b) ? 1 : -1;
        }
    }
    // Identical common prefix: order by length.
    if (regionLen == compareTo.length()) {
        return 0;
    }
    return (regionLen > compareTo.length()) ? 1 : -1;
}
/**
 * Compare the given char chunk region with a String, ignoring case
 * (ASCII lower-casing via Ascii.toLower).
 * Returns -1, 0 or +1 if the region is inferior, equal, or superior to
 * the String; on a tie of the common prefix the shorter operand sorts
 * first.
 */
private static final int compareIgnoreCase(CharChunk name, int start, int end,
        String compareTo) {
    char[] buf = name.getBuffer();
    int regionLen = end - start;
    int limit = Math.min(regionLen, compareTo.length());
    // First (case-folded) differing character decides the order.
    for (int i = 0; i < limit; i++) {
        int a = Ascii.toLower(buf[start + i]);
        int b = Ascii.toLower(compareTo.charAt(i));
        if (a != b) {
            return (a > b) ? 1 : -1;
        }
    }
    // Identical common prefix: order by length.
    if (regionLen == compareTo.length()) {
        return 0;
    }
    return (regionLen > compareTo.length()) ? 1 : -1;
}
/**
 * Find the position of the last '/' in the given char chunk, scanning
 * backwards from the end. Returns the chunk's start position when no
 * slash is present.
 */
private static final int lastSlash(CharChunk name) {
    char[] buf = name.getBuffer();
    int start = name.getStart();
    int pos = name.getEnd();
    while (pos > start) {
        pos--;
        if (buf[pos] == '/') {
            return pos;
        }
    }
    return pos;
}
/**
 * Find the position of the n-th '/' in the given char chunk. Returns the
 * chunk's end position when fewer than n slashes are present.
 */
private static final int nthSlash(CharChunk name, int n) {
    char[] buf = name.getBuffer();
    int end = name.getEnd();
    int seen = 0;
    for (int pos = name.getStart(); pos < end; pos++) {
        if (buf[pos] == '/' && ++seen == n) {
            return pos;
        }
    }
    return end;
}
/**
 * Return the number of '/' characters in the given string.
 */
private static final int slashCount(String name) {
    int count = 0;
    // Walk successive indexOf hits instead of re-scanning from -1.
    for (int pos = name.indexOf('/'); pos != -1;
            pos = name.indexOf('/', pos + 1)) {
        count++;
    }
    return count;
}
/**
 * Insert into the right place in a sorted MapElement array, and prevent
 * duplicates. newMap must be one slot longer than oldMap.
 *
 * @return true if the element was inserted into newMap, false if an
 *         element with the same name already exists (newMap untouched)
 */
private static final boolean insertMap
(MapElement[] oldMap, MapElement[] newMap, MapElement newElement) {
// find() returns the closest inferior-or-equal index, or -1 when the
// new name sorts before every existing element.
int pos = find(oldMap, newElement.name);
if ((pos != -1) && (newElement.name.equals(oldMap[pos].name))) {
return false;
}
// Insert after pos; with pos == -1 the first copy is a no-op and the
// new element lands at index 0.
System.arraycopy(oldMap, 0, newMap, 0, pos + 1);
newMap[pos + 1] = newElement;
System.arraycopy
(oldMap, pos + 1, newMap, pos + 2, oldMap.length - pos - 1);
return true;
}
/**
 * Remove the element with the given name from a sorted MapElement array.
 * newMap must be one slot shorter than oldMap.
 *
 * @return true if an exact match was found and the remaining elements
 *         were copied into newMap, false otherwise (newMap untouched)
 */
private static final boolean removeMap
        (MapElement[] oldMap, MapElement[] newMap, String name) {
    int pos = find(oldMap, name);
    // find() returns the closest inferior-or-equal slot; require exact match.
    if (pos == -1 || !name.equals(oldMap[pos].name)) {
        return false;
    }
    // Copy everything before and after the matching slot.
    System.arraycopy(oldMap, 0, newMap, 0, pos);
    System.arraycopy(oldMap, pos + 1, newMap, pos,
            oldMap.length - pos - 1);
    return true;
}
// ------------------------------------------------- MapElement Inner Class
// Base entry for all sorted lookup arrays: a sort key plus an opaque
// payload (exposed to callers through MappingData fields).
protected abstract static class MapElement {
// Sort key used by the binary searches in find()/findIgnoreCase().
public String name = null;
// Opaque mapped object associated with the name.
public Object object = null;
}
// ------------------------------------------------------- Host Inner Class
// Virtual host entry: name (inherited) plus the contexts deployed on it.
protected static final class Host
extends MapElement {
// Sorted contexts for this host, plus their nesting metadata.
public ContextList contextList = null;
}
// ------------------------------------------------ ContextList Inner Class
// Holder for a host's contexts and the nesting bound used while mapping.
protected static final class ContextList {
// Contexts sorted by name for binary search via find().
public Context[] contexts = new Context[0];
// Used as nthSlash(uri, nesting + 1) to bound the segment-stripping
// loop in internalMap; presumably the deepest context-path nesting
// across these contexts — TODO confirm against the code that sets it.
public int nesting = 0;
}
// ---------------------------------------------------- Context Inner Class
// Context entry: path (inherited as name) plus its deployed versions,
// kept in an order where the last element is treated as the latest.
protected static final class Context extends MapElement {
public ContextVersion[] versions = new ContextVersion[0];
}
// One deployed version of a context; holds everything wrapper mapping
// needs (see internalMapWrapper).
protected static final class ContextVersion extends MapElement {
// Context path; its length splits the request URI into context path
// and servlet path during wrapper mapping.
public String path = null;
// Welcome file names tried (in order) for directory requests.
public String[] welcomeResources = new String[0];
// Static resources root, used to probe for physical files/folders.
public javax.naming.Context resources = null;
// Default servlet (Rule 7), if any.
public Wrapper defaultWrapper = null;
// Wrappers sorted by name, split by match type (Rules 1-3).
public Wrapper[] exactWrappers = new Wrapper[0];
public Wrapper[] wildcardWrappers = new Wrapper[0];
public Wrapper[] extensionWrappers = new Wrapper[0];
// Segment-depth bound passed to internalMapWildcardWrapper.
public int nesting = 0;
}
// ---------------------------------------------------- Wrapper Inner Class
// Servlet (wrapper) entry: url-pattern (inherited as name) plus flags
// consulted during mapping.
protected static class Wrapper
extends MapElement {
// True when this wildcard mapping targets the JSP servlet; triggers
// the welcome-file / pathInfo special cases in internalMapWrapper.
public boolean jspWildCard = false;
// True when this extension mapping should only match if a physical
// resource is expected (see internalMapExtensionWrapper).
public boolean resourceOnly = false;
}
}
| apache-2.0 |
mswiderski/drools | drools-core/src/main/java/org/drools/impl/InternalKnowledgeBase.java | 183 | package org.drools.impl;
import org.drools.KnowledgeBase;
import org.drools.RuleBase;
/**
 * Internal extension of {@link KnowledgeBase} that exposes the underlying
 * {@link RuleBase} to framework-level code.
 */
public interface InternalKnowledgeBase extends KnowledgeBase {
/**
 * @return the underlying {@link RuleBase} backing this knowledge base
 */
RuleBase getRuleBase();
}
| apache-2.0 |
RLDevOps/Demo | src/main/java/org/olat/modules/scorm/archiver/ScormExportVisitor.java | 988 | /**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) 2009 frentix GmbH, Switzerland<br>
* <p>
*/
package org.olat.modules.scorm.archiver;
/**
* Description:<br>
* Visitor pattern for ScormExportManager
* <P>
* Initial Date: 17 août 2009 <br>
*
* @author srosse
*/
public interface ScormExportVisitor {
/**
 * Visit one SCO data set produced during a SCORM export traversal.
 *
 * @param data the SCO data element being visited
 */
public void visit(ScoDatas data);
}
| apache-2.0 |
michaelgallacher/intellij-community | platform/core-api/src/com/intellij/openapi/application/Application.java | 15484 | /*
* Copyright 2000-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.application;
import com.intellij.openapi.Disposable;
import com.intellij.openapi.components.ComponentManager;
import com.intellij.openapi.progress.ProcessCanceledException;
import com.intellij.openapi.util.Computable;
import com.intellij.openapi.util.Condition;
import com.intellij.openapi.util.ThrowableComputable;
import org.jetbrains.annotations.NotNull;
import java.awt.*;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
/**
* Provides access to core application-wide functionality and methods for working with the IDEA
* thread model. The thread model defines two main types of actions which can access the PSI and other
* IDEA data structures: read actions (which do not modify the data) and write actions (which modify
* some data).<p>
* You can call methods requiring read access from the Swing event-dispatch thread without using
* {@link #runReadAction} method. If you need to invoke such methods from another thread you have to use
* {@link #runReadAction}. Multiple read actions can run at the same time without locking each other.
* <p>
* Write actions can be called only from the Swing thread using {@link #runWriteAction} method.
* If there are read actions running at this moment {@code runWriteAction} is blocked until they are completed.
*/
public interface Application extends ComponentManager {
/**
* Runs the specified read action. Can be called from any thread. The action is executed immediately
* if no write action is currently running, or blocked until the currently running write action completes.
*
* @param action the action to run.
*/
void runReadAction(@NotNull Runnable action);
/**
* Runs the specified computation in a read action. Can be called from any thread. The action is executed
* immediately if no write action is currently running, or blocked until the currently running write action
* completes.
*
* @param computation the computation to perform.
* @return the result returned by the computation.
*/
<T> T runReadAction(@NotNull Computable<T> computation);
/**
* Runs the specified computation in a read action. Can be called from any thread. The action is executed
* immediately if no write action is currently running, or blocked until the currently running write action
* completes.
*
* @param computation the computation to perform.
* @return the result returned by the computation.
* @exception E re-thrown from ThrowableComputable
*/
<T, E extends Throwable> T runReadAction(@NotNull ThrowableComputable<T, E> computation) throws E;
/**
* Runs the specified write action. Must be called from the Swing dispatch thread. The action is executed
* immediately if no read actions are currently running, or blocked until all read actions complete.
*
* @param action the action to run
*/
void runWriteAction(@NotNull Runnable action);
/**
* Runs the specified computation in a write action. Must be called from the Swing dispatch thread.
* The action is executed immediately if no read actions or write actions are currently running,
* or blocked until all read actions and write actions complete.
*
* @param computation the computation to run
* @return the result returned by the computation.
*/
<T> T runWriteAction(@NotNull Computable<T> computation);
/**
* Runs the specified computation in a write action. Must be called from the Swing dispatch thread.
* The action is executed immediately if no read actions or write actions are currently running,
* or blocked until all read actions and write actions complete.
*
* @param computation the computation to run
* @return the result returned by the computation.
* @exception E re-thrown from ThrowableComputable
*/
<T, E extends Throwable> T runWriteAction(@NotNull ThrowableComputable<T, E> computation) throws E;
/**
* Returns true if there is currently executing write action of the specified class.
*
* @param actionClass the class of the write action to return.
* @return true if the action is running, or false if no action of the specified class is currently executing.
*/
boolean hasWriteAction(@NotNull Class<?> actionClass);
/**
* Asserts whether the read access is allowed.
*/
void assertReadAccessAllowed();
/**
* Asserts whether the write access is allowed.
*/
void assertWriteAccessAllowed();
/**
* Asserts whether the method is being called from the event dispatch thread.
*/
void assertIsDispatchThread();
/**
* Adds an {@link ApplicationListener}.
*
* @param listener the listener to add
*/
void addApplicationListener(@NotNull ApplicationListener listener);
/**
* Adds an {@link ApplicationListener}.
*
* @param listener the listener to add
* @param parent the parent disposable which dispose will trigger this listener removal
*/
void addApplicationListener(@NotNull ApplicationListener listener, @NotNull Disposable parent);
/**
* Removes an {@link ApplicationListener}.
*
* @param listener the listener to remove
*/
void removeApplicationListener(@NotNull ApplicationListener listener);
/**
* Saves all open documents and projects.
*/
void saveAll();
/**
* Saves all application settings.
*/
void saveSettings();
/**
* Exits the application, showing the exit confirmation prompt if it is enabled.
*/
void exit();
/**
* Checks if the write access is currently allowed.
*
* @return true if the write access is currently allowed, false otherwise.
* @see #assertWriteAccessAllowed()
* @see #runWriteAction(Runnable)
*/
boolean isWriteAccessAllowed();
/**
* Checks if the read access is currently allowed.
*
* @return true if the read access is currently allowed, false otherwise.
* @see #assertReadAccessAllowed()
* @see #runReadAction(Runnable)
*/
boolean isReadAccessAllowed();
/**
* Checks if the current thread is the Swing dispatch thread.
*
* @return true if the current thread is the Swing dispatch thread, false otherwise.
*/
boolean isDispatchThread();
/**
* @return a facade, which lets to call all those invokeLater() with a ActionCallback handle returned.
*/
@NotNull
ModalityInvokator getInvokator();
/**
* Causes {@code runnable.run()} to be executed asynchronously on the
* AWT event dispatching thread, with {@link ModalityState#defaultModalityState()} modality state. This will happen after all
* pending AWT events have been processed.<p/>
*
* Please use this method instead of {@link javax.swing.SwingUtilities#invokeLater(Runnable)} or {@link com.intellij.util.ui.UIUtil} methods
* for the reasons described in {@link ModalityState} documentation.
*
* @param runnable the runnable to execute.
*/
void invokeLater(@NotNull Runnable runnable);
/**
* Causes {@code runnable.run()} to be executed asynchronously on the
* AWT event dispatching thread - unless the expiration condition is fulfilled.
* This will happen after all pending AWT events have been processed and in {@link ModalityState#defaultModalityState()} modality state
* (or a state with less modal dialogs open).<p/>
*
* Please use this method instead of {@link javax.swing.SwingUtilities#invokeLater(Runnable)} or {@link com.intellij.util.ui.UIUtil} methods
* for the reasons described in {@link ModalityState} documentation.
*
* @param runnable the runnable to execute.
* @param expired condition to check before execution.
*/
void invokeLater(@NotNull Runnable runnable, @NotNull Condition expired);
/**
* Causes {@code runnable.run()} to be executed asynchronously on the
* AWT event dispatching thread, when IDEA is in the specified modality
* state (or a state with less modal dialogs open).
*
* Please use this method instead of {@link javax.swing.SwingUtilities#invokeLater(Runnable)} or {@link com.intellij.util.ui.UIUtil} methods
* for the reasons described in {@link ModalityState} documentation.
*
* @param runnable the runnable to execute.
* @param state the state in which the runnable will be executed.
*/
void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state);
/**
* Causes {@code runnable.run()} to be executed asynchronously on the
* AWT event dispatching thread, when IDEA is in the specified modality
* state(or a state with less modal dialogs open) - unless the expiration condition is fulfilled.
* This will happen after all pending AWT events have been processed.
*
* Please use this method instead of {@link javax.swing.SwingUtilities#invokeLater(Runnable)} or {@link com.intellij.util.ui.UIUtil} methods
* for the reasons described in {@link ModalityState} documentation.
*
* @param runnable the runnable to execute.
* @param state the state in which the runnable will be executed.
* @param expired condition to check before execution.
*/
void invokeLater(@NotNull Runnable runnable, @NotNull ModalityState state, @NotNull Condition expired);
/**
* <p>Causes {@code runnable.run()} to be executed synchronously on the
* AWT event dispatching thread, when the IDE is in the specified modality
* state (or a state with less modal dialogs open). This call blocks until all pending AWT events have been processed and (then)
* {@code runnable.run()} returns.</p>
*
* <p>If current thread is an event dispatch thread then {@code runnable.run()}
* is executed immediately regardless of the modality state.</p>
*
* Please use this method instead of {@link javax.swing.SwingUtilities#invokeAndWait(Runnable)} or {@link com.intellij.util.ui.UIUtil} methods
* for the reasons described in {@link ModalityState} documentation.
*
* @param runnable the runnable to execute.
* @param modalityState the state in which the runnable will be executed.
* @throws ProcessCanceledException when the current thread is interrupted
*/
void invokeAndWait(@NotNull Runnable runnable, @NotNull ModalityState modalityState) throws ProcessCanceledException;
/**
* Same as {@link #invokeAndWait(Runnable, ModalityState)}, using {@link ModalityState#defaultModalityState()}.
*/
void invokeAndWait(@NotNull Runnable runnable) throws ProcessCanceledException;
/**
* Returns current modality state corresponding to the currently opened modal dialogs. Can only be invoked on AWT thread.
*
* @return the current modality state.
* @see ModalityState#current()
*/
@NotNull
ModalityState getCurrentModalityState();
/**
* Returns the modality state for the dialog to which the specified component belongs.
*
* @param c the component for which the modality state is requested.
* @return the modality state.
* @see ModalityState#stateForComponent(Component)
*/
@NotNull
ModalityState getModalityStateForComponent(@NotNull Component c);
  /**
   * When invoked on AWT thread, returns {@link #getCurrentModalityState()}. When invoked in the thread of some modal progress, returns modality state
   * corresponding to that progress' dialog. Otherwise, returns {@link #getNoneModalityState()}.
   *
   * @return the modality state for the current thread.
   * @see ModalityState#defaultModalityState()
   */
  @NotNull
  ModalityState getDefaultModalityState();
/**
* Returns the modality state representing the state when no modal dialogs
* are active.
*
* @return the modality state for no modal dialogs.
* @see ModalityState#NON_MODAL
*/
@NotNull
ModalityState getNoneModalityState();
/**
* Returns modality state which is active anytime. Please don't use it unless absolutely needed for the reasons described in
* {@link ModalityState} documentation.
* @return modality state
* @see ModalityState#any()
*/
@NotNull
ModalityState getAnyModalityState();
/**
* Returns the time of IDEA start, in milliseconds since midnight, January 1, 1970 UTC.
*
* @return the IDEA start time.
*/
long getStartTime();
/**
* Returns the time in milliseconds during which IDEA received no input events.
*
* @return the idle time of IDEA.
*/
long getIdleTime();
/**
* Checks if IDEA is currently running unit tests. No UI should be shown when unit
* tests are being executed.
*
* @return true if IDEA is running unit tests, false otherwise
*/
boolean isUnitTestMode();
/**
* Checks if IDEA is running as a command line applet or in unit test mode.
* No UI should be shown when IDEA is running in this mode.
*
* @return true if IDEA is running in UI-less mode, false otherwise
*/
boolean isHeadlessEnvironment();
/**
* Checks if IDEA is running as a command line applet or in unit test mode.
* UI can be shown (e.g. diff frame)
*
* @return true if IDEA is running in command line mode, false otherwise
*/
boolean isCommandLine();
  /**
   * Requests pooled thread to execute the action.
   * This pool is:<ul>
   * <li>Unbounded.</li>
   * <li>Application-wide, always active, non-shutdownable singleton.</li>
   * </ul>
   * You can use this pool for long-running and/or IO-bound tasks.
   * @param action to be executed
   * @return future result
   */
  @NotNull
  Future<?> executeOnPooledThread(@NotNull Runnable action);
/**
* Requests pooled thread to execute the action.
* This pool is<ul>
* <li>Unbounded.</li>
* <li>Application-wide, always active, non-shutdownable singleton.</li>
* </ul>
* You can use this pool for long-running and/or IO-bound tasks.
* @param action to be executed
* @return future result
*/
@NotNull
<T> Future<T> executeOnPooledThread(@NotNull Callable<T> action);
/**
* @return true if application is currently disposing (but not yet disposed completely)
*/
boolean isDisposeInProgress();
/**
* Checks if IDEA is capable of restarting itself on the current platform and with the current execution mode.
*
* @return true if IDEA can restart itself, false otherwise.
* @since 8.1
*/
boolean isRestartCapable();
/**
* Exits and restarts IDEA. If the current platform is not restart capable, only exits.
*
* @since 8.1
*/
void restart();
/**
* Checks if the application is active
* @return true if one of application windows is focused, false -- otherwise
* @since 9.0
*/
boolean isActive();
/**
* Returns lock used for read operations, should be closed in finally block
*/
@NotNull
AccessToken acquireReadActionLock();
/**
* Returns lock used for write operations, should be closed in finally block
*/
@NotNull
AccessToken acquireWriteActionLock(@NotNull Class marker);
boolean isInternal();
boolean isEAP();
}
| apache-2.0 |
msebire/intellij-community | platform/lang-impl/src/com/intellij/ide/todo/nodes/TodoItemNode.java | 7989 | // Copyright 2000-2018 JetBrains s.r.o. Use of this source code is governed by the Apache 2.0 license that can be found in the LICENSE file.
package com.intellij.ide.todo.nodes;
import com.intellij.ide.projectView.PresentationData;
import com.intellij.ide.todo.HighlightedRegionProvider;
import com.intellij.ide.todo.SmartTodoItemPointer;
import com.intellij.ide.todo.TodoTreeBuilder;
import com.intellij.ide.util.treeView.AbstractTreeNode;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.RangeMarker;
import com.intellij.openapi.editor.highlighter.EditorHighlighter;
import com.intellij.openapi.editor.highlighter.HighlighterIterator;
import com.intellij.openapi.editor.markup.TextAttributes;
import com.intellij.openapi.project.Project;
import com.intellij.openapi.util.Comparing;
import com.intellij.openapi.util.TextRange;
import com.intellij.psi.search.TodoItem;
import com.intellij.ui.HighlightedRegion;
import com.intellij.util.text.CharArrayUtil;
import org.jetbrains.annotations.NotNull;
import javax.swing.*;
import java.awt.*;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
/**
 * Leaf node of the TODO tree: represents a single {@link TodoItem} occurrence in a file.
 * Its presentable text is built as {@code "(line, column) <trimmed text of the TODO line>"},
 * and syntax/TODO-pattern highlighting is exposed through {@link HighlightedRegionProvider}.
 */
public final class TodoItemNode extends BaseToDoNode<SmartTodoItemPointer> implements HighlightedRegionProvider {
  private static final Logger LOG = Logger.getInstance("#com.intellij.ide.toDo.TodoItemNodeDescriptor");
  // Highlighting for the primary (first) line of this item; rebuilt on every update().
  private final ArrayList<HighlightedRegion> myHighlightedRegions;
  // One provider per extra line of a multi-line TODO item; rebuilt on every update().
  private final List<HighlightedRegionProvider> myAdditionalLines;
  public TodoItemNode(Project project,
                      @NotNull SmartTodoItemPointer value,
                      TodoTreeBuilder builder) {
    super(project, value, builder);
    RangeMarker rangeMarker = getValue().getRangeMarker();
    // The pointer is expected to be created from a still-valid range marker.
    LOG.assertTrue(rangeMarker.isValid());
    myHighlightedRegions = new ArrayList<>();
    myAdditionalLines = new ArrayList<>();
  }
  @Override
  public boolean contains(Object element) {
    return canRepresent(element);
  }
  @Override
  public boolean canRepresent(Object element) {
    // This node stands for exactly its own TodoItem (null-safe equality).
    SmartTodoItemPointer value = getValue();
    TodoItem item = value != null ? value.getTodoItem() : null;
    return Comparing.equal(item, element);
  }
  // A single item always counts as one file...
  @Override
  public int getFileCount(final SmartTodoItemPointer val) {
    return 1;
  }
  // ...and as one TODO item.
  @Override
  public int getTodoItemCount(final SmartTodoItemPointer val) {
    return 1;
  }
  @Override
  public ArrayList<HighlightedRegion> getHighlightedRegions() {
    return myHighlightedRegions;
  }
  // Leaf node: never has children.
  @Override
  @NotNull
  public Collection<AbstractTreeNode> getChildren() {
    return Collections.emptyList();
  }
  /**
   * Recomputes presentable text, icon and highlighting from the current document state.
   * If the file or range marker is no longer valid (or the range collapsed to zero length),
   * disposes the marker and resets the value to null so the tree can drop this node.
   */
  @Override
  public void update(@NotNull PresentationData presentation) {
    SmartTodoItemPointer todoItemPointer = getValue();
    assert todoItemPointer != null;
    TodoItem todoItem = todoItemPointer.getTodoItem();
    RangeMarker myRangeMarker = todoItemPointer.getRangeMarker();
    if (!todoItem.getFile().isValid() || !myRangeMarker.isValid() || myRangeMarker.getStartOffset() == myRangeMarker.getEndOffset()) {
      myRangeMarker.dispose();
      setValue(null);
      return;
    }
    myHighlightedRegions.clear();
    myAdditionalLines.clear();
    // Update name: compute "(line, column) " prefix from the 1-based position of the item.
    Document document = todoItemPointer.getDocument();
    CharSequence chars = document.getCharsSequence();
    int startOffset = myRangeMarker.getStartOffset();
    int endOffset = myRangeMarker.getEndOffset();
    int lineNumber = document.getLineNumber(startOffset);
    int lineStartOffset = document.getLineStartOffset(lineNumber);
    int columnNumber = startOffset - lineStartOffset;
    // skip all white space characters so the displayed text starts at the first non-blank char
    while (lineStartOffset < document.getTextLength() && (chars.charAt(lineStartOffset) == '\t' || chars.charAt(lineStartOffset) == ' ')) {
      lineStartOffset++;
    }
    int lineEndOffset = document.getLineEndOffset(lineNumber);
    String lineColumnPrefix = "(" + (lineNumber + 1) + ", " + (columnNumber + 1) + ") ";
    String highlightedText = chars.subSequence(lineStartOffset, Math.min(lineEndOffset, chars.length())).toString();
    String newName = lineColumnPrefix + highlightedText;
    // Update icon from the TODO pattern's attributes.
    Icon newIcon = todoItem.getPattern().getAttributes().getIcon();
    // Update highlighted regions
    // NOTE(review): myHighlightedRegions was already cleared above; this second clear() is redundant.
    myHighlightedRegions.clear();
    EditorHighlighter highlighter = myBuilder.getHighlighter(todoItem.getFile(), document);
    collectHighlights(myHighlightedRegions, highlighter, lineStartOffset, lineEndOffset, lineColumnPrefix.length());
    TextAttributes attributes = todoItem.getPattern().getAttributes().getTextAttributes();
    // Overlay the TODO pattern's own attributes on top of the syntax highlighting;
    // offsets are shifted by the "(line, column) " prefix length.
    myHighlightedRegions.add(new HighlightedRegion(
      lineColumnPrefix.length() + startOffset - lineStartOffset,
      lineColumnPrefix.length() + endOffset - lineStartOffset,
      attributes
    ));
    // Push the computed presentation.
    presentation.setPresentableText(newName);
    presentation.setIcon(newIcon);
    // Build one AdditionalTodoLine per extra range marker of a multi-line TODO.
    for (RangeMarker additionalMarker : todoItemPointer.getAdditionalRangeMarkers()) {
      // Stops at the first invalid marker — presumably the remaining ones are stale too (TODO confirm).
      if (!additionalMarker.isValid()) break;
      ArrayList<HighlightedRegion> highlights = new ArrayList<>();
      int lineNum = document.getLineNumber(additionalMarker.getStartOffset());
      int lineStart = document.getLineStartOffset(lineNum);
      int lineEnd = document.getLineEndOffset(lineNum);
      int lineStartNonWs = CharArrayUtil.shiftForward(chars, lineStart, " \t");
      if (lineStartNonWs > additionalMarker.getStartOffset() || lineEnd < additionalMarker.getEndOffset()) {
        // can happen for an invalid (obsolete) node, tree implementation can call this method for such a node
        break;
      }
      collectHighlights(highlights, highlighter, lineStartNonWs, lineEnd, 0);
      highlights.add(new HighlightedRegion(
        additionalMarker.getStartOffset() - lineStartNonWs,
        additionalMarker.getEndOffset() - lineStartNonWs,
        attributes
      ));
      myAdditionalLines.add(new AdditionalTodoLine(document.getText(new TextRange(lineStartNonWs, lineEnd)), highlights));
    }
  }
  /**
   * Copies the highlighter's regions for [startOffset, endOffset) into {@code highlights},
   * re-basing them by {@code highlightOffsetShift} and stripping the bold font attribute.
   */
  private static void collectHighlights(@NotNull List<? super HighlightedRegion> highlights, @NotNull EditorHighlighter highlighter,
                                        int startOffset, int endOffset, int highlightOffsetShift) {
    HighlighterIterator iterator = highlighter.createIterator(startOffset);
    while (!iterator.atEnd()) {
      // Clamp the highlighter token to the requested range.
      int start = Math.max(iterator.getStart(), startOffset);
      int end = Math.min(iterator.getEnd(), endOffset);
      if (start >= endOffset) break;
      TextAttributes attributes = iterator.getTextAttributes();
      int fontType = attributes.getFontType();
      if ((fontType & Font.BOLD) != 0) { // suppress bold attribute
        attributes = attributes.clone();
        attributes.setFontType(fontType & ~Font.BOLD);
      }
      HighlightedRegion region = new HighlightedRegion(
        highlightOffsetShift + start - startOffset,
        highlightOffsetShift + end - startOffset,
        attributes
      );
      highlights.add(region);
      iterator.advance();
    }
  }
  // Number of rendered rows: the primary line plus each additional line.
  public int getRowCount() {
    return myAdditionalLines.size() + 1;
  }
  @Override
  public String getTestPresentation() {
    return "Item: " + getValue().getTodoItem().getTextRange();
  }
  @Override
  public int getWeight() {
    return 5;
  }
  @NotNull
  public List<HighlightedRegionProvider> getAdditionalLines() {
    return myAdditionalLines;
  }
  /**
   * One extra line of a multi-line TODO item: its text plus the highlighting for that text.
   */
  private static class AdditionalTodoLine implements HighlightedRegionProvider {
    private final String myText;
    private final List<HighlightedRegion> myHighlights;
    private AdditionalTodoLine(String text, List<HighlightedRegion> highlights) {
      myText = text;
      myHighlights = highlights;
    }
    @Override
    public Iterable<HighlightedRegion> getHighlightedRegions() {
      return myHighlights;
    }
    @Override
    public String toString() {
      return myText;
    }
  }
}
| apache-2.0 |
zyjiang08/servestream | src/net/sourceforge/servestream/dslv/ResourceDragSortCursorAdapter.java | 5596 | /*
* Copyright (C) 2011 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.sourceforge.servestream.dslv;
import android.content.Context;
import android.database.Cursor;
import android.view.View;
import android.view.ViewGroup;
import android.view.LayoutInflater;
// taken from v4 rev. 10 ResourceCursorAdapter.java
/**
* Static library support version of the framework's {@link android.widget.ResourceCursorAdapter}.
* Used to write apps that run on platforms prior to Android 3.0. When running
* on Android 3.0 or above, this implementation is still used; it does not try
* to switch to the framework's implementation. See the framework SDK
* documentation for a class overview.
*/
/**
 * Drag-sort flavor of the support library's {@code ResourceCursorAdapter}: a
 * {@link DragSortCursorAdapter} whose item views and drop-down views are
 * inflated from XML layout resources. By default both view kinds share the
 * same layout; either can be changed later via the setters.
 */
public abstract class ResourceDragSortCursorAdapter extends DragSortCursorAdapter {

    private final LayoutInflater mInflater;
    private int mLayout;
    private int mDropDownLayout;

    /**
     * Constructor the enables auto-requery.
     *
     * @deprecated This option is discouraged, as it results in Cursor queries
     * being performed on the application's UI thread and thus can cause poor
     * responsiveness or even Application Not Responding errors. As an alternative,
     * use {@link android.app.LoaderManager} with a {@link android.content.CursorLoader}.
     *
     * @param context The context where the ListView associated with this adapter is running
     * @param layout resource identifier of a layout file that defines the views
     *               for this list item. Unless you override them later, this will
     *               define both the item views and the drop down views.
     */
    @Deprecated
    public ResourceDragSortCursorAdapter(Context context, int layout, Cursor c) {
        super(context, c);
        mInflater = LayoutInflater.from(context);
        mLayout = layout;
        mDropDownLayout = layout;
    }

    /**
     * Constructor with default behavior as per
     * {@link CursorAdapter#CursorAdapter(Context, Cursor, boolean)}; it is recommended
     * you not use this, but instead {@link #ResourceDragSortCursorAdapter(Context, int, Cursor, int)}.
     * When using this constructor, {@link #FLAG_REGISTER_CONTENT_OBSERVER}
     * will always be set.
     *
     * @param context The context where the ListView associated with this adapter is running
     * @param layout resource identifier of a layout file that defines the views
     *               for this list item. Unless you override them later, this will
     *               define both the item views and the drop down views.
     * @param c The cursor from which to get the data.
     * @param autoRequery If true the adapter will call requery() on the
     *                    cursor whenever it changes so the most recent
     *                    data is always displayed. Using true here is discouraged.
     */
    public ResourceDragSortCursorAdapter(Context context, int layout, Cursor c, boolean autoRequery) {
        super(context, c, autoRequery);
        mInflater = LayoutInflater.from(context);
        mLayout = layout;
        mDropDownLayout = layout;
    }

    /**
     * Standard constructor.
     *
     * @param context The context where the ListView associated with this adapter is running
     * @param layout Resource identifier of a layout file that defines the views
     *               for this list item. Unless you override them later, this will
     *               define both the item views and the drop down views.
     * @param c The cursor from which to get the data.
     * @param flags Flags used to determine the behavior of the adapter,
     *              as per {@link CursorAdapter#CursorAdapter(Context, Cursor, int)}.
     */
    public ResourceDragSortCursorAdapter(Context context, int layout, Cursor c, int flags) {
        super(context, c, flags);
        mInflater = LayoutInflater.from(context);
        mLayout = layout;
        mDropDownLayout = layout;
    }

    /**
     * Inflates item view(s) from the configured layout resource.
     *
     * @see android.widget.CursorAdapter#newView(android.content.Context,
     *      android.database.Cursor, ViewGroup)
     */
    @Override
    public View newView(Context context, Cursor cursor, ViewGroup parent) {
        return inflate(mLayout, parent);
    }

    /** Inflates drop-down view(s) from the configured drop-down layout resource. */
    @Override
    public View newDropDownView(Context context, Cursor cursor, ViewGroup parent) {
        return inflate(mDropDownLayout, parent);
    }

    /**
     * <p>Sets the layout resource of the item views.</p>
     *
     * @param layout the layout resources used to create item views
     */
    public void setViewResource(int layout) {
        mLayout = layout;
    }

    /**
     * <p>Sets the layout resource of the drop down views.</p>
     *
     * @param dropDownLayout the layout resources used to create drop down views
     */
    public void setDropDownViewResource(int dropDownLayout) {
        mDropDownLayout = dropDownLayout;
    }

    /** Shared inflation path for both item and drop-down views (never attaches to parent). */
    private View inflate(int layoutResource, ViewGroup parent) {
        return mInflater.inflate(layoutResource, parent, false);
    }
}
| apache-2.0 |
yanjunh/elasticsearch | core/src/main/java/org/elasticsearch/action/admin/cluster/node/tasks/get/TransportGetTaskAction.java | 12006 | /*
* Licensed to Elasticsearch under one or more contributor
* license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright
* ownership. Elasticsearch licenses this file to you under
* the Apache License, Version 2.0 (the "License"); you may
* not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.elasticsearch.action.admin.cluster.node.tasks.get;
import org.elasticsearch.ElasticsearchException;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.ResourceNotFoundException;
import org.elasticsearch.action.ActionListener;
import org.elasticsearch.action.get.GetRequest;
import org.elasticsearch.action.get.GetResponse;
import org.elasticsearch.action.support.ActionFilters;
import org.elasticsearch.action.support.HandledTransportAction;
import org.elasticsearch.client.Client;
import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
import org.elasticsearch.cluster.node.DiscoveryNode;
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.ParseFieldMatcher;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.util.concurrent.AbstractRunnable;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.index.IndexNotFoundException;
import org.elasticsearch.tasks.TaskResult;
import org.elasticsearch.tasks.Task;
import org.elasticsearch.tasks.TaskId;
import org.elasticsearch.tasks.TaskInfo;
import org.elasticsearch.tasks.TaskResultsService;
import org.elasticsearch.threadpool.ThreadPool;
import org.elasticsearch.transport.TransportException;
import org.elasticsearch.transport.TransportRequestOptions;
import org.elasticsearch.transport.TransportResponseHandler;
import org.elasticsearch.transport.TransportService;
import java.io.IOException;
import static org.elasticsearch.action.admin.cluster.node.tasks.list.TransportListTasksAction.waitForCompletionTimeout;
/**
* Action to get a single task. If the task isn't running then it'll try to request the status from request index.
*
* The general flow is:
* <ul>
* <li>If this isn't being executed on the node to which the requested TaskId belongs then move to that node.
* <li>Look up the task and return it if it exists
* <li>If it doesn't then look up the task from the results index
* </ul>
*/
public class TransportGetTaskAction extends HandledTransportAction<GetTaskRequest, GetTaskResponse> {
    // Used to resolve the node owning the requested task and to read cluster state.
    private final ClusterService clusterService;
    // Used to forward the request to the node that owns the task.
    private final TransportService transportService;
    // Used to look up stored task results in the tasks results index.
    private final Client client;
    @Inject
    public TransportGetTaskAction(Settings settings, ThreadPool threadPool, TransportService transportService, ActionFilters actionFilters,
            IndexNameExpressionResolver indexNameExpressionResolver, ClusterService clusterService, Client client) {
        super(settings, GetTaskAction.NAME, threadPool, transportService, actionFilters, indexNameExpressionResolver, GetTaskRequest::new);
        this.clusterService = clusterService;
        this.transportService = transportService;
        this.client = client;
    }
    @Override
    protected void doExecute(GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        // The task-less overload must never be used; this action needs its own Task for child-task registration.
        throw new UnsupportedOperationException("Task is required");
    }
    @Override
    protected void doExecute(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        // Serve locally if this node owns the requested task, otherwise hop to the owning node.
        if (clusterService.localNode().getId().equals(request.getTaskId().getNodeId())) {
            getRunningTaskFromNode(thisTask, request, listener);
        } else {
            runOnNodeWithTaskIfPossible(thisTask, request, listener);
        }
    }
    /**
     * Executed on the coordinating node to forward execution of the remaining work to the node that matches that requested
     * {@link TaskId#getNodeId()}. If the node isn't in the cluster then this will just proceed to
     * {@link #getFinishedTaskFromIndex(Task, GetTaskRequest, ActionListener)} on this node.
     */
    private void runOnNodeWithTaskIfPossible(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        TransportRequestOptions.Builder builder = TransportRequestOptions.builder();
        if (request.getTimeout() != null) {
            builder.withTimeout(request.getTimeout());
        }
        // Compression is explicitly disabled for this forwarded request.
        builder.withCompress(false);
        DiscoveryNode node = clusterService.state().nodes().get(request.getTaskId().getNodeId());
        if (node == null) {
            // Node is no longer part of the cluster! Try and look the task up from the results index.
            getFinishedTaskFromIndex(thisTask, request, listener);
            return;
        }
        GetTaskRequest nodeRequest = request.nodeRequest(clusterService.localNode().getId(), thisTask.getId());
        taskManager.registerChildTask(thisTask, node.getId());
        transportService.sendRequest(node, GetTaskAction.NAME, nodeRequest, builder.build(),
                new TransportResponseHandler<GetTaskResponse>() {
                    @Override
                    public GetTaskResponse newInstance() {
                        return new GetTaskResponse();
                    }
                    @Override
                    public void handleResponse(GetTaskResponse response) {
                        listener.onResponse(response);
                    }
                    @Override
                    public void handleException(TransportException exp) {
                        listener.onFailure(exp);
                    }
                    @Override
                    public String executor() {
                        return ThreadPool.Names.SAME;
                    }
                });
    }
    /**
     * Executed on the node that should be running the task to find and return the running task. Falls back to
     * {@link #getFinishedTaskFromIndex(Task, GetTaskRequest, ActionListener)} if the task isn't still running.
     */
    void getRunningTaskFromNode(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        Task runningTask = taskManager.getTask(request.getTaskId().getId());
        if (runningTask == null) {
            // Task isn't running, go look in the task index
            getFinishedTaskFromIndex(thisTask, request, listener);
        } else {
            if (request.getWaitForCompletion()) {
                // Shift to the generic thread pool and let it wait for the task to complete so we don't block any important threads.
                threadPool.generic().execute(new AbstractRunnable() {
                    @Override
                    protected void doRun() throws Exception {
                        taskManager.waitForTaskCompletion(runningTask, waitForCompletionTimeout(request.getTimeout()));
                        waitedForCompletion(thisTask, request, runningTask.taskInfo(clusterService.localNode().getId(), true), listener);
                    }
                    @Override
                    public void onFailure(Exception e) {
                        listener.onFailure(e);
                    }
                });
            } else {
                // No waiting requested: snapshot the running task and return immediately.
                TaskInfo info = runningTask.taskInfo(clusterService.localNode().getId(), true);
                listener.onResponse(new GetTaskResponse(new TaskResult(false, info)));
            }
        }
    }
    /**
     * Called after waiting for the task to complete. Attempts to load the results of the task from the tasks index. If it isn't in the
     * index then returns a snapshot of the task taken shortly after completion.
     */
    void waitedForCompletion(Task thisTask, GetTaskRequest request, TaskInfo snapshotOfRunningTask,
            ActionListener<GetTaskResponse> listener) {
        getFinishedTaskFromIndex(thisTask, request, new ActionListener<GetTaskResponse>() {
            @Override
            public void onResponse(GetTaskResponse response) {
                // We were able to load the task from the task index. Let's send that back.
                listener.onResponse(response);
            }
            @Override
            public void onFailure(Exception e) {
                /*
                 * We couldn't load the task from the task index. Instead of 404 we should use the snapshot we took after it finished. If
                 * the error isn't a 404 then we'll just throw it back to the user.
                 */
                if (ExceptionsHelper.unwrap(e, ResourceNotFoundException.class) != null) {
                    listener.onResponse(new GetTaskResponse(new TaskResult(true, snapshotOfRunningTask)));
                } else {
                    listener.onFailure(e);
                }
            }
        });
    }
    /**
     * Send a {@link GetRequest} to the tasks index looking for a persisted copy of the task completed task. It'll only be found only if the
     * task's result was stored. Called on the node that once had the task if that node is still part of the cluster or on the
     * coordinating node if the node is no longer part of the cluster.
     */
    void getFinishedTaskFromIndex(Task thisTask, GetTaskRequest request, ActionListener<GetTaskResponse> listener) {
        GetRequest get = new GetRequest(TaskResultsService.TASK_INDEX, TaskResultsService.TASK_TYPE,
                request.getTaskId().toString());
        get.setParentTask(clusterService.localNode().getId(), thisTask.getId());
        client.get(get, new ActionListener<GetResponse>() {
            @Override
            public void onResponse(GetResponse getResponse) {
                try {
                    onGetFinishedTaskFromIndex(getResponse, listener);
                } catch (Exception e) {
                    listener.onFailure(e);
                }
            }
            @Override
            public void onFailure(Exception e) {
                if (ExceptionsHelper.unwrap(e, IndexNotFoundException.class) != null) {
                    // We haven't yet created the index for the task results so it can't be found.
                    listener.onFailure(new ResourceNotFoundException("task [{}] isn't running and hasn't stored its results", e,
                            request.getTaskId()));
                } else {
                    listener.onFailure(e);
                }
            }
        });
    }
    /**
     * Called with the {@linkplain GetResponse} from loading the task from the results index. Called on the node that once had the task if
     * that node is part of the cluster or on the coordinating node if the node wasn't part of the cluster.
     */
    void onGetFinishedTaskFromIndex(GetResponse response, ActionListener<GetTaskResponse> listener) throws IOException {
        if (false == response.isExists()) {
            listener.onFailure(new ResourceNotFoundException("task [{}] isn't running or stored its results", response.getId()));
            return;
        }
        if (response.isSourceEmpty()) {
            listener.onFailure(new ElasticsearchException("Stored task status for [{}] didn't contain any source!", response.getId()));
            return;
        }
        // Parse the stored TaskResult from the document source and hand it back.
        try (XContentParser parser = XContentHelper.createParser(response.getSourceAsBytesRef())) {
            TaskResult result = TaskResult.PARSER.apply(parser, () -> ParseFieldMatcher.STRICT);
            listener.onResponse(new GetTaskResponse(result));
        }
    }
}
| apache-2.0 |
Vansee/RocketMQ | common/src/test/java/org/apache/rocketmq/common/CountDownLatch2Test.java | 3851 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.common;
import java.util.concurrent.TimeUnit;

import org.junit.Test;

import static org.hamcrest.core.Is.is;
import static org.hamcrest.core.StringContains.containsString;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
/**
* CountDownLatch2 Unit Test
*
* @see CountDownLatch2
*/
/**
 * CountDownLatch2 Unit Test
 *
 * @see CountDownLatch2
 */
public class CountDownLatch2Test {

    /**
     * Constructing with a negative count must throw {@link IllegalArgumentException}.
     *
     * @see CountDownLatch2#CountDownLatch2(int)
     */
    @Test
    public void testConstructorError() {
        int count = -1;
        try {
            new CountDownLatch2(count);
            // Bug fix: previously the test passed silently when no exception was thrown.
            fail("Expected IllegalArgumentException for count = " + count);
        } catch (IllegalArgumentException e) {
            assertThat(e.getMessage(), is("count < 0"));
        }
    }

    /**
     * A valid initial count is reported by getCount() and included in toString().
     *
     * @see CountDownLatch2#CountDownLatch2(int)
     */
    @Test
    public void testConstructor() {
        int count = 10;
        CountDownLatch2 latch = new CountDownLatch2(count);
        assertEquals("Expected equal", count, latch.getCount());
        assertThat("Expected contain", latch.toString(), containsString("[Count = " + count + "]"));
    }

    /**
     * await(timeout) returns false while the count is positive and true once it reaches zero.
     *
     * @see CountDownLatch2#await(long, TimeUnit)
     */
    @Test
    public void testAwaitTimeout() throws InterruptedException {
        int count = 1;
        CountDownLatch2 latch = new CountDownLatch2(count);
        boolean await = latch.await(10, TimeUnit.MILLISECONDS);
        assertFalse("Expected false", await);

        latch.countDown();
        boolean await2 = latch.await(10, TimeUnit.MILLISECONDS);
        assertTrue("Expected true", await2);
    }

    /**
     * countDown() decrements the count and await() returns once it reaches zero.
     *
     * @see CountDownLatch2#countDown()
     */
    @Test(timeout = 1000)
    public void testCountDownAndGetCount() throws InterruptedException {
        int count = 2;
        CountDownLatch2 latch = new CountDownLatch2(count);
        assertEquals("Expected equal", count, latch.getCount());
        latch.countDown();
        assertEquals("Expected equal", count - 1, latch.getCount());
        latch.countDown();
        latch.await();
        assertEquals("Expected equal", 0, latch.getCount());
    }

    /**
     * reset() restores the initial count; counting down past zero stays at zero.
     *
     * @see CountDownLatch2#reset()
     */
    @Test
    public void testReset() throws InterruptedException {
        int count = 2;
        CountDownLatch2 latch = new CountDownLatch2(count);
        latch.countDown();
        assertEquals("Expected equal", count - 1, latch.getCount());

        latch.reset();
        assertEquals("Expected equal", count, latch.getCount());

        latch.countDown();
        latch.countDown();
        latch.await();
        assertEquals("Expected equal", 0, latch.getCount());

        // coverage Sync#tryReleaseShared, c==0
        latch.countDown();
        assertEquals("Expected equal", 0, latch.getCount());
    }
}
| apache-2.0 |
gradle/gradle | subprojects/language-native/src/main/java/org/gradle/language/nativeplatform/internal/incremental/DefaultCompilationStateCacheFactory.java | 3667 | /*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.language.nativeplatform.internal.incremental;
import org.gradle.cache.FileLockManager;
import org.gradle.cache.PersistentCache;
import org.gradle.cache.PersistentIndexedCache;
import org.gradle.cache.PersistentIndexedCacheParameters;
import org.gradle.cache.PersistentStateCache;
import org.gradle.cache.internal.InMemoryCacheDecoratorFactory;
import org.gradle.cache.scopes.BuildScopedCache;
import org.gradle.internal.service.scopes.Scopes;
import org.gradle.internal.service.scopes.ServiceScope;
import java.io.Closeable;
import static org.gradle.cache.internal.filelock.LockOptionsBuilder.mode;
@ServiceScope(Scopes.Gradle.class)
public class DefaultCompilationStateCacheFactory implements CompilationStateCacheFactory, Closeable {
private final PersistentIndexedCache<String, CompilationState> compilationStateIndexedCache;
private final PersistentCache cache;
public DefaultCompilationStateCacheFactory(BuildScopedCache cacheRepository, InMemoryCacheDecoratorFactory inMemoryCacheDecoratorFactory) {
cache = cacheRepository
.cache("nativeCompile")
.withDisplayName("native compile cache")
.withLockOptions(mode(FileLockManager.LockMode.OnDemand)) // Lock on demand
.open();
PersistentIndexedCacheParameters<String, CompilationState> parameters = PersistentIndexedCacheParameters.of("nativeCompile", String.class, new CompilationStateSerializer())
.withCacheDecorator(inMemoryCacheDecoratorFactory.decorator(2000, false));
compilationStateIndexedCache = cache.createCache(parameters);
}
@Override
public void close() {
cache.close();
}
@Override
public PersistentStateCache<CompilationState> create(String taskPath) {
return new PersistentCompilationStateCache(taskPath, compilationStateIndexedCache);
}
private static class PersistentCompilationStateCache implements PersistentStateCache<CompilationState> {
private final String taskPath;
private final PersistentIndexedCache<String, CompilationState> compilationStateIndexedCache;
PersistentCompilationStateCache(String taskPath, PersistentIndexedCache<String, CompilationState> compilationStateIndexedCache) {
this.taskPath = taskPath;
this.compilationStateIndexedCache = compilationStateIndexedCache;
}
@Override
public CompilationState get() {
return compilationStateIndexedCache.getIfPresent(taskPath);
}
@Override
public void set(CompilationState newValue) {
compilationStateIndexedCache.put(taskPath, newValue);
}
@Override
public CompilationState update(UpdateAction<CompilationState> updateAction) {
throw new UnsupportedOperationException();
}
@Override
public CompilationState maybeUpdate(UpdateAction<CompilationState> updateAction) {
throw new UnsupportedOperationException();
}
}
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-storagegateway/src/main/java/com/amazonaws/services/storagegateway/model/transform/CreateStorediSCSIVolumeResultJsonUnmarshaller.java | 3589 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.storagegateway.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* CreateStorediSCSIVolumeResult JSON Unmarshaller
*/
// Generated-style streaming unmarshaller: walks JSON tokens one level below
// the current depth and copies the recognized fields (VolumeARN,
// VolumeSizeInBytes, TargetARN) into the result object.
public class CreateStorediSCSIVolumeResultJsonUnmarshaller implements
        Unmarshaller<CreateStorediSCSIVolumeResult, JsonUnmarshallerContext> {
    public CreateStorediSCSIVolumeResult unmarshall(
            JsonUnmarshallerContext context) throws Exception {
        CreateStorediSCSIVolumeResult createStorediSCSIVolumeResult = new CreateStorediSCSIVolumeResult();
        // Fields belonging to this object live exactly one level deeper than
        // the depth at which unmarshalling started.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;
        JsonToken token = context.getCurrentToken();
        // Advance to the first token if the caller has not positioned us yet.
        if (token == null)
            token = context.nextToken();
        // A JSON null for the whole object means "no result".
        if (token == VALUE_NULL)
            return null;
        while (true) {
            if (token == null)
                break;
            if (token == FIELD_NAME || token == START_OBJECT) {
                // Each testExpression call matches the field name at the
                // expected depth; nextToken() then moves onto the value.
                if (context.testExpression("VolumeARN", targetDepth)) {
                    context.nextToken();
                    createStorediSCSIVolumeResult.setVolumeARN(context
                            .getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("VolumeSizeInBytes", targetDepth)) {
                    context.nextToken();
                    createStorediSCSIVolumeResult.setVolumeSizeInBytes(context
                            .getUnmarshaller(Long.class).unmarshall(context));
                }
                if (context.testExpression("TargetARN", targetDepth)) {
                    context.nextToken();
                    createStorediSCSIVolumeResult.setTargetARN(context
                            .getUnmarshaller(String.class).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once we have closed back out to the depth we started at.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }
        return createStorediSCSIVolumeResult;
    }
    private static CreateStorediSCSIVolumeResultJsonUnmarshaller instance;
    // Lazily-initialized singleton. NOTE(review): initialization is not
    // synchronized; this is benign only because the instance is stateless —
    // confirm against the SDK's code-generation conventions.
    public static CreateStorediSCSIVolumeResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateStorediSCSIVolumeResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
apache/kylin | core-dictionary/src/main/java/org/apache/kylin/dict/global/GlobalDictMetadata.java | 1963 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kylin.dict.global;
import com.google.common.base.Preconditions;
import org.apache.kylin.dict.BytesConverter;
import java.util.NavigableMap;
import java.util.TreeMap;
/**
* Encapsulates the metadata for a particular version of the global dictionary.
* Usually each version of a global dictionary stores its metadata in an index file.
*/
public class GlobalDictMetadata {
    /** First id of this dictionary version. */
    public final int baseId;
    /** Largest id assigned in this dictionary version. */
    public final int maxId;
    /** Length in bytes of the longest dictionary value. */
    public final int maxValueLength;
    /** Number of values held by this dictionary version. */
    public final int nValues;
    /** Converter used to translate values to/from their byte form. */
    public final BytesConverter bytesConverter;
    /** Maps each slice key to the file name of the slice that stores it. */
    public final TreeMap<AppendDictSliceKey, String> sliceFileMap; // slice key -> slice file name
    public GlobalDictMetadata(int baseId, int maxId, int maxValueLength, int nValues, BytesConverter bytesConverter, NavigableMap<AppendDictSliceKey, String> sliceFileMap) {
        // checkNotNull returns its argument, so the null check and the
        // defensive copy can be combined into one expression.
        this.sliceFileMap = new TreeMap<>(Preconditions.checkNotNull(sliceFileMap, "sliceFileMap"));
        this.baseId = baseId;
        this.maxId = maxId;
        this.maxValueLength = maxValueLength;
        this.nValues = nValues;
        this.bytesConverter = bytesConverter;
    }
}
| apache-2.0 |
alibaba/fastjson | src/test/java/com/alibaba/json/bvt/serializer/filters/ValueFilterTest_field_long.java | 3731 | package com.alibaba.json.bvt.serializer.filters;
import java.util.HashMap;
import java.util.Map;
import junit.framework.TestCase;
import org.junit.Assert;
import com.alibaba.fastjson.JSON;
import com.alibaba.fastjson.serializer.JSONSerializer;
import com.alibaba.fastjson.serializer.SerializeWriter;
import com.alibaba.fastjson.serializer.ValueFilter;
public class ValueFilterTest_field_long extends TestCase {
    /**
     * Builds a {@link ValueFilter} that substitutes {@code replacement} for
     * the value of the property named {@code field} and leaves every other
     * property untouched.
     */
    private static ValueFilter replaceValueOf(final String field, final Object replacement) {
        return new ValueFilter() {
            public Object process(Object source, String name, Object value) {
                if (name.equals(field)) {
                    return replacement;
                }
                return value;
            }
        };
    }
    // A filter on "id" rewrites the long field's value to a string.
    public void test_valuefilter() throws Exception {
        SerializeWriter writer = new SerializeWriter();
        JSONSerializer ser = new JSONSerializer(writer);
        ser.getValueFilters().add(replaceValueOf("id", "AAA"));
        ser.write(new Bean());
        Assert.assertEquals("{\"id\":\"AAA\"}", writer.toString());
    }
    // The same filter applied through the JSON.toJSONString convenience API.
    public void test_toJSONString() throws Exception {
        Assert.assertEquals("{\"id\":\"AAA\"}",
                            JSON.toJSONString(new Bean(), replaceValueOf("id", "AAA")));
    }
    // Replacing a null bean field makes it appear in the output.
    public void test_valuefilter_1() throws Exception {
        SerializeWriter writer = new SerializeWriter();
        JSONSerializer ser = new JSONSerializer(writer);
        ser.getValueFilters().add(replaceValueOf("name", "AAA"));
        ser.write(new Bean());
        Assert.assertEquals("{\"id\":0,\"name\":\"AAA\"}", writer.toString());
    }
    // Replacing a null map value makes the key appear in the output.
    public void test_valuefilter_2() throws Exception {
        SerializeWriter writer = new SerializeWriter();
        JSONSerializer ser = new JSONSerializer(writer);
        ser.getValueFilters().add(replaceValueOf("name", "AAA"));
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("name", null);
        ser.write(map);
        Assert.assertEquals("{\"name\":\"AAA\"}", writer.toString());
    }
    // Replacing a value with null drops the entry from the output.
    public void test_valuefilter_3() throws Exception {
        SerializeWriter writer = new SerializeWriter();
        JSONSerializer ser = new JSONSerializer(writer);
        ser.getValueFilters().add(replaceValueOf("name", null));
        Map<String, Object> map = new HashMap<String, Object>();
        map.put("name", "AA");
        ser.write(map);
        Assert.assertEquals("{}", writer.toString());
    }
    public static class Bean {
        public long id;
        public String name;
    }
}
| apache-2.0 |
Bizyroth/hadoop | hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestFifoScheduler.java | 27073 | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.server.resourcemanager;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceOption;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.server.api.protocolrecords.UpdateNodeResourceRequest;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttempt;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptImpl;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.AbstractYarnScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.Allocation;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacitySchedulerConfiguration;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.AppAttemptAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeAddedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeRemovedSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.NodeUpdateSchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.event.SchedulerEvent;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
import org.apache.hadoop.yarn.util.resource.Resources;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestFifoScheduler {
  private static final Log LOG = LogFactory.getLog(TestFifoScheduler.class);
  // One GB expressed in MB; memory sizes below are written as multiples of GB.
  private final int GB = 1024;
  // Shared configuration selecting the FIFO scheduler; built once in setup().
  private static YarnConfiguration conf;
@BeforeClass
public static void setup() {
conf = new YarnConfiguration();
conf.setClass(YarnConfiguration.RM_SCHEDULER,
FifoScheduler.class, ResourceScheduler.class);
}
@Test (timeout = 30000)
public void testConfValidation() throws Exception {
FifoScheduler scheduler = new FifoScheduler();
Configuration conf = new YarnConfiguration();
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 2048);
conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB, 1024);
try {
scheduler.serviceInit(conf);
fail("Exception is expected because the min memory allocation is" +
" larger than the max memory allocation.");
} catch (YarnRuntimeException e) {
// Exception is expected.
assertTrue("The thrown exception is not the expected one.",
e.getMessage().startsWith(
"Invalid resource scheduler memory"));
}
}
  /**
   * Regression test: an allocate call that specifies only node-local and
   * rack-local resource requests (no matching off-switch / ANY request) must
   * not make the scheduler throw an NPE when the node heartbeats.
   */
  @Test
  public void testAllocateContainerOnNodeWithoutOffSwitchSpecified()
      throws Exception {
    Logger rootLogger = LogManager.getRootLogger();
    rootLogger.setLevel(Level.DEBUG);
    MockRM rm = new MockRM(conf);
    rm.start();
    MockNM nm1 = rm.registerNode("127.0.0.1:1234", 6 * GB);
    RMApp app1 = rm.submitApp(2048);
    // kick the scheduling, 2 GB given to AM1, remaining 4GB on nm1
    nm1.nodeHeartbeat(true);
    RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
    MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
    am1.registerAppAttempt();
    // add request for containers: node-local and rack-local only,
    // deliberately omitting the off-switch (ANY) request.
    List<ResourceRequest> requests = new ArrayList<ResourceRequest>();
    requests.add(am1.createResourceReq("127.0.0.1", 1 * GB, 1, 1));
    requests.add(am1.createResourceReq("/default-rack", 1 * GB, 1, 1));
    am1.allocate(requests, null); // send the request
    try {
      // kick the schedule
      nm1.nodeHeartbeat(true);
    } catch (NullPointerException e) {
      Assert.fail("NPE when allocating container on node but "
          + "forget to set off-switch request should be handled");
    }
    rm.stop();
  }
@Test
public void test() throws Exception {
Logger rootLogger = LogManager.getRootLogger();
rootLogger.setLevel(Level.DEBUG);
MockRM rm = new MockRM(conf);
rm.start();
MockNM nm1 = rm.registerNode("127.0.0.1:1234", 6 * GB);
MockNM nm2 = rm.registerNode("127.0.0.2:5678", 4 * GB);
RMApp app1 = rm.submitApp(2048);
// kick the scheduling, 2 GB given to AM1, remaining 4GB on nm1
nm1.nodeHeartbeat(true);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
am1.registerAppAttempt();
SchedulerNodeReport report_nm1 = rm.getResourceScheduler().getNodeReport(
nm1.getNodeId());
Assert.assertEquals(2 * GB, report_nm1.getUsedResource().getMemory());
RMApp app2 = rm.submitApp(2048);
// kick the scheduling, 2GB given to AM, remaining 2 GB on nm2
nm2.nodeHeartbeat(true);
RMAppAttempt attempt2 = app2.getCurrentAppAttempt();
MockAM am2 = rm.sendAMLaunched(attempt2.getAppAttemptId());
am2.registerAppAttempt();
SchedulerNodeReport report_nm2 = rm.getResourceScheduler().getNodeReport(
nm2.getNodeId());
Assert.assertEquals(2 * GB, report_nm2.getUsedResource().getMemory());
// add request for containers
am1.addRequests(new String[] { "127.0.0.1", "127.0.0.2" }, GB, 1, 1);
AllocateResponse alloc1Response = am1.schedule(); // send the request
// add request for containers
am2.addRequests(new String[] { "127.0.0.1", "127.0.0.2" }, 3 * GB, 0, 1);
AllocateResponse alloc2Response = am2.schedule(); // send the request
// kick the scheduler, 1 GB and 3 GB given to AM1 and AM2, remaining 0
nm1.nodeHeartbeat(true);
while (alloc1Response.getAllocatedContainers().size() < 1) {
LOG.info("Waiting for containers to be created for app 1...");
Thread.sleep(1000);
alloc1Response = am1.schedule();
}
while (alloc2Response.getAllocatedContainers().size() < 1) {
LOG.info("Waiting for containers to be created for app 2...");
Thread.sleep(1000);
alloc2Response = am2.schedule();
}
// kick the scheduler, nothing given remaining 2 GB.
nm2.nodeHeartbeat(true);
List<Container> allocated1 = alloc1Response.getAllocatedContainers();
Assert.assertEquals(1, allocated1.size());
Assert.assertEquals(1 * GB, allocated1.get(0).getResource().getMemory());
Assert.assertEquals(nm1.getNodeId(), allocated1.get(0).getNodeId());
List<Container> allocated2 = alloc2Response.getAllocatedContainers();
Assert.assertEquals(1, allocated2.size());
Assert.assertEquals(3 * GB, allocated2.get(0).getResource().getMemory());
Assert.assertEquals(nm1.getNodeId(), allocated2.get(0).getNodeId());
report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
report_nm2 = rm.getResourceScheduler().getNodeReport(nm2.getNodeId());
Assert.assertEquals(0, report_nm1.getAvailableResource().getMemory());
Assert.assertEquals(2 * GB, report_nm2.getAvailableResource().getMemory());
Assert.assertEquals(6 * GB, report_nm1.getUsedResource().getMemory());
Assert.assertEquals(2 * GB, report_nm2.getUsedResource().getMemory());
Container c1 = allocated1.get(0);
Assert.assertEquals(GB, c1.getResource().getMemory());
ContainerStatus containerStatus = BuilderUtils.newContainerStatus(
c1.getId(), ContainerState.COMPLETE, "", 0);
nm1.containerStatus(containerStatus);
int waitCount = 0;
while (attempt1.getJustFinishedContainers().size() < 1
&& waitCount++ != 20) {
LOG.info("Waiting for containers to be finished for app 1... Tried "
+ waitCount + " times already..");
Thread.sleep(1000);
}
Assert.assertEquals(1, attempt1.getJustFinishedContainers().size());
Assert.assertEquals(1, am1.schedule().getCompletedContainersStatuses().size());
report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
Assert.assertEquals(5 * GB, report_nm1.getUsedResource().getMemory());
rm.stop();
}
  /**
   * A node update arriving after the application is added but before any
   * attempt is initialized must not crash the scheduler with an NPE.
   */
  @Test
  public void testNodeUpdateBeforeAppAttemptInit() throws Exception {
    FifoScheduler scheduler = new FifoScheduler();
    MockRM rm = new MockRM(conf);
    scheduler.setRMContext(rm.getRMContext());
    scheduler.init(conf);
    scheduler.start();
    scheduler.reinitialize(conf, rm.getRMContext());
    RMNode node = MockNodes.newNodeInfo(1,
        Resources.createResource(1024, 4), 1, "127.0.0.1");
    scheduler.handle(new NodeAddedSchedulerEvent(node));
    // Application registered, but no attempt yet — this is the racy window
    // the test exercises.
    ApplicationId appId = ApplicationId.newInstance(0, 1);
    scheduler.addApplication(appId, "queue1", "user1", false);
    NodeUpdateSchedulerEvent updateEvent = new NodeUpdateSchedulerEvent(node);
    try {
      scheduler.handle(updateEvent);
    } catch (NullPointerException e) {
      Assert.fail();
    }
    // Adding the attempt afterwards must still succeed.
    ApplicationAttemptId attId = ApplicationAttemptId.newInstance(appId, 1);
    scheduler.addApplicationAttempt(attId, false, false);
    // NOTE(review): the started scheduler itself is never stopped here, only
    // the MockRM — confirm this does not leak across tests.
    rm.stop();
  }
private void testMinimumAllocation(YarnConfiguration conf, int testAlloc)
throws Exception {
MockRM rm = new MockRM(conf);
rm.start();
// Register node1
MockNM nm1 = rm.registerNode("127.0.0.1:1234", 6 * GB);
// Submit an application
RMApp app1 = rm.submitApp(testAlloc);
// kick the scheduling
nm1.nodeHeartbeat(true);
RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
am1.registerAppAttempt();
SchedulerNodeReport report_nm1 = rm.getResourceScheduler().getNodeReport(
nm1.getNodeId());
int checkAlloc =
conf.getInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB,
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB);
Assert.assertEquals(checkAlloc, report_nm1.getUsedResource().getMemory());
rm.stop();
}
@Test
public void testDefaultMinimumAllocation() throws Exception {
// Test with something lesser than default
testMinimumAllocation(
new YarnConfiguration(TestFifoScheduler.conf),
YarnConfiguration.DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB / 2);
}
@Test
public void testNonDefaultMinimumAllocation() throws Exception {
// Set custom min-alloc to test tweaking it
int allocMB = 1536;
YarnConfiguration conf = new YarnConfiguration(TestFifoScheduler.conf);
conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, allocMB);
conf.setInt(YarnConfiguration.RM_SCHEDULER_MAXIMUM_ALLOCATION_MB,
allocMB * 10);
// Test for something lesser than this.
testMinimumAllocation(conf, allocMB / 2);
}
  /**
   * A node that reconnects with less memory must shrink the scheduler's
   * available-resource accounting accordingly (remove + re-add + update).
   */
  @Test (timeout = 50000)
  public void testReconnectedNode() throws Exception {
    CapacitySchedulerConfiguration conf = new CapacitySchedulerConfiguration();
    conf.setQueues("default", new String[] {"default"});
    conf.setCapacity("default", 100);
    FifoScheduler fs = new FifoScheduler();
    fs.init(conf);
    fs.start();
    // mock rmContext to avoid NPE.
    RMContext context = mock(RMContext.class);
    // NOTE(review): reinitialize is passed null rather than the mocked
    // context; the mock is only installed afterwards via setRMContext —
    // confirm this ordering is intentional.
    fs.reinitialize(conf, null);
    fs.setRMContext(context);
    RMNode n1 =
        MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1, "127.0.0.2");
    RMNode n2 =
        MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 2, "127.0.0.3");
    fs.handle(new NodeAddedSchedulerEvent(n1));
    fs.handle(new NodeAddedSchedulerEvent(n2));
    fs.handle(new NodeUpdateSchedulerEvent(n1));
    Assert.assertEquals(6 * GB, fs.getRootQueueMetrics().getAvailableMB());
    // reconnect n1 with downgraded memory
    n1 =
        MockNodes.newNodeInfo(0, MockNodes.newResource(2 * GB), 1, "127.0.0.2");
    fs.handle(new NodeRemovedSchedulerEvent(n1));
    fs.handle(new NodeAddedSchedulerEvent(n1));
    fs.handle(new NodeUpdateSchedulerEvent(n1));
    Assert.assertEquals(4 * GB, fs.getRootQueueMetrics().getAvailableMB());
    fs.stop();
  }
  /**
   * Verifies FIFO scheduling honors per-application blacklists: containers
   * are never placed on a blacklisted host or on any host of a blacklisted
   * rack, and are placed on the remaining eligible host.
   */
  @Test (timeout = 50000)
  public void testBlackListNodes() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
        ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    rm.start();
    FifoScheduler fs = (FifoScheduler) rm.getResourceScheduler();
    int rack_num_0 = 0;
    int rack_num_1 = 1;
    // Add 4 nodes in 2 racks
    // host_0_0 in rack0
    String host_0_0 = "127.0.0.1";
    RMNode n1 =
        MockNodes.newNodeInfo(rack_num_0, MockNodes.newResource(4 * GB), 1, host_0_0);
    fs.handle(new NodeAddedSchedulerEvent(n1));
    // host_0_1 in rack0
    String host_0_1 = "127.0.0.2";
    RMNode n2 =
        MockNodes.newNodeInfo(rack_num_0, MockNodes.newResource(4 * GB), 1, host_0_1);
    fs.handle(new NodeAddedSchedulerEvent(n2));
    // host_1_0 in rack1
    String host_1_0 = "127.0.0.3";
    RMNode n3 =
        MockNodes.newNodeInfo(rack_num_1, MockNodes.newResource(4 * GB), 1, host_1_0);
    fs.handle(new NodeAddedSchedulerEvent(n3));
    // host_1_1 in rack1
    String host_1_1 = "127.0.0.4";
    RMNode n4 =
        MockNodes.newNodeInfo(rack_num_1, MockNodes.newResource(4 * GB), 1, host_1_1);
    fs.handle(new NodeAddedSchedulerEvent(n4));
    // Add one application
    ApplicationId appId1 = BuilderUtils.newApplicationId(100, 1);
    ApplicationAttemptId appAttemptId1 = BuilderUtils.newApplicationAttemptId(
        appId1, 1);
    createMockRMApp(appAttemptId1, rm.getRMContext());
    SchedulerEvent appEvent =
        new AppAddedSchedulerEvent(appId1, "queue", "user");
    fs.handle(appEvent);
    SchedulerEvent attemptEvent =
        new AppAttemptAddedSchedulerEvent(appAttemptId1, false);
    fs.handle(attemptEvent);
    List<ContainerId> emptyId = new ArrayList<ContainerId>();
    List<ResourceRequest> emptyAsk = new ArrayList<ResourceRequest>();
    // Allow rack-locality for rack_1, but blacklist host_1_0
    // Set up resource requests
    // Ask for a 1 GB container for app 1
    List<ResourceRequest> ask1 = new ArrayList<ResourceRequest>();
    ask1.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
        "rack1", BuilderUtils.newResource(GB, 1), 1));
    ask1.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
        ResourceRequest.ANY, BuilderUtils.newResource(GB, 1), 1));
    fs.allocate(appAttemptId1, ask1, emptyId, Collections.singletonList(host_1_0), null);
    // Trigger container assignment
    fs.handle(new NodeUpdateSchedulerEvent(n3));
    // Get the allocation for the application and verify no allocation on blacklist node
    Allocation allocation1 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation1", 0, allocation1.getContainers().size());
    // verify host_1_1 can get allocated as not in blacklist
    fs.handle(new NodeUpdateSchedulerEvent(n4));
    Allocation allocation2 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation2", 1, allocation2.getContainers().size());
    List<Container> containerList = allocation2.getContainers();
    for (Container container : containerList) {
      Assert.assertEquals("Container is allocated on n4",
          container.getNodeId(), n4.getNodeID());
    }
    // Ask for a 1 GB container again for app 1
    List<ResourceRequest> ask2 = new ArrayList<ResourceRequest>();
    // this time, rack0 is also in blacklist, so only host_1_1 is available to
    // be assigned
    ask2.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
        ResourceRequest.ANY, BuilderUtils.newResource(GB, 1), 1));
    fs.allocate(appAttemptId1, ask2, emptyId, Collections.singletonList("rack0"), null);
    // verify n1 is not qualified to be allocated
    fs.handle(new NodeUpdateSchedulerEvent(n1));
    Allocation allocation3 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation3", 0, allocation3.getContainers().size());
    // verify n2 is not qualified to be allocated
    fs.handle(new NodeUpdateSchedulerEvent(n2));
    Allocation allocation4 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation4", 0, allocation4.getContainers().size());
    // verify n3 is not qualified to be allocated
    fs.handle(new NodeUpdateSchedulerEvent(n3));
    Allocation allocation5 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation5", 0, allocation5.getContainers().size());
    // n4 is the only host neither blacklisted directly nor via its rack
    fs.handle(new NodeUpdateSchedulerEvent(n4));
    Allocation allocation6 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("allocation6", 1, allocation6.getContainers().size());
    containerList = allocation6.getContainers();
    for (Container container : containerList) {
      Assert.assertEquals("Container is allocated on n4",
          container.getNodeId(), n4.getNodeID());
    }
    rm.stop();
  }
  /**
   * With a 4 GB node, a 1 GB allocation for app1 and a 2 GB allocation for
   * app2, both applications must be told a headroom of the remaining 1 GB.
   */
  @Test (timeout = 50000)
  public void testHeadroom() throws Exception {
    Configuration conf = new Configuration();
    conf.setClass(YarnConfiguration.RM_SCHEDULER, FifoScheduler.class,
        ResourceScheduler.class);
    MockRM rm = new MockRM(conf);
    rm.start();
    FifoScheduler fs = (FifoScheduler) rm.getResourceScheduler();
    // Add a node
    RMNode n1 =
        MockNodes.newNodeInfo(0, MockNodes.newResource(4 * GB), 1, "127.0.0.2");
    fs.handle(new NodeAddedSchedulerEvent(n1));
    // Add two applications
    ApplicationId appId1 = BuilderUtils.newApplicationId(100, 1);
    ApplicationAttemptId appAttemptId1 = BuilderUtils.newApplicationAttemptId(
        appId1, 1);
    createMockRMApp(appAttemptId1, rm.getRMContext());
    SchedulerEvent appEvent =
        new AppAddedSchedulerEvent(appId1, "queue", "user");
    fs.handle(appEvent);
    SchedulerEvent attemptEvent =
        new AppAttemptAddedSchedulerEvent(appAttemptId1, false);
    fs.handle(attemptEvent);
    ApplicationId appId2 = BuilderUtils.newApplicationId(200, 2);
    ApplicationAttemptId appAttemptId2 = BuilderUtils.newApplicationAttemptId(
        appId2, 1);
    createMockRMApp(appAttemptId2, rm.getRMContext());
    SchedulerEvent appEvent2 =
        new AppAddedSchedulerEvent(appId2, "queue", "user");
    fs.handle(appEvent2);
    SchedulerEvent attemptEvent2 =
        new AppAttemptAddedSchedulerEvent(appAttemptId2, false);
    fs.handle(attemptEvent2);
    List<ContainerId> emptyId = new ArrayList<ContainerId>();
    List<ResourceRequest> emptyAsk = new ArrayList<ResourceRequest>();
    // Set up resource requests
    // Ask for a 1 GB container for app 1
    List<ResourceRequest> ask1 = new ArrayList<ResourceRequest>();
    ask1.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
        ResourceRequest.ANY, BuilderUtils.newResource(GB, 1), 1));
    fs.allocate(appAttemptId1, ask1, emptyId, null, null);
    // Ask for a 2 GB container for app 2
    List<ResourceRequest> ask2 = new ArrayList<ResourceRequest>();
    ask2.add(BuilderUtils.newResourceRequest(BuilderUtils.newPriority(0),
        ResourceRequest.ANY, BuilderUtils.newResource(2 * GB, 1), 1));
    fs.allocate(appAttemptId2, ask2, emptyId, null, null);
    // Trigger container assignment
    fs.handle(new NodeUpdateSchedulerEvent(n1));
    // Get the allocation for the applications and verify headroom
    // (4 GB total - 1 GB app1 - 2 GB app2 = 1 GB remaining for each).
    Allocation allocation1 = fs.allocate(appAttemptId1, emptyAsk, emptyId, null, null);
    Assert.assertEquals("Allocation headroom", 1 * GB,
        allocation1.getResourceLimit().getMemory());
    Allocation allocation2 = fs.allocate(appAttemptId2, emptyAsk, emptyId, null, null);
    Assert.assertEquals("Allocation headroom", 1 * GB,
        allocation2.getResourceLimit().getMemory());
    rm.stop();
  }
/**
 * Verifies scheduler behavior when a node's resource is shrunk below its current
 * usage via {@code AdminService#updateNodeResource}: the node may become
 * over-committed (negative available memory), and completing a running container
 * brings the node report back to a consistent state.
 */
@Test
public void testResourceOverCommit() throws Exception {
  int waitCount;
  MockRM rm = new MockRM(conf);
  rm.start();

  MockNM nm1 = rm.registerNode("127.0.0.1:1234", 4 * GB);
  RMApp app1 = rm.submitApp(2048);
  // kick the scheduling, 2 GB given to AM1, remaining 2GB on nm1
  nm1.nodeHeartbeat(true);
  RMAppAttempt attempt1 = app1.getCurrentAppAttempt();
  MockAM am1 = rm.sendAMLaunched(attempt1.getAppAttemptId());
  am1.registerAppAttempt();
  SchedulerNodeReport report_nm1 = rm.getResourceScheduler().getNodeReport(
      nm1.getNodeId());
  // check node report, 2 GB used and 2 GB available
  Assert.assertEquals(2 * GB, report_nm1.getUsedResource().getMemory());
  Assert.assertEquals(2 * GB, report_nm1.getAvailableResource().getMemory());

  // add request for containers
  am1.addRequests(new String[] { "127.0.0.1", "127.0.0.2" }, 2 * GB, 1, 1);
  AllocateResponse alloc1Response = am1.schedule(); // send the request
  // kick the scheduler, 2 GB given to AM1, resource remaining 0
  nm1.nodeHeartbeat(true);
  // Poll (1s interval) until the requested container appears in an allocate
  // response; schedule() both re-sends and fetches the latest allocations.
  while (alloc1Response.getAllocatedContainers().size() < 1) {
    LOG.info("Waiting for containers to be created for app 1...");
    Thread.sleep(1000);
    alloc1Response = am1.schedule();
  }
  List<Container> allocated1 = alloc1Response.getAllocatedContainers();
  Assert.assertEquals(1, allocated1.size());
  Assert.assertEquals(2 * GB, allocated1.get(0).getResource().getMemory());
  Assert.assertEquals(nm1.getNodeId(), allocated1.get(0).getNodeId());

  report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
  // check node report, 4 GB used and 0 GB available
  Assert.assertEquals(0, report_nm1.getAvailableResource().getMemory());
  Assert.assertEquals(4 * GB, report_nm1.getUsedResource().getMemory());
  // check container is assigned with 2 GB.
  Container c1 = allocated1.get(0);
  Assert.assertEquals(2 * GB, c1.getResource().getMemory());

  // update node resource to 2 GB, so resource is over-consumed.
  Map<NodeId, ResourceOption> nodeResourceMap =
      new HashMap<NodeId, ResourceOption>();
  nodeResourceMap.put(nm1.getNodeId(),
      ResourceOption.newInstance(Resource.newInstance(2 * GB, 1), -1));
  UpdateNodeResourceRequest request =
      UpdateNodeResourceRequest.newInstance(nodeResourceMap);
  AdminService as = rm.adminService;
  as.updateNodeResource(request);

  // Wait up to ~20s for the resource-update event to be handled; the node
  // report shows a non-zero available resource once it has been applied.
  waitCount = 0;
  while (waitCount++ != 20) {
    report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
    if (null != report_nm1 &&
        report_nm1.getAvailableResource().getMemory() != 0) {
      break;
    }
    LOG.info("Waiting for RMNodeResourceUpdateEvent to be handled... Tried "
        + waitCount + " times already..");
    Thread.sleep(1000);
  }
  // Now, the used resource is still 4 GB, and available resource is minus value.
  Assert.assertEquals(4 * GB, report_nm1.getUsedResource().getMemory());
  Assert.assertEquals(-2 * GB, report_nm1.getAvailableResource().getMemory());

  // Check container can complete successfully in case of resource over-commitment.
  ContainerStatus containerStatus = BuilderUtils.newContainerStatus(
      c1.getId(), ContainerState.COMPLETE, "", 0);
  nm1.containerStatus(containerStatus);
  // Poll (100ms interval, max 20 tries) until the attempt records the finish.
  waitCount = 0;
  while (attempt1.getJustFinishedContainers().size() < 1
      && waitCount++ != 20) {
    LOG.info("Waiting for containers to be finished for app 1... Tried "
        + waitCount + " times already..");
    Thread.sleep(100);
  }
  Assert.assertEquals(1, attempt1.getJustFinishedContainers().size());
  Assert.assertEquals(1, am1.schedule().getCompletedContainersStatuses().size());
  report_nm1 = rm.getResourceScheduler().getNodeReport(nm1.getNodeId());
  Assert.assertEquals(2 * GB, report_nm1.getUsedResource().getMemory());
  // As container return 2 GB back, the available resource becomes 0 again.
  Assert.assertEquals(0 * GB, report_nm1.getAvailableResource().getMemory());
  rm.stop();
}
/**
 * Standalone entry point that runs a fixed subset of the FIFO scheduler tests
 * sequentially, outside of a test harness.
 */
public static void main(String[] args) throws Exception {
  TestFifoScheduler fifoTests = new TestFifoScheduler();
  fifoTests.test();
  fifoTests.testDefaultMinimumAllocation();
  fifoTests.testNonDefaultMinimumAllocation();
  fifoTests.testReconnectedNode();
}
/**
 * Builds a minimal mocked {@code RMAppImpl} whose current attempt carries the
 * given attempt id (with attempt metrics stubbed), and registers it in the
 * supplied RM context so the scheduler can resolve events for the app.
 */
private RMAppImpl createMockRMApp(ApplicationAttemptId attemptId,
    RMContext context) {
  // Stub the attempt and its metrics first, then wire it into the app mock.
  RMAppAttemptImpl mockAttempt = mock(RMAppAttemptImpl.class);
  when(mockAttempt.getAppAttemptId()).thenReturn(attemptId);
  RMAppAttemptMetrics mockAttemptMetrics = mock(RMAppAttemptMetrics.class);
  when(mockAttempt.getRMAppAttemptMetrics()).thenReturn(mockAttemptMetrics);

  RMAppImpl mockApp = mock(RMAppImpl.class);
  when(mockApp.getApplicationId()).thenReturn(attemptId.getApplicationId());
  when(mockApp.getCurrentAppAttempt()).thenReturn(mockAttempt);

  context.getRMApps().putIfAbsent(attemptId.getApplicationId(), mockApp);
  return mockApp;
}
}
| apache-2.0 |
caskdata/coopr | coopr-server/src/main/java/co/cask/coopr/store/cluster/SQLClusterStore.java | 17083 | package co.cask.coopr.store.cluster;
import co.cask.coopr.cluster.Cluster;
import co.cask.coopr.cluster.ClusterSummary;
import co.cask.coopr.cluster.Node;
import co.cask.coopr.scheduler.task.ClusterJob;
import co.cask.coopr.scheduler.task.ClusterTask;
import co.cask.coopr.scheduler.task.JobId;
import co.cask.coopr.scheduler.task.TaskId;
import co.cask.coopr.spec.ProvisionerAction;
import co.cask.coopr.store.DBConnectionPool;
import co.cask.coopr.store.DBHelper;
import co.cask.coopr.store.DBPut;
import co.cask.coopr.store.DBQueryExecutor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.IOException;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import java.util.Set;
/**
* A full view of the cluster store backed by a sql database.
*/
public class SQLClusterStore implements ClusterStore {
  private static final Logger LOG = LoggerFactory.getLogger(SQLClusterStore.class);

  private final DBQueryExecutor dbQueryExecutor;
  private final DBConnectionPool dbConnectionPool;
  // System-wide (tenant/user agnostic) view; cluster-centric operations delegate to it.
  private final ClusterStoreView systemView;

  SQLClusterStore(DBConnectionPool dbConnectionPool, DBQueryExecutor dbQueryExecutor) {
    this.dbConnectionPool = dbConnectionPool;
    this.dbQueryExecutor = dbQueryExecutor;
    this.systemView = new SQLSystemClusterStoreView(dbConnectionPool, dbQueryExecutor);
  }

  /**
   * Fetches the serialized job for the given id, or null if absent.
   */
  @Override
  public ClusterJob getClusterJob(JobId jobId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement = conn.prepareStatement("SELECT job FROM jobs WHERE job_num=? AND cluster_id=?");
        try {
          statement.setLong(1, jobId.getJobNum());
          statement.setLong(2, Long.parseLong(jobId.getClusterId()));
          return dbQueryExecutor.getQueryItem(statement, ClusterJob.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      LOG.error("Exception getting cluster job {}", jobId, e);
      throw new IOException("Exception getting cluster job " + jobId, e);
    }
  }

  /**
   * Inserts or updates the job row; the DBPut tries an UPDATE first and falls
   * back to an INSERT if no row was touched.
   */
  @Override
  public void writeClusterJob(ClusterJob clusterJob) throws IOException {
    JobId jobId = JobId.fromString(clusterJob.getJobId());
    long clusterId = Long.parseLong(jobId.getClusterId());
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        byte[] jobBytes = dbQueryExecutor.toBytes(clusterJob, ClusterJob.class);
        DBPut jobPut = new ClusterJobDBPut(clusterJob, jobBytes, jobId, clusterId);
        jobPut.executePut(conn);
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteClusterJob(JobId jobId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement = conn.prepareStatement("DELETE FROM jobs WHERE job_num=? AND cluster_id=?");
        try {
          statement.setLong(1, jobId.getJobNum());
          statement.setLong(2, Long.parseLong(jobId.getClusterId()));
          statement.executeUpdate();
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  /**
   * Fetches the serialized task for the given id, or null if absent.
   */
  @Override
  public ClusterTask getClusterTask(TaskId taskId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement =
          conn.prepareStatement("SELECT task FROM tasks WHERE task_num=? AND cluster_id=? AND job_num=?");
        try {
          statement.setLong(1, taskId.getTaskNum());
          statement.setLong(2, Long.parseLong(taskId.getClusterId()));
          statement.setLong(3, taskId.getJobNum());
          return dbQueryExecutor.getQueryItem(statement, ClusterTask.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      LOG.error("Exception getting cluster task {}", taskId, e);
      throw new IOException("Exception getting cluster task " + taskId, e);
    }
  }

  /**
   * Returns completed CREATE/DELETE tasks matching the filter, ordered by
   * status time. Each non-null filter field adds an "AND field = ?" clause,
   * and the bind parameters are assigned in the same order the clauses were
   * appended.
   */
  @Override
  public List<ClusterTask> getClusterTasks(ClusterTaskFilter filter) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        StringBuilder builder = new StringBuilder("SELECT task FROM tasks WHERE status = ? AND type IN (?,?)")
          .append(addFilter("tenant_id = ", filter.getTenantId()))
          .append(addFilter("user_id = ", filter.getUserId()))
          .append(addFilter("cluster_id = ", filter.getClusterId()))
          .append(addFilter("cluster_template_name = ", filter.getClusterTemplate()))
          .append(" ORDER BY status_time ASC");
        PreparedStatement statement =
          conn.prepareStatement(builder.toString());
        try {
          int index = initializeFilter(statement, ClusterTask.Status.COMPLETE.name(), 1);
          index = initializeFilter(statement, ProvisionerAction.CREATE.name(), index);
          index = initializeFilter(statement, ProvisionerAction.DELETE.name(), index);
          index = initializeFilter(statement, filter.getTenantId(), index);
          index = initializeFilter(statement, filter.getUserId(), index);
          index = initializeFilter(statement, filter.getClusterId(), index);
          initializeFilter(statement, filter.getClusterTemplate(), index);
          return dbQueryExecutor.getQueryList(statement, ClusterTask.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      LOG.error("Exception getting cluster tasks by filters {}", filter, e);
      throw new IOException("Exception getting cluster tasks by filters " + filter, e);
    }
  }

  // Returns an " AND <key>?" SQL fragment for a present filter value, or "" if the
  // value is null (no clause, no bind parameter).
  private String addFilter(String key, Object value) {
    if (value == null) {
      return "";
    }
    return String.format(" AND %s?", key);
  }

  // Binds the value at the given parameter index if non-null, returning the next
  // free index; a null value consumes no index (its clause was never appended).
  private int initializeFilter(PreparedStatement statement, String value, int index) throws SQLException {
    if (value == null) {
      return index;
    }
    statement.setString(index, value);
    return ++index;
  }

  /**
   * Inserts or updates the task row (UPDATE first, INSERT on miss).
   */
  @Override
  public void writeClusterTask(ClusterTask clusterTask) throws IOException {
    TaskId taskId = TaskId.fromString(clusterTask.getTaskId());
    long clusterId = Long.parseLong(taskId.getClusterId());
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        byte[] taskBytes = dbQueryExecutor.toBytes(clusterTask, ClusterTask.class);
        DBPut taskPut = new ClusterTaskDBPut(clusterTask, taskBytes, taskId, clusterId);
        taskPut.executePut(conn);
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteClusterTask(TaskId taskId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement =
          conn.prepareStatement("DELETE FROM tasks WHERE task_num=? AND cluster_id=? AND job_num=?");
        try {
          // Bind inside the try so the statement is closed even if binding fails
          // (previously the setLong calls ran before the try/finally).
          statement.setLong(1, taskId.getTaskNum());
          statement.setLong(2, Long.parseLong(taskId.getClusterId()));
          statement.setLong(3, taskId.getJobNum());
          statement.executeUpdate();
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  @Override
  public Node getNode(String nodeId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement = conn.prepareStatement("SELECT node FROM nodes WHERE id=? ");
        try {
          // Bind inside the try so the statement is closed even if binding fails.
          statement.setString(1, nodeId);
          return dbQueryExecutor.getQueryItem(statement, Node.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  /**
   * Inserts or updates the node row (UPDATE first, INSERT on miss).
   */
  @Override
  public void writeNode(Node node) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        byte[] nodeBytes = dbQueryExecutor.toBytes(node, Node.class);
        DBPut nodePut = new NodeDBPut(node, nodeBytes);
        nodePut.executePut(conn);
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  @Override
  public void deleteNode(String nodeId) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement = conn.prepareStatement("DELETE FROM nodes WHERE id=? ");
        try {
          statement.setString(1, nodeId);
          statement.executeUpdate();
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  /**
   * Returns IN_PROGRESS tasks submitted before the given timestamp.
   */
  @Override
  public Set<ClusterTask> getRunningTasks(long timestamp) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement =
          conn.prepareStatement("SELECT task FROM tasks WHERE status = ? AND submit_time < ?");
        try {
          // Bind inside the try so the statement is closed even if binding fails.
          statement.setString(1, ClusterTask.Status.IN_PROGRESS.name());
          statement.setTimestamp(2, DBHelper.getTimestamp(timestamp));
          return dbQueryExecutor.getQuerySet(statement, ClusterTask.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  /**
   * Returns ACTIVE or INCOMPLETE clusters whose expire time is before the
   * given timestamp.
   */
  @Override
  public Set<Cluster> getExpiringClusters(long timestamp) throws IOException {
    try {
      Connection conn = dbConnectionPool.getConnection();
      try {
        PreparedStatement statement =
          conn.prepareStatement("SELECT cluster FROM clusters WHERE status IN (?, ?) AND expire_time < ?");
        try {
          // Bind inside the try so the statement is closed even if binding fails.
          statement.setString(1, Cluster.Status.ACTIVE.name());
          statement.setString(2, Cluster.Status.INCOMPLETE.name());
          statement.setTimestamp(3, DBHelper.getTimestamp(timestamp));
          return dbQueryExecutor.getQuerySet(statement, Cluster.class);
        } finally {
          statement.close();
        }
      } finally {
        conn.close();
      }
    } catch (SQLException e) {
      throw new IOException(e);
    }
  }

  // The remaining cluster-level operations simply delegate to the system view.

  @Override
  public List<Cluster> getAllClusters() throws IOException {
    return systemView.getAllClusters();
  }

  @Override
  public List<ClusterSummary> getAllClusterSummaries() throws IOException {
    return systemView.getAllClusterSummaries();
  }

  @Override
  public List<ClusterSummary> getAllClusterSummaries(Set<Cluster.Status> states) throws IOException {
    return systemView.getAllClusterSummaries(states);
  }

  @Override
  public List<Cluster> getNonTerminatedClusters() throws IOException {
    return systemView.getNonTerminatedClusters();
  }

  @Override
  public Cluster getCluster(String clusterId) throws IOException {
    return systemView.getCluster(clusterId);
  }

  @Override
  public boolean clusterExists(String clusterId) throws IOException {
    return systemView.clusterExists(clusterId);
  }

  @Override
  public void writeCluster(Cluster cluster) throws IllegalAccessException, IOException {
    systemView.writeCluster(cluster);
  }

  @Override
  public void deleteCluster(String clusterId) throws IOException {
    systemView.deleteCluster(clusterId);
  }

  @Override
  public List<ClusterJob> getClusterJobs(String clusterId, int limit) throws IOException {
    return systemView.getClusterJobs(clusterId, limit);
  }

  @Override
  public Set<Node> getClusterNodes(String clusterId) throws IOException {
    return systemView.getClusterNodes(clusterId);
  }

  // Upsert helper for the jobs table.
  private class ClusterJobDBPut extends DBPut {
    private final ClusterJob clusterJob;
    private final byte[] jobBytes;
    private final JobId jobId;
    private final long clusterId;

    private ClusterJobDBPut(ClusterJob clusterJob, byte[] jobBytes, JobId jobId, long clusterId) {
      this.clusterJob = clusterJob;
      this.jobBytes = jobBytes;
      this.jobId = jobId;
      this.clusterId = clusterId;
    }

    @Override
    public PreparedStatement createUpdateStatement(Connection conn) throws SQLException {
      PreparedStatement updateStatement =
        conn.prepareStatement("UPDATE jobs SET job=?, status=? WHERE job_num=? AND cluster_id=?");
      updateStatement.setBytes(1, jobBytes);
      updateStatement.setString(2, clusterJob.getJobStatus().name());
      updateStatement.setLong(3, jobId.getJobNum());
      updateStatement.setLong(4, clusterId);
      return updateStatement;
    }

    @Override
    public PreparedStatement createInsertStatement(Connection conn) throws SQLException {
      PreparedStatement statement = conn.prepareStatement(
        "INSERT INTO jobs (job_num, cluster_id, status, create_time, job) VALUES (?, ?, ?, ?, ?)");
      statement.setLong(1, jobId.getJobNum());
      statement.setLong(2, clusterId);
      statement.setString(3, clusterJob.getJobStatus().name());
      statement.setTimestamp(4, DBHelper.getTimestamp(System.currentTimeMillis()));
      statement.setBytes(5, jobBytes);
      return statement;
    }
  }

  // Upsert helper for the tasks table.
  private class ClusterTaskDBPut extends DBPut {
    private final ClusterTask clusterTask;
    private final byte[] taskBytes;
    private final TaskId taskId;
    private final long clusterId;

    private ClusterTaskDBPut(ClusterTask clusterTask, byte[] taskBytes, TaskId taskId, long clusterId) {
      this.clusterTask = clusterTask;
      this.taskBytes = taskBytes;
      this.taskId = taskId;
      this.clusterId = clusterId;
    }

    @Override
    public PreparedStatement createUpdateStatement(Connection conn) throws SQLException {
      PreparedStatement statement = conn.prepareStatement(
        "UPDATE tasks SET task=?, status=?, submit_time=?, status_time=?, type=?, " +
        "cluster_template_name=?, user_id=?, tenant_id=?" +
        " WHERE task_num=? AND job_num=? AND cluster_id=?");
      // Use the bytes serialized by the caller instead of re-serializing the task;
      // the insert path already does this.
      statement.setBytes(1, taskBytes);
      statement.setString(2, clusterTask.getStatus().name());
      statement.setTimestamp(3, DBHelper.getTimestamp(clusterTask.getSubmitTime()));
      statement.setTimestamp(4, DBHelper.getTimestamp(clusterTask.getStatusTime()));
      statement.setString(5, clusterTask.getTaskName().name());
      statement.setString(6, clusterTask.getClusterTemplateName());
      statement.setString(7, clusterTask.getAccount().getUserId());
      statement.setString(8, clusterTask.getAccount().getTenantId());
      statement.setLong(9, taskId.getTaskNum());
      statement.setLong(10, taskId.getJobNum());
      statement.setLong(11, clusterId);
      return statement;
    }

    @Override
    public PreparedStatement createInsertStatement(Connection conn) throws SQLException {
      PreparedStatement statement = conn.prepareStatement(
        "INSERT INTO tasks (task_num, job_num, cluster_id, status, submit_time, task, type, " +
        "cluster_template_name, user_id, tenant_id)" +
        " VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)");
      statement.setLong(1, taskId.getTaskNum());
      statement.setLong(2, taskId.getJobNum());
      statement.setLong(3, clusterId);
      statement.setString(4, clusterTask.getStatus().name());
      statement.setTimestamp(5, DBHelper.getTimestamp(clusterTask.getSubmitTime()));
      statement.setBytes(6, taskBytes);
      statement.setString(7, clusterTask.getTaskName().name());
      statement.setString(8, clusterTask.getClusterTemplateName());
      statement.setString(9, clusterTask.getAccount().getUserId());
      statement.setString(10, clusterTask.getAccount().getTenantId());
      return statement;
    }
  }

  // Upsert helper for the nodes table.
  private class NodeDBPut extends DBPut {
    private final Node node;
    private final byte[] nodeBytes;

    private NodeDBPut(Node node, byte[] nodeBytes) {
      this.node = node;
      this.nodeBytes = nodeBytes;
    }

    @Override
    public PreparedStatement createUpdateStatement(Connection conn) throws SQLException {
      PreparedStatement statement = conn.prepareStatement("UPDATE nodes SET node=? WHERE id=?");
      statement.setBytes(1, nodeBytes);
      statement.setString(2, node.getId());
      return statement;
    }

    @Override
    public PreparedStatement createInsertStatement(Connection conn) throws SQLException {
      PreparedStatement statement = conn.prepareStatement(
        "INSERT INTO nodes (id, cluster_id, node) VALUES (?, ?, ?)");
      statement.setString(1, node.getId());
      statement.setLong(2, Long.parseLong(node.getClusterId()));
      statement.setBytes(3, nodeBytes);
      return statement;
    }
  }
}
| apache-2.0 |
this/carbon-uuf | components/uuf-core/src/main/java/org/wso2/carbon/uuf/internal/RequestDispatcher.java | 8838 | /*
* Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
*
* WSO2 Inc. licenses this file to you under the Apache License,
* Version 2.0 (the "License"); you may not use this file except
* in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.wso2.carbon.uuf.internal;
import com.google.common.collect.ImmutableList;
import com.google.gson.JsonObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.wso2.carbon.uuf.api.config.Configuration;
import org.wso2.carbon.uuf.api.exception.UUFRuntimeException;
import org.wso2.carbon.uuf.core.App;
import org.wso2.carbon.uuf.internal.debug.DebugLogger;
import org.wso2.carbon.uuf.internal.debug.Debugger;
import org.wso2.carbon.uuf.internal.deployment.AppRegistry;
import org.wso2.carbon.uuf.internal.exception.DeploymentException;
import org.wso2.carbon.uuf.internal.exception.HttpErrorException;
import org.wso2.carbon.uuf.internal.exception.PageRedirectException;
import org.wso2.carbon.uuf.internal.filter.CsrfFilter;
import org.wso2.carbon.uuf.internal.filter.Filter;
import org.wso2.carbon.uuf.internal.filter.FilterResult;
import org.wso2.carbon.uuf.internal.io.StaticResolver;
import org.wso2.carbon.uuf.spi.HttpRequest;
import org.wso2.carbon.uuf.spi.HttpResponse;
import java.util.List;
import static org.wso2.carbon.uuf.spi.HttpResponse.CONTENT_TYPE_APPLICATION_JSON;
import static org.wso2.carbon.uuf.spi.HttpResponse.CONTENT_TYPE_TEXT_HTML;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_CACHE_CONTROL;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_EXPIRES;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_LOCATION;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_PRAGMA;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_X_CONTENT_TYPE_OPTIONS;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_X_FRAME_OPTIONS;
import static org.wso2.carbon.uuf.spi.HttpResponse.HEADER_X_XSS_PROTECTION;
import static org.wso2.carbon.uuf.spi.HttpResponse.STATUS_BAD_REQUEST;
import static org.wso2.carbon.uuf.spi.HttpResponse.STATUS_FOUND;
import static org.wso2.carbon.uuf.spi.HttpResponse.STATUS_INTERNAL_SERVER_ERROR;
import static org.wso2.carbon.uuf.spi.HttpResponse.STATUS_NOT_FOUND;
import static org.wso2.carbon.uuf.spi.HttpResponse.STATUS_OK;
public class RequestDispatcher {

    private static final Logger LOGGER = LoggerFactory.getLogger(RequestDispatcher.class);

    private final StaticResolver staticResolver;
    // Null when debugging is disabled; only consulted behind Debugger.isDebuggingEnabled().
    private final Debugger debugger;
    // Filters run in order before page rendering; currently CSRF only.
    private final List<Filter> filters;

    public RequestDispatcher() {
        this(new StaticResolver(), (Debugger.isDebuggingEnabled() ? new Debugger() : null));
    }

    public RequestDispatcher(StaticResolver staticResolver, Debugger debugger) {
        this.staticResolver = staticResolver;
        this.debugger = debugger;
        this.filters = ImmutableList.of(new CsrfFilter());
    }

    /**
     * Entry point: validates the request, short-circuits default favicon requests,
     * resolves the owning app from the registry by context path, and dispatches to it.
     * Missing apps yield 404; deployment failures yield 500.
     */
    public void serve(HttpRequest request, HttpResponse response, AppRegistry appRegistry) {
        if (!request.isValid()) {
            serveDefaultErrorPage(STATUS_BAD_REQUEST, "Invalid URI '" + request.getUri() + "'.", response);
            return;
        }
        if (request.isDefaultFaviconRequest()) {
            serveDefaultFavicon(request, response);
            return;
        }

        App app;
        try {
            // getApp may lazily deploy the app for this context path.
            app = appRegistry.getApp(request.getContextPath());
        } catch (DeploymentException e) {
            String msg = "Cannot deploy an app for context path '" + request.getContextPath() + "'.";
            LOGGER.error(msg, e);
            serveDefaultErrorPage(STATUS_INTERNAL_SERVER_ERROR, msg, response);
            return;
        }
        if (app == null) {
            serveDefaultErrorPage(STATUS_NOT_FOUND,
                                  "Cannot find an app for context path '" + request.getContextPath() + "'.", response);
            return;
        }
        serve(app, request, response);
    }

    /**
     * Routes a request for a resolved app: static resources, the debugger
     * (when enabled), or page/fragment rendering. Converts the exceptions
     * thrown by rendering into HTTP responses:
     * PageRedirectException -> 302, HttpErrorException -> its status code,
     * anything else -> 500.
     */
    private void serve(App app, HttpRequest request, HttpResponse response) {
        try {
            if (request.isStaticResourceRequest()) {
                staticResolver.serve(app, request, response);
            } else if (Debugger.isDebuggingEnabled() && request.isDebugRequest()) {
                debugger.serve(app, request, response);
            } else {
                servePageOrFragment(app, request, response);
            }
        } catch (PageRedirectException e) {
            response.setStatus(STATUS_FOUND);
            response.setHeader(HEADER_LOCATION, e.getRedirectUrl());
        } catch (HttpErrorException e) {
            serveDefaultErrorPage(e.getHttpStatusCode(), e.getMessage(), response);
        } catch (UUFRuntimeException e) {
            String msg = "A server error occurred while serving for request '" + request + "'.";
            LOGGER.error(msg, e);
            serveDefaultErrorPage(STATUS_INTERNAL_SERVER_ERROR, msg, response);
        } catch (Exception e) {
            String msg = "An unexpected error occurred while serving for request '" + request + "'.";
            LOGGER.error(msg, e);
            serveDefaultErrorPage(STATUS_INTERNAL_SERVER_ERROR, msg, response);
        }
    }

    /**
     * Renders a fragment (JSON) or a page (HTML). Filters run only for pages.
     * Wrapped UUFRuntimeExceptions are unwrapped and rethrown so the caller's
     * catch clauses (e.g. PageRedirectException) can handle them.
     */
    private void servePageOrFragment(App app, HttpRequest request, HttpResponse response) {
        DebugLogger.startRequest(request);
        try {
            // set default and configured http response headers for security purpose
            setResponseSecurityHeaders(app, response);
            if (request.isFragmentRequest()) {
                JsonObject renderedFragment = app.renderFragment(request, response);
                response.setContent(STATUS_OK, renderedFragment.toString(), CONTENT_TYPE_APPLICATION_JSON);
            } else {
                // Execute filters
                Configuration configuration = app.getConfiguration();
                for (Filter filter : filters) {
                    FilterResult result = filter.doFilter(request, configuration);
                    if (!result.isContinue()) {
                        serveDefaultErrorPage(result.getHttpStatusCode(), result.getMessage(), response);
                        return;
                    }
                }
                String html = app.renderPage(request, response);
                response.setContent(STATUS_OK, html, CONTENT_TYPE_TEXT_HTML);
            }
        } catch (UUFRuntimeException e) {
            throw e;
        } catch (Exception e) {
            // May be an UUFRuntimeException cause this 'e' Exception. Let's unwrap 'e' and find out.
            Throwable th = e;
            while ((th = th.getCause()) != null) {
                if (th instanceof UUFRuntimeException) {
                    // Cause of 'e' is an UUFRuntimeException. Throw 'th' so that we can handle it properly.
                    throw (UUFRuntimeException) th;
                }
            }
            // Cause of 'e' is not an UUFException.
            throw e;
        } finally {
            DebugLogger.endRequest(request);
        }
    }

    // Writes a plain-text error body with the given status code.
    private void serveDefaultErrorPage(int httpStatusCode, String content, HttpResponse response) {
        response.setContent(httpStatusCode, content);
    }

    private void serveDefaultFavicon(HttpRequest request, HttpResponse response) {
        staticResolver.serveDefaultFavicon(request, response);
    }

    /**
     * Sets some default mandatory and user configured security related headers to the response path.
     *
     * @param app          the application used with getting the security related configuration.
     * @param httpResponse the http response instance used with setting the headers.
     */
    private void setResponseSecurityHeaders(App app, HttpResponse httpResponse) {
        httpResponse.setHeader(HEADER_X_CONTENT_TYPE_OPTIONS, "nosniff");
        httpResponse.setHeader(HEADER_X_XSS_PROTECTION, "1; mode=block");
        httpResponse.setHeader(HEADER_CACHE_CONTROL, "no-store, no-cache, must-revalidate, private");
        httpResponse.setHeader(HEADER_EXPIRES, "0");
        httpResponse.setHeader(HEADER_PRAGMA, "no-cache");
        httpResponse.setHeader(HEADER_X_FRAME_OPTIONS, "DENY");
        // if there are any headers configured by the user for this app, then add them also to the response
        app.getConfiguration().getResponseHeaders().getPages().forEach(httpResponse::setHeader);
    }
}
| apache-2.0 |
vovagrechka/fucking-everything | phizdets/phizdetsc/src/org/jetbrains/kotlin/js/inline/JsInliner.java | 12950 | /*
* Copyright 2010-2016 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jetbrains.kotlin.js.inline;
import com.intellij.psi.PsiElement;
import kotlin.jvm.functions.Function1;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.kotlin.backend.common.CommonCoroutineCodegenUtilKt;
import org.jetbrains.kotlin.descriptors.CallableDescriptor;
import org.jetbrains.kotlin.descriptors.DeclarationDescriptor;
import org.jetbrains.kotlin.descriptors.FunctionDescriptor;
import org.jetbrains.kotlin.diagnostics.DiagnosticSink;
import org.jetbrains.kotlin.diagnostics.Errors;
import org.jetbrains.kotlin.js.backend.ast.*;
import org.jetbrains.kotlin.js.backend.ast.metadata.MetadataProperties;
import org.jetbrains.kotlin.js.inline.clean.FunctionPostProcessor;
import org.jetbrains.kotlin.js.inline.clean.RemoveUnusedFunctionDefinitionsKt;
import org.jetbrains.kotlin.js.inline.clean.RemoveUnusedLocalFunctionDeclarationsKt;
import org.jetbrains.kotlin.js.inline.context.FunctionContext;
import org.jetbrains.kotlin.js.inline.context.InliningContext;
import org.jetbrains.kotlin.js.inline.context.NamingContext;
import org.jetbrains.kotlin.js.inline.util.CollectUtilsKt;
import org.jetbrains.kotlin.js.inline.util.CollectionUtilsKt;
import org.jetbrains.kotlin.js.inline.util.NamingUtilsKt;
import org.jetbrains.kotlin.js.translate.context.TranslationContext;
import org.jetbrains.kotlin.resolve.inline.InlineStrategy;
import java.util.*;
import static org.jetbrains.kotlin.js.inline.FunctionInlineMutator.getInlineableCallReplacement;
import static org.jetbrains.kotlin.js.translate.utils.JsAstUtils.flattenStatement;
public class JsInliner extends JsVisitorWithContextImpl {
private final Map<JsName, JsFunction> functions;
private final Map<String, JsFunction> accessors;
private final Stack<JsInliningContext> inliningContexts = new Stack<JsInliningContext>();
private final Set<JsFunction> processedFunctions = CollectionUtilsKt.IdentitySet();
private final Set<JsFunction> inProcessFunctions = CollectionUtilsKt.IdentitySet();
private final FunctionReader functionReader;
private final DiagnosticSink trace;
// these are needed for error reporting, when inliner detects cycle
private final Stack<JsFunction> namedFunctionsStack = new Stack<JsFunction>();
private final LinkedList<JsCallInfo> inlineCallInfos = new LinkedList<JsCallInfo>();
// Predicate handed to ExpressionDecomposer: matches invocations this inliner is
// going to inline, so the decomposer can extract them and preserve evaluation order.
private final Function1<JsNode, Boolean> canBeExtractedByInliner = new Function1<JsNode, Boolean>() {
    @Override
    public Boolean invoke(JsNode node) {
        if (!(node instanceof JsInvocation)) return false;

        JsInvocation call = (JsInvocation) node;
        return hasToBeInlined(call);
    }
};
/**
 * Runs the inliner over the whole program: collects named functions and
 * accessors, rewrites dummy accessor invocations, applies inlining, and then
 * drops function definitions that became unused.
 */
public static JsProgram process(@NotNull TranslationContext context) {
    JsProgram program = context.program();
    Map<JsName, JsFunction> namedFunctions = CollectUtilsKt.collectNamedFunctions(program);
    Map<String, JsFunction> propertyAccessors = CollectUtilsKt.collectAccessors(program);

    // Normalize dummy accessor calls before inlining proper.
    new DummyAccessorInvocationTransformer().accept(program);

    JsInliner inliner =
            new JsInliner(namedFunctions, propertyAccessors, new FunctionReader(context), context.bindingTrace());
    inliner.accept(program);

    RemoveUnusedFunctionDefinitionsKt.removeUnusedFunctionDefinitions(program, namedFunctions);
    return program;
}
/**
 * @param functions named functions of the program, keyed by their JsName
 * @param accessors property accessor functions, keyed by accessor string
 * @param functionReader source of inline function bodies from other modules
 * @param trace sink for inliner diagnostics (e.g. inline cycle errors)
 */
private JsInliner(
        @NotNull Map<JsName, JsFunction> functions,
        @NotNull Map<String, JsFunction> accessors,
        @NotNull FunctionReader functionReader,
        @NotNull DiagnosticSink trace
) {
    this.functions = functions;
    this.accessors = accessors;
    this.functionReader = functionReader;
    this.trace = trace;
}
// Entering a function: push a fresh inlining context and mark the function as
// in-process (used for inline-cycle detection). Named functions are also pushed
// onto a stack used for error reporting.
@Override
public boolean visit(@NotNull JsFunction function, @NotNull JsContext context) {
    inliningContexts.push(new JsInliningContext(function));
    assert !inProcessFunctions.contains(function): "Inliner has revisited function";
    inProcessFunctions.add(function);

    if (functions.containsValue(function)) {
        namedFunctionsStack.push(function);
    }

    return super.visit(function, context);
}
// Leaving a function: clean up the processed body (label refresh, unused local
// function removal, post-processing), mark it processed, and unwind the state
// pushed by visit().
@Override
public void endVisit(@NotNull JsFunction function, @NotNull JsContext context) {
    super.endVisit(function, context);
    NamingUtilsKt.refreshLabelNames(function.getBody(), function.getScope());

    RemoveUnusedLocalFunctionDeclarationsKt.removeUnusedLocalFunctionDeclarations(function);
    processedFunctions.add(function);

    new FunctionPostProcessor(function).apply();

    assert inProcessFunctions.contains(function);
    inProcessFunctions.remove(function);

    inliningContexts.pop();

    // Pop the named-function stack only if this function is its top entry.
    if (!namedFunctionsStack.empty() && namedFunctionsStack.peek() == function) {
        namedFunctionsStack.pop();
    }
}
/**
 * Visits a call site. For calls that must be inlined, records the call for
 * cycle reporting and makes sure the callee itself has been inlined first:
 * a callee still "in process" means we found a cycle; an unprocessed callee
 * is visited eagerly before this call is expanded in {@code endVisit}.
 */
@Override
public boolean visit(@NotNull JsInvocation call, @NotNull JsContext context) {
    if (!hasToBeInlined(call)) return true;
    JsFunction containingFunction = getCurrentNamedFunction();
    if (containingFunction != null) {
        inlineCallInfos.add(new JsCallInfo(call, containingFunction));
    }
    JsFunction definition = getFunctionContext().getFunctionDefinition(call);
    if (inProcessFunctions.contains(definition)) {
        reportInlineCycle(call, definition);
    }
    else if (!processedFunctions.contains(definition)) {
        accept(definition);
    }
    return true;
}
/**
 * Performs the actual inlining after the call's children were visited, then
 * pops the matching entry (if any) from the inline-call bookkeeping list.
 */
@Override
public void endVisit(@NotNull JsInvocation x, @NotNull JsContext ctx) {
    if (hasToBeInlined(x)) {
        inline(x, ctx);
    }
    // Remove the record added by visit() once this exact call is done.
    if (!inlineCallInfos.isEmpty() && inlineCallInfos.getLast().call == x) {
        inlineCallInfos.removeLast();
    }
}
/**
 * Before descending into a statement list, splits out sub-expressions whose
 * evaluation order would otherwise be broken by inlining, inserting the
 * extracted statements in front of the statement they came from.
 */
@Override
protected void doAcceptStatementList(List<JsStatement> statements) {
    // At the top level of the JS AST the contexts stack can be empty,
    // but there are no inline calls there anyway.
    if(!inliningContexts.isEmpty()) {
        JsScope scope = getFunctionContext().getScope();
        int i = 0;
        // Skip past the statements just inserted plus the original one.
        while (i < statements.size()) {
            List<JsStatement> additionalStatements =
                    ExpressionDecomposer.preserveEvaluationOrder(scope, statements.get(i), canBeExtractedByInliner);
            statements.addAll(i, additionalStatements);
            i += additionalStatements.size() + 1;
        }
    }
    super.doAcceptStatementList(statements);
}
/**
 * Replaces {@code call} with the callee's inlined body: the body statements
 * are spliced in before the current statement, and the call expression is
 * replaced with the result expression — or the whole statement is removed
 * when the result is not used. The coroutine intrinsic gets special handling.
 */
private void inline(@NotNull JsInvocation call, @NotNull JsContext context) {
    DeclarationDescriptor callDescriptor = MetadataProperties.getDescriptor(call);
    if (isSuspendWithCurrentContinuation(callDescriptor)) {
        inlineSuspendWithCurrentContinuation(call, context);
        return;
    }

    JsInliningContext inliningContext = getInliningContext();
    InlineableResult inlineableResult = getInlineableCallReplacement(call, inliningContext);

    JsStatement inlineableBody = inlineableResult.getInlineableBody();
    JsExpression resultExpression = inlineableResult.getResultExpression();
    JsContext<JsStatement> statementContext = inliningContext.getStatementContext();

    // Body of the inline function can contain calls to lambdas that need to
    // be inlined themselves; accept() must mutate the body in place.
    JsStatement inlineableBodyWithLambdasInlined = accept(inlineableBody);
    assert inlineableBody == inlineableBodyWithLambdasInlined;
    statementContext.addPrevious(flattenStatement(inlineableBody));

    /*
     * Assumes that resultExpression == null when the result is not needed.
     * @see FunctionInlineMutator.isResultNeeded()
     */
    if (resultExpression == null) {
        statementContext.removeMe();
        return;
    }

    resultExpression = accept(resultExpression);
    MetadataProperties.setSynthetic(resultExpression, true);
    context.replaceMe(resultExpression);
}
/**
 * Tells whether the descriptor denotes the built-in
 * suspendCoroutineOrReturn intrinsic, which is inlined specially.
 */
private static boolean isSuspendWithCurrentContinuation(@Nullable DeclarationDescriptor descriptor) {
    return descriptor instanceof FunctionDescriptor
            && CommonCoroutineCodegenUtilKt.isBuiltInSuspendCoroutineOrReturn((FunctionDescriptor) descriptor.getOriginal());
}
/**
 * Inlines the coroutine intrinsic: the lambda argument is invoked directly
 * with the enclosing function's continuation, and the new invocation is
 * marked as suspending.
 * Assumes the continuation is the last parameter of the containing function
 * (coroutine lowering convention — TODO confirm).
 */
private void inlineSuspendWithCurrentContinuation(@NotNull JsInvocation call, @NotNull JsContext context) {
    JsInliningContext inliningContext = getInliningContext();
    JsFunction containingFunction = inliningContext.function;

    JsExpression lambda = call.getArguments().get(0);
    JsParameter continuationParam = containingFunction.getParameters().get(containingFunction.getParameters().size() - 1);
    JsInvocation invocation = new JsInvocation(lambda, continuationParam.getName().makeRef());
    MetadataProperties.setSuspend(invocation, true);
    context.replaceMe(accept(invocation));
}
/** @return the inlining context of the innermost function currently being visited */
@NotNull
private JsInliningContext getInliningContext() {
    return inliningContexts.peek();
}
/** @return the function context used to resolve call targets in the current function */
@NotNull
private FunctionContext getFunctionContext() {
    return getInliningContext().getFunctionContext();
}
/** @return the innermost named function being visited, or null outside of one */
@Nullable
private JsFunction getCurrentNamedFunction() {
    return namedFunctionsStack.empty() ? null : namedFunctionsStack.peek();
}
/**
 * Handles a detected inline cycle: marks the offending call as
 * not-inlinable, then walks the bookkeeping list backwards emitting an
 * INLINE_CALL_CYCLE diagnostic for each recorded call until the function
 * that started the cycle is reached.
 */
private void reportInlineCycle(@NotNull JsInvocation call, @NotNull JsFunction calledFunction) {
    MetadataProperties.setInlineStrategy(call, InlineStrategy.NOT_INLINE);
    Iterator<JsCallInfo> it = inlineCallInfos.descendingIterator();

    while (it.hasNext()) {
        JsCallInfo callInfo = it.next();
        PsiElement psiElement = MetadataProperties.getPsiElement(callInfo.call);

        CallableDescriptor descriptor = MetadataProperties.getDescriptor(callInfo.call);
        // Only calls with both source element and descriptor can be reported.
        if (psiElement != null && descriptor != null) {
            trace.report(Errors.INLINE_CALL_CYCLE.on(psiElement, descriptor));
        }

        if (callInfo.containingFunction == calledFunction) {
            break;
        }
    }
}
/**
 * A call must be inlined when its metadata requests inlining and a
 * definition for the callee can actually be resolved in this context.
 */
private boolean hasToBeInlined(@NotNull JsInvocation call) {
    InlineStrategy strategy = MetadataProperties.getInlineStrategy(call);
    return strategy != null && strategy.isInline() && getFunctionContext().hasFunctionDefinition(call);
}
/**
 * Per-function inlining state: exposes the naming and statement contexts
 * used when splicing an inlined body into this function, plus a
 * FunctionContext that resolves call targets against the collected named
 * functions and accessors.
 */
private class JsInliningContext implements InliningContext {
    private final FunctionContext functionContext;

    // The function this context belongs to.
    @NotNull
    public final JsFunction function;

    JsInliningContext(@NotNull JsFunction function) {
        this.function = function;
        functionContext = new FunctionContext(function, functionReader) {
            @Nullable
            @Override
            protected JsFunction lookUpStaticFunction(@Nullable JsName functionName) {
                return functions.get(functionName);
            }

            @Nullable
            @Override
            protected JsFunction lookUpStaticFunctionByTag(@NotNull String functionTag) {
                return accessors.get(functionTag);
            }
        };
    }

    @NotNull
    @Override
    public NamingContext newNamingContext() {
        JsScope scope = getFunctionContext().getScope();
        return new NamingContext(scope, getStatementContext());
    }

    @NotNull
    @Override
    public JsContext<JsStatement> getStatementContext() {
        return getLastStatementLevelContext();
    }

    @NotNull
    @Override
    public FunctionContext getFunctionContext() {
        return functionContext;
    }
}
/**
 * Pairs an inline call site with the named function that syntactically
 * contains it; used when reporting inline cycles.
 */
private static class JsCallInfo {
    @NotNull
    public final JsInvocation call;

    @NotNull
    public final JsFunction containingFunction;

    private JsCallInfo(@NotNull JsInvocation call, @NotNull JsFunction function) {
        this.call = call;
        this.containingFunction = function;
    }
}
}
| apache-2.0 |
mhurne/aws-sdk-java | aws-java-sdk-datapipeline/src/main/java/com/amazonaws/services/datapipeline/model/transform/FieldJsonUnmarshaller.java | 3212 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.datapipeline.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.datapipeline.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* Field JSON Unmarshaller
*/
/**
 * Field JSON Unmarshaller.
 *
 * Generated AWS SDK code: walks the JSON token stream and populates a Field
 * from the "key", "stringValue" and "refValue" members found one level below
 * the current depth.
 */
public class FieldJsonUnmarshaller implements
        Unmarshaller<Field, JsonUnmarshallerContext> {

    public Field unmarshall(JsonUnmarshallerContext context) throws Exception {
        Field field = new Field();

        // Remember where we started so we know when the object is closed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means the whole Field is absent.
        if (token == VALUE_NULL)
            return null;

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("key", targetDepth)) {
                    context.nextToken();
                    field.setKey(context.getUnmarshaller(String.class)
                            .unmarshall(context));
                }
                if (context.testExpression("stringValue", targetDepth)) {
                    context.nextToken();
                    field.setStringValue(context.getUnmarshaller(String.class)
                            .unmarshall(context));
                }
                if (context.testExpression("refValue", targetDepth)) {
                    context.nextToken();
                    field.setRefValue(context.getUnmarshaller(String.class)
                            .unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the closing token of the object we started in is reached.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return field;
    }

    // Lazily created singleton; AWS SDK unmarshallers are stateless.
    private static FieldJsonUnmarshaller instance;

    public static FieldJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new FieldJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
Distrotech/fop | src/java/org/apache/fop/traits/BorderStyle.java | 3986 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.traits;
import java.io.ObjectStreamException;
import org.apache.fop.fo.Constants;
/** Enumeration class for border styles. */
/** Enumeration class for border styles. */
public final class BorderStyle extends TraitEnum {

    private static final long serialVersionUID = 1L;

    private static final String[] BORDER_STYLE_NAMES = new String[]
            {"none", "hidden", "dotted", "dashed",
             "solid", "double", "groove", "ridge",
             "inset", "outset"};

    private static final int[] BORDER_STYLE_VALUES = new int[]
            {Constants.EN_NONE, Constants.EN_HIDDEN, Constants.EN_DOTTED, Constants.EN_DASHED,
             Constants.EN_SOLID, Constants.EN_DOUBLE, Constants.EN_GROOVE, Constants.EN_RIDGE,
             Constants.EN_INSET, Constants.EN_OUTSET};

    /** border-style: none */
    public static final BorderStyle NONE = new BorderStyle(0);
    /** border-style: hidden */
    public static final BorderStyle HIDDEN = new BorderStyle(1);
    /** border-style: dotted */
    public static final BorderStyle DOTTED = new BorderStyle(2);
    /** border-style: dashed */
    public static final BorderStyle DASHED = new BorderStyle(3);
    /** border-style: solid */
    public static final BorderStyle SOLID = new BorderStyle(4);
    /** border-style: double */
    public static final BorderStyle DOUBLE = new BorderStyle(5);
    /** border-style: groove */
    public static final BorderStyle GROOVE = new BorderStyle(6);
    /** border-style: ridge */
    public static final BorderStyle RIDGE = new BorderStyle(7);
    /** border-style: inset */
    public static final BorderStyle INSET = new BorderStyle(8);
    /** border-style: outset */
    public static final BorderStyle OUTSET = new BorderStyle(9);

    // All singleton instances, indexed in declaration order.
    private static final BorderStyle[] STYLES = new BorderStyle[] {
        NONE, HIDDEN, DOTTED, DASHED, SOLID, DOUBLE, GROOVE, RIDGE, INSET, OUTSET};

    private BorderStyle(int index) {
        super(BORDER_STYLE_NAMES[index], BORDER_STYLE_VALUES[index]);
    }

    /**
     * Returns the enumeration/singleton object based on its name.
     * @param name the name of the enumeration value (case-insensitive)
     * @return the enumeration object
     */
    public static BorderStyle valueOf(String name) {
        for (BorderStyle style : STYLES) {
            if (style.getName().equalsIgnoreCase(name)) {
                return style;
            }
        }
        throw new IllegalArgumentException("Illegal border style: " + name);
    }

    /**
     * Returns the enumeration/singleton object based on its enumeration value.
     * @param enumValue the enumeration value
     * @return the enumeration object
     */
    public static BorderStyle valueOf(int enumValue) {
        for (BorderStyle style : STYLES) {
            if (style.getEnumValue() == enumValue) {
                return style;
            }
        }
        throw new IllegalArgumentException("Illegal border style: " + enumValue);
    }

    // Keeps deserialization from creating duplicate singletons.
    private Object readResolve() throws ObjectStreamException {
        return valueOf(getName());
    }

    /** {@inheritDoc} */
    public String toString() {
        return "BorderStyle:" + getName();
    }
}
| apache-2.0 |
johncarl81/transfuse | transfuse/src/main/java/org/androidtransfuse/model/manifest/ReqNavigation.java | 1024 | /**
* Copyright 2011-2015 John Ericksen
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.androidtransfuse.model.manifest;
import org.androidtransfuse.annotations.Labeled;
/**
 * Android manifest {@code reqNavigation} attribute values.
 * Improvements: the label field is now {@code private final} (immutable enum
 * state), the redundant {@code private} constructor modifier is dropped, and
 * {@code @Override} is declared on the Labeled implementation.
 */
public enum ReqNavigation implements Labeled {

    UNDEFINED("undefined"),
    NONAV("nonav"),
    DPAD("dpad"),
    TRACKBALL("trackball"),
    WHEEL("wheel");

    // The literal attribute value written into the manifest.
    private final String label;

    ReqNavigation(String label) {
        this.label = label;
    }

    @Override
    public String getLabel() {
        return label;
    }
} | apache-2.0 |
synchromedia/OpenGSN | services-client/src/generated-sources/com/greenstarnetwork/services/facilitymanager/RefreshResourceResponse.java | 2196 | /**
* Copyright 2009-2011 École de technologie supérieure,
* Communication Research Centre Canada,
* Inocybe Technologies Inc. and 6837247 CANADA Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.greenstarnetwork.services.facilitymanager;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for refreshResourceResponse complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="refreshResourceResponse">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="return" type="{http://www.w3.org/2001/XMLSchema}string" minOccurs="0"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// JAXB-generated wrapper for the refreshResourceResponse element; the
// annotations and field order are significant to the XML binding, so the
// structure is left exactly as generated.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "refreshResourceResponse", propOrder = {
    "_return"
})
public class RefreshResourceResponse {

    // Bound to the XML element named "return" ("return" is a Java keyword,
    // hence the underscore-prefixed field name).
    @XmlElement(name = "return")
    protected String _return;

    /**
     * Gets the value of the return property.
     *
     * @return
     *     possible object is
     *     {@link String }
     *
     */
    public String getReturn() {
        return _return;
    }

    /**
     * Sets the value of the return property.
     *
     * @param value
     *     allowed object is
     *     {@link String }
     *
     */
    public void setReturn(String value) {
        this._return = value;
    }
}
| apache-2.0 |
gibello/roboconf | karaf/roboconf-karaf-commands-dm/src/test/java/net/roboconf/karaf/commands/dm/history/PruneHistoryCommandTest.java | 6384 | /**
* Copyright 2017 Linagora, Université Joseph Fourier, Floralis
*
* The present code is developed in the scope of the joint LINAGORA -
* Université Joseph Fourier - Floralis research program and is designated
* as a "Result" pursuant to the terms and conditions of the LINAGORA
* - Université Joseph Fourier - Floralis research program. Each copyright
* holder of Results enumerated here above fully & independently holds complete
* ownership of the complete Intellectual Property rights applicable to the whole
* of said Results, and may freely exploit it in any manner which does not infringe
* the moral rights of the other copyright holders.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package net.roboconf.karaf.commands.dm.history;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.PrintStream;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import javax.sql.DataSource;
import org.h2.jdbcx.JdbcDataSource;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
import org.mockito.Mockito;
import org.osgi.framework.BundleContext;
import org.osgi.framework.ServiceReference;
import net.roboconf.core.internal.tests.TestApplication;
import net.roboconf.core.model.runtime.CommandHistoryItem;
import net.roboconf.dm.internal.api.impl.CommandsMngrImpl;
import net.roboconf.dm.management.Manager;
/**
* @author Vincent Zurczak - Linagora
*/
/**
 * Tests for the prune-history Karaf command: verifies pruning behavior
 * against a real (embedded H2) data source and the no-data-source error path.
 */
public class PruneHistoryCommandTest {

    @Rule
    public TemporaryFolder folder = new TemporaryFolder();

    @Test
    public void testExecute_withRealDataSource() throws Exception {

        // Create a H2 data source backed by a temporary file.
        JdbcDataSource ds = new JdbcDataSource();
        File dataFile = this.folder.newFile();
        ds.setURL( "jdbc:h2:" + dataFile.getAbsolutePath());
        ds.setUser( "roboconf" );
        ds.setPassword( "roboconf" );

        // Prepare the mock: the command looks up the data source through the
        // OSGi bundle context with the "roboconf-dm-db" filter.
        ServiceReference<DataSource> sr = Mockito.mock( ServiceReference.class );
        PruneHistoryCommand pruneHistoryCmd = new PruneHistoryCommand();
        pruneHistoryCmd.bundleContext = Mockito.mock( BundleContext.class );
        Mockito.when( pruneHistoryCmd.bundleContext.getServiceReferences(
                DataSource.class,
                "(dataSourceName=roboconf-dm-db)" )).thenReturn( Arrays.asList( sr ));
        Mockito.when( pruneHistoryCmd.bundleContext.getService( sr )).thenReturn( ds );

        // Populate the database through the commands manager.
        Manager manager = Mockito.mock( Manager.class );
        Mockito.when( manager.getDataSource()).thenReturn( ds );
        CommandsMngrImpl cmdMngr = new CommandsMngrImpl( manager ) ;

        TestApplication app = new TestApplication();
        app.setDirectory( this.folder.newFolder());

        String cmdName = "my-command";
        String line = "rename /tomcat-vm as tomcat-vm-copy";
        cmdMngr.createOrUpdateCommand( app, cmdName, line );

        // Execute the command several times so history entries accumulate;
        // execution failures are irrelevant here, only the history matters.
        final int count = 5;
        for( int i=0; i<count; i++ ) {
            try {
                cmdMngr.execute( app, cmdName, CommandHistoryItem.ORIGIN_REST_API, "some source" );
            } catch( Exception e ) {
                // nothing
            }
        }

        List<CommandHistoryItem> historyItems = cmdMngr.getHistory( 0, 10, null, null, app.getName());
        Assert.assertEquals( count, historyItems.size());

        // Invalid argument? => Nothing is deleted and a warning is printed.
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        pruneHistoryCmd.out = new PrintStream( os, true, "UTF-8" );
        pruneHistoryCmd.daysToKeep = -10;
        pruneHistoryCmd.execute();

        historyItems = cmdMngr.getHistory( 0, 10, null, null, app.getName());
        Assert.assertEquals( count, historyItems.size());
        Assert.assertEquals( "[ WARNING ] The daysToKeep argument must be equal or greater than 0. Operation cancelled.", os.toString( "UTF-8" ).trim());

        // Remove entries older than two days. => Here, nothing will be deleted
        // (all entries were just created).
        os = new ByteArrayOutputStream();
        pruneHistoryCmd.out = new PrintStream( os, true, "UTF-8" );
        pruneHistoryCmd.daysToKeep = 2;
        pruneHistoryCmd.execute();

        historyItems = cmdMngr.getHistory( 0, 10, null, null, app.getName());
        Assert.assertEquals( count, historyItems.size());
        Assert.assertEquals( "Pruning the commands history.\nOnly the last 2 days will be kept.\nPruning done.", os.toString( "UTF-8" ).trim());

        // Remove entries older than one day. => Still nothing deleted; also
        // checks the singular "day" wording in the output.
        os = new ByteArrayOutputStream();
        pruneHistoryCmd.out = new PrintStream( os, true, "UTF-8" );
        pruneHistoryCmd.daysToKeep = 1;
        pruneHistoryCmd.execute();

        historyItems = cmdMngr.getHistory( 0, 10, null, null, app.getName());
        Assert.assertEquals( count, historyItems.size());
        Assert.assertEquals( "Pruning the commands history.\nOnly the last 1 day will be kept.\nPruning done.", os.toString( "UTF-8" ).trim());

        // daysToKeep == 0 removes all the entries.
        os = new ByteArrayOutputStream();
        pruneHistoryCmd.out = new PrintStream( os, true, "UTF-8" );
        pruneHistoryCmd.daysToKeep = 0;
        pruneHistoryCmd.execute();

        historyItems = cmdMngr.getHistory( 0, 10, null, null, app.getName());
        Assert.assertEquals( 0, historyItems.size());
        Assert.assertEquals( "Pruning the commands history.\nAll the entries will be deleted.\nPruning done.", os.toString( "UTF-8" ).trim());
    }

    @Test
    public void testExecute_withoutDataSource() throws Exception {

        // Prepare the mock: no data source registered in the bundle context.
        PruneHistoryCommand pruneHistoryCmd = new PruneHistoryCommand();
        pruneHistoryCmd.bundleContext = Mockito.mock( BundleContext.class );
        Mockito.when( pruneHistoryCmd.bundleContext.getServiceReferences(
                DataSource.class,
                "(dataSourceName=roboconf-dm-db)" )).thenReturn( new ArrayList<ServiceReference<DataSource>>( 0 ));

        // The command must fail gracefully with an explanatory message.
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        pruneHistoryCmd.out = new PrintStream( os, true, "UTF-8" );
        pruneHistoryCmd.execute();
        Assert.assertEquals( os.toString( "UTF-8" ).trim(), "No data source was found to prune the commands history." );
    }
}
| apache-2.0 |
mhurne/aws-sdk-java | aws-java-sdk-codecommit/src/main/java/com/amazonaws/services/codecommit/model/transform/CreateRepositoryResultJsonUnmarshaller.java | 2982 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.codecommit.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.codecommit.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* CreateRepositoryResult JSON Unmarshaller
*/
/**
 * CreateRepositoryResult JSON Unmarshaller.
 *
 * Generated AWS SDK code: walks the JSON token stream and populates a
 * CreateRepositoryResult from the "repositoryMetadata" member found one
 * level below the current depth.
 */
public class CreateRepositoryResultJsonUnmarshaller implements
        Unmarshaller<CreateRepositoryResult, JsonUnmarshallerContext> {

    public CreateRepositoryResult unmarshall(JsonUnmarshallerContext context)
            throws Exception {
        CreateRepositoryResult createRepositoryResult = new CreateRepositoryResult();

        // Remember where we started so we know when the object is closed.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        // A JSON null means the whole result is absent.
        if (token == VALUE_NULL)
            return null;

        while (true) {
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("repositoryMetadata", targetDepth)) {
                    context.nextToken();
                    createRepositoryResult
                            .setRepositoryMetadata(RepositoryMetadataJsonUnmarshaller
                                    .getInstance().unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the closing token of the object we started in is reached.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return createRepositoryResult;
    }

    // Lazily created singleton; AWS SDK unmarshallers are stateless.
    private static CreateRepositoryResultJsonUnmarshaller instance;

    public static CreateRepositoryResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new CreateRepositoryResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
nikhilvibhav/camel | components/camel-salesforce/camel-salesforce-component/src/main/java/org/apache/camel/component/salesforce/api/dto/composite/ReferenceId.java | 3465 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.salesforce.api.dto.composite;
import java.io.Serializable;
import java.util.List;
import java.util.Objects;
import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.thoughtworks.xstream.annotations.XStreamAlias;
import com.thoughtworks.xstream.annotations.XStreamImplicit;
import org.apache.camel.component.salesforce.api.dto.RestError;
import static java.util.Collections.emptyList;
import static java.util.Collections.unmodifiableList;
import static java.util.Optional.ofNullable;
/**
 * One entry of a Salesforce composite API response: a caller-supplied
 * reference id, the id of the affected record (if any) and any errors.
 *
 * Fixes applied:
 * 1. {@code hashCode} previously hashed the raw {@code errors} field while
 *    {@code equals} compared {@code getErrors()}; objects with a null vs an
 *    empty error list compared equal but had different hash codes, breaking
 *    the equals/hashCode contract. Both now use {@code getErrors()}.
 * 2. {@code toString} appended the label {@code ", Id: "} but never the id
 *    value itself; the id is now included.
 */
@XStreamAlias("results")
public final class ReferenceId implements Serializable {

    private static final long serialVersionUID = 1L;

    // May be null when the payload contains no errors; always exposed as a
    // non-null unmodifiable list via getErrors().
    @XStreamImplicit
    private final List<RestError> errors;

    // Id of the affected record, if any.
    private final String id;

    // Caller-supplied reference identifying the sub-request.
    private final String referenceId;

    @JsonCreator
    ReferenceId(@JsonProperty("referenceId") final String referenceId, @JsonProperty("id") final String id,
        @JsonProperty("errors") final List<RestError> errors) {
        this.referenceId = referenceId;
        this.id = id;
        this.errors = errors;
    }

    @Override
    public boolean equals(final Object obj) {
        if (obj == this) {
            return true;
        }

        if (!(obj instanceof ReferenceId)) {
            return false;
        }

        final ReferenceId other = (ReferenceId) obj;

        return Objects.equals(id, other.id) && Objects.equals(referenceId, other.referenceId)
            && Objects.equals(getErrors(), other.getErrors());
    }

    /** @return the errors as an unmodifiable list, never null */
    public List<RestError> getErrors() {
        return unmodifiableList(ofNullable(errors).orElse(emptyList()));
    }

    public String getId() {
        return id;
    }

    public String getReferenceId() {
        return referenceId;
    }

    @Override
    public int hashCode() {
        // Hash on getErrors() -- not the raw field -- so that objects that
        // equals() considers equal (null error list vs empty error list)
        // also share the same hash code.
        return Objects.hash(getErrors(), id, referenceId);
    }

    @Override
    public String toString() {
        final StringBuilder buildy =
            new StringBuilder("Reference: ").append(referenceId).append(", Id: ").append(id);

        final List<RestError> anyErrors = getErrors();
        if (anyErrors.isEmpty()) {
            buildy.append(", with no errors");
        } else {
            buildy.append(", with ");
            buildy.append(anyErrors.size());
            buildy.append(" error(s)");
        }

        return buildy.toString();
    }
}
| apache-2.0 |
sijie/bookkeeper | bookkeeper-server/src/main/java/org/apache/bookkeeper/proto/checksum/CRC32DigestManager.java | 2061 | package org.apache.bookkeeper.proto.checksum;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import io.netty.buffer.ByteBuf;
import io.netty.buffer.ByteBufAllocator;
import io.netty.util.concurrent.FastThreadLocal;
/**
* Digest manager for CRC32 checksum.
*/
/**
 * Digest manager that protects entry payloads with a CRC32 checksum.
 */
class CRC32DigestManager extends DigestManager {

    /**
     * Abstraction over the available CRC32 implementations, so the fastest
     * supported one can be picked at runtime.
     */
    interface CRC32Digest {
        long getValueAndReset();

        void update(ByteBuf buf);
    }

    // One digest instance per thread; the direct-memory variant is preferred
    // when the platform supports it, the standard JDK one otherwise.
    private static final FastThreadLocal<CRC32Digest> crc = new FastThreadLocal<CRC32Digest>() {
        @Override
        protected CRC32Digest initialValue() {
            return DirectMemoryCRC32Digest.isSupported()
                    ? new DirectMemoryCRC32Digest()
                    : new StandardCRC32Digest();
        }
    };

    public CRC32DigestManager(long ledgerId, boolean useV2Protocol, ByteBufAllocator allocator) {
        super(ledgerId, useV2Protocol, allocator);
    }

    @Override
    int getMacCodeLength() {
        // The CRC32 value is written as a single long.
        return 8;
    }

    @Override
    void populateValueAndReset(ByteBuf buf) {
        buf.writeLong(crc.get().getValueAndReset());
    }

    @Override
    void update(ByteBuf data) {
        crc.get().update(data);
    }
}
| apache-2.0 |
dubenju/javay | src/java/ee/ioc/cs/jbe/browser/codeedit/JAsmParser.java | 51962 | /*
*
*/
package ee.ioc.cs.jbe.browser.codeedit;
import java.util.ArrayList;
import org.apache.bcel.generic.*;
import org.gjt.jclasslib.bytecode.OpcodesUtil;
public class JAsmParser {
/*
* Parses the input code and returns an instructionlist, but also has a
* sideeffect: it updates the constant pool on the fly to have the required
* constants in the constant pool.
*/
private JAsmParseException parseException = new JAsmParseException();
public InstructionList parse(String code, ConstantPoolGen cpg)
throws JAsmParseException {
code = code.replaceAll("\r", "");
String[] codeLines = code.split("\n");
if (codeLines.length == 1 && codeLines[0].equals("")) {
return new InstructionList();
}
InstructionList instructions = new InstructionList();
ArrayList<InstructionHandle> instructionHandleList = new ArrayList<InstructionHandle> ();
ArrayList<TempSwitchData> lookupSwitches = new ArrayList<TempSwitchData>();
ArrayList<TempSwitchData> tableSwitches = new ArrayList<TempSwitchData>();
ArrayList<BranchPair> branches = new ArrayList<BranchPair>();
InstructionHandle ih;
String[] instrElems;
int codeLength = countLines(codeLines);
// InstructionHandle[] iha = new InstructionHandle[strt.countTokens()];
int labels = 0;
int switchMode = 0; // 0- normal , 1- tableswitch, 2 - lookupswitch
String fullInstr;
String instrName;
TempSwitchData tempSwitch = new TempSwitchData();
for (int i = 0; i < codeLines.length; i++) {
fullInstr = codeLines[i];
//switchmode, 1 denoting tableswitch, 2 lookupswitch
if (beginsWithWhitespace(fullInstr) && switchMode == 1) {
boolean isDefault = isDefaultLine(fullInstr.trim());
if (isDefault) {
int target = getLookupTarget(fullInstr.trim(), labels,
codeLength);
tempSwitch.getBranchPairs().add(
new BranchPair(-1, target));
} else {
int target = getTableArg(fullInstr.trim(), labels, codeLength);
tempSwitch.getBranchPairs().add(
new BranchPair(tempSwitch.getInitialLab(), target));
tempSwitch.incInitialLab();
}
} else if (beginsWithWhitespace(fullInstr) && switchMode == 2) {
int target = getLookupTarget(fullInstr.trim(), labels,
codeLength);
int value = getLookupSource(fullInstr.trim(), labels);
tempSwitch.getBranchPairs().add(new BranchPair(value, target));
} else if (beginsWithWhitespace(fullInstr)) {
parseException.addError(JAsmParseException.WHITESPACE_ERROR,
fullInstr, labels-1);
} else {
if (switchMode == 1) {
TABLESWITCH ts = new TABLESWITCH();
ih = instructions.append(ts);
instructionHandleList.add(ih);
tempSwitch.setHandle(ih);
tableSwitches.add(tempSwitch);
labels++;
switchMode = 0;
} else if (switchMode == 2) {
LOOKUPSWITCH ls = new LOOKUPSWITCH();
ih = instructions.append(ls);
instructionHandleList.add(ih);
tempSwitch.setHandle(ih);
lookupSwitches.add(tempSwitch);
labels++;
switchMode = 0;
}
instrElems = fullInstr.split(" ");
instrName = instrElems[0].toLowerCase().trim();
if (instrName.equals("bipush")) {
byte arg = getSingleByteArg(instrElems, labels);
ih = instructions.append(new BIPUSH(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("sipush")) {
short arg = getSingleShortArg(instrElems, labels);
ih = instructions.append(new SIPUSH(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iinc")) {
int arg1 = 0;
int arg2 = 0;
try {
arg1 = Integer.parseInt(instrElems[1]);
arg2 = Integer.parseInt(instrElems[2]);
} catch (NumberFormatException nfe) {
parseException.addError(
JAsmParseException.INT_REQUIRED, instrElems[0],
labels);
} catch (ArrayIndexOutOfBoundsException aobe) {
parseException.addError(
JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], labels);
}
ih = instructions.append(new IINC(arg1, arg2));
instructionHandleList.add(ih);
labels++;
}
/*
* Class and object operations.
*/
else if (instrName.equals("anewarray")) {
int arg = getClassConstRef(instrElems, cpg, labels);
ih = instructions.append(new ANEWARRAY(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("checkcast")) {
int arg = getClassConstRef(instrElems, cpg, labels);
ih = instructions.append(new CHECKCAST(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("instanceof")) {
int arg = getClassConstRef(instrElems, cpg, labels);
ih = instructions.append(new INSTANCEOF(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("new")) {
int arg = getClassConstRef(instrElems, cpg, labels);
ih = instructions.append(new NEW(arg));
instructionHandleList.add(ih);
labels++;
}
/*
* Invoke instructions
*/
else if (instrName.equals("invokevirtual")) {
int arg = getMethodConstRef(instrElems, cpg, labels);
ih = instructions.append(new INVOKEVIRTUAL(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("invokestatic")) {
int arg = getMethodConstRef(instrElems, cpg, labels);
ih = instructions.append(new INVOKESTATIC(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("invokespecial")) {
int arg = getMethodConstRef(instrElems, cpg, labels);
ih = instructions.append(new INVOKESPECIAL(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("invokeinterface")) {
int index = getInterfaceConstRef(instrElems, cpg, labels);
int nargs = Integer.parseInt(instrElems[2]);
ih = instructions.append(new INVOKEINTERFACE(index, nargs));
instructionHandleList.add(ih);
labels++;
}
/*
* Field instructions
*/
else if (instrName.equals("getstatic")) {
int arg = getFieldConstRef(instrElems, cpg, labels);
ih = instructions.append(new GETSTATIC(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("getfield")) {
int arg = getFieldConstRef(instrElems, cpg, labels);
ih = instructions.append(new GETFIELD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("putstatic")) {
int arg = getFieldConstRef(instrElems, cpg, labels);
ih = instructions.append(new PUTSTATIC(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("putfield")) {
int arg = getFieldConstRef(instrElems, cpg, labels);
ih = instructions.append(new PUTFIELD(arg));
instructionHandleList.add(ih);
labels++;
}
/*
* Newarray instructions
*/
else if (instrName.equals("newarray")) {
byte arg = getArrayRef(instrElems, labels);
ih = instructions.append(new NEWARRAY(arg));
instructionHandleList.add(ih);
labels++;
}
else if (instrName.equals("multianewarray")) {
short dim = 1;
int arg = 0;
try {
dim = Short.parseShort(instrElems[2]);
} catch (NumberFormatException nfe) {
parseException.addError(
JAsmParseException.SHORT_REQUIRED,
instrElems[0], labels);
} catch (ArrayIndexOutOfBoundsException aobe) {
parseException.addError(
JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], labels);
}
try {
arg = Integer.parseInt(instrElems[1]);
} catch (NumberFormatException nfe) {
String classN = instrElems[1];
arg = cpg.addClass(classN);
} catch (ArrayIndexOutOfBoundsException aobe) {
parseException.addError(
JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], labels);
}
ih = instructions.append(new MULTIANEWARRAY(arg, dim));
instructionHandleList.add(ih);
labels++;
}
/*
* Load constant instructions
*/
else if (instrName.equals("ldc")) {
int arg = getConstRef4ldc(instrElems, cpg, labels);
ih = instructions.append(new LDC(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ldc_w")) {
int arg = getConstRef4ldc(instrElems, cpg, labels);
ih = instructions.append(new LDC_W(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ldc2_w")) {
int arg = getConstRefldc2_w(instrElems, cpg, labels);
ih = instructions.append(new LDC2_W(arg));
instructionHandleList.add(ih);
labels++;
}
/*
* Local Variable instructions
*/
else if (instrName.equals("ret")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new RET(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aload")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new ALOAD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("astore")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new ASTORE(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dload")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new DLOAD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dstore")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new DSTORE(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fload")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new FLOAD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fstore")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new FSTORE(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iload")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new ILOAD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("istore")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new ISTORE(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lload")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new LLOAD(arg));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lstore")) {
int arg = getSingleIntArg(instrElems, labels);
ih = instructions.append(new LSTORE(arg));
instructionHandleList.add(ih);
labels++;
}
/*
* Switch instructions
*/
else if (instrName.equals("tableswitch")) {
switchMode = 1;
int arg = getSingleIntArg(instrElems, labels);
tempSwitch = new TempSwitchData(2, arg);
} else if (instrName.equals("lookupswitch")) {
switchMode = 2;
tempSwitch = new TempSwitchData(1);
}
/*
* 0 parameter instructions
*/
else if (instrName.equals("aaload")) {
ih = instructions.append(new AALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aastore")) {
ih = instructions.append(new AASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aconst_null")) {
ih = instructions.append(new ACONST_NULL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aload_0")) {
ih = instructions.append(new ALOAD(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aload_1")) {
ih = instructions.append(new ALOAD(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aload_2")) {
ih = instructions.append(new ALOAD(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("aload_3")) {
ih = instructions.append(new ALOAD(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("areturn")) {
ih = instructions.append(new ARETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("arraylength")) {
ih = instructions.append(new ARRAYLENGTH());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("astore_0")) {
ih = instructions.append(new ASTORE(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("astore_1")) {
ih = instructions.append(new ASTORE(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("astore_2")) {
ih = instructions.append(new ASTORE(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("astore_3")) {
ih = instructions.append(new ASTORE(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("athrow")) {
ih = instructions.append(new ATHROW());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("baload")) {
ih = instructions.append(new BALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("bastore")) {
ih = instructions.append(new BASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("breakpoint")) {
ih = instructions.append(new BREAKPOINT());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("caload")) {
ih = instructions.append(new CALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("castore")) {
ih = instructions.append(new CASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("d2f")) {
ih = instructions.append(new D2F());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("d2i")) {
ih = instructions.append(new D2I());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("d2l")) {
ih = instructions.append(new D2L());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dadd")) {
ih = instructions.append(new DADD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("daload")) {
ih = instructions.append(new DALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dastore")) {
ih = instructions.append(new DASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dcmpg")) {
ih = instructions.append(new DCMPG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dcmpl")) {
ih = instructions.append(new DCMPL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dconst_0")) {
ih = instructions.append(new DCONST(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dconst_1")) {
ih = instructions.append(new DCONST(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ddiv")) {
ih = instructions.append(new DDIV());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dload_0")) {
ih = instructions.append(new DLOAD(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dload_1")) {
ih = instructions.append(new DLOAD(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dload_2")) {
ih = instructions.append(new DLOAD(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dload_3")) {
ih = instructions.append(new DLOAD(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dmul")) {
ih = instructions.append(new DMUL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dneg")) {
ih = instructions.append(new DNEG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("drem")) {
ih = instructions.append(new DREM());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dreturn")) {
ih = instructions.append(new DRETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dstore_0")) {
ih = instructions.append(new DSTORE(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dstore_1")) {
ih = instructions.append(new DSTORE(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dstore_2")) {
ih = instructions.append(new DSTORE(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dstore_3")) {
ih = instructions.append(new DSTORE(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dsub")) {
ih = instructions.append(new DSUB());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup")) {
ih = instructions.append(new DUP());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup2")) {
ih = instructions.append(new DUP2());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup2_x1")) {
ih = instructions.append(new DUP2_X1());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup2_x2")) {
ih = instructions.append(new DUP2_X2());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup_x1")) {
ih = instructions.append(new DUP_X1());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("dup_x2")) {
ih = instructions.append(new DUP_X2());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("f2d")) {
ih = instructions.append(new F2D());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("f2i")) {
ih = instructions.append(new F2I());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("f2l")) {
ih = instructions.append(new F2L());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fadd")) {
ih = instructions.append(new FADD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("faload")) {
ih = instructions.append(new FALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fastore")) {
ih = instructions.append(new FASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fcmpg")) {
ih = instructions.append(new FCMPG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fcmpl")) {
ih = instructions.append(new FCMPL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fconst_0")) {
ih = instructions.append(new FCONST(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fconst_1")) {
ih = instructions.append(new FCONST(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fconst_2")) {
ih = instructions.append(new FCONST(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fdiv")) {
ih = instructions.append(new FDIV());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fload_0")) {
ih = instructions.append(new FLOAD(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fload_1")) {
ih = instructions.append(new FLOAD(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fload_2")) {
ih = instructions.append(new FLOAD(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fload_3")) {
ih = instructions.append(new FLOAD(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fmul")) {
ih = instructions.append(new FMUL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fneg")) {
ih = instructions.append(new FNEG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("frem")) {
ih = instructions.append(new FREM());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("freturn")) {
ih = instructions.append(new FRETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fstore_0")) {
ih = instructions.append(new FSTORE(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fstore_1")) {
ih = instructions.append(new FSTORE(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fstore_2")) {
ih = instructions.append(new FSTORE(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fstore_3")) {
ih = instructions.append(new FSTORE(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("fsub")) {
ih = instructions.append(new FSUB());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2d")) {
ih = instructions.append(new I2D());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2f")) {
ih = instructions.append(new I2F());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2l")) {
ih = instructions.append(new I2L());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iadd")) {
ih = instructions.append(new IADD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iaload")) {
ih = instructions.append(new IALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iand")) {
ih = instructions.append(new IAND());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iastore")) {
ih = instructions.append(new IASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_0")) {
ih = instructions.append(new ICONST(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_1")) {
ih = instructions.append(new ICONST(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_2")) {
ih = instructions.append(new ICONST(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_3")) {
ih = instructions.append(new ICONST(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_4")) {
ih = instructions.append(new ICONST(4));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_5")) {
ih = instructions.append(new ICONST(5));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iconst_m1")) {
ih = instructions.append(new ICONST(-1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("idiv")) {
ih = instructions.append(new IDIV());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iload_0")) {
ih = instructions.append(new ILOAD(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iload_1")) {
ih = instructions.append(new ILOAD(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iload_2")) {
ih = instructions.append(new ILOAD(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iload_3")) {
ih = instructions.append(new ILOAD(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("imul")) {
ih = instructions.append(new IMUL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ineg")) {
ih = instructions.append(new INEG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2b")) {
ih = instructions.append(new I2B());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2c")) {
ih = instructions.append(new I2C());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("i2s")) {
ih = instructions.append(new I2S());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ior")) {
ih = instructions.append(new IOR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("irem")) {
ih = instructions.append(new IREM());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ireturn")) {
ih = instructions.append(new IRETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ishl")) {
ih = instructions.append(new ISHL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ishr")) {
ih = instructions.append(new ISHR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("istore_0")) {
ih = instructions.append(new ISTORE(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("istore_1")) {
ih = instructions.append(new ISTORE(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("istore_2")) {
ih = instructions.append(new ISTORE(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("istore_3")) {
ih = instructions.append(new ISTORE(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("isub")) {
ih = instructions.append(new ISUB());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("iushr")) {
ih = instructions.append(new IUSHR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ixor")) {
ih = instructions.append(new IXOR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("l2d")) {
ih = instructions.append(new L2D());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("l2f")) {
ih = instructions.append(new L2F());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("l2i")) {
ih = instructions.append(new L2I());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ladd")) {
ih = instructions.append(new LADD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("laload")) {
ih = instructions.append(new LALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("land")) {
ih = instructions.append(new LAND());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lastore")) {
ih = instructions.append(new LASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lcmp")) {
ih = instructions.append(new LCMP());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lconst_0")) {
ih = instructions.append(new LCONST(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lconst_1")) {
ih = instructions.append(new LCONST(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("ldiv")) {
ih = instructions.append(new LDIV());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lload_0")) {
ih = instructions.append(new LLOAD(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lload_1")) {
ih = instructions.append(new LLOAD(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lload_2")) {
ih = instructions.append(new LLOAD(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lload_3")) {
ih = instructions.append(new LLOAD(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lmul")) {
ih = instructions.append(new LMUL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lneg")) {
ih = instructions.append(new LNEG());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lor")) {
ih = instructions.append(new LOR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lrem")) {
ih = instructions.append(new LREM());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lreturn")) {
ih = instructions.append(new LRETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lshl")) {
ih = instructions.append(new LSHL());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lshr")) {
ih = instructions.append(new LSHR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lstore_0")) {
ih = instructions.append(new LSTORE(0));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lstore_1")) {
ih = instructions.append(new LSTORE(1));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lstore_2")) {
ih = instructions.append(new LSTORE(2));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lstore_3")) {
ih = instructions.append(new LSTORE(3));
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lsub")) {
ih = instructions.append(new LSUB());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lushr")) {
ih = instructions.append(new LUSHR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("lxor")) {
ih = instructions.append(new LXOR());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("monitorenter")) {
ih = instructions.append(new MONITORENTER());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("monitorexit")) {
ih = instructions.append(new MONITOREXIT());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("nop")) {
ih = instructions.append(new NOP());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("pop")) {
ih = instructions.append(new POP());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("pop2")) {
ih = instructions.append(new POP2());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("return")) {
ih = instructions.append(new RETURN());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("saload")) {
ih = instructions.append(new SALOAD());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("sastore")) {
ih = instructions.append(new SASTORE());
instructionHandleList.add(ih);
labels++;
} else if (instrName.equals("swap")) {
ih = instructions.append(new SWAP());
instructionHandleList.add(ih);
labels++;
}
// Jump instructions
else if (instrName.equals("goto")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new GOTO(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("goto_w")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new GOTO_W(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_acmpeq")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ACMPEQ(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_acmpne")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ACMPNE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmpeq")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPEQ(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmpge")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPGE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmpgt")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPGT(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmple")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPLE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmplt")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPLT(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("if_icmpne")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IF_ICMPNE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifeq")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFEQ(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifge")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFGE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifgt")) {
int arg = getJumpArg(instrElems, labels, codeLength);
;
ih = instructions.append(new IFGT(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifle")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFLE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("iflt")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFLT(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifne")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFNE(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifnonnull")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFNONNULL(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("ifnull")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new IFNULL(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("jsr")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new JSR(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else if (instrName.equals("jsr_w")) {
int arg = getJumpArg(instrElems, labels, codeLength);
ih = instructions.append(new JSR_W(null));
instructionHandleList.add(ih);
branches.add(new BranchPair(labels, arg));
labels++;
} else {
parseException.addError(JAsmParseException.SYNTAX_ERROR,
fullInstr, labels);
labels++;
}
}
}
if (parseException.errorCount() > 0) {
throw parseException;
}
for (int i = 0; i < lookupSwitches.size(); i++) {
TempSwitchData tsd = (TempSwitchData) lookupSwitches.get(i);
int targetArrSize = 0;
for (int j = 0; j < tsd.getBranchPairs().size(); j++) {
BranchPair bp = (BranchPair) tsd.getBranchPairs().get(j);
if (bp.source != -1) {
targetArrSize++;
}
}
int[] targets = new int[targetArrSize];
InstructionHandle[] targetInstrs = new InstructionHandle[targetArrSize];
int count = 0;
InstructionHandle defaultTarget = null;
for (int j = 0; j < tsd.getBranchPairs().size(); j++) {
BranchPair bp = (BranchPair) tsd.getBranchPairs().get(j);
if (bp.source != -1) {
targets[count] = bp.source;
targetInstrs[count] = (InstructionHandle) instructionHandleList
.get(bp.target - 1);
count++;
} else {
defaultTarget = (InstructionHandle) instructionHandleList
.get(bp.target - 1);
}
}
LOOKUPSWITCH lus = (LOOKUPSWITCH) tsd.ih.getInstruction();
lus.setMatchesTargets(targets, targetInstrs);
lus.setTarget(defaultTarget);
}
for (int i = 0; i < tableSwitches.size(); i++) {
TempSwitchData tsd = (TempSwitchData) tableSwitches.get(i);
int targetArrSize = 0;
for (int j = 0; j < tsd.getBranchPairs().size(); j++) {
BranchPair bp = (BranchPair) tsd.getBranchPairs().get(j);
if (bp.source != -1) {
targetArrSize++;
}
}
int[] targets = new int[targetArrSize];
InstructionHandle[] targetInstrs = new InstructionHandle[targetArrSize];
int count = 0;
InstructionHandle defaultTarget = null;
for (int j = 0; j < tsd.getBranchPairs().size(); j++) {
BranchPair bp = (BranchPair) tsd.getBranchPairs().get(j);
if (bp.source != -1) {
targets[count] = bp.source;
targetInstrs[count] = (InstructionHandle) instructionHandleList
.get(bp.target - 1);
count++;
} else {
defaultTarget = (InstructionHandle) instructionHandleList
.get(bp.target - 1);
}
}
TABLESWITCH ts = (TABLESWITCH) tsd.ih.getInstruction();
ts.setMatchesTargets(targets, targetInstrs);
ts.setTarget(defaultTarget);
}
for (int i = 0; i < branches.size(); i++) {
BranchPair bp = (BranchPair) branches.get(i);
ih = (InstructionHandle) instructionHandleList.get(bp.source);
if (ih.getInstruction() instanceof GotoInstruction) {
GotoInstruction jInst = (GotoInstruction) ih.getInstruction();
jInst.setTarget((InstructionHandle) instructionHandleList
.get(bp.target - 1));
} else {
IfInstruction jInst = (IfInstruction) ih.getInstruction();
jInst.setTarget((InstructionHandle) instructionHandleList
.get(bp.target - 1));
}
}
return instructions;
}
private boolean isDefaultLine(String arg) {
String[] args = arg.split(":");
if (args.length == 2 & args[0].trim().equals("default")) {
return true;
}
return false;
}
private int getLookupSource(String arg, int line) {
try {
String[] args = arg.split(":");
if (args.length != 2) {
parseException.addError(JAsmParseException.BAD_LOOKUP_ARGUMENT,
arg, line);
return 1;
}
if (args[0].trim().equals("default")) {
return -1;
}
int b = Integer.parseInt(args[0].trim());
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS, arg,
line);
return 1;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.BAD_LOOKUP_ARGUMENT,
arg, line);
return 1;
}
}
private int getLookupTarget(String arg, int line, int codeLength) {
try {
String[] args = arg.split(":");
if (args.length != 2) {
parseException.addError(JAsmParseException.BAD_LOOKUP_ARGUMENT,
arg, line);
return 1;
}
int b = Integer.parseInt(args[1].trim());
if (b < 1 || b > codeLength) {
parseException.addError(JAsmParseException.JUMP_OUT_OF_DOMAIN,
arg, line);
return 1;
}
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS, arg,
line);
return 1;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.BAD_LOOKUP_ARGUMENT,
arg, line);
return 1;
}
}
private int countLines(String[] codeLines) {
int count = 0;
for (int i = 0; i < codeLines.length; i++) {
if (!beginsWithWhitespace(codeLines[i])) {
count++;
}
}
return count;
}
private boolean beginsWithWhitespace(String line) {
if (!line.equals("")) {
if (line.charAt(0) == ' ' || line.charAt(0) == '\t')
return true;
}
return false;
}
private int getTableArg(String arg, int line, int codeLength) {
try {
int i = Integer.parseInt(arg);
if (i < 1 || i > codeLength) {
parseException.addError(JAsmParseException.JUMP_OUT_OF_DOMAIN,
arg, line);
return 1;
}
return i;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS, arg,
line);
return 1;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.INT_REQUIRED, arg, line);
return 1;
}
}
private int getJumpArg(String[] instrElems, int line, int codeLength) {
try {
int b = Integer.parseInt(instrElems[1]);
if (b < 1 || b > codeLength) {
parseException.addError(JAsmParseException.JUMP_OUT_OF_DOMAIN,
instrElems[0], line);
return 1;
}
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 1;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.INT_REQUIRED,
instrElems[0], line);
return 1;
}
}
private short getSingleShortArg(String[] instrElems, int line) {
try {
short b = Short.parseShort(instrElems[1]);
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.SHORT_REQUIRED,
instrElems[0], line);
return 0;
}
}
private int getSingleIntArg(String[] instrElems, int line) {
try {
int b = Integer.parseInt(instrElems[1]);
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.INT_REQUIRED,
instrElems[0], line);
return 0;
}
}
private byte getSingleByteArg(String[] instrElems, int line) {
try {
byte b = Byte.parseByte(instrElems[1]);
return b;
} catch (ArrayIndexOutOfBoundsException exc1) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
} catch (NumberFormatException exc1) {
parseException.addError(JAsmParseException.BYTE_REQUIRED,
instrElems[0], line);
return 0;
}
}
private int getConstRefldc2_w(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
try {
long larg = Long.parseLong(instrElems[1]);
return cpg.addLong(larg);
} catch (NumberFormatException nfei) {
}
try {
double darg = Double.parseDouble(instrElems[1]);
return cpg.addDouble(darg);
} catch (NumberFormatException nfed) {
}
parseException.addError(JAsmParseException.ARG_TYPE_ERROR_LDC2_W,
instrElems[0], line);
return 0;
}
private byte getArrayRef(String[] instrElems, int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
byte arg;
try {
arg = Byte.parseByte(instrElems[1]);
} catch (NumberFormatException nfe) {
arg = OpcodesUtil.getArrayType(instrElems[1]);
if (arg == 0) {
parseException.addError(JAsmParseException.ARG_TYPE_ERROR,
instrElems[0], line);
}
}
return arg;
}
private int getConstRef4ldc(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
try {
int iarg = Integer.parseInt(instrElems[1]);
return cpg.addInteger(iarg);
} catch (NumberFormatException nfei) {
}
try {
float farg = Float.parseFloat(instrElems[1]);
return cpg.addFloat(farg);
} catch (NumberFormatException nfed) {
}
if (instrElems[1].startsWith("\"")) {
StringBuffer sb = new StringBuffer(instrElems[1]);
for (int i = 2; i < instrElems.length; i++) {
sb.append(" ").append(instrElems[i]);
}
String sarg = sb.toString();
if (sarg.startsWith("\"") && sarg.endsWith("\"")) {
sarg = sarg.substring(1, sarg.length() - 1);
return cpg.addString(sarg);
} else {
parseException.addError(JAsmParseException.ARG_TYPE_ERROR,
instrElems[0], line);
return 0;
}
}
parseException.addError(JAsmParseException.ARG_TYPE_ERROR,
instrElems[0], line);
return 0;
}
private int getClassConstRef(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
int arg;
try {
arg = Integer.parseInt(instrElems[1]);
} catch (NumberFormatException nfe) {
String classN = instrElems[1];
arg = cpg.addClass(classN);
}
return arg;
}
private int getFieldConstRef(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 3) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
int arg;
try {
arg = Integer.parseInt(instrElems[1]);
} catch (NumberFormatException nfe) {
String classN = getClassFromFieldName(instrElems[1]);
String fieldN = getFieldFromFieldName(instrElems[1]);
String descr = instrElems[2];
arg = cpg.addFieldref(classN, fieldN, descr);
}
return arg;
}
private int getMethodConstRef(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
int arg;
try {
arg = Integer.parseInt(instrElems[1]);
} catch (NumberFormatException nfe) {
String classN = getClassFromFullMethod(instrElems[1]);
String methodN = getMethodFromFullMethod(instrElems[1]);
String descr = getDescrFromFullMethod(instrElems[1]);
arg = cpg.addMethodref(classN, methodN, descr);
}
return arg;
}
private int getInterfaceConstRef(String[] instrElems, ConstantPoolGen cpg,
int line) {
if (instrElems.length < 2) {
parseException.addError(JAsmParseException.MISSING_ARGUMENTS,
instrElems[0], line);
return 0;
}
int arg;
try {
arg = Integer.parseInt(instrElems[1]);
} catch (NumberFormatException nfe) {
String classN = getClassFromFullMethod(instrElems[1]);
String methodN = getMethodFromFullMethod(instrElems[1]);
String descr = getDescrFromFullMethod(instrElems[1]);
arg = cpg.addInterfaceMethodref(classN, methodN, descr);
}
return arg;
}
public String getClassFromFullMethod(String fullMethod) {
String classAndMeth = fullMethod.substring(0, fullMethod.indexOf('('));
String className = getClassFromFieldName(classAndMeth);
return className;
}
public String getMethodFromFullMethod(String fullMethod) {
String classAndMeth = fullMethod.substring(0, fullMethod.indexOf('('));
String methName = getFieldFromFieldName(classAndMeth);
return methName;
}
public String getDescrFromFullMethod(String fullMethod) {
String description = fullMethod.substring(fullMethod.indexOf('('),
fullMethod.length());
return description;
}
public String getClassFromFieldName(String fieldName) {
String className = fieldName.substring(0, fieldName.lastIndexOf('/'));
return className.replace('/', '.');
}
public String getFieldFromFieldName(String fieldName) {
String field = fieldName.substring(fieldName.lastIndexOf('/') + 1,
fieldName.length());
return field;
}
class BranchPair {
int source, target;
BranchPair(int s, int t) {
source = s;
target = t;
}
}
    // Temporary holder for a tableswitch/lookupswitch whose branch targets are
    // still unresolved label numbers; targets are patched into 'ih' later.
    class TempSwitchData {
        int type; // 1 - table, 2 - lookup
        // Label number at which the switch instruction appears.
        int initialLab;
        // One pair per case plus one for "default" (source == -1).
        ArrayList<BranchPair> branchPairs = new ArrayList<BranchPair>();
        // Handle of the emitted *SWITCH instruction (filled in via setHandle).
        private InstructionHandle ih;
        public TempSwitchData(int type, int label) {
            this.type = type;
            initialLab = label;
        }
        public void setHandle(InstructionHandle ih) {
            this.ih = ih;
        }
        public void incInitialLab() {
            initialLab++;
        }
        public int getInitialLab() {
            return initialLab;
        }
        public ArrayList<BranchPair> getBranchPairs() {
            return branchPairs;
        }
        public TempSwitchData() {
        }
        public TempSwitchData(int type) {
            this.type = type;
        }
    }
}
| apache-2.0 |
sensui/guava-libraries | guava-tests/test/com/google/common/primitives/FloatsTest.java | 17236 | /*
* Copyright (C) 2008 The Guava Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.primitives;
import static java.lang.Float.NaN;
import static org.truth0.Truth.ASSERT;
import com.google.common.annotations.GwtCompatible;
import com.google.common.annotations.GwtIncompatible;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.testing.Helpers;
import com.google.common.testing.NullPointerTester;
import com.google.common.testing.SerializableTester;
import junit.framework.TestCase;
import java.util.Arrays;
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
/**
* Unit test for {@link Floats}.
*
* @author Kevin Bourrillion
*/
@GwtCompatible(emulated = true)
@SuppressWarnings("cast") // redundant casts are intentional and harmless
public class FloatsTest extends TestCase {
  private static final float[] EMPTY = {};
  private static final float[] ARRAY1 = {(float) 1};
  private static final float[] ARRAY234
      = {(float) 2, (float) 3, (float) 4};
  private static final float LEAST = Float.NEGATIVE_INFINITY;
  private static final float GREATEST = Float.POSITIVE_INFINITY;
  // Interesting non-NaN values: infinities, signed zeros, subnormal
  // boundaries, and integral limits that lose precision as floats.
  private static final float[] NUMBERS = new float[] {
      LEAST, -Float.MAX_VALUE, -1f, -0f, 0f, 1f, Float.MAX_VALUE, GREATEST,
      Float.MIN_NORMAL, -Float.MIN_NORMAL, Float.MIN_VALUE, -Float.MIN_VALUE,
      Integer.MIN_VALUE, Integer.MAX_VALUE, Long.MIN_VALUE, Long.MAX_VALUE
  };
  // NUMBERS plus NaN.
  private static final float[] VALUES
      = Floats.concat(NUMBERS, new float[] {NaN});
  public void testHashCode() {
    // Floats.hashCode(float) must agree with Float#hashCode().
    for (float value : VALUES) {
      assertEquals(((Float) value).hashCode(), Floats.hashCode(value));
    }
  }
  public void testIsFinite() {
    for (float value : NUMBERS) {
      assertEquals(!(Float.isInfinite(value) || Float.isNaN(value)), Floats.isFinite(value));
    }
  }
  public void testCompare() {
    for (float x : VALUES) {
      for (float y : VALUES) {
        // note: spec requires only that the sign is the same
        assertEquals(x + ", " + y,
                     Float.valueOf(x).compareTo(y),
                     Floats.compare(x, y));
      }
    }
  }
  public void testContains() {
    assertFalse(Floats.contains(EMPTY, (float) 1));
    assertFalse(Floats.contains(ARRAY1, (float) 2));
    assertFalse(Floats.contains(ARRAY234, (float) 1));
    assertTrue(Floats.contains(new float[] {(float) -1}, (float) -1));
    assertTrue(Floats.contains(ARRAY234, (float) 2));
    assertTrue(Floats.contains(ARRAY234, (float) 3));
    assertTrue(Floats.contains(ARRAY234, (float) 4));
    for (float value : NUMBERS) {
      assertTrue("" + value, Floats.contains(new float[] {5f, value}, value));
    }
    // NaN is never "contained": the asserted contract is that NaN does not
    // match itself.
    assertFalse(Floats.contains(new float[] {5f, NaN}, NaN));
  }
  public void testIndexOf() {
    assertEquals(-1, Floats.indexOf(EMPTY, (float) 1));
    assertEquals(-1, Floats.indexOf(ARRAY1, (float) 2));
    assertEquals(-1, Floats.indexOf(ARRAY234, (float) 1));
    assertEquals(0, Floats.indexOf(
        new float[] {(float) -1}, (float) -1));
    assertEquals(0, Floats.indexOf(ARRAY234, (float) 2));
    assertEquals(1, Floats.indexOf(ARRAY234, (float) 3));
    assertEquals(2, Floats.indexOf(ARRAY234, (float) 4));
    // First match wins when the value occurs more than once.
    assertEquals(1, Floats.indexOf(
        new float[] { (float) 2, (float) 3, (float) 2, (float) 3 },
        (float) 3));
    for (float value : NUMBERS) {
      assertEquals("" + value, 1,
          Floats.indexOf(new float[] {5f, value}, value));
    }
    // NaN is never found, consistent with testContains above.
    assertEquals(-1, Floats.indexOf(new float[] {5f, NaN}, NaN));
  }
  public void testIndexOf_arrayTarget() {
    // Subsequence search: an empty target matches at index 0.
    assertEquals(0, Floats.indexOf(EMPTY, EMPTY));
    assertEquals(0, Floats.indexOf(ARRAY234, EMPTY));
    assertEquals(-1, Floats.indexOf(EMPTY, ARRAY234));
    assertEquals(-1, Floats.indexOf(ARRAY234, ARRAY1));
    assertEquals(-1, Floats.indexOf(ARRAY1, ARRAY234));
    assertEquals(0, Floats.indexOf(ARRAY1, ARRAY1));
    assertEquals(0, Floats.indexOf(ARRAY234, ARRAY234));
    assertEquals(0, Floats.indexOf(
        ARRAY234, new float[] { (float) 2, (float) 3 }));
    assertEquals(1, Floats.indexOf(
        ARRAY234, new float[] { (float) 3, (float) 4 }));
    assertEquals(1, Floats.indexOf(ARRAY234, new float[] { (float) 3 }));
    assertEquals(2, Floats.indexOf(ARRAY234, new float[] { (float) 4 }));
    assertEquals(1, Floats.indexOf(new float[] { (float) 2, (float) 3,
        (float) 3, (float) 3, (float) 3 },
        new float[] { (float) 3 }
    ));
    assertEquals(2, Floats.indexOf(
        new float[] { (float) 2, (float) 3, (float) 2,
            (float) 3, (float) 4, (float) 2, (float) 3},
        new float[] { (float) 2, (float) 3, (float) 4}
    ));
    assertEquals(1, Floats.indexOf(
        new float[] { (float) 2, (float) 2, (float) 3,
            (float) 4, (float) 2, (float) 3, (float) 4},
        new float[] { (float) 2, (float) 3, (float) 4}
    ));
    assertEquals(-1, Floats.indexOf(
        new float[] { (float) 4, (float) 3, (float) 2},
        new float[] { (float) 2, (float) 3, (float) 4}
    ));
    for (float value : NUMBERS) {
      assertEquals("" + value, 1, Floats.indexOf(
          new float[] {5f, value, value, 5f}, new float[] {value, value}));
    }
    // A target containing NaN can never be found.
    assertEquals(-1, Floats.indexOf(
        new float[] {5f, NaN, NaN, 5f}, new float[] {NaN, NaN}));
  }
  public void testLastIndexOf() {
    assertEquals(-1, Floats.lastIndexOf(EMPTY, (float) 1));
    assertEquals(-1, Floats.lastIndexOf(ARRAY1, (float) 2));
    assertEquals(-1, Floats.lastIndexOf(ARRAY234, (float) 1));
    assertEquals(0, Floats.lastIndexOf(
        new float[] {(float) -1}, (float) -1));
    assertEquals(0, Floats.lastIndexOf(ARRAY234, (float) 2));
    assertEquals(1, Floats.lastIndexOf(ARRAY234, (float) 3));
    assertEquals(2, Floats.lastIndexOf(ARRAY234, (float) 4));
    // Last match wins here, unlike indexOf.
    assertEquals(3, Floats.lastIndexOf(
        new float[] { (float) 2, (float) 3, (float) 2, (float) 3 },
        (float) 3));
    for (float value : NUMBERS) {
      assertEquals("" + value,
          0, Floats.lastIndexOf(new float[] {value, 5f}, value));
    }
    assertEquals(-1, Floats.lastIndexOf(new float[] {NaN, 5f}, NaN));
  }
  public void testMax_noArgs() {
    // Varargs max() with zero arguments must be rejected.
    try {
      Floats.max();
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  public void testMax() {
    assertEquals(GREATEST, Floats.max(GREATEST));
    assertEquals(LEAST, Floats.max(LEAST));
    assertEquals((float) 9, Floats.max(
        (float) 8, (float) 6, (float) 7,
        (float) 5, (float) 3, (float) 0, (float) 9));
    // Positive zero is greater than negative zero, regardless of order.
    assertEquals(0f, Floats.max(-0f, 0f));
    assertEquals(0f, Floats.max(0f, -0f));
    assertEquals(GREATEST, Floats.max(NUMBERS));
    // NaN anywhere in the input makes the result NaN.
    assertTrue(Float.isNaN(Floats.max(VALUES)));
  }
  public void testMin_noArgs() {
    try {
      Floats.min();
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  public void testMin() {
    assertEquals(LEAST, Floats.min(LEAST));
    assertEquals(GREATEST, Floats.min(GREATEST));
    assertEquals((float) 0, Floats.min(
        (float) 8, (float) 6, (float) 7,
        (float) 5, (float) 3, (float) 0, (float) 9));
    // Negative zero is less than positive zero, regardless of order.
    assertEquals(-0f, Floats.min(-0f, 0f));
    assertEquals(-0f, Floats.min(0f, -0f));
    assertEquals(LEAST, Floats.min(NUMBERS));
    assertTrue(Float.isNaN(Floats.min(VALUES)));
  }
  public void testConcat() {
    assertTrue(Arrays.equals(EMPTY, Floats.concat()));
    assertTrue(Arrays.equals(EMPTY, Floats.concat(EMPTY)));
    assertTrue(Arrays.equals(EMPTY, Floats.concat(EMPTY, EMPTY, EMPTY)));
    assertTrue(Arrays.equals(ARRAY1, Floats.concat(ARRAY1)));
    // concat must copy even a single input array, never return it directly.
    assertNotSame(ARRAY1, Floats.concat(ARRAY1));
    assertTrue(Arrays.equals(ARRAY1, Floats.concat(EMPTY, ARRAY1, EMPTY)));
    assertTrue(Arrays.equals(
        new float[] {(float) 1, (float) 1, (float) 1},
        Floats.concat(ARRAY1, ARRAY1, ARRAY1)));
    assertTrue(Arrays.equals(
        new float[] {(float) 1, (float) 2, (float) 3, (float) 4},
        Floats.concat(ARRAY1, ARRAY234)));
  }
  public void testEnsureCapacity() {
    // When the array is already big enough, the same instance is returned.
    assertSame(EMPTY, Floats.ensureCapacity(EMPTY, 0, 1));
    assertSame(ARRAY1, Floats.ensureCapacity(ARRAY1, 0, 1));
    assertSame(ARRAY1, Floats.ensureCapacity(ARRAY1, 1, 1));
    assertTrue(Arrays.equals(
        new float[] {(float) 1, (float) 0, (float) 0},
        Floats.ensureCapacity(ARRAY1, 2, 1)));
  }
  public void testEnsureCapacity_fail() {
    try {
      Floats.ensureCapacity(ARRAY1, -1, 1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
    try {
      // notice that this should even fail when no growth was needed
      Floats.ensureCapacity(ARRAY1, 1, -1);
      fail();
    } catch (IllegalArgumentException expected) {
    }
  }
  @GwtIncompatible("Float.toString returns different value in GWT.")
  public void testJoin() {
    assertEquals("", Floats.join(",", EMPTY));
    assertEquals("1.0", Floats.join(",", ARRAY1));
    assertEquals("1.0,2.0", Floats.join(",", (float) 1, (float) 2));
    assertEquals("1.02.03.0",
        Floats.join("", (float) 1, (float) 2, (float) 3));
  }
  public void testLexicographicalComparator() {
    // Arrays listed in strictly increasing lexicographical order; the helper
    // verifies the comparator agrees with this ordering.
    List<float[]> ordered = Arrays.asList(
        new float[] {},
        new float[] {LEAST},
        new float[] {LEAST, LEAST},
        new float[] {LEAST, (float) 1},
        new float[] {(float) 1},
        new float[] {(float) 1, LEAST},
        new float[] {GREATEST, Float.MAX_VALUE},
        new float[] {GREATEST, GREATEST},
        new float[] {GREATEST, GREATEST, GREATEST});
    Comparator<float[]> comparator = Floats.lexicographicalComparator();
    Helpers.testComparator(comparator, ordered);
  }
  @GwtIncompatible("SerializableTester")
  public void testLexicographicalComparatorSerializable() {
    // The comparator is a singleton enum, so it must reserialize to itself.
    Comparator<float[]> comparator = Floats.lexicographicalComparator();
    assertSame(comparator, SerializableTester.reserialize(comparator));
  }
  public void testToArray() {
    // need explicit type parameter to avoid javac warning!?
    List<Float> none = Arrays.<Float>asList();
    assertTrue(Arrays.equals(EMPTY, Floats.toArray(none)));
    List<Float> one = Arrays.asList((float) 1);
    assertTrue(Arrays.equals(ARRAY1, Floats.toArray(one)));
    float[] array = {(float) 0, (float) 1, (float) 3};
    List<Float> three = Arrays.asList((float) 0, (float) 1, (float) 3);
    assertTrue(Arrays.equals(array, Floats.toArray(three)));
    assertTrue(Arrays.equals(array, Floats.toArray(Floats.asList(array))));
  }
  public void testToArray_threadSafe() {
    // toArray must produce a correctly sized result even when the source
    // collection lies about its size (simulating concurrent modification).
    for (int delta : new int[] { +1, 0, -1 }) {
      for (int i = 0; i < VALUES.length; i++) {
        List<Float> list = Floats.asList(VALUES).subList(0, i);
        Collection<Float> misleadingSize =
            Helpers.misleadingSizeCollection(delta);
        misleadingSize.addAll(list);
        float[] arr = Floats.toArray(misleadingSize);
        assertEquals(i, arr.length);
        for (int j = 0; j < i; j++) {
          assertEquals(VALUES[j], arr[j]);
        }
      }
    }
  }
  public void testToArray_withNull() {
    List<Float> list = Arrays.asList((float) 0, (float) 1, null);
    try {
      Floats.toArray(list);
      fail();
    } catch (NullPointerException expected) {
    }
  }
  public void testToArray_withConversion() {
    // toArray accepts any Collection<? extends Number> and widens/narrows
    // each element via Number.floatValue().
    float[] array = {(float) 0, (float) 1, (float) 2};
    List<Byte> bytes = Arrays.asList((byte) 0, (byte) 1, (byte) 2);
    List<Short> shorts = Arrays.asList((short) 0, (short) 1, (short) 2);
    List<Integer> ints = Arrays.asList(0, 1, 2);
    List<Float> floats = Arrays.asList((float) 0, (float) 1, (float) 2);
    List<Long> longs = Arrays.asList((long) 0, (long) 1, (long) 2);
    List<Double> doubles = Arrays.asList((double) 0, (double) 1, (double) 2);
    assertTrue(Arrays.equals(array, Floats.toArray(bytes)));
    assertTrue(Arrays.equals(array, Floats.toArray(shorts)));
    assertTrue(Arrays.equals(array, Floats.toArray(ints)));
    assertTrue(Arrays.equals(array, Floats.toArray(floats)));
    assertTrue(Arrays.equals(array, Floats.toArray(longs)));
    assertTrue(Arrays.equals(array, Floats.toArray(doubles)));
  }
  public void testAsList_isAView() {
    // asList wraps the array: writes through the list and through the array
    // are both visible on the other side.
    float[] array = {(float) 0, (float) 1};
    List<Float> list = Floats.asList(array);
    list.set(0, (float) 2);
    assertTrue(Arrays.equals(new float[] {(float) 2, (float) 1}, array));
    array[1] = (float) 3;
    ASSERT.that(list).has().allOf((float) 2, (float) 3).inOrder();
  }
  public void testAsList_toArray_roundTrip() {
    float[] array = { (float) 0, (float) 1, (float) 2 };
    List<Float> list = Floats.asList(array);
    float[] newArray = Floats.toArray(list);
    // Make sure it returned a copy
    list.set(0, (float) 4);
    assertTrue(Arrays.equals(
        new float[] { (float) 0, (float) 1, (float) 2 }, newArray));
    newArray[1] = (float) 5;
    assertEquals((float) 1, (float) list.get(1));
  }
  // This test stems from a real bug found by andrewk
  public void testAsList_subList_toArray_roundTrip() {
    float[] array = { (float) 0, (float) 1, (float) 2, (float) 3 };
    List<Float> list = Floats.asList(array);
    assertTrue(Arrays.equals(new float[] { (float) 1, (float) 2 },
        Floats.toArray(list.subList(1, 3))));
    assertTrue(Arrays.equals(new float[] {},
        Floats.toArray(list.subList(2, 2))));
  }
  public void testAsListEmpty() {
    assertSame(Collections.emptyList(), Floats.asList(EMPTY));
  }
  /**
   * A reference implementation for {@code tryParse} that just catches the exception from
   * {@link Float#valueOf}.
   */
  private static Float referenceTryParse(String input) {
    // Float.valueOf trims whitespace, but tryParse must not accept it.
    if (input.trim().length() < input.length()) {
      return null;
    }
    try {
      return Float.valueOf(input);
    } catch (NumberFormatException e) {
      return null;
    }
  }
  // Asserts that tryParse agrees with the reference implementation above.
  @GwtIncompatible("Floats.tryParse")
  private static void checkTryParse(String input) {
    assertEquals(referenceTryParse(input), Floats.tryParse(input));
  }
  // Asserts that tryParse yields exactly the expected value.
  @GwtIncompatible("Floats.tryParse")
  private static void checkTryParse(float expected, String input) {
    assertEquals(Float.valueOf(expected), Floats.tryParse(input));
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseHex() {
    // Cross product of all the pieces of a hexadecimal float literal.
    for (String signChar : ImmutableList.of("", "+", "-")) {
      for (String hexPrefix : ImmutableList.of("0x", "0X")) {
        for (String iPart : ImmutableList.of("", "0", "1", "F", "f", "c4", "CE")) {
          for (String fPart : ImmutableList.of("", ".", ".F", ".52", ".a")) {
            for (String expMarker : ImmutableList.of("p", "P")) {
              for (String exponent : ImmutableList.of("0", "-5", "+20", "52")) {
                for (String typePart : ImmutableList.of("", "D", "F", "d", "f")) {
                  checkTryParse(
                      signChar + hexPrefix + iPart + fPart + expMarker + exponent + typePart);
                }
              }
            }
          }
        }
      }
    }
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseAllCodePoints() {
    // Exercise non-ASCII digit test cases and the like.
    char[] tmp = new char[2];
    for (int i = Character.MIN_CODE_POINT; i < Character.MAX_CODE_POINT; i++) {
      Character.toChars(i, tmp, 0);
      checkTryParse(String.copyValueOf(tmp, 0, Character.charCount(i)));
    }
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseOfToStringIsOriginal() {
    for (float f : NUMBERS) {
      checkTryParse(f, Float.toString(f));
    }
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseOfToHexStringIsOriginal() {
    for (float f : NUMBERS) {
      checkTryParse(f, Float.toHexString(f));
    }
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseNaN() {
    checkTryParse("NaN");
    checkTryParse("+NaN");
    checkTryParse("-NaN");
  }
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseInfinity() {
    checkTryParse(Float.POSITIVE_INFINITY, "Infinity");
    checkTryParse(Float.POSITIVE_INFINITY, "+Infinity");
    checkTryParse(Float.NEGATIVE_INFINITY, "-Infinity");
  }
  // Inputs that Float.valueOf would reject (or that carry whitespace, which
  // tryParse deliberately refuses).
  private static final String[] BAD_TRY_PARSE_INPUTS =
    { "", "+-", "+-0", " 5", "32 ", " 55 ", "infinity", "POSITIVE_INFINITY", "0x9A", "0x9A.bE-5",
      ".", ".e5", "NaNd", "InfinityF" };
  @GwtIncompatible("Floats.tryParse")
  public void testTryParseFailures() {
    for (String badInput : BAD_TRY_PARSE_INPUTS) {
      assertEquals(referenceTryParse(badInput), Floats.tryParse(badInput));
      assertNull(Floats.tryParse(badInput));
    }
  }
  @GwtIncompatible("NullPointerTester")
  public void testNulls() {
    new NullPointerTester().testAllPublicStaticMethods(Floats.class);
  }
}
| apache-2.0 |
jeffmaury/golo-lang | src/main/java/fr/insalyon/citi/golo/cli/Main.java | 12772 | /*
* Copyright 2012-2013 Institut National des Sciences Appliquées de Lyon (INSA-Lyon)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package fr.insalyon.citi.golo.cli;
import com.beust.jcommander.*;
import fr.insalyon.citi.golo.compiler.GoloClassLoader;
import fr.insalyon.citi.golo.compiler.GoloCompilationException;
import fr.insalyon.citi.golo.compiler.GoloCompiler;
import fr.insalyon.citi.golo.compiler.ir.GoloModule;
import fr.insalyon.citi.golo.compiler.ir.IrTreeDumper;
import fr.insalyon.citi.golo.compiler.parser.ASTCompilationUnit;
import fr.insalyon.citi.golo.compiler.parser.GoloParser;
import fr.insalyon.citi.golo.compiler.parser.ParseException;
import fr.insalyon.citi.golo.doc.AbstractProcessor;
import fr.insalyon.citi.golo.doc.HtmlProcessor;
import fr.insalyon.citi.golo.doc.MarkdownProcessor;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.lang.invoke.MethodHandle;
import java.lang.reflect.Method;
import java.net.MalformedURLException;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.LinkedList;
import java.util.List;
import static java.lang.invoke.MethodHandles.publicLookup;
import static java.lang.invoke.MethodType.genericMethodType;
import static java.lang.invoke.MethodType.methodType;
public class Main {
  // Options accepted before any sub-command.
  static class GlobalArguments {
    // 'help = true' tells JCommander this flag takes no value.
    @Parameter(names = {"--help"}, description = "Prints this message", help = true)
    boolean help;
  }
  // Options for the "version" sub-command.
  @Parameters(commandDescription = "Queries the Golo version")
  static class VersionCommand {
    @Parameter(names = "--full", description = "Prints the full information details")
    boolean full = false;
  }
  // Options for the "compile" sub-command.
  @Parameters(commandDescription = "Compiles Golo source files")
  static class CompilerCommand {
    // Defaults to the current working directory.
    @Parameter(names = "--output", description = "The compiled classes output directory")
    String output = ".";
    // Unnamed (positional) parameters: the source files to compile.
    @Parameter(description = "Golo source files (*.golo)")
    List<String> sources = new LinkedList<>();
  }
  // Options for the "run" sub-command (executes already-compiled code).
  @Parameters(commandDescription = "Runs compiled Golo code")
  static class RunCommand {
    @Parameter(names = "--module", description = "The Golo module with a main function", required = true)
    String module;
    // Positional parameters are forwarded to the module's main function.
    @Parameter(description = "Program arguments")
    List<String> arguments = new LinkedList<>();
    @Parameter(names = "--classpath", variableArity = true, description = "Classpath elements (.jar and directories)")
    List<String> classpath = new LinkedList<>();
  }
  // Options for the "golo" sub-command (compile-and-run from sources).
  @Parameters(commandDescription = "Dynamically loads and runs from Golo source files")
  static class GoloGoloCommand {
    @Parameter(names = "--files", variableArity = true, description = "Golo source files (the last one has a main function)", required = true)
    List<String> files = new LinkedList<>();
    @Parameter(names = "--args", variableArity = true, description = "Program arguments")
    List<String> arguments = new LinkedList<>();
    @Parameter(names = "--classpath", variableArity = true, description = "Classpath elements (.jar and directories)")
    List<String> classpath = new LinkedList<>();
  }
  // Options for the "diagnose" sub-command (compiler internals dumps).
  @Parameters(commandDescription = "Diagnosis for the Golo compiler internals")
  static class DiagnoseCommand {
    // Validated by DiagnoseModeValidator: only "ast" or "ir" are accepted.
    @Parameter(names = "--tool", description = "The diagnosis tool to use: {ast, ir}", validateWith = DiagnoseModeValidator.class)
    String mode = "ir";
    @Parameter(description = "Golo source files (*.golo)")
    List<String> files = new LinkedList<>();
  }
  // JCommander hook rejecting any --tool value other than "ast" or "ir".
  public static class DiagnoseModeValidator implements IParameterValidator {

    @Override
    public void validate(String name, String value) throws ParameterException {
      switch (value) {
        case "ast":
        case "ir":
          return;
        default:
          throw new ParameterException("Diagnosis tool must be in: {ast, ir}");
      }
    }
  }
  // Options for the "doc" sub-command (documentation generation).
  @Parameters(commandDescription = "Generate documentation from Golo source files")
  private static class DocCommand {
    // Validated by DocFormatValidator: only "html" or "markdown" are accepted.
    @Parameter(names = "--format", description = "Documentation output format (html, markdown)", validateWith = DocFormatValidator.class)
    String format = "html";
    @Parameter(names = "--output", description = "The documentation output directory")
    String output = ".";
    @Parameter(description = "Golo source files (*.golo)")
    List<String> sources = new LinkedList<>();
  }
  // JCommander hook rejecting any --format value other than "html" or "markdown".
  public static class DocFormatValidator implements IParameterValidator {

    @Override
    public void validate(String name, String value) throws ParameterException {
      switch (value) {
        case "html":
        case "markdown":
          return;
        default:
          throw new ParameterException("Output format must be in: {html, markdown}");
      }
    }
  }
  /**
   * CLI entry point: registers each sub-command with JCommander, parses the
   * arguments, and dispatches to the matching handler. Usage is printed when
   * --help is given, no sub-command is named, or parsing fails.
   *
   * @param args raw command-line arguments.
   * @throws Throwable anything thrown by the invoked Golo program or command.
   */
  public static void main(String... args) throws Throwable {
    GlobalArguments global = new GlobalArguments();
    JCommander cmd = new JCommander(global);
    cmd.setProgramName("golo");
    // One options object per sub-command; JCommander fills the one matching
    // the command name found on the command line.
    VersionCommand version = new VersionCommand();
    cmd.addCommand("version", version);
    CompilerCommand goloc = new CompilerCommand();
    cmd.addCommand("compile", goloc);
    RunCommand golo = new RunCommand();
    cmd.addCommand("run", golo);
    GoloGoloCommand gologolo = new GoloGoloCommand();
    cmd.addCommand("golo", gologolo);
    DiagnoseCommand diagnose = new DiagnoseCommand();
    cmd.addCommand("diagnose", diagnose);
    DocCommand doc = new DocCommand();
    cmd.addCommand("doc", doc);
    try {
      cmd.parse(args);
      if (global.help || cmd.getParsedCommand() == null) {
        cmd.usage();
      } else {
        // Dispatch on the sub-command name chosen by the user.
        switch (cmd.getParsedCommand()) {
          case "version":
            version(version);
            break;
          case "compile":
            compile(goloc);
            break;
          case "run":
            run(golo);
            break;
          case "golo":
            golo(gologolo);
            break;
          case "diagnose":
            diagnose(diagnose);
            break;
          case "doc":
            doc(doc);
            break;
          default:
            // Unreachable: every registered command is handled above.
            throw new AssertionError("WTF?");
        }
      }
    } catch (ParameterException exception) {
      // Bad command line: report the problem, then show usage.
      System.err.println(exception.getMessage());
      System.out.println();
      cmd.usage();
    }
  }
private static void diagnose(DiagnoseCommand diagnose) {
try {
switch (diagnose.mode) {
case "ast":
dumpASTs(diagnose.files);
break;
case "ir":
dumpIRs(diagnose.files);
break;
default:
throw new AssertionError("WTF?");
}
} catch (FileNotFoundException e) {
System.err.println(e.getMessage());
} catch (GoloCompilationException e) {
handleCompilationException(e);
}
}
private static void dumpASTs(List<String> files) throws FileNotFoundException {
GoloCompiler compiler = new GoloCompiler();
for (String file : files) {
System.out.println(">>> AST for: " + file);
ASTCompilationUnit ast = compiler.parse(file, new GoloParser(new FileInputStream(file)));
ast.dump("% ");
System.out.println();
}
}
private static void dumpIRs(List<String> files) throws FileNotFoundException {
GoloCompiler compiler = new GoloCompiler();
IrTreeDumper dumper = new IrTreeDumper();
for (String file : files) {
System.out.println(">>> IR for: " + file);
ASTCompilationUnit ast = compiler.parse(file, new GoloParser(new FileInputStream(file)));
GoloModule module = compiler.check(ast);
dumper.visitModule(module);
System.out.println();
}
}
  /**
   * Reports a Golo compilation failure and terminates the JVM.
   *
   * Prints the exception message, its cause's message (if any), and every
   * collected compilation problem, each prefixed with "[error]".
   * NOTE: this calls {@code System.exit(1)} — it never returns to the caller.
   *
   * @param e the compilation exception to report.
   */
  static void handleCompilationException(GoloCompilationException e) {
    if (e.getMessage() != null) {
      System.out.println("[error] " + e.getMessage());
    }
    if (e.getCause() != null) {
      System.out.println("[error] " + e.getCause().getMessage());
    }
    for (GoloCompilationException.Problem problem : e.getProblems()) {
      System.out.println("[error] " + problem.getDescription());
    }
    System.exit(1);
  }
private static void version(VersionCommand options) {
if (options.full) {
System.out.println(Metadata.VERSION + " (build " + Metadata.TIMESTAMP + ")");
} else {
System.out.println(Metadata.VERSION);
}
}
  /**
   * Handles the {@code compile} sub-command: compiles each Golo source file to
   * JVM class files in the configured output directory.
   *
   * @param options the parsed {@code compile} sub-command arguments.
   */
  private static void compile(CompilerCommand options) {
    GoloCompiler compiler = new GoloCompiler();
    File outputDir = new File(options.output);
    for (String source : options.sources) {
      File file = new File(source);
      try (FileInputStream in = new FileInputStream(file)) {
        compiler.compileTo(file.getName(), in, outputDir);
      } catch (IOException e) {
        // NOTE(review): this aborts the remaining sources on the first unreadable
        // file, while doc() keeps going — confirm the early return is intentional.
        System.out.println("[error] " + source + " does not exist or could not be opened.");
        return;
      } catch (GoloCompilationException e) {
        // Prints the problems and exits the JVM with status 1.
        handleCompilationException(e);
      }
    }
  }
  /**
   * Invokes the public static {@code main(String[])} method of the given class
   * through a method handle, passing the remaining command-line arguments.
   *
   * @param klass     the compiled module class to run.
   * @param arguments arguments forwarded to the module's {@code main}.
   * @throws Throwable anything thrown by the invoked code, plus
   *         {@code NoSuchMethodException} when no matching {@code main} exists.
   */
  private static void callRun(Class<?> klass, String[] arguments) throws Throwable {
    MethodHandle main = publicLookup().findStatic(klass, "main", methodType(void.class, String[].class));
    main.invoke(arguments);
  }
  /**
   * Handles the {@code run} sub-command: loads an already-compiled module from the
   * given classpath and invokes its {@code main} with the remaining arguments.
   *
   * @param golo the parsed {@code run} sub-command arguments.
   * @throws Throwable anything thrown by the executed module code.
   */
  private static void run(RunCommand golo) throws Throwable {
    try {
      URLClassLoader primaryClassLoader = primaryClassLoader(golo.classpath);
      // Make the user classpath visible to code that resolves via the context loader.
      Thread.currentThread().setContextClassLoader(primaryClassLoader);
      Class<?> module = Class.forName(golo.module, true, primaryClassLoader);
      callRun(module, golo.arguments.toArray(new String[golo.arguments.size()]));
    } catch (ClassNotFoundException e) {
      System.out.println("The module " + golo.module + " could not be loaded.");
    } catch (NoSuchMethodException e) {
      // Raised by callRun() when the module has no main(String[]) entry point.
      System.out.println("The module " + golo.module + " does not have a main method with an argument.");
    }
  }
private static URLClassLoader primaryClassLoader(List<String> classpath) throws MalformedURLException {
URL[] urls = new URL[classpath.size()];
int index = 0;
for (String element : classpath) {
urls[index] = new File(element).toURI().toURL();
index = index + 1;
}
return new URLClassLoader(urls);
}
private static void golo(GoloGoloCommand gologolo) throws Throwable {
URLClassLoader primaryClassLoader = primaryClassLoader(gologolo.classpath);
Thread.currentThread().setContextClassLoader(primaryClassLoader);
GoloClassLoader loader = new GoloClassLoader(primaryClassLoader);
Class<?> lastClass = null;
for (String goloFile : gologolo.files) {
File file = new File(goloFile);
if (!file.exists()) {
System.out.println("Error: " + file + " does not exist.");
return;
}
if (!file.isFile()) {
System.out.println("Error: " + file + " is not a file.");
return;
}
try (FileInputStream in = new FileInputStream(file)) {
lastClass = loader.load(file.getName(), in);
} catch (GoloCompilationException e) {
handleCompilationException(e);
}
}
callRun(lastClass, gologolo.arguments.toArray(new String[gologolo.arguments.size()]));
}
  /**
   * Handles the {@code doc} sub-command: parses each Golo source file and renders
   * documentation for all successfully parsed units in the requested format.
   * Unreadable or syntactically invalid files are reported and skipped.
   *
   * @param options the parsed {@code doc} sub-command arguments.
   */
  private static void doc(DocCommand options) {
    AbstractProcessor processor;
    // The format value was already validated by DocFormatValidator.
    switch (options.format) {
      case "markdown":
        processor = new MarkdownProcessor();
        break;
      case "html":
        processor = new HtmlProcessor();
        break;
      default:
        throw new AssertionError("WTF?");
    }
    LinkedList<ASTCompilationUnit> units = new LinkedList<>();
    for (String source : options.sources) {
      try (FileInputStream in = new FileInputStream(source)) {
        units.add(new GoloParser(in).CompilationUnit());
      } catch (IOException e) {
        System.out.println("[error] " + source + " does not exist or could not be opened.");
      } catch (ParseException e) {
        System.out.println("[error] " + source + " has syntax errors: " + e.getMessage());
      }
    }
    try {
      processor.process(units, Paths.get(options.output));
    } catch (Throwable throwable) {
      // Rendering failures (I/O, template errors, ...) are reported, not fatal.
      System.out.println("[error] " + throwable.getMessage());
    }
  }
}
| apache-2.0 |
ederign/uberfire | uberfire-extensions/uberfire-commons-editor/uberfire-commons-editor-client/src/main/java/org/uberfire/ext/editor/commons/client/file/popups/DeletePopUpView.java | 3430 | /*
* Copyright 2016 Red Hat, Inc. and/or its affiliates.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.uberfire.ext.editor.commons.client.file.popups;
import javax.enterprise.context.Dependent;
import javax.inject.Inject;
import org.gwtbootstrap3.client.ui.ModalFooter;
import org.gwtbootstrap3.client.ui.constants.ButtonType;
import org.gwtbootstrap3.client.ui.constants.IconType;
import org.jboss.errai.common.client.dom.Div;
import org.jboss.errai.ui.client.local.api.IsElement;
import org.jboss.errai.ui.client.local.spi.TranslationService;
import org.jboss.errai.ui.shared.api.annotations.DataField;
import org.jboss.errai.ui.shared.api.annotations.Templated;
import org.uberfire.ext.editor.commons.client.file.popups.commons.ToggleCommentPresenter;
import org.uberfire.ext.editor.commons.client.resources.i18n.Constants;
import org.uberfire.ext.widgets.common.client.common.popups.BaseModal;
import org.uberfire.ext.widgets.common.client.common.popups.footers.GenericModalFooter;
import org.uberfire.mvp.Command;
@Dependent
@Templated
public class DeletePopUpView implements DeletePopUpPresenter.View,
                                        IsElement {

    @Inject
    @DataField("body")
    Div body;

    @Inject
    private TranslationService translationService;

    private DeletePopUpPresenter presenter;

    private BaseModal modal;

    /**
     * Binds the presenter, assembles the confirmation modal and embeds the
     * optional comment widget into the modal body.
     */
    @Override
    public void init(DeletePopUpPresenter presenter) {
        this.presenter = presenter;
        this.modal = new CommonModalBuilder()
                .addHeader(translate(Constants.DeletePopUpView_ConfirmDelete))
                .addBody(body)
                .addFooter(buildFooter())
                .build();
        body.appendChild(presenter.getToggleCommentPresenter().getViewElement());
    }

    @Override
    public void show() {
        modal.show();
    }

    @Override
    public void hide() {
        modal.hide();
    }

    /**
     * Footer with a neutral Cancel button and a destructive (danger) Delete button.
     */
    private ModalFooter buildFooter() {
        GenericModalFooter footer = new GenericModalFooter();
        footer.addButton(translate(Constants.DeletePopUpView_Cancel),
                         () -> presenter.cancel(),
                         ButtonType.DEFAULT);
        footer.addButton(translate(Constants.DeletePopUpView_Delete),
                         () -> presenter.delete(),
                         IconType.REMOVE,
                         ButtonType.DANGER);
        return footer;
    }

    /** Resolves an i18n key through the Errai translation service. */
    private String translate(final String key) {
        return translationService.format(key);
    }
}
| apache-2.0 |
rakawestu/mCalendarView | mcalendarview/src/main/java/sun/bob/mcalendarview/mCalendarView.java | 5945 | package sun.bob.mcalendarview;
import android.content.Context;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.drawable.ShapeDrawable;
import android.graphics.drawable.shapes.RectShape;
import android.support.v4.app.FragmentActivity;
import android.support.v4.view.ViewPager;
import android.util.AttributeSet;
import android.view.View;
import sun.bob.mcalendarview.adapters.CalendarViewAdapter;
import sun.bob.mcalendarview.listeners.OnDateClickListener;
import sun.bob.mcalendarview.listeners.OnMonthChangeListener;
import sun.bob.mcalendarview.utils.CalendarUtil;
import sun.bob.mcalendarview.utils.CurrentCalendar;
import sun.bob.mcalendarview.views.BaseCellView;
import sun.bob.mcalendarview.vo.DateData;
import sun.bob.mcalendarview.vo.MarkedDates;
/**
* Created by bob.sun on 15/8/27.
*/
public class mCalendarView extends ViewPager {
    // Custom cell resources; -1 / null means "use the default cell".
    private int dateCellViewResId = -1;
    private View dateCellView = null;
    private int markedStyle = -1;
    private int markedCellResId = -1;
    private View markedCellView = null;
    // Guards init() so the pager is only configured once per travelTo()/construction.
    private boolean initted = false;
    private DateData currentDate;
    private CalendarViewAdapter adapter;
    private int width, height;
    public mCalendarView(Context context) {
        super(context);
        // Initialization needs a FragmentManager, so it only happens when the
        // hosting context is a FragmentActivity.
        if (context instanceof FragmentActivity){
            init((FragmentActivity) context);
        }
    }
    public mCalendarView(Context context, AttributeSet attrs) {
        super(context, attrs);
        if (context instanceof FragmentActivity){
            init((FragmentActivity) context);
        }
    }
    // Sets up the pager adapter around currentDate (defaults to today).
    public void init(FragmentActivity activity){
        if (initted){
            return;
        }
        initted = true;
        if (currentDate == null){
            currentDate = CurrentCalendar.getCurrentDateData();
        }
        // TODO: 15/8/28 Will this cause trouble when achieved?
        if (this.getId() == View.NO_ID){
            this.setId(R.id.calendarViewPager);
        }
        adapter = new CalendarViewAdapter(activity.getSupportFragmentManager()).setDate(currentDate);
        this.setAdapter(adapter);
        // NOTE(review): 500 presumably centers the pager so the user can swipe
        // months in both directions — confirm against the adapter's page count.
        this.setCurrentItem(500);
        addBackground();
    }
    // Light-gray rectangular outline behind the calendar grid.
    private void addBackground(){
        ShapeDrawable drawable = new ShapeDrawable(new RectShape());
        drawable.getPaint().setColor(Color.LTGRAY);
        drawable.getPaint().setStyle(Paint.Style.STROKE);
        this.setBackground(drawable);
    }
    //// TODO: 15/8/28 May cause trouble when invoked after inited
    // Jumps the calendar to the given date by re-running initialization.
    public mCalendarView travelTo(DateData dateData){
        this.currentDate = dateData;
        CalendarUtil.date = dateData;
        this.initted = false;
        init((FragmentActivity) getContext());
        return this;
    }
    public mCalendarView markDate(int year, int month, int day){
        MarkedDates.getInstance().add(new DateData(year, month, day));
        return this;
    }
    public mCalendarView unMarkDate(int year, int month, int day){
        MarkedDates.getInstance().remove(new DateData(year, month, day));
        return this;
    }
    public mCalendarView markDate(DateData date){
        MarkedDates.getInstance().add(date);
        return this;
    }
    public mCalendarView unMarkDate(DateData date){
        MarkedDates.getInstance().remove(date);
        return this;
    }
    public MarkedDates getMarkedDates(){
        return MarkedDates.getInstance();
    }
    // Layout resource used to render each date cell.
    public mCalendarView setDateCell(int resId){
        adapter.setDateCellId(resId);
        return this;
    }
//    public mCalendarView setDateCellView(BaseCellView view){
//        adapter.setDateCell(view);
//        return this;
//    }
    // Style and color applied to marked dates (see MarkStyle constants).
    public mCalendarView setMarkedStyle(int style, int color){
        MarkStyle.current = style;
        MarkStyle.color = color;
        return this;
    }
    public mCalendarView setMarkedStyle(int style){
        MarkStyle.current = style;
        return this;
    }
    public mCalendarView setMarkedCell(int resId){
        adapter.setMarkCellId(resId);
        return this;
    }
//    public mCalendarView setMarkedCellView(BaseCellView view){
//        adapter.setDateCell(view);
//        return this;
//    }
    public mCalendarView setOnMonthChangeListener(OnMonthChangeListener listener){
        this.addOnPageChangeListener(listener);
        return this;
    }
    public mCalendarView setOnDateClickListener(OnDateClickListener onDateClickListener){
        OnDateClickListener.instance = onDateClickListener;
        return this;
    }
    @Override
    protected void onMeasure(int measureWidthSpec,int measureHeightSpec){
        super.onMeasure(measureWidthSpec, measureHeightSpec);
        width = measureWidth(measureWidthSpec);
        height = measureHeight(measureHeightSpec);
        this.setMeasuredDimension(width, height);
    }
    private int measureWidth(int measureSpec) {
        int specMode = MeasureSpec.getMode(measureSpec);
        int specSize = MeasureSpec.getSize(measureSpec);
        int result = 0;
        if (specMode == MeasureSpec.AT_MOST) {
            // 7 columns (one per weekday), converted from dp to pixels.
            result = (int) (CellConfig.cellWidth * 7 * getContext().getResources().getSystem().getDisplayMetrics().density);
        } else if (specMode == MeasureSpec.EXACTLY) {
            result = specSize;
        } else {
            // NOTE(review): returning cellHeight from a *width* measurement looks
            // like a copy/paste slip — confirm whether cellWidth was intended.
            result = CellConfig.cellHeight;
        }
        return result;
    }
    private int measureHeight(int measureSpec) {
        int specMode = MeasureSpec.getMode(measureSpec);
        int specSize = MeasureSpec.getSize(measureSpec);
        int result = 0;
        if (specMode == MeasureSpec.AT_MOST) {
            // NOTE(review): uses cellWidth * 7, identical to measureWidth() — looks
            // copied; confirm whether cellHeight * (number of rows) was intended.
            result = (int) (CellConfig.cellWidth * 7 * getContext().getResources().getSystem().getDisplayMetrics().density);
        } else if (specMode == MeasureSpec.EXACTLY) {
            result = specSize;
        } else {
            result = CellConfig.cellHeight;
        }
        return result;
    }
}
| apache-2.0 |
liveontologies/elk-reasoner | elk-reasoner/src/main/java/org/semanticweb/elk/reasoner/indexing/model/IndexedRangeFiller.java | 2380 | package org.semanticweb.elk.reasoner.indexing.model;
import org.semanticweb.elk.owl.interfaces.ElkClassExpression;
/*
* #%L
* ELK Reasoner
* $Id:$
* $HeadURL:$
* %%
* Copyright (C) 2011 - 2015 Department of Computer Science, University of Oxford
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import org.semanticweb.elk.owl.interfaces.ElkObjectProperty;
import org.semanticweb.elk.owl.interfaces.ElkObjectSomeValuesFrom;
/**
* An {@link IndexedContextRoot} constructed from an
* {@link IndexedObjectProperty} and an {@link IndexedClassExpression}.<br>
*
* Notation:
*
* <pre>
* C ⊓ ∃R-
* </pre>
*
* It is logically equivalent to an OWL class expression
* {@code ObjectIntersectionOf(C ObjectSomeValuesFrom(ObjectInverseOf(R) owl:Thing))}
* <br>
*
* The parameters can be obtained as follows:<br>
*
* C = {@link #getFiller()}<br>
* R = {@link #getProperty()}<br>
*
* @see IndexedObjectSomeValuesFrom#getRangeFiller()
*
*/
public interface IndexedRangeFiller extends IndexedContextRoot {

	/**
	 * @return The representation of the {@link ElkObjectProperty} which range
	 *         this {@link IndexedRangeFiller} subsumes. It is the property of
	 *         the {@link ElkObjectSomeValuesFrom} corresponding to this
	 *         {@link IndexedRangeFiller}.
	 */
	IndexedObjectProperty getProperty();

	/**
	 * @return The representation of the {@link ElkClassExpression} which this
	 *         {@link IndexedRangeFiller} subsumes. It is the filler of the
	 *         {@link ElkObjectSomeValuesFrom} corresponding to this
	 *         {@link IndexedRangeFiller}.
	 */
	IndexedClassExpression getFiller();

	/**
	 * The visitor pattern for instances
	 * 
	 * @author Yevgeny Kazakov
	 * 
	 * @param <O>
	 *            the type of the output
	 */
	interface Visitor<O> {

		/**
		 * @param element
		 *            the {@link IndexedRangeFiller} to visit
		 * @return the output produced for the visited element
		 */
		O visit(IndexedRangeFiller element);

	}

}
| apache-2.0 |
IllusionRom-deprecated/android_platform_tools_idea | python/src/com/jetbrains/python/testing/nosetest/PythonNoseTestRunConfigurationParams.java | 1002 | /*
* Copyright 2000-2013 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.jetbrains.python.testing.nosetest;
import com.jetbrains.python.testing.AbstractPythonTestRunConfigurationParams;
/**
* User: catherine
*/
public interface PythonNoseTestRunConfigurationParams {
  // Additional command-line parameters passed to the nose test runner.
  String getParams();
  void setParams(String params);

  // Whether the additional parameters above should be used at all.
  boolean useParam();
  void useParam(boolean useParam);

  // Access to the shared (test-framework-agnostic) run configuration parameters.
  AbstractPythonTestRunConfigurationParams getTestRunConfigurationParams();
}
| apache-2.0 |
smgoller/geode | geode-for-redis/src/main/java/org/apache/geode/redis/internal/services/locking/StripedRunnable.java | 1028 | /*
* Copyright (C) 2000-2012 Heinz Max Kabutz
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership. Heinz Max Kabutz licenses
* this file to you under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License. You may
* obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.geode.redis.internal.services.locking;
/**
 * All of the Runnables in the same "Stripe" will be executed consecutively.
 *
 * Marker interface: the stripe key itself is supplied by the inherited
 * {@code StripedObject} contract; this interface adds no methods of its own.
 *
 * @author Dr Heinz M. Kabutz
 * @see StripedExecutorService
 */
public interface StripedRunnable extends Runnable, StripedObject {
}
| apache-2.0 |
izeye/spring-boot | spring-boot-samples/spring-boot-sample-actuator-log4j2/src/test/java/sample/actuator/log4j2/SampleActuatorLog4J2ApplicationTests.java | 1966 | /*
* Copyright 2012-2016 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sample.actuator.log4j2;
import java.util.Map;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.context.web.LocalServerPort;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.boot.test.context.SpringBootTest.WebEnvironment;
import org.springframework.boot.test.web.client.TestRestTemplate;
import org.springframework.http.HttpStatus;
import org.springframework.http.ResponseEntity;
import org.springframework.test.annotation.DirtiesContext;
import org.springframework.test.context.junit4.SpringRunner;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Basic integration tests for service demo application.
*
* @author Dave Syer
*/
@RunWith(SpringRunner.class)
@SpringBootTest(webEnvironment = WebEnvironment.RANDOM_PORT)
@DirtiesContext
public class SampleActuatorLog4J2ApplicationTests {

	// Random HTTP port assigned to the embedded server for this test run.
	@LocalServerPort
	private int port;

	/**
	 * Smoke test: GET / must answer 200 with the expected greeting message.
	 */
	@Test
	public void testHome() throws Exception {
		@SuppressWarnings("rawtypes")
		ResponseEntity<Map> entity = new TestRestTemplate()
				.getForEntity("http://localhost:" + this.port, Map.class);
		assertThat(entity.getStatusCode()).isEqualTo(HttpStatus.OK);
		@SuppressWarnings("unchecked")
		Map<String, Object> body = entity.getBody();
		assertThat(body.get("message")).isEqualTo("Hello Daniel");
	}

}
| apache-2.0 |
mmatz-ccri/geomesa | geomesa-utils/src/main/java/org/locationtech/geomesa/utils/interop/SimpleFeatureTypes.java | 969 | /*
* Copyright 2015 Commonwealth Computer Research, Inc.
*
* Licensed under the Apache License, Version 2.0 (the License);
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an AS IS BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.locationtech.geomesa.utils.interop;
import org.locationtech.geomesa.utils.geotools.SimpleFeatureTypes$;
import org.opengis.feature.simple.SimpleFeatureType;
public class SimpleFeatureTypes {

    /**
     * Java-friendly facade: creates a {@link SimpleFeatureType} by delegating to
     * the Scala {@code SimpleFeatureTypes} singleton object.
     *
     * @param name the feature type (schema) name
     * @param spec the attribute specification string
     * @return the created feature type
     */
    public static SimpleFeatureType createType(String name, String spec) {
        return SimpleFeatureTypes$.MODULE$.createType(name, spec);
    }
}
| apache-2.0 |
agentmilindu/stratos | components/org.apache.stratos.cloud.controller/src/main/java/org/apache/stratos/cloud/controller/iaases/vcloud/VCloudPartitionValidator.java | 2069 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.stratos.cloud.controller.iaases.vcloud;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.stratos.cloud.controller.domain.IaasProvider;
import org.apache.stratos.cloud.controller.domain.Partition;
import org.apache.stratos.cloud.controller.exception.InvalidPartitionException;
import org.apache.stratos.cloud.controller.iaases.Iaas;
import org.apache.stratos.cloud.controller.iaases.PartitionValidator;
import java.util.Properties;
/**
* The VCloud {@link org.apache.stratos.cloud.controller.iaases.PartitionValidator} implementation.
*/
public class VCloudPartitionValidator implements PartitionValidator {

    @SuppressWarnings("unused")
    private static final Log log = LogFactory.getLog(VCloudPartitionValidator.class);

    // Provider set via setIaasProvider(); returned unchanged by validate().
    private IaasProvider iaasProvider;
    @SuppressWarnings("unused")
    private Iaas iaas;

    /**
     * Currently a pass-through: returns the configured provider without
     * inspecting the partition or properties (see TODO below).
     */
    @Override
    public IaasProvider validate(Partition partition, Properties properties) throws InvalidPartitionException {
        //TODO: implement real validation logic
        return iaasProvider;
    }

    @Override
    public void setIaasProvider(IaasProvider iaas) {
        this.iaasProvider = iaas;
        this.iaas = iaas.getIaas();
    }
}
| apache-2.0 |
SES-fortiss/SmartGridCoSimulation | projects/previousProjects/memapLinProgDenisThesis_2Houses/src/main/java/linprogMPC/helper/MyTimeUnit.java | 536 | package linprogMPC.helper;
import java.util.concurrent.TimeUnit;
import linprogMPC.ThesisTopologySimple;
public class MyTimeUnit {

	/**
	 * Returns the length of one simulation step, expressed in the given unit.
	 *
	 * @param timeUnit target unit (SECONDS, MINUTES or HOURS are supported)
	 * @return the step length as a fractional value in {@code timeUnit}
	 * @throws RuntimeException for any other {@link TimeUnit}
	 */
	public static double stepLength(TimeUnit timeUnit) {
		// Bug fix: the previous expression 24*3600/TIMESTEPS_PER_DAY used integer
		// division, silently truncating whenever TIMESTEPS_PER_DAY does not divide
		// 86400 (seconds per day) evenly. Force floating-point arithmetic instead.
		double stepLengthInSeconds = (24.0 * 3600.0) / ThesisTopologySimple.TIMESTEPS_PER_DAY;
		switch (timeUnit) {
		case SECONDS:
			return stepLengthInSeconds;
		case MINUTES:
			return stepLengthInSeconds / 60;
		case HOURS:
			return stepLengthInSeconds / 3600;
		default:
			throw new RuntimeException("Timeunit " + timeUnit + " not implemented.");
		}
	}
}
| apache-2.0 |
psakar/Resteasy | server-adapters/resteasy-netty/src/main/java/org/jboss/resteasy/plugins/server/netty/NettyHttpRequest.java | 4167 | package org.jboss.resteasy.plugins.server.netty;
import org.jboss.resteasy.core.SynchronousDispatcher;
import org.jboss.resteasy.core.SynchronousExecutionContext;
import org.jboss.resteasy.plugins.providers.FormUrlEncodedProvider;
import org.jboss.resteasy.plugins.server.BaseHttpRequest;
import org.jboss.resteasy.specimpl.ResteasyHttpHeaders;
import org.jboss.resteasy.spi.NotImplementedYetException;
import org.jboss.resteasy.spi.ResteasyAsynchronousContext;
import org.jboss.resteasy.spi.ResteasyUriInfo;
import org.jboss.resteasy.util.Encode;
import javax.ws.rs.core.HttpHeaders;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.MultivaluedMap;
import java.io.IOException;
import java.io.InputStream;
import java.net.URI;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
/**
* Abstraction for an inbound http request on the server, or a response from a server to a client
* <p/>
* We have this abstraction so that we can reuse marshalling objects in a client framework and serverside framework
*
* @author <a href="mailto:bill@burkecentral.com">Bill Burke</a>
* @author Norman Maurer
* @version $Revision: 1 $
*/
public class NettyHttpRequest extends BaseHttpRequest
{
   protected ResteasyHttpHeaders httpHeaders;
   protected SynchronousDispatcher dispatcher;
   protected String httpMethod;
   protected InputStream inputStream;
   // Per-request attribute storage backing get/set/removeAttribute.
   protected Map<String, Object> attributes = new HashMap<String, Object>();
   protected NettyHttpResponse httpResponse;
   // Whether the client sent an "Expect: 100-continue" header.
   private final boolean is100ContinueExpected;

   public NettyHttpRequest(ResteasyHttpHeaders httpHeaders, ResteasyUriInfo uri, String httpMethod, SynchronousDispatcher dispatcher, NettyHttpResponse httpResponse, boolean is100ContinueExpected)
   {
      super(uri);
      this.is100ContinueExpected = is100ContinueExpected;
      this.httpResponse = httpResponse;
      this.dispatcher = dispatcher;
      this.httpHeaders = httpHeaders;
      this.httpMethod = httpMethod;
   }

   @Override
   public MultivaluedMap<String, String> getMutableHeaders()
   {
      return httpHeaders.getMutableHeaders();
   }

   @Override
   public void setHttpMethod(String method)
   {
      this.httpMethod = method;
   }

   /**
    * Enumerates the attribute names. NOTE(review): the enumeration is backed by a
    * live iterator over the attribute map — mutating attributes while enumerating
    * will fail with ConcurrentModificationException; confirm callers never do that.
    */
   @Override
   public Enumeration<String> getAttributeNames()
   {
      Enumeration<String> en = new Enumeration<String>()
      {
         private Iterator<String> it = attributes.keySet().iterator();
         @Override
         public boolean hasMoreElements()
         {
            return it.hasNext();
         }

         @Override
         public String nextElement()
         {
            return it.next();
         }
      };
      return en;
   }

   // Async processing is emulated: execution stays synchronous on this dispatcher.
   @Override
   public ResteasyAsynchronousContext getAsyncContext()
   {
      return new SynchronousExecutionContext(dispatcher, this, httpResponse);
   }

   @Override
   public Object getAttribute(String attribute)
   {
      return attributes.get(attribute);
   }

   @Override
   public void setAttribute(String name, Object value)
   {
      attributes.put(name, value);
   }

   @Override
   public void removeAttribute(String name)
   {
      attributes.remove(name);
   }

   @Override
   public HttpHeaders getHttpHeaders()
   {
      return httpHeaders;
   }

   @Override
   public InputStream getInputStream()
   {
      return inputStream;
   }

   @Override
   public void setInputStream(InputStream stream)
   {
      this.inputStream = stream;
   }

   @Override
   public String getHttpMethod()
   {
      return httpMethod;
   }

   public NettyHttpResponse getResponse()
   {
      return httpResponse;
   }

   // Delegates to the response, which tracks HTTP keep-alive semantics.
   public boolean isKeepAlive()
   {
      return httpResponse.isKeepAlive();
   }

   public boolean is100ContinueExpected()
   {
      return is100ContinueExpected;
   }

   // Server-side forwarding is not supported by this transport.
   @Override
   public void forward(String path)
   {
      throw new NotImplementedYetException();
   }

   @Override
   public boolean wasForwarded()
   {
      return false;
   }
}
| apache-2.0 |
ursgraf/compiler | src/ch/ntb/inf/deep/config/Segment.java | 1898 | /*
* Copyright 2011 - 2013 NTB University of Applied Sciences in Technology
* Buchs, Switzerland, http://www.ntb.ch/inf
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package ch.ntb.inf.deep.config;
import ch.ntb.inf.deep.classItems.Item;
import ch.ntb.inf.deep.strings.HString;
public class Segment extends Item {
	// Device this segment belongs to.
	public Device owner;
	// Bit flags describing the segment; the exact meaning is defined by the config parser.
	public int attributes = 0;
	// Total segment size in bytes; usedSize tracks how much has been allocated so far.
	public int size = 0;
	public int usedSize = 0;
	// Memory bus width of the segment.
	public int width = 0;

	public Segment(String jname, Device owner){
		this.name = HString.getRegisteredHString(jname);
		this.owner = owner;
	}

	// OR-merges additional attribute flags into the current set.
	public void addAttributes(int attributes) {
		this.attributes |= attributes;
	}

	/**
	 * Derives the segment size from its end address. Only effective once the base
	 * address has been set (address >= 0).
	 * NOTE(review): an endAddress below the base address yields a negative size
	 * without any check — confirm callers guarantee endAddress >= address.
	 */
	public void setEndAddress(int endAddress) {
		if (this.address >= 0) {
			this.size = endAddress - this.address;
		}
	}

	// NOTE(review): currently just re-registers the plain name — no owner/device
	// prefix is added; confirm whether a qualified name was intended here.
	public HString getFullName() {
		String name = this.name.toString();
		return HString.getRegisteredHString(name);
	}

	// Accounts for an additional allocation inside this segment.
	public void addToUsedSize(int size){
		usedSize += size;
	}

	// Debug dump of the segment configuration to the verbose output stream.
	public void print(int indentLevel) {
		indent(indentLevel);
		vrb.print("segment = " + name.toString() + " {");
		vrb.print("attributes = 0x" + Integer.toHexString(attributes) + ", ");
		vrb.print("width = " + width + ", ");
		vrb.print("base = 0x" + Integer.toHexString(address) + ", ");
		vrb.println("size = 0x" + Integer.toHexString(size) + "}");
	}
}
| apache-2.0 |
pperalta/ignite | modules/core/src/test/java/org/apache/ignite/internal/processors/cache/distributed/dht/GridCacheDhtMappingSelfTest.java | 3777 | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.cache.distributed.dht;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteCache;
import org.apache.ignite.cache.CacheWriteSynchronizationMode;
import org.apache.ignite.configuration.CacheConfiguration;
import org.apache.ignite.configuration.IgniteConfiguration;
import org.apache.ignite.internal.IgniteKernal;
import org.apache.ignite.internal.processors.cache.distributed.near.GridNearCacheAdapter;
import org.apache.ignite.spi.discovery.tcp.TcpDiscoverySpi;
import org.apache.ignite.spi.discovery.tcp.ipfinder.TcpDiscoveryIpFinder;
import org.apache.ignite.spi.discovery.tcp.ipfinder.vm.TcpDiscoveryVmIpFinder;
import org.apache.ignite.testframework.junits.common.GridCommonAbstractTest;
import static org.apache.ignite.cache.CacheAtomicityMode.TRANSACTIONAL;
import static org.apache.ignite.cache.CacheMode.PARTITIONED;
import static org.apache.ignite.cache.CacheRebalanceMode.SYNC;
/**
* Tests dht mapping.
*/
public class GridCacheDhtMappingSelfTest extends GridCommonAbstractTest {
    /** Number of key backups. */
    private static final int BACKUPS = 1;

    /** IP finder. */
    private TcpDiscoveryIpFinder ipFinder = new TcpDiscoveryVmIpFinder(true);

    /** {@inheritDoc} */
    @Override protected IgniteConfiguration getConfiguration(String igniteInstanceName) throws Exception {
        IgniteConfiguration cfg = super.getConfiguration(igniteInstanceName);

        // Partitioned cache, fully synchronous writes and rebalancing, one backup —
        // so every key must live on exactly 1 + BACKUPS nodes.
        CacheConfiguration cacheCfg = defaultCacheConfiguration();

        cacheCfg.setCacheMode(PARTITIONED);
        cacheCfg.setWriteSynchronizationMode(CacheWriteSynchronizationMode.FULL_SYNC);
        cacheCfg.setRebalanceMode(SYNC);
        cacheCfg.setBackups(BACKUPS);
        cacheCfg.setAtomicityMode(TRANSACTIONAL);

        TcpDiscoverySpi disco = new TcpDiscoverySpi();

        disco.setIpFinder(ipFinder);

        cfg.setDiscoverySpi(disco);

        cfg.setCacheConfiguration(cacheCfg);

        return cfg;
    }

    /** {@inheritDoc} */
    @Override protected void afterTestsStopped() throws Exception {
        super.afterTestsStopped();

        stopAllGrids();
    }

    /** @throws Exception If failed. */
    public void testMapping() throws Exception {
        int nodeCnt = 5;

        startGridsMultiThreaded(nodeCnt);

        IgniteCache<Integer, Integer> cache = grid(nodeCnt - 1).cache(null);

        int kv = 1;

        cache.put(kv, kv);

        int cnt = 0;

        // Count on how many nodes the key is physically present in the DHT.
        for (int i = 0; i < nodeCnt; i++) {
            Ignite g = grid(i);

            GridDhtCacheAdapter<Integer, Integer> dht = ((GridNearCacheAdapter<Integer, Integer>)
                ((IgniteKernal)g).<Integer, Integer>internalCache()).dht();

            if (localPeek(dht, kv) != null) {
                info("Key found on node: " + g.cluster().localNode().id());

                cnt++;
            }
        }

        // Test key should be on primary and backup node only.
        assertEquals(1 + BACKUPS, cnt);
    }
}
gAmUssA/hazelcast-simulator | probes/src/main/java/com/hazelcast/simulator/probes/probes/impl/HdrLatencyDistributionProbe.java | 1440 | /*
* Copyright (c) 2008-2015, Hazelcast, Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.hazelcast.simulator.probes.probes.impl;
import org.HdrHistogram.Histogram;
import java.util.concurrent.TimeUnit;
/**
 * Probe that records per-operation latencies into an HDR histogram, tracking
 * values up to {@link #MAXIMUM_LATENCY} microseconds with 4 significant digits.
 */
public class HdrLatencyDistributionProbe
        extends AbstractIntervalProbe<HdrLatencyDistributionResult, HdrLatencyDistributionProbe> {

    /** Highest latency value (in microseconds) the histogram can track. */
    public static final long MAXIMUM_LATENCY = TimeUnit.SECONDS.toMicros(60);

    /** Accumulates microsecond latency samples. */
    private final Histogram histogram = new Histogram(MAXIMUM_LATENCY, 4);

    /**
     * Records a single latency sample.
     *
     * @param latencyNanos the measured latency in nanoseconds
     */
    @Override
    public void recordValue(long latencyNanos) {
        // Pass the microsecond value through as a long: the previous narrowing
        // cast to int silently wrapped latencies above Integer.MAX_VALUE
        // microseconds (~36 minutes) into negative values before the histogram
        // ever saw them. Histogram.recordValue accepts a long.
        histogram.recordValue(TimeUnit.NANOSECONDS.toMicros(latencyNanos));
    }

    /**
     * @return the total number of samples recorded so far
     */
    @Override
    public long getInvocationCount() {
        return histogram.getTotalCount();
    }

    /**
     * @return a result object wrapping the accumulated histogram
     */
    @Override
    public HdrLatencyDistributionResult getResult() {
        return new HdrLatencyDistributionResult(histogram);
    }
}
| apache-2.0 |
bsspirit/kettle-4.4.0-stable | src/org/pentaho/di/job/entries/deletefolders/JobEntryDeleteFolders.java | 15026 | /*******************************************************************************
*
* Pentaho Data Integration
*
* Copyright (C) 2002-2012 by Pentaho : http://www.pentaho.com
*
*******************************************************************************
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
******************************************************************************/
package org.pentaho.di.job.entries.deletefolders;
import static org.pentaho.di.job.entry.validator.AbstractFileValidator.putVariableSpace;
import static org.pentaho.di.job.entry.validator.AndValidator.putValidators;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.andValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.fileExistsValidator;
import static org.pentaho.di.job.entry.validator.JobEntryValidatorUtils.notNullValidator;
import java.io.IOException;
import java.util.List;
import org.apache.commons.vfs.FileObject;
import org.apache.commons.vfs.FileSelectInfo;
import org.apache.commons.vfs.FileSelector;
import org.apache.commons.vfs.FileType;
import org.pentaho.di.cluster.SlaveServer;
import org.pentaho.di.core.CheckResultInterface;
import org.pentaho.di.core.Const;
import org.pentaho.di.core.Result;
import org.pentaho.di.core.RowMetaAndData;
import org.pentaho.di.core.database.DatabaseMeta;
import org.pentaho.di.core.exception.KettleDatabaseException;
import org.pentaho.di.core.exception.KettleException;
import org.pentaho.di.core.exception.KettleXMLException;
import org.pentaho.di.core.vfs.KettleVFS;
import org.pentaho.di.core.xml.XMLHandler;
import org.pentaho.di.i18n.BaseMessages;
import org.pentaho.di.job.JobMeta;
import org.pentaho.di.job.entry.JobEntryBase;
import org.pentaho.di.job.entry.JobEntryInterface;
import org.pentaho.di.job.entry.validator.ValidatorContext;
import org.pentaho.di.repository.ObjectId;
import org.pentaho.di.repository.Repository;
import org.pentaho.di.resource.ResourceEntry;
import org.pentaho.di.resource.ResourceReference;
import org.pentaho.di.resource.ResourceEntry.ResourceType;
import org.w3c.dom.Node;
/**
 * This defines a 'delete folders' job entry: it deletes one or more folders
 * (including everything inside them), taking the folder names either from a
 * static argument list or from the result rows of the previous job entry, and
 * evaluates a configurable success condition afterwards.
 *
 * @author Samatar Hassan
 * @since 13-05-2008
 */
public class JobEntryDeleteFolders extends JobEntryBase implements Cloneable, JobEntryInterface {
    private static Class<?> PKG = JobEntryDeleteFolders.class; // for i18n purposes, needed by Translator2!! $NON-NLS-1$

    // When true, folder names come from the result rows of the previous job
    // entry instead of the static 'arguments' list.
    public boolean argFromPrevious;

    // Folder names to delete (may contain Kettle variables); used when
    // argFromPrevious is false.
    public String arguments[];

    // Selected success condition; holds one of the SUCCESS_IF_* values below.
    private String success_condition;

    public String SUCCESS_IF_AT_LEAST_X_FOLDERS_DELETED="success_when_at_least";
    public String SUCCESS_IF_ERRORS_LESS="success_if_errors_less";
    public String SUCCESS_IF_NO_ERRORS="success_if_no_errors";

    // Threshold (as a string, may contain variables) used by the
    // threshold-based success conditions; resolved into limitFolders at run time.
    private String limit_folders;

    // Per-execution counters and flags; reset at the start of execute().
    int NrErrors=0;
    int NrSuccess=0;
    boolean successConditionBroken=false;
    boolean successConditionBrokenExit=false;
    int limitFolders=0;

    /**
     * Creates a named job entry with default settings (no arguments, success
     * when there are no errors, threshold of 10).
     *
     * @param n the job entry name
     */
    public JobEntryDeleteFolders(String n) {
        super(n, ""); //$NON-NLS-1$
        argFromPrevious = false;
        arguments = null;
        success_condition=SUCCESS_IF_NO_ERRORS;
        limit_folders="10";
        setID(-1L);
    }

    /** Creates an unnamed job entry with default settings. */
    public JobEntryDeleteFolders() {
        this(""); //$NON-NLS-1$
    }

    public Object clone() {
        JobEntryDeleteFolders je = (JobEntryDeleteFolders) super.clone();
        return je;
    }

    /**
     * Serializes this job entry's settings to the XML fragment used when the
     * job is stored as a file.
     *
     * @return the XML representation of this entry's settings
     */
    public String getXML() {
        StringBuffer retval = new StringBuffer(300);
        retval.append(super.getXML());
        retval.append(" ").append(XMLHandler.addTagValue("arg_from_previous", argFromPrevious)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("success_condition", success_condition)); //$NON-NLS-1$ //$NON-NLS-2$
        retval.append(" ").append(XMLHandler.addTagValue("limit_folders", limit_folders));
        retval.append(" <fields>").append(Const.CR); //$NON-NLS-1$
        if (arguments != null) {
            // One <field> element per folder argument.
            for (int i = 0; i < arguments.length; i++) {
                retval.append(" <field>").append(Const.CR); //$NON-NLS-1$
                retval.append(" ").append(XMLHandler.addTagValue("name", arguments[i])); //$NON-NLS-1$ //$NON-NLS-2$
                retval.append(" </field>").append(Const.CR); //$NON-NLS-1$
            }
        }
        retval.append(" </fields>").append(Const.CR); //$NON-NLS-1$
        return retval.toString();
    }

    /**
     * Restores this job entry's settings from its XML representation
     * (inverse of {@link #getXML()}).
     */
    public void loadXML(Node entrynode, List<DatabaseMeta> databases, List<SlaveServer> slaveServers, Repository rep) throws KettleXMLException {
        try {
            super.loadXML(entrynode, databases, slaveServers);
            argFromPrevious = "Y".equalsIgnoreCase(XMLHandler.getTagValue(entrynode, "arg_from_previous")); //$NON-NLS-1$ //$NON-NLS-2$
            success_condition = XMLHandler.getTagValue(entrynode, "success_condition");
            limit_folders = XMLHandler.getTagValue(entrynode, "limit_folders");
            Node fields = XMLHandler.getSubNode(entrynode, "fields"); //$NON-NLS-1$
            // How many field arguments?
            int nrFields = XMLHandler.countNodes(fields, "field"); //$NON-NLS-1$
            arguments = new String[nrFields];
            // Read them all...
            for (int i = 0; i < nrFields; i++) {
                Node fnode = XMLHandler.getSubNodeByNr(fields, "field", i); //$NON-NLS-1$
                arguments[i] = XMLHandler.getTagValue(fnode, "name"); //$NON-NLS-1$
            }
        } catch (KettleXMLException xe) {
            throw new KettleXMLException(BaseMessages.getString(PKG, "JobEntryDeleteFolders.UnableToLoadFromXml"), xe); //$NON-NLS-1$
        }
    }

    /**
     * Restores this job entry's settings from a Kettle repository.
     */
    public void loadRep(Repository rep, ObjectId id_jobentry, List<DatabaseMeta> databases, List<SlaveServer> slaveServers) throws KettleException {
        try {
            argFromPrevious = rep.getJobEntryAttributeBoolean(id_jobentry, "arg_from_previous"); //$NON-NLS-1$
            limit_folders = rep.getJobEntryAttributeString(id_jobentry, "limit_folders");
            success_condition = rep.getJobEntryAttributeString(id_jobentry, "success_condition");
            // How many arguments?
            int argnr = rep.countNrJobEntryAttributes(id_jobentry, "name"); //$NON-NLS-1$
            arguments = new String[argnr];
            // Read them all...
            for (int a = 0; a < argnr; a++) {
                arguments[a] = rep.getJobEntryAttributeString(id_jobentry, a, "name"); //$NON-NLS-1$
            }
        } catch (KettleException dbe) {
            throw new KettleException(BaseMessages.getString(PKG, "JobEntryDeleteFolders.UnableToLoadFromRepo", String.valueOf(id_jobentry)), dbe); //$NON-NLS-1$
        }
    }

    /**
     * Persists this job entry's settings to a Kettle repository
     * (inverse of {@link #loadRep}).
     */
    public void saveRep(Repository rep, ObjectId id_job) throws KettleException {
        try {
            rep.saveJobEntryAttribute(id_job, getObjectId(), "arg_from_previous", argFromPrevious); //$NON-NLS-1$
            rep.saveJobEntryAttribute(id_job, getObjectId(), "limit_folders", limit_folders);
            rep.saveJobEntryAttribute(id_job, getObjectId(), "success_condition", success_condition);
            // save the arguments...
            if (arguments != null) {
                for (int i = 0; i < arguments.length; i++) {
                    rep.saveJobEntryAttribute(id_job, getObjectId(), i, "name", arguments[i]); //$NON-NLS-1$
                }
            }
        } catch (KettleDatabaseException dbe) {
            throw new KettleException(
                BaseMessages.getString(PKG, "JobEntryDeleteFolders.UnableToSaveToRepo", String.valueOf(id_job)), dbe); //$NON-NLS-1$
        }
    }

    /**
     * Deletes the configured folders (or the folders named by the previous
     * entry's result rows), updating the error/success counters, and sets the
     * outgoing result according to the configured success condition.
     *
     * @param result the incoming result, also used to carry the outcome
     * @param nr the job entry number (unused here)
     * @return the updated result
     */
    public Result execute(Result result, int nr) throws KettleException {
        List<RowMetaAndData> rows = result.getRows();
        RowMetaAndData resultRow = null;
        // Assume failure until the success condition is evaluated at the end.
        result.setNrErrors(1);
        result.setResult(false);
        // Reset per-execution state.
        NrErrors=0;
        NrSuccess=0;
        successConditionBroken=false;
        successConditionBrokenExit=false;
        // Resolve the threshold string (may contain variables); default 10.
        limitFolders=Const.toInt(environmentSubstitute(getLimitFolders()),10);
        if (argFromPrevious) {
            if(log.isDetailed())
                logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.FoundPreviousRows", String.valueOf((rows != null ? rows.size() : 0)))); //$NON-NLS-1$
        }
        if (argFromPrevious && rows != null){
            // Folder names come from the first field of each previous result row.
            for (int iteration = 0; iteration < rows.size() && !parentJob.isStopped(); iteration++) {
                if(successConditionBroken){
                    // Too many errors already: stop early and report what we have.
                    logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
                    result.setNrErrors(NrErrors);
                    result.setNrLinesDeleted(NrSuccess);
                    return result;
                }
                resultRow = rows.get(iteration);
                String args_previous = resultRow.getString(0, null);
                if(!Const.isEmpty(args_previous)){
                    if(deleteFolder(args_previous)){
                        updateSuccess();
                    }else {
                        updateErrors();
                    }
                }else{
                    // empty filename !
                    logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.EmptyLine"));
                }
            }
        } else if (arguments != null) {
            // Folder names come from the statically configured argument list.
            for (int i = 0; i < arguments.length && !parentJob.isStopped(); i++) {
                if(successConditionBroken)
                {
                    logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.SuccessConditionbroken",""+NrErrors));
                    result.setNrErrors(NrErrors);
                    result.setNrLinesDeleted(NrSuccess);
                    return result;
                }
                String realfilename=environmentSubstitute(arguments[i]);
                if(!Const.isEmpty(realfilename))
                {
                    if(deleteFolder(realfilename)){
                        updateSuccess();
                    }else {
                        updateErrors();
                    }
                }else{
                    // empty filename !
                    logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.EmptyLine"));
                }
            }
        }
        if(log.isDetailed()){
            logDetailed("=======================================");
            logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Log.Info.NrError","" + NrErrors));
            logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Log.Info.NrDeletedFolders","" + NrSuccess));
            logDetailed("=======================================");
        }
        result.setNrErrors(NrErrors);
        result.setNrLinesDeleted(NrSuccess);
        if(getSuccessStatus()) result.setResult(true);
        return result;
    }

    // Increments the error counter and flags the run as broken if the
    // configured success condition can no longer be met.
    private void updateErrors()
    {
        NrErrors++;
        if(checkIfSuccessConditionBroken()){
            // Success condition was broken
            successConditionBroken=true;
        }
    }

    // Returns true once the error count makes the selected success condition
    // unsatisfiable (any error for "no errors", or reaching the threshold for
    // "errors less than").
    private boolean checkIfSuccessConditionBroken()
    {
        boolean retval=false;
        if ((NrErrors>0 && getSuccessCondition().equals(SUCCESS_IF_NO_ERRORS))
            || (NrErrors>=limitFolders && getSuccessCondition().equals(SUCCESS_IF_ERRORS_LESS))){
            retval=true;
        }
        return retval;
    }

    // Increments the deleted-folder counter.
    private void updateSuccess()
    {
        NrSuccess++;
    }

    // Evaluates the selected success condition against the final counters.
    private boolean getSuccessStatus()
    {
        boolean retval=false;
        if ((NrErrors==0 && getSuccessCondition().equals(SUCCESS_IF_NO_ERRORS))
            || (NrSuccess>=limitFolders && getSuccessCondition().equals(SUCCESS_IF_AT_LEAST_X_FOLDERS_DELETED))
            || (NrErrors<=limitFolders && getSuccessCondition().equals(SUCCESS_IF_ERRORS_LESS))){
            retval=true;
        }
        return retval;
    }

    /**
     * Deletes a single folder (recursively, via VFS) if it exists.
     *
     * @param foldername the resolved folder name/URL
     * @return true if the folder was deleted or did not exist; false if the
     *         target was a regular file or the deletion failed
     */
    private boolean deleteFolder(String foldername) {
        boolean rcode = false;
        FileObject filefolder = null;
        try {
            filefolder = KettleVFS.getFileObject(foldername, this);
            if (filefolder.exists()) {
                // the file or folder exists
                if (filefolder.getType() == FileType.FOLDER) {
                    // It's a folder
                    if (log.isDetailed())
                        logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.ProcessingFolder", foldername)); //$NON-NLS-1$
                    // Delete Files
                    int Nr = filefolder.delete(new TextFileSelector());
                    if (log.isDetailed())
                        logDetailed(BaseMessages.getString(PKG, "JobEntryDeleteFolders.TotalDeleted", foldername,String.valueOf(Nr))); //$NON-NLS-1$
                    rcode = true;
                } else {
                    // Error...This file is not a folder!
                    logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.Error.NotFolder"));
                }
            } else {
                // File already deleted, no reason to try to delete it
                if(log.isBasic()) logBasic(BaseMessages.getString(PKG, "JobEntryDeleteFolders.FolderAlreadyDeleted", foldername)); //$NON-NLS-1$
                rcode = true;
            }
        } catch (Exception e) {
            logError(BaseMessages.getString(PKG, "JobEntryDeleteFolders.CouldNotDelete", foldername, e.getMessage()), e); //$NON-NLS-1$
        } finally {
            if (filefolder != null) {
                try {
                    filefolder.close();
                    filefolder=null;
                } catch (IOException ex) {
                    // Best-effort close: the deletion outcome is already decided.
                };
            }
        }
        return rcode;
    }

    // VFS selector that matches every file and descends into every folder,
    // so filefolder.delete(...) removes the folder's entire contents.
    private class TextFileSelector implements FileSelector
    {
        public boolean includeFile(FileSelectInfo info) {
            return true;
        }
        public boolean traverseDescendents(FileSelectInfo info) {
            return true;
        }
    }

    // Sets whether folder names are taken from the previous entry's result rows.
    public void setPrevious(boolean argFromPrevious) {
        this.argFromPrevious = argFromPrevious;
    }

    // This entry produces a true/false outcome that following hops can evaluate.
    public boolean evaluates() {
        return true;
    }

    /**
     * Design-time validation: arguments must be non-null, and each resolved
     * folder must exist.
     */
    public void check(List<CheckResultInterface> remarks, JobMeta jobMeta) {
        boolean res = andValidator().validate(this, "arguments", remarks, putValidators(notNullValidator())); //$NON-NLS-1$
        if (res == false) {
            return;
        }
        ValidatorContext ctx = new ValidatorContext();
        putVariableSpace(ctx, getVariables());
        putValidators(ctx, notNullValidator(), fileExistsValidator());
        for (int i = 0; i < arguments.length; i++) {
            andValidator().validate(this, "arguments[" + i + "]", remarks, ctx); //$NON-NLS-1$ //$NON-NLS-2$
        }
    }

    /**
     * Reports the resolved folder names as file resources this entry depends on.
     */
    public List<ResourceReference> getResourceDependencies(JobMeta jobMeta) {
        List<ResourceReference> references = super.getResourceDependencies(jobMeta);
        if (arguments != null) {
            ResourceReference reference = null;
            for (int i=0; i<arguments.length; i++) {
                String filename = jobMeta.environmentSubstitute(arguments[i]);
                if (reference == null) {
                    reference = new ResourceReference(this);
                    references.add(reference);
                }
                reference.getEntries().add( new ResourceEntry(filename, ResourceType.FILE));
            }
        }
        return references;
    }

    // True when folder names come from the previous entry's result rows.
    public boolean isArgFromPrevious()
    {
        return argFromPrevious;
    }

    // The statically configured folder names (may be null).
    public String[] getArguments()
    {
        return arguments;
    }

    // Sets the success condition (one of the SUCCESS_IF_* values).
    public void setSuccessCondition(String success_condition)
    {
        this.success_condition=success_condition;
    }

    // The configured success condition.
    public String getSuccessCondition()
    {
        return success_condition;
    }

    // Sets the unresolved threshold string for the threshold-based conditions.
    public void setLimitFolders(String limit_folders)
    {
        this.limit_folders=limit_folders;
    }

    // The unresolved threshold string (may contain variables).
    public String getLimitFolders()
    {
        return limit_folders;
    }
}
nmldiegues/stibt | infinispan/core/src/test/java/org/infinispan/distribution/rehash/RehashWithL1Test.java | 3376 | /*
* Copyright 2012 Red Hat, Inc. and/or its affiliates.
*
* This is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation; either version 2.1 of
* the License, or (at your option) any later version.
*
* This software is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
* 02110-1301 USA
*/
package org.infinispan.distribution.rehash;
import org.infinispan.configuration.cache.CacheMode;
import org.infinispan.configuration.cache.ConfigurationBuilder;
import org.infinispan.manager.EmbeddedCacheManager;
import org.infinispan.test.MultipleCacheManagersTest;
import org.infinispan.test.TestingUtil;
import org.infinispan.test.fwk.TransportFlags;
import org.testng.annotations.Test;
import java.util.concurrent.Callable;
import java.util.concurrent.Future;
import static org.testng.AssertJUnit.assertEquals;
import static org.testng.AssertJUnit.assertFalse;
/**
 * Tests rehashing with distributed caches with L1 enabled.
 *
 * @author Galder Zamarreño
 * @since 5.2
 */
@Test(groups = "functional", testName = "distribution.rehash.RehashWithL1Test")
public class RehashWithL1Test extends MultipleCacheManagersTest {

   /** Configuration shared between the initial members and the late joiner. */
   ConfigurationBuilder builder;

   @Override
   protected void createCacheManagers() throws Throwable {
      builder = getDefaultClusteredCacheConfig(CacheMode.DIST_SYNC, false);
      // Enable rehashing explicitly
      builder.clustering().stateTransfer().fetchInMemoryState(true)
            .hash().l1().enable();
      createClusteredCaches(2, builder);
   }

   public void testPutWithRehashAndCacheClear() throws Exception {
      final int opCount = 100;

      Future<Void> joinerFuture = null;

      // Write keys slowly; halfway through, start a third node so the writes
      // overlap with the state transfer triggered by the join.
      for (int idx = 0; idx < opCount; idx++) {
         cache(0).put("k" + idx, "some data");

         if (idx == (opCount / 2)) {
            joinerFuture = fork(new Callable<Void>() {
               @Override
               public Void call() throws Exception {
                  EmbeddedCacheManager cm = addClusterEnabledCacheManager(builder,
                        new TransportFlags().withMerge(true));
                  cm.getCache();
                  return null;
               }
            });
         }

         Thread.sleep(10);
      }

      if (joinerFuture == null) throw new Exception("Node 3 not joined");
      joinerFuture.get();

      // Remove everything, again pacing the operations.
      for (int idx = 0; idx < opCount; idx++) {
         cache(0).remove("k" + idx);
         Thread.sleep(10);
      }

      // No node — including the joiner with its L1 cache — may still see a key.
      for (int idx = 0; idx < opCount; idx++) {
         String key = "k" + idx;

         assertFalse(cache(0).containsKey(key));
         assertFalse("Key: " + key + " is present in cache at " + cache(1),
               cache(1).containsKey(key));
         assertFalse("Key: " + key + " is present in cache at " + cache(2),
               cache(2).containsKey(key));
      }

      assertEquals(0, cache(0).size());
      assertEquals(0, cache(1).size());
      assertEquals(0, cache(2).size());
   }
}
| apache-2.0 |
kevinmdavis/cloud-trace-java | sdk/core/src/main/java/com/google/cloud/trace/core/TraceOptions.java | 4000 | // Copyright 2016 Google Inc. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.cloud.trace.core;
import com.google.common.base.MoreObjects;
import java.util.Objects;
/**
 * A class that represents trace options. These options determine options such as whether a span
 * should be traced or whether stack trace information should be collected.
 *
 * <p>Instances are immutable; every {@code override*} method returns a new instance.
 */
public class TraceOptions {
  // Bit flags within the options mask.
  private final static int TRACE_ENABLED = 0x1;
  private final static int STACK_TRACE_ENABLED = 0x2;

  private final int optionsMask;

  /**
   * Returns a new trace options with the trace option disabled.
   *
   * @return the new trace options.
   */
  public static TraceOptions forTraceDisabled() {
    return new TraceOptions();
  }

  /**
   * Returns a new trace options with the trace option enabled.
   *
   * @return the new trace options.
   */
  public static TraceOptions forTraceEnabled() {
    return new TraceOptions(TRACE_ENABLED);
  }

  /**
   * Creates a new trace options with default options values (all options disabled).
   */
  public TraceOptions() {
    this(0);
  }

  /**
   * Creates a new trace options with the given options mask.
   *
   * @param optionsMask an options mask for the new trace options.
   */
  public TraceOptions(int optionsMask) {
    this.optionsMask = optionsMask;
  }

  @Override
  public boolean equals(Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof TraceOptions)) {
      return false;
    }
    TraceOptions that = (TraceOptions)obj;
    // Compare the primitive masks directly; the previous
    // Objects.equals(int, int) call autoboxed both operands.
    return optionsMask == that.optionsMask;
  }

  @Override
  public int hashCode() {
    return Objects.hash(optionsMask);
  }

  @Override
  public String toString() {
    // Same "TraceOptions{optionsMask=N}" format previously produced by
    // Guava's MoreObjects.toStringHelper, without the Guava dependency.
    return "TraceOptions{optionsMask=" + optionsMask + "}";
  }

  /**
   * Returns the options mask.
   *
   * @return the options mask.
   */
  public int getOptionsMask() {
    return optionsMask;
  }

  /**
   * Returns whether the trace option is enabled.
   *
   * @return a boolean indicating whether the trace option is enabled.
   */
  public boolean getTraceEnabled() {
    return applyMask(TRACE_ENABLED);
  }

  /**
   * Returns whether the stack trace option is enabled.
   *
   * @return a boolean indicating whether the stack trace option is enabled.
   */
  public boolean getStackTraceEnabled() {
    return applyMask(STACK_TRACE_ENABLED);
  }

  /**
   * Returns a new trace options with this trace options' options values, overriden with the given
   * trace option.
   *
   * @param enabled a boolean that indicates the trace option.
   * @return the new trace options.
   */
  public TraceOptions overrideTraceEnabled(boolean enabled) {
    return new TraceOptions(enabled ? set(TRACE_ENABLED) : clear(TRACE_ENABLED));
  }

  /**
   * Returns a new trace options with this trace options' options values, overriden with the given
   * stack trace option.
   *
   * @param enabled a boolean that indicates the stack trace option.
   * @return the new trace options.
   */
  public TraceOptions overrideStackTraceEnabled(boolean enabled) {
    return new TraceOptions(enabled ? set(STACK_TRACE_ENABLED) : clear(STACK_TRACE_ENABLED));
  }

  // True when every bit of the given mask is meaningful and at least one is set here.
  private boolean applyMask(int optionsMask) {
    return (this.optionsMask & optionsMask) != 0;
  }

  // Returns this mask with the given bits cleared.
  private int clear(int optionsMask) {
    return this.optionsMask & ~optionsMask;
  }

  // Returns this mask with the given bits set.
  private int set(int optionsMask) {
    return this.optionsMask | optionsMask;
  }
}
kaoru2010/android-startup-project | tools/smc/smc_6_5_0/examples/Ant/EX6/src/TcpClientListener.java | 1660 | //
// The contents of this file are subject to the Mozilla Public
// License Version 1.1 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy
// of the License at http://www.mozilla.org/MPL/
//
// Software distributed under the License is distributed on an
// "AS IS" basis, WITHOUT WARRANTY OF ANY KIND, either express or
// implied. See the License for the specific language governing
// rights and limitations under the License.
//
// The Original Code is State Machine Compiler (SMC).
//
// The Initial Developer of the Original Code is Charles W. Rapp.
// Portions created by Charles W. Rapp are
// Copyright (C) 2000 - 2003 Charles W. Rapp.
// All Rights Reserved.
//
// Contributor(s):
//
// Name
// TcpClient.java
//
// Description
// A TCP client connection.
//
// RCS ID
// $Id: TcpClientListener.java,v 1.2 2007/08/05 13:21:09 cwrapp Exp $
//
// CHANGE LOG
// $Log: TcpClientListener.java,v $
// Revision 1.2 2007/08/05 13:21:09 cwrapp
// Version 5.0.1 check-in. See net/sf/smc/CODE_README.txt for more information.
//
// Revision 1.1 2005/05/28 12:49:21 cwrapp
// Added Ant examples 1 - 7.
//
// Revision 1.0 2004/05/31 13:27:41 charlesr
// Initial revision
//
package smc_ex6;
/**
 * Callback interface implemented by users of {@code TcpClient} to receive
 * notifications about the connection life-cycle and incoming data.
 */
public interface TcpClientListener
    extends TcpConnectionListener
{
    /** The client connection was successfully opened. */
    public void opened(TcpClient client);
    /** The attempt to open the connection failed for the given reason. */
    public void openFailed(String reason, TcpClient client);
    /** The connection was closed; {@code reason} explains why. */
    public void closed(String reason, TcpClient client);
    /** Previously queued data was successfully transmitted. */
    public void transmitted(TcpClient client);
    /** A transmit attempt failed for the given reason. */
    public void transmitFailed(String reason, TcpClient client);
    /** Data arrived on the connection. */
    public void receive(byte[] data, TcpClient client);
}
| apache-2.0 |
petrandreev/arquillian-core | container/impl-base/src/main/java/org/jboss/arquillian/container/impl/client/container/ContainerOperationResult.java | 5898 | /*
* JBoss, Home of Professional Open Source
* Copyright 2010 Red Hat Inc. and/or its affiliates and other contributors
* by the @authors tag. See the copyright.txt in the distribution for a
* full listing of individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.jboss.arquillian.container.impl.client.container;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.jboss.arquillian.container.spi.Container;
import org.jboss.arquillian.container.spi.client.deployment.DeploymentDescription;
/**
 * Aggregated result of one or more container life-cycle operations
 * (setup/start/stop/deploy/undeploy), recording for each operation the
 * container it ran against and an optional failure cause.
 *
 * @author <a href="mailto:aslak@redhat.com">Aslak Knutsen</a>
 * @version $Revision: $
 */
public class ContainerOperationResult<T>
{
   /** The kind of container operation that was performed. */
   public enum Type
   {
      SETUP, START, STOP, DEPLOY, UNDEPLOY
   }

   /** Operations recorded so far, in execution order. */
   private List<Operation<T>> operations;

   public ContainerOperationResult()
   {
      operations = new ArrayList<Operation<T>>();
   }

   /**
    * Records a completed operation.
    *
    * @param operation the operation outcome to record
    */
   void add(Operation<T> operation)
   {
      operations.add(operation);
   }

   /**
    * @return an unmodifiable view of all recorded operations
    */
   public List<Operation<T>> getOperations()
   {
      return Collections.unmodifiableList(operations);
   }

   /**
    * @return true if at least one recorded operation failed
    */
   public boolean hasFailure()
   {
      for (Operation<T> operation : operations)
      {
         if (operation.hasFailed())
         {
            return true;
         }
      }
      return false;
   }

   /**
    * The outcome of a single container operation.
    */
   public interface Operation<T>
   {
      /** @return the kind of operation performed */
      Type getType();

      /** @return the container the operation ran against */
      Container getContainer();

      /** @return true if the operation threw */
      boolean hasFailed();

      /** @return the failure cause, or null if the operation succeeded */
      Throwable getFailure();

      /** @return the operation payload (e.g. a deployment), or null */
      T getObject();
   }

   /**
    * Simple immutable {@link Operation} implementation used by the static
    * factory methods below.
    */
   public static class GenericOperation<X> implements Operation<X>
   {
      private Type type;

      // Note: field and parameters renamed from the misspelled 'contianer'.
      private Container container;

      private Throwable exception;

      private X object;

      public GenericOperation(Type type, Container container)
      {
         this(type, container, null, null);
      }

      public GenericOperation(Type type, Container container, X object)
      {
         this(type, container, null, object);
      }

      public GenericOperation(Type type, Container container, Throwable exception)
      {
         this(type, container, exception, null);
      }

      public GenericOperation(Type type, Container container, Throwable exception, X object)
      {
         this.type = type;
         this.container = container;
         this.exception = exception;
         this.object = object;
      }

      /** {@inheritDoc} */
      public Container getContainer()
      {
         return container;
      }

      /** {@inheritDoc} */
      public boolean hasFailed()
      {
         return getFailure() != null;
      }

      /** {@inheritDoc} */
      public Throwable getFailure()
      {
         return exception;
      }

      /** {@inheritDoc} */
      public X getObject()
      {
         return object;
      }

      /** {@inheritDoc} */
      public Type getType()
      {
         return type;
      }
   }

   /** @return a successful SETUP operation for the given container */
   public static Operation<Void> setupSuccess(Container container)
   {
      return new GenericOperation<Void>(Type.SETUP, container);
   }

   /** @return a failed SETUP operation with its cause */
   public static Operation<Void> setupFailure(Container container, Throwable exception)
   {
      return new GenericOperation<Void>(Type.SETUP, container, exception);
   }

   /** @return a successful START operation for the given container */
   public static Operation<Void> startSuccess(Container container)
   {
      return new GenericOperation<Void>(Type.START, container);
   }

   /** @return a failed START operation with its cause */
   public static Operation<Void> startFailure(Container container, Throwable exception)
   {
      return new GenericOperation<Void>(Type.START, container, exception);
   }

   /** @return a successful STOP operation for the given container */
   public static Operation<Void> stopSuccess(Container container)
   {
      return new GenericOperation<Void>(Type.STOP, container);
   }

   /** @return a failed STOP operation with its cause */
   public static Operation<Void> stopFailure(Container container, Throwable exception)
   {
      return new GenericOperation<Void>(Type.STOP, container, exception);
   }

   /** @return a successful DEPLOY operation carrying the deployment */
   public static Operation<DeploymentDescription> deploySuccess(Container container, DeploymentDescription deployment)
   {
      return new GenericOperation<DeploymentDescription>(Type.DEPLOY, container, deployment);
   }

   /** @return a failed DEPLOY operation with its cause and the deployment */
   public static Operation<DeploymentDescription> deployFailure(Container container, DeploymentDescription deployment, Throwable exception)
   {
      return new GenericOperation<DeploymentDescription>(Type.DEPLOY, container, exception, deployment);
   }

   /** @return a successful UNDEPLOY operation carrying the deployment */
   public static Operation<DeploymentDescription> unDeploySuccess(Container container, DeploymentDescription deployment)
   {
      return new GenericOperation<DeploymentDescription>(Type.UNDEPLOY, container, deployment);
   }

   /** @return a failed UNDEPLOY operation with its cause and the deployment */
   public static Operation<DeploymentDescription> unDeployFailure(Container container, DeploymentDescription deployment, Throwable exception)
   {
      return new GenericOperation<DeploymentDescription>(Type.UNDEPLOY, container, exception, deployment);
   }
}
| apache-2.0 |
Tycheo/coffeemud | com/planet_ink/coffee_mud/Abilities/Thief/Thief_MarkInvisibility.java | 3481 | package com.planet_ink.coffee_mud.Abilities.Thief;
import com.planet_ink.coffee_mud.core.interfaces.*;
import com.planet_ink.coffee_mud.core.*;
import com.planet_ink.coffee_mud.core.collections.*;
import com.planet_ink.coffee_mud.Abilities.interfaces.*;
import com.planet_ink.coffee_mud.Areas.interfaces.*;
import com.planet_ink.coffee_mud.Behaviors.interfaces.*;
import com.planet_ink.coffee_mud.CharClasses.interfaces.*;
import com.planet_ink.coffee_mud.Commands.interfaces.*;
import com.planet_ink.coffee_mud.Common.interfaces.*;
import com.planet_ink.coffee_mud.Exits.interfaces.*;
import com.planet_ink.coffee_mud.Items.interfaces.*;
import com.planet_ink.coffee_mud.Libraries.interfaces.*;
import com.planet_ink.coffee_mud.Locales.interfaces.*;
import com.planet_ink.coffee_mud.MOBS.interfaces.*;
import com.planet_ink.coffee_mud.Races.interfaces.*;
import java.util.*;
/*
Copyright 2003-2015 Bo Zimmerman
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
public class Thief_MarkInvisibility extends ThiefSkill
{
	@Override
	public String ID()
	{
		return "Thief_MarkInvisibility";
	}

	private final static String localizedName = CMLib.lang().L("Invisibility to Mark");

	@Override
	public String name()
	{
		return localizedName;
	}

	@Override
	public String displayText()
	{
		return "";
	}

	@Override
	protected int canAffectCode()
	{
		return CAN_MOBS;
	}

	@Override
	protected int canTargetCode()
	{
		return 0;
	}

	@Override
	public int abstractQuality()
	{
		return Ability.QUALITY_OK_SELF;
	}

	@Override
	public int classificationCode()
	{
		return Ability.ACODE_THIEF_SKILL|Ability.DOMAIN_STEALTHY;
	}

	@Override
	public boolean isAutoInvoked()
	{
		return true;
	}

	@Override
	public boolean canBeUninvoked()
	{
		return false;
	}

	// true while the invisibility disposition is being applied to the host mob
	public boolean active=false;
	// the target of the host's current Thief_Mark effect; refreshed on every tick
	public MOB mark=null;

	/**
	 * Fetches the given mob's Thief_Mark effect and returns the mob it marks,
	 * or null when no mark effect is present.
	 */
	public MOB getMark(MOB mob)
	{
		final Thief_Mark markEffect=(Thief_Mark)mob.fetchEffect("Thief_Mark");
		return (markEffect==null) ? null : markEffect.mark;
	}

	/**
	 * Returns the tick counter of the given mob's mark effect, or -1 when the
	 * mob has no mark effect or the effect has no marked target.
	 */
	public int getMarkTicks(MOB mob)
	{
		final Thief_Mark markEffect=(Thief_Mark)mob.fetchEffect("Thief_Mark");
		return ((markEffect!=null)&&(markEffect.mark!=null)) ? markEffect.ticks : -1;
	}

	@Override
	public void affectPhyStats(Physical affected, PhyStats affectableStats)
	{
		super.affectPhyStats(affected,affectableStats);
		// only flag the host invisible while tick() has switched the effect on
		if(active)
			affectableStats.setDisposition(affectableStats.disposition()|PhyStats.IS_INVISIBLE);
	}

	@Override
	public boolean tick(Tickable ticking, int tickID)
	{
		if(!super.tick(ticking,tickID))
			return false;
		if(!(affected instanceof MOB))
			return true;
		final MOB mob=(MOB)affected;
		mark=getMark(mob);
		// Hide only while the mark is in the same room and the proficiency
		// check passes (the check is skipped when the mob does not actually
		// own this ability).
		final boolean shouldHide=(mark!=null)
			&&(mob.location()!=null)
			&&(mob.location().isInhabitant(mark))
			&&((mob.fetchAbility(ID())==null)||proficiencyCheck(mob,0,false));
		if(shouldHide)
		{
			if(!active)
			{
				active=true;
				helpProficiency(mob, 0);
				mob.recoverPhyStats();
			}
		}
		else
		if(active)
		{
			active=false;
			mob.recoverPhyStats();
		}
		return true;
	}
}
| apache-2.0 |
rpinzon/kiji-schema | kiji-schema/src/test/java/org/kiji/schema/TestKijiDataRequest.java | 10149 | /**
* (c) Copyright 2012 WibiData, Inc.
*
* See the NOTICE file distributed with this work for additional
* information regarding copyright ownership.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kiji.schema;
import static org.hamcrest.CoreMatchers.is;
import static org.hamcrest.CoreMatchers.not;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import org.junit.Test;
import org.kiji.schema.KijiDataRequestBuilder.ColumnsDef;
import org.kiji.schema.avro.TestRecord1;
import org.kiji.schema.layout.ColumnReaderSpec;
/** Tests for KijiDataRequest and KijiDataRequestBuilder. */
public class TestKijiDataRequest {
  /** Checks that KijiDataRequest serializes and deserializes correctly. */
  @Test
  public void testSerializability() throws Exception {
    final KijiDataRequest expected = KijiDataRequest.builder()
        .addColumns(ColumnsDef.create()
            .add("foo", "bar1")
            .add("foo", "bar2")
            .add("foo", "bar3", ColumnReaderSpec.bytes())
            .add("foo", "bar4")
        )
        .build();
    // Round-trip through standard Java serialization.
    ByteArrayOutputStream byteOutput = new ByteArrayOutputStream();
    new ObjectOutputStream(byteOutput).writeObject(expected);
    byte[] bytes = byteOutput.toByteArray();
    ByteArrayInputStream byteInput = new ByteArrayInputStream(bytes);
    KijiDataRequest actual = (KijiDataRequest) (new ObjectInputStream(byteInput).readObject());
    // The deserialized request must compare equal to the original.
    assertEquals(expected, actual);
  }

  /** Checks that KijiDataRequest with schema overrides serializes and deserializes correctly. */
  @Test
  public void testSchemaOverrideSerializability() throws Exception {
    final KijiColumnName columnName = KijiColumnName.create("family", "empty");
    // Request overriding the reader schema with a specific Avro record class.
    final KijiDataRequest overrideRequest = KijiDataRequest.builder()
        .addColumns(ColumnsDef.create()
            .add(columnName, ColumnReaderSpec.avroReaderSchemaSpecific(TestRecord1.class))).build();
    final ByteArrayOutputStream baos = new ByteArrayOutputStream();
    final ObjectOutputStream oos = new ObjectOutputStream(baos);
    oos.writeObject(overrideRequest);
    final ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
    final ObjectInputStream ois = new ObjectInputStream(bais);
    final KijiDataRequest deserializedRequest = (KijiDataRequest) ois.readObject();
    // The ColumnReaderSpec override must survive the round trip.
    assertEquals(overrideRequest, deserializedRequest);
  }

  // Verifies equals() semantics of KijiDataRequest.Column: identically-built
  // columns are equal across distinct request instances, while differing
  // max-versions or qualifiers break equality.
  @Test
  public void testColumnRequestEquals() {
    KijiDataRequestBuilder builder = KijiDataRequest.builder();
    builder.newColumnsDef().add("foo", "bar");
    KijiDataRequest req0 = builder.build();
    builder = KijiDataRequest.builder();
    builder.newColumnsDef().add("foo", "bar");
    KijiDataRequest req1 = builder.build();
    // Distinct instances, equal contents.
    assertTrue(req0 != req1);
    assertEquals(req0, req0);
    KijiDataRequest.Column foo0 = req0.getColumn("foo", "bar");
    KijiDataRequest.Column foo1 = req1.getColumn("foo", "bar");
    assertEquals(foo0, foo0);
    assertEquals(foo0, foo1);
    assertEquals(foo1, foo0);
    builder = KijiDataRequest.builder();
    builder.newColumnsDef().withMaxVersions(2).add("foo", "bar");
    builder.newColumnsDef().add("foo", "baz");
    KijiDataRequest req2 = builder.build();
    // Same column name but different max-versions: not equal.
    KijiDataRequest.Column foo2 = req2.getColumn("foo", "bar");
    assertThat(new Object(), is(not((Object) foo2)));
    assertFalse(foo0.equals(foo2));
    assertFalse(foo2.equals(foo0));
    assertThat(foo1, is(not(foo2)));
    // Different qualifier: not equal.
    KijiDataRequest.Column foo3 = req2.getColumn("foo", "baz");
    assertFalse(foo0.equals(foo3));
    assertThat(foo1, is(not(foo3)));
  }

  // Verifies equals() semantics of whole data requests: identically-built
  // requests are equal; requests differing in columns or max-versions are not.
  @Test
  public void testDataRequestEquals() {
    KijiDataRequestBuilder builder0 = KijiDataRequest.builder()
        .withTimeRange(3L, 4L);
    builder0.newColumnsDef().withMaxVersions(2).addFamily("foo");
    builder0.newColumnsDef().withMaxVersions(5).add("bar", "baz");
    KijiDataRequest request0 = builder0.build();
    KijiDataRequestBuilder builder1 = KijiDataRequest.builder()
        .withTimeRange(3L, 4L);
    builder1.newColumnsDef().withMaxVersions(2).addFamily("foo");
    builder1.newColumnsDef().withMaxVersions(5).add("bar", "baz");
    KijiDataRequest request1 = builder1.build();
    // request2 differs from request0/1 in the second column's name.
    KijiDataRequestBuilder builder2 = KijiDataRequest.builder()
        .withTimeRange(3L, 4L);
    builder2.newColumnsDef().withMaxVersions(2).addFamily("foo");
    builder2.newColumnsDef().withMaxVersions(5).add("car", "bot");
    KijiDataRequest request2 = builder2.build();
    // request3 differs from request2 only in max-versions.
    KijiDataRequestBuilder builder3 = KijiDataRequest.builder()
        .withTimeRange(3L, 4L);
    builder3.newColumnsDef().withMaxVersions(2).addFamily("foo");
    builder3.newColumnsDef().withMaxVersions(3).add("car", "bot");
    KijiDataRequest request3 = builder3.build();
    assertEquals(request0, request1);
    assertThat(new Object(), is(not((Object) request0)));
    assertThat(request0, is(not(request2)));
    assertThat(request2, is(not(request3)));
  }

  // Verifies merge(): max-versions take the larger value, time ranges are
  // unioned, columns from both sides are present, and merging is symmetric.
  @Test
  public void testMerge() {
    KijiDataRequestBuilder builder1 = KijiDataRequest.builder().withTimeRange(3, 4);
    builder1.newColumnsDef().withMaxVersions(2).add("foo", "bar");
    KijiDataRequest first = builder1.build();
    KijiDataRequestBuilder builder2 = KijiDataRequest.builder().withTimeRange(2, 4);
    builder2.newColumnsDef().add("baz", "bot");
    builder2.newColumnsDef().withMaxVersions(6).add("foo", "bar");
    KijiDataRequest second = builder2.build();
    KijiDataRequest merged = first.merge(second);
    assertTrue("merge() cannot mutate the object in place", first != merged);
    KijiDataRequest.Column fooBarColumnRequest = merged.getColumn("foo", "bar");
    assertNotNull("Missing column foo:bar from merged request", fooBarColumnRequest);
    assertEquals("Max versions was not increased", 6, fooBarColumnRequest.getMaxVersions());
    assertEquals("Time range was not extended", 2L, merged.getMinTimestamp());
    assertEquals(4L, merged.getMaxTimestamp());
    KijiDataRequest.Column bazBotColumnRequest = merged.getColumn("baz", "bot");
    assertNotNull("Missing column from merged-in request", bazBotColumnRequest);
    KijiDataRequest symmetricMerged = second.merge(first);
    assertEquals("Merge must be symmetric", merged, symmetricMerged);
  }

  // Verifies that passing a "family:qualifier" string to addFamily() is rejected.
  @Test
  public void testInvalidColumnSpec() {
    // The user really wants 'builder.columns().add("family", "qualifier")'.
    // This will throw an exception.
    try {
      KijiDataRequest.builder().newColumnsDef().addFamily("family:qualifier");
      fail("An exception should have been thrown.");
    } catch (KijiInvalidNameException kine) {
      assertEquals(
          "Invalid family name: family:qualifier Name must match pattern: [a-zA-Z_][a-zA-Z0-9_]*",
          kine.getMessage());
    }
  }

  // Verifies that page size participates in request equality.
  @Test
  public void testPageSize() {
    final KijiDataRequestBuilder builder1 = KijiDataRequest.builder();
    builder1.newColumnsDef().withPageSize(1).add("foo", "bar");
    final KijiDataRequest first = builder1.build();
    final KijiDataRequestBuilder builder2 = KijiDataRequest.builder();
    builder2.newColumnsDef().add("foo", "bar");
    final KijiDataRequest second = builder2.build();
    assertThat(first, is(not(second)));
    assertFalse(first.equals(second));
    assertFalse(second.equals(first));
  }

  @Test
  public void testPageSizeMerge() {
    // Page size should merge to the smallest value.
    final KijiDataRequestBuilder builder1 = KijiDataRequest.builder();
    builder1.newColumnsDef().withPageSize(1).add("foo", "bar");
    final KijiDataRequest first = builder1.build();
    final KijiDataRequestBuilder builder2 = KijiDataRequest.builder();
    builder2.newColumnsDef().withPageSize(3).add("foo", "bar");
    final KijiDataRequest second = builder2.build();
    assertEquals("Unexpected page size for 'first'",
        1, first.getColumn("foo", "bar").getPageSize());
    assertEquals("Unexpected page size for 'second'",
        3, second.getColumn("foo", "bar").getPageSize());
    final KijiDataRequest merge1 = first.merge(second);
    final KijiDataRequest merge2 = second.merge(first);
    assertEquals("Merged results should be symmetric", merge1, merge2);
    assertEquals("Unexpected merged page size",
        1, merge1.getColumn("foo", "bar").getPageSize());
  }

  @Test
  public void testPageSizeMergeWithZero() {
    // ... unless the smallest value is zero, in which case we go with the
    // non-zero value.
    final KijiDataRequestBuilder builder1 = KijiDataRequest.builder();
    builder1.newColumnsDef().withPageSize(4).add("foo", "bar");
    final KijiDataRequest first = builder1.build();
    final KijiDataRequestBuilder builder2 = KijiDataRequest.builder();
    builder2.newColumnsDef().add("foo", "bar");
    final KijiDataRequest second = builder2.build();
    assertEquals("Unexpected page size for 'first'",
        4, first.getColumn("foo", "bar").getPageSize());
    assertEquals("Unexpected page size for 'second'",
        0, second.getColumn("foo", "bar").getPageSize());
    final KijiDataRequest merge1 = first.merge(second);
    final KijiDataRequest merge2 = second.merge(first);
    assertEquals("Merged results should be symmetric", merge1, merge2);
    assertEquals("Unexpected merged page size",
        4, merge1.getColumn("foo", "bar").getPageSize());
  }
}
| apache-2.0 |
flofreud/aws-sdk-java | aws-java-sdk-storagegateway/src/main/java/com/amazonaws/services/storagegateway/model/transform/ListVolumeRecoveryPointsResultJsonUnmarshaller.java | 3506 | /*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights
* Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.storagegateway.model.transform;
import java.util.Map;
import java.util.Map.Entry;
import java.math.*;
import java.nio.ByteBuffer;
import com.amazonaws.services.storagegateway.model.*;
import com.amazonaws.transform.SimpleTypeJsonUnmarshallers.*;
import com.amazonaws.transform.*;
import com.fasterxml.jackson.core.JsonToken;
import static com.fasterxml.jackson.core.JsonToken.*;
/**
* ListVolumeRecoveryPointsResult JSON Unmarshaller
*/
public class ListVolumeRecoveryPointsResultJsonUnmarshaller implements
        Unmarshaller<ListVolumeRecoveryPointsResult, JsonUnmarshallerContext> {

    /**
     * Walks the JSON token stream and populates a
     * {@code ListVolumeRecoveryPointsResult} from the fields found one nesting
     * level below the current position ("GatewayARN" and
     * "VolumeRecoveryPointInfos"). Returns {@code null} when the current JSON
     * value is null.
     */
    public ListVolumeRecoveryPointsResult unmarshall(
            JsonUnmarshallerContext context) throws Exception {
        ListVolumeRecoveryPointsResult listVolumeRecoveryPointsResult = new ListVolumeRecoveryPointsResult();

        // Remember where parsing started so we can detect when the parser has
        // climbed back out of this object.
        int originalDepth = context.getCurrentDepth();
        String currentParentElement = context.getCurrentParentElement();
        // Fields of this result object live exactly one nesting level deeper.
        int targetDepth = originalDepth + 1;

        JsonToken token = context.getCurrentToken();
        if (token == null)
            token = context.nextToken();
        if (token == VALUE_NULL)
            return null;

        while (true) {
            // End of the token stream: nothing more to read.
            if (token == null)
                break;

            if (token == FIELD_NAME || token == START_OBJECT) {
                if (context.testExpression("GatewayARN", targetDepth)) {
                    context.nextToken();
                    listVolumeRecoveryPointsResult.setGatewayARN(context
                            .getUnmarshaller(String.class).unmarshall(context));
                }
                if (context.testExpression("VolumeRecoveryPointInfos",
                        targetDepth)) {
                    context.nextToken();
                    // Delegate each array element to the per-item unmarshaller.
                    listVolumeRecoveryPointsResult
                            .setVolumeRecoveryPointInfos(new ListUnmarshaller<VolumeRecoveryPointInfo>(
                                    VolumeRecoveryPointInfoJsonUnmarshaller
                                            .getInstance()).unmarshall(context));
                }
            } else if (token == END_ARRAY || token == END_OBJECT) {
                // Stop once the parser has closed back to (or above) the depth
                // at which this unmarshaller was entered.
                if (context.getLastParsedParentElement() == null
                        || context.getLastParsedParentElement().equals(
                                currentParentElement)) {
                    if (context.getCurrentDepth() <= originalDepth)
                        break;
                }
            }
            token = context.nextToken();
        }

        return listVolumeRecoveryPointsResult;
    }

    // Lazily-initialized shared instance. The init is not synchronized; the
    // class holds no per-instance state, so an occasional duplicate instance
    // created under contention is harmless and interchangeable.
    private static ListVolumeRecoveryPointsResultJsonUnmarshaller instance;

    /** Returns the shared unmarshaller instance, creating it on first use. */
    public static ListVolumeRecoveryPointsResultJsonUnmarshaller getInstance() {
        if (instance == null)
            instance = new ListVolumeRecoveryPointsResultJsonUnmarshaller();
        return instance;
    }
}
| apache-2.0 |
youngwookim/presto | presto-main/src/main/java/io/prestosql/sql/relational/Expressions.java | 3380 | /*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.sql.relational;
import com.google.common.collect.ImmutableList;
import io.prestosql.metadata.Signature;
import io.prestosql.spi.type.Type;
import java.util.Arrays;
import java.util.List;
public final class Expressions
{
    // Static-utility class; never instantiated.
    private Expressions()
    {
    }

    /** Wraps a literal value of the given type. */
    public static ConstantExpression constant(Object value, Type type)
    {
        return new ConstantExpression(value, type);
    }

    /** Shorthand for a typed NULL literal. */
    public static ConstantExpression constantNull(Type type)
    {
        return new ConstantExpression(null, type);
    }

    /** Builds a call expression from a varargs argument list. */
    public static CallExpression call(Signature signature, Type returnType, RowExpression... arguments)
    {
        return new CallExpression(signature, returnType, Arrays.asList(arguments));
    }

    /** Builds a call expression from an explicit argument list. */
    public static CallExpression call(Signature signature, Type returnType, List<RowExpression> arguments)
    {
        return new CallExpression(signature, returnType, arguments);
    }

    /** References the input row's column at the given field index. */
    public static InputReferenceExpression field(int field, Type type)
    {
        return new InputReferenceExpression(field, type);
    }

    /**
     * Collects, in pre-order, every node of every expression tree in
     * {@code expressions}: each expression itself plus all of its transitively
     * nested sub-expressions (call arguments and lambda bodies included).
     */
    public static List<RowExpression> subExpressions(Iterable<RowExpression> expressions)
    {
        final ImmutableList.Builder<RowExpression> collected = ImmutableList.builder();
        // One stateless visitor is enough; it appends each node it sees and
        // recurses into children.
        RowExpressionVisitor<Void, Void> collector = new RowExpressionVisitor<Void, Void>()
        {
            @Override
            public Void visitCall(CallExpression call, Void context)
            {
                collected.add(call);
                for (RowExpression argument : call.getArguments()) {
                    argument.accept(this, context);
                }
                return null;
            }

            @Override
            public Void visitInputReference(InputReferenceExpression reference, Void context)
            {
                collected.add(reference);
                return null;
            }

            @Override
            public Void visitConstant(ConstantExpression literal, Void context)
            {
                collected.add(literal);
                return null;
            }

            @Override
            public Void visitLambda(LambdaDefinitionExpression lambda, Void context)
            {
                // A lambda contributes itself plus everything inside its body.
                collected.add(lambda);
                lambda.getBody().accept(this, context);
                return null;
            }

            @Override
            public Void visitVariableReference(VariableReferenceExpression reference, Void context)
            {
                collected.add(reference);
                return null;
            }
        };
        for (RowExpression expression : expressions) {
            expression.accept(collector, null);
        }
        return collected.build();
    }
}
| apache-2.0 |
katre/bazel | src/test/java/com/google/devtools/build/lib/packages/NativeExistingRulesTest.java | 18415 | // Copyright 2021 The Bazel Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package com.google.devtools.build.lib.packages;
import static com.google.common.truth.Truth.assertThat;
import com.google.devtools.build.lib.analysis.ConfiguredRuleClassProvider;
import com.google.devtools.build.lib.analysis.util.BuildViewTestCase;
import com.google.devtools.build.lib.testutil.TestRuleClassProvider;
import java.util.HashMap;
import java.util.Map;
import net.starlark.java.annot.Param;
import net.starlark.java.annot.StarlarkBuiltin;
import net.starlark.java.annot.StarlarkMethod;
import net.starlark.java.eval.Dict;
import net.starlark.java.eval.Starlark;
import net.starlark.java.eval.StarlarkInt;
import net.starlark.java.eval.StarlarkValue;
import net.starlark.java.eval.Tuple;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/**
* Tests for {@code native.existing_rule} and {@code native.existing_rules} functions.
*
* <p>This class covers the legacy behavior where the {@code
* --incompatible_existing_rules_immutable_view} flag is disabled. The enabled case is covered by
* the subclass, {@link WithImmutableView}.
*/
@RunWith(JUnit4.class)
public class NativeExistingRulesTest extends BuildViewTestCase {
  private TestStarlarkBuiltin testStarlarkBuiltin; // initialized by createRuleClassProvider()

  // Intended to be overridden by this test case's subclasses. Note that overriding of JUnit's
  // @Before methods is not recommended.
  protected void setupOptions() throws Exception {
    // --noincompatible_existing_rules_immutable_view is the default; set it explicitly for clarity.
    setBuildLanguageOptions("--noincompatible_existing_rules_immutable_view");
  }

  @Before
  public final void setUp() throws Exception {
    setupOptions();
  }

  // Starlark-visible helper ("test.save(name, value)") that lets .bzl code under
  // test hand values back to the Java test for assertions.
  @StarlarkBuiltin(name = "test")
  private static final class TestStarlarkBuiltin implements StarlarkValue {
    private final Map<String, Object> saved = new HashMap<>();

    @StarlarkMethod(
        name = "save",
        parameters = {
          @Param(name = "name", doc = "Name under which to save the value"),
          @Param(name = "value", doc = "Value to save")
        },
        doc = "Saves a Starlark value for testing from Java")
    public synchronized void save(String name, Object value) {
      saved.put(name, value);
    }
  }

  @Override
  protected ConfiguredRuleClassProvider createRuleClassProvider() {
    ConfiguredRuleClassProvider.Builder builder = new ConfiguredRuleClassProvider.Builder();
    TestRuleClassProvider.addStandardRules(builder);
    // Expose the "test" builtin (see TestStarlarkBuiltin above) to Starlark code.
    testStarlarkBuiltin = new TestStarlarkBuiltin();
    builder.addStarlarkAccessibleTopLevels("test", testStarlarkBuiltin);
    return builder.build();
  }

  // Retrieves a value previously stored from Starlark via test.save().
  private Object getSaved(String name) {
    return testStarlarkBuiltin.saved.get(name);
  }

  // Verifies that select() values survive being read back through existing_rule().
  @Test
  public void existingRule_handlesSelect() throws Exception {
    scratch.file("test/starlark/BUILD");
    scratch.file(
        "test/starlark/rulestr.bzl", //
        "def rule_dict(name):",
        " return native.existing_rule(name)");

    scratch.file(
        "test/getrule/BUILD",
        "load('//test/starlark:rulestr.bzl', 'rule_dict')",
        "cc_library(",
        " name ='x',",
        " srcs = select({'//conditions:default': []}),",
        ")",
        "rule_dict('x')");

    // Parse the BUILD file, to make sure select() makes it out of native.existing_rule().
    assertThat(getConfiguredTarget("//test/getrule:x")).isNotNull();
  }

  // Verifies that existing_rule() returns None for unknown names and for
  // targets that are not rules.
  @Test
  public void existingRule_returnsNone() throws Exception {
    scratch.file(
        "test/rulestr.bzl",
        "def test_rule(name, x):",
        " print(native.existing_rule(x))",
        " if native.existing_rule(x) == None:",
        " native.cc_library(name = name)");
    scratch.file(
        "test/BUILD",
        "load('//test:rulestr.bzl', 'test_rule')",
        "test_rule('a', 'does not exist')",
        "test_rule('b', 'BUILD')"); // exists, but as a target and not a rule

    assertThat(getConfiguredTarget("//test:a")).isNotNull();
    assertThat(getConfiguredTarget("//test:b")).isNotNull();
  }

  // Verifies that a select() read back from existing_rule() can be fed into a
  // new rule instantiation unchanged.
  @Test
  public void existingRule_roundTripsSelect() throws Exception {
    scratch.file(
        "test/existing_rule.bzl",
        "def macro():",
        " s = select({'//foo': ['//bar']})",
        " print('Passed: ' + repr(s))",
        " native.cc_library(name = 'x', srcs = s)",
        " print('Returned: ' + repr(native.existing_rule('x')['srcs']))",
        // The value returned here should round-trip fine.
        " native.cc_library(name = 'y', srcs = native.existing_rule('x')['srcs'])");
    scratch.file(
        "test/BUILD",
        "load('//test:existing_rule.bzl', 'macro')",
        "macro()",
        "cc_library(name = 'a', srcs = [])");
    getConfiguredTarget("//test:a");
    assertContainsEvent("Passed: select({\"//foo\": [\"//bar\"]}");
    // The short labels are now in their canonical form, and the sequence is represented as
    // tuple instead of list, but the meaning is unchanged.
    assertContainsEvent("Returned: select({\"//foo:foo\": (\"//bar:bar\",)}");
  }

  // Verifies that labels within the same package come back in shortened form.
  @Test
  public void existingRule_shortensLabelsInSamePackage() throws Exception {
    scratch.file(
        "test/existing_rule.bzl",
        "def save_deps():",
        " r = native.existing_rule('b')",
        " test.save(\"r['deps']\", r['deps'])");
    scratch.file(
        "test/BUILD",
        "load('//test:existing_rule.bzl', 'save_deps')",
        "cc_library(name = 'a', srcs = [])",
        "cc_binary(name = 'b', deps = ['//test:a'])",
        "save_deps()");
    getConfiguredTarget("//test:b");
    assertThat(Starlark.toIterable(getSaved("r['deps']")))
        .containsExactly(":a"); // as opposed to "//test:a"
  }

  // Verifies that existing_rules()/existing_rule() expose both native and
  // Starlark-defined rules together with their attribute values.
  @Test
  public void existingRules_findsRulesAndAttributes() throws Exception {
    scratch.file("test/BUILD");
    scratch.file("test/starlark/BUILD");
    scratch.file(
        "test/starlark/rulestr.bzl",
        "def rule_dict(name):",
        " return native.existing_rule(name)",
        "def rules_dict():",
        " return native.existing_rules()",
        "def nop(ctx):",
        " pass",
        "nop_rule = rule(attrs = {'x': attr.label()}, implementation = nop)",
        "def test_save(name, value):",
        " test.save(name, value)");

    scratch.file(
        "test/getrule/BUILD",
        "load('//test/starlark:rulestr.bzl', 'rules_dict', 'rule_dict', 'nop_rule', 'test_save')",
        "genrule(name = 'a', outs = ['a.txt'], ",
        " licenses = ['notice'],",
        " output_to_bindir = False,",
        " tools = [ '//test:bla' ], cmd = 'touch $@')",
        "nop_rule(name = 'c', x = ':a')",
        "rlist = rules_dict()",
        "test_save('all_str', [rlist['a']['kind'], rlist['a']['name'],",
        " rlist['c']['kind'], rlist['c']['name']])",
        "adict = rule_dict('a')",
        "cdict = rule_dict('c')",
        "test_save('a_str', [adict['kind'], adict['name'], adict['outs'][0], adict['tools'][0]])",
        "test_save('c_str', [cdict['kind'], cdict['name'], cdict['x']])",
        "test_save('adict.keys()', adict.keys())");

    getConfiguredTarget("//test/getrule:BUILD");

    // Both rule instances are reported, in declaration order.
    assertThat(Starlark.toIterable(getSaved("all_str")))
        .containsExactly("genrule", "a", "nop_rule", "c").inOrder();
    assertThat(Starlark.toIterable(getSaved("a_str")))
        .containsExactly("genrule", "a", ":a.txt", "//test:bla").inOrder();
    assertThat(Starlark.toIterable(getSaved("c_str")))
        .containsExactly("nop_rule", "c", ":a")
        .inOrder();
    // The attribute-name view includes both common and rule-specific attributes.
    assertThat(Starlark.toIterable(getSaved("adict.keys()")))
        .containsAtLeast(
            "name",
            "visibility",
            "transitive_configs",
            "tags",
            "generator_name",
            "generator_function",
            "generator_location",
            "features",
            "compatible_with",
            "target_compatible_with",
            "restricted_to",
            "srcs",
            "tools",
            "toolchains",
            "outs",
            "cmd",
            "output_to_bindir",
            "local",
            "message",
            "executable",
            "stamp",
            "heuristic_label_expansion",
            "kind");
  }

  // With the immutable-view flag off, the returned object is mutable;
  // overridden (to expect failure) in WithImmutableView below.
  @Test
  public void existingRule_returnsObjectWithCorrectMutability() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('inc.bzl', 'f')", //
        "f()");
    scratch.file(
        "test/inc.bzl", //
        "def f():",
        " native.config_setting(name='x', define_values={'key': 'value'})",
        " r = native.existing_rule('x')",
        " r['no_such_attribute'] = 'foo'",
        " r['define_values']['key'] = 123"); // mutate the dict

    assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error on mutation
  }

  // Verifies the full dict-like API (iteration, keys/values/items, get, in)
  // of the object returned by existing_rule().
  @Test
  public void existingRule_returnsDictLikeObject() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('inc.bzl', 'f')", //
        "f()");
    scratch.file(
        "test/inc.bzl", //
        "def f():",
        " native.config_setting(name='x', define_values={'key': 'value'})",
        " r = native.existing_rule('x')",
        " print('r == %s' % repr(r))",
        " test.save('[key for key in r]', [key for key in r])",
        " test.save('list(r)', list(r))",
        " test.save('r.keys()', r.keys())",
        " test.save('r.values()', r.values())",
        " test.save('r.items()', r.items())",
        " test.save(\"r['define_values']\", r['define_values'])",
        " test.save(\"r.get('define_values', 123)\", r.get('define_values', 123))",
        " test.save(\"r.get('invalid_attr', 123)\", r.get('invalid_attr', 123))",
        " test.save(\"'define_values' in r\", 'define_values' in r)",
        " test.save(\"'invalid_attr' in r\", 'invalid_attr' in r)");

    Dict<?, ?> expectedDefineValues = Dict.builder().put("key", "value").buildImmutable();
    assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error
    assertThat(Starlark.toIterable(getSaved("[key for key in r]")))
        .containsAtLeast("define_values", "name", "kind");
    assertThat(Starlark.toIterable(getSaved("list(r)")))
        .containsAtLeast("define_values", "name", "kind");
    assertThat(Starlark.toIterable(getSaved("r.keys()")))
        .containsAtLeast("define_values", "name", "kind");
    assertThat(Starlark.toIterable(getSaved("r.values()")))
        .containsAtLeast(expectedDefineValues, "x", "config_setting");
    assertThat(Starlark.toIterable(getSaved("r.items()")))
        .containsAtLeast(
            Tuple.of("define_values", expectedDefineValues),
            Tuple.of("name", "x"),
            Tuple.of("kind", "config_setting"));
    assertThat(getSaved("r['define_values']")).isEqualTo(expectedDefineValues);
    assertThat(getSaved("r.get('define_values', 123)")).isEqualTo(expectedDefineValues);
    assertThat(getSaved("r.get('invalid_attr', 123)")).isEqualTo(StarlarkInt.of(123));
    assertThat(getSaved("'define_values' in r")).isEqualTo(true);
    assertThat(getSaved("'invalid_attr' in r")).isEqualTo(false);
  }

  // With the immutable-view flag off, the rules mapping itself is mutable;
  // overridden (to expect failure) in WithImmutableView below.
  @Test
  public void existingRules_returnsObjectWithCorrectMutability() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('inc.bzl', 'f')", //
        "f()");
    scratch.file(
        "test/inc.bzl", //
        "def f():",
        " native.config_setting(name='x', define_values={'key': 'value'})",
        " rs = native.existing_rules()",
        " rs['no_such_rule'] = {'name': 'no_such_rule', 'kind': 'config_setting'}"); // mutate

    assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error on mutation
  }

  // Verifies the full dict-like API of the object returned by existing_rules().
  @Test
  public void existingRules_returnsDictLikeObject() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('inc.bzl', 'f')", //
        "f()");
    scratch.file(
        "test/inc.bzl", //
        "def f():",
        " native.config_setting(name='x', define_values={'key_x': 'value_x'})",
        " native.config_setting(name='y', define_values={'key_y': 'value_y'})",
        " rs = native.existing_rules()",
        " print('rs == %s' % repr(rs))",
        " test.save('[key for key in rs]', [key for key in rs])",
        " test.save('list(rs)', list(rs))",
        " test.save('rs.keys()', rs.keys())",
        " test.save(\"[v['name'] for v in rs.values()]\", [v['name'] for v in rs.values()])",
        " test.save(\"[(i[0], i[1]['name']) for i in rs.items()]\", [(i[0], i[1]['name']) for i in"
            + " rs.items()])",
        " test.save(\"rs['x']['define_values']\", rs['x']['define_values'])",
        " test.save(\"rs.get('x', {'name': 'z'})['name']\", rs.get('x', {'name': 'z'})['name'])",
        " test.save(\"rs.get('invalid_rule', {'name': 'invalid_rule'})\", rs.get('invalid_rule',"
            + " {'name': 'invalid_rule'}))",
        " test.save(\"'x' in rs\", 'x' in rs)",
        " test.save(\"'invalid_rule' in rs\", 'invalid_rule' in rs)");

    assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error
    assertThat(Starlark.toIterable(getSaved("[key for key in rs]"))).containsExactly("x", "y");
    assertThat(Starlark.toIterable(getSaved("list(rs)"))).containsExactly("x", "y");
    assertThat(Starlark.toIterable(getSaved("rs.keys()"))).containsExactly("x", "y");
    assertThat(Starlark.toIterable(getSaved("[v['name'] for v in rs.values()]")))
        .containsExactly("x", "y");
    assertThat(Starlark.toIterable(getSaved("[(i[0], i[1]['name']) for i in rs.items()]")))
        .containsExactly(Tuple.of("x", "x"), Tuple.of("y", "y"));
    assertThat(getSaved("rs['x']['define_values']"))
        .isEqualTo(Dict.builder().put("key_x", "value_x").buildImmutable());
    assertThat(getSaved("rs.get('x', {'name': 'z'})['name']")).isEqualTo("x");
    assertThat(getSaved("rs.get('invalid_rule', {'name': 'invalid_rule'})"))
        .isEqualTo(Dict.builder().put("name", "invalid_rule").buildImmutable());
    assertThat(getSaved("'x' in rs")).isEqualTo(true);
    assertThat(getSaved("'invalid_rule' in rs")).isEqualTo(false);
  }

  // Verifies that each call to existing_rules() snapshots only the rules
  // instantiated before the call.
  @Test
  public void existingRules_returnsSnapshotOfOnlyRulesInstantiatedUpToThatPoint() throws Exception {
    scratch.file(
        "test/BUILD",
        "load('inc.bzl', 'f')", //
        "f()");
    scratch.file(
        "test/inc.bzl", //
        "def f():",
        " native.config_setting(name='x', define_values={'key_x': 'value_x'})",
        " rs1 = native.existing_rules()",
        " native.config_setting(name='y', define_values={'key_y': 'value_y'})",
        " rs2 = native.existing_rules()",
        " native.config_setting(name='z', define_values={'key_z': 'value_z'})",
        " rs3 = native.existing_rules()",
        " test.save('rs1.keys()', rs1.keys())",
        " test.save('rs2.keys()', rs2.keys())",
        " test.save('rs3.keys()', rs3.keys())");

    assertThat(getConfiguredTarget("//test:BUILD")).isNotNull(); // no error
    assertThat(Starlark.toIterable(getSaved("rs1.keys()"))).containsExactly("x");
    assertThat(Starlark.toIterable(getSaved("rs2.keys()"))).containsExactly("x", "y");
    assertThat(Starlark.toIterable(getSaved("rs3.keys()"))).containsExactly("x", "y", "z");
  }

  /**
   * Tests for {@code native.existing_rule} and {@code native.existing_rules} Starlark functions
   * with the {@code --incompatible_existing_rules_immutable_view} flag set.
   */
  @RunWith(JUnit4.class)
  public static final class WithImmutableView extends NativeExistingRulesTest {

    @Override
    protected void setupOptions() throws Exception {
      setBuildLanguageOptions("--incompatible_existing_rules_immutable_view");
    }

    // With the flag on, mutating the view returned by existing_rule() must fail.
    @Test
    @Override
    public void existingRule_returnsObjectWithCorrectMutability() throws Exception {
      scratch.file(
          "test/BUILD",
          "load('inc.bzl', 'f')", //
          "f()");
      scratch.file(
          "test/inc.bzl", //
          "def f():",
          " native.config_setting(name='x', define_values={'key': 'value'})",
          " r = native.existing_rule('x')",
          " r['no_such_attribute'] = 123"); // mutate the view

      reporter.removeHandler(failFastHandler);
      assertThat(getConfiguredTarget("//test:BUILD")).isNull(); // mutation fails
      assertContainsEvent("can only assign an element in a dictionary or a list");
    }

    // With the flag on, mutating the mapping returned by existing_rules() must fail.
    @Test
    @Override
    public void existingRules_returnsObjectWithCorrectMutability() throws Exception {
      scratch.file(
          "test/BUILD",
          "load('inc.bzl', 'f')", //
          "f()");
      scratch.file(
          "test/inc.bzl", //
          "def f():",
          " native.config_setting(name='x', define_values={'key': 'value'})",
          " rs = native.existing_rules()",
          " rs['no_such_rule'] = {'name': 'no_such_rule', 'kind': 'config_setting'}"); // mutate

      reporter.removeHandler(failFastHandler);
      assertThat(getConfiguredTarget("//test:BUILD")).isNull(); // mutation fails
      assertContainsEvent("can only assign an element in a dictionary or a list");
    }

    // The immutability must extend to nested attribute values, not just the
    // top-level view.
    @Test
    public void existingRules_returnsDeeplyImmutableView() throws Exception {
      scratch.file(
          "test/BUILD",
          "load('inc.bzl', 'f')", //
          "f()");
      scratch.file(
          "test/inc.bzl", //
          "def f():",
          " native.config_setting(name='x', define_values={'key': 'value'})",
          " rs = native.existing_rules()",
          " rs['x']['define_values']['key'] = 123"); // mutate an attribute value within the view

      reporter.removeHandler(failFastHandler);
      assertThat(getConfiguredTarget("//test:BUILD")).isNull();
      assertContainsEvent("trying to mutate a frozen dict value");
    }
  }
}
| apache-2.0 |
tjordanchat/rundeck | rundeckapp/src/java/com/dtolabs/rundeck/execution/UnauthorizedStatusResult.java | 1443 | /*
* Copyright 2011 DTO Solutions, Inc. (http://dtosolutions.com)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* UnauthorizedInterpreterResult.java
*
* User: Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
* Created: 10/26/11 11:05 AM
*
*/
package com.dtolabs.rundeck.execution;
import com.dtolabs.rundeck.core.execution.StatusResult;
import java.util.*;
/**
 * A failed {@link StatusResult} indicating that an execution step was not
 * authorized.  {@link #isSuccess()} always returns {@code false}; the reason
 * string supplied at construction explains the denial and is also used as the
 * {@link #toString()} representation.
 *
 * @author Greg Schueler <a href="mailto:greg@dtosolutions.com">greg@dtosolutions.com</a>
 */
public class UnauthorizedStatusResult implements StatusResult {

    /** Human-readable explanation of the authorization failure; set once at construction. */
    private final String reason;

    /**
     * Creates an unauthorized (failed) result.
     *
     * @param reason explanation of why the action was not authorized
     */
    public UnauthorizedStatusResult(String reason) {
        this.reason = reason;
    }

    /**
     * @return always {@code false}: an unauthorized result never represents success
     */
    public boolean isSuccess() {
        return false;
    }

    /**
     * @return the reason the action was not authorized
     */
    public String getReason() {
        return reason;
    }

    @Override
    public String toString() {
        return getReason();
    }
}
| apache-2.0 |
papicella/snappy-store | gemfire-core/src/main/java/com/gemstone/gemfire/admin/internal/ManagedEntityConfigXmlParser.java | 16415 | /*
* Copyright (c) 2010-2015 Pivotal Software, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you
* may not use this file except in compliance with the License. You
* may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License. See accompanying
* LICENSE file.
*/
package com.gemstone.gemfire.admin.internal;
import com.gemstone.gemfire.admin.*;
import com.gemstone.gemfire.internal.Assert;
import com.gemstone.gemfire.internal.i18n.LocalizedStrings;
import javax.xml.parsers.SAXParser;
import javax.xml.parsers.SAXParserFactory;
import org.xml.sax.*;
import org.xml.sax.helpers.DefaultHandler;
import java.io.*;
import java.util.*;
/**
 * Parses an XML file and configures a {@link DistributedSystemConfig}
 * from it.
 *
 * <p>Implemented as a SAX {@link ContentHandler}: start-element callbacks push
 * intermediate values (config objects, or {@code StringBuffer}s of character
 * data) onto {@link #stack}, and the matching end-element callbacks pop those
 * values and apply them to the enclosing config object.
 *
 * @author David Whitlock
 * @since 4.0
 */
public class ManagedEntityConfigXmlParser
  extends ManagedEntityConfigXml implements ContentHandler {

  /** The <code>DistributedSystemConfig</code> to be configured */
  private DistributedSystemConfig config;

  /**
   * The stack of intermediate values used while parsing.  It holds a mixture
   * of config objects and <code>StringBuffer</code>s of accumulated character
   * data, hence the <code>Object</code> element type.
   */
  private final Stack<Object> stack = new Stack<Object>();

  //////////////////////  Static Methods  //////////////////////

  /**
   * Parses XML data and from it configures a
   * <code>DistributedSystemConfig</code>.
   *
   * @param is     stream containing the XML document
   * @param config the configuration object to populate
   *
   * @throws AdminXmlException
   *         If an error is encountered while parsing the XML
   */
  public static void parse(InputStream is,
                           DistributedSystemConfig config) {
    ManagedEntityConfigXmlParser handler =
      new ManagedEntityConfigXmlParser();
    handler.config = config;

    try {
      SAXParserFactory factory = SAXParserFactory.newInstance();
      factory.setValidating(true);
      SAXParser parser = factory.newSAXParser();
      parser.parse(is, new DefaultHandlerDelegate(handler));

    } catch (Exception ex) {
      // Unwrap AdminXmlExceptions so callers see the original failure.
      if (ex instanceof AdminXmlException) {
        throw (AdminXmlException) ex;

      } else if (ex.getCause() instanceof AdminXmlException) {
        throw (AdminXmlException) ex.getCause();

      } else if (ex instanceof SAXException) {
        // Silly JDK 1.4.2 XML parser wraps RunTime exceptions in a
        // SAXException.  Pshaw!
        SAXException sax = (SAXException) ex;
        Exception cause = sax.getException();
        if (cause instanceof AdminXmlException) {
          throw (AdminXmlException) cause;
        }
      }

      throw new AdminXmlException(LocalizedStrings.ManagedEntityConfigXmlParser_WHILE_PARSING_XML.toLocalizedString(), ex);
    }
  }

  /**
   * Helper method for parsing an integer.
   *
   * @throws AdminXmlException
   *         If <code>s</code> is a malformed integer
   */
  private static int parseInt(String s) {
    try {
      return Integer.parseInt(s);

    } catch (NumberFormatException ex) {
      throw new AdminXmlException(LocalizedStrings.ManagedEntityConfigXmlParser_MALFORMED_INTEGER_0.toLocalizedString(s), ex);
    }
  }

  //////////////////////  Instance Methods  //////////////////////

  /**
   * Dispatches a start-element event to the handler method for the element.
   *
   * @throws AdminXmlException if the element name is not recognized
   */
  public void startElement(String namespaceURI, String localName,
                           String qName, Attributes atts)
    throws SAXException {

    if (qName.equals(DISTRIBUTED_SYSTEM)) {
      startDistributedSystem(atts);

    } else if (qName.equals(REMOTE_COMMAND)) {
      startRemoteCommand(atts);

    } else if (qName.equals(LOCATORS)) {
      startLocators(atts);

    } else if (qName.equals(MULTICAST)) {
      startMulticast(atts);

    } else if (qName.equals(LOCATOR)) {
      startLocator(atts);

    } else if (qName.equals(HOST)) {
      startHost(atts);

    } else if (qName.equals(WORKING_DIRECTORY)) {
      startWorkingDirectory(atts);

    } else if (qName.equals(PRODUCT_DIRECTORY)) {
      startProductDirectory(atts);

    } else if (qName.equals(SSL)) {
      startSSL(atts);

    } else if (qName.equals(PROTOCOLS)) {
      startProtocols(atts);

    } else if (qName.equals(CIPHERS)) {
      startCiphers(atts);

    } else if (qName.equals(PROPERTY)) {
      startProperty(atts);

    } else if (qName.equals(KEY)) {
      startKey(atts);

    } else if (qName.equals(VALUE)) {
      startValue(atts);

    } else if (qName.equals(CACHE_SERVER)) {
      startCacheServer(atts);

    } else if (qName.equals(CLASSPATH)) {
      startClassPath(atts);

    } else {
      throw new AdminXmlException(LocalizedStrings.ManagedEntityConfigXmlParser_UNKNOWN_XML_ELEMENT_0.toLocalizedString(qName));
    }
  }

  /**
   * Dispatches an end-element event to the handler method for the element.
   *
   * @throws AdminXmlException if the element name is not recognized
   */
  public void endElement(String namespaceURI, String localName,
                         String qName)
    throws SAXException {

    if (qName.equals(DISTRIBUTED_SYSTEM)) {
      endDistributedSystem();

    } else if (qName.equals(REMOTE_COMMAND)) {
      endRemoteCommand();

    } else if (qName.equals(LOCATORS)) {
      endLocators();

    } else if (qName.equals(MULTICAST)) {
      endMulticast();

    } else if (qName.equals(LOCATOR)) {
      endLocator();

    } else if (qName.equals(HOST)) {
      endHost();

    } else if (qName.equals(WORKING_DIRECTORY)) {
      endWorkingDirectory();

    } else if (qName.equals(PRODUCT_DIRECTORY)) {
      endProductDirectory();

    } else if (qName.equals(SSL)) {
      endSSL();

    } else if (qName.equals(PROTOCOLS)) {
      endProtocols();

    } else if (qName.equals(CIPHERS)) {
      endCiphers();

    } else if (qName.equals(PROPERTY)) {
      endProperty();

    } else if (qName.equals(KEY)) {
      endKey();

    } else if (qName.equals(VALUE)) {
      endValue();

    } else if (qName.equals(CACHE_SERVER)) {
      endCacheServer();

    } else if (qName.equals(CLASSPATH)) {
      endClassPath();

    } else {
      throw new AdminXmlException(LocalizedStrings.ManagedEntityConfigXmlParser_UNKNOWN_XML_ELEMENT_0.toLocalizedString(qName));
    }
  }

  /**
   * When a <code>distributed-system</code> element is encountered, we
   * push the <code>DistributedSystemConfig</code> on the stack.
   */
  private void startDistributedSystem(Attributes atts) {
    Assert.assertTrue(stack.isEmpty());

    String id = atts.getValue(ID);
    if (id != null) {
      this.config.setSystemId(id);
    }

    String disable_tcp = atts.getValue(DISABLE_TCP);
    if (disable_tcp != null) {
      // BUG FIX: compare the attribute VALUE, not the attribute-name constant.
      // The previous code evaluated DISABLE_TCP.equalsIgnoreCase("true"),
      // which is always false, so disable-tcp="true" was silently ignored.
      this.config.setDisableTcp(disable_tcp.equalsIgnoreCase("true"));
    }

    stack.push(this.config);
  }

  /**
   * When a <code>distributed-system</code> element is finished
   */
  private void endDistributedSystem() {

  }

  /**
   * When a <code>multicast</code> is first encountered, get the
   * <code>DistributedSystemConfig</code> off of the top of the stack
   * and set its multicast config appropriately.
   */
  private void startMulticast(Attributes atts) {
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();

    String port = atts.getValue(PORT);
    config.setMcastPort(parseInt(port));

    String address = atts.getValue(ADDRESS);
    if (address != null) {
      config.setMcastAddress(address);
    }
  }

  private void endMulticast() {

  }

  /**
   * Starts a <code>remote-command</code> element.  The item on top of
   * the stack may be a <code>DistributedSystemConfig</code> or it
   * might be a <code>ManagedEntityConfig</code>.
   */
  private void startRemoteCommand(Attributes atts) {

  }

  /**
   * Ends a <code>remote-command</code> element.  Pop the command off
   * the top of the stack and set it on the
   * <code>DistributedSystemConfig</code> or it might be a
   * <code>ManagedEntityConfig</code> on top of the stack.
   */
  private void endRemoteCommand() {
    String remoteCommand = popString();
    Object top = stack.peek();
    Assert.assertTrue(top != null);

    if (top instanceof DistributedSystemConfig) {
      ((DistributedSystemConfig) top).setRemoteCommand(remoteCommand);

    } else if (top instanceof ManagedEntityConfig) {
      ((ManagedEntityConfig) top).setRemoteCommand(remoteCommand);

    } else {
      String s = "Did not expect a " + top.getClass().getName() +
        " on top of the stack";
      Assert.assertTrue(false, s);
    }
  }

  private void startLocators(Attributes atts) {

  }

  private void endLocators() {

  }

  /**
   * Starts a <code>locator</code> element: using a locator implies that
   * multicast discovery is off, so the mcast port is forced to zero.
   */
  private void startLocator(Attributes atts) {
    String port = atts.getValue(PORT);

    DistributedSystemConfig system =
      (DistributedSystemConfig) stack.peek();
    system.setMcastPort(0);

    DistributionLocatorConfig config =
      system.createDistributionLocatorConfig();

    config.setPort(parseInt(port));

    stack.push(config);
  }

  private void endLocator() {
    Object o = stack.pop();
    Assert.assertTrue(o instanceof DistributionLocatorConfig);
  }

  private void startHost(Attributes atts) {

  }

  /**
   * We assume that there is a <code>ManagedEntityConfig</code> on top
   * of the stack.
   */
  private void endHost() {
    String host = popString();
    ManagedEntityConfig config = (ManagedEntityConfig) stack.peek();
    config.setHost(host);
  }

  private void startWorkingDirectory(Attributes atts) {

  }

  private void endWorkingDirectory() {
    String workingDirectory = popString();
    ManagedEntityConfig config = (ManagedEntityConfig) stack.peek();
    config.setWorkingDirectory(workingDirectory);
  }

  private void startProductDirectory(Attributes atts) {

  }

  private void endProductDirectory() {
    String productDirectory = popString();
    ManagedEntityConfig config = (ManagedEntityConfig) stack.peek();
    config.setProductDirectory(productDirectory);
  }

  /**
   * Starts an <code>ssl</code> element: enables SSL on the config and records
   * whether client authentication is required.
   */
  private void startSSL(Attributes atts) {
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();
    config.setSSLEnabled(true);

    String authenticationRequired =
      atts.getValue(AUTHENTICATION_REQUIRED);
    config.setSSLAuthenticationRequired(Boolean.valueOf(authenticationRequired).booleanValue());
  }

  private void endSSL() {

  }

  private void startProtocols(Attributes atts) {

  }

  private void endProtocols() {
    String protocols = popString();
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();
    config.setSSLProtocols(protocols);
  }

  private void startCiphers(Attributes atts) {

  }

  private void endCiphers() {
    String ciphers = popString();
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();
    config.setSSLCiphers(ciphers);
  }

  private void startProperty(Attributes atts) {

  }

  /**
   * Ends a <code>property</code> element: the nested key and value elements
   * have pushed two strings; pop value first (it was pushed last), then key.
   */
  private void endProperty() {
    String value = popString();
    String key = popString();
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();
    config.addSSLProperty(key, value);
  }

  private void startKey(Attributes atts) {

  }

  private void endKey() {
    String key = popString();
    stack.push(key);
  }

  private void startValue(Attributes atts) {

  }

  private void endValue() {
    String value = popString();
    stack.push(value);
  }

  private void startCacheServer(Attributes atts) {
    DistributedSystemConfig config =
      (DistributedSystemConfig) stack.peek();
    CacheServerConfig server =
      config.createCacheServerConfig();
    stack.push(server);
  }

  private void endCacheServer() {
    /* CacheServerConfig server = (CacheServerConfig) */ stack.pop();
  }

  private void startClassPath(Attributes atts) {

  }

  private void endClassPath() {
    String classpath = popString();
    CacheServerConfig server = (CacheServerConfig) stack.peek();
    server.setClassPath(classpath);
  }

  /**
   * Pops a <code>String</code> off of the stack.  Character data is
   * accumulated in a <code>StringBuffer</code> (see {@link #characters}),
   * so convert it if necessary.
   */
  private String popString() {
    Object o = stack.pop();

    if (o instanceof StringBuffer) {
      StringBuffer sb = (StringBuffer) o;
      return sb.toString();

    } else {
      return (String) o;
    }
  }

  /**
   * Long strings in XML files may generate multiple
   * <code>characters</code> callbacks.  Coalesce multiple callbacks
   * into one big string by using a <code>StringBuffer</code>.  See
   * bug 32122.
   */
  public void characters(char[] ch, int start, int length)
    throws SAXException {

    Object top = stack.peek();

    StringBuffer sb;
    if (top instanceof StringBuffer) {
      sb = (StringBuffer) top;

    } else {
      sb = new StringBuffer();
      stack.push(sb);
    }

    sb.append(ch, start, length);
  }

  //////////  Inherited methods that don't do anything  //////////

  public void setDocumentLocator(Locator locator) { }

  public void startDocument() throws SAXException { }

  public void endDocument() throws SAXException { }

  public void startPrefixMapping(String prefix, String uri)
    throws SAXException { }

  public void endPrefixMapping(String prefix)
    throws SAXException { }

  public void ignorableWhitespace(char[] ch, int start, int length)
    throws SAXException { }

  public void processingInstruction(String target, String data)
    throws SAXException { }

  public void skippedEntity(String name) throws SAXException { }

  ///////////////////////  Inner Classes  ///////////////////////

  /**
   * Class that delegates all of the methods of a {@link
   * DefaultHandler} to a {@link ManagedEntityConfigXmlParser} that
   * implements all of the methods of <code>DefaultHandler</code>, but
   * <B>is not</B> a <code>DefaultHandler</code>.
   */
  static class DefaultHandlerDelegate extends DefaultHandler {
    /** The <code>ManagedEntityConfigXmlParser</code> that does the
     * real work */
    private ManagedEntityConfigXmlParser handler;

    /**
     * Creates a new <code>DefaultHandlerDelegate</code> that
     * delegates to the given
     * <code>ManagedEntityConfigXmlParser</code>.
     */
    public DefaultHandlerDelegate(ManagedEntityConfigXmlParser handler) {
      this.handler = handler;
    }

    @Override
    public InputSource resolveEntity(String publicId,
                                     String systemId)
      throws SAXException {
      return handler.resolveEntity(publicId, systemId);
    }

    @Override
    public void setDocumentLocator(Locator locator) {
      handler.setDocumentLocator(locator);
    }

    @Override
    public void startDocument() throws SAXException {
      handler.startDocument();
    }

    @Override
    public void endDocument() throws SAXException {
      handler.endDocument();
    }

    @Override
    public void startPrefixMapping(String prefix, String uri)
      throws SAXException {
      handler.startPrefixMapping(prefix, uri);
    }

    @Override
    public void endPrefixMapping(String prefix) throws SAXException {
      handler.endPrefixMapping(prefix);
    }

    @Override
    public void startElement(String uri, String localName,
                             String qName, Attributes attributes)
      throws SAXException {
      handler.startElement(uri, localName, qName, attributes);
    }

    @Override
    public void endElement(String uri, String localName, String qName)
      throws SAXException {
      handler.endElement(uri, localName, qName);
    }

    @Override
    public void characters(char[] ch, int start, int length)
      throws SAXException {
      handler.characters(ch, start, length);
    }

    @Override
    public void ignorableWhitespace(char[] ch, int start, int length)
      throws SAXException {
      handler.ignorableWhitespace(ch, start, length);
    }

    @Override
    public void processingInstruction(String target, String data)
      throws SAXException {
      handler.processingInstruction(target, data);
    }

    @Override
    public void skippedEntity(String name) throws SAXException {
      handler.skippedEntity(name);
    }

    @Override
    public void warning(SAXParseException e) throws SAXException {
      handler.warning(e);
    }

    @Override
    public void error(SAXParseException e) throws SAXException {
      handler.error(e);
    }

    @Override
    public void fatalError(SAXParseException e) throws SAXException {
      handler.fatalError(e);
    }
  }
}
| apache-2.0 |
apache/syncope | ext/flowable/flowable-bpmn/src/main/java/org/apache/syncope/core/flowable/support/SyncopeSession.java | 997 | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.syncope.core.flowable.support;
import org.flowable.common.engine.impl.interceptor.Session;
/**
 * Marker interface for Syncope-specific Flowable {@link Session}
 * implementations that expose the session type they should be registered under.
 */
public interface SyncopeSession extends Session {

    /**
     * @return the session type associated with this implementation
     */
    Class<?> getType();
}
| apache-2.0 |
android-ia/platform_tools_idea | platform/util/src/com/intellij/openapi/diff/impl/processing/ByWord.java | 14751 | /*
* Copyright 2000-2009 JetBrains s.r.o.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.intellij.openapi.diff.impl.processing;
import com.intellij.openapi.diagnostic.Logger;
import com.intellij.openapi.diff.ex.DiffFragment;
import com.intellij.openapi.diff.impl.ComparisonPolicy;
import com.intellij.openapi.diff.impl.highlighting.FragmentSide;
import com.intellij.openapi.diff.impl.highlighting.Util;
import com.intellij.openapi.util.TextRange;
import com.intellij.util.diff.Diff;
import com.intellij.util.diff.FilesTooBigForDiffException;
import java.util.ArrayList;
/**
 * Word-level diff policy: splits both texts into {@link Word}s, runs a
 * sequence diff over the word arrays, and converts the resulting change list
 * into {@link DiffFragment}s (refining prefixes/postfixes and tails with a
 * character-level diff).
 */
public class ByWord implements DiffPolicy {
  private static final Logger LOG = Logger.getInstance("#com.intellij.openapi.diff.impl.processing.ByWord");
  private final ComparisonPolicy myComparisonPolicy;

  public ByWord(ComparisonPolicy comparisonPolicy) {
    myComparisonPolicy = comparisonPolicy;
  }

  /**
   * Builds diff fragments for the two texts.
   *
   * <p>If there is at least one non-whitespace word but no equal word pair at
   * all, the two texts are returned as a single changed fragment instead of a
   * noisy word-by-word diff.  The leading empty fragment that the builder may
   * produce is stripped before returning.
   *
   * @throws FilesTooBigForDiffException propagated from the underlying diff
   */
  public DiffFragment[] buildFragments(String text1, String text2) throws FilesTooBigForDiffException {
    Word[] words1 = buildWords(text1, myComparisonPolicy);
    Word[] words2 = buildWords(text2, myComparisonPolicy);
    Diff.Change change = Diff.buildChanges(words1, words2);
    change = Util.concatEquals(change, words1, words2);
    if (Math.max(countNotWhitespaces(words1), countNotWhitespaces(words2)) > 0 && countEqual(change, words1, words2) == 0)
      return new DiffFragment[]{myComparisonPolicy.createFragment(text1, text2)};
    FragmentBuilder result = new FragmentBuilder(words1, words2, myComparisonPolicy, text1, text2);
    FragmentBuilder.Version version1 = result.getVersion1();
    FragmentBuilder.Version version2 = result.getVersion2();
    while (change != null) {
      // Words before this change are equal on both sides; emit them first.
      if (change.line0 > version1.getCurrentWordIndex()) {
        processEquals(change.line0, change.line1, result);
      }
      if (change.inserted == 0) {
        processOneside(version1, change.deleted);
      } else if (change.deleted == 0) {
        processOneside(version2, change.inserted);
      } else {
        // Change on both sides: emit the whitespace prefixes, then the words.
        String prefix1 = version1.getCurrentWordPrefix();
        String prefix2 = version2.getCurrentWordPrefix();
        if (prefix1.length() > 0 || prefix2.length() > 0)
          result.add(myComparisonPolicy.createFragment(prefix1, prefix2));
        result.addChangedWords(change.deleted, change.inserted);
      }
      change = change.link;
    }
    // Remaining words after the last change are equal.
    processEquals(words1.length, words2.length, result);
    result.addTails();
    DiffFragment[] fragments = result.getFragments();
    DiffFragment firstFragment = fragments[0];
    if (DiffFragment.isEmpty(firstFragment)) {
      DiffFragment[] newFragments = new DiffFragment[fragments.length - 1];
      System.arraycopy(fragments, 1, newFragments, 0, newFragments.length);
      fragments = newFragments;
    }
    return fragments;
  }

  // Number of words in the array that are not pure whitespace.
  private int countNotWhitespaces(Word[] words) {
    int counter = 0;
    for (int i = 0; i < words.length; i++) {
      Word word = words[i];
      if (!word.isWhitespace()) counter++;
    }
    return counter;
  }

  // Number of positions where both sides have an equal, non-whitespace word,
  // walking the change list to locate the equal stretches.
  private int countEqual(Diff.Change change, Word[] words1, Word[] words2) {
    int counter = 0;
    int position1 = 0;
    int position2 = 0;
    while (change != null) {
      if (change.line0 > position1) {
        int same = change.line0 - position1;
        LOG.assertTrue(same == change.line1 - position2);
        for (int i = 0; i < same; i++) {
          if (!words1[position1 + i].isWhitespace() && !words2[position2 + i].isWhitespace()) counter++;
        }
        position1 += same;
        position2 += same;
      }
      position1 += change.deleted;
      position2 += change.inserted;
      change = change.link;
    }
    // Equal tail after the last change.
    int tailCount = words1.length - position1;
    LOG.assertTrue(tailCount == words2.length - position2);
    while (tailCount > 0) {
      if (!words1[words1.length - tailCount].isWhitespace() &&
          !words2[words2.length - tailCount].isWhitespace()) counter++;
      tailCount--;
    }
    return counter;
  }

  // Emits a one-sided (insert-only or delete-only) run of wordCount words.
  private void processOneside(FragmentBuilder.Version version, int wordCount) {
    String prefix = version.getCurrentWordPrefix();
    version.addOneSide(prefix, wordCount);
  }

  // Emits equal word pairs until side 1 reaches word index changed1;
  // side 2 must then be exactly at changed2.
  private void processEquals(int changed1, int changed2, FragmentBuilder result) throws FilesTooBigForDiffException {
    while (result.getVersion1().getCurrentWordIndex() < changed1) {
      result.processEqual();
    }
    LOG.assertTrue(changed2 == result.getVersion2().getCurrentWordIndex());
  }

  /**
   * Splits text into words.  Runs of whitespace become "formatting" words
   * (policy-created); newlines terminate words and start a formatting run;
   * delimiter characters split words without being included.
   */
  static Word[] buildWords(String text, ComparisonPolicy policy) {
    ArrayList<Word> words = new ArrayList<Word>();
    if (text.length() == 0 || !Character.isWhitespace(text.charAt(0)))
      words.add(policy.createFormatting(text, TextRange.EMPTY_RANGE));
    int start = 0;
    boolean withinFormatting = true;
    for (int i = 0; i < text.length(); i++) {
      char nextChar = text.charAt(i);
      boolean isWhitespace = Character.isWhitespace(nextChar);
      if (withinFormatting) {
        if (isWhitespace) continue;
        if (start != -1 && start < i) words.add(policy.createFormatting(text, new TextRange(start, i)));
        start = -1;
        withinFormatting = false;
      }
      if (nextChar == '\n') {
        if (start != -1) words.add(new Word(text, new TextRange(start, i)));
        start = i;
        withinFormatting = true;
      } else if (Util.DELIMITERS_SET.contains(nextChar)) {
        if (start != -1) {
          words.add(new Word(text, new TextRange(start, i)));
          start = -1;
        }
      } else {
        if (start == -1) start = i;
      }
    }
    if (start != -1) {
      TextRange range = new TextRange(start, text.length());
      Word lastWord = withinFormatting ? policy.createFormatting(text, range) : new Word(text, range);
      words.add(lastWord);
    }
    return words.toArray(new Word[words.size()]);
  }

  /**
   * Accumulates {@link DiffFragment}s while walking the word arrays of both
   * versions; refines word prefixes/postfixes and trailing text with a
   * character-level diff.
   */
  private static class FragmentBuilder {
    private final ArrayList<DiffFragment> myFragments = new ArrayList<DiffFragment>();
    private final Version myVersion1;
    private final Version myVersion2;
    private final DiffPolicy.ByChar BY_CHAR;
    private final DiffCorrection.ChangedSpace CORRECTION;
    private final ComparisonPolicy myComparisonPolicy;

    public FragmentBuilder(Word[] words1, Word[] words2, ComparisonPolicy comparisonPolicy, String text1, String text2) {
      myVersion1 = new Version(words1, text1, this, true);
      myVersion2 = new Version(words2, text2, this, false);
      BY_CHAR = new ByChar(comparisonPolicy);
      CORRECTION = new DiffCorrection.ChangedSpace(comparisonPolicy);
      myComparisonPolicy = comparisonPolicy;
    }

    public DiffFragment[] getFragments() {
      return myFragments.toArray(new DiffFragment[myFragments.size()]);
    }

    public Version getVersion1() { return myVersion1; }

    public Version getVersion2() { return myVersion2; }

    private void addAll(DiffFragment[] fragments) {
      for (int i = 0; i < fragments.length; i++) {
        DiffFragment fragment = fragments[i];
        add(fragment);
      }
    }

    // Adds a fragment, advancing each version's processed-offset by the
    // length of its side; merges consecutive equal fragments into one.
    private void add(DiffFragment fragment) {
      String text1 = fragment.getText1();
      String text2 = fragment.getText2();
      if (text1 != null) myVersion1.addOffset(text1.length());
      if (text2 != null) myVersion2.addOffset(text2.length());
      if (fragment.isEqual() && myFragments.size() > 0) {
        int lastIndex = myFragments.size() - 1;
        DiffFragment prevFragment = myFragments.get(lastIndex);
        if (prevFragment.isEqual()) {
          myFragments.remove(lastIndex);
          fragment = DiffFragment.unchanged(prevFragment.getText1() + fragment.getText1(),
                                            prevFragment.getText2() + fragment.getText2());
        }
      }
      myFragments.add(fragment);
    }

    private void addEqual(Word word1, Word word2) throws FilesTooBigForDiffException {
      addAll(CORRECTION.correct(new DiffFragment[]{myComparisonPolicy.createFragment(word1, word2)}));
    }

    // Emits one equal word pair: char-diffed prefixes, the equal words
    // themselves, then their postfixes; advances both versions by one word.
    public void processEqual() throws FilesTooBigForDiffException {
      Word word1 = myVersion1.getCurrentWord();
      Word word2 = myVersion2.getCurrentWord();
      addAll(fragmentsByChar(myVersion1.getCurrentWordPrefix(), myVersion2.getCurrentWordPrefix()));
      addEqual(word1, word2);
      addPostfixes();
      myVersion1.incCurrentWord();
      myVersion2.incCurrentWord();
    }

    // Char-level diff of the two texts, each preceded by the previous char
    // for context; that artificial first fragment is cut off afterwards.
    private DiffFragment[] fragmentsByChar(String text1, String text2) throws FilesTooBigForDiffException {
      if (text1.length() == 0 && text2.length() == 0) {
        return DiffFragment.EMPTY_ARRAY;
      }
      final String side1 = myVersion1.getPrevChar() + text1;
      final String side2 = myVersion2.getPrevChar() + text2;
      DiffFragment[] fragments = BY_CHAR.buildFragments(side1, side2);
      return Util.cutFirst(fragments);
    }

    // Emits the equal leading part of the current words' postfixes.  Each
    // postfix carries one look-ahead char (see getCurrentWordPostfixAndOneMore)
    // which is cut off before adding.
    private void addPostfixes() throws FilesTooBigForDiffException {
      String postfix1 = myVersion1.getCurrentWordPostfixAndOneMore();
      String postfix2 = myVersion2.getCurrentWordPostfixAndOneMore();
      int length1 = postfix1.length();
      int length2 = postfix2.length();
      DiffFragment wholePostfix = myComparisonPolicy.createFragment(postfix1, postfix2);
      if (wholePostfix.isEqual()) {
        add(DiffFragment.unchanged(cutLast(postfix1, length1), cutLast(postfix2, length2)));
        return;
      }
      if (length1 > 0 || length2 > 0) {
        DiffFragment[] fragments = BY_CHAR.buildFragments(postfix1, postfix2);
        DiffFragment firstFragment = fragments[0];
        if (firstFragment.isEqual()) {
          final String text1 = cutLast(firstFragment.getText1(), length1);
          final String text2 = cutLast(firstFragment.getText2(), length2);
          add(myComparisonPolicy.createFragment(text1, text2));
          //add(firstFragment);
        }
      }
    }

    // Drops the trailing look-ahead char unless the text is already shorter
    // than the full postfix length.
    private String cutLast(String text, int length) {
      if (text.length() < length) return text;
      else return text.substring(0, text.length() - 1);
    }

    private void addOneSide(String text, FragmentSide side) {
      DiffFragment fragment = side.createFragment(text, null, false);
      add(myComparisonPolicy.createFragment(fragment.getText1(), fragment.getText2()));
    }

    public void addChangedWords(int wordCount1, int wordCount2) {
      add(new DiffFragment(myVersion1.getWordSequence(wordCount1), myVersion2.getWordSequence(wordCount2)));
      myVersion1.incCurrentWord(wordCount1);
      myVersion2.incCurrentWord(wordCount2);
    }

    // Emits the text remaining after the last word on each side.  If the
    // char diff of the tails starts with one-sided fragments and the last
    // emitted fragment was a change, those one-sided pieces are folded into
    // that change fragment.
    public void addTails() throws FilesTooBigForDiffException {
      String tail1 = myVersion1.getNotProcessedTail();
      String tail2 = myVersion2.getNotProcessedTail();
      if (tail1.length() == 0 && tail2.length() == 0) return;
      DiffFragment[] fragments = fragmentsByChar(tail1, tail2);
      if (myFragments.size() > 0) {
        DiffFragment lastFragment = myFragments.get(myFragments.size() - 1);
        if (lastFragment.isChange()) {
          int oneSideCount = 0;
          while (oneSideCount < fragments.length && fragments[oneSideCount].isOneSide()) oneSideCount++;
          if (oneSideCount > 0) {
            myFragments.remove(myFragments.size() - 1);
            DiffFragment[] onesideFragments = new DiffFragment[oneSideCount];
            DiffFragment[] otherFragments = new DiffFragment[fragments.length - oneSideCount];
            System.arraycopy(fragments, 0, onesideFragments, 0, oneSideCount);
            System.arraycopy(fragments, oneSideCount, otherFragments, 0, otherFragments.length);
            DiffFragment startingOneSides = UniteSameType.uniteAll(onesideFragments);
            if (startingOneSides.isOneSide()) {
              myFragments.add(lastFragment);
              add(startingOneSides);
            } else {
              lastFragment = Util.unite(lastFragment, startingOneSides);
              myFragments.add(lastFragment);
            }
            fragments = otherFragments;
          }
        }
      }
      addAll(fragments);
    }

    /**
     * Cursor over one side's word array and text: tracks the current word
     * index and how many characters of the text have been emitted so far.
     */
    public static class Version {
      private final Word[] myWords;
      private int myCurrentWord = 0;
      private int myOffset = 0;
      private final String myText;
      private final FragmentBuilder myBuilder;
      private final FragmentSide mySide;

      public Version(Word[] words, String text, FragmentBuilder builder, boolean delete) {
        myWords = words;
        myText = text;
        myBuilder = builder;
        mySide = delete ? FragmentSide.SIDE1 : FragmentSide.SIDE2;
      }

      public int getProcessedOffset() {
        return myOffset;
      }

      public int getCurrentWordIndex() {
        return myCurrentWord;
      }

      public void addOffset(int offset) {
        myOffset += offset;
      }

      public void incCurrentWord() {
        incCurrentWord(1);
      }

      // Text spanned by wordCount consecutive words starting at the current
      // word (including any delimiters between them).
      public String getWordSequence(int wordCount) {
        int start = myWords[myCurrentWord].getStart();
        int end = myWords[myCurrentWord+wordCount-1].getEnd();
        return myText.substring(start, end);
      }

      public void incCurrentWord(int inserted) {
        myCurrentWord += inserted;
      }

      public Word getCurrentWord() {
        return myWords[myCurrentWord];
      }

      // Text between the processed offset and the current word's start.
      public String getCurrentWordPrefix() {
        return getCurrentWord().getPrefix(getProcessedOffset());
      }

      // Text after the current word up to the next word, plus one look-ahead
      // character ('\n' at end of text).
      public String getCurrentWordPostfixAndOneMore() {
        int nextStart = myCurrentWord < myWords.length - 1 ? myWords[myCurrentWord + 1].getStart() : myText.length();
        Word word = getCurrentWord();
        String postfix = myText.substring(word.getEnd(), nextStart);
        return postfix + (nextStart == myText.length() ? '\n' : myText.charAt(nextStart));
      }

      // Remaining text after all words have been consumed.
      public String getNotProcessedTail() {
        LOG.assertTrue(myCurrentWord == myWords.length);
        return myText.substring(myOffset, myText.length());
      }

      // Character immediately before the processed offset ('\n' at start).
      public char getPrevChar() {
        return myOffset == 0 ? '\n' : myText.charAt(myOffset - 1);
      }

      public void addOneSide(String prefix, int wordCount) {
        if (prefix.length() > 0) myBuilder.addOneSide(prefix, mySide);
        myBuilder.addOneSide(getWordSequence(wordCount), mySide);
        incCurrentWord(wordCount);
      }
    }
  }
}
| apache-2.0 |
papicella/snappy-store | gemfirexd/core/src/main/java/com/pivotal/gemfirexd/internal/impl/sql/compile/MaxMinAggregateDefinition.java | 3349 | /*
Derby - Class com.pivotal.gemfirexd.internal.impl.sql.compile.MaxMinAggregateDefinition
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package com.pivotal.gemfirexd.internal.impl.sql.compile;
import com.pivotal.gemfirexd.internal.catalog.TypeDescriptor;
import com.pivotal.gemfirexd.internal.iapi.error.StandardException;
import com.pivotal.gemfirexd.internal.iapi.reference.ClassName;
import com.pivotal.gemfirexd.internal.iapi.services.context.ContextService;
import com.pivotal.gemfirexd.internal.iapi.services.sanity.SanityManager;
import com.pivotal.gemfirexd.internal.iapi.sql.conn.LanguageConnectionContext;
import com.pivotal.gemfirexd.internal.iapi.types.DataTypeDescriptor;
import com.pivotal.gemfirexd.internal.iapi.types.NumberDataValue;
import com.pivotal.gemfirexd.internal.iapi.types.TypeId;
import com.pivotal.gemfirexd.internal.impl.sql.execute.MaxMinAggregator;
/**
 * Definition for the MAX()/MIN() aggregates.
 * <p>
 * Works on any orderable type; the result type is the (nullable) input type.
 */
public class MaxMinAggregateDefinition
		implements AggregateDefinition
{
	// true for MAX, false for MIN; assigned by the parser via setMaxOrMin().
	private boolean isMax;

	/**
	 * Niladic constructor. Does nothing. For ease of use only.
	 */
	public MaxMinAggregateDefinition() { super(); }

	/**
	 * Determines the result datatype for MAX()/MIN().
	 * <p>
	 * MIN and MAX can be applied to any orderable type; since they may
	 * return null, the result is a nullable variant of the input type.
	 *
	 * @param inputType the input type, either a user type or a java.lang object
	 * @param aggregatorClass receives the aggregator implementation class name on success
	 * @param isAvg unused for MAX/MIN (kept for the shared interface)
	 * @return the output type, or null if MAX/MIN cannot operate on a
	 *         value expression of this type.
	 */
	public final DataTypeDescriptor getAggregator(DataTypeDescriptor inputType,
				StringBuilder aggregatorClass, boolean isAvg)
	{
		final LanguageConnectionContext lcc = (LanguageConnectionContext)
			ContextService.getContext(LanguageConnectionContext.CONTEXT_ID);

		// MIN and MAX may return null, so use a nullable variant of the input type.
		final DataTypeDescriptor resultType = inputType.getNullabilityType(true);
		final TypeId typeId = resultType.getTypeId();

		// Any orderable type can be aggregated; result type equals (nullable) input type.
		if (!typeId.orderable(lcc.getLanguageConnectionFactory().getClassFactory()))
		{
			return null;
		}
		aggregatorClass.append(ClassName.MaxMinAggregator);
		return resultType;
	}

	/**
	 * This is set by the parser.
	 */
	public final void setMaxOrMin(boolean isMax)
	{
		this.isMax = isMax;
	}

	/**
	 * Return if the aggregator class is for min/max.
	 *
	 * @return boolean true/false
	 */
	public final boolean isMax()
	{
		return (isMax);
	}
}
| apache-2.0 |
taohaox/CellDevourer | src/com/alibaba/fastjson/parser/deserializer/IntegerDeserializer.java | 1613 | package com.alibaba.fastjson.parser.deserializer;
import java.lang.reflect.Type;
import java.math.BigDecimal;
import java.util.concurrent.atomic.AtomicInteger;
import com.alibaba.fastjson.parser.DefaultJSONParser;
import com.alibaba.fastjson.parser.JSONLexer;
import com.alibaba.fastjson.parser.JSONToken;
import com.alibaba.fastjson.util.TypeUtils;
public class IntegerDeserializer implements ObjectDeserializer {

    /** Shared stateless singleton instance. */
    public final static IntegerDeserializer instance = new IntegerDeserializer();

    /**
     * Deserializes the next JSON value as an {@link Integer}, or as an
     * {@link AtomicInteger} when that is the requested target type.
     * Returns null for a JSON null.
     */
    @SuppressWarnings("unchecked")
    public <T> T deserialze(DefaultJSONParser parser, Type clazz, Object fieldName) {
        final JSONLexer lexer = parser.getLexer();

        if (lexer.token() == JSONToken.NULL) {
            lexer.nextToken(JSONToken.COMMA);
            return null;
        }

        final Integer result;
        if (lexer.token() == JSONToken.LITERAL_INT) {
            // plain integer literal
            int val = lexer.intValue();
            lexer.nextToken(JSONToken.COMMA);
            result = Integer.valueOf(val);
        } else if (lexer.token() == JSONToken.LITERAL_FLOAT) {
            // float literal: truncate towards zero, matching BigDecimal.intValue()
            BigDecimal decimal = lexer.decimalValue();
            lexer.nextToken(JSONToken.COMMA);
            result = Integer.valueOf(decimal.intValue());
        } else {
            // anything else (string, object, ...): parse then coerce
            result = TypeUtils.castToInt(parser.parse());
        }

        if (clazz == AtomicInteger.class) {
            return (T) new AtomicInteger(result.intValue());
        }
        return (T) result;
    }

    public int getFastMatchToken() {
        return JSONToken.LITERAL_INT;
    }
}
| apache-2.0 |
Technoboy-/RocketMQ | rocketmq-remoting/src/main/java/com/alibaba/rocketmq/remoting/netty/NettyEncoder.java | 2224 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.rocketmq.remoting.netty;
import com.alibaba.rocketmq.remoting.common.RemotingHelper;
import com.alibaba.rocketmq.remoting.common.RemotingUtil;
import com.alibaba.rocketmq.remoting.protocol.RemotingCommand;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToByteEncoder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
/**
 * Encodes a {@link RemotingCommand} (header and optional body) into the
 * outbound {@link ByteBuf}. On any encoding failure the error is logged and
 * the channel is closed rather than propagating the exception.
 *
 * @author shijia.wxr
 */
public class NettyEncoder extends MessageToByteEncoder<RemotingCommand> {
    private static final Logger log = LoggerFactory.getLogger(RemotingHelper.RemotingLogName);


    @Override
    public void encode(ChannelHandlerContext ctx, RemotingCommand remotingCommand, ByteBuf out)
            throws Exception {
        try {
            // write the encoded header first, then the raw body (if present)
            ByteBuffer header = remotingCommand.encodeHeader();
            out.writeBytes(header);

            byte[] body = remotingCommand.getBody();
            if (body != null) {
                out.writeBytes(body);
            }
        } catch (Exception e) {
            // log with the remote peer address for diagnosis, then drop the connection
            log.error("encode exception, " + RemotingHelper.parseChannelRemoteAddr(ctx.channel()), e);
            if (remotingCommand != null) {
                log.error(remotingCommand.toString());
            }
            RemotingUtil.closeChannel(ctx.channel());
        }
    }
}
| apache-2.0 |
koscejev/camel | camel-core/src/main/java/org/apache/camel/model/ProcessorDefinitionHelper.java | 36088 | /**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.model;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledExecutorService;
import javax.xml.namespace.QName;
import org.apache.camel.CamelContext;
import org.apache.camel.Exchange;
import org.apache.camel.spi.ExecutorServiceManager;
import org.apache.camel.spi.RouteContext;
import org.apache.camel.util.CamelContextHelper;
import org.apache.camel.util.IntrospectionSupport;
import org.apache.camel.util.ObjectHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Helper class for ProcessorDefinition and the other model classes.
*/
public final class ProcessorDefinitionHelper {
private static final Logger LOG = LoggerFactory.getLogger(ProcessorDefinitionHelper.class);
private static final ThreadLocal<RestoreAction> CURRENT_RESTORE_ACTION = new ThreadLocal<RestoreAction>();
    // Utility class with only static helpers; never instantiated.
    private ProcessorDefinitionHelper() {
    }
    /**
     * Looks for the given type in the list of outputs and recurring all the children as well.
     *
     * @param outputs list of outputs, can be null or empty.
     * @param type the type to look for
     * @return an iterator over the found definitions (empty if none found, never null)
     */
    public static <T> Iterator<T> filterTypeInOutputs(List<ProcessorDefinition<?>> outputs, Class<T> type) {
        // -1 means no maximum traversal depth
        return filterTypeInOutputs(outputs, type, -1);
    }
/**
* Looks for the given type in the list of outputs and recurring all the children as well.
*
* @param outputs list of outputs, can be null or empty.
* @param type the type to look for
* @param maxDeep maximum levels deep to traverse
* @return the found definitions, or <tt>null</tt> if not found
*/
public static <T> Iterator<T> filterTypeInOutputs(List<ProcessorDefinition<?>> outputs, Class<T> type, int maxDeep) {
List<T> found = new ArrayList<T>();
doFindType(outputs, type, found, maxDeep);
return found.iterator();
}
/**
* Looks for the given type in the list of outputs and recurring all the children as well.
* Will stop at first found and return it.
*
* @param outputs list of outputs, can be null or empty.
* @param type the type to look for
* @return the first found type, or <tt>null</tt> if not found
*/
public static <T> T findFirstTypeInOutputs(List<ProcessorDefinition<?>> outputs, Class<T> type) {
List<T> found = new ArrayList<T>();
doFindType(outputs, type, found, -1);
if (found.isEmpty()) {
return null;
}
return found.iterator().next();
}
/**
* Is the given child the first in the outputs from the parent?
*
* @param parentType the type the parent must be
* @param node the node
* @return <tt>true</tt> if first child, <tt>false</tt> otherwise
*/
public static boolean isFirstChildOfType(Class<?> parentType, ProcessorDefinition<?> node) {
if (node == null || node.getParent() == null) {
return false;
}
if (node.getParent().getOutputs().isEmpty()) {
return false;
}
if (!(node.getParent().getClass().equals(parentType))) {
return false;
}
return node.getParent().getOutputs().get(0).equals(node);
}
/**
* Is the given node parent(s) of the given type
*
* @param parentType the parent type
* @param node the current node
* @param recursive whether or not to check grand parent(s) as well
* @return <tt>true</tt> if parent(s) is of given type, <tt>false</tt> otherwise
*/
public static boolean isParentOfType(Class<?> parentType, ProcessorDefinition<?> node, boolean recursive) {
if (node == null || node.getParent() == null) {
return false;
}
if (parentType.isAssignableFrom(node.getParent().getClass())) {
return true;
} else if (recursive) {
// recursive up the tree of parents
return isParentOfType(parentType, node.getParent(), true);
} else {
// no match
return false;
}
}
/**
* Gets the route definition the given node belongs to.
*
* @param node the node
* @return the route, or <tt>null</tt> if not possible to find
*/
public static RouteDefinition getRoute(ProcessorDefinition<?> node) {
if (node == null) {
return null;
}
ProcessorDefinition<?> def = node;
// drill to the top
while (def != null && def.getParent() != null) {
def = def.getParent();
}
if (def instanceof RouteDefinition) {
return (RouteDefinition) def;
} else {
// not found
return null;
}
}
/**
* Gets the route id the given node belongs to.
*
* @param node the node
* @return the route id, or <tt>null</tt> if not possible to find
*/
public static String getRouteId(ProcessorDefinition<?> node) {
RouteDefinition route = getRoute(node);
return route != null ? route.getId() : null;
}
/**
* Traverses the node, including its children (recursive), and gathers all the node ids.
*
* @param node the target node
* @param set set to store ids, if <tt>null</tt> a new set will be created
* @param onlyCustomId whether to only store custom assigned ids (ie. {@link org.apache.camel.model.OptionalIdentifiedDefinition#hasCustomIdAssigned()}
* @param includeAbstract whether to include abstract nodes (ie. {@link org.apache.camel.model.ProcessorDefinition#isAbstract()}
* @return the set with the found ids.
*/
public static Set<String> gatherAllNodeIds(ProcessorDefinition<?> node, Set<String> set,
boolean onlyCustomId, boolean includeAbstract) {
if (node == null) {
return set;
}
// skip abstract
if (node.isAbstract() && !includeAbstract) {
return set;
}
if (set == null) {
set = new LinkedHashSet<String>();
}
// add ourselves
if (node.getId() != null) {
if (!onlyCustomId || node.hasCustomIdAssigned() && onlyCustomId) {
set.add(node.getId());
}
}
// traverse outputs and recursive children as well
List<ProcessorDefinition<?>> children = node.getOutputs();
if (children != null && !children.isEmpty()) {
for (ProcessorDefinition<?> child : children) {
// traverse children also
gatherAllNodeIds(child, set, onlyCustomId, includeAbstract);
}
}
return set;
}
private static <T> void doFindType(List<ProcessorDefinition<?>> outputs, Class<T> type, List<T> found, int maxDeep) {
// do we have any top level abstracts, then we should max deep one more level down
// as that is really what we want to traverse as well
if (maxDeep > 0) {
for (ProcessorDefinition<?> out : outputs) {
if (out.isAbstract() && out.isTopLevelOnly()) {
maxDeep = maxDeep + 1;
break;
}
}
}
// start from level 1
doFindType(outputs, type, found, 1, maxDeep);
}
    // Recursive worker for the type search. Walks the output tree, collecting every
    // definition assignable to the requested type, with special-case handling for
    // choice, try/catch/finally and output-wrapping definitions.
    //
    // NOTE(review): the ++current below mutates the depth counter inside the loop, so
    // each recursive call made while processing this output list sees an ever deeper
    // level; presumably intentional for the maxDeep cap — verify before changing.
    @SuppressWarnings({"unchecked", "rawtypes"})
    private static <T> void doFindType(List<ProcessorDefinition<?>> outputs, Class<T> type, List<T> found, int current, int maxDeep) {
        if (outputs == null || outputs.isEmpty()) {
            return;
        }
        // break out when past the maximum allowed depth (maxDeep <= 0 means unlimited)
        if (maxDeep > 0 && current > maxDeep) {
            return;
        }
        for (ProcessorDefinition out : outputs) {
            // send is much common
            if (out instanceof SendDefinition) {
                SendDefinition send = (SendDefinition) out;
                List<ProcessorDefinition<?>> children = send.getOutputs();
                doFindType(children, type, found, ++current, maxDeep);
            }
            // special for choice: traverse each when clause and the optional otherwise
            if (out instanceof ChoiceDefinition) {
                ChoiceDefinition choice = (ChoiceDefinition) out;
                // ensure to add ourself if we match also
                if (type.isInstance(choice)) {
                    found.add((T) choice);
                }
                for (WhenDefinition when : choice.getWhenClauses()) {
                    if (type.isInstance(when)) {
                        found.add((T) when);
                    }
                    List<ProcessorDefinition<?>> children = when.getOutputs();
                    doFindType(children, type, found, ++current, maxDeep);
                }
                // otherwise is optional
                if (choice.getOtherwise() != null) {
                    List<ProcessorDefinition<?>> children = choice.getOtherwise().getOutputs();
                    doFindType(children, type, found, ++current, maxDeep);
                }
                // do not check children as we already did that
                continue;
            }
            // special for try ... catch ... finally: traverse each section separately
            if (out instanceof TryDefinition) {
                TryDefinition doTry = (TryDefinition) out;
                // ensure to add ourself if we match also
                if (type.isInstance(doTry)) {
                    found.add((T) doTry);
                }
                List<ProcessorDefinition<?>> doTryOut = doTry.getOutputsWithoutCatches();
                doFindType(doTryOut, type, found, ++current, maxDeep);
                List<CatchDefinition> doTryCatch = doTry.getCatchClauses();
                for (CatchDefinition doCatch : doTryCatch) {
                    doFindType(doCatch.getOutputs(), type, found, ++current, maxDeep);
                }
                if (doTry.getFinallyClause() != null) {
                    doFindType(doTry.getFinallyClause().getOutputs(), type, found, ++current, maxDeep);
                }
                // do not check children as we already did that
                continue;
            }
            // special for some types which has special outputs
            if (out instanceof OutputDefinition) {
                OutputDefinition outDef = (OutputDefinition) out;
                // ensure to add ourself if we match also
                if (type.isInstance(outDef)) {
                    found.add((T) outDef);
                }
                List<ProcessorDefinition<?>> outDefOut = outDef.getOutputs();
                doFindType(outDefOut, type, found, ++current, maxDeep);
                // do not check children as we already did that
                continue;
            }
            // plain definition: collect if it matches
            if (type.isInstance(out)) {
                found.add((T) out);
            }
            // try children as well
            List<ProcessorDefinition<?>> children = out.getOutputs();
            doFindType(children, type, found, ++current, maxDeep);
        }
    }
/**
* Is there any outputs in the given list.
* <p/>
* Is used for check if the route output has any real outputs (non abstracts)
*
* @param outputs the outputs
* @param excludeAbstract whether or not to exclude abstract outputs (e.g. skip onException etc.)
* @return <tt>true</tt> if has outputs, otherwise <tt>false</tt> is returned
*/
@SuppressWarnings({"unchecked", "rawtypes"})
public static boolean hasOutputs(List<ProcessorDefinition<?>> outputs, boolean excludeAbstract) {
if (outputs == null || outputs.isEmpty()) {
return false;
}
if (!excludeAbstract) {
return !outputs.isEmpty();
}
for (ProcessorDefinition output : outputs) {
if (output instanceof TransactedDefinition || output instanceof PolicyDefinition) {
// special for those as they wrap entire output, so we should just check its output
return hasOutputs(output.getOutputs(), excludeAbstract);
}
if (!output.isAbstract()) {
return true;
}
}
return false;
}
/**
* Determines whether a new thread pool will be created or not.
* <p/>
* This is used to know if a new thread pool will be created, and therefore is not shared by others, and therefore
* exclusive to the definition.
*
* @param routeContext the route context
* @param definition the node definition which may leverage executor service.
* @param useDefault whether to fallback and use a default thread pool, if no explicit configured
* @return <tt>true</tt> if a new thread pool will be created, <tt>false</tt> if not
* @see #getConfiguredExecutorService(org.apache.camel.spi.RouteContext, String, ExecutorServiceAwareDefinition, boolean)
*/
public static boolean willCreateNewThreadPool(RouteContext routeContext, ExecutorServiceAwareDefinition<?> definition, boolean useDefault) {
ExecutorServiceManager manager = routeContext.getCamelContext().getExecutorServiceManager();
ObjectHelper.notNull(manager, "ExecutorServiceManager", routeContext.getCamelContext());
if (definition.getExecutorService() != null) {
// no there is a custom thread pool configured
return false;
} else if (definition.getExecutorServiceRef() != null) {
ExecutorService answer = routeContext.getCamelContext().getRegistry().lookupByNameAndType(definition.getExecutorServiceRef(), ExecutorService.class);
// if no existing thread pool, then we will have to create a new thread pool
return answer == null;
} else if (useDefault) {
return true;
}
return false;
}
/**
* Will lookup in {@link org.apache.camel.spi.Registry} for a {@link ExecutorService} registered with the given
* <tt>executorServiceRef</tt> name.
* <p/>
* This method will lookup for configured thread pool in the following order
* <ul>
* <li>from the {@link org.apache.camel.spi.Registry} if found</li>
* <li>from the known list of {@link org.apache.camel.spi.ThreadPoolProfile ThreadPoolProfile(s)}.</li>
* <li>if none found, then <tt>null</tt> is returned.</li>
* </ul>
*
* @param routeContext the route context
* @param name name which is appended to the thread name, when the {@link java.util.concurrent.ExecutorService}
* is created based on a {@link org.apache.camel.spi.ThreadPoolProfile}.
* @param source the source to use the thread pool
* @param executorServiceRef reference name of the thread pool
* @return the executor service, or <tt>null</tt> if none was found.
*/
public static ExecutorService lookupExecutorServiceRef(RouteContext routeContext, String name,
Object source, String executorServiceRef) {
ExecutorServiceManager manager = routeContext.getCamelContext().getExecutorServiceManager();
ObjectHelper.notNull(manager, "ExecutorServiceManager", routeContext.getCamelContext());
ObjectHelper.notNull(executorServiceRef, "executorServiceRef");
// lookup in registry first and use existing thread pool if exists
ExecutorService answer = routeContext.getCamelContext().getRegistry().lookupByNameAndType(executorServiceRef, ExecutorService.class);
if (answer == null) {
// then create a thread pool assuming the ref is a thread pool profile id
answer = manager.newThreadPool(source, name, executorServiceRef);
}
return answer;
}
/**
* Will lookup and get the configured {@link java.util.concurrent.ExecutorService} from the given definition.
* <p/>
* This method will lookup for configured thread pool in the following order
* <ul>
* <li>from the definition if any explicit configured executor service.</li>
* <li>from the {@link org.apache.camel.spi.Registry} if found</li>
* <li>from the known list of {@link org.apache.camel.spi.ThreadPoolProfile ThreadPoolProfile(s)}.</li>
* <li>if none found, then <tt>null</tt> is returned.</li>
* </ul>
* The various {@link ExecutorServiceAwareDefinition} should use this helper method to ensure they support
* configured executor services in the same coherent way.
*
* @param routeContext the route context
* @param name name which is appended to the thread name, when the {@link java.util.concurrent.ExecutorService}
* is created based on a {@link org.apache.camel.spi.ThreadPoolProfile}.
* @param definition the node definition which may leverage executor service.
* @param useDefault whether to fallback and use a default thread pool, if no explicit configured
* @return the configured executor service, or <tt>null</tt> if none was configured.
* @throws IllegalArgumentException is thrown if lookup of executor service in {@link org.apache.camel.spi.Registry} was not found
*/
public static ExecutorService getConfiguredExecutorService(RouteContext routeContext, String name,
ExecutorServiceAwareDefinition<?> definition,
boolean useDefault) throws IllegalArgumentException {
ExecutorServiceManager manager = routeContext.getCamelContext().getExecutorServiceManager();
ObjectHelper.notNull(manager, "ExecutorServiceManager", routeContext.getCamelContext());
// prefer to use explicit configured executor on the definition
if (definition.getExecutorService() != null) {
return definition.getExecutorService();
} else if (definition.getExecutorServiceRef() != null) {
// lookup in registry first and use existing thread pool if exists
ExecutorService answer = lookupExecutorServiceRef(routeContext, name, definition, definition.getExecutorServiceRef());
if (answer == null) {
throw new IllegalArgumentException("ExecutorServiceRef " + definition.getExecutorServiceRef() + " not found in registry or as a thread pool profile.");
}
return answer;
} else if (useDefault) {
return manager.newDefaultThreadPool(definition, name);
}
return null;
}
/**
* Will lookup in {@link org.apache.camel.spi.Registry} for a {@link ScheduledExecutorService} registered with the given
* <tt>executorServiceRef</tt> name.
* <p/>
* This method will lookup for configured thread pool in the following order
* <ul>
* <li>from the {@link org.apache.camel.spi.Registry} if found</li>
* <li>from the known list of {@link org.apache.camel.spi.ThreadPoolProfile ThreadPoolProfile(s)}.</li>
* <li>if none found, then <tt>null</tt> is returned.</li>
* </ul>
*
* @param routeContext the route context
* @param name name which is appended to the thread name, when the {@link java.util.concurrent.ExecutorService}
* is created based on a {@link org.apache.camel.spi.ThreadPoolProfile}.
* @param source the source to use the thread pool
* @param executorServiceRef reference name of the thread pool
* @return the executor service, or <tt>null</tt> if none was found.
*/
public static ScheduledExecutorService lookupScheduledExecutorServiceRef(RouteContext routeContext, String name,
Object source, String executorServiceRef) {
ExecutorServiceManager manager = routeContext.getCamelContext().getExecutorServiceManager();
ObjectHelper.notNull(manager, "ExecutorServiceManager", routeContext.getCamelContext());
ObjectHelper.notNull(executorServiceRef, "executorServiceRef");
// lookup in registry first and use existing thread pool if exists
ScheduledExecutorService answer = routeContext.getCamelContext().getRegistry().lookupByNameAndType(executorServiceRef, ScheduledExecutorService.class);
if (answer == null) {
// then create a thread pool assuming the ref is a thread pool profile id
answer = manager.newScheduledThreadPool(source, name, executorServiceRef);
}
return answer;
}
/**
* Will lookup and get the configured {@link java.util.concurrent.ScheduledExecutorService} from the given definition.
* <p/>
* This method will lookup for configured thread pool in the following order
* <ul>
* <li>from the definition if any explicit configured executor service.</li>
* <li>from the {@link org.apache.camel.spi.Registry} if found</li>
* <li>from the known list of {@link org.apache.camel.spi.ThreadPoolProfile ThreadPoolProfile(s)}.</li>
* <li>if none found, then <tt>null</tt> is returned.</li>
* </ul>
* The various {@link ExecutorServiceAwareDefinition} should use this helper method to ensure they support
* configured executor services in the same coherent way.
*
* @param routeContext the rout context
* @param name name which is appended to the thread name, when the {@link java.util.concurrent.ExecutorService}
* is created based on a {@link org.apache.camel.spi.ThreadPoolProfile}.
* @param definition the node definition which may leverage executor service.
* @param useDefault whether to fallback and use a default thread pool, if no explicit configured
* @return the configured executor service, or <tt>null</tt> if none was configured.
* @throws IllegalArgumentException is thrown if the found instance is not a ScheduledExecutorService type,
* or lookup of executor service in {@link org.apache.camel.spi.Registry} was not found
*/
public static ScheduledExecutorService getConfiguredScheduledExecutorService(RouteContext routeContext, String name,
ExecutorServiceAwareDefinition<?> definition,
boolean useDefault) throws IllegalArgumentException {
ExecutorServiceManager manager = routeContext.getCamelContext().getExecutorServiceManager();
ObjectHelper.notNull(manager, "ExecutorServiceManager", routeContext.getCamelContext());
// prefer to use explicit configured executor on the definition
if (definition.getExecutorService() != null) {
ExecutorService executorService = definition.getExecutorService();
if (executorService instanceof ScheduledExecutorService) {
return (ScheduledExecutorService) executorService;
}
throw new IllegalArgumentException("ExecutorServiceRef " + definition.getExecutorServiceRef() + " is not an ScheduledExecutorService instance");
} else if (definition.getExecutorServiceRef() != null) {
ScheduledExecutorService answer = lookupScheduledExecutorServiceRef(routeContext, name, definition, definition.getExecutorServiceRef());
if (answer == null) {
throw new IllegalArgumentException("ExecutorServiceRef " + definition.getExecutorServiceRef() + " not found in registry or as a thread pool profile.");
}
return answer;
} else if (useDefault) {
return manager.newDefaultScheduledThreadPool(definition, name);
}
return null;
}
    /**
     * The RestoreAction is used to track all the undo/restore actions
     * that need to be performed to undo any resolution to property placeholders
     * that have been applied to the camel route defs. This class is private
     * so it does not get used directly. It's mainly used by the {@see createPropertyPlaceholdersChangeReverter()}
     * method.
     * <p>
     * Instances form a per-thread stack via {@code prevChange}; running an
     * instance pops it by restoring the previous one into the thread-local.
     */
    private static final class RestoreAction implements Runnable {

        // the action that was current on this thread before this one was installed (may be null)
        private final RestoreAction prevChange;
        // undo steps registered while this action was the current one, run in registration order
        private final ArrayList<Runnable> actions = new ArrayList<Runnable>();

        private RestoreAction(RestoreAction prevChange) {
            this.prevChange = prevChange;
        }

        @Override
        public void run() {
            // execute all registered undo steps, then clear so re-running is a no-op
            for (Runnable action : actions) {
                action.run();
            }
            actions.clear();
            // pop this action off the per-thread stack
            if (prevChange == null) {
                CURRENT_RESTORE_ACTION.remove();
            } else {
                CURRENT_RESTORE_ACTION.set(prevChange);
            }
        }
    }
/**
* Creates a Runnable which when run will revert property placeholder
* updates to the camel route definitions that were done after this method
* is called. The Runnable MUST be executed and MUST be executed in the
* same thread this method is called from. Therefore it's recommend you
* use it in try/finally block like in the following example:
* <p/>
* <pre>
* Runnable undo = ProcessorDefinitionHelper.createPropertyPlaceholdersChangeReverter();
* try {
* // All property resolutions in this block will be reverted.
* } finally {
* undo.run();
* }
* </pre>
*
* @return a Runnable that when run, will revert any property place holder
* changes that occurred on the current thread .
*/
public static Runnable createPropertyPlaceholdersChangeReverter() {
RestoreAction prevChanges = CURRENT_RESTORE_ACTION.get();
RestoreAction rc = new RestoreAction(prevChanges);
CURRENT_RESTORE_ACTION.set(rc);
return rc;
}
private static void addRestoreAction(final Object target, final Map<String, Object> properties) {
if (properties.isEmpty()) {
return;
}
RestoreAction restoreAction = CURRENT_RESTORE_ACTION.get();
if (restoreAction == null) {
return;
}
restoreAction.actions.add(new Runnable() {
@Override
public void run() {
try {
IntrospectionSupport.setProperties(null, target, properties);
} catch (Exception e) {
LOG.warn("Could not restore definition properties", e);
}
}
});
}
public static void addPropertyPlaceholdersChangeRevertAction(Runnable action) {
RestoreAction restoreAction = CURRENT_RESTORE_ACTION.get();
if (restoreAction == null) {
return;
}
restoreAction.actions.add(action);
}
    /**
     * Inspects the given definition and resolves any property placeholders from its properties.
     * <p/>
     * This implementation will check all the getter/setter pairs on this instance and for all the values
     * (which is a String type) will be property placeholder resolved.
     *
     * @param routeContext the route context
     * @param definition the definition
     * @throws Exception is thrown if property placeholders was used and there was an error resolving them
     * @see org.apache.camel.CamelContext#resolvePropertyPlaceholders(String)
     * @see org.apache.camel.component.properties.PropertiesComponent
     * @deprecated use {@link #resolvePropertyPlaceholders(org.apache.camel.CamelContext, Object)}
     */
    @Deprecated
    public static void resolvePropertyPlaceholders(RouteContext routeContext, Object definition) throws Exception {
        // delegate to the CamelContext based variant
        resolvePropertyPlaceholders(routeContext.getCamelContext(), definition);
    }
    /**
     * Inspects the given definition and resolves any property placeholders from its properties.
     * <p/>
     * This implementation will check all the getter/setter pairs on this instance and for all the values
     * (which is a String type) will be property placeholder resolved. Any value that actually changes is
     * written back via its setter, and an undo step is registered with the current thread's restore
     * scope (if one is active) so the change can be reverted.
     *
     * @param camelContext the Camel context
     * @param definition the definition
     * @throws Exception is thrown if property placeholders was used and there was an error resolving them
     * @see org.apache.camel.CamelContext#resolvePropertyPlaceholders(String)
     * @see org.apache.camel.component.properties.PropertiesComponent
     */
    public static void resolvePropertyPlaceholders(CamelContext camelContext, Object definition) throws Exception {
        LOG.trace("Resolving property placeholders for: {}", definition);

        // find all getter/setter which we can use for property placeholders
        Map<String, Object> properties = new HashMap<String, Object>();
        IntrospectionSupport.getProperties(definition, properties, null);

        ProcessorDefinition<?> processorDefinition = null;
        if (definition instanceof ProcessorDefinition) {
            processorDefinition = (ProcessorDefinition<?>) definition;
        }
        // include additional properties which have the Camel placeholder QName
        // and when the definition parameter is this (otherAttributes belong to this)
        if (processorDefinition != null && processorDefinition.getOtherAttributes() != null) {
            for (QName key : processorDefinition.getOtherAttributes().keySet()) {
                if (Constants.PLACEHOLDER_QNAME.equals(key.getNamespaceURI())) {
                    String local = key.getLocalPart();
                    Object value = processorDefinition.getOtherAttributes().get(key);
                    if (value != null && value instanceof String) {
                        // enforce a properties component to be created if none existed
                        CamelContextHelper.lookupPropertiesComponent(camelContext, true);

                        // value must be enclosed with placeholder tokens
                        String s = (String) value;
                        String prefixToken = camelContext.getPropertyPrefixToken();
                        String suffixToken = camelContext.getPropertySuffixToken();
                        if (prefixToken == null) {
                            throw new IllegalArgumentException("Property with name [" + local + "] uses property placeholders; however, no properties component is configured.");
                        }

                        // wrap the raw value in prefix/suffix tokens if not already present
                        if (!s.startsWith(prefixToken)) {
                            s = prefixToken + s;
                        }
                        if (!s.endsWith(suffixToken)) {
                            s = s + suffixToken;
                        }
                        value = s;
                    }
                    properties.put(local, value);
                }
            }
        }

        // original values of the properties that were changed, used to build the undo step
        Map<String, Object> changedProperties = new HashMap<String, Object>();
        if (!properties.isEmpty()) {
            LOG.trace("There are {} properties on: {}", properties.size(), definition);
            // lookup and resolve properties for String based properties
            for (Map.Entry<String, Object> entry : properties.entrySet()) {
                // the name is always a String
                String name = entry.getKey();
                Object value = entry.getValue();
                if (value instanceof String) {
                    // value must be a String, as a String is the key for a property placeholder
                    String text = (String) value;
                    text = camelContext.resolvePropertyPlaceholders(text);
                    // NOTE(review): deliberate reference comparison - presumably
                    // resolvePropertyPlaceholders returns the same String instance when no
                    // placeholder was replaced; verify before changing to equals()
                    if (text != value) {
                        // invoke setter as the text has changed
                        boolean changed = IntrospectionSupport.setProperty(camelContext.getTypeConverter(), definition, name, text);
                        if (!changed) {
                            throw new IllegalArgumentException("No setter to set property: " + name + " to: " + text + " on: " + definition);
                        }
                        changedProperties.put(name, value);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("Changed property [{}] from: {} to: {}", new Object[]{name, value, text});
                        }
                    }
                }
            }
        }
        addRestoreAction(definition, changedProperties);
    }
/**
* Inspects the given definition and resolves known fields
* <p/>
* This implementation will check all the getter/setter pairs on this instance and for all the values
* (which is a String type) will check if it refers to a known field (such as on Exchange).
*
* @param definition the definition
*/
public static void resolveKnownConstantFields(Object definition) throws Exception {
LOG.trace("Resolving known fields for: {}", definition);
// find all String getter/setter
Map<String, Object> properties = new HashMap<String, Object>();
IntrospectionSupport.getProperties(definition, properties, null);
Map<String, Object> changedProperties = new HashMap<String, Object>();
if (!properties.isEmpty()) {
LOG.trace("There are {} properties on: {}", properties.size(), definition);
// lookup and resolve known constant fields for String based properties
for (Map.Entry<String, Object> entry : properties.entrySet()) {
String name = entry.getKey();
Object value = entry.getValue();
if (value instanceof String) {
// we can only resolve String typed values
String text = (String) value;
// is the value a known field (currently we only support constants from Exchange.class)
if (text.startsWith("Exchange.")) {
String field = ObjectHelper.after(text, "Exchange.");
String constant = ObjectHelper.lookupConstantFieldValue(Exchange.class, field);
if (constant != null) {
// invoke setter as the text has changed
IntrospectionSupport.setProperty(definition, name, constant);
changedProperties.put(name, value);
if (LOG.isDebugEnabled()) {
LOG.debug("Changed property [{}] from: {} to: {}", new Object[]{name, value, constant});
}
} else {
throw new IllegalArgumentException("Constant field with name: " + field + " not found on Exchange.class");
}
}
}
}
}
addRestoreAction(definition, changedProperties);
}
}
| apache-2.0 |
Pushjet/Pushjet-Android | gradle/wrapper/dists/gradle-1.12-all/4ff8jj5a73a7zgj5nnzv1ubq0/gradle-1.12/src/native/org/gradle/internal/nativeplatform/console/UnixConsoleMetaData.java | 1607 | /*
* Copyright 2012 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.gradle.internal.nativeplatform.console;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public class UnixConsoleMetaData implements ConsoleMetaData {
    public static final Logger LOGGER = LoggerFactory.getLogger(UnixConsoleMetaData.class);

    // whether stdout / stderr are attached to the terminal
    private final boolean attachedToStdout;
    private final boolean attachedToStderr;

    public UnixConsoleMetaData(boolean stdout, boolean stderr) {
        this.attachedToStdout = stdout;
        this.attachedToStderr = stderr;
    }

    public boolean isStdOut() {
        return attachedToStdout;
    }

    public boolean isStdErr() {
        return attachedToStderr;
    }

    /**
     * Returns the console width as advertised by the COLUMNS environment
     * variable, or 0 when the variable is absent or not parseable as a number.
     */
    public int getCols() {
        final String columns = System.getenv("COLUMNS");
        if (columns == null) {
            return 0;
        }
        try {
            return Integer.parseInt(columns);
        } catch (NumberFormatException ex) {
            LOGGER.debug("Cannot parse COLUMNS environment variable to get console width. Value: '{}'", columns);
            return 0;
        }
    }
}
| bsd-2-clause |
codeaudit/Foundry | Components/LearningCore/Test/gov/sandia/cognition/learning/function/distance/MinkowskiDistanceMetricTest.java | 6757 | /*
* File: MinkowskiDistanceMetricTest.java
* Authors: Justin Basilico
* Project: Cognitive Foundry
*
* Copyright 2011 Cognitive Foundry. All rights reserved.
*/
package gov.sandia.cognition.learning.function.distance;
import gov.sandia.cognition.math.matrix.Vector;
import gov.sandia.cognition.math.matrix.VectorFactory;
import gov.sandia.cognition.math.matrix.Vectorizable;
import gov.sandia.cognition.math.matrix.mtj.Vector2;
/**
* Unit tests for class MinkowskiDistanceMetric.
*
* @author Justin Basilico
* @since 3.3.3
*/
public class MinkowskiDistanceMetricTest
    extends MetricTestHarness<Vectorizable>
{

    /**
     * Creates a new test.
     *
     * @param testName
     *      The test name.
     */
    public MinkowskiDistanceMetricTest(
        final String testName)
    {
        super(testName);
    }

    /**
     * Test of constructors method, of class MinkowskiDistanceMetric.
     * Verifies the no-arg constructor uses DEFAULT_POWER and the one-arg
     * constructor stores the given power.
     */
    public void testConstructors()
    {
        double power = MinkowskiDistanceMetric.DEFAULT_POWER;
        MinkowskiDistanceMetric instance = new MinkowskiDistanceMetric();
        assertEquals(power, instance.getPower(), 0.0);

        power *= RANDOM.nextDouble();
        instance = new MinkowskiDistanceMetric(power);
        assertEquals(power, instance.getPower(), 0.0);
    }

    /**
     * Test of evaluate method, of class MinkowskiDistanceMetric.
     * Checks against the L1 and L2 norms of the difference vector.
     */
    public void testEvaluate()
    {
        double epsilon = 1e-10;
        int d = 10;
        Vector x = VectorFactory.getDefault().createUniformRandom(d, -100, +100, RANDOM);
        Vector y = VectorFactory.getDefault().createUniformRandom(d, -100, +100, RANDOM);

        MinkowskiDistanceMetric instance = new MinkowskiDistanceMetric();
        // power 1.0 is the Manhattan (L1) distance
        instance.setPower(1.0);
        assertEquals(x.minus(y).norm1(), instance.evaluate(x, y), epsilon);
        assertEquals(x.minus(y).norm1(), instance.evaluate(y, x), epsilon);
        assertEquals(0.0, instance.evaluate(x, x), epsilon);
        assertEquals(0.0, instance.evaluate(y, y), epsilon);

        // power 2.0 is the Euclidean (L2) distance
        instance.setPower(2.0);
        assertEquals(x.minus(y).norm2(), instance.evaluate(x, y), epsilon);
        assertEquals(x.minus(y).norm2(), instance.evaluate(y, x), epsilon);
        assertEquals(0.0, instance.evaluate(x, x), epsilon);
        assertEquals(0.0, instance.evaluate(y, y), epsilon);
    }

    /**
     * Test of getPower method, of class MinkowskiDistanceMetric.
     */
    public void testGetPower()
    {
        this.testSetPower();
    }

    /**
     * Test of setPower method, of class MinkowskiDistanceMetric.
     * Positive powers are accepted; non-positive powers must throw
     * IllegalArgumentException and leave the previous power unchanged.
     */
    public void testSetPower()
    {
        double power = MinkowskiDistanceMetric.DEFAULT_POWER;
        MinkowskiDistanceMetric instance = new MinkowskiDistanceMetric();
        assertEquals(power, instance.getPower(), 0.0);

        double[] goodValues = {0.001, 0.2, 1.0, 10.0, 123.0, RANDOM.nextDouble()};
        for (double goodValue : goodValues)
        {
            power = goodValue;
            instance.setPower(power);
            assertEquals(power, instance.getPower(), 0.0);
        }

        // each bad (non-positive) value must throw and not modify the power
        double[] badValues = {0.0, -0.2, -1.0, -RANDOM.nextDouble()};
        for (double badValue : badValues)
        {
            boolean exceptionThrown = false;
            try
            {
                instance.setPower(badValue);
            }
            catch (IllegalArgumentException e)
            {
                exceptionThrown = true;
            }
            finally
            {
                assertTrue(exceptionThrown);
            }
            assertEquals(power, instance.getPower(), 0.0);
        }
    }

    @Override
    public MinkowskiDistanceMetric createInstance()
    {
        // harness instance with a random power in (1.0, 11.0)
        return new MinkowskiDistanceMetric(RANDOM.nextDouble() * 10 + 1.0);
    }

    @Override
    public Vector generateRandomFirstType()
    {
        return VectorFactory.getDefault().createUniformRandom(5,-10.0,5.0, RANDOM);
    }

    @Override
    public void testKnownValues()
    {
        System.out.println( "Known Values" );

        MinkowskiDistanceMetric instance = new MinkowskiDistanceMetric(2.0);

        // Create four points to compute the distances between.
        Vector2 v00 = new Vector2(0.0, 0.0);
        Vector2 v01 = new Vector2(0.0, 1.0);
        Vector2 v10 = new Vector2(1.0, 0.0);
        Vector2 v11 = new Vector2(1.0, 1.0);

        // Make sure the distance to self is zero.
        assertEquals(0.0, instance.evaluate(v00, v00));
        assertEquals(0.0, instance.evaluate(v01, v01));
        assertEquals(0.0, instance.evaluate(v10, v10));
        assertEquals(0.0, instance.evaluate(v11, v11));

        // Make sure the distances between points are correct.
        assertEquals(1.0, instance.evaluate(v00, v01));
        assertEquals(1.0, instance.evaluate(v00, v10));
        assertEquals(1.0, instance.evaluate(v01, v00));
        assertEquals(1.0, instance.evaluate(v01, v11));
        assertEquals(Math.sqrt(2), instance.evaluate(v00, v11));

        // Make sure that it works with negative points.
        assertEquals(1.0, instance.evaluate(v00, v01.scale(-1.0)));
        assertEquals(1.0, instance.evaluate(v00, v10.scale(-1.0)));
        assertEquals(Math.sqrt(2), instance.evaluate(v00, v11.scale(-1.0)));

        // Test examples of manhattan distance by setting power to 1.0.
        instance.setPower(1.0);
        assertEquals(0.0, instance.evaluate(v00, v00));
        assertEquals(0.0, instance.evaluate(v01, v01));
        assertEquals(0.0, instance.evaluate(v10, v10));
        assertEquals(0.0, instance.evaluate(v11, v11));

        // Make sure the distances between points are correct.
        assertEquals(1.0, instance.evaluate(v00, v01));
        assertEquals(1.0, instance.evaluate(v00, v10));
        assertEquals(1.0, instance.evaluate(v01, v00));
        assertEquals(1.0, instance.evaluate(v01, v11));
        assertEquals(2.0, instance.evaluate(v00, v11));

        // Make sure that it works with negative points.
        assertEquals(1.0, instance.evaluate(v00, v01.scale(-1.0)));
        assertEquals(1.0, instance.evaluate(v00, v10.scale(-1.0)));
        assertEquals(2.0, instance.evaluate(v00, v11.scale(-1.0)));

        // power is still 1.0 here, so the expected distance is the L1 norm
        for (int i = 0; i < NUM_SAMPLES; i++)
        {
            Vector x = this.generateRandomFirstType();
            Vector y = this.generateRandomFirstType();

            double expected = x.minus(y).norm1();
            double result = instance.evaluate(x, y);
            assertEquals(expected, result);
            assertTrue(result >= 0.0);

            assertEquals(0.0, instance.evaluate(x, x), TOLERANCE);
            assertEquals(0.0, instance.evaluate(y, y), TOLERANCE);
        }
    }
} | bsd-3-clause |
LWJGL-CI/lwjgl3 | modules/lwjgl/vulkan/src/generated/java/org/lwjgl/vulkan/VkExportMemoryAllocateInfoKHR.java | 12279 | /*
* Copyright LWJGL. All rights reserved.
* License terms: https://www.lwjgl.org/license
* MACHINE GENERATED FILE, DO NOT EDIT
*/
package org.lwjgl.vulkan;
import javax.annotation.*;
import java.nio.*;
import org.lwjgl.*;
import org.lwjgl.system.*;
import static org.lwjgl.system.MemoryUtil.*;
import static org.lwjgl.system.MemoryStack.*;
/**
* See {@link VkExportMemoryAllocateInfo}.
*
* <h3>Layout</h3>
*
* <pre><code>
* struct VkExportMemoryAllocateInfoKHR {
* VkStructureType sType;
* void const * pNext;
* VkExternalMemoryHandleTypeFlags handleTypes;
* }</code></pre>
*/
public class VkExportMemoryAllocateInfoKHR extends VkExportMemoryAllocateInfo {

    // NOTE: machine-generated (see file header) — KHR alias of VkExportMemoryAllocateInfo;
    // all field storage lives in the superclass, and sType$Default() uses the core VK11 constant.

    /**
     * Creates a {@code VkExportMemoryAllocateInfoKHR} instance at the current position of the specified {@link ByteBuffer} container. Changes to the buffer's content will be
     * visible to the struct instance and vice versa.
     *
     * <p>The created instance holds a strong reference to the container object.</p>
     */
    public VkExportMemoryAllocateInfoKHR(ByteBuffer container) {
        super(container);
    }

    /** Sets the specified value to the {@code sType} field. */
    @Override
    public VkExportMemoryAllocateInfoKHR sType(@NativeType("VkStructureType") int value) { nsType(address(), value); return this; }
    /** Sets the {@link VK11#VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO} value to the {@code sType} field. */
    @Override
    public VkExportMemoryAllocateInfoKHR sType$Default() { return sType(VK11.VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO); }
    /** Sets the specified value to the {@code pNext} field. */
    @Override
    public VkExportMemoryAllocateInfoKHR pNext(@NativeType("void const *") long value) { npNext(address(), value); return this; }
    /** Sets the specified value to the {@code handleTypes} field. */
    @Override
    public VkExportMemoryAllocateInfoKHR handleTypes(@NativeType("VkExternalMemoryHandleTypeFlags") int value) { nhandleTypes(address(), value); return this; }

    /** Initializes this struct with the specified values. */
    @Override
    public VkExportMemoryAllocateInfoKHR set(
        int sType,
        long pNext,
        int handleTypes
    ) {
        sType(sType);
        pNext(pNext);
        handleTypes(handleTypes);

        return this;
    }

    /**
     * Copies the specified struct data to this struct.
     *
     * @param src the source struct
     *
     * @return this struct
     */
    public VkExportMemoryAllocateInfoKHR set(VkExportMemoryAllocateInfoKHR src) {
        memCopy(src.address(), address(), SIZEOF);
        return this;
    }

    // -----------------------------------

    /** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed. */
    public static VkExportMemoryAllocateInfoKHR malloc() {
        return wrap(VkExportMemoryAllocateInfoKHR.class, nmemAllocChecked(SIZEOF));
    }

    /** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed. */
    public static VkExportMemoryAllocateInfoKHR calloc() {
        return wrap(VkExportMemoryAllocateInfoKHR.class, nmemCallocChecked(1, SIZEOF));
    }

    /** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated with {@link BufferUtils}. */
    public static VkExportMemoryAllocateInfoKHR create() {
        ByteBuffer container = BufferUtils.createByteBuffer(SIZEOF);
        return wrap(VkExportMemoryAllocateInfoKHR.class, memAddress(container), container);
    }

    /** Returns a new {@code VkExportMemoryAllocateInfoKHR} instance for the specified memory address. */
    public static VkExportMemoryAllocateInfoKHR create(long address) {
        return wrap(VkExportMemoryAllocateInfoKHR.class, address);
    }

    /** Like {@link #create(long) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkExportMemoryAllocateInfoKHR createSafe(long address) {
        return address == NULL ? null : wrap(VkExportMemoryAllocateInfoKHR.class, address);
    }

    /**
     * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link MemoryUtil#memAlloc memAlloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer malloc(int capacity) {
        return wrap(Buffer.class, nmemAllocChecked(__checkMalloc(capacity, SIZEOF)), capacity);
    }

    /**
     * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link MemoryUtil#memCalloc memCalloc}. The instance must be explicitly freed.
     *
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer calloc(int capacity) {
        return wrap(Buffer.class, nmemCallocChecked(capacity, SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated with {@link BufferUtils}.
     *
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer create(int capacity) {
        ByteBuffer container = __create(capacity, SIZEOF);
        return wrap(Buffer.class, memAddress(container), capacity, container);
    }

    /**
     * Create a {@link VkExportMemoryAllocateInfoKHR.Buffer} instance at the specified memory.
     *
     * @param address  the memory address
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer create(long address, int capacity) {
        return wrap(Buffer.class, address, capacity);
    }

    /** Like {@link #create(long, int) create}, but returns {@code null} if {@code address} is {@code NULL}. */
    @Nullable
    public static VkExportMemoryAllocateInfoKHR.Buffer createSafe(long address, int capacity) {
        return address == NULL ? null : wrap(Buffer.class, address, capacity);
    }

    // -----------------------------------

    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR mallocStack() { return malloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR callocStack() { return calloc(stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR mallocStack(MemoryStack stack) { return malloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR callocStack(MemoryStack stack) { return calloc(stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer mallocStack(int capacity) { return malloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer callocStack(int capacity) { return calloc(capacity, stackGet()); }
    /** Deprecated for removal in 3.4.0. Use {@link #malloc(int, MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer mallocStack(int capacity, MemoryStack stack) { return malloc(capacity, stack); }
    /** Deprecated for removal in 3.4.0. Use {@link #calloc(int, MemoryStack)} instead. */
    @Deprecated public static VkExportMemoryAllocateInfoKHR.Buffer callocStack(int capacity, MemoryStack stack) { return calloc(capacity, stack); }

    /**
     * Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack the stack from which to allocate
     */
    public static VkExportMemoryAllocateInfoKHR malloc(MemoryStack stack) {
        return wrap(VkExportMemoryAllocateInfoKHR.class, stack.nmalloc(ALIGNOF, SIZEOF));
    }

    /**
     * Returns a new {@code VkExportMemoryAllocateInfoKHR} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack the stack from which to allocate
     */
    public static VkExportMemoryAllocateInfoKHR calloc(MemoryStack stack) {
        return wrap(VkExportMemoryAllocateInfoKHR.class, stack.ncalloc(ALIGNOF, 1, SIZEOF));
    }

    /**
     * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated on the specified {@link MemoryStack}.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer malloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.nmalloc(ALIGNOF, capacity * SIZEOF), capacity);
    }

    /**
     * Returns a new {@link VkExportMemoryAllocateInfoKHR.Buffer} instance allocated on the specified {@link MemoryStack} and initializes all its bits to zero.
     *
     * @param stack    the stack from which to allocate
     * @param capacity the buffer capacity
     */
    public static VkExportMemoryAllocateInfoKHR.Buffer calloc(int capacity, MemoryStack stack) {
        return wrap(Buffer.class, stack.ncalloc(ALIGNOF, capacity, SIZEOF), capacity);
    }

    // -----------------------------------

    /** An array of {@link VkExportMemoryAllocateInfoKHR} structs. */
    public static class Buffer extends VkExportMemoryAllocateInfo.Buffer {

        // prototype instance at a dummy address (-1L); returned by getElementFactory() and
        // repositioned by the superclass machinery to view individual elements
        private static final VkExportMemoryAllocateInfoKHR ELEMENT_FACTORY = VkExportMemoryAllocateInfoKHR.create(-1L);

        /**
         * Creates a new {@code VkExportMemoryAllocateInfoKHR.Buffer} instance backed by the specified container.
         *
         * Changes to the container's content will be visible to the struct buffer instance and vice versa. The two buffers' position, limit, and mark values
         * will be independent. The new buffer's position will be zero, its capacity and its limit will be the number of bytes remaining in this buffer divided
         * by {@link VkExportMemoryAllocateInfoKHR#SIZEOF}, and its mark will be undefined.
         *
         * <p>The created buffer instance holds a strong reference to the container object.</p>
         */
        public Buffer(ByteBuffer container) {
            super(container);
        }

        public Buffer(long address, int cap) {
            super(address, null, -1, 0, cap, cap);
        }

        Buffer(long address, @Nullable ByteBuffer container, int mark, int pos, int lim, int cap) {
            super(address, container, mark, pos, lim, cap);
        }

        @Override
        protected Buffer self() {
            return this;
        }

        @Override
        protected VkExportMemoryAllocateInfoKHR getElementFactory() {
            return ELEMENT_FACTORY;
        }

        /** Sets the specified value to the {@code sType} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer sType(@NativeType("VkStructureType") int value) { VkExportMemoryAllocateInfoKHR.nsType(address(), value); return this; }
        /** Sets the {@link VK11#VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO} value to the {@code sType} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer sType$Default() { return sType(VK11.VK_STRUCTURE_TYPE_EXPORT_MEMORY_ALLOCATE_INFO); }
        /** Sets the specified value to the {@code pNext} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer pNext(@NativeType("void const *") long value) { VkExportMemoryAllocateInfoKHR.npNext(address(), value); return this; }
        /** Sets the specified value to the {@code handleTypes} field. */
        @Override
        public VkExportMemoryAllocateInfoKHR.Buffer handleTypes(@NativeType("VkExternalMemoryHandleTypeFlags") int value) { VkExportMemoryAllocateInfoKHR.nhandleTypes(address(), value); return this; }

    }

}
ezegarra/microbrowser | src/prefuse/data/expression/parser/TokenMgrError.java | 4221 | /* Generated By:JavaCC: Do not edit this line. TokenMgrError.java Version 3.0 */
package prefuse.data.expression.parser;
public class TokenMgrError extends Error
{
   // NOTE(review): JavaCC 3.0 generated code (see file header) — regenerate rather than hand-edit.
   // No serialVersionUID is declared even though Error is Serializable; confirm with the
   // generator template before adding one.

   /*
    * Ordinals for various reasons why an Error of this type can be thrown.
    */

   /**
    * Lexical error occurred.
    */
   static final int LEXICAL_ERROR = 0;

   /**
    * An attempt was made to create a second instance of a static token manager.
    */
   static final int STATIC_LEXER_ERROR = 1;

   /**
    * Tried to change to an invalid lexical state.
    */
   static final int INVALID_LEXICAL_STATE = 2;

   /**
    * Detected (and bailed out of) an infinite loop in the token manager.
    */
   static final int LOOP_DETECTED = 3;

   /**
    * Indicates the reason why the exception is thrown. It will have
    * one of the above 4 values.
    */
   int errorCode;

   /**
    * Replaces unprintable characters by their escaped (or unicode escaped)
    * equivalents in the given string.
    * Note: NUL characters (code 0) are silently dropped from the output.
    */
   protected static final String addEscapes(String str) {
      StringBuffer retval = new StringBuffer();
      char ch;
      for (int i = 0; i < str.length(); i++) {
        switch (str.charAt(i))
        {
           case 0 :
              continue;
           case '\b':
              retval.append("\\b");
              continue;
           case '\t':
              retval.append("\\t");
              continue;
           case '\n':
              retval.append("\\n");
              continue;
           case '\f':
              retval.append("\\f");
              continue;
           case '\r':
              retval.append("\\r");
              continue;
           case '\"':
              retval.append("\\\"");
              continue;
           case '\'':
              retval.append("\\\'");
              continue;
           case '\\':
              retval.append("\\\\");
              continue;
           default:
              // anything outside printable ASCII becomes a \\uXXXX escape
              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
                 String s = "0000" + Integer.toString(ch, 16);
                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
              } else {
                 retval.append(ch);
              }
              continue;
        }
      }
      return retval.toString();
   }

   /**
    * Returns a detailed message for the Error when it is thrown by the
    * token manager to indicate a lexical error.
    * Parameters :
    *    EOFSeen     : indicates if EOF caused the lexical error
    *    curLexState : lexical state in which this error occurred
    *    errorLine   : line number when the error occurred
    *    errorColumn : column number when the error occurred
    *    errorAfter  : prefix that was seen before this error occurred
    *    curchar     : the offending character
    * Note: You can customize the lexical error message by modifying this method.
    */
   protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
      return("Lexical error at line " +
           errorLine + ", column " +
           errorColumn + ". Encountered: " +
           (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
           "after : \"" + addEscapes(errorAfter) + "\"");
   }

   /**
    * You can also modify the body of this method to customize your error messages.
    * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
    * of end-users concern, so you can return something like :
    *
    *     "Internal Error : Please file a bug report .... "
    *
    * from this method for such cases in the release version of your parser.
    */
   public String getMessage() {
      return super.getMessage();
   }

   /*
    * Constructors of various flavors follow.
    */

   public TokenMgrError() {
   }

   public TokenMgrError(String message, int reason) {
      super(message);
      errorCode = reason;
   }

   public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
      this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
   }
}
| bsd-3-clause |
dhis2/dhis2-core | dhis-2/dhis-web-api-test/src/test/java/org/hisp/dhis/webapi/controller/method/ApiMethodAllExcludeV32Controller.java | 2610 | /*
* Copyright (c) 2004-2022, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.webapi.controller.method;
import java.io.IOException;
import javax.servlet.http.HttpServletResponse;
import org.hisp.dhis.common.DhisApiVersion;
import org.hisp.dhis.webapi.mvc.annotation.ApiVersion;
import org.springframework.stereotype.Controller;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
/**
* @author Morten Olav Hansen <mortenoh@gmail.com>
*/
@Controller
@RequestMapping( "/method/testAllExcludeV32" )
public class ApiMethodAllExcludeV32Controller
{
    @GetMapping( "a" )
    @ApiVersion( value = DhisApiVersion.ALL, exclude = DhisApiVersion.V32 )
    public void testAllA( HttpServletResponse response )
        throws IOException
    {
        writeTestMarker( response );
    }

    @GetMapping( "b" )
    @ApiVersion( value = DhisApiVersion.ALL, exclude = DhisApiVersion.V32 )
    public void testAllB( HttpServletResponse response )
        throws IOException
    {
        writeTestMarker( response );
    }

    /** Writes the fixed marker body shared by both test endpoints. */
    private static void writeTestMarker( HttpServletResponse response )
        throws IOException
    {
        response.getWriter().println( "TEST" );
    }
}
| bsd-3-clause |
eoogbe/api-client-staging | generated/java/proto-google-common-protos/src/main/java/com/google/api/Advice.java | 17436 | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/api/config_change.proto
package com.google.api;
/**
* <pre>
* Generated advice about this change, used for providing more
* information about how a change will affect the existing service.
* </pre>
*
* Protobuf type {@code google.api.Advice}
*/
public final class Advice extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.api.Advice)
AdviceOrBuilder {
// Use Advice.newBuilder() to construct.
private Advice(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Advice() {
description_ = "";
}
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return com.google.protobuf.UnknownFieldSet.getDefaultInstance();
}
private Advice(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
this();
int mutable_bitField0_ = 0;
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!input.skipField(tag)) {
done = true;
}
break;
}
case 18: {
java.lang.String s = input.readStringRequireUtf8();
description_ = s;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e).setUnfinishedMessage(this);
} finally {
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.api.ConfigChangeProto.internal_static_google_api_Advice_descriptor;
}
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.api.ConfigChangeProto.internal_static_google_api_Advice_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.api.Advice.class, com.google.api.Advice.Builder.class);
}
public static final int DESCRIPTION_FIELD_NUMBER = 2;
private volatile java.lang.Object description_;
/**
* <pre>
* Useful description for why this advice was applied and what actions should
* be taken to mitigate any implied risks.
* </pre>
*
* <code>string description = 2;</code>
*/
public java.lang.String getDescription() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
description_ = s;
return s;
}
}
/**
* <pre>
* Useful description for why this advice was applied and what actions should
* be taken to mitigate any implied risks.
* </pre>
*
* <code>string description = 2;</code>
*/
public com.google.protobuf.ByteString
getDescriptionBytes() {
java.lang.Object ref = description_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
description_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (!getDescriptionBytes().isEmpty()) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, description_);
}
}
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!getDescriptionBytes().isEmpty()) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, description_);
}
memoizedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.api.Advice)) {
return super.equals(obj);
}
com.google.api.Advice other = (com.google.api.Advice) obj;
boolean result = true;
result = result && getDescription()
.equals(other.getDescription());
return result;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DESCRIPTION_FIELD_NUMBER;
hash = (53 * hash) + getDescription().hashCode();
hash = (29 * hash) + unknownFields.hashCode();
memoizedHashCode = hash;
return hash;
}
  // Standard generated parse entry points. All delegate to PARSER; the
  // stream-based overloads go through GeneratedMessageV3 helpers, which
  // surface parse failures via the declared exception types.
  public static com.google.api.Advice parseFrom(
      java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Advice parseFrom(
      java.nio.ByteBuffer data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Advice parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Advice parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Advice parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.api.Advice parseFrom(
      byte[] data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.api.Advice parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.api.Advice parseFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message body.
  public static com.google.api.Advice parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.api.Advice parseDelimitedFrom(
      java.io.InputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
  }
  public static com.google.api.Advice parseFrom(
      com.google.protobuf.CodedInputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input);
  }
  public static com.google.api.Advice parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3
        .parseWithIOException(PARSER, input, extensionRegistry);
  }
  /** Returns a fresh builder for this message type. */
  public Builder newBuilderForType() { return newBuilder(); }
  /** Returns a builder initialized with default (empty) field values. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  /** Returns a builder pre-populated from {@code prototype}. */
  public static Builder newBuilder(com.google.api.Advice prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  /**
   * Converts this message back to a builder. The default instance maps to an
   * empty builder; any other instance is merged into a new builder.
   */
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   * <pre>
   * Generated advice about this change, used for providing more
   * information about how a change will affect the existing service.
   * </pre>
   *
   * Protobuf type {@code google.api.Advice}
   */
  public static final class Builder extends
      com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
      // @@protoc_insertion_point(builder_implements:google.api.Advice)
      com.google.api.AdviceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor
        getDescriptor() {
      return com.google.api.ConfigChangeProto.internal_static_google_api_Advice_descriptor;
    }
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.api.ConfigChangeProto.internal_static_google_api_Advice_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.api.Advice.class, com.google.api.Advice.Builder.class);
    }
    // Construct using com.google.api.Advice.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(
        com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    private void maybeForceBuilderInitialization() {
      // Advice has no message/repeated fields, so there are no nested
      // field builders to pre-create even when alwaysUseFieldBuilders is set.
      if (com.google.protobuf.GeneratedMessageV3
              .alwaysUseFieldBuilders) {
      }
    }
    /** Resets all fields to proto3 defaults (description becomes ""). */
    public Builder clear() {
      super.clear();
      description_ = "";
      return this;
    }
    public com.google.protobuf.Descriptors.Descriptor
        getDescriptorForType() {
      return com.google.api.ConfigChangeProto.internal_static_google_api_Advice_descriptor;
    }
    public com.google.api.Advice getDefaultInstanceForType() {
      return com.google.api.Advice.getDefaultInstance();
    }
    /** Builds the message, throwing if it is not initialized (never happens for Advice). */
    public com.google.api.Advice build() {
      com.google.api.Advice result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    /** Copies the builder's current field values into a new immutable message. */
    public com.google.api.Advice buildPartial() {
      com.google.api.Advice result = new com.google.api.Advice(this);
      result.description_ = description_;
      onBuilt();
      return result;
    }
    public Builder clone() {
      return (Builder) super.clone();
    }
    // Reflective field mutators: all delegate to the GeneratedMessageV3 base
    // implementation, narrowing the return type to this Builder.
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.setField(field, value);
    }
    public Builder clearField(
        com.google.protobuf.Descriptors.FieldDescriptor field) {
      return (Builder) super.clearField(field);
    }
    public Builder clearOneof(
        com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return (Builder) super.clearOneof(oneof);
    }
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        int index, Object value) {
      return (Builder) super.setRepeatedField(field, index, value);
    }
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field,
        Object value) {
      return (Builder) super.addRepeatedField(field, value);
    }
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.api.Advice) {
        return mergeFrom((com.google.api.Advice)other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    /**
     * Merges another Advice into this builder. Proto3 merge semantics: the
     * other message's description wins only when it is non-empty.
     */
    public Builder mergeFrom(com.google.api.Advice other) {
      if (other == com.google.api.Advice.getDefaultInstance()) return this;
      if (!other.getDescription().isEmpty()) {
        description_ = other.description_;
        onChanged();
      }
      // NOTE(review): this second onChanged() duplicates the one above when the
      // description was copied; redundant but harmless (listeners fire twice).
      onChanged();
      return this;
    }
    public final boolean isInitialized() {
      return true;
    }
    /**
     * Parses from a stream and merges into this builder. On a parse failure the
     * partially parsed message (if any) is still merged before rethrowing.
     */
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      com.google.api.Advice parsedMessage = null;
      try {
        parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        parsedMessage = (com.google.api.Advice) e.getUnfinishedMessage();
        throw e.unwrapIOException();
      } finally {
        if (parsedMessage != null) {
          mergeFrom(parsedMessage);
        }
      }
      return this;
    }
    // Holds either a java.lang.String or a ByteString; converted lazily and
    // cached by the accessors below.
    private java.lang.Object description_ = "";
    /**
     * <pre>
     * Useful description for why this advice was applied and what actions should
     * be taken to mitigate any implied risks.
     * </pre>
     *
     * <code>string description = 2;</code>
     */
    public java.lang.String getDescription() {
      java.lang.Object ref = description_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        description_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Useful description for why this advice was applied and what actions should
     * be taken to mitigate any implied risks.
     * </pre>
     *
     * <code>string description = 2;</code>
     */
    public com.google.protobuf.ByteString
        getDescriptionBytes() {
      java.lang.Object ref = description_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        description_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Useful description for why this advice was applied and what actions should
     * be taken to mitigate any implied risks.
     * </pre>
     *
     * <code>string description = 2;</code>
     */
    public Builder setDescription(
        java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      description_ = value;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Useful description for why this advice was applied and what actions should
     * be taken to mitigate any implied risks.
     * </pre>
     *
     * <code>string description = 2;</code>
     */
    public Builder clearDescription() {
      description_ = getDefaultInstance().getDescription();
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Useful description for why this advice was applied and what actions should
     * be taken to mitigate any implied risks.
     * </pre>
     *
     * <code>string description = 2;</code>
     */
    public Builder setDescriptionBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      description_ = value;
      onChanged();
      return this;
    }
    // Unknown fields are intentionally discarded: both overrides below return
    // without storing the supplied set.
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return this;
    }
    // @@protoc_insertion_point(builder_scope:google.api.Advice)
  }
  // @@protoc_insertion_point(class_scope:google.api.Advice)
  // Shared immutable default instance; all empty Advice messages alias it.
  private static final com.google.api.Advice DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.api.Advice();
  }
  public static com.google.api.Advice getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Singleton wire-format parser; the stream-based constructor does the
  // actual field decoding.
  private static final com.google.protobuf.Parser<Advice>
      PARSER = new com.google.protobuf.AbstractParser<Advice>() {
    public Advice parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
        return new Advice(input, extensionRegistry);
    }
  };
  public static com.google.protobuf.Parser<Advice> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<Advice> getParserForType() {
    return PARSER;
  }
  public com.google.api.Advice getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
| bsd-3-clause |
axinging/chromium-crosswalk | chrome/android/java/src/org/chromium/chrome/browser/firstrun/ToSAckedReceiver.java | 2578 | // Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
package org.chromium.chrome.browser.firstrun;
import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;
import android.content.SharedPreferences;
import android.os.Bundle;
import org.chromium.base.ContextUtils;
import org.chromium.sync.signin.AccountManagerHelper;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
/**
* This receiver is notified when a user goes through the Setup Wizard and acknowledges
* the Chrome ToS so that we don't show the ToS string during our first run.
*/
/**
 * Broadcast receiver invoked when the Android Setup Wizard reports that a user
 * acknowledged the Chrome Terms of Service, so Chrome's own first-run flow can
 * skip showing the ToS again.
 */
public class ToSAckedReceiver extends BroadcastReceiver {
    private static final String TOS_ACKED_ACCOUNTS = "ToS acknowledged accounts";
    private static final String EXTRA_ACCOUNT_NAME = "TosAckedReceiver.account";

    @Override
    public void onReceive(Context context, Intent intent) {
        Bundle extras = intent.getExtras();
        if (extras == null) return;
        String ackedAccount = extras.getString(EXTRA_ACCOUNT_NAME, null);
        if (ackedAccount == null) return;

        SharedPreferences prefs = ContextUtils.getAppSharedPreferences();
        // Copy into a fresh set before mutating; SharedPreferences may hand back
        // its own internal instance. See crbug.com/568369.
        Set<String> updatedAccounts = new HashSet<String>(
                prefs.getStringSet(TOS_ACKED_ACCOUNTS, new HashSet<String>()));
        updatedAccounts.add(ackedAccount);
        prefs.edit().remove(TOS_ACKED_ACCOUNTS).apply();
        prefs.edit().putStringSet(TOS_ACKED_ACCOUNTS, updatedAccounts).apply();
    }

    /**
     * Checks whether any of the current google accounts has seen the ToS in setup wizard.
     *
     * @param context Context for the app.
     * @return Whether or not the the ToS has been seen.
     */
    public static boolean checkAnyUserHasSeenToS(Context context) {
        Set<String> ackedAccounts =
                ContextUtils.getAppSharedPreferences().getStringSet(
                        TOS_ACKED_ACCOUNTS, null);
        if (ackedAccounts == null || ackedAccounts.isEmpty()) return false;

        AccountManagerHelper accountHelper = AccountManagerHelper.get(context);
        List<String> googleAccounts = accountHelper.getGoogleAccountNames();
        if (googleAccounts.isEmpty()) return false;

        for (String account : googleAccounts) {
            if (ackedAccounts.contains(account)) return true;
        }
        return false;
    }
}
| bsd-3-clause |
dhimmel/owltools | OWLTools-Core/src/main/java/owltools/io/GraphRenderer.java | 139 | package owltools.io;
import owltools.graph.OWLGraphWrapper;
/**
 * Renders an {@link OWLGraphWrapper} to some output; the target format and
 * destination are decided by the implementation.
 */
public interface GraphRenderer {
	/**
	 * Render the given ontology graph wrapper.
	 *
	 * @param g the graph to render
	 */
	public void render(OWLGraphWrapper g);
}
| bsd-3-clause |
dhimmel/owltools | OWLTools-Oort/src/main/java/owltools/ontologyrelease/OntologyVersionTools.java | 1531 | package owltools.ontologyrelease;
import java.text.DateFormat;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.obolibrary.obo2owl.Obo2OWLConstants;
/**
 * Tools to handle the writing and reading of version information of
 * owl ontology files.
 *
 * @author hdietze
 *
 */
public class OntologyVersionTools {
	// SimpleDateFormat is not thread-safe; keep one instance per thread.
	private static final ThreadLocal<DateFormat> versionIRIDateFormat = new ThreadLocal<DateFormat>() {
		@Override
		protected DateFormat initialValue() {
			return new SimpleDateFormat("yyyy-MM-dd");
		}
	};
	// Matches version IRIs of the form <DEFAULT_IRI_PREFIX>.../<version>/...owl
	// and captures the <version> path segment.
	private static final Pattern versionIRIPattern = Pattern.compile(Obo2OWLConstants.DEFAULT_IRI_PREFIX+"\\S+/(.+)/\\S+\\.owl");
	private OntologyVersionTools() {
		// No instances, static methods only.
	}
	/**
	 * Extract the version segment from an ontology version IRI that follows
	 * the default OBO PURL layout (see {@link #versionIRIPattern}).
	 *
	 * @param versionIRI candidate version IRI, may be null
	 * @return the captured version segment (conventionally a YYYY-MM-DD date),
	 *         or null if the IRI is null, too short, or does not match
	 */
	public static String parseVersion(String versionIRI) {
		if (versionIRI == null || versionIRI.length() <= Obo2OWLConstants.DEFAULT_IRI_PREFIX.length()) {
			return null;
		}
		Matcher versionIRIMatcher = versionIRIPattern.matcher(versionIRI);
		if (versionIRIMatcher.matches()) {
			return versionIRIMatcher.group(1);
		}
		return null;
	}
	/**
	 * Format a date into the canonical format of YYYY-MM-DD.
	 *
	 * @param date the date to format (interpreted in the default time zone)
	 * @return formatted date string, e.g. "2024-01-31"
	 */
	public static String format(Date date) {
		return versionIRIDateFormat.get().format(date);
	}
}
| bsd-3-clause |